From aa9ab04e39d510b66118cbf291039fceb8c8cd13 Mon Sep 17 00:00:00 2001 From: Ehsan Date: Thu, 1 May 2025 14:20:03 -0700 Subject: [PATCH 01/16] feat: Add seven new types to public API (#796) * Add RegexValue class. * Add to/from proto logic. * Add comparison logic and unit tests. * Add serializer unit test. * Add Codable support, Improve Swift API, Add integration test. * Add more unit tests and integration tests. Also fixed a bug found by unit tests. * Add 6 other types' public API. * rename `.m` to `.mm` to stay consistent. * More unit tests. * WIP: Add the FieldValue methods for building new types. * Add the Swift API and the Obj-C Unit tests. * Add UserDataReader support for new types. * Add missing `extern`. * Add UserDataWriter support for types. Int32 is still missing. * UserDataWriter support for int32. * Update TypeOrder usages with new types. * Add comparison logic for more types. * Add Int32Value comparison logic. * Add SerializerTests for more types. * Use snake case. * Add more unit tests. * Fix bug and add integration test. * Add more integration tests. * Add more integration tests. * Add more integration and unit tests. * Expose public `isEqual` for new types and add tests for it. * Add cross-type order test. * Add Codable support along with integration tests for them. * Remove named parameter for Int32Value and BsonObjectId. * clang-format. * Use `uint8_t` for BsonBinaryData's subtype. * Add `description` for new types. * Reuse type check logic for new types. * Use uint8_t for BsonBinaryData. * Adds tests for FIRFieldValue static c'tors of new types. * Add a few missing tests. * Update tests to check listeners, src=cache, src=server. * Remove FieldValue factory methods. --- .../FirebaseFirestore/FIRBsonBinaryData.h | 15 + .../FirebaseFirestore/FIRBsonObjectId.h | 15 + .../FirebaseFirestore/FIRBsonTimestamp.h | 15 + .../FirebaseFirestore/FIRInt32Value.h | 15 + .../FirebaseFirestore/FIRMaxKey.h | 15 + .../FirebaseFirestore/FIRMinKey.h | 15 + .../FirebaseFirestore/FIRRegexValue.h | 15 + .../Firestore.xcodeproj/project.pbxproj | 40 +- .../xcschemes/Firestore_Tests_macOS.xcscheme | 28 +- .../Tests/API/FIRBsonTypesUnitTests.mm | 186 +++++ .../Example/Tests/API/FIRFieldValueTests.mm | 6 + .../Tests/Util/FSTIntegrationTestCase.mm | 6 + Firestore/Source/API/FIRBsonBinaryData.mm | 56 ++ Firestore/Source/API/FIRBsonObjectId.mm | 54 ++ Firestore/Source/API/FIRBsonTimestamp.mm | 56 ++ Firestore/Source/API/FIRFieldValue.mm | 7 + Firestore/Source/API/FIRInt32Value.mm | 54 ++ Firestore/Source/API/FIRMaxKey.mm | 57 ++ Firestore/Source/API/FIRMinKey.mm | 57 ++ Firestore/Source/API/FIRRegexValue.mm | 57 ++ Firestore/Source/API/FSTUserDataReader.mm | 163 +++++ Firestore/Source/API/FSTUserDataWriter.mm | 122 +++- .../FirebaseFirestore/FIRBsonBinaryData.h | 49 ++ .../FirebaseFirestore/FIRBsonObjectId.h | 45 ++ .../FirebaseFirestore/FIRBsonTimestamp.h | 50 ++ .../Public/FirebaseFirestore/FIRFieldValue.h | 7 + .../Public/FirebaseFirestore/FIRInt32Value.h | 45 ++ .../Public/FirebaseFirestore/FIRMaxKey.h | 36 + .../Public/FirebaseFirestore/FIRMinKey.h | 36 + .../Public/FirebaseFirestore/FIRRegexValue.h | 49 ++ .../Codable/BsonBinaryData+Codable.swift | 66 ++ .../Source/Codable/BsonObjectId+Codable.swift | 62 ++ .../Codable/BsonTimestamp+Codable.swift | 66 ++ .../Codable/CodablePassThroughTypes.swift | 9 +- .../Source/Codable/Int32Value+Codable.swift | 62 ++ .../Swift/Source/Codable/MaxKey+Codable.swift | 65 ++ .../Swift/Source/Codable/MinKey+Codable.swift | 65 ++ 
.../Source/Codable/RegexValue+Codable.swift | 66 ++ .../Source/Codable/VectorValue+Codable.swift | 2 +- .../BsonTypesIntegrationTests.swift | 427 ++++++++++++ .../Integration/CodableIntegrationTests.swift | 95 ++- .../SnapshotListenerSourceTests.swift | 83 +++ .../Swift/Tests/Integration/TypeTest.swift | 375 ++++++++++ Firestore/core/src/model/object_value.cc | 3 +- Firestore/core/src/model/value_util.cc | 642 +++++++++++++++++- Firestore/core/src/model/value_util.h | 150 +++- .../unit/bundle/bundle_serializer_test.cc | 88 +++ .../core/test/unit/model/document_test.cc | 26 + .../core/test/unit/model/object_value_test.cc | 84 ++- .../core/test/unit/model/value_util_test.cc | 187 ++++- .../core/test/unit/remote/serializer_test.cc | 99 +++ Firestore/core/test/unit/testutil/testutil.cc | 41 ++ Firestore/core/test/unit/testutil/testutil.h | 11 + 53 files changed, 4045 insertions(+), 100 deletions(-) create mode 100644 FirebaseFirestoreInternal/FirebaseFirestore/FIRBsonBinaryData.h create mode 100644 FirebaseFirestoreInternal/FirebaseFirestore/FIRBsonObjectId.h create mode 100644 FirebaseFirestoreInternal/FirebaseFirestore/FIRBsonTimestamp.h create mode 100644 FirebaseFirestoreInternal/FirebaseFirestore/FIRInt32Value.h create mode 100644 FirebaseFirestoreInternal/FirebaseFirestore/FIRMaxKey.h create mode 100644 FirebaseFirestoreInternal/FirebaseFirestore/FIRMinKey.h create mode 100644 FirebaseFirestoreInternal/FirebaseFirestore/FIRRegexValue.h create mode 100644 Firestore/Example/Tests/API/FIRBsonTypesUnitTests.mm create mode 100644 Firestore/Source/API/FIRBsonBinaryData.mm create mode 100644 Firestore/Source/API/FIRBsonObjectId.mm create mode 100644 Firestore/Source/API/FIRBsonTimestamp.mm create mode 100644 Firestore/Source/API/FIRInt32Value.mm create mode 100644 Firestore/Source/API/FIRMaxKey.mm create mode 100644 Firestore/Source/API/FIRMinKey.mm create mode 100644 Firestore/Source/API/FIRRegexValue.mm create mode 100644 Firestore/Source/Public/FirebaseFirestore/FIRBsonBinaryData.h create mode 100644 Firestore/Source/Public/FirebaseFirestore/FIRBsonObjectId.h create mode 100644 Firestore/Source/Public/FirebaseFirestore/FIRBsonTimestamp.h create mode 100644 Firestore/Source/Public/FirebaseFirestore/FIRInt32Value.h create mode 100644 Firestore/Source/Public/FirebaseFirestore/FIRMaxKey.h create mode 100644 Firestore/Source/Public/FirebaseFirestore/FIRMinKey.h create mode 100644 Firestore/Source/Public/FirebaseFirestore/FIRRegexValue.h create mode 100644 Firestore/Swift/Source/Codable/BsonBinaryData+Codable.swift create mode 100644 Firestore/Swift/Source/Codable/BsonObjectId+Codable.swift create mode 100644 Firestore/Swift/Source/Codable/BsonTimestamp+Codable.swift create mode 100644 Firestore/Swift/Source/Codable/Int32Value+Codable.swift create mode 100644 Firestore/Swift/Source/Codable/MaxKey+Codable.swift create mode 100644 Firestore/Swift/Source/Codable/MinKey+Codable.swift create mode 100644 Firestore/Swift/Source/Codable/RegexValue+Codable.swift create mode 100644 Firestore/Swift/Tests/Integration/BsonTypesIntegrationTests.swift create mode 100644 Firestore/Swift/Tests/Integration/TypeTest.swift diff --git a/FirebaseFirestoreInternal/FirebaseFirestore/FIRBsonBinaryData.h b/FirebaseFirestoreInternal/FirebaseFirestore/FIRBsonBinaryData.h new file mode 100644 index 00000000000..688333d71d5 --- /dev/null +++ b/FirebaseFirestoreInternal/FirebaseFirestore/FIRBsonBinaryData.h @@ -0,0 +1,15 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// 
you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#import diff --git a/FirebaseFirestoreInternal/FirebaseFirestore/FIRBsonObjectId.h b/FirebaseFirestoreInternal/FirebaseFirestore/FIRBsonObjectId.h new file mode 100644 index 00000000000..485f6356f89 --- /dev/null +++ b/FirebaseFirestoreInternal/FirebaseFirestore/FIRBsonObjectId.h @@ -0,0 +1,15 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#import diff --git a/FirebaseFirestoreInternal/FirebaseFirestore/FIRBsonTimestamp.h b/FirebaseFirestoreInternal/FirebaseFirestore/FIRBsonTimestamp.h new file mode 100644 index 00000000000..bc116101b82 --- /dev/null +++ b/FirebaseFirestoreInternal/FirebaseFirestore/FIRBsonTimestamp.h @@ -0,0 +1,15 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#import diff --git a/FirebaseFirestoreInternal/FirebaseFirestore/FIRInt32Value.h b/FirebaseFirestoreInternal/FirebaseFirestore/FIRInt32Value.h new file mode 100644 index 00000000000..d06f5f34aec --- /dev/null +++ b/FirebaseFirestoreInternal/FirebaseFirestore/FIRInt32Value.h @@ -0,0 +1,15 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#import diff --git a/FirebaseFirestoreInternal/FirebaseFirestore/FIRMaxKey.h b/FirebaseFirestoreInternal/FirebaseFirestore/FIRMaxKey.h new file mode 100644 index 00000000000..506b7c20660 --- /dev/null +++ b/FirebaseFirestoreInternal/FirebaseFirestore/FIRMaxKey.h @@ -0,0 +1,15 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#import diff --git a/FirebaseFirestoreInternal/FirebaseFirestore/FIRMinKey.h b/FirebaseFirestoreInternal/FirebaseFirestore/FIRMinKey.h new file mode 100644 index 00000000000..7a416ba9416 --- /dev/null +++ b/FirebaseFirestoreInternal/FirebaseFirestore/FIRMinKey.h @@ -0,0 +1,15 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#import diff --git a/FirebaseFirestoreInternal/FirebaseFirestore/FIRRegexValue.h b/FirebaseFirestoreInternal/FirebaseFirestore/FIRRegexValue.h new file mode 100644 index 00000000000..2464c55fc20 --- /dev/null +++ b/FirebaseFirestoreInternal/FirebaseFirestore/FIRRegexValue.h @@ -0,0 +1,15 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#import diff --git a/Firestore/Example/Firestore.xcodeproj/project.pbxproj b/Firestore/Example/Firestore.xcodeproj/project.pbxproj index 8deefcabab8..4110ab951a1 100644 --- a/Firestore/Example/Firestore.xcodeproj/project.pbxproj +++ b/Firestore/Example/Firestore.xcodeproj/project.pbxproj @@ -237,6 +237,7 @@ 20A93AC59CD5A7AC41F10412 /* thread_safe_memoizer_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 1A8141230C7E3986EACEF0B6 /* thread_safe_memoizer_test.cc */; }; 211A60ECA3976D27C0BF59BB /* md5_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 3D050936A2D52257FD17FB6E /* md5_test.cc */; }; 21836C4D9D48F962E7A3A244 /* ordered_code_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = AB380D03201BC6E400D97691 /* ordered_code_test.cc */; }; + 21966DA1684600B6B9B912FE /* BsonTypesIntegrationTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = CC5CCD0437D846AB23B12C99 /* BsonTypesIntegrationTests.swift */; }; 21A2A881F71CB825299DF06E /* hard_assert_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 444B7AB3F5A2929070CB1363 /* hard_assert_test.cc */; }; 21C17F15579341289AD01051 /* persistence_testing.cc in Sources */ = {isa = PBXBuildFile; fileRef = 9113B6F513D0473AEABBAF1F /* persistence_testing.cc */; }; 21E588CF29C72813D8A7A0A1 /* FSTExceptionCatcher.m in Sources */ = {isa = PBXBuildFile; fileRef = B8BFD9B37D1029D238BDD71E /* FSTExceptionCatcher.m */; }; @@ -435,6 +436,7 @@ 44C4244E42FFFB6E9D7F28BA /* byte_stream_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 432C71959255C5DBDF522F52 /* byte_stream_test.cc */; }; 44EAF3E6EAC0CC4EB2147D16 /* transform_operation_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 33607A3AE91548BD219EC9C6 /* transform_operation_test.cc */; }; 451EFFB413364E5A420F8B2D /* thread_safe_memoizer_testing_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = EA10515F99A42D71DA2D2841 /* thread_safe_memoizer_testing_test.cc */; }; + 455C31EB671A1EC9EB7A58CC /* BsonTypesIntegrationTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = CC5CCD0437D846AB23B12C99 /* BsonTypesIntegrationTests.swift */; }; 4562CDD90F5FF0491F07C5DA /* leveldb_opener_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 75860CD13AF47EB1EA39EC2F /* leveldb_opener_test.cc */; }; 457171CE2510EEA46F7D8A30 /* FIRFirestoreTests.mm in Sources */ = {isa = PBXBuildFile; fileRef = 5467FAFF203E56F8009C9584 /* FIRFirestoreTests.mm */; }; 45939AFF906155EA27D281AB /* annotations.pb.cc in Sources */ = {isa = PBXBuildFile; fileRef = 618BBE9520B89AAC00B5BCE7 /* annotations.pb.cc */; }; @@ -755,6 +757,7 @@ 627253FDEC6BB5549FE77F4E /* tree_sorted_map_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 549CCA4D20A36DBB00BCEB75 /* tree_sorted_map_test.cc */; }; 62B1C1100A8C68D94565916C /* document_overlay_cache_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = FFCA39825D9678A03D1845D0 /* document_overlay_cache_test.cc */; }; 62DA31B79FE97A90EEF28B0B /* delayed_constructor_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = D0A6E9136804A41CEC9D55D4 /* delayed_constructor_test.cc */; }; + 62E181B9AB1568F9D332EA7C /* FIRBsonTypesUnitTests.mm in Sources */ = {isa = PBXBuildFile; fileRef = 82E339063156F8C99818E355 /* FIRBsonTypesUnitTests.mm */; }; 62E54B842A9E910B003347C8 /* IndexingTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 62E54B832A9E910A003347C8 /* IndexingTests.swift */; }; 62E54B852A9E910B003347C8 /* IndexingTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 62E54B832A9E910A003347C8 /* IndexingTests.swift */; }; 62E54B862A9E910B003347C8 
/* IndexingTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 62E54B832A9E910A003347C8 /* IndexingTests.swift */; }; @@ -888,6 +891,7 @@ 79D86DD18BB54D2D69DC457F /* leveldb_remote_document_cache_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 0840319686A223CC4AD3FAB1 /* leveldb_remote_document_cache_test.cc */; }; 7A2D523AEF58B1413CC8D64F /* query_engine_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = B8A853940305237AFDA8050B /* query_engine_test.cc */; }; 7A3BE0ED54933C234FDE23D1 /* leveldb_util_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 332485C4DCC6BA0DBB5E31B7 /* leveldb_util_test.cc */; }; + 7A5E96499414E3D3DCFFF52F /* FIRBsonTypesUnitTests.mm in Sources */ = {isa = PBXBuildFile; fileRef = 82E339063156F8C99818E355 /* FIRBsonTypesUnitTests.mm */; }; 7A66A2CB5CF33F0C28202596 /* status_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 54A0352C20A3B3D7003E0143 /* status_test.cc */; }; 7A7DB86955670B85B4514A1F /* Validation_BloomFilterTest_MD5_1_0001_bloom_filter_proto.json in Resources */ = {isa = PBXBuildFile; fileRef = 4B59C0A7B2A4548496ED4E7D /* Validation_BloomFilterTest_MD5_1_0001_bloom_filter_proto.json */; }; 7A7EC216A0015D7620B4FF3E /* string_format_apple_test.mm in Sources */ = {isa = PBXBuildFile; fileRef = 9CFD366B783AE27B9E79EE7A /* string_format_apple_test.mm */; }; @@ -907,6 +911,7 @@ 7C1DC1B44729381126D083AE /* leveldb_snappy_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = D9D94300B9C02F7069523C00 /* leveldb_snappy_test.cc */; }; 7C5E017689012489AAB7718D /* CodableGeoPointTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5495EB022040E90200EBA509 /* CodableGeoPointTests.swift */; }; 7C7BA1DB0B66EB899A928283 /* hashing_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 54511E8D209805F8005BD28F /* hashing_test.cc */; }; + 7CD026FE4246C540F4231E4C /* FIRBsonTypesUnitTests.mm in Sources */ = {isa = PBXBuildFile; fileRef = 82E339063156F8C99818E355 /* FIRBsonTypesUnitTests.mm */; }; 7D25D41B013BB70ADE526055 /* target_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 526D755F65AC676234F57125 /* target_test.cc */; }; 7D320113FD076A1EF9A8B612 /* filter_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = F02F734F272C3C70D1307076 /* filter_test.cc */; }; 7D3207DEE229EFCF16E52693 /* Validation_BloomFilterTest_MD5_500_01_bloom_filter_proto.json in Resources */ = {isa = PBXBuildFile; fileRef = 4BD051DBE754950FEAC7A446 /* Validation_BloomFilterTest_MD5_500_01_bloom_filter_proto.json */; }; @@ -996,6 +1001,7 @@ 8C39F6D4B3AA9074DF00CFB8 /* string_util_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = AB380CFC201A2EE200D97691 /* string_util_test.cc */; }; 8C602DAD4E8296AB5EFB962A /* firestore.pb.cc in Sources */ = {isa = PBXBuildFile; fileRef = 544129D421C2DDC800EFB9CC /* firestore.pb.cc */; }; 8C82D4D3F9AB63E79CC52DC8 /* Pods_Firestore_IntegrationTests_iOS.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = ECEBABC7E7B693BE808A1052 /* Pods_Firestore_IntegrationTests_iOS.framework */; }; + 8CBCEB837CE378D44135F64A /* TypeTest.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2CBE52190D157CE1096CD12E /* TypeTest.swift */; }; 8D0EF43F1B7B156550E65C20 /* FSTGoogleTestTests.mm in Sources */ = {isa = PBXBuildFile; fileRef = 54764FAE1FAA21B90085E60A /* FSTGoogleTestTests.mm */; }; 8D67BAAD6D2F1913BACA6AC1 /* thread_safe_memoizer_testing.cc in Sources */ = {isa = PBXBuildFile; fileRef = 6E42FA109D363EA7F3387AAE /* thread_safe_memoizer_testing.cc */; }; 8DBA8DC55722ED9D3A1BB2C9 /* 
Validation_BloomFilterTest_MD5_5000_1_membership_test_result.json in Resources */ = {isa = PBXBuildFile; fileRef = 1A7D48A017ECB54FD381D126 /* Validation_BloomFilterTest_MD5_5000_1_membership_test_result.json */; }; @@ -1075,6 +1081,7 @@ 9D71628E38D9F64C965DF29E /* FSTAPIHelpers.mm in Sources */ = {isa = PBXBuildFile; fileRef = 5492E04E202154AA00B64F25 /* FSTAPIHelpers.mm */; }; 9E1997789F19BF2E9029012E /* FIRCompositeIndexQueryTests.mm in Sources */ = {isa = PBXBuildFile; fileRef = 65AF0AB593C3AD81A1F1A57E /* FIRCompositeIndexQueryTests.mm */; }; 9E656F4FE92E8BFB7F625283 /* to_string_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = B696858D2214B53900271095 /* to_string_test.cc */; }; + 9ED94C2008F1475A0DC6D3BE /* BsonTypesIntegrationTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = CC5CCD0437D846AB23B12C99 /* BsonTypesIntegrationTests.swift */; }; 9EE1447AA8E68DF98D0590FF /* precondition_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 549CCA5520A36E1F00BCEB75 /* precondition_test.cc */; }; 9EE81B1FB9B7C664B7B0A904 /* resume_token_spec_test.json in Resources */ = {isa = PBXBuildFile; fileRef = 54DA12A41F315EE100DD57A1 /* resume_token_spec_test.json */; }; 9F41D724D9947A89201495AD /* limit_spec_test.json in Resources */ = {isa = PBXBuildFile; fileRef = 54DA129F1F315EE100DD57A1 /* limit_spec_test.json */; }; @@ -1128,6 +1135,7 @@ A873EE3C8A97C90BA978B68A /* firebase_app_check_credentials_provider_test.mm in Sources */ = {isa = PBXBuildFile; fileRef = F119BDDF2F06B3C0883B8297 /* firebase_app_check_credentials_provider_test.mm */; }; A8AF92A35DFA30EEF9C27FB7 /* database_info_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = AB38D92E20235D22000A432D /* database_info_test.cc */; }; A8C9FF6D13E6C83D4AB54EA7 /* secure_random_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 54740A531FC913E500713A1A /* secure_random_test.cc */; }; + A8F0E84A4D8F44B4EEE3155C /* FIRBsonTypesUnitTests.mm in Sources */ = {isa = PBXBuildFile; fileRef = 82E339063156F8C99818E355 /* FIRBsonTypesUnitTests.mm */; }; A907244EE37BC32C8D82948E /* FSTSpecTests.mm in Sources */ = {isa = PBXBuildFile; fileRef = 5492E03020213FFC00B64F25 /* FSTSpecTests.mm */; }; A9206FF8FF8834347E9C7DDB /* leveldb_overlay_migration_manager_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = D8A6D52723B1BABE1B7B8D8F /* leveldb_overlay_migration_manager_test.cc */; }; A97ED2BAAEDB0F765BBD5F98 /* local_store_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 307FF03D0297024D59348EBD /* local_store_test.cc */; }; @@ -1216,6 +1224,7 @@ B54BA1E76636C0C93334271B /* settings_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = DD12BC1DB2480886D2FB0005 /* settings_test.cc */; }; B576823475FBCA5EFA583F9C /* leveldb_migrations_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = EF83ACD5E1E9F25845A9ACED /* leveldb_migrations_test.cc */; }; B592DB7DB492B1C1D5E67D01 /* write.pb.cc in Sources */ = {isa = PBXBuildFile; fileRef = 544129D921C2DDC800EFB9CC /* write.pb.cc */; }; + B59498DE96F8B6F8D5C0788F /* TypeTest.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2CBE52190D157CE1096CD12E /* TypeTest.swift */; }; B5AEF7E4EBC29653DEE856A2 /* strerror_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 358C3B5FE573B1D60A4F7592 /* strerror_test.cc */; }; B60BAF9ED610F9D4E245EEB3 /* Validation_BloomFilterTest_MD5_5000_1_membership_test_result.json in Resources */ = {isa = PBXBuildFile; fileRef = 1A7D48A017ECB54FD381D126 /* Validation_BloomFilterTest_MD5_5000_1_membership_test_result.json */; }; B6152AD7202A53CB000E5744 
/* document_key_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = B6152AD5202A5385000E5744 /* document_key_test.cc */; }; @@ -1450,6 +1459,7 @@ DD04F7FE7A1ADE230A247DBC /* byte_stream_apple_test.mm in Sources */ = {isa = PBXBuildFile; fileRef = 7628664347B9C96462D4BF17 /* byte_stream_apple_test.mm */; }; DD0F288108714D5A406D0A9F /* Validation_BloomFilterTest_MD5_1_01_membership_test_result.json in Resources */ = {isa = PBXBuildFile; fileRef = 5C68EE4CB94C0DD6E333F546 /* Validation_BloomFilterTest_MD5_1_01_membership_test_result.json */; }; DD213F68A6F79E1D4924BD95 /* Pods_Firestore_Example_macOS.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = E42355285B9EF55ABD785792 /* Pods_Firestore_Example_macOS.framework */; }; + DD540A3D4C3FC45FDBD89544 /* TypeTest.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2CBE52190D157CE1096CD12E /* TypeTest.swift */; }; DD5976A45071455FF3FE74B8 /* string_win_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 79507DF8378D3C42F5B36268 /* string_win_test.cc */; }; DD6C480629B3F87933FAF440 /* filesystem_testing.cc in Sources */ = {isa = PBXBuildFile; fileRef = BA02DA2FCD0001CFC6EB08DA /* filesystem_testing.cc */; }; DD935E243A64A4EB688E4C1C /* credentials_provider_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 2F4FA4576525144C5069A7A5 /* credentials_provider_test.cc */; }; @@ -1490,6 +1500,7 @@ E3319DC1804B69F0ED1FFE02 /* memory_mutation_queue_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 74FBEFA4FE4B12C435011763 /* memory_mutation_queue_test.cc */; }; E375FBA0632EFB4D14C4E5A9 /* FSTGoogleTestTests.mm in Sources */ = {isa = PBXBuildFile; fileRef = 54764FAE1FAA21B90085E60A /* FSTGoogleTestTests.mm */; }; E37C52277CD00C57E5848A0E /* Validation_BloomFilterTest_MD5_1_01_membership_test_result.json in Resources */ = {isa = PBXBuildFile; fileRef = 5C68EE4CB94C0DD6E333F546 /* Validation_BloomFilterTest_MD5_1_01_membership_test_result.json */; }; + E3D0FC852ADF4BEE74460FEF /* FIRBsonTypesUnitTests.mm in Sources */ = {isa = PBXBuildFile; fileRef = 82E339063156F8C99818E355 /* FIRBsonTypesUnitTests.mm */; }; E434ACDF63F219F3031F292E /* ConditionalConformanceTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = E3228F51DCDC2E90D5C58F97 /* ConditionalConformanceTests.swift */; }; E435450184AEB51EE8435F66 /* write.pb.cc in Sources */ = {isa = PBXBuildFile; fileRef = 544129D921C2DDC800EFB9CC /* write.pb.cc */; }; E441A53D035479C53C74A0E6 /* recovery_spec_test.json in Resources */ = {isa = PBXBuildFile; fileRef = 9C1AFCC9E616EC33D6E169CF /* recovery_spec_test.json */; }; @@ -1640,6 +1651,7 @@ FD6F5B4497D670330E7F89DA /* document_overlay_cache_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = FFCA39825D9678A03D1845D0 /* document_overlay_cache_test.cc */; }; FD8EA96A604E837092ACA51D /* ordered_code_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = AB380D03201BC6E400D97691 /* ordered_code_test.cc */; }; FE20E696E014CDCE918E91D6 /* md5_testing.cc in Sources */ = {isa = PBXBuildFile; fileRef = E2E39422953DE1D3C7B97E77 /* md5_testing.cc */; }; + FE4AC400F8F2D49B3E806420 /* FIRBsonTypesUnitTests.mm in Sources */ = {isa = PBXBuildFile; fileRef = 82E339063156F8C99818E355 /* FIRBsonTypesUnitTests.mm */; }; FE701C2D739A5371BCBD62B9 /* leveldb_mutation_queue_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 5C7942B6244F4C416B11B86C /* leveldb_mutation_queue_test.cc */; }; FE9131E2D84A560D287B6F90 /* resource.pb.cc in Sources */ = {isa = PBXBuildFile; fileRef = 1C3F7302BF4AE6CBC00ECDD0 /* resource.pb.cc */; }; 
FF3405218188DFCE586FB26B /* app_testing.mm in Sources */ = {isa = PBXBuildFile; fileRef = 5467FB07203E6A44009C9584 /* app_testing.mm */; }; @@ -1743,6 +1755,7 @@ 29D9C76922DAC6F710BC1EF4 /* memory_document_overlay_cache_test.cc */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.cpp.cpp; path = memory_document_overlay_cache_test.cc; sourceTree = ""; }; 2A0CF41BA5AED6049B0BEB2C /* objc_type_traits_apple_test.mm */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.cpp.objcpp; path = objc_type_traits_apple_test.mm; sourceTree = ""; }; 2B50B3A0DF77100EEE887891 /* Pods_Firestore_Tests_iOS.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_Firestore_Tests_iOS.framework; sourceTree = BUILT_PRODUCTS_DIR; }; + 2CBE52190D157CE1096CD12E /* TypeTest.swift */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.swift; path = TypeTest.swift; sourceTree = ""; }; 2D7472BC70C024D736FF74D9 /* watch_change_test.cc */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.cpp.cpp; path = watch_change_test.cc; sourceTree = ""; }; 2DAA26538D1A93A39F8AC373 /* nanopb_testing.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; name = nanopb_testing.h; path = nanopb/nanopb_testing.h; sourceTree = ""; }; 2E48431B0EDA400BEA91D4AB /* Pods-Firestore_Tests_tvOS.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Firestore_Tests_tvOS.debug.xcconfig"; path = "Pods/Target Support Files/Pods-Firestore_Tests_tvOS/Pods-Firestore_Tests_tvOS.debug.xcconfig"; sourceTree = ""; }; @@ -1777,7 +1790,7 @@ 4334F87873015E3763954578 /* status_testing.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; path = status_testing.h; sourceTree = ""; }; 4375BDCDBCA9938C7F086730 /* Validation_BloomFilterTest_MD5_5000_1_bloom_filter_proto.json */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.json; name = Validation_BloomFilterTest_MD5_5000_1_bloom_filter_proto.json; path = bloom_filter_golden_test_data/Validation_BloomFilterTest_MD5_5000_1_bloom_filter_proto.json; sourceTree = ""; }; 444B7AB3F5A2929070CB1363 /* hard_assert_test.cc */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.cpp.cpp; path = hard_assert_test.cc; sourceTree = ""; }; - 4564AD9C55EC39C080EB9476 /* globals_cache_test.cc */ = {isa = PBXFileReference; includeInIndex = 1; path = globals_cache_test.cc; sourceTree = ""; }; + 4564AD9C55EC39C080EB9476 /* globals_cache_test.cc */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.cpp.cpp; path = globals_cache_test.cc; sourceTree = ""; }; 478DC75A0DCA6249A616DD30 /* Validation_BloomFilterTest_MD5_500_0001_membership_test_result.json */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.json; name = Validation_BloomFilterTest_MD5_500_0001_membership_test_result.json; path = bloom_filter_golden_test_data/Validation_BloomFilterTest_MD5_500_0001_membership_test_result.json; sourceTree = ""; }; 48D0915834C3D234E5A875A9 /* grpc_stream_tester.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; path = grpc_stream_tester.h; sourceTree = ""; }; 4B3E4A77493524333133C5DC /* Validation_BloomFilterTest_MD5_50000_1_bloom_filter_proto.json */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.json; name 
= Validation_BloomFilterTest_MD5_50000_1_bloom_filter_proto.json; path = bloom_filter_golden_test_data/Validation_BloomFilterTest_MD5_50000_1_bloom_filter_proto.json; sourceTree = ""; }; @@ -1895,7 +1908,7 @@ 5B5414D28802BC76FDADABD6 /* stream_test.cc */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.cpp.cpp; path = stream_test.cc; sourceTree = ""; }; 5B96CC29E9946508F022859C /* Validation_BloomFilterTest_MD5_50000_0001_membership_test_result.json */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.json; name = Validation_BloomFilterTest_MD5_50000_0001_membership_test_result.json; path = bloom_filter_golden_test_data/Validation_BloomFilterTest_MD5_50000_0001_membership_test_result.json; sourceTree = ""; }; 5C68EE4CB94C0DD6E333F546 /* Validation_BloomFilterTest_MD5_1_01_membership_test_result.json */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.json; name = Validation_BloomFilterTest_MD5_1_01_membership_test_result.json; path = bloom_filter_golden_test_data/Validation_BloomFilterTest_MD5_1_01_membership_test_result.json; sourceTree = ""; }; - 5C6DEA63FBDE19D841291723 /* memory_globals_cache_test.cc */ = {isa = PBXFileReference; includeInIndex = 1; path = memory_globals_cache_test.cc; sourceTree = ""; }; + 5C6DEA63FBDE19D841291723 /* memory_globals_cache_test.cc */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.cpp.cpp; path = memory_globals_cache_test.cc; sourceTree = ""; }; 5C7942B6244F4C416B11B86C /* leveldb_mutation_queue_test.cc */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.cpp.cpp; path = leveldb_mutation_queue_test.cc; sourceTree = ""; }; 5CAE131920FFFED600BE9A4A /* Firestore_Benchmarks_iOS.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = Firestore_Benchmarks_iOS.xctest; sourceTree = BUILT_PRODUCTS_DIR; }; 5CAE131D20FFFED600BE9A4A /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; @@ -1945,7 +1958,7 @@ 69E6C311558EC77729A16CF1 /* Pods-Firestore_Example_iOS-Firestore_SwiftTests_iOS.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Firestore_Example_iOS-Firestore_SwiftTests_iOS.debug.xcconfig"; path = "Pods/Target Support Files/Pods-Firestore_Example_iOS-Firestore_SwiftTests_iOS/Pods-Firestore_Example_iOS-Firestore_SwiftTests_iOS.debug.xcconfig"; sourceTree = ""; }; 6A7A30A2DB3367E08939E789 /* bloom_filter.pb.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; path = bloom_filter.pb.h; sourceTree = ""; }; 6AE927CDFC7A72BF825BE4CB /* Pods-Firestore_Tests_tvOS.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Firestore_Tests_tvOS.release.xcconfig"; path = "Pods/Target Support Files/Pods-Firestore_Tests_tvOS/Pods-Firestore_Tests_tvOS.release.xcconfig"; sourceTree = ""; }; - 6E42FA109D363EA7F3387AAE /* thread_safe_memoizer_testing.cc */ = {isa = PBXFileReference; includeInIndex = 1; path = thread_safe_memoizer_testing.cc; sourceTree = ""; }; + 6E42FA109D363EA7F3387AAE /* thread_safe_memoizer_testing.cc */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.cpp.cpp; path = thread_safe_memoizer_testing.cc; sourceTree = ""; }; 6E8302DE210222ED003E1EA3 /* FSTFuzzTestFieldPath.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; 
path = FSTFuzzTestFieldPath.h; sourceTree = ""; }; 6E8302DF21022309003E1EA3 /* FSTFuzzTestFieldPath.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = FSTFuzzTestFieldPath.mm; sourceTree = ""; }; 6EA39FDD20FE820E008D461F /* FSTFuzzTestSerializer.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = FSTFuzzTestSerializer.mm; sourceTree = ""; }; @@ -1981,6 +1994,7 @@ 7C3F995E040E9E9C5E8514BB /* query_listener_test.cc */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.cpp.cpp; path = query_listener_test.cc; sourceTree = ""; }; 7C5C40C7BFBB86032F1DC632 /* FSTExceptionCatcher.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; path = FSTExceptionCatcher.h; sourceTree = ""; }; 7EB299CF85034F09CFD6F3FD /* remote_document_cache_test.cc */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.cpp.cpp; path = remote_document_cache_test.cc; sourceTree = ""; }; + 82E339063156F8C99818E355 /* FIRBsonTypesUnitTests.mm */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.cpp.objcpp; path = FIRBsonTypesUnitTests.mm; sourceTree = ""; }; 84076EADF6872C78CDAC7291 /* bundle_builder.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; path = bundle_builder.h; sourceTree = ""; }; 84434E57CA72951015FC71BC /* Pods-Firestore_FuzzTests_iOS.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Firestore_FuzzTests_iOS.debug.xcconfig"; path = "Pods/Target Support Files/Pods-Firestore_FuzzTests_iOS/Pods-Firestore_FuzzTests_iOS.debug.xcconfig"; sourceTree = ""; }; 872C92ABD71B12784A1C5520 /* async_testing.cc */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.cpp.cpp; path = async_testing.cc; sourceTree = ""; }; @@ -2080,6 +2094,7 @@ C939D1789E38C09F9A0C1157 /* Validation_BloomFilterTest_MD5_1_0001_membership_test_result.json */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.json; name = Validation_BloomFilterTest_MD5_1_0001_membership_test_result.json; path = bloom_filter_golden_test_data/Validation_BloomFilterTest_MD5_1_0001_membership_test_result.json; sourceTree = ""; }; CB7B2D4691C380DE3EB59038 /* lru_garbage_collector_test.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; path = lru_garbage_collector_test.h; sourceTree = ""; }; CC572A9168BBEF7B83E4BBC5 /* view_snapshot_test.cc */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.cpp.cpp; path = view_snapshot_test.cc; sourceTree = ""; }; + CC5CCD0437D846AB23B12C99 /* BsonTypesIntegrationTests.swift */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.swift; path = BsonTypesIntegrationTests.swift; sourceTree = ""; }; CCC9BD953F121B9E29F9AA42 /* user_test.cc */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.cpp.cpp; name = user_test.cc; path = credentials/user_test.cc; sourceTree = ""; }; CD422AF3E4515FB8E9BE67A0 /* equals_tester.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; path = equals_tester.h; sourceTree = ""; }; CE37875365497FFA8687B745 /* message_test.cc */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.cpp.cpp; name = message_test.cc; path = nanopb/message_test.cc; sourceTree = ""; }; @@ -2123,7 +2138,7 @@ 
E42355285B9EF55ABD785792 /* Pods_Firestore_Example_macOS.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_Firestore_Example_macOS.framework; sourceTree = BUILT_PRODUCTS_DIR; }; E592181BFD7C53C305123739 /* Pods-Firestore_Tests_iOS.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Firestore_Tests_iOS.debug.xcconfig"; path = "Pods/Target Support Files/Pods-Firestore_Tests_iOS/Pods-Firestore_Tests_iOS.debug.xcconfig"; sourceTree = ""; }; E76F0CDF28E5FA62D21DE648 /* leveldb_target_cache_test.cc */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.cpp.cpp; path = leveldb_target_cache_test.cc; sourceTree = ""; }; - EA10515F99A42D71DA2D2841 /* thread_safe_memoizer_testing_test.cc */ = {isa = PBXFileReference; includeInIndex = 1; path = thread_safe_memoizer_testing_test.cc; sourceTree = ""; }; + EA10515F99A42D71DA2D2841 /* thread_safe_memoizer_testing_test.cc */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.cpp.cpp; path = thread_safe_memoizer_testing_test.cc; sourceTree = ""; }; ECEBABC7E7B693BE808A1052 /* Pods_Firestore_IntegrationTests_iOS.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_Firestore_IntegrationTests_iOS.framework; sourceTree = BUILT_PRODUCTS_DIR; }; EF3A65472C66B9560041EE69 /* FIRVectorValueTests.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = FIRVectorValueTests.mm; sourceTree = ""; }; EF6C285029E462A200A7D4F1 /* FIRAggregateTests.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = FIRAggregateTests.mm; sourceTree = ""; }; @@ -2141,7 +2156,7 @@ F848C41C03A25C42AD5A4BC2 /* target_cache_test.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; path = target_cache_test.h; sourceTree = ""; }; F869D85E900E5AF6CD02E2FC /* firebase_auth_credentials_provider_test.mm */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.cpp.objcpp; name = firebase_auth_credentials_provider_test.mm; path = credentials/firebase_auth_credentials_provider_test.mm; sourceTree = ""; }; FA2E9952BA2B299C1156C43C /* Pods-Firestore_Benchmarks_iOS.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Firestore_Benchmarks_iOS.debug.xcconfig"; path = "Pods/Target Support Files/Pods-Firestore_Benchmarks_iOS/Pods-Firestore_Benchmarks_iOS.debug.xcconfig"; sourceTree = ""; }; - FC44D934D4A52C790659C8D6 /* leveldb_globals_cache_test.cc */ = {isa = PBXFileReference; includeInIndex = 1; path = leveldb_globals_cache_test.cc; sourceTree = ""; }; + FC44D934D4A52C790659C8D6 /* leveldb_globals_cache_test.cc */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.cpp.cpp; path = leveldb_globals_cache_test.cc; sourceTree = ""; }; FC738525340E594EBFAB121E /* Pods-Firestore_Example_tvOS.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Firestore_Example_tvOS.release.xcconfig"; path = "Pods/Target Support Files/Pods-Firestore_Example_tvOS/Pods-Firestore_Example_tvOS.release.xcconfig"; sourceTree = ""; }; FF73B39D04D1760190E6B84A /* FIRQueryUnitTests.mm */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.cpp.objcpp; path = FIRQueryUnitTests.mm; sourceTree = ""; }; 
FFCA39825D9678A03D1845D0 /* document_overlay_cache_test.cc */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.cpp.cpp; path = document_overlay_cache_test.cc; sourceTree = ""; }; @@ -2269,11 +2284,13 @@ children = ( EF6C286C29E6D22200A7D4F1 /* AggregationIntegrationTests.swift */, 062072B62773A055001655D7 /* AsyncAwaitIntegrationTests.swift */, + CC5CCD0437D846AB23B12C99 /* BsonTypesIntegrationTests.swift */, 124C932B22C1642C00CA8C2D /* CodableIntegrationTests.swift */, 3355BE9391CC4857AF0BDAE3 /* DatabaseTests.swift */, 62E54B832A9E910A003347C8 /* IndexingTests.swift */, 621D620928F9CE7400D2FA26 /* QueryIntegrationTests.swift */, 4D65F6E69993611D47DC8E7C /* SnapshotListenerSourceTests.swift */, + 2CBE52190D157CE1096CD12E /* TypeTest.swift */, EFF22EA92C5060A4009A369B /* VectorIntegrationTests.swift */, ); path = Integration; @@ -2963,6 +2980,7 @@ isa = PBXGroup; children = ( 1B9F95EC29FAD3F100EEC075 /* FIRAggregateQueryUnitTests.mm */, + 82E339063156F8C99818E355 /* FIRBsonTypesUnitTests.mm */, 5492E045202154AA00B64F25 /* FIRCollectionReferenceTests.mm */, 5492E049202154AA00B64F25 /* FIRDocumentReferenceTests.mm */, 5492E04B202154AA00B64F25 /* FIRDocumentSnapshotTests.mm */, @@ -4157,6 +4175,7 @@ buildActionMask = 2147483647; files = ( 1B9F95F029FAD4D700EEC075 /* FIRAggregateQueryUnitTests.mm in Sources */, + E3D0FC852ADF4BEE74460FEF /* FIRBsonTypesUnitTests.mm in Sources */, E11DDA3DD75705F26245E295 /* FIRCollectionReferenceTests.mm in Sources */, 46999832F7D1709B4C29FAA8 /* FIRDocumentReferenceTests.mm in Sources */, 6FD2369F24E884A9D767DD80 /* FIRDocumentSnapshotTests.mm in Sources */, @@ -4380,6 +4399,7 @@ buildActionMask = 2147483647; files = ( 1B9F95F229FAD4E000EEC075 /* FIRAggregateQueryUnitTests.mm in Sources */, + 7CD026FE4246C540F4231E4C /* FIRBsonTypesUnitTests.mm in Sources */, 00B7AFE2A7C158DD685EB5EE /* FIRCollectionReferenceTests.mm in Sources */, 25FE27330996A59F31713A0C /* FIRDocumentReferenceTests.mm in Sources */, 28E4B4A53A739AE2C9CF4159 /* FIRDocumentSnapshotTests.mm in Sources */, @@ -4595,6 +4615,7 @@ EF6C286F29E6D22200A7D4F1 /* AggregationIntegrationTests.swift in Sources */, 062072B92773A055001655D7 /* AsyncAwaitIntegrationTests.swift in Sources */, 733AFC467B600967536BD70F /* BasicCompileTests.swift in Sources */, + 21966DA1684600B6B9B912FE /* BsonTypesIntegrationTests.swift in Sources */, 79987AF2DF1FCE799008B846 /* CodableGeoPointTests.swift in Sources */, 1C79AE3FBFC91800E30D092C /* CodableIntegrationTests.swift in Sources */, BA3C0BA8082A6FB2546E47AC /* CodableTimestampTests.swift in Sources */, @@ -4604,6 +4625,7 @@ 1B9F95F329FAD4E100EEC075 /* FIRAggregateQueryUnitTests.mm in Sources */, EF6C285329E462A200A7D4F1 /* FIRAggregateTests.mm in Sources */, 95ED06D2B0078D3CDB821B68 /* FIRArrayTransformTests.mm in Sources */, + A8F0E84A4D8F44B4EEE3155C /* FIRBsonTypesUnitTests.mm in Sources */, DB3ADDA51FB93E84142EA90D /* FIRBundlesTests.mm in Sources */, 0500A324CEC854C5B0CF364C /* FIRCollectionReferenceTests.mm in Sources */, CAEA2A42D3120B48C6EE39E8 /* FIRCompositeIndexQueryTests.mm in Sources */, @@ -4651,6 +4673,7 @@ 62E54B862A9E910B003347C8 /* IndexingTests.swift in Sources */, 621D620C28F9CE7400D2FA26 /* QueryIntegrationTests.swift in Sources */, 1CFBD4563960D8A20C4679A3 /* SnapshotListenerSourceTests.swift in Sources */, + B59498DE96F8B6F8D5C0788F /* TypeTest.swift in Sources */, EFF22EAC2C5060A4009A369B /* VectorIntegrationTests.swift in Sources */, 4D42E5C756229C08560DD731 /* XCTestCase+Await.mm in Sources */, 
09BE8C01EC33D1FD82262D5D /* aggregate_query_test.cc in Sources */, @@ -4842,6 +4865,7 @@ EF6C286E29E6D22200A7D4F1 /* AggregationIntegrationTests.swift in Sources */, 062072B82773A055001655D7 /* AsyncAwaitIntegrationTests.swift in Sources */, B896E5DE1CC27347FAC009C3 /* BasicCompileTests.swift in Sources */, + 9ED94C2008F1475A0DC6D3BE /* BsonTypesIntegrationTests.swift in Sources */, 722F9A798F39F7D1FE7CF270 /* CodableGeoPointTests.swift in Sources */, CF5DE1ED21DD0A9783383A35 /* CodableIntegrationTests.swift in Sources */, 32B0739404FA588608E1F41A /* CodableTimestampTests.swift in Sources */, @@ -4851,6 +4875,7 @@ 1B9F95F129FAD4D800EEC075 /* FIRAggregateQueryUnitTests.mm in Sources */, EF6C285229E462A200A7D4F1 /* FIRAggregateTests.mm in Sources */, 660E99DEDA0A6FC1CCB200F9 /* FIRArrayTransformTests.mm in Sources */, + 62E181B9AB1568F9D332EA7C /* FIRBsonTypesUnitTests.mm in Sources */, AE068EDBC74AF27679CCB6DA /* FIRBundlesTests.mm in Sources */, BA0BB02821F1949783C8AA50 /* FIRCollectionReferenceTests.mm in Sources */, 9E1997789F19BF2E9029012E /* FIRCompositeIndexQueryTests.mm in Sources */, @@ -4898,6 +4923,7 @@ 62E54B852A9E910B003347C8 /* IndexingTests.swift in Sources */, 621D620B28F9CE7400D2FA26 /* QueryIntegrationTests.swift in Sources */, A0BC30D482B0ABD1A3A24CDC /* SnapshotListenerSourceTests.swift in Sources */, + 8CBCEB837CE378D44135F64A /* TypeTest.swift in Sources */, EFF22EAB2C5060A4009A369B /* VectorIntegrationTests.swift in Sources */, 736C4E82689F1CA1859C4A3F /* XCTestCase+Await.mm in Sources */, 412BE974741729A6683C386F /* aggregate_query_test.cc in Sources */, @@ -5107,6 +5133,7 @@ buildActionMask = 2147483647; files = ( 1B9F95EE29FAD4CD00EEC075 /* FIRAggregateQueryUnitTests.mm in Sources */, + FE4AC400F8F2D49B3E806420 /* FIRBsonTypesUnitTests.mm in Sources */, 5492E050202154AA00B64F25 /* FIRCollectionReferenceTests.mm in Sources */, 5492E053202154AB00B64F25 /* FIRDocumentReferenceTests.mm in Sources */, 5492E055202154AB00B64F25 /* FIRDocumentSnapshotTests.mm in Sources */, @@ -5341,6 +5368,7 @@ EF6C286D29E6D22200A7D4F1 /* AggregationIntegrationTests.swift in Sources */, 062072B72773A055001655D7 /* AsyncAwaitIntegrationTests.swift in Sources */, F731A0CCD0220B370BC1BE8B /* BasicCompileTests.swift in Sources */, + 455C31EB671A1EC9EB7A58CC /* BsonTypesIntegrationTests.swift in Sources */, 7C5E017689012489AAB7718D /* CodableGeoPointTests.swift in Sources */, 54C3242322D3B627000FE6DD /* CodableIntegrationTests.swift in Sources */, 70AB665EB6A473FF6C4CFD31 /* CodableTimestampTests.swift in Sources */, @@ -5350,6 +5378,7 @@ 1B9F95EF29FAD4CF00EEC075 /* FIRAggregateQueryUnitTests.mm in Sources */, EF6C285129E462A200A7D4F1 /* FIRAggregateTests.mm in Sources */, 73866AA12082B0A5009BB4FF /* FIRArrayTransformTests.mm in Sources */, + 7A5E96499414E3D3DCFFF52F /* FIRBsonTypesUnitTests.mm in Sources */, 4B54FA587C7107973FD76044 /* FIRBundlesTests.mm in Sources */, 7BCC5973C4F4FCC272150E31 /* FIRCollectionReferenceTests.mm in Sources */, 242BC62992ACC1A5B142CD4A /* FIRCompositeIndexQueryTests.mm in Sources */, @@ -5397,6 +5426,7 @@ 62E54B842A9E910B003347C8 /* IndexingTests.swift in Sources */, 621D620A28F9CE7400D2FA26 /* QueryIntegrationTests.swift in Sources */, B00F8D1819EE20C45B660940 /* SnapshotListenerSourceTests.swift in Sources */, + DD540A3D4C3FC45FDBD89544 /* TypeTest.swift in Sources */, EFF22EAA2C5060A4009A369B /* VectorIntegrationTests.swift in Sources */, 5492E0442021457E00B64F25 /* XCTestCase+Await.mm in Sources */, B04E4FE20930384DF3A402F9 /* aggregate_query_test.cc in 
Sources */, diff --git a/Firestore/Example/Firestore.xcodeproj/xcshareddata/xcschemes/Firestore_Tests_macOS.xcscheme b/Firestore/Example/Firestore.xcodeproj/xcshareddata/xcschemes/Firestore_Tests_macOS.xcscheme index 7a27b6f0c54..4884126571c 100644 --- a/Firestore/Example/Firestore.xcodeproj/xcshareddata/xcschemes/Firestore_Tests_macOS.xcscheme +++ b/Firestore/Example/Firestore.xcodeproj/xcshareddata/xcschemes/Firestore_Tests_macOS.xcscheme @@ -7,7 +7,11 @@ buildImplicitDependencies = "YES"> + buildForTesting = "YES" + buildForRunning = "YES" + buildForProfiling = "YES" + buildForArchiving = "YES" + buildForAnalyzing = "YES"> + + + + @@ -39,17 +52,6 @@ - - - - - - - - +#import +#import +#import +#import +#import +#import +#import + +#import + +NS_ASSUME_NONNULL_BEGIN + +@interface FIRBsonTypesUnitTests : XCTestCase +@end + +@implementation FIRBsonTypesUnitTests + +- (void)testMinKeySingleton { + FIRMinKey *minKey1 = [FIRMinKey instance]; + FIRMinKey *minKey2 = [FIRMinKey instance]; + XCTAssertEqual(minKey1, minKey2); + XCTAssertTrue([minKey1 isEqual:minKey2]); +} + +- (void)testMaxKeySingleton { + FIRMaxKey *maxKey1 = [FIRMaxKey instance]; + FIRMaxKey *maxKey2 = [FIRMaxKey instance]; + XCTAssertEqual(maxKey1, maxKey2); + XCTAssertTrue([maxKey1 isEqual:maxKey2]); +} + +- (void)testCreateAndReadAndCompareRegexValue { + FIRRegexValue *regex1 = [[FIRRegexValue alloc] initWithPattern:@"^foo" options:@"i"]; + FIRRegexValue *regex2 = [[FIRRegexValue alloc] initWithPattern:@"^foo" options:@"i"]; + FIRRegexValue *regex3 = [[FIRRegexValue alloc] initWithPattern:@"^foo" options:@"x"]; + FIRRegexValue *regex4 = [[FIRRegexValue alloc] initWithPattern:@"^bar" options:@"i"]; + + // Test reading the values back. + XCTAssertEqual(regex1.pattern, @"^foo"); + XCTAssertEqual(regex1.options, @"i"); + + // Test isEqual + XCTAssertTrue([regex1 isEqual:regex2]); + XCTAssertFalse([regex1 isEqual:regex3]); + XCTAssertFalse([regex1 isEqual:regex4]); +} + +- (void)testCreateAndReadAndCompareInt32Value { + FIRInt32Value *val1 = [[FIRInt32Value alloc] initWithValue:5]; + FIRInt32Value *val2 = [[FIRInt32Value alloc] initWithValue:5]; + FIRInt32Value *val3 = [[FIRInt32Value alloc] initWithValue:3]; + + // Test reading the value back + XCTAssertEqual(5, val1.value); + + // Test isEqual + XCTAssertTrue([val1 isEqual:val2]); + XCTAssertFalse([val1 isEqual:val3]); +} + +- (void)testCreateAndReadAndCompareBsonObjectId { + FIRBsonObjectId *val1 = [[FIRBsonObjectId alloc] initWithValue:@"abcd"]; + FIRBsonObjectId *val2 = [[FIRBsonObjectId alloc] initWithValue:@"abcd"]; + FIRBsonObjectId *val3 = [[FIRBsonObjectId alloc] initWithValue:@"efgh"]; + + // Test reading the value back + XCTAssertEqual(@"abcd", val1.value); + + // Test isEqual + XCTAssertTrue([val1 isEqual:val2]); + XCTAssertFalse([val1 isEqual:val3]); +} + +- (void)testCreateAndReadAndCompareBsonTimestamp { + FIRBsonTimestamp *val1 = [[FIRBsonTimestamp alloc] initWithSeconds:1234 increment:100]; + FIRBsonTimestamp *val2 = [[FIRBsonTimestamp alloc] initWithSeconds:1234 increment:100]; + FIRBsonTimestamp *val3 = [[FIRBsonTimestamp alloc] initWithSeconds:4444 increment:100]; + FIRBsonTimestamp *val4 = [[FIRBsonTimestamp alloc] initWithSeconds:1234 increment:444]; + + // Test reading the values back. 
+ XCTAssertEqual(1234U, val1.seconds); + XCTAssertEqual(100U, val1.increment); + + // Test isEqual + XCTAssertTrue([val1 isEqual:val2]); + XCTAssertFalse([val1 isEqual:val3]); + XCTAssertFalse([val1 isEqual:val4]); +} + +- (void)testCreateAndReadAndCompareBsonBinaryData { + uint8_t byteArray1[] = {0x01, 0x02, 0x03, 0x04, 0x05}; + uint8_t byteArray2[] = {0x01, 0x02, 0x03, 0x04, 0x99}; + NSData *data1 = [NSData dataWithBytes:byteArray1 length:sizeof(byteArray1)]; + NSData *data2 = [NSData dataWithBytes:byteArray1 length:sizeof(byteArray1)]; + NSData *data3 = [NSData dataWithBytes:byteArray2 length:sizeof(byteArray2)]; + + FIRBsonBinaryData *val1 = [[FIRBsonBinaryData alloc] initWithSubtype:128 data:data1]; + FIRBsonBinaryData *val2 = [[FIRBsonBinaryData alloc] initWithSubtype:128 data:data2]; + FIRBsonBinaryData *val3 = [[FIRBsonBinaryData alloc] initWithSubtype:128 data:data3]; + FIRBsonBinaryData *val4 = [[FIRBsonBinaryData alloc] initWithSubtype:1 data:data1]; + + // Test reading the values back. + XCTAssertEqual(128, val1.subtype); + XCTAssertEqual(data1, val1.data); + XCTAssertTrue([val1.data isEqualToData:data1]); + + // Test isEqual + XCTAssertTrue([val1 isEqual:val2]); + XCTAssertFalse([val1 isEqual:val3]); + XCTAssertFalse([val1 isEqual:val4]); +} + +- (void)testFieldValueMinKey { + FIRMinKey *minKey1 = [FIRMinKey instance]; + FIRMinKey *minKey2 = [FIRMinKey instance]; + XCTAssertEqual(minKey1, minKey2); + XCTAssertTrue([minKey1 isEqual:minKey2]); +} + +- (void)testFieldValueMaxKey { + FIRMaxKey *maxKey1 = [FIRMaxKey instance]; + FIRMaxKey *maxKey2 = [FIRMaxKey instance]; + XCTAssertEqual(maxKey1, maxKey2); + XCTAssertTrue([maxKey1 isEqual:maxKey2]); +} + +- (void)testFieldValueRegex { + FIRRegexValue *regex1 = [[FIRRegexValue alloc] initWithPattern:@"^foo" options:@"i"]; + FIRRegexValue *regex2 = [[FIRRegexValue alloc] initWithPattern:@"^foo" options:@"i"]; + XCTAssertTrue([regex1 isEqual:regex2]); + XCTAssertEqual(@"^foo", regex2.pattern); + XCTAssertEqual(@"i", regex2.options); +} + +- (void)testFieldValueInt32 { + FIRInt32Value *val1 = [[FIRInt32Value alloc] initWithValue:5]; + FIRInt32Value *val2 = [[FIRInt32Value alloc] initWithValue:5]; + XCTAssertTrue([val1 isEqual:val2]); + XCTAssertEqual(5, val2.value); +} + +- (void)testFieldValueObjectId { + FIRBsonObjectId *oid1 = [[FIRBsonObjectId alloc] initWithValue:@"abcd"]; + FIRBsonObjectId *oid2 = [[FIRBsonObjectId alloc] initWithValue:@"abcd"]; + XCTAssertTrue([oid1 isEqual:oid2]); + XCTAssertEqual(@"abcd", oid2.value); +} + +- (void)testFieldValueBsonTimestamp { + FIRBsonTimestamp *val1 = [[FIRBsonTimestamp alloc] initWithSeconds:1234 increment:100]; + FIRBsonTimestamp *val2 = [[FIRBsonTimestamp alloc] initWithSeconds:1234 increment:100]; + XCTAssertTrue([val1 isEqual:val2]); + XCTAssertEqual(1234U, val2.seconds); + XCTAssertEqual(100U, val2.increment); +} + +- (void)testFieldValueBsonBinaryData { + uint8_t byteArray[] = {0x01, 0x02, 0x03, 0x04, 0x05}; + NSData *data = [NSData dataWithBytes:byteArray length:sizeof(byteArray)]; + FIRBsonBinaryData *val1 = [[FIRBsonBinaryData alloc] initWithSubtype:128 data:data]; + FIRBsonBinaryData *val2 = [[FIRBsonBinaryData alloc] initWithSubtype:128 data:data]; + XCTAssertTrue([val1 isEqual:val2]); + XCTAssertEqual(128, val2.subtype); + XCTAssertEqual(data, val2.data); +} + +@end + +NS_ASSUME_NONNULL_END diff --git a/Firestore/Example/Tests/API/FIRFieldValueTests.mm b/Firestore/Example/Tests/API/FIRFieldValueTests.mm index a7a347b0328..af8ba7b5ed7 100644 --- 
a/Firestore/Example/Tests/API/FIRFieldValueTests.mm +++ b/Firestore/Example/Tests/API/FIRFieldValueTests.mm @@ -14,8 +14,14 @@ * limitations under the License. */ +#import +#import +#import #import +#import +#import #import +#import "Firestore/Example/Tests/Util/FSTHelpers.h" #import diff --git a/Firestore/Example/Tests/Util/FSTIntegrationTestCase.mm b/Firestore/Example/Tests/Util/FSTIntegrationTestCase.mm index 4b7c7b9f034..0a9949457b4 100644 --- a/Firestore/Example/Tests/Util/FSTIntegrationTestCase.mm +++ b/Firestore/Example/Tests/Util/FSTIntegrationTestCase.mm @@ -198,6 +198,12 @@ + (void)setUpDefaults { // SSL certs. NSString *project = [[NSProcessInfo processInfo] environment][@"PROJECT_ID"]; NSString *targetBackend = [[NSProcessInfo processInfo] environment][@"TARGET_BACKEND"]; + + // Forcing use of nightly. + // TODO(types/ehsann): remove this before merging into main. + targetBackend = @"nightly"; + project = @"firestore-sdk-nightly"; + NSString *host; if (targetBackend) { if ([targetBackend isEqualToString:@"emulator"]) { diff --git a/Firestore/Source/API/FIRBsonBinaryData.mm b/Firestore/Source/API/FIRBsonBinaryData.mm new file mode 100644 index 00000000000..c656d5dd3ee --- /dev/null +++ b/Firestore/Source/API/FIRBsonBinaryData.mm @@ -0,0 +1,56 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "Firestore/Source/Public/FirebaseFirestore/FIRBsonBinaryData.h" + +NS_ASSUME_NONNULL_BEGIN + +@implementation FIRBsonBinaryData + +- (instancetype)initWithSubtype:(uint8_t)subtype data:(NSData *)data { + self = [super init]; + if (self) { + _subtype = subtype; + _data = data; + } + return self; +} + +- (BOOL)isEqual:(id)object { + if (self == object) { + return YES; + } + + if (![object isKindOfClass:[FIRBsonBinaryData class]]) { + return NO; + } + + FIRBsonBinaryData *other = (FIRBsonBinaryData *)object; + return self.subtype == other.subtype && [self.data isEqualToData:other.data]; +} + +- (id)copyWithZone:(__unused NSZone *_Nullable)zone { + return [[FIRBsonBinaryData alloc] initWithSubtype:self.subtype data:self.data]; +} + +- (NSString *)description { + return [NSString stringWithFormat:@"", + (unsigned int)self.subtype, self.data]; +} + +@end + +NS_ASSUME_NONNULL_END diff --git a/Firestore/Source/API/FIRBsonObjectId.mm b/Firestore/Source/API/FIRBsonObjectId.mm new file mode 100644 index 00000000000..defd0d64528 --- /dev/null +++ b/Firestore/Source/API/FIRBsonObjectId.mm @@ -0,0 +1,54 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "Firestore/Source/Public/FirebaseFirestore/FIRBsonObjectId.h" + +NS_ASSUME_NONNULL_BEGIN + +@implementation FIRBsonObjectId + +- (instancetype)initWithValue:(NSString *)value { + self = [super init]; + if (self) { + _value = value; + } + return self; +} + +- (BOOL)isEqual:(id)object { + if (self == object) { + return YES; + } + + if (![object isKindOfClass:[FIRBsonObjectId class]]) { + return NO; + } + + FIRBsonObjectId *other = (FIRBsonObjectId *)object; + return [self.value isEqualToString:other.value]; +} + +- (id)copyWithZone:(__unused NSZone *_Nullable)zone { + return [[FIRBsonObjectId alloc] initWithValue:self.value]; +} + +- (NSString *)description { + return [NSString stringWithFormat:@"", self.value]; +} + +@end + +NS_ASSUME_NONNULL_END diff --git a/Firestore/Source/API/FIRBsonTimestamp.mm b/Firestore/Source/API/FIRBsonTimestamp.mm new file mode 100644 index 00000000000..c75567f4495 --- /dev/null +++ b/Firestore/Source/API/FIRBsonTimestamp.mm @@ -0,0 +1,56 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "Firestore/Source/Public/FirebaseFirestore/FIRBsonTimestamp.h" + +NS_ASSUME_NONNULL_BEGIN + +@implementation FIRBsonTimestamp + +- (instancetype)initWithSeconds:(uint32_t)seconds increment:(uint32_t)increment { + self = [super init]; + if (self) { + _seconds = seconds; + _increment = increment; + } + return self; +} + +- (BOOL)isEqual:(id)object { + if (self == object) { + return YES; + } + + if (![object isKindOfClass:[FIRBsonTimestamp class]]) { + return NO; + } + + FIRBsonTimestamp *other = (FIRBsonTimestamp *)object; + return self.seconds == other.seconds && self.increment == other.increment; +} + +- (id)copyWithZone:(__unused NSZone *_Nullable)zone { + return [[FIRBsonTimestamp alloc] initWithSeconds:self.seconds increment:self.increment]; +} + +- (NSString *)description { + return [NSString stringWithFormat:@"", self.seconds, + self.increment]; +} + +@end + +NS_ASSUME_NONNULL_END diff --git a/Firestore/Source/API/FIRFieldValue.mm b/Firestore/Source/API/FIRFieldValue.mm index 23c5060a8ee..e86964f5f3d 100644 --- a/Firestore/Source/API/FIRFieldValue.mm +++ b/Firestore/Source/API/FIRFieldValue.mm @@ -15,6 +15,13 @@ */ #import "Firestore/Source/API/FIRFieldValue+Internal.h" +#import "Firestore/Source/Public/FirebaseFirestore/FIRBsonBinaryData.h" +#import "Firestore/Source/Public/FirebaseFirestore/FIRBsonObjectId.h" +#import "Firestore/Source/Public/FirebaseFirestore/FIRBsonTimestamp.h" +#import "Firestore/Source/Public/FirebaseFirestore/FIRInt32Value.h" +#import "Firestore/Source/Public/FirebaseFirestore/FIRMaxKey.h" +#import "Firestore/Source/Public/FirebaseFirestore/FIRMinKey.h" +#import "Firestore/Source/Public/FirebaseFirestore/FIRRegexValue.h" #import "Firestore/Source/Public/FirebaseFirestore/FIRVectorValue.h" NS_ASSUME_NONNULL_BEGIN diff --git a/Firestore/Source/API/FIRInt32Value.mm 
b/Firestore/Source/API/FIRInt32Value.mm new file mode 100644 index 00000000000..8e61a07a856 --- /dev/null +++ b/Firestore/Source/API/FIRInt32Value.mm @@ -0,0 +1,54 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "Firestore/Source/Public/FirebaseFirestore/FIRInt32Value.h" + +NS_ASSUME_NONNULL_BEGIN + +@implementation FIRInt32Value + +- (instancetype)initWithValue:(int)value { + self = [super init]; + if (self) { + _value = value; + } + return self; +} + +- (BOOL)isEqual:(id)object { + if (self == object) { + return YES; + } + + if (![object isKindOfClass:[FIRInt32Value class]]) { + return NO; + } + + FIRInt32Value *other = (FIRInt32Value *)object; + return self.value == other.value; +} + +- (id)copyWithZone:(__unused NSZone *_Nullable)zone { + return [[FIRInt32Value alloc] initWithValue:self.value]; +} + +- (NSString *)description { + return [NSString stringWithFormat:@"", self.value]; +} + +@end + +NS_ASSUME_NONNULL_END diff --git a/Firestore/Source/API/FIRMaxKey.mm b/Firestore/Source/API/FIRMaxKey.mm new file mode 100644 index 00000000000..5eabc9aa731 --- /dev/null +++ b/Firestore/Source/API/FIRMaxKey.mm @@ -0,0 +1,57 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "Firestore/Source/Public/FirebaseFirestore/FIRMaxKey.h" + +NS_ASSUME_NONNULL_BEGIN + +@implementation FIRMaxKey +static FIRMaxKey *sharedInstance = nil; +static dispatch_once_t onceToken; + ++ (FIRMaxKey *)instance { + dispatch_once(&onceToken, ^{ + sharedInstance = [[self alloc] init]; + }); + return sharedInstance; +} + +- (instancetype)init { + self = [super init]; + return self; +} + +- (id)copyWithZone:(__unused NSZone *_Nullable)zone { + return self; +} + +- (BOOL)isEqual:(id)object { + if (self == object) { + return YES; + } + if (object == nil || [self class] != [object class]) { + return NO; + } + return YES; +} + +- (NSString *)description { + return [NSString stringWithFormat:@""]; +} + +@end + +NS_ASSUME_NONNULL_END diff --git a/Firestore/Source/API/FIRMinKey.mm b/Firestore/Source/API/FIRMinKey.mm new file mode 100644 index 00000000000..3b662031558 --- /dev/null +++ b/Firestore/Source/API/FIRMinKey.mm @@ -0,0 +1,57 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
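Int32Value above is a thin wrapper whose only job is to mark a number as 32-bit when it is written; a plain Swift integer would otherwise be stored as a 64-bit value. A short usage sketch, with illustrative field names:

import FirebaseFirestore

let fields: [String: Any] = [
  "count": Int32Value(7),  // explicitly written as a 32-bit integer
  "total": 7               // written as a regular 64-bit integer
]
assert(Int32Value(7).isEqual(Int32Value(7)))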
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "Firestore/Source/Public/FirebaseFirestore/FIRMinKey.h" + +NS_ASSUME_NONNULL_BEGIN + +@implementation FIRMinKey +static FIRMinKey *sharedInstance = nil; +static dispatch_once_t onceToken; + ++ (FIRMinKey *)instance { + dispatch_once(&onceToken, ^{ + sharedInstance = [[self alloc] init]; + }); + return sharedInstance; +} + +- (instancetype)init { + self = [super init]; + return self; +} + +- (id)copyWithZone:(__unused NSZone *_Nullable)zone { + return self; +} + +- (BOOL)isEqual:(id)object { + if (self == object) { + return YES; + } + if (object == nil || [self class] != [object class]) { + return NO; + } + return YES; +} + +- (NSString *)description { + return [NSString stringWithFormat:@""]; +} + +@end + +NS_ASSUME_NONNULL_END diff --git a/Firestore/Source/API/FIRRegexValue.mm b/Firestore/Source/API/FIRRegexValue.mm new file mode 100644 index 00000000000..cde5b3cf462 --- /dev/null +++ b/Firestore/Source/API/FIRRegexValue.mm @@ -0,0 +1,57 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
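FIRMinKey and FIRMaxKey are singletons: +instance always returns the same object, copyWithZone: returns self, and isEqual: only checks the class. In Swift that means identity and equality coincide, as in this sketch:

import FirebaseFirestore

let lower = MinKey.instance()
let upper = MaxKey.instance()
assert(lower === MinKey.instance())       // the same object every time
assert(lower.isEqual(MinKey.instance()))  // equality follows from identity
assert(!lower.isEqual(upper))             // MinKey and MaxKey are distinct classes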
+ */ + +#include "Firestore/Source/Public/FirebaseFirestore/FIRRegexValue.h" + +NS_ASSUME_NONNULL_BEGIN + +@implementation FIRRegexValue + +- (instancetype)initWithPattern:(NSString *)pattern options:(NSString *)options { + self = [super init]; + if (self) { + _pattern = pattern; + _options = options; + } + return self; +} + +- (BOOL)isEqual:(id)object { + if (self == object) { + return YES; + } + + if (![object isKindOfClass:[FIRRegexValue class]]) { + return NO; + } + + FIRRegexValue *other = (FIRRegexValue *)object; + return + [self.pattern isEqualToString:other.pattern] && [self.options isEqualToString:other.options]; +} + +- (id)copyWithZone:(__unused NSZone *_Nullable)zone { + return [[FIRRegexValue alloc] initWithPattern:self.pattern options:self.options]; +} + +- (NSString *)description { + return [NSString + stringWithFormat:@"", self.pattern, self.options]; +} + +@end + +NS_ASSUME_NONNULL_END diff --git a/Firestore/Source/API/FSTUserDataReader.mm b/Firestore/Source/API/FSTUserDataReader.mm index 4720c78d493..35cb68721e4 100644 --- a/Firestore/Source/API/FSTUserDataReader.mm +++ b/Firestore/Source/API/FSTUserDataReader.mm @@ -24,7 +24,14 @@ #import "Firestore/Source/API/FSTUserDataReader.h" +#import "FIRBsonBinaryData.h" +#import "FIRBsonObjectId.h" +#import "FIRBsonTimestamp.h" #import "FIRGeoPoint.h" +#import "FIRInt32Value.h" +#import "FIRMaxKey.h" +#import "FIRMinKey.h" +#import "FIRRegexValue.h" #import "FIRVectorValue.h" #import "Firestore/Source/API/FIRDocumentReference+Internal.h" @@ -378,6 +385,143 @@ - (ParsedUpdateData)parsedUpdateData:(id)input { return std::move(result); } +- (Message)parseMinKey { + __block Message result; + result->which_value_type = google_firestore_v1_Value_map_value_tag; + result->map_value = {}; + result->map_value.fields_count = 1; + result->map_value.fields = nanopb::MakeArray(1); + result->map_value.fields[0].key = nanopb::CopyBytesArray(model::kMinKeyTypeFieldValue); + result->map_value.fields[0].value = *DeepClone(NullValue()).release(); + + return std::move(result); +} + +- (Message)parseMaxKey { + __block Message result; + result->which_value_type = google_firestore_v1_Value_map_value_tag; + result->map_value = {}; + result->map_value.fields_count = 1; + result->map_value.fields = nanopb::MakeArray(1); + result->map_value.fields[0].key = nanopb::CopyBytesArray(model::kMaxKeyTypeFieldValue); + result->map_value.fields[0].value = *DeepClone(NullValue()).release(); + + return std::move(result); +} + +- (Message)parseRegexValue:(FIRRegexValue *)regexValue + context:(ParseContext &&)context { + NSString *pattern = regexValue.pattern; + NSString *options = regexValue.options; + + __block Message regexMessage; + regexMessage->which_value_type = google_firestore_v1_Value_map_value_tag; + regexMessage->map_value = {}; + regexMessage->map_value.fields_count = 2; + regexMessage->map_value.fields = nanopb::MakeArray(2); + regexMessage->map_value.fields[0].key = + nanopb::CopyBytesArray(model::kRegexTypePatternFieldValue); + regexMessage->map_value.fields[0].value = *[self encodeStringValue:MakeString(pattern)].release(); + regexMessage->map_value.fields[1].key = + nanopb::CopyBytesArray(model::kRegexTypeOptionsFieldValue); + regexMessage->map_value.fields[1].value = *[self encodeStringValue:MakeString(options)].release(); + + __block Message result; + result->which_value_type = google_firestore_v1_Value_map_value_tag; + result->map_value = {}; + result->map_value.fields_count = 1; + result->map_value.fields = nanopb::MakeArray(1); + 
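The reader encodes each of these types as a single-entry map keyed by a reserved sentinel: MinKey and MaxKey carry a null payload, while RegexValue nests a pattern/options map. The exact sentinel strings are the k*TypeFieldValue constants in model/value_util.h, which this hunk does not show; the spellings below are assumptions for illustration only.

// Assumed spellings; see kMinKeyTypeFieldValue and kRegexType*FieldValue.
let encodedMinKey: [String: Any?] = ["__min__": nil]
let encodedRegex: [String: Any] = ["__regex__": ["pattern": "^foo", "options": "i"]]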
result->map_value.fields[0].key = nanopb::CopyBytesArray(model::kRegexTypeFieldValue); + result->map_value.fields[0].value = *regexMessage.release(); + + return std::move(result); +} + +- (Message)parseInt32Value:(FIRInt32Value *)int32 + context:(ParseContext &&)context { + __block Message result; + result->which_value_type = google_firestore_v1_Value_map_value_tag; + result->map_value = {}; + result->map_value.fields_count = 1; + result->map_value.fields = nanopb::MakeArray(1); + result->map_value.fields[0].key = nanopb::CopyBytesArray(model::kInt32TypeFieldValue); + // The 32-bit integer value is encoded as a 64-bit long in the proto. + result->map_value.fields[0].value = + *[self encodeInteger:static_cast(int32.value)].release(); + + return std::move(result); +} + +- (Message)parseBsonObjectId:(FIRBsonObjectId *)oid + context:(ParseContext &&)context { + __block Message result; + result->which_value_type = google_firestore_v1_Value_map_value_tag; + result->map_value = {}; + result->map_value.fields_count = 1; + result->map_value.fields = nanopb::MakeArray(1); + result->map_value.fields[0].key = nanopb::CopyBytesArray(model::kBsonObjectIdTypeFieldValue); + result->map_value.fields[0].value = *[self encodeStringValue:MakeString(oid.value)].release(); + + return std::move(result); +} + +- (Message)parseBsonTimestamp:(FIRBsonTimestamp *)timestamp + context:(ParseContext &&)context { + uint32_t seconds = timestamp.seconds; + uint32_t increment = timestamp.increment; + + __block Message timestampMessage; + timestampMessage->which_value_type = google_firestore_v1_Value_map_value_tag; + timestampMessage->map_value = {}; + timestampMessage->map_value.fields_count = 2; + timestampMessage->map_value.fields = + nanopb::MakeArray(2); + + timestampMessage->map_value.fields[0].key = + nanopb::CopyBytesArray(model::kBsonTimestampTypeSecondsFieldValue); + // The 32-bit unsigned integer value is encoded as a 64-bit long in the proto. + timestampMessage->map_value.fields[0].value = + *[self encodeInteger:static_cast(seconds)].release(); + + timestampMessage->map_value.fields[1].key = + nanopb::CopyBytesArray(model::kBsonTimestampTypeIncrementFieldValue); + // The 32-bit unsigned integer value is encoded as a 64-bit long in the proto. + timestampMessage->map_value.fields[1].value = + *[self encodeInteger:static_cast(increment)].release(); + + __block Message result; + result->which_value_type = google_firestore_v1_Value_map_value_tag; + result->map_value = {}; + result->map_value.fields_count = 1; + result->map_value.fields = nanopb::MakeArray(1); + result->map_value.fields[0].key = nanopb::CopyBytesArray(model::kBsonTimestampTypeFieldValue); + result->map_value.fields[0].value = *timestampMessage.release(); + + return std::move(result); +} + +- (Message)parseBsonBinaryData:(FIRBsonBinaryData *)binaryData + context:(ParseContext &&)context { + uint8_t subtypeByte = binaryData.subtype; + NSData *data = binaryData.data; + + // We need to prepend the data with one byte representation of the subtype. 
+ NSMutableData *concatData = [NSMutableData data]; + [concatData appendBytes:&subtypeByte length:1]; + [concatData appendData:data]; + + __block Message result; + result->which_value_type = google_firestore_v1_Value_map_value_tag; + result->map_value = {}; + result->map_value.fields_count = 1; + result->map_value.fields = nanopb::MakeArray(1); + result->map_value.fields[0].key = nanopb::CopyBytesArray(model::kBsonBinaryDataTypeFieldValue); + result->map_value.fields[0].value = + *[self encodeBlob:(nanopb::MakeByteString(concatData))].release(); + + return std::move(result); +} + - (Message)parseArray:(NSArray *)array context:(ParseContext &&)context { __block Message result; @@ -569,6 +713,25 @@ - (void)parseSentinelFieldValue:(FIRFieldValue *)fieldValue context:(ParseContex } else if ([input isKindOfClass:[FIRVectorValue class]]) { FIRVectorValue *vector = input; return [self parseVectorValue:vector context:std::move(context)]; + } else if ([input isKindOfClass:[FIRMinKey class]]) { + return [self parseMinKey]; + } else if ([input isKindOfClass:[FIRMaxKey class]]) { + return [self parseMaxKey]; + } else if ([input isKindOfClass:[FIRRegexValue class]]) { + FIRRegexValue *regex = input; + return [self parseRegexValue:regex context:std::move(context)]; + } else if ([input isKindOfClass:[FIRInt32Value class]]) { + FIRInt32Value *value = input; + return [self parseInt32Value:value context:std::move(context)]; + } else if ([input isKindOfClass:[FIRBsonObjectId class]]) { + FIRBsonObjectId *oid = input; + return [self parseBsonObjectId:oid context:std::move(context)]; + } else if ([input isKindOfClass:[FIRBsonTimestamp class]]) { + FIRBsonTimestamp *timestamp = input; + return [self parseBsonTimestamp:timestamp context:std::move(context)]; + } else if ([input isKindOfClass:[FIRBsonBinaryData class]]) { + FIRBsonBinaryData *binaryData = input; + return [self parseBsonBinaryData:binaryData context:std::move(context)]; } else { ThrowInvalidArgument("Unsupported type: %s%s", NSStringFromClass([input class]), context.FieldDescription()); diff --git a/Firestore/Source/API/FSTUserDataWriter.mm b/Firestore/Source/API/FSTUserDataWriter.mm index 1e170531782..c772819e440 100644 --- a/Firestore/Source/API/FSTUserDataWriter.mm +++ b/Firestore/Source/API/FSTUserDataWriter.mm @@ -23,6 +23,13 @@ #include "Firestore/Source/API/FIRDocumentReference+Internal.h" #include "Firestore/Source/API/FIRFieldValue+Internal.h" #include "Firestore/Source/API/converters.h" +#include "Firestore/Source/Public/FirebaseFirestore/FIRBsonBinaryData.h" +#include "Firestore/Source/Public/FirebaseFirestore/FIRBsonObjectId.h" +#include "Firestore/Source/Public/FirebaseFirestore/FIRBsonTimestamp.h" +#include "Firestore/Source/Public/FirebaseFirestore/FIRInt32Value.h" +#include "Firestore/Source/Public/FirebaseFirestore/FIRMaxKey.h" +#include "Firestore/Source/Public/FirebaseFirestore/FIRMinKey.h" +#include "Firestore/Source/Public/FirebaseFirestore/FIRRegexValue.h" #include "Firestore/core/include/firebase/firestore/geo_point.h" #include "Firestore/core/include/firebase/firestore/timestamp.h" #include "Firestore/core/src/api/firestore.h" @@ -49,6 +56,11 @@ using firebase::firestore::google_firestore_v1_MapValue; using firebase::firestore::google_firestore_v1_Value; using firebase::firestore::google_protobuf_Timestamp; +using firebase::firestore::model::kRawBsonTimestampTypeIncrementFieldValue; +using firebase::firestore::model::kRawBsonTimestampTypeSecondsFieldValue; +using firebase::firestore::model::kRawRegexTypeOptionsFieldValue; 
+using firebase::firestore::model::kRawRegexTypePatternFieldValue; +using firebase::firestore::model::kRawVectorValueFieldKey; using firebase::firestore::util::MakeNSString; using model::DatabaseId; using model::DocumentKey; @@ -96,6 +108,9 @@ - (id)convertedValue:(const google_firestore_v1_Value &)value { case TypeOrder::kBoolean: return value.boolean_value ? @YES : @NO; case TypeOrder::kNumber: + if (value.which_value_type == google_firestore_v1_Value_map_value_tag) { + return [self convertedInt32:value.map_value]; + } return value.which_value_type == google_firestore_v1_Value_integer_value_tag ? @(value.integer_value) : @(value.double_value); @@ -106,6 +121,18 @@ - (id)convertedValue:(const google_firestore_v1_Value &)value { case TypeOrder::kGeoPoint: return MakeFIRGeoPoint( GeoPoint(value.geo_point_value.latitude, value.geo_point_value.longitude)); + case TypeOrder::kMinKey: + return [FIRMinKey instance]; + case TypeOrder::kMaxKey: + return [FIRMaxKey instance]; + case TypeOrder::kRegex: + return [self convertedRegex:value.map_value]; + case TypeOrder::kBsonObjectId: + return [self convertedBsonObjectId:value.map_value]; + case TypeOrder::kBsonTimestamp: + return [self convertedBsonTimestamp:value.map_value]; + case TypeOrder::kBsonBinaryData: + return [self convertedBsonBinaryData:value.map_value]; case TypeOrder::kVector: return [self convertedVector:value.map_value]; case TypeOrder::kMaxValue: @@ -130,7 +157,7 @@ - (FIRVectorValue *)convertedVector:(const google_firestore_v1_MapValue &)mapVal for (pb_size_t i = 0; i < mapValue.fields_count; ++i) { absl::string_view key = MakeStringView(mapValue.fields[i].key); const google_firestore_v1_Value &value = mapValue.fields[i].value; - if ((0 == key.compare(absl::string_view("value"))) && + if ((0 == key.compare(absl::string_view(kRawVectorValueFieldKey))) && value.which_value_type == google_firestore_v1_Value_array_value_tag) { return [FIRFieldValue vectorWithArray:[self convertedArray:value.array_value]]; } @@ -138,6 +165,99 @@ - (FIRVectorValue *)convertedVector:(const google_firestore_v1_MapValue &)mapVal return [FIRFieldValue vectorWithArray:@[]]; } +- (FIRRegexValue *)convertedRegex:(const google_firestore_v1_MapValue &)mapValue { + NSString *pattern = @""; + NSString *options = @""; + if (mapValue.fields_count == 1) { + const google_firestore_v1_Value &innerValue = mapValue.fields[0].value; + if (innerValue.which_value_type == google_firestore_v1_Value_map_value_tag) { + for (pb_size_t i = 0; i < innerValue.map_value.fields_count; ++i) { + absl::string_view key = MakeStringView(innerValue.map_value.fields[i].key); + const google_firestore_v1_Value &value = innerValue.map_value.fields[i].value; + if ((0 == key.compare(absl::string_view(kRawRegexTypePatternFieldValue))) && + value.which_value_type == google_firestore_v1_Value_string_value_tag) { + pattern = MakeNSString(MakeStringView(value.string_value)); + } + if ((0 == key.compare(absl::string_view(kRawRegexTypeOptionsFieldValue))) && + value.which_value_type == google_firestore_v1_Value_string_value_tag) { + options = MakeNSString(MakeStringView(value.string_value)); + } + } + } + } + + return [[FIRRegexValue alloc] initWithPattern:pattern options:options]; +} + +- (FIRInt32Value *)convertedInt32:(const google_firestore_v1_MapValue &)mapValue { + int32_t value = 0; + if (mapValue.fields_count == 1) { + value = static_cast(mapValue.fields[0].value.integer_value); + } + + return [[FIRInt32Value alloc] initWithValue:value]; +} + +- (FIRBsonObjectId *)convertedBsonObjectId:(const 
google_firestore_v1_MapValue &)mapValue { + NSString *oid = @""; + if (mapValue.fields_count == 1) { + const google_firestore_v1_Value &oidValue = mapValue.fields[0].value; + if (oidValue.which_value_type == google_firestore_v1_Value_string_value_tag) { + oid = MakeNSString(MakeStringView(oidValue.string_value)); + } + } + + return [[FIRBsonObjectId alloc] initWithValue:oid]; +} + +- (FIRBsonTimestamp *)convertedBsonTimestamp:(const google_firestore_v1_MapValue &)mapValue { + uint32_t seconds = 0; + uint32_t increment = 0; + if (mapValue.fields_count == 1) { + const google_firestore_v1_Value &innerValue = mapValue.fields[0].value; + if (innerValue.which_value_type == google_firestore_v1_Value_map_value_tag) { + for (pb_size_t i = 0; i < innerValue.map_value.fields_count; ++i) { + absl::string_view key = MakeStringView(innerValue.map_value.fields[i].key); + const google_firestore_v1_Value &value = innerValue.map_value.fields[i].value; + if ((0 == key.compare(absl::string_view(kRawBsonTimestampTypeSecondsFieldValue))) && + value.which_value_type == google_firestore_v1_Value_integer_value_tag) { + // The value from the server is guaranteed to fit in a 32-bit unsigned integer. + seconds = static_cast(value.integer_value); + } + if ((0 == key.compare(absl::string_view(kRawBsonTimestampTypeIncrementFieldValue))) && + value.which_value_type == google_firestore_v1_Value_integer_value_tag) { + // The value from the server is guaranteed to fit in a 32-bit unsigned integer. + increment = static_cast(value.integer_value); + } + } + } + } + + return [[FIRBsonTimestamp alloc] initWithSeconds:seconds increment:increment]; +} + +- (FIRBsonBinaryData *)convertedBsonBinaryData:(const google_firestore_v1_MapValue &)mapValue { + uint8_t subtype = 0; + NSData *data = [[NSData alloc] init]; + + if (mapValue.fields_count == 1) { + const google_firestore_v1_Value &dataValue = mapValue.fields[0].value; + if (dataValue.which_value_type == google_firestore_v1_Value_bytes_value_tag) { + NSData *concatData = MakeNSData(dataValue.bytes_value); + if (concatData.length > 0) { + uint8_t buffer[1]; + [concatData getBytes:buffer length:1]; + subtype = buffer[0]; + } + if (concatData.length > 1) { + data = [concatData subdataWithRange:NSMakeRange(1, concatData.length - 1)]; + } + } + } + + return [[FIRBsonBinaryData alloc] initWithSubtype:subtype data:data]; +} + - (NSArray *)convertedArray:(const google_firestore_v1_ArrayValue &)arrayValue { NSMutableArray *result = [NSMutableArray arrayWithCapacity:arrayValue.values_count]; for (pb_size_t i = 0; i < arrayValue.values_count; ++i) { diff --git a/Firestore/Source/Public/FirebaseFirestore/FIRBsonBinaryData.h b/Firestore/Source/Public/FirebaseFirestore/FIRBsonBinaryData.h new file mode 100644 index 00000000000..55be1d51f52 --- /dev/null +++ b/Firestore/Source/Public/FirebaseFirestore/FIRBsonBinaryData.h @@ -0,0 +1,49 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
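Together with the reader change earlier in this patch, convertedBsonBinaryData: completes a simple framing scheme: the subtype byte is prepended to the payload before it is stored as bytes, and split back off on the way out. A standalone Swift sketch of that round trip (helper names are illustrative):

import Foundation

func frame(subtype: UInt8, payload: Data) -> Data {
  var framed = Data([subtype])  // subtype byte goes first
  framed.append(payload)
  return framed
}

func unframe(_ framed: Data) -> (subtype: UInt8, payload: Data) {
  guard let first = framed.first else { return (0, Data()) }
  return (first, framed.dropFirst())
}

let (subtype, payload) = unframe(frame(subtype: 128, payload: Data([1, 2, 3])))
assert(subtype == 128 && payload == Data([1, 2, 3]))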
+ */ + +#import + +NS_ASSUME_NONNULL_BEGIN + +/** + * Represents a BSON Binary Data type in Firestore documents. + */ +NS_SWIFT_SENDABLE +NS_SWIFT_NAME(BsonBinaryData) +@interface FIRBsonBinaryData : NSObject + +/** An 8-bit unsigned integer denoting the subtype of the data. */ +@property(atomic, readonly) uint8_t subtype; + +/** The binary data. */ +@property(atomic, copy, readonly) NSData *data; + +/** :nodoc: */ +- (instancetype)init NS_UNAVAILABLE; + +/** + * Creates a `BsonBinaryData` constructed with the given subtype and data. + * @param subtype An 8-bit unsigned integer denoting the subtype of the data. + * @param data The binary data. + */ +- (instancetype)initWithSubtype:(uint8_t)subtype data:(nonnull NSData *)data; + +/** Returns true if the given object is equal to this, and false otherwise. */ +- (BOOL)isEqual:(id)object; + +@end + +NS_ASSUME_NONNULL_END diff --git a/Firestore/Source/Public/FirebaseFirestore/FIRBsonObjectId.h b/Firestore/Source/Public/FirebaseFirestore/FIRBsonObjectId.h new file mode 100644 index 00000000000..708a40285b5 --- /dev/null +++ b/Firestore/Source/Public/FirebaseFirestore/FIRBsonObjectId.h @@ -0,0 +1,45 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import + +NS_ASSUME_NONNULL_BEGIN + +/** + * Represents a BSON ObjectId type in Firestore documents. + */ +NS_SWIFT_SENDABLE +NS_SWIFT_NAME(BsonObjectId) +@interface FIRBsonObjectId : NSObject + +/** The 24-character hex string representation of the ObjectId. */ +@property(atomic, copy, readonly) NSString *value; + +/** :nodoc: */ +- (instancetype)init NS_UNAVAILABLE; + +/** + * Creates a `BsonObjectId` constructed with the given value. + * @param value The 24-character hex string representation of the ObjectId. + */ +- (instancetype)initWithValue:(nonnull NSString *)value NS_SWIFT_NAME(init(_:)); + +/** Returns true if the given object is equal to this, and false otherwise. */ +- (BOOL)isEqual:(id)object; + +@end + +NS_ASSUME_NONNULL_END diff --git a/Firestore/Source/Public/FirebaseFirestore/FIRBsonTimestamp.h b/Firestore/Source/Public/FirebaseFirestore/FIRBsonTimestamp.h new file mode 100644 index 00000000000..f281528da04 --- /dev/null +++ b/Firestore/Source/Public/FirebaseFirestore/FIRBsonTimestamp.h @@ -0,0 +1,50 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import +#import + +NS_ASSUME_NONNULL_BEGIN + +/** + * Represents a BSON timestamp type in Firestore documents. 
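Reading the values declared in these headers back is just a cast on the snapshot data, mirroring what the integration tests later in this patch do. A sketch, with an illustrative field name:

import FirebaseFirestore

func readObjectId(from doc: DocumentReference) async throws -> BsonObjectId? {
  let snapshot = try await doc.getDocument()
  return snapshot.get("objectId") as? BsonObjectId
}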
+ */ +NS_SWIFT_SENDABLE +NS_SWIFT_NAME(BsonTimestamp) +@interface FIRBsonTimestamp : NSObject + +/** The underlying unsigned 32-bit integer for seconds */ +@property(atomic, readonly) uint32_t seconds; + +/** The underlying unsigned 32-bit integer for increment */ +@property(atomic, readonly) uint32_t increment; + +/** :nodoc: */ +- (instancetype)init NS_UNAVAILABLE; + +/** + * Creates a `BsonTimestamp` with the given seconds and increment values. + * @param seconds The underlying unsigned 32-bit integer for seconds. + * @param increment The underlying unsigned 32-bit integer for increment. + */ +- (instancetype)initWithSeconds:(uint32_t)seconds increment:(uint32_t)increment; + +/** Returns true if the given object is equal to this, and false otherwise. */ +- (BOOL)isEqual:(id)object; + +@end + +NS_ASSUME_NONNULL_END diff --git a/Firestore/Source/Public/FirebaseFirestore/FIRFieldValue.h b/Firestore/Source/Public/FirebaseFirestore/FIRFieldValue.h index 9defa3a0569..2e35547bace 100644 --- a/Firestore/Source/Public/FirebaseFirestore/FIRFieldValue.h +++ b/Firestore/Source/Public/FirebaseFirestore/FIRFieldValue.h @@ -18,6 +18,13 @@ NS_ASSUME_NONNULL_BEGIN @class FIRVectorValue; +@class FIRMinKey; +@class FIRMaxKey; +@class FIRRegexValue; +@class FIRInt32Value; +@class FIRBsonObjectId; +@class FIRBsonTimestamp; +@class FIRBsonBinaryData; /** * Sentinel values that can be used when writing document fields with `setData()` or `updateData()`. diff --git a/Firestore/Source/Public/FirebaseFirestore/FIRInt32Value.h b/Firestore/Source/Public/FirebaseFirestore/FIRInt32Value.h new file mode 100644 index 00000000000..a8bfb3b08d7 --- /dev/null +++ b/Firestore/Source/Public/FirebaseFirestore/FIRInt32Value.h @@ -0,0 +1,45 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import + +NS_ASSUME_NONNULL_BEGIN + +/** + * Represents a 32-bit integer type in Firestore documents. + */ +NS_SWIFT_SENDABLE +NS_SWIFT_NAME(Int32Value) +@interface FIRInt32Value : NSObject + +/** The 32-bit integer value. */ +@property(atomic, assign, readonly) int32_t value; + +/** :nodoc: */ +- (instancetype)init NS_UNAVAILABLE; + +/** + * Creates an `Int32Value` constructed with the given value. + * @param value The 32-bit integer value to be stored. + */ +- (instancetype)initWithValue:(int)value NS_SWIFT_NAME(init(_:)); + +/** Returns true if the given object is equal to this, and false otherwise. */ +- (BOOL)isEqual:(id)object; + +@end + +NS_ASSUME_NONNULL_END diff --git a/Firestore/Source/Public/FirebaseFirestore/FIRMaxKey.h b/Firestore/Source/Public/FirebaseFirestore/FIRMaxKey.h new file mode 100644 index 00000000000..e33c73ddb6f --- /dev/null +++ b/Firestore/Source/Public/FirebaseFirestore/FIRMaxKey.h @@ -0,0 +1,36 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
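BsonTimestamp is an ordered (seconds, increment) pair, distinct from the existing Timestamp type. The integration tests later in this patch order documents by seconds first and then by increment; a comparator written against that assumed ordering would look like:

import FirebaseFirestore

func isOrderedBefore(_ lhs: BsonTimestamp, _ rhs: BsonTimestamp) -> Bool {
  if lhs.seconds != rhs.seconds { return lhs.seconds < rhs.seconds }
  return lhs.increment < rhs.increment
}

assert(isOrderedBefore(BsonTimestamp(seconds: 1, increment: 2),
                       BsonTimestamp(seconds: 2, increment: 1)))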
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import + +NS_ASSUME_NONNULL_BEGIN + +/** + * Represents a "Max Key" type in Firestore documents. + */ +NS_SWIFT_SENDABLE +NS_SWIFT_NAME(MaxKey) +@interface FIRMaxKey : NSObject + +/** Returns the only instance of MaxKey. */ ++ (FIRMaxKey *)instance; + +/** Returns true if the given object is equal to this, and false otherwise. */ +- (BOOL)isEqual:(id)object; + +@end + +NS_ASSUME_NONNULL_END diff --git a/Firestore/Source/Public/FirebaseFirestore/FIRMinKey.h b/Firestore/Source/Public/FirebaseFirestore/FIRMinKey.h new file mode 100644 index 00000000000..a6b8540dde8 --- /dev/null +++ b/Firestore/Source/Public/FirebaseFirestore/FIRMinKey.h @@ -0,0 +1,36 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import + +NS_ASSUME_NONNULL_BEGIN + +/** + * Represents a "Min Key" type in Firestore documents. + */ +NS_SWIFT_SENDABLE +NS_SWIFT_NAME(MinKey) +@interface FIRMinKey : NSObject + +/** Returns the only instance of MinKey. */ ++ (FIRMinKey *)instance; + +/** Returns true if the given object is equal to this, and false otherwise. */ +- (BOOL)isEqual:(id)object; + +@end + +NS_ASSUME_NONNULL_END diff --git a/Firestore/Source/Public/FirebaseFirestore/FIRRegexValue.h b/Firestore/Source/Public/FirebaseFirestore/FIRRegexValue.h new file mode 100644 index 00000000000..5ce99f7c35a --- /dev/null +++ b/Firestore/Source/Public/FirebaseFirestore/FIRRegexValue.h @@ -0,0 +1,49 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import + +NS_ASSUME_NONNULL_BEGIN + +/** + * Represents a regular expression type in Firestore documents. + */ +NS_SWIFT_SENDABLE +NS_SWIFT_NAME(RegexValue) +@interface FIRRegexValue : NSObject + +/** The regular expression pattern */ +@property(atomic, copy, readonly) NSString *pattern; + +/** The regular expression options */ +@property(atomic, copy, readonly) NSString *options; + +/** :nodoc: */ +- (instancetype)init NS_UNAVAILABLE; + +/** + * Creates a `RegexValue` constructed with the given pattern and options. 
+ * @param pattern The regular expression pattern. + * @param options The regular expression options. + */ +- (instancetype)initWithPattern:(nonnull NSString *)pattern options:(nonnull NSString *)options; + +/** Returns true if the given object is equal to this, and false otherwise. */ +- (BOOL)isEqual:(id)object; + +@end + +NS_ASSUME_NONNULL_END diff --git a/Firestore/Swift/Source/Codable/BsonBinaryData+Codable.swift b/Firestore/Swift/Source/Codable/BsonBinaryData+Codable.swift new file mode 100644 index 00000000000..ad0114e3180 --- /dev/null +++ b/Firestore/Swift/Source/Codable/BsonBinaryData+Codable.swift @@ -0,0 +1,66 @@ +/* + * Copyright 2025 Google + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#if SWIFT_PACKAGE + @_exported import FirebaseFirestoreInternalWrapper +#else + @_exported import FirebaseFirestoreInternal +#endif // SWIFT_PACKAGE + +/** + * A protocol describing the encodable properties of an BsonBinaryData. + * + * Note: this protocol exists as a workaround for the Swift compiler: if the BsonBinaryData class + * was extended directly to conform to Codable, the methods implementing the protocol would be need + * to be marked required but that can't be done in an extension. Declaring the extension on the + * protocol sidesteps this issue. + */ +private protocol CodableBsonBinaryData: Codable { + var subtype: UInt8 { get } + var data: Data { get } + + init(subtype: UInt8, data: Data) +} + +/** The keys in an BsonBinaryData. Must match the properties of CodableBsonBinaryData. */ +private enum BsonBinaryDataKeys: String, CodingKey { + case subtype + case data +} + +/** + * An extension of BsonBinaryData that implements the behavior of the Codable protocol. + * + * Note: this is implemented manually here because the Swift compiler can't synthesize these methods + * when declaring an extension to conform to Codable. + */ +extension CodableBsonBinaryData { + public init(from decoder: Decoder) throws { + let container = try decoder.container(keyedBy: BsonBinaryDataKeys.self) + let subtype = try container.decode(UInt8.self, forKey: .subtype) + let data = try container.decode(Data.self, forKey: .data) + self.init(subtype: subtype, data: data) + } + + public func encode(to encoder: Encoder) throws { + var container = encoder.container(keyedBy: BsonBinaryDataKeys.self) + try container.encode(subtype, forKey: .subtype) + try container.encode(data, forKey: .data) + } +} + +/** Extends BsonBinaryData to conform to Codable. */ +extension FirebaseFirestore.BsonBinaryData: FirebaseFirestore.CodableBsonBinaryData {} diff --git a/Firestore/Swift/Source/Codable/BsonObjectId+Codable.swift b/Firestore/Swift/Source/Codable/BsonObjectId+Codable.swift new file mode 100644 index 00000000000..3669b6e9c1b --- /dev/null +++ b/Firestore/Swift/Source/Codable/BsonObjectId+Codable.swift @@ -0,0 +1,62 @@ +/* + * Copyright 2025 Google + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
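With the Codable conformance above, BsonBinaryData can sit inside an ordinary Codable model and be written through Firestore's existing setData(from:) entry point. A sketch; the struct and field names are illustrative:

import FirebaseFirestore
import Foundation

struct Attachment: Codable {
  let name: String
  let blob: BsonBinaryData
}

func save(_ attachment: Attachment, to doc: DocumentReference) throws {
  try doc.setData(from: attachment)
}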
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#if SWIFT_PACKAGE + @_exported import FirebaseFirestoreInternalWrapper +#else + @_exported import FirebaseFirestoreInternal +#endif // SWIFT_PACKAGE + +/** + * A protocol describing the encodable properties of an BsonObjectId. + * + * Note: this protocol exists as a workaround for the Swift compiler: if the BsonObjectId class + * was extended directly to conform to Codable, the methods implementing the protocol would be need + * to be marked required but that can't be done in an extension. Declaring the extension on the + * protocol sidesteps this issue. + */ +private protocol CodableBsonObjectId: Codable { + var value: String { get } + + init(_ value: String) +} + +/** The keys in an BsonObjectId. Must match the properties of CodableBsonObjectId. */ +private enum BsonObjectIdKeys: String, CodingKey { + case value +} + +/** + * An extension of BsonObjectId that implements the behavior of the Codable protocol. + * + * Note: this is implemented manually here because the Swift compiler can't synthesize these methods + * when declaring an extension to conform to Codable. + */ +extension CodableBsonObjectId { + public init(from decoder: Decoder) throws { + let container = try decoder.container(keyedBy: BsonObjectIdKeys.self) + let value = try container.decode(String.self, forKey: .value) + self.init(value) + } + + public func encode(to encoder: Encoder) throws { + var container = encoder.container(keyedBy: BsonObjectIdKeys.self) + try container.encode(value, forKey: .value) + } +} + +/** Extends BsonObjectId to conform to Codable. */ +extension FirebaseFirestore.BsonObjectId: FirebaseFirestore.CodableBsonObjectId {} diff --git a/Firestore/Swift/Source/Codable/BsonTimestamp+Codable.swift b/Firestore/Swift/Source/Codable/BsonTimestamp+Codable.swift new file mode 100644 index 00000000000..e1fa1f1695d --- /dev/null +++ b/Firestore/Swift/Source/Codable/BsonTimestamp+Codable.swift @@ -0,0 +1,66 @@ +/* + * Copyright 2025 Google + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#if SWIFT_PACKAGE + @_exported import FirebaseFirestoreInternalWrapper +#else + @_exported import FirebaseFirestoreInternal +#endif // SWIFT_PACKAGE + +/** + * A protocol describing the encodable properties of an BsonTimestamp. + * + * Note: this protocol exists as a workaround for the Swift compiler: if the BsonTimestamp class + * was extended directly to conform to Codable, the methods implementing the protocol would be need + * to be marked required but that can't be done in an extension. Declaring the extension on the + * protocol sidesteps this issue. 
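The same private-protocol indirection is repeated for every type in this patch. The reason, restated: an initializer requirement on a class conformance would normally have to be marked required, which an extension cannot do, so the Codable implementation is hung off a private protocol instead. A minimal standalone sketch of the pattern, using a stand-in final class rather than any real SDK type:

import Foundation

private protocol CodableBox: Codable {
  var value: String { get }
  init(_ value: String)
}

extension CodableBox {
  init(from decoder: Decoder) throws {
    let container = try decoder.singleValueContainer()
    self.init(try container.decode(String.self))
  }

  func encode(to encoder: Encoder) throws {
    var container = encoder.singleValueContainer()
    try container.encode(value)
  }
}

final class Box {
  let value: String
  init(_ value: String) { self.value = value }
}

extension Box: CodableBox {}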
+ */ +private protocol CodableBsonTimestamp: Codable { + var seconds: UInt32 { get } + var increment: UInt32 { get } + + init(seconds: UInt32, increment: UInt32) +} + +/** The keys in an BsonTimestamp. Must match the properties of CodableBsonTimestamp. */ +private enum BsonTimestampKeys: String, CodingKey { + case seconds + case increment +} + +/** + * An extension of BsonTimestamp that implements the behavior of the Codable protocol. + * + * Note: this is implemented manually here because the Swift compiler can't synthesize these methods + * when declaring an extension to conform to Codable. + */ +extension CodableBsonTimestamp { + public init(from decoder: Decoder) throws { + let container = try decoder.container(keyedBy: BsonTimestampKeys.self) + let seconds = try container.decode(UInt32.self, forKey: .seconds) + let increment = try container.decode(UInt32.self, forKey: .increment) + self.init(seconds: seconds, increment: increment) + } + + public func encode(to encoder: Encoder) throws { + var container = encoder.container(keyedBy: BsonTimestampKeys.self) + try container.encode(seconds, forKey: .seconds) + try container.encode(increment, forKey: .increment) + } +} + +/** Extends BsonTimestamp to conform to Codable. */ +extension FirebaseFirestore.BsonTimestamp: FirebaseFirestore.CodableBsonTimestamp {} diff --git a/Firestore/Swift/Source/Codable/CodablePassThroughTypes.swift b/Firestore/Swift/Source/Codable/CodablePassThroughTypes.swift index 20f5a6e8b2b..c2b4790d624 100644 --- a/Firestore/Swift/Source/Codable/CodablePassThroughTypes.swift +++ b/Firestore/Swift/Source/Codable/CodablePassThroughTypes.swift @@ -32,6 +32,13 @@ struct FirestorePassthroughTypes: StructureCodingPassthroughTypeResolver { t is Timestamp || t is FieldValue || t is DocumentReference || - t is VectorValue + t is VectorValue || + t is MinKey || + t is MaxKey || + t is RegexValue || + t is Int32Value || + t is BsonObjectId || + t is BsonTimestamp || + t is BsonBinaryData } } diff --git a/Firestore/Swift/Source/Codable/Int32Value+Codable.swift b/Firestore/Swift/Source/Codable/Int32Value+Codable.swift new file mode 100644 index 00000000000..170f4b30c29 --- /dev/null +++ b/Firestore/Swift/Source/Codable/Int32Value+Codable.swift @@ -0,0 +1,62 @@ +/* + * Copyright 2025 Google + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#if SWIFT_PACKAGE + @_exported import FirebaseFirestoreInternalWrapper +#else + @_exported import FirebaseFirestoreInternal +#endif // SWIFT_PACKAGE + +/** + * A protocol describing the encodable properties of an Int32Value. + * + * Note: this protocol exists as a workaround for the Swift compiler: if the Int32Value class + * was extended directly to conform to Codable, the methods implementing the protocol would be need + * to be marked required but that can't be done in an extension. Declaring the extension on the + * protocol sidesteps this issue. 
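The FirestorePassthroughTypes change above means the Firestore encoder hands the seven new types to the SDK as-is instead of running them through their keyed Codable representations, so a model can mix them freely. An illustrative sketch:

import FirebaseFirestore

struct Event: Codable {
  let id: BsonObjectId
  let at: BsonTimestamp
  let attempts: Int32Value
}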
+ */ +private protocol CodableInt32Value: Codable { + var value: Int32 { get } + + init(_ value: Int32) +} + +/** The keys in an Int32Value. Must match the properties of CodableInt32Value. */ +private enum Int32ValueKeys: String, CodingKey { + case value +} + +/** + * An extension of Int32Value that implements the behavior of the Codable protocol. + * + * Note: this is implemented manually here because the Swift compiler can't synthesize these methods + * when declaring an extension to conform to Codable. + */ +extension CodableInt32Value { + public init(from decoder: Decoder) throws { + let container = try decoder.container(keyedBy: Int32ValueKeys.self) + let value = try container.decode(Int32.self, forKey: .value) + self.init(value) + } + + public func encode(to encoder: Encoder) throws { + var container = encoder.container(keyedBy: Int32ValueKeys.self) + try container.encode(value, forKey: .value) + } +} + +/** Extends Int32Value to conform to Codable. */ +extension FirebaseFirestore.Int32Value: FirebaseFirestore.CodableInt32Value {} diff --git a/Firestore/Swift/Source/Codable/MaxKey+Codable.swift b/Firestore/Swift/Source/Codable/MaxKey+Codable.swift new file mode 100644 index 00000000000..f164bd53e37 --- /dev/null +++ b/Firestore/Swift/Source/Codable/MaxKey+Codable.swift @@ -0,0 +1,65 @@ +/* + * Copyright 2025 Google + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#if SWIFT_PACKAGE + @_exported import FirebaseFirestoreInternalWrapper +#else + @_exported import FirebaseFirestoreInternal +#endif // SWIFT_PACKAGE + +/** + * A protocol describing the encodable properties of a MaxKey. + * + * Note: this protocol exists as a workaround for the Swift compiler: if the MaxKey class + * was extended directly to conform to Codable, the methods implementing the protocol would be need + * to be marked required but that can't be done in an extension. Declaring the extension on the + * protocol sidesteps this issue. + */ +private protocol CodableMaxKey: Codable { + init() +} + +/** The keys in a MaxKey. */ +private enum MaxKeyKeys: String, CodingKey { + // We'll use a simple CodingKeys enum with a single case + // to represent the presence of the singleton. + case isFirestoreMaxKey +} + +/** + * An extension of MaxKey that implements the behavior of the Codable protocol. + * + * Note: this is implemented manually here because the Swift compiler can't synthesize these methods + * when declaring an extension to conform to Codable. + */ +extension CodableMaxKey { + public init(from decoder: Decoder) throws { + // The presence of the `isFirestoreMaxKey` is enough to know that we + // should return the singleton. 
+ let container = try decoder.container(keyedBy: MaxKeyKeys.self)
+ _ = try container.decodeIfPresent(Bool.self, forKey: .isFirestoreMaxKey)
+ self.init()
+ }
+
+ public func encode(to encoder: Encoder) throws {
+ var container = encoder.container(keyedBy: MaxKeyKeys.self)
+ // Encode a value of `true` to indicate the presence of MaxKey
+ try container.encode(true, forKey: .isFirestoreMaxKey)
+ }
+}
+
+/** Extends MaxKey to conform to Codable. */
+extension FirebaseFirestore.MaxKey: FirebaseFirestore.CodableMaxKey {} diff --git a/Firestore/Swift/Source/Codable/MinKey+Codable.swift b/Firestore/Swift/Source/Codable/MinKey+Codable.swift new file mode 100644 index 00000000000..efb4b1da5eb --- /dev/null +++ b/Firestore/Swift/Source/Codable/MinKey+Codable.swift @@ -0,0 +1,65 @@ +/*
+ * Copyright 2025 Google
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#if SWIFT_PACKAGE
+ @_exported import FirebaseFirestoreInternalWrapper
+#else
+ @_exported import FirebaseFirestoreInternal
+#endif // SWIFT_PACKAGE
+
+/**
+ * A protocol describing the encodable properties of a MinKey.
+ *
+ * Note: this protocol exists as a workaround for the Swift compiler: if the MinKey class
+ * was extended directly to conform to Codable, the methods implementing the protocol would need
+ * to be marked required but that can't be done in an extension. Declaring the extension on the
+ * protocol sidesteps this issue.
+ */
+private protocol CodableMinKey: Codable {
+ init()
+}
+
+/** The keys in a MinKey. */
+private enum MinKeyKeys: String, CodingKey {
+ // We'll use a simple CodingKeys enum with a single case
+ // to represent the presence of the singleton.
+ case isFirestoreMinKey
+}
+
+/**
+ * An extension of MinKey that implements the behavior of the Codable protocol.
+ *
+ * Note: this is implemented manually here because the Swift compiler can't synthesize these methods
+ * when declaring an extension to conform to Codable.
+ */
+extension CodableMinKey {
+ public init(from decoder: Decoder) throws {
+ // The presence of the `isFirestoreMinKey` is enough to know that we
+ // should return the singleton.
+ let container = try decoder.container(keyedBy: MinKeyKeys.self)
+ _ = try container.decodeIfPresent(Bool.self, forKey: .isFirestoreMinKey)
+ self.init()
+ }
+
+ public func encode(to encoder: Encoder) throws {
+ var container = encoder.container(keyedBy: MinKeyKeys.self)
+ // Encode a value of `true` to indicate the presence of MinKey
+ try container.encode(true, forKey: .isFirestoreMinKey)
+ }
+}
+
+/** Extends MinKey to conform to Codable.
*/ +extension FirebaseFirestore.MinKey: FirebaseFirestore.CodableMinKey {} diff --git a/Firestore/Swift/Source/Codable/RegexValue+Codable.swift b/Firestore/Swift/Source/Codable/RegexValue+Codable.swift new file mode 100644 index 00000000000..db145ea0bfd --- /dev/null +++ b/Firestore/Swift/Source/Codable/RegexValue+Codable.swift @@ -0,0 +1,66 @@ +/* + * Copyright 2025 Google + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#if SWIFT_PACKAGE + @_exported import FirebaseFirestoreInternalWrapper +#else + @_exported import FirebaseFirestoreInternal +#endif // SWIFT_PACKAGE + +/** + * A protocol describing the encodable properties of a RegexValue. + * + * Note: this protocol exists as a workaround for the Swift compiler: if the RegexValue class + * was extended directly to conform to Codable, the methods implementing the protocol would be need + * to be marked required but that can't be done in an extension. Declaring the extension on the + * protocol sidesteps this issue. + */ +private protocol CodableRegexValue: Codable { + var pattern: String { get } + var options: String { get } + + init(pattern: String, options: String) +} + +/** The keys in a RegexValue. Must match the properties of CodableRegexValue. */ +private enum RegexValueKeys: String, CodingKey { + case pattern + case options +} + +/** + * An extension of RegexValue that implements the behavior of the Codable protocol. + * + * Note: this is implemented manually here because the Swift compiler can't synthesize these methods + * when declaring an extension to conform to Codable. + */ +extension CodableRegexValue { + public init(from decoder: Decoder) throws { + let container = try decoder.container(keyedBy: RegexValueKeys.self) + let pattern = try container.decode(String.self, forKey: .pattern) + let options = try container.decode(String.self, forKey: .options) + self.init(pattern: pattern, options: options) + } + + public func encode(to encoder: Encoder) throws { + var container = encoder.container(keyedBy: RegexValueKeys.self) + try container.encode(pattern, forKey: .pattern) + try container.encode(options, forKey: .options) + } +} + +/** Extends RegexValue to conform to Codable. */ +extension FirebaseFirestore.RegexValue: FirebaseFirestore.CodableRegexValue {} diff --git a/Firestore/Swift/Source/Codable/VectorValue+Codable.swift b/Firestore/Swift/Source/Codable/VectorValue+Codable.swift index 45f3176a74a..0c69db6eee9 100644 --- a/Firestore/Swift/Source/Codable/VectorValue+Codable.swift +++ b/Firestore/Swift/Source/Codable/VectorValue+Codable.swift @@ -29,7 +29,7 @@ private protocol CodableVectorValue: Codable { init(__array: [NSNumber]) } -/** The keys in a Timestamp. Must match the properties of CodableTimestamp. */ +/** The keys in a VectorValue. Must match the properties of CodableVectorValue. 
*/ private enum VectorValueKeys: String, CodingKey { case array } diff --git a/Firestore/Swift/Tests/Integration/BsonTypesIntegrationTests.swift b/Firestore/Swift/Tests/Integration/BsonTypesIntegrationTests.swift new file mode 100644 index 00000000000..0585fd84607 --- /dev/null +++ b/Firestore/Swift/Tests/Integration/BsonTypesIntegrationTests.swift @@ -0,0 +1,427 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import Combine +import FirebaseFirestore +import Foundation + +// iOS 15 required for test implementation, not BSON types +@available(iOS 15, tvOS 15, macOS 12.0, macCatalyst 13, watchOS 7, *) +class BsonTypesIntegrationTests: FSTIntegrationTestCase { + func toDataArray(_ snapshot: QuerySnapshot) -> [[String: Any]] { + return snapshot.documents.map { document in + document.data() + } + } + + func toDocIdArray(_ snapshot: QuerySnapshot) -> [String] { + return snapshot.documents.map { document in + document.documentID + } + } + + func setDocumentData( + _ documentDataMap: [String: [String: Any]], + toCollection: CollectionReference + ) async { + for (documentName, documentData) in documentDataMap { + do { + try await toCollection.document(documentName).setData(documentData) + } catch { + print("Failed to write documents to collection.") + } + } + } + + func verifySnapshot(snapshot: QuerySnapshot, + allData: [String: [String: Any]], + expectedDocIds: [String], + description: String) throws { + XCTAssertEqual(snapshot.count, expectedDocIds.count) + + XCTAssertTrue(expectedDocIds == toDocIdArray(snapshot), + "Did not get the same documents in query result set for '\(description)'. Expected Doc IDs: \(expectedDocIds), Actual Doc IDs: \(toDocIdArray(snapshot))") + + for i in 0 ..< expectedDocIds.count { + let expectedDocId = expectedDocIds[i] + let expectedDocData = allData[expectedDocId]! + let actualDocData = snapshot.documents[i].data() + + // We don't need to compare expectedDocId and actualDocId because + // it's already been checked above. We only compare the data below. + let nsExpected = NSDictionary(dictionary: expectedDocData) + let nsActual = NSDictionary(dictionary: actualDocData) + XCTAssertTrue( + nsExpected.isEqual(nsActual), + "Did not get the same document content. Expected Doc Data: \(nsExpected), Actual Doc Data:\(nsActual)" + ) + } + } + + // Asserts that the given query produces the expected result for all of the + // following scenarios: + // 1. Using a snapshot listener to get the first snapshot for the query. + // 2. Performing the given query using source=server. + // 3. Performing the given query using source=cache. 
+ func assertSdkQueryResultsConsistentWithBackend(_ documentDataMap: [String: [String: Any]], + query: Query, + expectedResult: [String]) async throws { + let watchSnapshot = try await Future() { promise in + query.addSnapshotListener { snapshot, error in + if let error { + promise(Result.failure(error)) + } + if let snapshot { + promise(Result.success(snapshot)) + } + } + }.value + + try verifySnapshot( + snapshot: watchSnapshot, + allData: documentDataMap, + expectedDocIds: expectedResult, + description: "snapshot listener" + ) + + checkOnlineAndOfflineQuery(query, matchesResult: expectedResult) + } + + func testCanWriteAndReadBsonTypes() async throws { + let collection = collectionRef() + let ref = try await collection.addDocument(data: [ + "binary": BsonBinaryData(subtype: 1, data: Data([1, 2, 3])), + "objectId": BsonObjectId("507f191e810c19729de860ea"), + "int32": Int32Value(1), + "min": MinKey.instance(), + "max": MaxKey.instance(), + "regex": RegexValue(pattern: "^foo", options: "i"), + ]) + + try await ref.updateData([ + "binary": BsonBinaryData(subtype: 1, data: Data([1, 2, 3])), + "timestamp": BsonTimestamp(seconds: 1, increment: 2), + "int32": Int32Value(2), + ]) + + let snapshot = try await ref.getDocument() + XCTAssertEqual( + snapshot.get("objectId") as? BsonObjectId, + BsonObjectId("507f191e810c19729de860ea") + ) + XCTAssertEqual( + snapshot.get("int32") as? Int32Value, + Int32Value(2) + ) + XCTAssertEqual( + snapshot.get("min") as? MinKey, + MinKey.instance() + ) + XCTAssertEqual( + snapshot.get("max") as? MaxKey, + MaxKey.instance() + ) + XCTAssertEqual( + snapshot.get("binary") as? BsonBinaryData, + BsonBinaryData(subtype: 1, data: Data([1, 2, 3])) + ) + XCTAssertEqual( + snapshot.get("timestamp") as? BsonTimestamp, + BsonTimestamp(seconds: 1, increment: 2) + ) + XCTAssertEqual( + snapshot.get("regex") as? 
RegexValue, + RegexValue(pattern: "^foo", options: "i") + ) + } + + func testCanFilterAndOrderObjectIds() async throws { + let testDocs = [ + "a": ["key": BsonObjectId("507f191e810c19729de860ea")], + "b": ["key": BsonObjectId("507f191e810c19729de860eb")], + "c": ["key": BsonObjectId("507f191e810c19729de860ec")], + ] + + let collection = collectionRef() + await setDocumentData(testDocs, toCollection: collection) + + var query = collection + .whereField("key", isGreaterThan: BsonObjectId("507f191e810c19729de860ea")) + .order(by: "key", descending: true) + + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + query: query, + expectedResult: ["c", "b"] + ) + + query = collection + .whereField("key", in: + [ + BsonObjectId("507f191e810c19729de860ea"), + BsonObjectId("507f191e810c19729de860eb"), + ]) + .order(by: "key", descending: true) + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + query: query, + expectedResult: ["b", "a"] + ) + } + + func testCanFilterAndOrderInt32Values() async throws { + let testDocs: [String: [String: Any]] = [ + "a": ["key": Int32Value(-1)], + "b": ["key": Int32Value(1)], + "c": ["key": Int32Value(2)], + ] + + let collection = collectionRef() + await setDocumentData(testDocs, toCollection: collection) + + var query = collection + .whereField("key", isGreaterThanOrEqualTo: Int32Value(1)) + .order(by: "key", descending: true) + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + query: query, + expectedResult: ["c", "b"] + ) + + query = collection + .whereField("key", notIn: [Int32Value(1)]) + .order(by: "key", descending: true) + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + query: query, + expectedResult: ["c", "a"] + ) + } + + func testCanFilterAndOrderTimestampValues() async throws { + let testDocs: [String: [String: Any]] = [ + "a": ["key": BsonTimestamp(seconds: 1, increment: 1)], + "b": ["key": BsonTimestamp(seconds: 1, increment: 2)], + "c": ["key": BsonTimestamp(seconds: 2, increment: 1)], + ] + + let collection = collectionRef() + await setDocumentData(testDocs, toCollection: collection) + + var query = collection + .whereField("key", isGreaterThan: BsonTimestamp(seconds: 1, increment: 1)) + .order(by: "key", descending: true) + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + query: query, + expectedResult: ["c", "b"] + ) + + query = collection + .whereField("key", isNotEqualTo: BsonTimestamp(seconds: 1, increment: 1)) + .order(by: "key", descending: true) + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + query: query, + expectedResult: ["c", "b"] + ) + } + + func testCanFilterAndOrderBinaryValues() async throws { + let testDocs: [String: [String: Any]] = [ + "a": ["key": BsonBinaryData(subtype: 1, data: Data([1, 2, 3]))], + "b": ["key": BsonBinaryData(subtype: 1, data: Data([1, 2, 4]))], + "c": ["key": BsonBinaryData(subtype: 2, data: Data([1, 2, 3]))], + ] + + let collection = collectionRef() + await setDocumentData(testDocs, toCollection: collection) + + var query = collection + .whereField( + "key", + isGreaterThan: BsonBinaryData(subtype: 1, data: Data([1, 2, 3])) + ) + .order(by: "key", descending: true) + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + query: query, + expectedResult: ["c", "b"] + ) + + query = collection + .whereField( + "key", + isGreaterThanOrEqualTo: BsonBinaryData(subtype: 1, data: Data([1, 2, 3])) + ) + .whereField( + "key", + isLessThan: BsonBinaryData(subtype: 2, data: Data([1, 2, 3])) + ) + 
.order(by: "key", descending: true) + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + query: query, + expectedResult: ["b", "a"] + ) + } + + func testCanFilterAndOrderRegexValues() async throws { + let testDocs = [ + "a": ["key": RegexValue(pattern: "^bar", options: "i")], + "b": ["key": RegexValue(pattern: "^bar", options: "x")], + "c": ["key": RegexValue(pattern: "^baz", options: "i")], + ] + + let collection = collectionRef() + await setDocumentData(testDocs, toCollection: collection) + + let query = + collection.whereFilter( + Filter.orFilter([ + Filter.whereField("key", isGreaterThan: RegexValue(pattern: "^bar", options: "x")), + Filter.whereField("key", isNotEqualTo: RegexValue(pattern: "^bar", options: "x")), + ]) + ).order(by: "key", descending: true) + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + query: query, + expectedResult: ["c", "a"] + ) + } + + func testCanFilterAndOrderMinKeyValues() async throws { + let testDocs: [String: [String: Any]] = [ + "a": ["key": MinKey.instance()], + "b": ["key": MinKey.instance()], + "c": ["key": MaxKey.instance()], + ] + + let collection = collectionRef() + await setDocumentData(testDocs, toCollection: collection) + + let query = collection + .whereField("key", isEqualTo: MinKey.instance()) + .order(by: "key", descending: true) + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + query: query, + expectedResult: ["b", "a"] + ) + } + + func testCanFilterAndOrderMaxKeyValues() async throws { + let testDocs: [String: [String: Any]] = [ + "a": ["key": MinKey.instance()], + "b": ["key": MaxKey.instance()], + "c": ["key": MaxKey.instance()], + ] + + let collection = collectionRef() + await setDocumentData(testDocs, toCollection: collection) + + let query = collection + .whereField("key", isEqualTo: MaxKey.instance()) + .order(by: "key", descending: true) + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + query: query, + expectedResult: ["c", "b"] + ) + } + + func testCanOrderBsonTypesTogether() async throws { + let testDocs: [String: [String: Any]] = [ + "bsonObjectId1": ["key": BsonObjectId("507f191e810c19729de860ea")], + "bsonObjectId2": ["key": BsonObjectId("507f191e810c19729de860eb")], + "bsonObjectId3": ["key": BsonObjectId("407f191e810c19729de860ea")], + "regex1": ["key": RegexValue(pattern: "^bar", options: "m")], + "regex2": ["key": RegexValue(pattern: "^bar", options: "i")], + "regex3": ["key": RegexValue(pattern: "^baz", options: "i")], + "bsonTimestamp1": ["key": BsonTimestamp(seconds: 2, increment: 0)], + "bsonTimestamp2": ["key": BsonTimestamp(seconds: 1, increment: 2)], + "bsonTimestamp3": ["key": BsonTimestamp(seconds: 1, increment: 1)], + "bsonBinary1": ["key": BsonBinaryData(subtype: 1, data: Data([1, 2, 3]))], + "bsonBinary2": ["key": BsonBinaryData(subtype: 1, data: Data([1, 2, 4]))], + "bsonBinary3": ["key": BsonBinaryData(subtype: 2, data: Data([1, 2, 2]))], + "int32Value1": ["key": Int32Value(-1)], + "int32Value2": ["key": Int32Value(1)], + "int32Value3": ["key": Int32Value(0)], + "minKey1": ["key": MinKey.instance()], + "minKey2": ["key": MinKey.instance()], + "maxKey1": ["key": MaxKey.instance()], + "maxKey2": ["key": MaxKey.instance()], + ] + + let collection = collectionRef() + await setDocumentData(testDocs, toCollection: collection) + + let query = collection.order(by: "key", descending: true) + try await assertSdkQueryResultsConsistentWithBackend(testDocs, query: query, expectedResult: [ + "maxKey2", + "maxKey1", + "regex3", + "regex1", + 
"regex2", + "bsonObjectId2", + "bsonObjectId1", + "bsonObjectId3", + "bsonBinary3", + "bsonBinary2", + "bsonBinary1", + "bsonTimestamp1", + "bsonTimestamp2", + "bsonTimestamp3", + "int32Value2", + "int32Value3", + "int32Value1", + "minKey2", + "minKey1", + ]) + } + + func testCanRunTransactionsOnDocumentsWithBsonTypes() async throws { + let testDocs = [ + "a": ["key": BsonTimestamp(seconds: 1, increment: 2)], + "b": ["key": "placeholder"], + "c": ["key": BsonBinaryData(subtype: 1, data: Data([1, 2, 3]))], + ] + + let collection = collectionRef() + await setDocumentData(testDocs, toCollection: collection) + + try await runTransaction(collection.firestore, block: { transaction, errorPointer -> Any? in + transaction.setData( + ["key": RegexValue(pattern: "^foo", options: "i")], + forDocument: collection.document("b") + ) + transaction.deleteDocument(collection.document("c")) + return true + }) + + let snapshot = try await collection.getDocuments() + print("snapshot.size=") + print(snapshot.documents.count) + print(toDataArray(snapshot)) + XCTAssertEqual( + toDataArray(snapshot) as? [[String: RegexValue]], + [ + ["key": BsonTimestamp(seconds: 1, increment: 2)], + ["key": RegexValue(pattern: "^foo", options: "i")], + ] as? [[String: RegexValue]] + ) + } +} diff --git a/Firestore/Swift/Tests/Integration/CodableIntegrationTests.swift b/Firestore/Swift/Tests/Integration/CodableIntegrationTests.swift index 406fb823a2a..75a66e94852 100644 --- a/Firestore/Swift/Tests/Integration/CodableIntegrationTests.swift +++ b/Firestore/Swift/Tests/Integration/CodableIntegrationTests.swift @@ -76,6 +76,33 @@ class CodableIntegrationTests: FSTIntegrationTestCase { awaitExpectations() } + private struct ModelWithTestField: Codable { + var name: String + var testField: T + } + + private func assertCanWriteAndReadCodableValueWithAllFlavors(value: T) throws { + let model = ModelWithTestField( + name: "name", + testField: value + ) + + let docToWrite = documentRef() + + for flavor in allFlavors { + try setData(from: model, forDocument: docToWrite, withFlavor: flavor) + + let data = try readDocument(forRef: docToWrite).data(as: ModelWithTestField.self) + + XCTAssertEqual( + data.testField, + value, + "Failed with flavor \(flavor)" + ) + } + } + func testCodableRoundTrip() throws { struct Model: Codable, Equatable { var name: String @@ -84,6 +111,13 @@ class CodableIntegrationTests: FSTIntegrationTestCase { var geoPoint: GeoPoint var docRef: DocumentReference var vector: VectorValue + var regex: RegexValue + var int32: Int32Value + var minKey: MinKey + var maxKey: MaxKey + var bsonOjectId: BsonObjectId + var bsonTimestamp: BsonTimestamp + var bsonBinaryData: BsonBinaryData } let docToWrite = documentRef() let model = Model(name: "test", @@ -91,7 +125,14 @@ class CodableIntegrationTests: FSTIntegrationTestCase { ts: Timestamp(seconds: 987_654_321, nanoseconds: 0), geoPoint: GeoPoint(latitude: 45, longitude: 54), docRef: docToWrite, - vector: FieldValue.vector([0.7, 0.6])) + vector: FieldValue.vector([0.7, 0.6]), + regex: RegexValue(pattern: "^foo", options: "i"), + int32: Int32Value(1), + minKey: MinKey.instance(), + maxKey: MaxKey.instance(), + bsonOjectId: BsonObjectId("507f191e810c19729de860ec"), + bsonTimestamp: BsonTimestamp(seconds: 123, increment: 456), + bsonBinaryData: BsonBinaryData(subtype: 128, data: Data([1, 2]))) for flavor in allFlavors { try setData(from: model, forDocument: docToWrite, withFlavor: flavor) @@ -188,28 +229,44 @@ class CodableIntegrationTests: FSTIntegrationTestCase { } func 
testVectorValue() throws { - struct Model: Codable { - var name: String - var embedding: VectorValue - } - let model = Model( - name: "name", - embedding: VectorValue([0.1, 0.3, 0.4]) - ) + try assertCanWriteAndReadCodableValueWithAllFlavors(value: VectorValue([0.1, 0.3, 0.4])) + } - let docToWrite = documentRef() + func testMinKey() throws { + try assertCanWriteAndReadCodableValueWithAllFlavors(value: MinKey.instance()) + } - for flavor in allFlavors { - try setData(from: model, forDocument: docToWrite, withFlavor: flavor) + func testMaxKey() throws { + try assertCanWriteAndReadCodableValueWithAllFlavors(value: MaxKey.instance()) + } - let data = try readDocument(forRef: docToWrite).data(as: Model.self) + func testRegexValue() throws { + try assertCanWriteAndReadCodableValueWithAllFlavors(value: RegexValue( + pattern: "^foo", + options: "i" + )) + } - XCTAssertEqual( - data.embedding, - VectorValue([0.1, 0.3, 0.4]), - "Failed with flavor \(flavor)" - ) - } + func testInt32Value() throws { + try assertCanWriteAndReadCodableValueWithAllFlavors(value: Int32Value(123)) + } + + func testBsonObjectId() throws { + try assertCanWriteAndReadCodableValueWithAllFlavors( + value: BsonObjectId("507f191e810c19729de860ec") + ) + } + + func testBsonTimestamp() throws { + try assertCanWriteAndReadCodableValueWithAllFlavors( + value: BsonTimestamp(seconds: 123, increment: 456) + ) + } + + func testBsonBinaryData() throws { + try assertCanWriteAndReadCodableValueWithAllFlavors( + value: BsonBinaryData(subtype: 128, data: Data([1, 2, 3])) + ) } func testDataBlob() throws { diff --git a/Firestore/Swift/Tests/Integration/SnapshotListenerSourceTests.swift b/Firestore/Swift/Tests/Integration/SnapshotListenerSourceTests.swift index 61b4da23530..494423d8599 100644 --- a/Firestore/Swift/Tests/Integration/SnapshotListenerSourceTests.swift +++ b/Firestore/Swift/Tests/Integration/SnapshotListenerSourceTests.swift @@ -753,4 +753,87 @@ class SnapshotListenerSourceTests: FSTIntegrationTestCase { eventAccumulator.assertNoAdditionalEvents() registration.remove() } + + func testCanListenToDocumentsWithBsonTypes() throws { + let collection = collectionRef() + let testData = [ + "a": ["key": MaxKey.instance()], + "b": ["key": MinKey.instance()], + "c": ["key": BsonTimestamp(seconds: 1, increment: 2)], + "d": ["key": BsonObjectId("507f191e810c19729de860ea")], + "e": ["key": BsonBinaryData(subtype: 1, data: Data([1, 2, 3]))], + "f": ["key": RegexValue(pattern: "^foo", options: "i")], + ] + + let query = collection.order(by: "key", descending: false) + let registration = query.addSnapshotListener(eventAccumulator.valueEventHandler) + + var querySnap = eventAccumulator.awaitEvent(withName: "snapshot") as! QuerySnapshot + XCTAssertEqual(querySnap.isEmpty, true) + + writeAllDocuments(testData, toCollection: collection) + + querySnap = eventAccumulator.awaitEvent(withName: "snapshot") as! QuerySnapshot + XCTAssertEqual(querySnap.isEmpty, false) + XCTAssertEqual( + querySnap.documents[0].data()["key"] as! MinKey, + testData["b"]!["key"] + ) + XCTAssertEqual( + querySnap.documents[1].data()["key"] as! BsonTimestamp, + testData["c"]!["key"] + ) + XCTAssertEqual( + querySnap.documents[2].data()["key"] as! BsonBinaryData, + testData["e"]!["key"] + ) + XCTAssertEqual( + querySnap.documents[3].data()["key"] as! BsonObjectId, + testData["d"]!["key"] + ) + XCTAssertEqual( + querySnap.documents[4].data()["key"] as! RegexValue, + testData["f"]!["key"] + ) + XCTAssertEqual( + querySnap.documents[5].data()["key"] as! 
MaxKey, + testData["a"]!["key"] + ) + + let newData = ["key": Int32Value(2)] + collection.document("g").setData(newData) + + querySnap = eventAccumulator.awaitEvent(withName: "snapshot") as! QuerySnapshot + XCTAssertEqual(querySnap.isEmpty, false) + XCTAssertEqual( + querySnap.documents[0].data()["key"] as! MinKey, + testData["b"]!["key"] + ) + XCTAssertEqual( + querySnap.documents[1].data()["key"] as! Int32Value, + newData["key"]! + ) + XCTAssertEqual( + querySnap.documents[2].data()["key"] as! BsonTimestamp, + testData["c"]!["key"] + ) + XCTAssertEqual( + querySnap.documents[3].data()["key"] as! BsonBinaryData, + testData["e"]!["key"] + ) + XCTAssertEqual( + querySnap.documents[4].data()["key"] as! BsonObjectId, + testData["d"]!["key"] + ) + XCTAssertEqual( + querySnap.documents[5].data()["key"] as! RegexValue, + testData["f"]!["key"] + ) + XCTAssertEqual( + querySnap.documents[6].data()["key"] as! MaxKey, + testData["a"]!["key"] + ) + + registration.remove() + } } diff --git a/Firestore/Swift/Tests/Integration/TypeTest.swift b/Firestore/Swift/Tests/Integration/TypeTest.swift new file mode 100644 index 00000000000..32d1aac96d5 --- /dev/null +++ b/Firestore/Swift/Tests/Integration/TypeTest.swift @@ -0,0 +1,375 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import Combine +import FirebaseFirestore +import Foundation + +// iOS 15 required for test implementation, not BSON types +@available(iOS 13, tvOS 13, macOS 10.15, macCatalyst 13, watchOS 7, *) +class TypeTest: FSTIntegrationTestCase { + // Note: Type tests are missing from our Swift Integration tests. + // Below we're adding new tests for BSON types. + // TODO(b/403333631): Port other (non-BSON) tests to Swift. + + func expectRoundtrip(coll: CollectionReference, + data: [String: Any], + validateSnapshots: Bool = true, + expectedData: [String: Any]? = nil) async throws -> DocumentSnapshot { + let expectedData = expectedData ?? data + let docRef = coll.document("a") + + try await docRef.setData(data) + var docSnapshot = try await docRef.getDocument() + XCTAssertEqual(docSnapshot.data() as NSDictionary?, expectedData as NSDictionary?) + + try await docRef.updateData(data) + docSnapshot = try await docRef.getDocument() + XCTAssertEqual(docSnapshot.data() as NSDictionary?, expectedData as NSDictionary?) + + // Validate that the transaction API returns the same types + _ = try await db.runTransaction { transaction, errorPointer in + do { + let transactionSnapshot = try transaction.getDocument(docRef) + XCTAssertEqual( + transactionSnapshot.data() as NSDictionary?, + expectedData as NSDictionary? + ) + return nil // Transaction doesn't need to modify data in this test + } catch { + errorPointer?.pointee = error as NSError + return nil + } + } + + if validateSnapshots { + let querySnapshot = try await coll.getDocuments() + if let firstDoc = querySnapshot.documents.first { + docSnapshot = firstDoc + XCTAssertEqual(docSnapshot.data() as NSDictionary?, expectedData as NSDictionary?) 
+ } else { + XCTFail("No documents found in collection snapshot") + } + + let expectation = XCTestExpectation(description: "Snapshot listener received data") + var listener: ListenerRegistration? + listener = coll.addSnapshotListener { snapshot, error in + guard let snapshot = snapshot, let firstDoc = snapshot.documents.first, + error == nil else { + XCTFail( + "Error fetching snapshot: \(error?.localizedDescription ?? "Unknown error")" + ) + expectation.fulfill() + return + } + XCTAssertEqual(firstDoc.data() as NSDictionary?, expectedData as NSDictionary?) + expectation.fulfill() + + // Stop listening after receiving the first snapshot + listener?.remove() + } + + // Wait for the listener to fire + await fulfillment(of: [expectation], timeout: 5.0) + } + + return docSnapshot + } + + /* + * A Note on Equality Tests: + * + * Since `isEqual` is a public Obj-c API, we should test that the + * `==` and `!=` operator in Swift is comparing objects correctly. + */ + + func testMinKeyEquality() { + let k1 = MinKey.instance() + let k2 = MinKey.instance() + XCTAssertTrue(k1 == k2) + XCTAssertFalse(k1 != k2) + } + + func testMaxKeyEquality() { + let k1 = MaxKey.instance() + let k2 = MaxKey.instance() + XCTAssertTrue(k1 == k2) + XCTAssertFalse(k1 != k2) + } + + func testRegexValueEquality() { + let v1 = RegexValue(pattern: "foo", options: "bar") + let v2 = RegexValue(pattern: "foo", options: "bar") + let v3 = RegexValue(pattern: "foo_3", options: "bar") + let v4 = RegexValue(pattern: "foo", options: "bar_4") + + XCTAssertTrue(v1 == v2) + XCTAssertFalse(v1 == v3) + XCTAssertFalse(v1 == v4) + + XCTAssertFalse(v1 != v2) + XCTAssertTrue(v1 != v3) + XCTAssertTrue(v1 != v4) + } + + func testInt32ValueEquality() { + let v1 = Int32Value(1) + let v2 = Int32Value(1) + let v3 = Int32Value(-1) + + XCTAssertTrue(v1 == v2) + XCTAssertFalse(v1 == v3) + + XCTAssertFalse(v1 != v2) + XCTAssertTrue(v1 != v3) + } + + func testBsonTimestampEquality() { + let v1 = BsonTimestamp(seconds: 1, increment: 1) + let v2 = BsonTimestamp(seconds: 1, increment: 1) + let v3 = BsonTimestamp(seconds: 1, increment: 2) + let v4 = BsonTimestamp(seconds: 2, increment: 1) + + XCTAssertTrue(v1 == v2) + XCTAssertFalse(v1 == v3) + XCTAssertFalse(v1 == v4) + + XCTAssertFalse(v1 != v2) + XCTAssertTrue(v1 != v3) + XCTAssertTrue(v1 != v4) + } + + func testBsonObjectIdEquality() { + let v1 = BsonObjectId("foo") + let v2 = BsonObjectId("foo") + let v3 = BsonObjectId("bar") + + XCTAssertTrue(v1 == v2) + XCTAssertFalse(v1 == v3) + + XCTAssertFalse(v1 != v2) + XCTAssertTrue(v1 != v3) + } + + func testBsonBinaryDataEquality() { + let v1 = BsonBinaryData(subtype: 1, data: Data([1, 2, 3])) + let v2 = BsonBinaryData(subtype: 1, data: Data([1, 2, 3])) + let v3 = BsonBinaryData(subtype: 128, data: Data([1, 2, 3])) + let v4 = BsonBinaryData(subtype: 1, data: Data([1, 2, 3, 4])) + + XCTAssertTrue(v1 == v2) + XCTAssertFalse(v1 == v3) + XCTAssertFalse(v1 == v4) + + XCTAssertFalse(v1 != v2) + XCTAssertTrue(v1 != v3) + XCTAssertTrue(v1 != v4) + } + + func testCanReadAndWriteMinKeyFields() async throws { + _ = try await expectRoundtrip( + coll: collectionRef(), + data: ["min": MinKey.instance()] + ) + } + + func testCanReadAndWriteMaxKeyFields() async throws { + _ = try await expectRoundtrip( + coll: collectionRef(), + data: ["max": MaxKey.instance()] + ) + } + + func testCanReadAndWriteRegexFields() async throws { + _ = try await expectRoundtrip( + coll: collectionRef(), + data: ["regex": RegexValue(pattern: "^foo", options: "i")] + ) + } + + func 
testCanReadAndWriteInt32Fields() async throws { + _ = try await expectRoundtrip( + coll: collectionRef(), + data: ["int32": Int32Value(1)] + ) + } + + func testCanReadAndWriteBsonTimestampFields() async throws { + _ = try await expectRoundtrip( + coll: collectionRef(), + data: ["bsonTimestamp": BsonTimestamp(seconds: 1, increment: 2)] + ) + } + + func testCanReadAndWriteBsonObjectIdFields() async throws { + _ = try await expectRoundtrip( + coll: collectionRef(), + data: ["bsonObjectId": BsonObjectId("507f191e810c19729de860ea")] + ) + } + + func testCanReadAndWriteBsonBinaryDataFields() async throws { + _ = try await expectRoundtrip( + coll: collectionRef(), + data: ["bsonBinaryData": BsonBinaryData(subtype: 1, data: Data([1, 2, 3]))] + ) + _ = try await expectRoundtrip( + coll: collectionRef(), + data: ["bsonBinaryData": BsonBinaryData(subtype: 128, data: Data([1, 2, 3]))] + ) + _ = try await expectRoundtrip( + coll: collectionRef(), + data: ["bsonBinaryData": BsonBinaryData(subtype: 255, data: Data([]))] + ) + } + + func testCanReadAndWriteBsonFieldsInAnArray() async throws { + _ = try await expectRoundtrip( + coll: collectionRef(), + data: ["array": [ + BsonBinaryData(subtype: 1, data: Data([1, 2, 3])), + BsonObjectId("507f191e810c19729de860ea"), + BsonTimestamp(seconds: 123, increment: 456), + Int32Value(1), + MinKey.instance(), + MaxKey.instance(), + RegexValue(pattern: "^foo", options: "i"), + ]] + ) + } + + func testCanReadAndWriteBsonFieldsInAnObject() async throws { + _ = try await expectRoundtrip( + coll: collectionRef(), + data: ["array": [ + "binary": BsonBinaryData(subtype: 1, data: Data([1, 2, 3])), + "objectId": BsonObjectId("507f191e810c19729de860ea"), + "bsonTimestamp": BsonTimestamp(seconds: 123, increment: 456), + "int32": Int32Value(1), + "min": MinKey.instance(), + "max": MaxKey.instance(), + "regex": RegexValue(pattern: "^foo", options: "i"), + ]] + ) + } + + func testInvalidRegexValueGetsRejected() async throws { + let docRef = collectionRef().document("test-doc") + var errorMessage: String? + + do { + // Using an invalid regex option "a" + try await docRef.setData(["key": RegexValue(pattern: "foo", options: "a")]) + XCTFail("Expected error for invalid regex option") + } catch { + errorMessage = (error as NSError).userInfo[NSLocalizedDescriptionKey] as? String + XCTAssertNotNil(errorMessage) + XCTAssertTrue( + errorMessage! + .contains("Invalid regex option 'a'. Supported options are 'i', 'm', 's', 'u', and 'x'."), + "Unexpected error message: \(errorMessage ?? "nil")" + ) + } + } + + func testInvalidBsonObjectIdValueGetsRejected() async throws { + let docRef = collectionRef().document("test-doc") + var errorMessage: String? + + do { + // BsonObjectId with string length not equal to 24 + try await docRef.setData(["key": BsonObjectId("foo")]) + XCTFail("Expected error for invalid BSON Object ID string length") + } catch { + errorMessage = (error as NSError).userInfo[NSLocalizedDescriptionKey] as? String + XCTAssertNotNil(errorMessage) + XCTAssertTrue( + errorMessage!.contains("Object ID hex string has incorrect length."), + "Unexpected error message: \(errorMessage ?? 
"nil")" + ) + } + } + + func testCanOrderValuesOfDifferentTypeOrderTogether() async throws { + let collection = collectionRef() + let testDocs: [String: [String: Any?]] = [ + "nullValue": ["key": NSNull()], + "minValue": ["key": MinKey.instance()], + "booleanValue": ["key": true], + "nanValue": ["key": Double.nan], + "int32Value": ["key": Int32Value(1)], + "doubleValue": ["key": 2.0], + "integerValue": ["key": 3], + "timestampValue": ["key": Timestamp(seconds: 100, nanoseconds: 123_456_000)], + "bsonTimestampValue": ["key": BsonTimestamp(seconds: 1, increment: 2)], + "stringValue": ["key": "string"], + "bytesValue": ["key": Data([0, 1, 255])], + "bsonBinaryValue": ["key": BsonBinaryData(subtype: 1, data: Data([1, 2, 3]))], + "referenceValue": ["key": collection.document("doc")], + "objectIdValue": ["key": BsonObjectId("507f191e810c19729de860ea")], + "geoPointValue": ["key": GeoPoint(latitude: 0, longitude: 0)], + "regexValue": ["key": RegexValue(pattern: "^foo", options: "i")], + "arrayValue": ["key": [1, 2]], + "vectorValue": ["key": VectorValue([1.0, 2.0])], + "objectValue": ["key": ["a": 1]], + "maxValue": ["key": MaxKey.instance()], + ] + + for (docId, data) in testDocs { + try await collection.document(docId).setData(data as [String: Any]) + } + + let orderedQuery = collection.order(by: "key") + let snapshot = try await orderedQuery.getDocuments() + + let expectedOrder = [ + "nullValue", + "minValue", + "booleanValue", + "nanValue", + "int32Value", + "doubleValue", + "integerValue", + "timestampValue", + "bsonTimestampValue", + "stringValue", + "bytesValue", + "bsonBinaryValue", + "referenceValue", + "objectIdValue", + "geoPointValue", + "regexValue", + "arrayValue", + "vectorValue", + "objectValue", + "maxValue", + ] + + XCTAssertEqual(snapshot.documents.count, testDocs.count) + + for i in 0 ..< snapshot.documents.count { + let actualDocSnapshot = snapshot.documents[i] + let actualKeyValue = actualDocSnapshot.data()["key"] + let expectedDocId = expectedOrder[i] + let expectedKeyValue = testDocs[expectedDocId]!["key"] + + XCTAssertEqual(actualDocSnapshot.documentID, expectedDocId) + + // Since we have a 'nullValue' case, we should use `as?`. + XCTAssert(actualKeyValue as? NSObject == expectedKeyValue as? NSObject) + } + } +} diff --git a/Firestore/core/src/model/object_value.cc b/Firestore/core/src/model/object_value.cc index 3b812fe535d..b5a74d0f427 100644 --- a/Firestore/core/src/model/object_value.cc +++ b/Firestore/core/src/model/object_value.cc @@ -270,7 +270,8 @@ FieldMask ObjectValue::ExtractFieldMask( const google_firestore_v1_MapValue_FieldsEntry& entry = value.fields[i]; FieldPath current_path{MakeString(entry.key)}; - if (!IsMap(entry.value)) { + // BSON types do not need to extract reserved keys such as '__regex__', etc. + if (!IsMap(entry.value) || IsBsonType(entry.value)) { fields.insert(std::move(current_path)); continue; } diff --git a/Firestore/core/src/model/value_util.cc b/Firestore/core/src/model/value_util.cc index f363d2d7090..5f3524237a7 100644 --- a/Firestore/core/src/model/value_util.cc +++ b/Firestore/core/src/model/value_util.cc @@ -66,6 +66,101 @@ const char* kRawVectorValueFieldKey = "value"; pb_bytes_array_s* kVectorValueFieldKey = nanopb::MakeBytesArray(kRawVectorValueFieldKey); +/** The key of a MinKey in a map proto. */ +const char* kRawMinKeyTypeFieldValue = "__min__"; +pb_bytes_array_s* kMinKeyTypeFieldValue = + nanopb::MakeBytesArray(kRawMinKeyTypeFieldValue); + +/** The key of a MaxKey in a map proto. 
*/ +const char* kRawMaxKeyTypeFieldValue = "__max__"; +pb_bytes_array_s* kMaxKeyTypeFieldValue = + nanopb::MakeBytesArray(kRawMaxKeyTypeFieldValue); + +/** The key of a regex in a map proto. */ +const char* kRawRegexTypeFieldValue = "__regex__"; +pb_bytes_array_s* kRegexTypeFieldValue = + nanopb::MakeBytesArray(kRawRegexTypeFieldValue); + +/** The regex pattern key. */ +const char* kRawRegexTypePatternFieldValue = "pattern"; +pb_bytes_array_s* kRegexTypePatternFieldValue = + nanopb::MakeBytesArray(kRawRegexTypePatternFieldValue); + +/** The regex options key. */ +const char* kRawRegexTypeOptionsFieldValue = "options"; +pb_bytes_array_s* kRegexTypeOptionsFieldValue = + nanopb::MakeBytesArray(kRawRegexTypeOptionsFieldValue); + +/** The key of an int32 in a map proto. */ +const char* kRawInt32TypeFieldValue = "__int__"; +pb_bytes_array_s* kInt32TypeFieldValue = + nanopb::MakeBytesArray(kRawInt32TypeFieldValue); + +/** The key of a BSON ObjectId in a map proto. */ +const char* kRawBsonObjectIdTypeFieldValue = "__oid__"; +pb_bytes_array_s* kBsonObjectIdTypeFieldValue = + nanopb::MakeBytesArray(kRawBsonObjectIdTypeFieldValue); + +/** The key of a BSON Timestamp in a map proto. */ +const char* kRawBsonTimestampTypeFieldValue = "__request_timestamp__"; +pb_bytes_array_s* kBsonTimestampTypeFieldValue = + nanopb::MakeBytesArray(kRawBsonTimestampTypeFieldValue); + +/** The key of a BSON Timestamp seconds in a map proto. */ +const char* kRawBsonTimestampTypeSecondsFieldValue = "seconds"; +pb_bytes_array_s* kBsonTimestampTypeSecondsFieldValue = + nanopb::MakeBytesArray(kRawBsonTimestampTypeSecondsFieldValue); + +/** The key of a BSON Timestamp increment in a map proto. */ +const char* kRawBsonTimestampTypeIncrementFieldValue = "increment"; +pb_bytes_array_s* kBsonTimestampTypeIncrementFieldValue = + nanopb::MakeBytesArray(kRawBsonTimestampTypeIncrementFieldValue); + +/** The key of a BSON Binary Data in a map proto. */ +const char* kRawBsonBinaryDataTypeFieldValue = "__binary__"; +pb_bytes_array_s* kBsonBinaryDataTypeFieldValue = + nanopb::MakeBytesArray(kRawBsonBinaryDataTypeFieldValue); + +MapType DetectMapType(const google_firestore_v1_Value& value) { + if (value.which_value_type != google_firestore_v1_Value_map_value_tag || + value.map_value.fields_count == 0) { + return MapType::kNormal; + } + + // Check for type-based mappings + if (IsServerTimestamp(value)) { + return MapType::kServerTimestamp; + } else if (IsMaxValue(value)) { + return MapType::kMaxValue; + } else if (IsVectorValue(value)) { + return MapType::kVector; + } + + // Check for BSON-related mappings + if (value.map_value.fields_count != 1) { + // All BSON types have 1 key in the map. To improve performance, we can + // return early if the map is empty or has more than 1 key. 
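+    // For reference, the single-key wire formats probed by the checks below
+    // are (the corresponding Is* helpers are authoritative):
+    //   MinKey:         { "__min__": null }
+    //   MaxKey:         { "__max__": null }
+    //   Int32Value:     { "__int__": <integer> }
+    //   RegexValue:     { "__regex__": { "pattern": <string>, "options": <string> } }
+    //   BsonObjectId:   { "__oid__": <string> }
+    //   BsonTimestamp:  { "__request_timestamp__": { "seconds": <int>, "increment": <int> } }
+    //   BsonBinaryData: { "__binary__": <bytes> }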
+ return MapType::kNormal; + } else if (IsMinKeyValue(value)) { + return MapType::kMinKey; + } + if (IsMaxKeyValue(value)) { + return MapType::kMaxKey; + } else if (IsRegexValue(value)) { + return MapType::kRegex; + } else if (IsInt32Value(value)) { + return MapType::kInt32; + } else if (IsBsonObjectId(value)) { + return MapType::kBsonObjectId; + } else if (IsBsonTimestamp(value)) { + return MapType::kBsonTimestamp; + } else if (IsBsonBinaryData(value)) { + return MapType::kBsonBinaryData; + } + + return MapType::kNormal; +} + TypeOrder GetTypeOrder(const google_firestore_v1_Value& value) { switch (value.which_value_type) { case google_firestore_v1_Value_null_value_tag: @@ -97,14 +192,31 @@ TypeOrder GetTypeOrder(const google_firestore_v1_Value& value) { return TypeOrder::kArray; case google_firestore_v1_Value_map_value_tag: { - if (IsServerTimestamp(value)) { - return TypeOrder::kServerTimestamp; - } else if (IsMaxValue(value)) { - return TypeOrder::kMaxValue; - } else if (IsVectorValue(value)) { - return TypeOrder::kVector; + switch (DetectMapType(value)) { + case MapType::kServerTimestamp: + return TypeOrder::kServerTimestamp; + case MapType::kMaxValue: + return TypeOrder::kMaxValue; + case MapType::kVector: + return TypeOrder::kVector; + case MapType::kMinKey: + return TypeOrder::kMinKey; + case MapType::kMaxKey: + return TypeOrder::kMaxKey; + case MapType::kRegex: + return TypeOrder::kRegex; + case MapType::kInt32: + return TypeOrder::kNumber; + case MapType::kBsonObjectId: + return TypeOrder::kBsonObjectId; + case MapType::kBsonTimestamp: + return TypeOrder::kBsonTimestamp; + case MapType::kBsonBinaryData: + return TypeOrder::kBsonBinaryData; + case MapType::kNormal: + default: + return TypeOrder::kMap; } - return TypeOrder::kMap; } default: @@ -145,13 +257,21 @@ ComparisonResult CompareNumbers(const google_firestore_v1_Value& left, double left_double = left.double_value; if (right.which_value_type == google_firestore_v1_Value_double_value_tag) { return util::Compare(left_double, right.double_value); + } else if (IsInt32Value(right)) { + return util::CompareMixedNumber( + left_double, right.map_value.fields[0].value.integer_value); } else { return util::CompareMixedNumber(left_double, right.integer_value); } } else { - int64_t left_long = left.integer_value; + int64_t left_long = IsInt32Value(left) + ? left.map_value.fields[0].value.integer_value + : left.integer_value; if (right.which_value_type == google_firestore_v1_Value_integer_value_tag) { return util::Compare(left_long, right.integer_value); + } else if (IsInt32Value(right)) { + return util::Compare(left_long, + right.map_value.fields[0].value.integer_value); } else { return util::ReverseOrder( util::CompareMixedNumber(right.double_value, left_long)); @@ -299,6 +419,124 @@ ComparisonResult CompareVectors(const google_firestore_v1_Value& left, return CompareArrays(leftArray, rightArray); } +ComparisonResult CompareRegexValues(const google_firestore_v1_Value& left, + const google_firestore_v1_Value& right) { + HARD_ASSERT(IsRegexValue(left) && IsRegexValue(right), + "Cannot compare non-regex values as regex values."); + + // Since the above assertion ensures the given values have the expected format + // we can safely access the fields as we expect. + const google_firestore_v1_MapValue& left_inner_map_value = + left.map_value.fields[0].value.map_value; + const google_firestore_v1_MapValue& right_inner_map_value = + right.map_value.fields[0].value.map_value; + + // Find the left and right patterns. 
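+  // The inner fields are located with IndexOfKey rather than read by position,
+  // since "pattern" and "options" are not guaranteed to appear in a fixed
+  // order inside the proto map.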
+ absl::optional left_pattern_index = + IndexOfKey(left_inner_map_value, kRawRegexTypePatternFieldValue, + kRegexTypePatternFieldValue); + const auto& left_pattern_value = + left_inner_map_value.fields[left_pattern_index.value()].value; + absl::optional right_pattern_index = + IndexOfKey(right_inner_map_value, kRawRegexTypePatternFieldValue, + kRegexTypePatternFieldValue); + const auto& right_pattern_value = + right_inner_map_value.fields[right_pattern_index.value()].value; + + // First compare patterns. + const auto compare_patterns = + CompareStrings(left_pattern_value, right_pattern_value); + if (compare_patterns != ComparisonResult::Same) { + return compare_patterns; + } + + // Find the left and right options. + absl::optional left_options_index = + IndexOfKey(left_inner_map_value, kRawRegexTypeOptionsFieldValue, + kRegexTypeOptionsFieldValue); + const auto& left_options_value = + left_inner_map_value.fields[left_options_index.value()].value; + absl::optional right_options_index = + IndexOfKey(right_inner_map_value, kRawRegexTypeOptionsFieldValue, + kRegexTypeOptionsFieldValue); + const auto& right_options_value = + right_inner_map_value.fields[right_options_index.value()].value; + + // If patterns are equal, compare the options. + return CompareStrings(left_options_value, right_options_value); +} + +ComparisonResult CompareBsonObjectId(const google_firestore_v1_Value& left, + const google_firestore_v1_Value& right) { + HARD_ASSERT(IsBsonObjectId(left) && IsBsonObjectId(right), + "Cannot compare non-BsonObjectId values as BsonObjectId values."); + + // Since the above assertion ensures the given values have the expected format + // we can safely access the fields as we expect. + return CompareStrings(left.map_value.fields[0].value, + right.map_value.fields[0].value); +} + +ComparisonResult CompareBsonTimestamp(const google_firestore_v1_Value& left, + const google_firestore_v1_Value& right) { + HARD_ASSERT( + IsBsonTimestamp(left) && IsBsonTimestamp(right), + "Cannot compare non-BsonTimestamp values as BsonTimestamp values."); + + // Since the above assertion ensures the given values have the expected format + // we can safely access the fields as we expect. + const google_firestore_v1_MapValue& left_inner_map_value = + left.map_value.fields[0].value.map_value; + const google_firestore_v1_MapValue& right_inner_map_value = + right.map_value.fields[0].value.map_value; + + // Find the left and right 'seconds'. + absl::optional left_seconds_index = + IndexOfKey(left_inner_map_value, kRawBsonTimestampTypeSecondsFieldValue, + kBsonTimestampTypeSecondsFieldValue); + const auto& left_seconds_value = + left_inner_map_value.fields[left_seconds_index.value()].value; + absl::optional right_seconds_index = + IndexOfKey(right_inner_map_value, kRawBsonTimestampTypeSecondsFieldValue, + kBsonTimestampTypeSecondsFieldValue); + const auto& right_seconds_value = + right_inner_map_value.fields[right_seconds_index.value()].value; + + // First compare 'seconds'. + const auto compare_seconds = + CompareNumbers(left_seconds_value, right_seconds_value); + if (compare_seconds != ComparisonResult::Same) { + return compare_seconds; + } + + // Find the left and right 'increment'. 
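+  // Both 'seconds' and 'increment' are stored as proto integer values (see
+  // IsBsonTimestamp), so the CompareNumbers calls in this function reduce to
+  // plain 64-bit integer comparisons.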
+ absl::optional left_increment_index = + IndexOfKey(left_inner_map_value, kRawBsonTimestampTypeIncrementFieldValue, + kBsonTimestampTypeIncrementFieldValue); + const auto& left_increment_value = + left_inner_map_value.fields[left_increment_index.value()].value; + absl::optional right_increment_index = IndexOfKey( + right_inner_map_value, kRawBsonTimestampTypeIncrementFieldValue, + kBsonTimestampTypeIncrementFieldValue); + const auto& right_increment_value = + right_inner_map_value.fields[right_increment_index.value()].value; + + // If 'seconds' are equal, compare the 'increment'. + return CompareNumbers(left_increment_value, right_increment_value); +} + +ComparisonResult CompareBsonBinaryData(const google_firestore_v1_Value& left, + const google_firestore_v1_Value& right) { + HARD_ASSERT( + IsBsonBinaryData(left) && IsBsonBinaryData(right), + "Cannot compare non-BsonBinaryData values as BsonBinaryData values."); + + // Since the above assertion ensures the given values have the expected format + // we can safely access the fields as we expect. + return CompareBlobs(left.map_value.fields[0].value, + right.map_value.fields[0].value); +} + ComparisonResult Compare(const google_firestore_v1_Value& left, const google_firestore_v1_Value& right) { TypeOrder left_type = GetTypeOrder(left); @@ -310,6 +548,11 @@ ComparisonResult Compare(const google_firestore_v1_Value& left, switch (left_type) { case TypeOrder::kNull: + case TypeOrder::kMaxValue: + // All MinKeys are equal. + case TypeOrder::kMinKey: + // All MaxKeys are equal. + case TypeOrder::kMaxKey: return ComparisonResult::Same; case TypeOrder::kBoolean: @@ -337,6 +580,18 @@ ComparisonResult Compare(const google_firestore_v1_Value& left, case TypeOrder::kGeoPoint: return CompareGeoPoints(left, right); + case TypeOrder::kRegex: + return CompareRegexValues(left, right); + + case TypeOrder::kBsonObjectId: + return CompareBsonObjectId(left, right); + + case TypeOrder::kBsonTimestamp: + return CompareBsonTimestamp(left, right); + + case TypeOrder::kBsonBinaryData: + return CompareBsonBinaryData(left, right); + case TypeOrder::kArray: return CompareArrays(left, right); @@ -346,9 +601,6 @@ ComparisonResult Compare(const google_firestore_v1_Value& left, case TypeOrder::kVector: return CompareVectors(left, right); - case TypeOrder::kMaxValue: - return util::ComparisonResult::Same; - default: HARD_FAIL("Invalid type value: %s", left_type); } @@ -400,6 +652,9 @@ bool NumberEquals(const google_firestore_v1_Value& left, right.which_value_type == google_firestore_v1_Value_double_value_tag) { return util::DoubleBitwiseEquals(left.double_value, right.double_value); + } else if (IsInt32Value(left) && IsInt32Value(right)) { + return left.map_value.fields[0].value.integer_value == + right.map_value.fields[0].value.integer_value; } return false; } @@ -437,6 +692,8 @@ bool Equals(const google_firestore_v1_Value& lhs, switch (left_type) { case TypeOrder::kNull: + case TypeOrder::kMinKey: + case TypeOrder::kMaxKey: return true; case TypeOrder::kBoolean: @@ -474,6 +731,18 @@ bool Equals(const google_firestore_v1_Value& lhs, case TypeOrder::kArray: return ArrayEquals(lhs.array_value, rhs.array_value); + case TypeOrder::kRegex: + return CompareRegexValues(lhs, rhs) == ComparisonResult::Same; + + case TypeOrder::kBsonObjectId: + return CompareBsonObjectId(lhs, rhs) == ComparisonResult::Same; + + case TypeOrder::kBsonTimestamp: + return CompareBsonTimestamp(lhs, rhs) == ComparisonResult::Same; + + case TypeOrder::kBsonBinaryData: + return CompareBsonBinaryData(lhs, rhs) 
== ComparisonResult::Same; + case TypeOrder::kVector: case TypeOrder::kMap: return MapValueEquals(lhs.map_value, rhs.map_value); @@ -826,6 +1095,260 @@ bool IsVectorValue(const google_firestore_v1_Value& value) { return true; } +bool IsMinKeyValue(const google_firestore_v1_Value& value) { + // A MinKey is expected to be a map as follows: { "__min__": null } + + // Must be a map with 1 field. + if (value.which_value_type != google_firestore_v1_Value_map_value_tag || + value.map_value.fields_count != 1) { + return false; + } + + // Must have a '__min__' key. + absl::optional min_key_field_index = IndexOfKey( + value.map_value, kRawMinKeyTypeFieldValue, kMinKeyTypeFieldValue); + if (!min_key_field_index.has_value()) { + return false; + } + + // The inner value should be null. + if (value.map_value.fields[0].value.which_value_type != + google_firestore_v1_Value_null_value_tag) { + return false; + } + + return true; +} + +bool IsMaxKeyValue(const google_firestore_v1_Value& value) { + // A MaxKey is expected to be a map as follows: { "__max__": null } + + // Must be a map with 1 field. + if (value.which_value_type != google_firestore_v1_Value_map_value_tag || + value.map_value.fields_count != 1) { + return false; + } + + // Must have a '__max__' key. + absl::optional max_key_field_index = IndexOfKey( + value.map_value, kRawMaxKeyTypeFieldValue, kMaxKeyTypeFieldValue); + if (!max_key_field_index.has_value()) { + return false; + } + + // The inner value should be null. + if (value.map_value.fields[0].value.which_value_type != + google_firestore_v1_Value_null_value_tag) { + return false; + } + + return true; +} + +bool IsRegexValue(const google_firestore_v1_Value& value) { + // A regex is expected to be a map as follows: + // { + // "__regex__": { + // "pattern": "...", + // "options": "..." + // } + // } + + // Must be a map with 1 field. + if (value.which_value_type != google_firestore_v1_Value_map_value_tag || + value.map_value.fields_count != 1) { + return false; + } + + // Must have a "__regex__" key. + absl::optional regex_field_index = IndexOfKey( + value.map_value, kRawRegexTypeFieldValue, kRegexTypeFieldValue); + if (!regex_field_index.has_value()) { + return false; + } + + // The inner value should be a map with 2 fields. + google_firestore_v1_Value& inner_value = value.map_value.fields[0].value; + if (inner_value.which_value_type != google_firestore_v1_Value_map_value_tag || + inner_value.map_value.fields_count != 2) { + return false; + } + + // Must have a string 'pattern'. + absl::optional pattern_field_index = + IndexOfKey(inner_value.map_value, kRawRegexTypePatternFieldValue, + kRegexTypePatternFieldValue); + if (!pattern_field_index.has_value() || + inner_value.map_value.fields[pattern_field_index.value()] + .value.which_value_type != + google_firestore_v1_Value_string_value_tag) { + return false; + } + + // Must have a string 'options'. + absl::optional options_field_index = + IndexOfKey(inner_value.map_value, kRawRegexTypeOptionsFieldValue, + kRegexTypeOptionsFieldValue); + if (!options_field_index.has_value() || + inner_value.map_value.fields[options_field_index.value()] + .value.which_value_type != + google_firestore_v1_Value_string_value_tag) { + return false; + } + + return true; +} + +bool IsBsonObjectId(const google_firestore_v1_Value& value) { + // A BsonObjectId is expected to be a map as follows: + // { + // "__oid__": "..." + // } + + // Must be a map with 1 field. 
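+  // (Only the wire shape is validated here; the 24-character hex-string
+  // requirement for ObjectIds appears to be enforced when user data is
+  // parsed, as exercised by the integration tests.)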
+ if (value.which_value_type != google_firestore_v1_Value_map_value_tag || + value.map_value.fields_count != 1) { + return false; + } + + // Must have a "__oid__" key. + absl::optional field_index = + IndexOfKey(value.map_value, kRawBsonObjectIdTypeFieldValue, + kBsonObjectIdTypeFieldValue); + if (!field_index.has_value()) { + return false; + } + + // Must have a string value. + google_firestore_v1_Value& oid = value.map_value.fields[0].value; + if (oid.which_value_type != google_firestore_v1_Value_string_value_tag) { + return false; + } + + return true; +} + +bool IsBsonTimestamp(const google_firestore_v1_Value& value) { + // A BsonTimestamp is expected to be a map as follows: + // { + // "__request_timestamp__": { + // "seconds": "...", + // "increment": "..." + // } + // } + + // Must be a map with 1 field. + if (value.which_value_type != google_firestore_v1_Value_map_value_tag || + value.map_value.fields_count != 1) { + return false; + } + + // Must have a "__request_timestamp__" key. + absl::optional field_index = + IndexOfKey(value.map_value, kRawBsonTimestampTypeFieldValue, + kBsonTimestampTypeFieldValue); + if (!field_index.has_value()) { + return false; + } + + // The inner value should be a map with 2 fields. + google_firestore_v1_Value& innerValue = value.map_value.fields[0].value; + if (innerValue.which_value_type != google_firestore_v1_Value_map_value_tag || + innerValue.map_value.fields_count != 2) { + return false; + } + + // Must have an integer 'seconds' field. + absl::optional seconds_field_index = + IndexOfKey(innerValue.map_value, kRawBsonTimestampTypeSecondsFieldValue, + kBsonTimestampTypeSecondsFieldValue); + if (!seconds_field_index.has_value() || + innerValue.map_value.fields[seconds_field_index.value()] + .value.which_value_type != + google_firestore_v1_Value_integer_value_tag) { + return false; + } + + // Must have an integer 'increment'. + absl::optional increment_field_index = + IndexOfKey(innerValue.map_value, kRawBsonTimestampTypeIncrementFieldValue, + kBsonTimestampTypeIncrementFieldValue); + if (!increment_field_index.has_value() || + innerValue.map_value.fields[increment_field_index.value()] + .value.which_value_type != + google_firestore_v1_Value_integer_value_tag) { + return false; + } + + return true; +} + +bool IsBsonBinaryData(const google_firestore_v1_Value& value) { + // A BsonTimestamp is expected to be a map as follows: + // { + // "__binary__": <> + // } + + // Must be a map with 1 field. + if (value.which_value_type != google_firestore_v1_Value_map_value_tag || + value.map_value.fields_count != 1) { + return false; + } + + // Must have a "__binary__" key. + absl::optional field_index = + IndexOfKey(value.map_value, kRawBsonBinaryDataTypeFieldValue, + kBsonBinaryDataTypeFieldValue); + if (!field_index.has_value()) { + return false; + } + + // Must have a 'bytes' value. + if (value.map_value.fields[0].value.which_value_type != + google_firestore_v1_Value_bytes_value_tag) { + return false; + } + + return true; +} + +bool IsInt32Value(const google_firestore_v1_Value& value) { + // An Int32Value is expected to be a map as follows: + // { + // "__int__": 12345 + // } + + // Must be a map with 1 field. + if (value.which_value_type != google_firestore_v1_Value_map_value_tag || + value.map_value.fields_count != 1) { + return false; + } + + // Must have a "__int__" key. 
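+  // Note: values of this form are reported as TypeOrder::kNumber by
+  // GetTypeOrder, so int32 fields sort and compare together with 64-bit
+  // integers and doubles.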
+ absl::optional field_index = IndexOfKey( + value.map_value, kRawInt32TypeFieldValue, kInt32TypeFieldValue); + if (!field_index.has_value()) { + return false; + } + + // Must have an integer value. + if (value.map_value.fields[0].value.which_value_type != + google_firestore_v1_Value_integer_value_tag) { + return false; + } + + return true; +} + +bool IsBsonType(const google_firestore_v1_Value& value) { + MapType mapType = DetectMapType(value); + return mapType == MapType::kMinKey || mapType == MapType::kMaxKey || + mapType == MapType::kRegex || mapType == MapType::kInt32 || + mapType == MapType::kBsonObjectId || + mapType == MapType::kBsonTimestamp || + mapType == MapType::kBsonBinaryData; +} + google_firestore_v1_Value NaNValue() { google_firestore_v1_Value nan_value; nan_value.which_value_type = google_firestore_v1_Value_double_value_tag; @@ -833,6 +1356,13 @@ google_firestore_v1_Value NaNValue() { return nan_value; } +google_firestore_v1_Value ZeroIntegerValue() { + google_firestore_v1_Value zero_value; + zero_value.which_value_type = google_firestore_v1_Value_integer_value_tag; + zero_value.integer_value = 0; + return zero_value; +} + bool IsNaNValue(const google_firestore_v1_Value& value) { return value.which_value_type == google_firestore_v1_Value_double_value_tag && std::isnan(value.double_value); @@ -922,6 +1452,96 @@ google_firestore_v1_Value MinVector() { return lowerBound; } +google_firestore_v1_Value MinRegex() { + google_firestore_v1_MapValue_FieldsEntry* inner_field_entries = + nanopb::MakeArray(2); + inner_field_entries[0].key = kRegexTypePatternFieldValue; + inner_field_entries[0].value = MinString(); + inner_field_entries[1].key = kRegexTypeOptionsFieldValue; + inner_field_entries[1].value = MinString(); + google_firestore_v1_MapValue inner_map_value; + inner_map_value.fields_count = 2; + inner_map_value.fields = inner_field_entries; + google_firestore_v1_Value inner_value; + inner_value.which_value_type = google_firestore_v1_Value_map_value_tag; + inner_value.map_value = inner_map_value; + + google_firestore_v1_MapValue_FieldsEntry* outer_field_entries = + nanopb::MakeArray(1); + outer_field_entries[0].key = kRegexTypeFieldValue; + outer_field_entries[0].value = inner_value; + google_firestore_v1_MapValue outer_map_value; + outer_map_value.fields_count = 1; + outer_map_value.fields = outer_field_entries; + + google_firestore_v1_Value lower_bound; + lower_bound.which_value_type = google_firestore_v1_Value_map_value_tag; + lower_bound.map_value = outer_map_value; + + return lower_bound; +} + +google_firestore_v1_Value MinBsonObjectId() { + google_firestore_v1_MapValue_FieldsEntry* field_entries = + nanopb::MakeArray(1); + field_entries[0].key = kBsonObjectIdTypeFieldValue; + field_entries[0].value = MinString(); + google_firestore_v1_MapValue map_value; + map_value.fields_count = 1; + map_value.fields = field_entries; + + google_firestore_v1_Value lower_bound; + lower_bound.which_value_type = google_firestore_v1_Value_map_value_tag; + lower_bound.map_value = map_value; + + return lower_bound; +} + +google_firestore_v1_Value MinBsonTimestamp() { + google_firestore_v1_MapValue_FieldsEntry* inner_field_entries = + nanopb::MakeArray(2); + inner_field_entries[0].key = kBsonTimestampTypeSecondsFieldValue; + inner_field_entries[0].value = ZeroIntegerValue(); + inner_field_entries[1].key = kBsonTimestampTypeIncrementFieldValue; + inner_field_entries[1].value = ZeroIntegerValue(); + google_firestore_v1_MapValue inner_map_value; + inner_map_value.fields_count = 2; + 
inner_map_value.fields = inner_field_entries; + google_firestore_v1_Value inner_value; + inner_value.which_value_type = google_firestore_v1_Value_map_value_tag; + inner_value.map_value = inner_map_value; + + google_firestore_v1_MapValue_FieldsEntry* outer_field_entries = + nanopb::MakeArray(1); + outer_field_entries[0].key = kBsonTimestampTypeFieldValue; + outer_field_entries[0].value = inner_value; + google_firestore_v1_MapValue outer_map_value; + outer_map_value.fields_count = 1; + outer_map_value.fields = outer_field_entries; + + google_firestore_v1_Value lower_bound; + lower_bound.which_value_type = google_firestore_v1_Value_map_value_tag; + lower_bound.map_value = outer_map_value; + + return lower_bound; +} + +google_firestore_v1_Value MinBsonBinaryData() { + google_firestore_v1_MapValue_FieldsEntry* field_entries = + nanopb::MakeArray(1); + field_entries[0].key = kBsonBinaryDataTypeFieldValue; + field_entries[0].value = MinBytes(); + google_firestore_v1_MapValue map_value; + map_value.fields_count = 1; + map_value.fields = field_entries; + + google_firestore_v1_Value lower_bound; + lower_bound.which_value_type = google_firestore_v1_Value_map_value_tag; + lower_bound.map_value = map_value; + + return lower_bound; +} + google_firestore_v1_Value MinMap() { google_firestore_v1_Value lowerBound; lowerBound.which_value_type = google_firestore_v1_Value_map_value_tag; diff --git a/Firestore/core/src/model/value_util.h b/Firestore/core/src/model/value_util.h index 708b71ccd16..5ae2395816b 100644 --- a/Firestore/core/src/model/value_util.h +++ b/Firestore/core/src/model/value_util.h @@ -57,26 +57,102 @@ extern pb_bytes_array_s* kVectorTypeFieldValue; extern const char* kRawVectorValueFieldKey; extern pb_bytes_array_s* kVectorValueFieldKey; +/** The key of a MinKey in a map proto. */ +extern const char* kRawMinKeyTypeFieldValue; +extern pb_bytes_array_s* kMinKeyTypeFieldValue; + +/** The key of a MaxKey in a map proto. */ +extern const char* kRawMaxKeyTypeFieldValue; +extern pb_bytes_array_s* kMaxKeyTypeFieldValue; + +/** The key of a regex in a map proto. */ +extern const char* kRawRegexTypeFieldValue; +extern pb_bytes_array_s* kRegexTypeFieldValue; + +/** The regex pattern key. */ +extern const char* kRawRegexTypePatternFieldValue; +extern pb_bytes_array_s* kRegexTypePatternFieldValue; + +/** The regex options key. */ +extern const char* kRawRegexTypeOptionsFieldValue; +extern pb_bytes_array_s* kRegexTypeOptionsFieldValue; + +/** The key of an int32 in a map proto. */ +extern const char* kRawInt32TypeFieldValue; +extern pb_bytes_array_s* kInt32TypeFieldValue; + +/** The key of a BSON ObjectId in a map proto. */ +extern const char* kRawBsonObjectIdTypeFieldValue; +extern pb_bytes_array_s* kBsonObjectIdTypeFieldValue; + +/** The key of a BSON Timestamp in a map proto. */ +extern const char* kRawBsonTimestampTypeFieldValue; +extern pb_bytes_array_s* kBsonTimestampTypeFieldValue; + +/** The key of a BSON Timestamp seconds in a map proto. */ +extern const char* kRawBsonTimestampTypeSecondsFieldValue; +extern pb_bytes_array_s* kBsonTimestampTypeSecondsFieldValue; + +/** The key of a BSON Timestamp increment in a map proto. */ +extern const char* kRawBsonTimestampTypeIncrementFieldValue; +extern pb_bytes_array_s* kBsonTimestampTypeIncrementFieldValue; + +/** The key of a BSON Binary Data in a map proto. */ +extern const char* kRawBsonBinaryDataTypeFieldValue; +extern pb_bytes_array_s* kBsonBinaryDataTypeFieldValue; + /** * The order of types in Firestore. 
This order is based on the backend's - * ordering, but modified to support server timestamps. + * ordering, but modified to support server timestamps and `MAX_VALUE` inside + * the SDK. */ enum class TypeOrder { kNull = 0, - kBoolean = 1, - kNumber = 2, - kTimestamp = 3, - kServerTimestamp = 4, - kString = 5, - kBlob = 6, - kReference = 7, - kGeoPoint = 8, - kArray = 9, - kVector = 10, - kMap = 11, - kMaxValue = 12 + kMinKey = 1, + kBoolean = 2, + // Note: all numbers (32-bit int, 64-bit int, 64-bit double, 128-bit decimal, + // etc.) are sorted together numerically. The `CompareNumbers` function + // distinguishes between different number types and compares them accordingly. + kNumber = 3, + kTimestamp = 4, + kBsonTimestamp = 5, + kServerTimestamp = 6, + kString = 7, + kBlob = 8, + kBsonBinaryData = 9, + kReference = 10, + kBsonObjectId = 11, + kGeoPoint = 12, + kRegex = 13, + kArray = 14, + kVector = 15, + kMap = 16, + kMaxKey = 17, + kMaxValue = 18 }; +/** + * The type that a Map is used to represent. + * Most Maps are NORMAL maps, however, some maps are used to identify more + * complex types. + */ +enum class MapType { + kNormal = 0, + kServerTimestamp = 1, + kMaxValue = 2, + kVector = 3, + kMinKey = 4, + kMaxKey = 5, + kRegex = 6, + kInt32 = 7, + kBsonObjectId = 8, + kBsonTimestamp = 9, + kBsonBinaryData = 10 +}; + +/** Returns the Map type for the given value. */ +MapType DetectMapType(const google_firestore_v1_Value& value); + /** Returns the backend's type order of the given Value type. */ TypeOrder GetTypeOrder(const google_firestore_v1_Value& value); @@ -177,10 +253,48 @@ google_firestore_v1_Value MaxValue(); bool IsMaxValue(const google_firestore_v1_Value& value); /** - * Returns `true` if `value` represents a VectorValue.. + * Returns `true` if `value` represents a VectorValue. */ bool IsVectorValue(const google_firestore_v1_Value& value); +/** + * Returns `true` if `value` represents a MinKey. + */ +bool IsMinKeyValue(const google_firestore_v1_Value& value); + +/** + * Returns `true` if `value` represents a MaxKey. + */ +bool IsMaxKeyValue(const google_firestore_v1_Value& value); + +/** + * Returns `true` if `value` represents a RegexValue. + */ +bool IsRegexValue(const google_firestore_v1_Value& value); + +/** + * Returns `true` if `value` represents an Int32Value. + */ +bool IsInt32Value(const google_firestore_v1_Value& value); + +/** + * Returns `true` if `value` represents a BsonObjectId. + */ +bool IsBsonObjectId(const google_firestore_v1_Value& value); + +/** + * Returns `true` if `value` represents a BsonTimestamp. + */ +bool IsBsonTimestamp(const google_firestore_v1_Value& value); + +/** + * Returns `true` if `value` represents a BsonBinaryData. + */ +bool IsBsonBinaryData(const google_firestore_v1_Value& value); + +/** Returns true if `value` is a BSON Type. */ +bool IsBsonType(const google_firestore_v1_Value& value); + /** * Returns the index of the specified key (`kRawTypeValueFieldKey`) in the * map (`mapValue`). 
`kTypeValueFieldKey` is an alternative representation @@ -217,6 +331,14 @@ google_firestore_v1_Value MinReference(); google_firestore_v1_Value MinGeoPoint(); +google_firestore_v1_Value MinBsonBinaryData(); + +google_firestore_v1_Value MinBsonObjectId(); + +google_firestore_v1_Value MinBsonTimestamp(); + +google_firestore_v1_Value MinRegex(); + google_firestore_v1_Value MinArray(); google_firestore_v1_Value MinVector(); diff --git a/Firestore/core/test/unit/bundle/bundle_serializer_test.cc b/Firestore/core/test/unit/bundle/bundle_serializer_test.cc index 72d788c7832..8d384de40ea 100644 --- a/Firestore/core/test/unit/bundle/bundle_serializer_test.cc +++ b/Firestore/core/test/unit/bundle/bundle_serializer_test.cc @@ -50,6 +50,7 @@ using ProtoDocument = ::google::firestore::v1::Document; using ProtoMaybeDocument = ::firestore::client::MaybeDocument; using ProtoNamedQuery = ::firestore::NamedQuery; using ProtoValue = ::google::firestore::v1::Value; +using MapValue = ::google::firestore::v1::MapValue; using core::Query; using core::Target; using local::LocalSerializer; @@ -622,6 +623,93 @@ TEST_F(BundleSerializerTest, DecodesArrayValues) { VerifyFieldValueRoundtrip(value); } +TEST_F(BundleSerializerTest, DecodesMinKey) { + ProtoValue null_value; + null_value.set_null_value(google::protobuf::NULL_VALUE); + ProtoValue object; + object.mutable_map_value()->mutable_fields()->insert( + {model::kRawMinKeyTypeFieldValue, null_value}); + + VerifyFieldValueRoundtrip(object); +} + +TEST_F(BundleSerializerTest, DecodesMaxKey) { + ProtoValue null_value; + null_value.set_null_value(google::protobuf::NULL_VALUE); + ProtoValue object; + object.mutable_map_value()->mutable_fields()->insert( + {model::kRawMaxKeyTypeFieldValue, null_value}); + + VerifyFieldValueRoundtrip(object); +} + +TEST_F(BundleSerializerTest, DecodesInt32Value) { + ProtoValue int_value; + int_value.set_integer_value(1234L); + ProtoValue object; + object.mutable_map_value()->mutable_fields()->insert( + {model::kRawInt32TypeFieldValue, int_value}); + + VerifyFieldValueRoundtrip(object); +} + +TEST_F(BundleSerializerTest, DecodesRegexValue) { + ProtoValue pattern_value; + ProtoValue options_value; + ProtoValue inner_map_value; + ProtoValue value; + + pattern_value.set_string_value("^foo"); + options_value.set_string_value("i"); + inner_map_value.mutable_map_value()->mutable_fields()->insert( + {model::kRawRegexTypePatternFieldValue, pattern_value}); + inner_map_value.mutable_map_value()->mutable_fields()->insert( + {model::kRawRegexTypeOptionsFieldValue, options_value}); + value.mutable_map_value()->mutable_fields()->insert( + {model::kRawRegexTypeFieldValue, inner_map_value}); + + VerifyFieldValueRoundtrip(value); +} + +TEST_F(BundleSerializerTest, DecodesBsonObjectId) { + ProtoValue oid_value; + oid_value.set_string_value("foo"); + ProtoValue object; + object.mutable_map_value()->mutable_fields()->insert( + {model::kRawBsonObjectIdTypeFieldValue, oid_value}); + + VerifyFieldValueRoundtrip(object); +} + +TEST_F(BundleSerializerTest, DecodesBsonTimestamp) { + ProtoValue seconds_value; + ProtoValue increment_value; + ProtoValue inner_map_value; + ProtoValue value; + + seconds_value.set_integer_value(1234L); + increment_value.set_integer_value(5678L); + inner_map_value.mutable_map_value()->mutable_fields()->insert( + {model::kRawBsonTimestampTypeSecondsFieldValue, seconds_value}); + inner_map_value.mutable_map_value()->mutable_fields()->insert( + {model::kRawBsonTimestampTypeIncrementFieldValue, increment_value}); + 
value.mutable_map_value()->mutable_fields()->insert( + {model::kRawBsonTimestampTypeFieldValue, inner_map_value}); + + VerifyFieldValueRoundtrip(value); +} + +TEST_F(BundleSerializerTest, DecodesBsonBinaryData) { + ProtoValue binary_value; + uint8_t array[]{0, 1, 2, 3}; + binary_value.set_bytes_value(array, 4); + ProtoValue value; + value.mutable_map_value()->mutable_fields()->insert( + {model::kRawBsonBinaryDataTypeFieldValue, binary_value}); + + VerifyFieldValueRoundtrip(value); +} + TEST_F(BundleSerializerTest, DecodesNestedObjectValues) { ProtoValue b; b.set_boolean_value(true); diff --git a/Firestore/core/test/unit/model/document_test.cc b/Firestore/core/test/unit/model/document_test.cc index 9172bc2d516..31e7aec0c07 100644 --- a/Firestore/core/test/unit/model/document_test.cc +++ b/Firestore/core/test/unit/model/document_test.cc @@ -25,11 +25,18 @@ namespace firebase { namespace firestore { namespace model { +using testutil::BsonBinaryData; +using testutil::BsonObjectId; +using testutil::BsonTimestamp; using testutil::DeletedDoc; using testutil::Doc; using testutil::Field; +using testutil::Int32; using testutil::Key; using testutil::Map; +using testutil::MaxKey; +using testutil::MinKey; +using testutil::Regex; using testutil::UnknownDoc; using testutil::Value; using testutil::Version; @@ -71,6 +78,25 @@ TEST(DocumentTest, ExtractsFields) { EXPECT_EQ(doc.field(Field("owner.title")), *Value("scallywag")); } +TEST(DocumentTest, CanContainBsonTypes) { + auto data = WrapObject( + Map("minKey", MinKey(), "maxKey", MaxKey(), "regex", Regex("^foo", "i"), + "int32", Int32(1234), "objectId", BsonObjectId("foo"), "timestamp", + BsonTimestamp(123, 456), "binary", BsonBinaryData(128, {7, 8, 9}))); + + auto doc = MutableDocument::FoundDocument(Key("col/doc"), Version(1), data); + + EXPECT_EQ(doc.data(), data); + EXPECT_EQ(doc.has_local_mutations(), false); + EXPECT_EQ(doc.field(Field("minKey")), *MinKey()); + EXPECT_EQ(doc.field(Field("maxKey")), *MaxKey()); + EXPECT_EQ(doc.field(Field("regex")), *Regex("^foo", "i")); + EXPECT_EQ(doc.field(Field("int32")), *Int32(1234)); + EXPECT_EQ(doc.field(Field("objectId")), *BsonObjectId("foo")); + EXPECT_EQ(doc.field(Field("timestamp")), *BsonTimestamp(123, 456)); + EXPECT_EQ(doc.field(Field("binary")), *BsonBinaryData(128, {7, 8, 9})); +} + TEST(DocumentTest, Equality) { MutableDocument doc = Doc("some/path", 1, Map("a", 1)); EXPECT_EQ(doc, Doc("some/path", 1, Map("a", 1))); diff --git a/Firestore/core/test/unit/model/object_value_test.cc b/Firestore/core/test/unit/model/object_value_test.cc index e5538f08847..594322fdc11 100644 --- a/Firestore/core/test/unit/model/object_value_test.cc +++ b/Firestore/core/test/unit/model/object_value_test.cc @@ -31,9 +31,16 @@ const char kBarString[] = "bar"; namespace { using absl::nullopt; +using testutil::BsonBinaryData; +using testutil::BsonObjectId; +using testutil::BsonTimestamp; using testutil::DbId; using testutil::Field; +using testutil::Int32; using testutil::Map; +using testutil::MaxKey; +using testutil::MinKey; +using testutil::Regex; using testutil::Value; using testutil::WrapObject; @@ -43,7 +50,11 @@ class ObjectValueTest : public ::testing::Test { }; TEST_F(ObjectValueTest, ExtractsFields) { - ObjectValue value = WrapObject("foo", Map("a", 1, "b", true, "c", "string")); + ObjectValue value = WrapObject( + "foo", Map("a", 1, "b", true, "c", "string"), "bson", + Map("minKey", MinKey(), "maxKey", MaxKey(), "regex", Regex("^foo", "i"), + "int32", Int32(1234), "objectId", BsonObjectId("foo"), "timestamp", + 
BsonTimestamp(123, 456), "binary", BsonBinaryData(128, {7, 8, 9}))); ASSERT_EQ(google_firestore_v1_Value_map_value_tag, value.Get(Field("foo"))->which_value_type); @@ -51,21 +62,32 @@ TEST_F(ObjectValueTest, ExtractsFields) { EXPECT_EQ(*Value(1), *value.Get(Field("foo.a"))); EXPECT_EQ(*Value(true), *value.Get(Field("foo.b"))); EXPECT_EQ(*Value("string"), *value.Get(Field("foo.c"))); - + EXPECT_EQ( + *Value(Map("minKey", MinKey(), "maxKey", MaxKey(), "regex", + Regex("^foo", "i"), "int32", Int32(1234), "objectId", + BsonObjectId("foo"), "timestamp", BsonTimestamp(123, 456), + "binary", BsonBinaryData(128, {7, 8, 9}))), + *value.Get(Field("bson"))); EXPECT_EQ(nullopt, value.Get(Field("foo.a.b"))); EXPECT_EQ(nullopt, value.Get(Field("bar"))); EXPECT_EQ(nullopt, value.Get(Field("bar.a"))); } TEST_F(ObjectValueTest, ExtractsFieldMask) { - ObjectValue value = - WrapObject("a", "b", "Map", - Map("a", 1, "b", true, "c", "string", "nested", Map("d", "e")), - "emptymap", Map()); - - FieldMask expected_mask = - FieldMask({Field("a"), Field("Map.a"), Field("Map.b"), Field("Map.c"), - Field("Map.nested.d"), Field("emptymap")}); + ObjectValue value = WrapObject( + "a", "b", "Map", + Map("a", 1, "b", true, "c", "string", "nested", Map("d", "e")), + "emptymap", Map(), "bson", + Value(Map("minKey", MinKey(), "maxKey", MaxKey(), "regex", + Regex("^foo", "i"), "int32", Int32(1234), "objectId", + BsonObjectId("foo"), "timestamp", BsonTimestamp(123, 456), + "binary", BsonBinaryData(128, {7, 8, 9})))); + + FieldMask expected_mask = FieldMask( + {Field("a"), Field("Map.a"), Field("Map.b"), Field("Map.c"), + Field("Map.nested.d"), Field("emptymap"), Field("bson.minKey"), + Field("bson.maxKey"), Field("bson.regex"), Field("bson.int32"), + Field("bson.objectId"), Field("bson.timestamp"), Field("bson.binary")}); FieldMask actual_mask = value.ToFieldMask(); EXPECT_EQ(expected_mask, actual_mask); @@ -335,6 +357,48 @@ TEST_F(ObjectValueTest, DoesNotRequireSortedInserts) { EXPECT_EQ(*Value(2), *object_value.Get(Field("nested.nested.c"))); } +TEST_F(ObjectValueTest, CanHandleBsonTypesInObjectValue) { + ObjectValue object_value{}; + object_value.Set(Field("minKey"), MinKey()); + object_value.Set(Field("maxKey"), MaxKey()); + object_value.Set(Field("regex"), Regex("^foo", "i")); + object_value.Set(Field("int32"), Int32(1234)); + object_value.Set(Field("objectId"), BsonObjectId("foo")); + object_value.Set(Field("timestamp"), BsonTimestamp(123, 456)); + object_value.Set(Field("binary"), BsonBinaryData(128, {7, 8, 9})); + + EXPECT_EQ( + WrapObject(Map("minKey", MinKey(), "maxKey", MaxKey(), "regex", + Regex("^foo", "i"), "int32", Int32(1234), "objectId", + BsonObjectId("foo"), "timestamp", BsonTimestamp(123, 456), + "binary", BsonBinaryData(128, {7, 8, 9}))), + object_value); + + // Overwrite existing fields + object_value.Set(Field("regex"), Regex("^baz", "g")); + object_value.Set(Field("objectId"), BsonObjectId("new-foo-value")); + + // Create nested objects + object_value.Set(Field("foo.regex1"), Regex("^foo", "i")); + object_value.Set(Field("foo.regex2"), Regex("^bar", "i")); + object_value.Set(Field("foo.timestamp"), BsonTimestamp(2, 1)); + + // Delete fields + object_value.Delete(Field("foo.regex1")); + + // Overwrite nested objects + object_value.Set(Field("foo.regex2"), Regex("^bar", "x")); + + EXPECT_EQ( + WrapObject(Map( + "minKey", MinKey(), "maxKey", MaxKey(), "regex", Regex("^baz", "g"), + "int32", Int32(1234), "objectId", BsonObjectId("new-foo-value"), + "timestamp", BsonTimestamp(123, 456), "binary", + 
BsonBinaryData(128, {7, 8, 9}), "foo", + Map("regex2", Regex("^bar", "x"), "timestamp", BsonTimestamp(2, 1)))), + object_value); +} + } // namespace } // namespace model diff --git a/Firestore/core/test/unit/model/value_util_test.cc b/Firestore/core/test/unit/model/value_util_test.cc index c6d2479929c..acb6b6f6607 100644 --- a/Firestore/core/test/unit/model/value_util_test.cc +++ b/Firestore/core/test/unit/model/value_util_test.cc @@ -40,12 +40,20 @@ using model::RefValue; using nanopb::Message; using testutil::Array; using testutil::BlobValue; +using testutil::BsonBinaryData; +using testutil::BsonObjectId; +using testutil::BsonTimestamp; using testutil::DbId; +using testutil::Int32; using testutil::kCanonicalNanBits; using testutil::Key; using testutil::Map; +using testutil::MaxKey; +using testutil::MinKey; +using testutil::Regex; using testutil::time_point; using testutil::Value; +using testutil::VectorType; using util::ComparisonResult; namespace { @@ -99,9 +107,6 @@ class ValueUtilTest : public ::testing::Test { ComparisonResult expected_result) { for (pb_size_t i = 0; i < left->values_count; ++i) { for (pb_size_t j = 0; j < right->values_count; ++j) { - if (expected_result != Compare(left->values[i], right->values[j])) { - std::cout << "here" << std::endl; - } EXPECT_EQ(expected_result, Compare(left->values[i], right->values[j])) << "Order check failed for '" << CanonicalId(left->values[i]) << "' and '" << CanonicalId(right->values[j]) << "' (expected " @@ -184,6 +189,50 @@ TEST(FieldValueTest, ValueHelpers) { auto double_value = Value(2.0); ASSERT_EQ(GetTypeOrder(*double_value), TypeOrder::kNumber); EXPECT_EQ(double_value->double_value, 2.0); + + auto map_value = Map("foo", "bar"); + ASSERT_EQ(GetTypeOrder(*map_value), TypeOrder::kMap); + ASSERT_EQ(DetectMapType(*map_value), MapType::kNormal); + + auto max_value = DeepClone(MaxValue()); + ASSERT_EQ(GetTypeOrder(*max_value), TypeOrder::kMaxValue); + ASSERT_EQ(DetectMapType(*max_value), MapType::kMaxValue); + + auto server_timestamp = EncodeServerTimestamp(kTimestamp1, absl::nullopt); + ASSERT_EQ(GetTypeOrder(*server_timestamp), TypeOrder::kServerTimestamp); + ASSERT_EQ(DetectMapType(*server_timestamp), MapType::kServerTimestamp); + + auto vector_value = VectorType(100); + ASSERT_EQ(GetTypeOrder(*vector_value), TypeOrder::kVector); + ASSERT_EQ(DetectMapType(*vector_value), MapType::kVector); + + auto min_key_value = MinKey(); + ASSERT_EQ(GetTypeOrder(*min_key_value), TypeOrder::kMinKey); + ASSERT_EQ(DetectMapType(*min_key_value), MapType::kMinKey); + + auto max_key_value = MaxKey(); + ASSERT_EQ(GetTypeOrder(*max_key_value), TypeOrder::kMaxKey); + ASSERT_EQ(DetectMapType(*max_key_value), MapType::kMaxKey); + + auto regex_value = Regex("^foo", "x"); + ASSERT_EQ(GetTypeOrder(*regex_value), TypeOrder::kRegex); + ASSERT_EQ(DetectMapType(*regex_value), MapType::kRegex); + + auto int32_value = Int32(1); + ASSERT_EQ(GetTypeOrder(*int32_value), TypeOrder::kNumber); + ASSERT_EQ(DetectMapType(*int32_value), MapType::kInt32); + + auto bson_object_id_value = BsonObjectId("foo"); + ASSERT_EQ(GetTypeOrder(*bson_object_id_value), TypeOrder::kBsonObjectId); + ASSERT_EQ(DetectMapType(*bson_object_id_value), MapType::kBsonObjectId); + + auto bson_timestamp_value = BsonTimestamp(1, 2); + ASSERT_EQ(GetTypeOrder(*bson_timestamp_value), TypeOrder::kBsonTimestamp); + ASSERT_EQ(DetectMapType(*bson_timestamp_value), MapType::kBsonTimestamp); + + auto bson_binary_data_value = BsonBinaryData(1, {1, 2, 3}); + ASSERT_EQ(GetTypeOrder(*bson_binary_data_value), 
TypeOrder::kBsonBinaryData); + ASSERT_EQ(DetectMapType(*bson_binary_data_value), MapType::kBsonBinaryData); } #if __APPLE__ @@ -210,6 +259,7 @@ TEST_F(ValueUtilTest, Equality) { std::vector> equals_group; Add(equals_group, nullptr, nullptr); + Add(equals_group, MinKey(), MinKey()); Add(equals_group, false, false); Add(equals_group, true, true); Add(equals_group, std::numeric_limits::quiet_NaN(), @@ -222,6 +272,8 @@ TEST_F(ValueUtilTest, Equality) { // Doubles and Longs aren't equal (even though they compare same). Add(equals_group, 1.0, 1.0); Add(equals_group, 1.1, 1.1); + Add(equals_group, Int32(-1), Int32(-1)); + Add(equals_group, Int32(1), Int32(1)); Add(equals_group, BlobValue(0, 1, 1)); Add(equals_group, BlobValue(0, 1)); Add(equals_group, "string", "string"); @@ -248,10 +300,21 @@ TEST_F(ValueUtilTest, Equality) { Add(equals_group, Array("foo")); Add(equals_group, Map("__type__", "__vector__", "value", Array()), DeepClone(MinVector())); + Add(equals_group, Regex("foo", "bar"), Regex("foo", "bar")); + Add(equals_group, BsonObjectId("bar")); + Add(equals_group, BsonObjectId("foo"), BsonObjectId("foo")); + Add(equals_group, BsonTimestamp(1, 3)); + Add(equals_group, BsonTimestamp(1, 2), BsonTimestamp(1, 2)); + Add(equals_group, BsonTimestamp(2, 3)); + Add(equals_group, BsonBinaryData(1, {7, 8, 9})); + Add(equals_group, BsonBinaryData(128, {7, 8, 9}), + BsonBinaryData(128, {7, 8, 9})); + Add(equals_group, BsonBinaryData(128, {7, 8, 10})); Add(equals_group, Map("bar", 1, "foo", 2), Map("bar", 1, "foo", 2)); Add(equals_group, Map("bar", 2, "foo", 1)); Add(equals_group, Map("bar", 1)); Add(equals_group, Map("foo", 1)); + Add(equals_group, MaxKey(), MaxKey()); for (size_t i = 0; i < equals_group.size(); ++i) { for (size_t j = i; j < equals_group.size(); ++j) { @@ -271,6 +334,9 @@ TEST_F(ValueUtilTest, StrictOrdering) { // null first Add(comparison_groups, nullptr); + // MinKey + Add(comparison_groups, MinKey()); + // booleans Add(comparison_groups, false); Add(comparison_groups, true); @@ -281,10 +347,12 @@ TEST_F(ValueUtilTest, StrictOrdering) { Add(comparison_groups, std::numeric_limits::min()); Add(comparison_groups, -0.1); // Zeros all compare the same. - Add(comparison_groups, -0.0, 0.0, 0L); + Add(comparison_groups, -0.0, 0.0, 0L, Int32(0)); Add(comparison_groups, 0.1); - // Doubles and longs Compare() the same. - Add(comparison_groups, 1.0, 1L); + // Doubles, longs, and Int32 Compare() the same. + Add(comparison_groups, 1.0, 1L, Int32(1)); + Add(comparison_groups, Int32(2)); + Add(comparison_groups, Int32(2147483647)); Add(comparison_groups, std::numeric_limits::max()); Add(comparison_groups, 1e20); @@ -293,6 +361,12 @@ TEST_F(ValueUtilTest, StrictOrdering) { Add(comparison_groups, kTimestamp1); Add(comparison_groups, kTimestamp2); + // BSON Timestamp + Add(comparison_groups, DeepClone(MinBsonTimestamp())); + Add(comparison_groups, BsonTimestamp(123, 4), BsonTimestamp(123, 4)); + Add(comparison_groups, BsonTimestamp(123, 5)); + Add(comparison_groups, BsonTimestamp(124, 0)); + // server timestamps come after all concrete timestamps. // NOTE: server timestamps can't be parsed with . 
Add(comparison_groups, EncodeServerTimestamp(kTimestamp1, absl::nullopt)); @@ -318,6 +392,13 @@ TEST_F(ValueUtilTest, StrictOrdering) { Add(comparison_groups, BlobValue(0, 1, 2, 4, 3)); Add(comparison_groups, BlobValue(255)); + // BSON Binary Data + Add(comparison_groups, DeepClone(MinBsonBinaryData())); + Add(comparison_groups, BsonBinaryData(5, {1, 2, 3}), + BsonBinaryData(5, {1, 2, 3})); + Add(comparison_groups, BsonBinaryData(7, {1})); + Add(comparison_groups, BsonBinaryData(7, {2})); + // resource names Add(comparison_groups, DeepClone(MinReference())); Add(comparison_groups, RefValue(DbId("p1/d1"), Key("c1/doc1"))); @@ -327,6 +408,14 @@ TEST_F(ValueUtilTest, StrictOrdering) { Add(comparison_groups, RefValue(DbId("p1/d2"), Key("c1/doc1"))); Add(comparison_groups, RefValue(DbId("p2/d1"), Key("c1/doc1"))); + // BSON ObjectId + Add(comparison_groups, DeepClone(MinBsonObjectId())); + Add(comparison_groups, BsonObjectId("foo"), BsonObjectId("foo")); + // TODO(types/ehsann): uncomment after string sort bug is fixed + // Add(comparison_groups, BsonObjectId("Ḟoo")); + // Add(comparison_groups, BsonObjectId("foo\u0301")); + Add(comparison_groups, BsonObjectId("xyz")); + // geo points Add(comparison_groups, GeoPoint(-90, -180)); Add(comparison_groups, GeoPoint(-90, 0)); @@ -341,8 +430,15 @@ TEST_F(ValueUtilTest, StrictOrdering) { Add(comparison_groups, GeoPoint(90, 0)); Add(comparison_groups, GeoPoint(90, 180)); - // arrays - Add(comparison_groups, DeepClone(MinArray())); + // regular expressions + Add(comparison_groups, DeepClone(MinRegex())); + Add(comparison_groups, Regex("a", "bar1")), + Add(comparison_groups, Regex("foo", "bar1")), + Add(comparison_groups, Regex("foo", "bar2")), + Add(comparison_groups, Regex("go", "bar1")), + + // arrays + Add(comparison_groups, DeepClone(MinArray())); Add(comparison_groups, Array("bar")); Add(comparison_groups, Array("foo", 1)); Add(comparison_groups, Array("foo", 2)); @@ -363,6 +459,10 @@ TEST_F(ValueUtilTest, StrictOrdering) { Add(comparison_groups, Map("foo", 1)); Add(comparison_groups, Map("foo", 2)); Add(comparison_groups, Map("foo", "0")); + + // MaxKey + Add(comparison_groups, MaxKey()); + Add(comparison_groups, DeepClone(MaxValue())); for (size_t i = 0; i < comparison_groups.size(); ++i) { @@ -386,6 +486,9 @@ TEST_F(ValueUtilTest, RelaxedOrdering) { // null first Add(comparison_groups, DeepClone(NullValue())); Add(comparison_groups, nullptr); + + // MinKey + Add(comparison_groups, MinKey()); Add(comparison_groups, DeepClone(MinBoolean())); // booleans @@ -401,10 +504,12 @@ TEST_F(ValueUtilTest, RelaxedOrdering) { Add(comparison_groups, std::numeric_limits::min()); Add(comparison_groups, -0.1); // Zeros all compare the same. - Add(comparison_groups, -0.0, 0.0, 0L); + Add(comparison_groups, -0.0, 0.0, 0L, Int32(0)); Add(comparison_groups, 0.1); // Doubles and longs Compare() the same. 
- Add(comparison_groups, 1.0, 1L); + Add(comparison_groups, 1.0, 1L, Int32(1)); + Add(comparison_groups, Int32(2)); + Add(comparison_groups, Int32(2147483647)); Add(comparison_groups, std::numeric_limits::max()); Add(comparison_groups, 1e20); Add(comparison_groups, DeepClone(MinTimestamp())); @@ -415,6 +520,12 @@ TEST_F(ValueUtilTest, RelaxedOrdering) { Add(comparison_groups, kTimestamp1); Add(comparison_groups, kTimestamp2); + // BSON Timestamp + Add(comparison_groups, DeepClone(MinBsonTimestamp())); + Add(comparison_groups, BsonTimestamp(123, 4), BsonTimestamp(123, 4)); + Add(comparison_groups, BsonTimestamp(123, 5)); + Add(comparison_groups, BsonTimestamp(124, 0)); + // server timestamps come after all concrete timestamps. // NOTE: server timestamps can't be parsed with . Add(comparison_groups, EncodeServerTimestamp(kTimestamp1, absl::nullopt)); @@ -443,7 +554,13 @@ TEST_F(ValueUtilTest, RelaxedOrdering) { Add(comparison_groups, BlobValue(0, 1, 2, 3, 4)); Add(comparison_groups, BlobValue(0, 1, 2, 4, 3)); Add(comparison_groups, BlobValue(255)); - Add(comparison_groups, DeepClone(MinReference())); + + // BSON Binary Data + Add(comparison_groups, DeepClone(MinBsonBinaryData())); + Add(comparison_groups, BsonBinaryData(5, {1, 2, 3}), + BsonBinaryData(5, {1, 2, 3})); + Add(comparison_groups, BsonBinaryData(7, {1})); + Add(comparison_groups, BsonBinaryData(7, {2})); // resource names Add(comparison_groups, DeepClone(MinReference())); @@ -453,7 +570,14 @@ TEST_F(ValueUtilTest, RelaxedOrdering) { Add(comparison_groups, RefValue(DbId("p1/d1"), Key("c2/doc1"))); Add(comparison_groups, RefValue(DbId("p1/d2"), Key("c1/doc1"))); Add(comparison_groups, RefValue(DbId("p2/d1"), Key("c1/doc1"))); - Add(comparison_groups, DeepClone(MinGeoPoint())); + + // BSON ObjectId + Add(comparison_groups, DeepClone(MinBsonObjectId())); + Add(comparison_groups, BsonObjectId("foo"), BsonObjectId("foo")); + // TODO(types/ehsann): uncomment after string sort bug is fixed + // Add(comparison_groups, BsonObjectId("Ḟoo")); + // Add(comparison_groups, BsonObjectId("foo\u0301")); + Add(comparison_groups, BsonObjectId("xyz")); // geo points Add(comparison_groups, DeepClone(MinGeoPoint())); @@ -469,10 +593,16 @@ TEST_F(ValueUtilTest, RelaxedOrdering) { Add(comparison_groups, GeoPoint(90, -180)); Add(comparison_groups, GeoPoint(90, 0)); Add(comparison_groups, GeoPoint(90, 180)); - Add(comparison_groups, DeepClone(MinArray())); - // arrays - Add(comparison_groups, DeepClone(MinArray())); + // regular expressions + Add(comparison_groups, DeepClone(MinRegex())); + Add(comparison_groups, Regex("a", "bar1")), + Add(comparison_groups, Regex("foo", "bar1")), + Add(comparison_groups, Regex("foo", "bar2")), + Add(comparison_groups, Regex("go", "bar1")), + + // arrays + Add(comparison_groups, DeepClone(MinArray())); Add(comparison_groups, Array("bar")); Add(comparison_groups, Array("foo", 1)); Add(comparison_groups, Array("foo", 2)); @@ -481,11 +611,9 @@ TEST_F(ValueUtilTest, RelaxedOrdering) { // vectors Add(comparison_groups, DeepClone(MinVector())); - Add(comparison_groups, Map("__type__", "__vector__", "value", Array(100))); - Add(comparison_groups, - Map("__type__", "__vector__", "value", Array(1.0, 2.0, 3.0))); - Add(comparison_groups, - Map("__type__", "__vector__", "value", Array(1.0, 3.0, 2.0))); + Add(comparison_groups, VectorType(100)); + Add(comparison_groups, VectorType(1.0, 2.0, 3.0)); + Add(comparison_groups, VectorType(1.0, 3.0, 2.0)); // objects Add(comparison_groups, DeepClone(MinMap())); @@ -494,6 +622,11 @@ 
TEST_F(ValueUtilTest, RelaxedOrdering) { Add(comparison_groups, Map("foo", 1)); Add(comparison_groups, Map("foo", 2)); Add(comparison_groups, Map("foo", "0")); + + // MaxKey + Add(comparison_groups, MaxKey()); + + // MaxValue (internal) Add(comparison_groups, DeepClone(MaxValue())); for (size_t i = 0; i < comparison_groups.size(); ++i) { @@ -519,9 +652,17 @@ TEST_F(ValueUtilTest, CanonicalId) { VerifyCanonicalId(Map("a", 1, "b", 2, "c", "3"), "{a:1,b:2,c:3}"); VerifyCanonicalId(Map("a", Array("b", Map("c", GeoPoint(30, 60)))), "{a:[b,{c:geo(30.0,60.0)}]}"); - VerifyCanonicalId( - Map("__type__", "__vector__", "value", Array(1.0, 1.0, -2.0, 3.14)), - "{__type__:__vector__,value:[1.0,1.0,-2.0,3.1]}"); + VerifyCanonicalId(VectorType(1.0, 1.0, -2.0, 3.14), + "{__type__:__vector__,value:[1.0,1.0,-2.0,3.1]}"); + VerifyCanonicalId(MinKey(), "{__min__:null}"); + VerifyCanonicalId(MaxKey(), "{__max__:null}"); + VerifyCanonicalId(Regex("^foo", "x"), "{__regex__:{pattern:^foo,options:x}}"); + VerifyCanonicalId(Int32(123), "{__int__:123}"); + VerifyCanonicalId(BsonObjectId("foo"), "{__oid__:foo}"); + VerifyCanonicalId(BsonTimestamp(1, 2), + "{__request_timestamp__:{seconds:1,increment:2}}"); + // Binary representation: 128 = 0x80, 2 = 0x02, 3 = 0x03, 4 = 0x04 + VerifyCanonicalId(BsonBinaryData(128, {2, 3, 4}), "{__binary__:80020304}"); } TEST_F(ValueUtilTest, DeepClone) { diff --git a/Firestore/core/test/unit/remote/serializer_test.cc b/Firestore/core/test/unit/remote/serializer_test.cc index 14b08b1e13f..5bf758098b6 100644 --- a/Firestore/core/test/unit/remote/serializer_test.cc +++ b/Firestore/core/test/unit/remote/serializer_test.cc @@ -111,16 +111,23 @@ using nanopb::Writer; using remote::Serializer; using testutil::AndFilters; using testutil::Array; +using testutil::BsonBinaryData; +using testutil::BsonObjectId; +using testutil::BsonTimestamp; using testutil::Bytes; using testutil::DeletedDoc; using testutil::Doc; using testutil::Filter; +using testutil::Int32; using testutil::Key; using testutil::Map; +using testutil::MaxKey; +using testutil::MinKey; using testutil::OrderBy; using testutil::OrFilters; using testutil::Query; using testutil::Ref; +using testutil::Regex; using testutil::Value; using testutil::Version; using util::Status; @@ -821,6 +828,98 @@ TEST_F(SerializerTest, EncodesNestedObjects) { ExpectRoundTrip(model, proto, TypeOrder::kMap); } +TEST_F(SerializerTest, EncodesMinKey) { + Message model = MinKey(); + + v1::Value proto; + google::protobuf::Map* fields = + proto.mutable_map_value()->mutable_fields(); + (*fields)["__min__"] = ValueProto(nullptr); + + ExpectRoundTrip(model, proto, TypeOrder::kMinKey); +} + +TEST_F(SerializerTest, EncodesMaxKey) { + Message model = MaxKey(); + + v1::Value proto; + google::protobuf::Map* fields = + proto.mutable_map_value()->mutable_fields(); + (*fields)["__max__"] = ValueProto(nullptr); + + ExpectRoundTrip(model, proto, TypeOrder::kMaxKey); +} + +TEST_F(SerializerTest, EncodesRegexValue) { + Message model = Regex("^foo", "i"); + + v1::Value inner_map_proto; + google::protobuf::Map* inner_fields = + inner_map_proto.mutable_map_value()->mutable_fields(); + (*inner_fields)["pattern"] = ValueProto("^foo"); + (*inner_fields)["options"] = ValueProto("i"); + + v1::Value proto; + google::protobuf::Map* fields = + proto.mutable_map_value()->mutable_fields(); + (*fields)["__regex__"] = inner_map_proto; + + ExpectRoundTrip(model, proto, TypeOrder::kRegex); +} + +TEST_F(SerializerTest, EncodesInt32Value) { + Message model = Int32(78); + + v1::Value proto; + 
google::protobuf::Map* fields = + proto.mutable_map_value()->mutable_fields(); + (*fields)["__int__"] = ValueProto(78); + + ExpectRoundTrip(model, proto, TypeOrder::kNumber); +} + +TEST_F(SerializerTest, EncodesBsonObjectId) { + Message model = BsonObjectId("foo"); + + v1::Value proto; + google::protobuf::Map* fields = + proto.mutable_map_value()->mutable_fields(); + (*fields)["__oid__"] = ValueProto("foo"); + + ExpectRoundTrip(model, proto, TypeOrder::kBsonObjectId); +} + +TEST_F(SerializerTest, EncodesBsonTimestamp) { + Message model = BsonTimestamp(123u, 456u); + + v1::Value inner_map_proto; + google::protobuf::Map* inner_fields = + inner_map_proto.mutable_map_value()->mutable_fields(); + (*inner_fields)["seconds"] = ValueProto(123); + (*inner_fields)["increment"] = ValueProto(456); + + v1::Value proto; + google::protobuf::Map* fields = + proto.mutable_map_value()->mutable_fields(); + (*fields)["__request_timestamp__"] = inner_map_proto; + + ExpectRoundTrip(model, proto, TypeOrder::kBsonTimestamp); +} + +TEST_F(SerializerTest, EncodesBsonBinaryData) { + Message model = + BsonBinaryData(128u, {0x1, 0x2, 0x3}); + + v1::Value proto; + google::protobuf::Map* fields = + proto.mutable_map_value()->mutable_fields(); + std::vector concat{128, 1, 2, 3}; + (*fields)["__binary__"] = + ValueProto(ByteString(concat.data(), concat.size())); + + ExpectRoundTrip(model, proto, TypeOrder::kBsonBinaryData); +} + TEST_F(SerializerTest, EncodesVectorValue) { Message model = Map("__type__", "__vector__", "value", Array(1.0, 2.0, 3.0)); diff --git a/Firestore/core/test/unit/testutil/testutil.cc b/Firestore/core/test/unit/testutil/testutil.cc index 0e851af695d..7709abd33a3 100644 --- a/Firestore/core/test/unit/testutil/testutil.cc +++ b/Firestore/core/test/unit/testutil/testutil.cc @@ -106,6 +106,13 @@ Message BlobValue( return result; } +Message BlobValue(std::vector octets) { + Message result; + result->which_value_type = google_firestore_v1_Value_bytes_value_tag; + result->bytes_value = nanopb::MakeBytesArray(octets.data(), octets.size()); + return result; +} + } // namespace details ByteString Bytes(std::initializer_list octets) { @@ -185,6 +192,40 @@ Message Value(const model::ObjectValue& value) { return DeepClone(value.Get()); } +Message MinKey() { + return Map("__min__", nullptr); +} + +Message MaxKey() { + return Map("__max__", nullptr); +} + +Message Regex(std::string pattern, + std::string options) { + return Map("__regex__", Map("pattern", pattern, "options", options)); +} + +nanopb::Message Int32(int32_t value) { + return Map("__int__", Value(value)); +} + +nanopb::Message BsonObjectId(std::string oid) { + return Map("__oid__", Value(oid)); +} + +nanopb::Message BsonTimestamp(uint32_t seconds, + uint32_t increment) { + return Map("__request_timestamp__", + Map("seconds", Value(seconds), "increment", Value(increment))); +} + +nanopb::Message BsonBinaryData( + uint8_t subtype, std::initializer_list data) { + std::vector bytes{subtype}; + bytes.insert(bytes.end(), data.begin(), data.end()); + return Map("__binary__", details::BlobValue(bytes)); +} + ObjectValue WrapObject(Message value) { return ObjectValue{std::move(value)}; } diff --git a/Firestore/core/test/unit/testutil/testutil.h b/Firestore/core/test/unit/testutil/testutil.h index 234ef3d5d12..dd3924434dc 100644 --- a/Firestore/core/test/unit/testutil/testutil.h +++ b/Firestore/core/test/unit/testutil/testutil.h @@ -294,6 +294,17 @@ nanopb::Message VectorType(Args&&... 
values) { details::MakeArray(std::move(values)...)); } +nanopb::Message MinKey(); +nanopb::Message MaxKey(); +nanopb::Message Regex(std::string pattern, + std::string options); +nanopb::Message Int32(int32_t value); +nanopb::Message BsonObjectId(std::string oid); +nanopb::Message BsonTimestamp(uint32_t seconds, + uint32_t increment); +nanopb::Message BsonBinaryData( + uint8_t subtype, std::initializer_list data); + model::DocumentKey Key(absl::string_view path); model::FieldPath Field(absl::string_view field); From 55c698bfd7467d403c16a3e8c255ccee178fce0a Mon Sep 17 00:00:00 2001 From: Ehsan Date: Thu, 1 May 2025 15:10:16 -0700 Subject: [PATCH 02/16] Implement indexing for bson types. (#798) * Add RegexValue class. * Add to/from proto logic. * Add comparison logic and unit tests. * Add serializer unit test. * Add Codable support, Improve Swift API, Add integration test. * Add more unit tests and integration tests. Also fixed a bug found by unit tests. * Add 6 other types' public API. * rename `.m` to `.mm` to stay consistent. * More unit tests. * WIP: Add the FieldValue methods for building new types. * Add the Swift API and the Obj-C Unit tests. * Add UserDataReader support for new types. * Add missing `extern`. * Add UserDataWriter support for types. Int32 is still missing. * UserDataWriter support for int32. * Update TypeOrder usages with new types. * Add comparison logic for more types. * Add Int32Value comparison logic. * Add SerializerTests for more types. * Use snake case. * Add more unit tests. * Fix bug and add integration test. * Add more integration tests. * Add more integration tests. * Add more integration and unit tests. * Expose public `isEqual` for new types and add tests for it. * Add cross-type order test. * Add Codable support along with integration tests for them. * Remove named parameter for Int32Value and BsonObjectId. * clang-format. * Use `uint8_t` for BsonBinaryData's subtype. * Add `description` for new types. * Reuse type check logic for new types. * Use uint8_t for BsonBinaryData. * Adds tests for FIRFieldValue static c'tors of new types. * Add a few missing tests. * Update tests to check listeners, src=cache, src=server. * Implement indexing for bson types. * Add test for offline write and read from cache. * WIP: Adding more unit tests and integration tests. * Add IndexAllTypesTogether test. * Properly handle empty segments in ref. * Add ComputesLowerBound test. * Add ComputesUpperBound test. * Add IndexValueWriterTest. * Add bson types to index_value_writer_test.cc. * Add CSI tests for bson types. * Check ordered results for queries served from the index. * Add TODO for failing tests. * Address comments. * Improve doc ordering in the new leveldb_local_store_test tests. 
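
The seven types added in patch 01 and indexed here are written and queried through the existing document APIs. A minimal Swift sketch of that flow, mirroring the integration tests in this series (not part of the change itself; the `db` instance and the `coll`/`doc` names are illustrative assumptions):

    import FirebaseFirestore

    let db = Firestore.firestore()
    let ref = db.collection("coll").document("doc")

    // Each of the seven new types can be stored like any other field value.
    ref.setData([
      "minKey": MinKey.instance(),   // sorts before values of every other type
      "maxKey": MaxKey.instance(),   // sorts after values of every other type
      "regex": RegexValue(pattern: "^foo", options: "i"),
      "int32": Int32Value(42),       // compares numerically with Int and Double
      "objectId": BsonObjectId("507f191e810c19729de860ea"),
      "timestamp": BsonTimestamp(seconds: 1, increment: 2),
      "binary": BsonBinaryData(subtype: 128, data: Data([1, 2, 3])),
    ])

    // With the index support in this patch, the same type ordering used by the
    // backend (see TypeOrder in value_util.h) applies to queries served from
    // the client-side index, e.g. a range filter over BSON ObjectIds:
    db.collection("coll")
      .whereField("objectId", isGreaterThanOrEqualTo: BsonObjectId("507f191e810c19729de860ea"))
      .order(by: "objectId")
      .getDocuments { snapshot, error in
        // snapshot?.documents contains "doc" once the write above is applied.
      }

MinKey and MaxKey carry no payload, which is why they are exposed through `instance()` rather than an initializer.
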
--- .../Firestore.xcodeproj/project.pbxproj | 22 + .../Example/Tests/API/FIRFieldValueTests.mm | 32 + Firestore/Source/API/FIRFieldValue.mm | 30 + Firestore/Source/API/FSTUserDataWriter.mm | 4 +- .../Public/FirebaseFirestore/FIRFieldValue.h | 61 ++ .../Source/SwiftAPI/FieldValue+Swift.swift | 50 ++ .../BsonTypesIntegrationTests.swift | 280 ++++++- Firestore/core/CMakeLists.txt | 1 + Firestore/core/src/core/target.cc | 12 +- Firestore/core/src/core/target.h | 4 +- .../src/index/firestore_index_value_writer.cc | 139 +++- .../src/index/firestore_index_value_writer.h | 25 + Firestore/core/src/model/value_util.cc | 112 ++- Firestore/core/src/model/value_util.h | 31 +- Firestore/core/test/unit/core/target_test.cc | 8 +- Firestore/core/test/unit/index/CMakeLists.txt | 27 + .../unit/index/index_value_writer_test.cc | 363 +++++++++ .../unit/local/leveldb_index_manager_test.cc | 530 ++++++++++++++ .../unit/local/leveldb_local_store_test.cc | 693 +++++++++++++++++- .../core/test/unit/local/local_store_test.cc | 36 +- .../core/test/unit/local/local_store_test.h | 30 +- .../core/test/unit/model/value_util_test.cc | 191 ++++- 22 files changed, 2563 insertions(+), 118 deletions(-) create mode 100644 Firestore/core/test/unit/index/CMakeLists.txt create mode 100644 Firestore/core/test/unit/index/index_value_writer_test.cc diff --git a/Firestore/Example/Firestore.xcodeproj/project.pbxproj b/Firestore/Example/Firestore.xcodeproj/project.pbxproj index 4110ab951a1..711cce62166 100644 --- a/Firestore/Example/Firestore.xcodeproj/project.pbxproj +++ b/Firestore/Example/Firestore.xcodeproj/project.pbxproj @@ -276,6 +276,7 @@ 26C4E52128C8E7B5B96BECC4 /* defer_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 8ABAC2E0402213D837F73DC3 /* defer_test.cc */; }; 26C577D159CFFD73E24D543C /* memory_mutation_queue_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 74FBEFA4FE4B12C435011763 /* memory_mutation_queue_test.cc */; }; 26CB3D7C871BC56456C6021E /* timestamp_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = ABF6506B201131F8005F2C74 /* timestamp_test.cc */; }; + 26E36A01DA79CC72F88E3A21 /* index_value_writer_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 22B1FBDD47024F03B8CAA054 /* index_value_writer_test.cc */; }; 276A563D546698B6AAC20164 /* annotations.pb.cc in Sources */ = {isa = PBXBuildFile; fileRef = 618BBE9520B89AAC00B5BCE7 /* annotations.pb.cc */; }; 27AF4C4BAFE079892D4F5341 /* Validation_BloomFilterTest_MD5_50000_1_bloom_filter_proto.json in Resources */ = {isa = PBXBuildFile; fileRef = 4B3E4A77493524333133C5DC /* Validation_BloomFilterTest_MD5_50000_1_bloom_filter_proto.json */; }; 27E46C94AAB087C80A97FF7F /* FIRServerTimestampTests.mm in Sources */ = {isa = PBXBuildFile; fileRef = 5492E06E202154D600B64F25 /* FIRServerTimestampTests.mm */; }; @@ -299,6 +300,7 @@ 2AD8EE91928AE68DF268BEDA /* limbo_spec_test.json in Resources */ = {isa = PBXBuildFile; fileRef = 54DA129E1F315EE100DD57A1 /* limbo_spec_test.json */; }; 2AD98CD29CC6F820A74CDD5E /* Validation_BloomFilterTest_MD5_1_0001_bloom_filter_proto.json in Resources */ = {isa = PBXBuildFile; fileRef = 4B59C0A7B2A4548496ED4E7D /* Validation_BloomFilterTest_MD5_1_0001_bloom_filter_proto.json */; }; 2AE3914BBC4EDF91BD852939 /* memory_query_engine_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 8EF6A33BC2D84233C355F1D0 /* memory_query_engine_test.cc */; }; + 2B3C73B6702180419FC5460A /* index_value_writer_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 22B1FBDD47024F03B8CAA054 /* index_value_writer_test.cc */; }; 
2B4021C3E663DDDDD512E961 /* objc_type_traits_apple_test.mm in Sources */ = {isa = PBXBuildFile; fileRef = 2A0CF41BA5AED6049B0BEB2C /* objc_type_traits_apple_test.mm */; }; 2B4234B962625F9EE68B31AC /* index_manager_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = AE4A9E38D65688EE000EE2A1 /* index_manager_test.cc */; }; 2B4D0509577E5CE0B0B8CEDF /* message_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = CE37875365497FFA8687B745 /* message_test.cc */; }; @@ -501,6 +503,7 @@ 4DAF501EE4B4DB79ED4239B0 /* secure_random_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 54740A531FC913E500713A1A /* secure_random_test.cc */; }; 4DAFC3A3FD5E96910A517320 /* fake_target_metadata_provider.cc in Sources */ = {isa = PBXBuildFile; fileRef = 71140E5D09C6E76F7C71B2FC /* fake_target_metadata_provider.cc */; }; 4DC660A62BC2B6369DA5C563 /* status_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 54A0352C20A3B3D7003E0143 /* status_test.cc */; }; + 4DE1DCA66D728E812A72F624 /* index_value_writer_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 22B1FBDD47024F03B8CAA054 /* index_value_writer_test.cc */; }; 4DF18D15AC926FB7A4888313 /* lru_garbage_collector_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 277EAACC4DD7C21332E8496A /* lru_garbage_collector_test.cc */; }; 4E0777435A9A26B8B2C08A1E /* remote_document_cache_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 7EB299CF85034F09CFD6F3FD /* remote_document_cache_test.cc */; }; 4E2E0314F9FDD7BCED60254A /* counting_query_engine.cc in Sources */ = {isa = PBXBuildFile; fileRef = 99434327614FEFF7F7DC88EC /* counting_query_engine.cc */; }; @@ -1056,6 +1059,7 @@ 977E0DA564D6EAF975A4A1A0 /* settings_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = DD12BC1DB2480886D2FB0005 /* settings_test.cc */; }; 9783FAEA4CF758E8C4C2D76E /* hashing_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 54511E8D209805F8005BD28F /* hashing_test.cc */; }; 978D9EFDC56CC2E1FA468712 /* leveldb_snappy_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = D9D94300B9C02F7069523C00 /* leveldb_snappy_test.cc */; }; + 984135015B443110FF60F86F /* index_value_writer_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 22B1FBDD47024F03B8CAA054 /* index_value_writer_test.cc */; }; 9860F493EBF43AF5AC0A88BD /* empty_credentials_provider_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 8FA60B08D59FEA0D6751E87F /* empty_credentials_provider_test.cc */; }; 98708140787A9465D883EEC9 /* leveldb_mutation_queue_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 5C7942B6244F4C416B11B86C /* leveldb_mutation_queue_test.cc */; }; 98FE82875A899A40A98AAC22 /* leveldb_opener_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 75860CD13AF47EB1EA39EC2F /* leveldb_opener_test.cc */; }; @@ -1105,6 +1109,7 @@ A25FF76DEF542E01A2DF3B0E /* time_testing.cc in Sources */ = {isa = PBXBuildFile; fileRef = 5497CB76229DECDE000FB92F /* time_testing.cc */; }; A27096F764227BC73526FED3 /* leveldb_remote_document_cache_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 0840319686A223CC4AD3FAB1 /* leveldb_remote_document_cache_test.cc */; }; A27908A198E1D2230C1801AC /* bundle_serializer_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = B5C2A94EE24E60543F62CC35 /* bundle_serializer_test.cc */; }; + A2905C9606C844D7C44B21D7 /* index_value_writer_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 22B1FBDD47024F03B8CAA054 /* index_value_writer_test.cc */; }; A2E9978E02F7BCB016555F09 /* Validation_BloomFilterTest_MD5_1_1_membership_test_result.json in Resources */ = {isa 
= PBXBuildFile; fileRef = 3369AC938F82A70685C5ED58 /* Validation_BloomFilterTest_MD5_1_1_membership_test_result.json */; }; A3262936317851958C8EABAF /* byte_stream_cpp_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 01D10113ECC5B446DB35E96D /* byte_stream_cpp_test.cc */; }; A4757C171D2407F61332EA38 /* byte_stream_cpp_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 01D10113ECC5B446DB35E96D /* byte_stream_cpp_test.cc */; }; @@ -1377,6 +1382,7 @@ CE2962775B42BDEEE8108567 /* leveldb_lru_garbage_collector_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = B629525F7A1AAC1AB765C74F /* leveldb_lru_garbage_collector_test.cc */; }; CE411D4B70353823DE63C0D5 /* bundle_loader_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = A853C81A6A5A51C9D0389EDA /* bundle_loader_test.cc */; }; CEA91CE103B42533C54DBAD6 /* memory_remote_document_cache_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 1CA9800A53669EFBFFB824E3 /* memory_remote_document_cache_test.cc */; }; + CEE39EC40FC07EBB02C2E341 /* index_value_writer_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 22B1FBDD47024F03B8CAA054 /* index_value_writer_test.cc */; }; CF18D52A88F4F6F62C5495EF /* thread_safe_memoizer_testing.cc in Sources */ = {isa = PBXBuildFile; fileRef = 6E42FA109D363EA7F3387AAE /* thread_safe_memoizer_testing.cc */; }; CF1FB026CCB901F92B4B2C73 /* watch_change_test.cc in Sources */ = {isa = PBXBuildFile; fileRef = 2D7472BC70C024D736FF74D9 /* watch_change_test.cc */; }; CF5DE1ED21DD0A9783383A35 /* CodableIntegrationTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 124C932B22C1642C00CA8C2D /* CodableIntegrationTests.swift */; }; @@ -1749,6 +1755,7 @@ 214877F52A705012D6720CA0 /* object_value_test.cc */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.cpp.cpp; path = object_value_test.cc; sourceTree = ""; }; 2220F583583EFC28DE792ABE /* Pods_Firestore_IntegrationTests_tvOS.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_Firestore_IntegrationTests_tvOS.framework; sourceTree = BUILT_PRODUCTS_DIR; }; 2286F308EFB0534B1BDE05B9 /* memory_target_cache_test.cc */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.cpp.cpp; path = memory_target_cache_test.cc; sourceTree = ""; }; + 22B1FBDD47024F03B8CAA054 /* index_value_writer_test.cc */ = {isa = PBXFileReference; includeInIndex = 1; name = index_value_writer_test.cc; path = index/index_value_writer_test.cc; sourceTree = ""; }; 26DDBA115DEB88631B93F203 /* thread_safe_memoizer_testing.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; path = thread_safe_memoizer_testing.h; sourceTree = ""; }; 277EAACC4DD7C21332E8496A /* lru_garbage_collector_test.cc */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.cpp.cpp; path = lru_garbage_collector_test.cc; sourceTree = ""; }; 28B45B2104E2DAFBBF86DBB7 /* logic_utils_test.cc */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.cpp.cpp; path = logic_utils_test.cc; sourceTree = ""; }; @@ -2461,6 +2468,7 @@ AB380CF7201937B800D97691 /* core */, 11BB7A1B7F6F482EFDBC5303 /* credentials */, 54EB764B202277970088B8F3 /* immutable */, + F952916DDF5B3977111173CC /* index */, 54995F70205B6E1A004EFFA0 /* local */, AB356EF5200E9D1A0089B766 /* model */, 5C332D7293E6114E491D3662 /* nanopb */, @@ -3107,6 +3115,14 @@ name = bundle; sourceTree = ""; }; + F952916DDF5B3977111173CC /* index */ = { + isa = PBXGroup; + children = ( + 
22B1FBDD47024F03B8CAA054 /* index_value_writer_test.cc */, + ); + name = index; + sourceTree = ""; + }; /* End PBXGroup section */ /* Begin PBXNativeTarget section */ @@ -4273,6 +4289,7 @@ 48BC5801432127A90CFF55E3 /* index.pb.cc in Sources */, 167659CDCA47B450F2441454 /* index_backfiller_test.cc in Sources */, FAD97B82766AEC29B7B5A1B7 /* index_manager_test.cc in Sources */, + 26E36A01DA79CC72F88E3A21 /* index_value_writer_test.cc in Sources */, E084921EFB7CF8CB1E950D6C /* iterator_adaptors_test.cc in Sources */, 49C04B97AB282FFA82FD98CD /* latlng.pb.cc in Sources */, 292BCC76AF1B916752764A8F /* leveldb_bundle_cache_test.cc in Sources */, @@ -4497,6 +4514,7 @@ 190F9885BAA81587F08CD26C /* index.pb.cc in Sources */, B845B9EDED330D0FDAD891BC /* index_backfiller_test.cc in Sources */, F58A23FEF328EB74F681FE83 /* index_manager_test.cc in Sources */, + CEE39EC40FC07EBB02C2E341 /* index_value_writer_test.cc in Sources */, 0E4C94369FFF7EC0C9229752 /* iterator_adaptors_test.cc in Sources */, 0FBDD5991E8F6CD5F8542474 /* latlng.pb.cc in Sources */, 513D34C9964E8C60C5C2EE1C /* leveldb_bundle_cache_test.cc in Sources */, @@ -4747,6 +4765,7 @@ 096BA3A3703AC1491F281618 /* index.pb.cc in Sources */, 9236478E01DF2EC7DF58B1FC /* index_backfiller_test.cc in Sources */, 4BFEEB7FDD7CD5A693B5B5C1 /* index_manager_test.cc in Sources */, + 4DE1DCA66D728E812A72F624 /* index_value_writer_test.cc in Sources */, FA334ADC73CFDB703A7C17CD /* iterator_adaptors_test.cc in Sources */, CBC891BEEC525F4D8F40A319 /* latlng.pb.cc in Sources */, 2E76BC76BBCE5FCDDCF5EEBE /* leveldb_bundle_cache_test.cc in Sources */, @@ -4997,6 +5016,7 @@ 6E8CD8F545C8EDA84918977C /* index.pb.cc in Sources */, E25DCFEF318E003B8B7B9DC8 /* index_backfiller_test.cc in Sources */, 650B31A5EC6F8D2AEA79C350 /* index_manager_test.cc in Sources */, + A2905C9606C844D7C44B21D7 /* index_value_writer_test.cc in Sources */, 86494278BE08F10A8AAF9603 /* iterator_adaptors_test.cc in Sources */, 4173B61CB74EB4CD1D89EE68 /* latlng.pb.cc in Sources */, 1E8F5F37052AB0C087D69DF9 /* leveldb_bundle_cache_test.cc in Sources */, @@ -5231,6 +5251,7 @@ 77D38E78F7CCB8504450A8FB /* index.pb.cc in Sources */, 76FEBDD2793B729BAD2E84C7 /* index_backfiller_test.cc in Sources */, E6357221227031DD77EE5265 /* index_manager_test.cc in Sources */, + 2B3C73B6702180419FC5460A /* index_value_writer_test.cc in Sources */, 54A0353520A3D8CB003E0143 /* iterator_adaptors_test.cc in Sources */, 618BBEAE20B89AAC00B5BCE7 /* latlng.pb.cc in Sources */, 0EDFC8A6593477E1D17CDD8F /* leveldb_bundle_cache_test.cc in Sources */, @@ -5500,6 +5521,7 @@ 78E8DDDBE131F3DA9AF9F8B8 /* index.pb.cc in Sources */, CCE596E8654A4D2EEA75C219 /* index_backfiller_test.cc in Sources */, 2B4234B962625F9EE68B31AC /* index_manager_test.cc in Sources */, + 984135015B443110FF60F86F /* index_value_writer_test.cc in Sources */, 8A79DDB4379A063C30A76329 /* iterator_adaptors_test.cc in Sources */, 23C04A637090E438461E4E70 /* latlng.pb.cc in Sources */, 77C459976DCF7503AEE18F7F /* leveldb_bundle_cache_test.cc in Sources */, diff --git a/Firestore/Example/Tests/API/FIRFieldValueTests.mm b/Firestore/Example/Tests/API/FIRFieldValueTests.mm index af8ba7b5ed7..0849538a94e 100644 --- a/Firestore/Example/Tests/API/FIRFieldValueTests.mm +++ b/Firestore/Example/Tests/API/FIRFieldValueTests.mm @@ -48,6 +48,38 @@ - (void)testEquals { XCTAssertNotEqual([deleted hash], [serverTimestamp hash]); } +- (void)testCanCreateRegexValue { + FIRRegexValue *regex = [FIRFieldValue regexWithPattern:@"^foo" options:@"x"]; + XCTAssertEqual(regex.pattern, 
@"^foo"); + XCTAssertEqual(regex.options, @"x"); +} + +- (void)testCanCreateInt32Value { + FIRInt32Value *int1 = [FIRFieldValue int32WithValue:1234]; + XCTAssertEqual(int1.value, 1234); + + FIRInt32Value *int2 = [FIRFieldValue int32WithValue:-1234]; + XCTAssertEqual(int2.value, -1234); +} + +- (void)testCanCreateBsonObjectId { + FIRBsonObjectId *objectId = [FIRFieldValue bsonObjectIdWithValue:@"foo"]; + XCTAssertEqual(objectId.value, @"foo"); +} + +- (void)testCanCreateBsonTimestamp { + FIRBsonTimestamp *timestamp = [FIRFieldValue bsonTimestampWithSeconds:123 increment:456]; + XCTAssertEqual(timestamp.seconds, 123U); + XCTAssertEqual(timestamp.increment, 456U); +} + +- (void)testCanCreateBsonBinaryData { + FIRBsonBinaryData *binData = [FIRFieldValue bsonBinaryDataWithSubtype:128 + data:FSTTestData(1, 2, 3, -1)]; + XCTAssertEqual(binData.subtype, 128); + XCTAssertTrue([binData.data isEqualToData:FSTTestData(1, 2, 3, -1)]); +} + @end NS_ASSUME_NONNULL_END diff --git a/Firestore/Source/API/FIRFieldValue.mm b/Firestore/Source/API/FIRFieldValue.mm index e86964f5f3d..5dfe298e859 100644 --- a/Firestore/Source/API/FIRFieldValue.mm +++ b/Firestore/Source/API/FIRFieldValue.mm @@ -188,6 +188,36 @@ + (nonnull FIRVectorValue *)vectorWithArray:(nonnull NSArray *)array return [[FIRVectorValue alloc] initWithArray:array]; } ++ (nonnull FIRMinKey *)minKey { + return [FIRMinKey instance]; +} + ++ (nonnull FIRMaxKey *)maxKey { + return [FIRMaxKey instance]; +} + ++ (nonnull FIRRegexValue *)regexWithPattern:(nonnull NSString *)pattern + options:(nonnull NSString *)options { + return [[FIRRegexValue alloc] initWithPattern:pattern options:options]; +} + ++ (nonnull FIRInt32Value *)int32WithValue:(int)value { + return [[FIRInt32Value alloc] initWithValue:value]; +} + ++ (nonnull FIRBsonObjectId *)bsonObjectIdWithValue:(NSString *)value { + return [[FIRBsonObjectId alloc] initWithValue:value]; +} + ++ (nonnull FIRBsonTimestamp *)bsonTimestampWithSeconds:(uint32_t)seconds + increment:(uint32_t)increment { + return [[FIRBsonTimestamp alloc] initWithSeconds:seconds increment:increment]; +} + ++ (nonnull FIRBsonBinaryData *)bsonBinaryDataWithSubtype:(uint8_t)subtype data:(NSData *)data { + return [[FIRBsonBinaryData alloc] initWithSubtype:subtype data:data]; +} + @end NS_ASSUME_NONNULL_END diff --git a/Firestore/Source/API/FSTUserDataWriter.mm b/Firestore/Source/API/FSTUserDataWriter.mm index c772819e440..2b0c4d31cab 100644 --- a/Firestore/Source/API/FSTUserDataWriter.mm +++ b/Firestore/Source/API/FSTUserDataWriter.mm @@ -135,8 +135,8 @@ - (id)convertedValue:(const google_firestore_v1_Value &)value { return [self convertedBsonBinaryData:value.map_value]; case TypeOrder::kVector: return [self convertedVector:value.map_value]; - case TypeOrder::kMaxValue: - // It is not possible for users to construct a kMaxValue manually. + case TypeOrder::kInternalMaxValue: + // It is not possible for users to construct a kInternalMaxValue manually. break; } diff --git a/Firestore/Source/Public/FirebaseFirestore/FIRFieldValue.h b/Firestore/Source/Public/FirebaseFirestore/FIRFieldValue.h index 2e35547bace..e81158fb506 100644 --- a/Firestore/Source/Public/FirebaseFirestore/FIRFieldValue.h +++ b/Firestore/Source/Public/FirebaseFirestore/FIRFieldValue.h @@ -107,6 +107,67 @@ NS_SWIFT_NAME(FieldValue) */ + (FIRVectorValue *)vectorWithArray:(NSArray *)array NS_REFINED_FOR_SWIFT; +/** + * Returns a `MinKey` value instance. + * + * @return A `MinKey` value instance. 
+ */ ++ (nonnull FIRMinKey *)minKey NS_REFINED_FOR_SWIFT; + +/** + * Returns a `MaxKey` value instance. + * + * @return A `MaxKey` value instance. + */ ++ (nonnull FIRMaxKey *)maxKey NS_REFINED_FOR_SWIFT; + +/** + * Creates a new `RegexValue` constructed with the given pattern and options. + * + * @param pattern The pattern to use for the regular expression. + * @param options The options to use for the regular expression. + * @return A new `RegexValue` constructed with the given pattern and options. + */ ++ (nonnull FIRRegexValue *)regexWithPattern:(nonnull NSString *)pattern + options:(nonnull NSString *)options NS_REFINED_FOR_SWIFT; + +/** + * Creates a new `Int32Value` with the given signed 32-bit integer value. + * + * @param value The 32-bit number to be used for constructing the Int32Value. + * @return A new `Int32Value` instance. + */ ++ (nonnull FIRInt32Value *)int32WithValue:(int)value NS_REFINED_FOR_SWIFT; + +/** + * Creates a new `BsonObjectId` with the given value. + * + * @param value The 24-character hex string representation of the ObjectId. + * @return A new `BsonObjectId` instance constructed with the given value. + */ ++ (nonnull FIRBsonObjectId *)bsonObjectIdWithValue:(nonnull NSString *)value NS_REFINED_FOR_SWIFT; + +/** + * Creates a new `BsonTimestamp` with the given values. + * + * @param seconds The underlying unsigned 32-bit integer for seconds. + * @param increment The underlying unsigned 32-bit integer for increment. + * @return A new `BsonTimestamp` instance constructed with the given values. + */ ++ (nonnull FIRBsonTimestamp *)bsonTimestampWithSeconds:(uint32_t)seconds + increment:(uint32_t)increment NS_REFINED_FOR_SWIFT; + +/** + * Creates a new `BsonBinaryData` object with the given subtype and data. + * + * @param subtype An 8-bit unsigned integer denoting the subtype of the data. + * @param data The binary data. + * @return A new `BsonBinaryData` instance constructed with the given values. + */ ++ (nonnull FIRBsonBinaryData *)bsonBinaryDataWithSubtype:(uint8_t)subtype + data:(nonnull NSData *)data + NS_REFINED_FOR_SWIFT; + @end NS_ASSUME_NONNULL_END diff --git a/Firestore/Swift/Source/SwiftAPI/FieldValue+Swift.swift b/Firestore/Swift/Source/SwiftAPI/FieldValue+Swift.swift index ccab6238267..4f7e3700263 100644 --- a/Firestore/Swift/Source/SwiftAPI/FieldValue+Swift.swift +++ b/Firestore/Swift/Source/SwiftAPI/FieldValue+Swift.swift @@ -40,4 +40,54 @@ public extension FieldValue { } return FieldValue.__vector(with: nsNumbers) } + + /// Returns a `MinKey` instance. + /// - Returns: A `MinKey` instance. + static func minKey() -> MinKey { + return FieldValue.__minKey() + } + + /// Returns a `MaxKey` instance. + /// - Returns: A `MaxKey` instance. + static func maxKey() -> MaxKey { + return FieldValue.__maxKey() + } + + /// Creates a new `RegexValue` constructed with the given pattern and options. + /// - Parameter pattern: The pattern of the regular expression. + /// - Parameter options: The options of the regular expression. + /// - Returns: A new `RegexValue` constructed with the given pattern and options. + static func regex(pattern: String, options: String) -> RegexValue { + return FieldValue.__regex(withPattern: pattern, options: options) + } + + /// Creates a new `Int32Value` with the given signed 32-bit integer value. + /// - Parameter value: The 32-bit number to be used for constructing the Int32Value. + /// - Returns: A new `Int32Value` instance. 
+ static func int32(_ value: Int32) -> Int32Value { + return FieldValue.__int32(withValue: value) + } + + /// Creates a new `BsonObjectId` with the given value. + /// - Parameter value: The 24-character hex string representation of the ObjectId. + /// - Returns: A new `BsonObjectId` instance constructed with the given value. + static func bsonObjectId(_ value: String) -> BsonObjectId { + return FieldValue.__bsonObjectId(withValue: value) + } + + /// Creates a new `BsonTimestamp` with the given values. + /// @param seconds The underlying unsigned 32-bit integer for seconds. + /// @param increment The underlying unsigned 32-bit integer for increment. + /// @return A new `BsonTimestamp` instance constructed with the given values. + static func bsonTimestamp(seconds: UInt32, increment: UInt32) -> BsonTimestamp { + return FieldValue.__bsonTimestamp(withSeconds: seconds, increment: increment) + } + + /// Creates a new `BsonBinaryData` object with the given subtype and data. + /// @param subtype The subtype of the data. + /// @param data The binary data. + /// @return A new `BsonBinaryData` instance constructed with the given values. + static func bsonBinaryData(subtype: UInt8, data: Data) -> BsonBinaryData { + return FieldValue.__bsonBinaryData(withSubtype: subtype, data: data) + } } diff --git a/Firestore/Swift/Tests/Integration/BsonTypesIntegrationTests.swift b/Firestore/Swift/Tests/Integration/BsonTypesIntegrationTests.swift index 0585fd84607..19213756779 100644 --- a/Firestore/Swift/Tests/Integration/BsonTypesIntegrationTests.swift +++ b/Firestore/Swift/Tests/Integration/BsonTypesIntegrationTests.swift @@ -57,7 +57,7 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { for i in 0 ..< expectedDocIds.count { let expectedDocId = expectedDocIds[i] - let expectedDocData = allData[expectedDocId]! + let expectedDocData = allData[expectedDocId] ?? [:] let actualDocData = snapshot.documents[i].data() // We don't need to compare expectedDocId and actualDocId because @@ -148,6 +148,58 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { ) } + func testCanWriteAndReadBsonTypesOffline() throws { + let collection = collectionRef() + disableNetwork() + + let ref = collection.document("doc") + + // Adding docs to cache, do not wait for promise to resolve. + ref.setData([ + "binary": BsonBinaryData(subtype: 1, data: Data([1, 2, 3])), + "objectId": BsonObjectId("507f191e810c19729de860ea"), + "int32": Int32Value(1), + "min": MinKey.instance(), + "max": MaxKey.instance(), + "regex": RegexValue(pattern: "^foo", options: "i"), + ]) + ref.updateData([ + "binary": BsonBinaryData(subtype: 128, data: Data([1, 2, 3])), + "timestamp": BsonTimestamp(seconds: 1, increment: 2), + "int32": Int32Value(2), + ]) + + let snapshot = readDocument(forRef: ref, source: FirestoreSource.cache) + XCTAssertEqual( + snapshot.get("objectId") as? BsonObjectId, + BsonObjectId("507f191e810c19729de860ea") + ) + XCTAssertEqual( + snapshot.get("int32") as? Int32Value, + Int32Value(2) + ) + XCTAssertEqual( + snapshot.get("min") as? MinKey, + MinKey.instance() + ) + XCTAssertEqual( + snapshot.get("max") as? MaxKey, + MaxKey.instance() + ) + XCTAssertEqual( + snapshot.get("binary") as? BsonBinaryData, + BsonBinaryData(subtype: 128, data: Data([1, 2, 3])) + ) + XCTAssertEqual( + snapshot.get("timestamp") as? BsonTimestamp, + BsonTimestamp(seconds: 1, increment: 2) + ) + XCTAssertEqual( + snapshot.get("regex") as? 
RegexValue, + RegexValue(pattern: "^foo", options: "i") + ) + } + func testCanFilterAndOrderObjectIds() async throws { let testDocs = [ "a": ["key": BsonObjectId("507f191e810c19729de860ea")], @@ -307,13 +359,15 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { let testDocs: [String: [String: Any]] = [ "a": ["key": MinKey.instance()], "b": ["key": MinKey.instance()], - "c": ["key": MaxKey.instance()], + "c": ["key": NSNull()], + "d": ["key": 1], + "e": ["key": MaxKey.instance()], ] let collection = collectionRef() await setDocumentData(testDocs, toCollection: collection) - let query = collection + var query = collection .whereField("key", isEqualTo: MinKey.instance()) .order(by: "key", descending: true) try await assertSdkQueryResultsConsistentWithBackend( @@ -321,29 +375,176 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { query: query, expectedResult: ["b", "a"] ) + + // TODO(b/410032145): This currently fails, and is fixed by + // PR #14704. Uncomment this when moving to the main branch. + // var query2 = collection + // .whereField("key", isNotEqualTo: MinKey.instance()) + // .order(by: "key") + // try await assertSdkQueryResultsConsistentWithBackend( + // testDocs, + // query: query2, + // expectedResult: ["d", "e"] + // ) + + query = collection + .whereField("key", isGreaterThanOrEqualTo: MinKey.instance()) + .order(by: "key") + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + query: query, + expectedResult: ["a", "b"] + ) + + query = collection + .whereField("key", isLessThanOrEqualTo: MinKey.instance()) + .order(by: "key") + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + query: query, + expectedResult: ["a", "b"] + ) + + query = collection + .whereField("key", isGreaterThan: MinKey.instance()) + .order(by: "key") + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + query: query, + expectedResult: [] + ) + + query = collection + .whereField("key", isLessThan: MinKey.instance()) + .order(by: "key") + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + query: query, + expectedResult: [] + ) + + query = collection + .whereField("key", isLessThan: 1) + .order(by: "key") + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + query: query, + expectedResult: [] + ) } func testCanFilterAndOrderMaxKeyValues() async throws { let testDocs: [String: [String: Any]] = [ "a": ["key": MinKey.instance()], - "b": ["key": MaxKey.instance()], + "b": ["key": 1], "c": ["key": MaxKey.instance()], + "d": ["key": MaxKey.instance()], + "e": ["key": NSNull()], ] let collection = collectionRef() await setDocumentData(testDocs, toCollection: collection) - let query = collection + var query = collection .whereField("key", isEqualTo: MaxKey.instance()) - .order(by: "key", descending: true) + .order(by: "key") try await assertSdkQueryResultsConsistentWithBackend( testDocs, query: query, - expectedResult: ["c", "b"] + expectedResult: ["c", "d"] + ) + + // TODO(b/410032145): This currently fails, and is fixed by + // PR #14704. Uncomment this when moving to the main branch. 
+ // query = collection + // .whereField("key", isNotEqualTo: MaxKey.instance()) + // .order(by: "key") + // try await assertSdkQueryResultsConsistentWithBackend( + // testDocs, + // query: query, + // expectedResult: ["a", "b"] + // ) + + query = collection + .whereField("key", isGreaterThanOrEqualTo: MaxKey.instance()) + .order(by: "key") + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + query: query, + expectedResult: ["c", "d"] + ) + + query = collection + .whereField("key", isLessThanOrEqualTo: MaxKey.instance()) + .order(by: "key") + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + query: query, + expectedResult: ["c", "d"] + ) + + query = collection + .whereField("key", isGreaterThan: MaxKey.instance()) + .order(by: "key") + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + query: query, + expectedResult: [] + ) + + query = collection + .whereField("key", isLessThan: MaxKey.instance()) + .order(by: "key") + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + query: query, + expectedResult: [] + ) + + query = collection + .whereField("key", isGreaterThan: 1) + .order(by: "key") + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + query: query, + expectedResult: [] ) } - func testCanOrderBsonTypesTogether() async throws { + func testCanHandleNullWithBsonValues() async throws { + let testDocs: [String: [String: Any]] = [ + "a": ["key": MinKey.instance()], + "b": ["key": NSNull()], + "c": ["key": NSNull()], + "d": ["key": 1], + "e": ["key": MaxKey.instance()], + ] + + let collection = collectionRef() + await setDocumentData(testDocs, toCollection: collection) + + var query = collection + .whereField("key", isEqualTo: NSNull()) + .order(by: "key") + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + query: query, + expectedResult: ["b", "c"] + ) + + query = collection + .whereField("key", isNotEqualTo: NSNull()) + .order(by: "key") + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + query: query, + expectedResult: ["a", "d", "e"] + ) + } + + func testCanOrderBsonValues() async throws { + // This test includes several BSON values of different types and ensures + // correct inter-type and intra-type order for BSON values. let testDocs: [String: [String: Any]] = [ "bsonObjectId1": ["key": BsonObjectId("507f191e810c19729de860ea")], "bsonObjectId2": ["key": BsonObjectId("507f191e810c19729de860eb")], @@ -393,6 +594,69 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { ]) } + func testCanOrderValuesOfDifferentTypes() async throws { + // This test has only 1 value of each type, and ensures correct order + // across all types. 
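+    // The expected cross-type order exercised below is:
+    // null < MinKey < boolean < NaN < numbers (Int32Value, double, integer)
+    // < Timestamp < BsonTimestamp < string < bytes < BsonBinaryData
+    // < reference < BsonObjectId < GeoPoint < RegexValue < array
+    // < VectorValue < map < MaxKey.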
+ let collection = collectionRef() + let testDocs: [String: [String: Any]] = [ + "nullValue": ["key": NSNull()], + "minValue": ["key": MinKey.instance()], + "booleanValue": ["key": true], + "nanValue": ["key": Double.nan], + "int32Value": ["key": Int32Value(1)], + "doubleValue": ["key": 2.0], + "integerValue": ["key": 3], + "timestampValue": ["key": Timestamp(seconds: 100, nanoseconds: 123_456_000)], + "bsonTimestampValue": ["key": BsonTimestamp(seconds: 1, increment: 2)], + "stringValue": ["key": "string"], + "bytesValue": ["key": Data([0, 1, 255])], + "bsonBinaryValue": ["key": BsonBinaryData(subtype: 1, data: Data([1, 2, 3]))], + "referenceValue": ["key": collection.document("doc")], + "objectIdValue": ["key": BsonObjectId("507f191e810c19729de860ea")], + "geoPointValue": ["key": GeoPoint(latitude: 0, longitude: 0)], + "regexValue": ["key": RegexValue(pattern: "^foo", options: "i")], + "arrayValue": ["key": [1, 2]], + "vectorValue": ["key": VectorValue([1.0, 2.0])], + "objectValue": ["key": ["a": 1]], + "maxValue": ["key": MaxKey.instance()], + ] + + for (docId, data) in testDocs { + try await collection.document(docId).setData(data as [String: Any]) + } + + let orderedQuery = collection.order(by: "key") + + let expectedOrder = [ + "nullValue", + "minValue", + "booleanValue", + "nanValue", + "int32Value", + "doubleValue", + "integerValue", + "timestampValue", + "bsonTimestampValue", + "stringValue", + "bytesValue", + "bsonBinaryValue", + "referenceValue", + "objectIdValue", + "geoPointValue", + "regexValue", + "arrayValue", + "vectorValue", + "objectValue", + "maxValue", + ] + + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + query: orderedQuery, + expectedResult: expectedOrder + ) + } + func testCanRunTransactionsOnDocumentsWithBsonTypes() async throws { let testDocs = [ "a": ["key": BsonTimestamp(seconds: 1, increment: 2)], diff --git a/Firestore/core/CMakeLists.txt b/Firestore/core/CMakeLists.txt index 65238165549..49600ce193e 100644 --- a/Firestore/core/CMakeLists.txt +++ b/Firestore/core/CMakeLists.txt @@ -323,6 +323,7 @@ add_subdirectory(test/unit/bundle) add_subdirectory(test/unit/credentials) add_subdirectory(test/unit/core) add_subdirectory(test/unit/immutable) +add_subdirectory(test/unit/index) add_subdirectory(test/unit/local) add_subdirectory(test/unit/model) add_subdirectory(test/unit/objc) diff --git a/Firestore/core/src/core/target.cc b/Firestore/core/src/core/target.cc index 3002a955da1..20423be2c93 100644 --- a/Firestore/core/src/core/target.cc +++ b/Firestore/core/src/core/target.cc @@ -207,13 +207,13 @@ IndexBoundValues Target::GetUpperBound( Target::IndexBoundValue Target::GetAscendingBound( const Segment& segment, const absl::optional& bound) const { - google_firestore_v1_Value segment_value = model::MinValue(); + google_firestore_v1_Value segment_value = model::InternalMinValue(); bool segment_inclusive = true; // Process all filters to find a value for the current field segment for (const auto& field_filter : GetFieldFiltersForPath(segment.field_path())) { - google_firestore_v1_Value filter_value = model::MinValue(); + google_firestore_v1_Value filter_value = model::InternalMinValue(); bool filter_inclusive = true; switch (field_filter.op()) { @@ -232,7 +232,7 @@ Target::IndexBoundValue Target::GetAscendingBound( break; case FieldFilter::Operator::NotEqual: case FieldFilter::Operator::NotIn: - filter_value = model::MinValue(); + filter_value = model::MinKeyValue(); break; default: // Remaining filters cannot be used as bound. 
@@ -271,13 +271,13 @@ Target::IndexBoundValue Target::GetAscendingBound( Target::IndexBoundValue Target::GetDescendingBound( const Segment& segment, const absl::optional& bound) const { - google_firestore_v1_Value segment_value = model::MaxValue(); + google_firestore_v1_Value segment_value = model::InternalMaxValue(); bool segment_inclusive = true; // Process all filters to find a value for the current field segment for (const auto& field_filter : GetFieldFiltersForPath(segment.field_path())) { - google_firestore_v1_Value filter_value = model::MaxValue(); + google_firestore_v1_Value filter_value = model::InternalMaxValue(); bool filter_inclusive = true; switch (field_filter.op()) { @@ -297,7 +297,7 @@ Target::IndexBoundValue Target::GetDescendingBound( break; case FieldFilter::Operator::NotIn: case FieldFilter::Operator::NotEqual: - filter_value = model::MaxValue(); + filter_value = model::MaxKeyValue(); break; default: // Remaining filters cannot be used as bound. diff --git a/Firestore/core/src/core/target.h b/Firestore/core/src/core/target.h index e22543a5b3e..645907f5e46 100644 --- a/Firestore/core/src/core/target.h +++ b/Firestore/core/src/core/target.h @@ -137,7 +137,7 @@ class Target { * Returns a lower bound of field values that can be used as a starting point * to scan the index defined by `field_index`. * - * Returns `model::MinValue()` if no lower bound exists. + * Returns `model::InternalMinValue()` if no lower bound exists. */ IndexBoundValues GetLowerBound(const model::FieldIndex& field_index) const; @@ -145,7 +145,7 @@ class Target { * Returns an upper bound of field values that can be used as an ending point * when scanning the index defined by `field_index`. * - * Returns `model::MaxValue()` if no upper bound exists. + * Returns `model::InternalMaxValue()` if no upper bound exists. */ IndexBoundValues GetUpperBound(const model::FieldIndex& field_index) const; diff --git a/Firestore/core/src/index/firestore_index_value_writer.cc b/Firestore/core/src/index/firestore_index_value_writer.cc index 4587844b930..9ad2f614bf2 100644 --- a/Firestore/core/src/index/firestore_index_value_writer.cc +++ b/Firestore/core/src/index/firestore_index_value_writer.cc @@ -24,37 +24,20 @@ #include "Firestore/core/src/model/value_util.h" #include "Firestore/core/src/nanopb/nanopb_util.h" +#include "absl/strings/str_split.h" + namespace firebase { namespace firestore { namespace index { namespace { -// Note: This code is copied from the backend. Code that is not used by -// Firestore was removed. +// Note: This file is copied from the backend. Code that is not used by +// Firestore was removed. Code that has different behavior was modified. // The client SDK only supports references to documents from the same database. // We can skip the first five segments. constexpr int DocumentNameOffset = 5; -enum IndexType { - kNull = 5, - kBoolean = 10, - kNan = 13, - kNumber = 15, - kTimestamp = 20, - kString = 25, - kBlob = 30, - kReference = 37, - kGeopoint = 45, - kArray = 50, - kVector = 53, - kMap = 55, - kReferenceSegment = 60, - // A terminator that indicates that a truncatable value was not truncated. - // This must be smaller than all other type labels. 
-  kNotTruncated = 2
-};
-
 void WriteValueTypeLabel(DirectionalIndexByteEncoder* encoder, int type_order) {
   encoder->WriteLong(type_order);
 }
@@ -86,11 +69,16 @@ void WriteIndexEntityRef(pb_bytes_array_t* reference_value,
                          DirectionalIndexByteEncoder* encoder) {
   WriteValueTypeLabel(encoder, IndexType::kReference);
 
-  auto path = model::ResourcePath::FromStringView(
-      nanopb::MakeStringView(reference_value));
-  auto num_segments = path.size();
+  // We must allow empty strings. We could be dealing with a reference_value
+  // with empty segments. The reference value has the following format:
+  // projects/<project_id>/databases/<database_id>/documents/<document_path>
+  // So we may have something like:
+  // projects//databases//documents/coll_1/doc_1
+  std::vector<std::string> segments = absl::StrSplit(
+      nanopb::MakeStringView(reference_value), '/', absl::AllowEmpty());
+  auto num_segments = segments.size();
   for (size_t index = DocumentNameOffset; index < num_segments; ++index) {
-    const std::string& segment = path[index];
+    const std::string& segment = segments[index];
     WriteValueTypeLabel(encoder, IndexType::kReferenceSegment);
     WriteUnlabeledIndexString(segment, encoder);
   }
@@ -141,6 +129,78 @@ void WriteIndexMap(google_firestore_v1_MapValue map_index_value,
   }
 }
 
+void WriteIndexBsonBinaryData(
+    const google_firestore_v1_MapValue& map_index_value,
+    DirectionalIndexByteEncoder* encoder) {
+  WriteValueTypeLabel(encoder, IndexType::kBsonBinaryData);
+  encoder->WriteBytes(map_index_value.fields[0].value.bytes_value);
+  WriteTruncationMarker(encoder);
+}
+
+void WriteIndexBsonObjectId(const google_firestore_v1_MapValue& map_index_value,
+                            DirectionalIndexByteEncoder* encoder) {
+  WriteValueTypeLabel(encoder, IndexType::kBsonObjectId);
+  encoder->WriteBytes(map_index_value.fields[0].value.string_value);
+}
+
+void WriteIndexBsonTimestamp(
+    const google_firestore_v1_MapValue& map_index_value,
+    DirectionalIndexByteEncoder* encoder) {
+  WriteValueTypeLabel(encoder, IndexType::kBsonTimestamp);
+
+  // Figure out the seconds and increment value.
+  const google_firestore_v1_MapValue& inner_map =
+      map_index_value.fields[0].value.map_value;
+  absl::optional seconds_index = model::IndexOfKey(
+      inner_map, model::kRawBsonTimestampTypeSecondsFieldValue,
+      model::kBsonTimestampTypeSecondsFieldValue);
+  absl::optional increment_index = model::IndexOfKey(
+      inner_map, model::kRawBsonTimestampTypeIncrementFieldValue,
+      model::kBsonTimestampTypeIncrementFieldValue);
+  const int64_t seconds =
+      inner_map.fields[seconds_index.value()].value.integer_value;
+  const int64_t increment =
+      inner_map.fields[increment_index.value()].value.integer_value;
+
+  // BsonTimestamp is encoded as a 64-bit long.
+  int64_t value_to_encode = (seconds << 32) | (increment & 0xFFFFFFFFL);
+  encoder->WriteLong(value_to_encode);
+}
+
+void WriteIndexRegexValue(const google_firestore_v1_MapValue& map_index_value,
+                          DirectionalIndexByteEncoder* encoder) {
+  WriteValueTypeLabel(encoder, IndexType::kRegex);
+
+  // Figure out the pattern and options.
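+  // (Assumed layout: the regex value arrives as a single-entry map of the
+  //  form { "__regex__": { "pattern": ..., "options": ... } }; fields[0]
+  //  holds that entry and its map_value holds the pattern/options fields.)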
+ const google_firestore_v1_MapValue& inner_map = + map_index_value.fields[0].value.map_value; + absl::optional pattern_index = + model::IndexOfKey(inner_map, model::kRawRegexTypePatternFieldValue, + model::kRegexTypePatternFieldValue); + absl::optional options_index = + model::IndexOfKey(inner_map, model::kRawRegexTypeOptionsFieldValue, + model::kRegexTypeOptionsFieldValue); + const auto& pattern = + inner_map.fields[pattern_index.value()].value.string_value; + const auto& options = + inner_map.fields[options_index.value()].value.string_value; + + // Write pattern and then options. + WriteUnlabeledIndexString(pattern, encoder); + WriteUnlabeledIndexString(options, encoder); + + // Also needs truncation marker. + WriteTruncationMarker(encoder); +} + +void WriteIndexInt32Value(const google_firestore_v1_MapValue& map_index_value, + DirectionalIndexByteEncoder* encoder) { + WriteValueTypeLabel(encoder, IndexType::kNumber); + // Similar to 64-bit integers (see integer_value below), we write 32-bit + // integers as double so that 0 and 0.0 are considered the same. + encoder->WriteDouble(map_index_value.fields[0].value.integer_value); +} + void WriteIndexValueAux(const google_firestore_v1_Value& index_value, DirectionalIndexByteEncoder* encoder) { switch (index_value.which_value_type) { @@ -205,15 +265,38 @@ void WriteIndexValueAux(const google_firestore_v1_Value& index_value, break; } case google_firestore_v1_Value_map_value_tag: - // model::MaxValue() is sentinel map value (see the comment there). - // In that case, we encode the max int value instead. - if (model::IsMaxValue(index_value)) { + // model::InternalMaxValue() is a sentinel map value (see the comment + // there). In that case, we encode the max int value instead. + if (model::IsInternalMaxValue(index_value)) { WriteValueTypeLabel(encoder, std::numeric_limits::max()); break; } else if (model::IsVectorValue(index_value)) { WriteIndexVector(index_value.map_value, encoder); break; + } else if (model::IsMaxKeyValue(index_value)) { + WriteValueTypeLabel(encoder, IndexType::kMaxKey); + break; + } else if (model::IsMinKeyValue(index_value)) { + WriteValueTypeLabel(encoder, IndexType::kMinKey); + break; + } else if (model::IsBsonBinaryData(index_value)) { + WriteIndexBsonBinaryData(index_value.map_value, encoder); + break; + } else if (model::IsRegexValue(index_value)) { + WriteIndexRegexValue(index_value.map_value, encoder); + break; + } else if (model::IsBsonTimestamp(index_value)) { + WriteIndexBsonTimestamp(index_value.map_value, encoder); + break; + } else if (model::IsBsonObjectId(index_value)) { + WriteIndexBsonObjectId(index_value.map_value, encoder); + break; + } else if (model::IsInt32Value(index_value)) { + WriteIndexInt32Value(index_value.map_value, encoder); + break; } + + // For regular maps: WriteIndexMap(index_value.map_value, encoder); WriteTruncationMarker(encoder); break; diff --git a/Firestore/core/src/index/firestore_index_value_writer.h b/Firestore/core/src/index/firestore_index_value_writer.h index 54ac559f2d5..6963b95b6a0 100644 --- a/Firestore/core/src/index/firestore_index_value_writer.h +++ b/Firestore/core/src/index/firestore_index_value_writer.h @@ -24,6 +24,31 @@ namespace firebase { namespace firestore { namespace index { +enum IndexType { + kNull = 5, + kMinKey = 7, + kBoolean = 10, + kNan = 13, + kNumber = 15, + kTimestamp = 20, + kBsonTimestamp = 22, + kString = 25, + kBlob = 30, + kBsonBinaryData = 31, + kReference = 37, + kBsonObjectId = 43, + kGeopoint = 45, + kRegex = 47, + kArray = 50, + kVector = 53, 
+ kMap = 55, + kReferenceSegment = 60, + kMaxKey = 999, + // A terminator that indicates that a truncatable value was not truncated. + // This must be smaller than all other type labels. + kNotTruncated = 2 +}; + /** * Writes an index value using the given encoder. The encoder writes the encoded * bytes into a buffer maintained by `IndexEncodingBuffer` who owns the diff --git a/Firestore/core/src/model/value_util.cc b/Firestore/core/src/model/value_util.cc index 5f3524237a7..e96cfb8707b 100644 --- a/Firestore/core/src/model/value_util.cc +++ b/Firestore/core/src/model/value_util.cc @@ -52,9 +52,9 @@ pb_bytes_array_s* kTypeValueFieldKey = nanopb::MakeBytesArray(kRawTypeValueFieldKey); /** The field value of a maximum proto value. */ -const char* kRawMaxValueFieldValue = "__max__"; -pb_bytes_array_s* kMaxValueFieldValue = - nanopb::MakeBytesArray(kRawMaxValueFieldValue); +const char* kRawInternalMaxValueFieldValue = "__max__"; +pb_bytes_array_s* kInternalMaxValueFieldValue = + nanopb::MakeBytesArray(kRawInternalMaxValueFieldValue); /** The type of a VectorValue proto. */ const char* kRawVectorTypeFieldValue = "__vector__"; @@ -130,8 +130,8 @@ MapType DetectMapType(const google_firestore_v1_Value& value) { // Check for type-based mappings if (IsServerTimestamp(value)) { return MapType::kServerTimestamp; - } else if (IsMaxValue(value)) { - return MapType::kMaxValue; + } else if (IsInternalMaxValue(value)) { + return MapType::kInternalMaxValue; } else if (IsVectorValue(value)) { return MapType::kVector; } @@ -195,8 +195,8 @@ TypeOrder GetTypeOrder(const google_firestore_v1_Value& value) { switch (DetectMapType(value)) { case MapType::kServerTimestamp: return TypeOrder::kServerTimestamp; - case MapType::kMaxValue: - return TypeOrder::kMaxValue; + case MapType::kInternalMaxValue: + return TypeOrder::kInternalMaxValue; case MapType::kVector: return TypeOrder::kVector; case MapType::kMinKey: @@ -548,7 +548,7 @@ ComparisonResult Compare(const google_firestore_v1_Value& left, switch (left_type) { case TypeOrder::kNull: - case TypeOrder::kMaxValue: + case TypeOrder::kInternalMaxValue: // All MinKeys are equal. case TypeOrder::kMinKey: // All MaxKeys are equal. @@ -745,9 +745,7 @@ bool Equals(const google_firestore_v1_Value& lhs, case TypeOrder::kVector: case TypeOrder::kMap: - return MapValueEquals(lhs.map_value, rhs.map_value); - - case TypeOrder::kMaxValue: + case TypeOrder::kInternalMaxValue: return MapValueEquals(lhs.map_value, rhs.map_value); default: @@ -900,6 +898,21 @@ google_firestore_v1_Value GetLowerBound( case google_firestore_v1_Value_map_value_tag: { if (IsVectorValue(value)) { return MinVector(); + } else if (IsBsonObjectId(value)) { + return MinBsonObjectId(); + } else if (IsBsonTimestamp(value)) { + return MinBsonTimestamp(); + } else if (IsBsonBinaryData(value)) { + return MinBsonBinaryData(); + } else if (IsRegexValue(value)) { + return MinRegex(); + } else if (IsInt32Value(value)) { + // int32Value is treated the same as integerValue and doubleValue. 
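+        // For example, the lower bound of Int32Value(7) is MinNumber(), the
+        // same as for the integer 7 and the double 7.0, so all numeric
+        // encodings share a single index range.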
+ return MinNumber(); + } else if (IsMinKeyValue(value)) { + return MinKeyValue(); + } else if (IsMaxKeyValue(value)) { + return MaxKeyValue(); } return MinMap(); @@ -914,29 +927,47 @@ google_firestore_v1_Value GetUpperBound( const google_firestore_v1_Value& value) { switch (value.which_value_type) { case google_firestore_v1_Value_null_value_tag: - return MinBoolean(); + return MinKeyValue(); case google_firestore_v1_Value_boolean_value_tag: return MinNumber(); case google_firestore_v1_Value_integer_value_tag: case google_firestore_v1_Value_double_value_tag: return MinTimestamp(); case google_firestore_v1_Value_timestamp_value_tag: - return MinString(); + return MinBsonTimestamp(); case google_firestore_v1_Value_string_value_tag: return MinBytes(); case google_firestore_v1_Value_bytes_value_tag: - return MinReference(); + return MinBsonBinaryData(); case google_firestore_v1_Value_reference_value_tag: - return MinGeoPoint(); + return MinBsonObjectId(); case google_firestore_v1_Value_geo_point_value_tag: - return MinArray(); + return MinRegex(); case google_firestore_v1_Value_array_value_tag: return MinVector(); case google_firestore_v1_Value_map_value_tag: if (IsVectorValue(value)) { return MinMap(); + } else if (IsMinKeyValue(value)) { + return MinBoolean(); + } else if (IsInt32Value(value)) { + // int32Value is treated the same as integerValue and doubleValue. + return MinTimestamp(); + } else if (IsBsonTimestamp(value)) { + return MinString(); + } else if (IsBsonBinaryData(value)) { + return MinReference(); + } else if (IsBsonObjectId(value)) { + return MinGeoPoint(); + } else if (IsRegexValue(value)) { + return MinArray(); + } else if (IsMaxKeyValue(value)) { + // The upper bound for MaxKey is the internal max value. + return InternalMaxValue(); } - return MaxValue(); + + // For normal maps, the upper bound is MaxKey. + return MaxKeyValue(); default: HARD_FAIL("Invalid type value: %s", value.which_value_type); } @@ -963,14 +994,14 @@ bool IsNullValue(const google_firestore_v1_Value& value) { return value.which_value_type == google_firestore_v1_Value_null_value_tag; } -google_firestore_v1_Value MinValue() { +google_firestore_v1_Value InternalMinValue() { google_firestore_v1_Value null_value; null_value.which_value_type = google_firestore_v1_Value_null_value_tag; null_value.null_value = {}; return null_value; } -bool IsMinValue(const google_firestore_v1_Value& value) { +bool IsInternalMinValue(const google_firestore_v1_Value& value) { return IsNullValue(value); } @@ -979,10 +1010,10 @@ bool IsMinValue(const google_firestore_v1_Value& value) { * values. Underlying it is a map value with a special map field that SDK user * cannot possibly construct. 
*/ -google_firestore_v1_Value MaxValue() { +google_firestore_v1_Value InternalMaxValue() { google_firestore_v1_Value value; value.which_value_type = google_firestore_v1_Value_string_value_tag; - value.string_value = kMaxValueFieldValue; + value.string_value = kInternalMaxValueFieldValue; // Make `field_entry` static so that it has a memory address that outlives // this function's scope; otherwise, using its address in the `map_value` @@ -1007,7 +1038,7 @@ google_firestore_v1_Value MaxValue() { return max_value; } -bool IsMaxValue(const google_firestore_v1_Value& value) { +bool IsInternalMaxValue(const google_firestore_v1_Value& value) { if (value.which_value_type != google_firestore_v1_Value_map_value_tag) { return false; } @@ -1031,9 +1062,10 @@ bool IsMaxValue(const google_firestore_v1_Value& value) { // Comparing the pointer address, then actual content if addresses are // different. - return value.map_value.fields[0].value.string_value == kMaxValueFieldValue || + return value.map_value.fields[0].value.string_value == + kInternalMaxValueFieldValue || nanopb::MakeStringView(value.map_value.fields[0].value.string_value) == - kRawMaxValueFieldValue; + kRawInternalMaxValueFieldValue; } absl::optional IndexOfKey( @@ -1542,6 +1574,38 @@ google_firestore_v1_Value MinBsonBinaryData() { return lower_bound; } +google_firestore_v1_Value MinKeyValue() { + google_firestore_v1_MapValue_FieldsEntry* field_entries = + nanopb::MakeArray(1); + field_entries[0].key = kMinKeyTypeFieldValue; + field_entries[0].value = NullValue(); + google_firestore_v1_MapValue map_value; + map_value.fields_count = 1; + map_value.fields = field_entries; + + google_firestore_v1_Value lower_bound; + lower_bound.which_value_type = google_firestore_v1_Value_map_value_tag; + lower_bound.map_value = map_value; + + return lower_bound; +} + +google_firestore_v1_Value MaxKeyValue() { + google_firestore_v1_MapValue_FieldsEntry* field_entries = + nanopb::MakeArray(1); + field_entries[0].key = kMaxKeyTypeFieldValue; + field_entries[0].value = NullValue(); + google_firestore_v1_MapValue map_value; + map_value.fields_count = 1; + map_value.fields = field_entries; + + google_firestore_v1_Value lower_bound; + lower_bound.which_value_type = google_firestore_v1_Value_map_value_tag; + lower_bound.map_value = map_value; + + return lower_bound; +} + google_firestore_v1_Value MinMap() { google_firestore_v1_Value lowerBound; lowerBound.which_value_type = google_firestore_v1_Value_map_value_tag; diff --git a/Firestore/core/src/model/value_util.h b/Firestore/core/src/model/value_util.h index 5ae2395816b..fbea964a8ec 100644 --- a/Firestore/core/src/model/value_util.h +++ b/Firestore/core/src/model/value_util.h @@ -46,8 +46,8 @@ extern const char* kRawTypeValueFieldKey; extern pb_bytes_array_s* kTypeValueFieldKey; /** The field value of a maximum proto value. */ -extern const char* kRawMaxValueFieldValue; -extern pb_bytes_array_s* kMaxValueFieldValue; +extern const char* kRawInternalMaxValueFieldValue; +extern pb_bytes_array_s* kInternalMaxValueFieldValue; /** The type of a VectorValue proto. 
*/ extern const char* kRawVectorTypeFieldValue; @@ -128,7 +128,7 @@ enum class TypeOrder { kVector = 15, kMap = 16, kMaxKey = 17, - kMaxValue = 18 + kInternalMaxValue = 18 }; /** @@ -139,7 +139,7 @@ enum class TypeOrder { enum class MapType { kNormal = 0, kServerTimestamp = 1, - kMaxValue = 2, + kInternalMaxValue = 2, kVector = 3, kMinKey = 4, kMaxKey = 5, @@ -229,28 +229,31 @@ bool IsNullValue(const google_firestore_v1_Value& value); * The returned value might point to heap allocated memory that is owned by * this function. To take ownership of this memory, call `DeepClone`. */ -google_firestore_v1_Value MinValue(); +google_firestore_v1_Value InternalMinValue(); -/** Returns `true` if `value` is MinValue() in its Protobuf representation. */ -bool IsMinValue(const google_firestore_v1_Value& value); +/** + * Returns `true` if `value` is InternalMinValue() in its Protobuf + * representation. + */ +bool IsInternalMinValue(const google_firestore_v1_Value& value); /** * Returns a Protobuf value that is larger than any legitimate value SDK * users can create. * * Under the hood, it is a sentinel Protobuf Map with special fields that - * Firestore comparison logic always return true for `MaxValue() > v`, for any - * v users can create, regardless `v`'s type and value. + * Firestore comparison logic always return true for `InternalMaxValue() > v`, + * for any `v` users can create, regardless `v`'s type and value. * * The returned value might point to heap allocated memory that is owned by * this function. To take ownership of this memory, call `DeepClone`. */ -google_firestore_v1_Value MaxValue(); +google_firestore_v1_Value InternalMaxValue(); /** - * Returns `true` if `value` is equal to `MaxValue()`. + * Returns `true` if `value` is equal to `InternalMaxValue()`. */ -bool IsMaxValue(const google_firestore_v1_Value& value); +bool IsInternalMaxValue(const google_firestore_v1_Value& value); /** * Returns `true` if `value` represents a VectorValue. 
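The next hunk declares `MinKeyValue()` and `MaxKeyValue()`, the proto representations of the user-visible MinKey and MaxKey types. A small sketch of their comparison semantics, assuming the `Compare`/`Equals` helpers declared in this header:

  #include <cassert>
  #include "Firestore/core/src/model/value_util.h"

  void CheckMinMaxKeySemantics() {
    namespace model = firebase::firestore::model;
    using firebase::firestore::util::ComparisonResult;

    // Every MinKey compares equal to every other MinKey (same for MaxKey).
    assert(model::Equals(model::MinKeyValue(), model::MinKeyValue()));
    // MinKey sorts before MaxKey (and before every other user-visible
    // value except null).
    assert(model::Compare(model::MinKeyValue(), model::MaxKeyValue()) ==
           ComparisonResult::Ascending);
  }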
@@ -331,6 +334,10 @@ google_firestore_v1_Value MinReference(); google_firestore_v1_Value MinGeoPoint(); +google_firestore_v1_Value MinKeyValue(); + +google_firestore_v1_Value MaxKeyValue(); + google_firestore_v1_Value MinBsonBinaryData(); google_firestore_v1_Value MinBsonObjectId(); diff --git a/Firestore/core/test/unit/core/target_test.cc b/Firestore/core/test/unit/core/target_test.cc index 18de92c15f4..e48db89a13b 100644 --- a/Firestore/core/test/unit/core/target_test.cc +++ b/Firestore/core/test/unit/core/target_test.cc @@ -186,11 +186,11 @@ TEST(TargetTest, OrderByQueryBound) { Target target = Query("c").AddingOrderBy(OrderBy("foo")).ToTarget(); FieldIndex index = MakeFieldIndex("c", "foo", Segment::Kind::kAscending); auto lower_bound = target.GetLowerBound(index); - EXPECT_EQ(lower_bound.values[0], model::MinValue()); + EXPECT_EQ(lower_bound.values[0], model::InternalMinValue()); EXPECT_TRUE(lower_bound.inclusive); auto upper_bound = target.GetUpperBound(index); - EXPECT_EQ(upper_bound.values[0], model::MaxValue()); + EXPECT_EQ(upper_bound.values[0], model::InternalMaxValue()); EXPECT_TRUE(upper_bound.inclusive); } @@ -219,7 +219,7 @@ TEST(TargetTest, StartingAtQueryBound) { VerifyBound(lower_bound, true, {*Value("bar")}); auto upper_bound = target.GetUpperBound(index); - EXPECT_EQ(upper_bound.values[0], model::MaxValue()); + EXPECT_EQ(upper_bound.values[0], model::InternalMaxValue()); EXPECT_TRUE(upper_bound.inclusive); } @@ -287,7 +287,7 @@ TEST(TargetTest, EndingAtQueryBound) { FieldIndex index = MakeFieldIndex("c", "foo", Segment::Kind::kAscending); auto lower_bound = target.GetLowerBound(index); - ASSERT_EQ(lower_bound.values[0], model::MinValue()); + ASSERT_EQ(lower_bound.values[0], model::InternalMinValue()); ASSERT_TRUE(lower_bound.inclusive); auto upper_bound = target.GetUpperBound(index); diff --git a/Firestore/core/test/unit/index/CMakeLists.txt b/Firestore/core/test/unit/index/CMakeLists.txt new file mode 100644 index 00000000000..648359c5c24 --- /dev/null +++ b/Firestore/core/test/unit/index/CMakeLists.txt @@ -0,0 +1,27 @@ +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +if(NOT FIREBASE_IOS_BUILD_TESTS) + return() +endif() + +file(GLOB sources *.cc) +firebase_ios_add_test(firestore_index_test ${sources}) + +target_link_libraries( + firestore_index_test PRIVATE + GMock::GMock + firestore_core + firestore_testutil +) diff --git a/Firestore/core/test/unit/index/index_value_writer_test.cc b/Firestore/core/test/unit/index/index_value_writer_test.cc new file mode 100644 index 00000000000..b66bd182a54 --- /dev/null +++ b/Firestore/core/test/unit/index/index_value_writer_test.cc @@ -0,0 +1,363 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "Firestore/core/src/index/firestore_index_value_writer.h" +#include "Firestore/core/src/index/index_byte_encoder.h" +#include "Firestore/core/src/nanopb/nanopb_util.h" +#include "Firestore/core/test/unit/testutil/testutil.h" +#include "gtest/gtest.h" + +namespace firebase { +namespace firestore { +namespace index { + +namespace { + +using testutil::BsonBinaryData; +using testutil::BsonObjectId; +using testutil::BsonTimestamp; +using testutil::Int32; +using testutil::MaxKey; +using testutil::MinKey; +using testutil::Regex; +using testutil::VectorType; + +TEST(IndexValueWriterTest, writeIndexValueSupportsVector) { + // Value + auto vector = VectorType(1, 2, 3); + + // Actual + IndexEncodingBuffer encoder; + WriteIndexValue(*vector, encoder.ForKind(model::Segment::Kind::kAscending)); + auto& actual_bytes = encoder.GetEncodedBytes(); + + // Expected + IndexEncodingBuffer expected_encoder; + DirectionalIndexByteEncoder* index_byte_encoder = + expected_encoder.ForKind(model::Segment::Kind::kAscending); + index_byte_encoder->WriteLong(IndexType::kVector); // Vector type + index_byte_encoder->WriteLong(IndexType::kNumber); // Number type + index_byte_encoder->WriteLong(3); // Vector Length + index_byte_encoder->WriteLong(IndexType::kString); + index_byte_encoder->WriteString("value"); + index_byte_encoder->WriteLong(IndexType::kArray); + index_byte_encoder->WriteLong(IndexType::kNumber); + index_byte_encoder->WriteDouble(1); // position 0 + index_byte_encoder->WriteLong(IndexType::kNumber); + index_byte_encoder->WriteDouble(2); // position 1 + index_byte_encoder->WriteLong(IndexType::kNumber); + index_byte_encoder->WriteDouble(3); // position 2 + index_byte_encoder->WriteLong(IndexType::kNotTruncated); + index_byte_encoder->WriteInfinity(); + auto& expected_bytes = expected_encoder.GetEncodedBytes(); + + EXPECT_EQ(actual_bytes, expected_bytes); +} + +TEST(IndexValueWriterTest, writeIndexValueSupportsEmptyVector) { + // Value - Create an empty vector + auto vector = VectorType(); + + // Actual + IndexEncodingBuffer encoder; + WriteIndexValue(*vector, encoder.ForKind(model::Segment::Kind::kAscending)); + auto& actual_bytes = encoder.GetEncodedBytes(); + + // Expected + IndexEncodingBuffer expected_encoder; + DirectionalIndexByteEncoder* index_byte_encoder = + expected_encoder.ForKind(model::Segment::Kind::kAscending); + + index_byte_encoder->WriteLong(IndexType::kVector); + index_byte_encoder->WriteLong(IndexType::kNumber); + index_byte_encoder->WriteLong(0); // vector length + index_byte_encoder->WriteLong(IndexType::kString); + index_byte_encoder->WriteString("value"); + index_byte_encoder->WriteLong(IndexType::kArray); + index_byte_encoder->WriteLong(IndexType::kNotTruncated); + index_byte_encoder->WriteInfinity(); + auto& expected_bytes = expected_encoder.GetEncodedBytes(); + + EXPECT_EQ(actual_bytes, expected_bytes); +} + +TEST(IndexValueWriterTest, writeIndexValueSupportsBsonObjectId) { + // Value + auto value = BsonObjectId("507f191e810c19729de860ea"); + + // Actual + IndexEncodingBuffer encoder; + WriteIndexValue(*value, 
encoder.ForKind(model::Segment::Kind::kAscending)); + auto& actual_bytes = encoder.GetEncodedBytes(); + + // Expected + IndexEncodingBuffer expected_encoder; + DirectionalIndexByteEncoder* index_byte_encoder = + expected_encoder.ForKind(model::Segment::Kind::kAscending); + index_byte_encoder->WriteLong(IndexType::kBsonObjectId); + index_byte_encoder->WriteBytes( + nanopb::MakeBytesArray("507f191e810c19729de860ea")); + index_byte_encoder->WriteInfinity(); + auto& expected_bytes = expected_encoder.GetEncodedBytes(); + + EXPECT_EQ(actual_bytes, expected_bytes); +} + +TEST(IndexValueWriterTest, writeIndexValueSupportsBsonBinaryData) { + // Value + auto value = BsonBinaryData(1, {1, 2, 3}); + + // Actual + IndexEncodingBuffer encoder; + WriteIndexValue(*value, encoder.ForKind(model::Segment::Kind::kAscending)); + auto& actual_bytes = encoder.GetEncodedBytes(); + + // Expected + IndexEncodingBuffer expected_encoder; + DirectionalIndexByteEncoder* index_byte_encoder = + expected_encoder.ForKind(model::Segment::Kind::kAscending); + index_byte_encoder->WriteLong(IndexType::kBsonBinaryData); + // Expected bytes: subtype (1) + data {1, 2, 3} + const uint8_t binary_payload[] = {1, 1, 2, 3}; + index_byte_encoder->WriteBytes( + nanopb::MakeBytesArray(binary_payload, sizeof(binary_payload))); + index_byte_encoder->WriteLong(IndexType::kNotTruncated); + index_byte_encoder->WriteInfinity(); + auto& expected_bytes = expected_encoder.GetEncodedBytes(); + + EXPECT_EQ(actual_bytes, expected_bytes); +} + +TEST(IndexValueWriterTest, writeIndexValueSupportsBsonBinaryWithEmptyData) { + // Value + auto value = BsonBinaryData(1, {}); + + // Actual + IndexEncodingBuffer encoder; + WriteIndexValue(*value, encoder.ForKind(model::Segment::Kind::kAscending)); + auto& actual_bytes = encoder.GetEncodedBytes(); + + // Expected + IndexEncodingBuffer expected_encoder; + DirectionalIndexByteEncoder* index_byte_encoder = + expected_encoder.ForKind(model::Segment::Kind::kAscending); + index_byte_encoder->WriteLong(IndexType::kBsonBinaryData); + // Expected bytes: subtype (1) only + const uint8_t binary_payload[] = {1}; + index_byte_encoder->WriteBytes( + nanopb::MakeBytesArray(binary_payload, sizeof(binary_payload))); + index_byte_encoder->WriteLong(IndexType::kNotTruncated); + index_byte_encoder->WriteInfinity(); + auto& expected_bytes = expected_encoder.GetEncodedBytes(); + + EXPECT_EQ(actual_bytes, expected_bytes); +} + +TEST(IndexValueWriterTest, writeIndexValueSupportsBsonTimestamp) { + // Value + auto value = BsonTimestamp(1, 2); + + // Actual + IndexEncodingBuffer encoder; + WriteIndexValue(*value, encoder.ForKind(model::Segment::Kind::kAscending)); + auto& actual_bytes = encoder.GetEncodedBytes(); + + // Expected + IndexEncodingBuffer expected_encoder; + DirectionalIndexByteEncoder* index_byte_encoder = + expected_encoder.ForKind(model::Segment::Kind::kAscending); + index_byte_encoder->WriteLong(IndexType::kBsonTimestamp); + uint64_t timestamp_encoded = (1ULL << 32) | (2); + index_byte_encoder->WriteLong(timestamp_encoded); + index_byte_encoder->WriteInfinity(); + auto& expected_bytes = expected_encoder.GetEncodedBytes(); + + EXPECT_EQ(actual_bytes, expected_bytes); +} + +TEST(IndexValueWriterTest, writeIndexValueSupportsLargestBsonTimestamp) { + // Value + auto value = BsonTimestamp(4294967295ULL, 4294967295ULL); + + // Actual + IndexEncodingBuffer encoder; + WriteIndexValue(*value, encoder.ForKind(model::Segment::Kind::kAscending)); + auto& actual_bytes = encoder.GetEncodedBytes(); + + // Expected + IndexEncodingBuffer 
expected_encoder; + DirectionalIndexByteEncoder* index_byte_encoder = + expected_encoder.ForKind(model::Segment::Kind::kAscending); + index_byte_encoder->WriteLong(IndexType::kBsonTimestamp); + uint64_t timestamp_encoded = (4294967295ULL << 32) | (4294967295ULL); + index_byte_encoder->WriteLong(timestamp_encoded); + index_byte_encoder->WriteInfinity(); + auto& expected_bytes = expected_encoder.GetEncodedBytes(); + + EXPECT_EQ(actual_bytes, expected_bytes); +} + +TEST(IndexValueWriterTest, writeIndexValueSupportsSmallestBsonTimestamp) { + // Value + auto value = BsonTimestamp(0, 0); + + // Actual + IndexEncodingBuffer encoder; + WriteIndexValue(*value, encoder.ForKind(model::Segment::Kind::kAscending)); + auto& actual_bytes = encoder.GetEncodedBytes(); + + // Expected + IndexEncodingBuffer expected_encoder; + DirectionalIndexByteEncoder* index_byte_encoder = + expected_encoder.ForKind(model::Segment::Kind::kAscending); + index_byte_encoder->WriteLong(IndexType::kBsonTimestamp); + index_byte_encoder->WriteLong(0); // (0 << 32 | 0) + index_byte_encoder->WriteInfinity(); + auto& expected_bytes = expected_encoder.GetEncodedBytes(); + + EXPECT_EQ(actual_bytes, expected_bytes); +} + +TEST(IndexValueWriterTest, writeIndexValueSupportsRegex) { + // Value + auto value = Regex("^foo", "i"); + + // Actual + IndexEncodingBuffer encoder; + WriteIndexValue(*value, encoder.ForKind(model::Segment::Kind::kAscending)); + auto& actual_bytes = encoder.GetEncodedBytes(); + + // Expected + IndexEncodingBuffer expected_encoder; + DirectionalIndexByteEncoder* index_byte_encoder = + expected_encoder.ForKind(model::Segment::Kind::kAscending); + index_byte_encoder->WriteLong(IndexType::kRegex); + index_byte_encoder->WriteString("^foo"); + index_byte_encoder->WriteString("i"); + index_byte_encoder->WriteLong(IndexType::kNotTruncated); + index_byte_encoder->WriteInfinity(); + auto& expected_bytes = expected_encoder.GetEncodedBytes(); + + EXPECT_EQ(actual_bytes, expected_bytes); +} + +TEST(IndexValueWriterTest, writeIndexValueSupportsInt32) { + // Value + auto value = Int32(1); + + // Actual + IndexEncodingBuffer encoder; + WriteIndexValue(*value, encoder.ForKind(model::Segment::Kind::kAscending)); + auto& actual_bytes = encoder.GetEncodedBytes(); + + // Expected + IndexEncodingBuffer expected_encoder; + DirectionalIndexByteEncoder* index_byte_encoder = + expected_encoder.ForKind(model::Segment::Kind::kAscending); + index_byte_encoder->WriteLong(IndexType::kNumber); + index_byte_encoder->WriteDouble(1.0); + index_byte_encoder->WriteInfinity(); + auto& expected_bytes = expected_encoder.GetEncodedBytes(); + + EXPECT_EQ(actual_bytes, expected_bytes); +} + +TEST(IndexValueWriterTest, writeIndexValueSupportsLargestInt32) { + // Value + auto value = Int32(2147483647); + + // Actual + IndexEncodingBuffer encoder; + WriteIndexValue(*value, encoder.ForKind(model::Segment::Kind::kAscending)); + auto& actual_bytes = encoder.GetEncodedBytes(); + + // Expected + IndexEncodingBuffer expected_encoder; + DirectionalIndexByteEncoder* index_byte_encoder = + expected_encoder.ForKind(model::Segment::Kind::kAscending); + index_byte_encoder->WriteLong(IndexType::kNumber); + index_byte_encoder->WriteDouble(2147483647.0); + index_byte_encoder->WriteInfinity(); + auto& expected_bytes = expected_encoder.GetEncodedBytes(); + + EXPECT_EQ(actual_bytes, expected_bytes); +} + +TEST(IndexValueWriterTest, writeIndexValueSupportsSmallestInt32) { + // Value + auto value = Int32(-2147483648); + + // Actual + IndexEncodingBuffer encoder; + 
WriteIndexValue(*value, encoder.ForKind(model::Segment::Kind::kAscending)); + auto& actual_bytes = encoder.GetEncodedBytes(); + + // Expected + IndexEncodingBuffer expected_encoder; + DirectionalIndexByteEncoder* index_byte_encoder = + expected_encoder.ForKind(model::Segment::Kind::kAscending); + index_byte_encoder->WriteLong(IndexType::kNumber); + index_byte_encoder->WriteDouble(-2147483648.0); + index_byte_encoder->WriteInfinity(); + auto& expected_bytes = expected_encoder.GetEncodedBytes(); + + EXPECT_EQ(actual_bytes, expected_bytes); +} + +TEST(IndexValueWriterTest, writeIndexValueSupportsMinKey) { + // Value + auto value = MinKey(); + + // Actual + IndexEncodingBuffer encoder; + WriteIndexValue(*value, encoder.ForKind(model::Segment::Kind::kAscending)); + auto& actual_bytes = encoder.GetEncodedBytes(); + + // Expected + IndexEncodingBuffer expected_encoder; + DirectionalIndexByteEncoder* index_byte_encoder = + expected_encoder.ForKind(model::Segment::Kind::kAscending); + index_byte_encoder->WriteLong(IndexType::kMinKey); + index_byte_encoder->WriteInfinity(); + auto& expected_bytes = expected_encoder.GetEncodedBytes(); + + EXPECT_EQ(actual_bytes, expected_bytes); +} + +TEST(IndexValueWriterTest, writeIndexValueSupportsMaxKey) { + // Value + auto value = MaxKey(); + + // Actual + IndexEncodingBuffer encoder; + WriteIndexValue(*value, encoder.ForKind(model::Segment::Kind::kAscending)); + auto& actual_bytes = encoder.GetEncodedBytes(); + + // Expected + IndexEncodingBuffer expected_encoder; + DirectionalIndexByteEncoder* index_byte_encoder = + expected_encoder.ForKind(model::Segment::Kind::kAscending); + index_byte_encoder->WriteLong(IndexType::kMaxKey); + index_byte_encoder->WriteInfinity(); + auto& expected_bytes = expected_encoder.GetEncodedBytes(); + + EXPECT_EQ(actual_bytes, expected_bytes); +} + +} // namespace +} // namespace index +} // namespace firestore +} // namespace firebase diff --git a/Firestore/core/test/unit/local/leveldb_index_manager_test.cc b/Firestore/core/test/unit/local/leveldb_index_manager_test.cc index 3cbc7667dd8..895d8acb2e0 100644 --- a/Firestore/core/test/unit/local/leveldb_index_manager_test.cc +++ b/Firestore/core/test/unit/local/leveldb_index_manager_test.cc @@ -15,6 +15,7 @@ */ #include "Firestore/core/src/local/leveldb_index_manager.h" +#include "Firestore/core/include/firebase/firestore/geo_point.h" #include "Firestore/core/src/core/bound.h" #include "Firestore/core/src/local/leveldb_persistence.h" #include "Firestore/core/src/model/field_index.h" @@ -39,16 +40,26 @@ using model::ResourcePath; using model::Segment; using testutil::AndFilters; using testutil::Array; +using testutil::BlobValue; +using testutil::BsonBinaryData; +using testutil::BsonObjectId; +using testutil::BsonTimestamp; using testutil::CollectionGroupQuery; using testutil::DeletedDoc; using testutil::Doc; using testutil::Filter; +using testutil::Int32; using testutil::Key; using testutil::MakeFieldIndex; using testutil::Map; +using testutil::MaxKey; +using testutil::MinKey; using testutil::OrderBy; using testutil::OrFilters; using testutil::Query; +using testutil::Ref; +using testutil::Regex; +using testutil::Value; using testutil::VectorType; using testutil::Version; @@ -976,6 +987,525 @@ TEST_F(LevelDbIndexManagerTest, IndexVectorValueFields) { }); } +TEST_F(LevelDbIndexManagerTest, IndexBsonObjectIdFields) { + persistence_->Run("TestIndexBsonObjectIdFields", [&]() { + index_manager_->Start(); + index_manager_->AddFieldIndex( + MakeFieldIndex("coll", "key", 
model::Segment::kAscending)); + + AddDoc("coll/doc1", Map("key", BsonObjectId("507f191e810c19729de860ea"))); + AddDoc("coll/doc2", Map("key", BsonObjectId("507f191e810c19729de860eb"))); + AddDoc("coll/doc3", Map("key", BsonObjectId("507f191e810c19729de860ec"))); + + auto query = Query("coll").AddingOrderBy(OrderBy("key")); + { + SCOPED_TRACE("no filter"); + VerifyResults(query, {"coll/doc1", "coll/doc2", "coll/doc3"}); + } + + query = Query("coll") + .AddingOrderBy(OrderBy("key")) + .AddingFilter(Filter( + "key", "==", BsonObjectId("507f191e810c19729de860ea"))); + { + SCOPED_TRACE("Query BsonObjectId with EqualTo filter"); + VerifyResults(query, {"coll/doc1"}); + } + + query = Query("coll") + .AddingOrderBy(OrderBy("key")) + .AddingFilter(Filter( + "key", "!=", BsonObjectId("507f191e810c19729de860ea"))); + { + SCOPED_TRACE("Query BsonObjectId with NotEqualTo filter"); + VerifyResults(query, {"coll/doc2", "coll/doc3"}); + } + + query = Query("coll") + .AddingOrderBy(OrderBy("key")) + .AddingFilter(Filter( + "key", ">=", BsonObjectId("507f191e810c19729de860eb"))); + { + SCOPED_TRACE("Query BsonObjectId with GreaterThanOrEqualTo filter"); + VerifyResults(query, {"coll/doc2", "coll/doc3"}); + } + + query = Query("coll") + .AddingOrderBy(OrderBy("key")) + .AddingFilter(Filter( + "key", "<=", BsonObjectId("507f191e810c19729de860eb"))); + { + SCOPED_TRACE("Query BsonObjectId with LessThanOrEqualTo filter"); + VerifyResults(query, {"coll/doc1", "coll/doc2"}); + } + + query = Query("coll") + .AddingOrderBy(OrderBy("key")) + .AddingFilter(Filter("key", ">", + BsonObjectId("507f191e810c19729de860eb"))); + { + SCOPED_TRACE("Query BsonObjectId with GreaterThan filter"); + VerifyResults(query, {"coll/doc3"}); + } + + query = Query("coll") + .AddingOrderBy(OrderBy("key")) + .AddingFilter(Filter("key", "<", + BsonObjectId("507f191e810c19729de860eb"))); + { + SCOPED_TRACE("Query BsonObjectId with LessThan filter"); + VerifyResults(query, {"coll/doc1"}); + } + + query = Query("coll") + .AddingOrderBy(OrderBy("key")) + .AddingFilter(Filter("key", ">", + BsonObjectId("507f191e810c19729de860ec"))); + { + SCOPED_TRACE( + "Query BsonObjectId with GreaterThan filter and empty result set"); + VerifyResults(query, {}); + } + + query = Query("coll") + .AddingOrderBy(OrderBy("key")) + .AddingFilter(Filter("key", "<", + BsonObjectId("507f191e810c19729de860ea"))); + { + SCOPED_TRACE( + "Query BsonObjectId with LessThan filter and empty result set"); + VerifyResults(query, {}); + } + }); +} + +TEST_F(LevelDbIndexManagerTest, IndexBsonBinaryDataFields) { + persistence_->Run("TestIndexBsonBinaryDataFields", [&]() { + index_manager_->Start(); + index_manager_->AddFieldIndex( + MakeFieldIndex("coll", "key", model::Segment::kAscending)); + + AddDoc("coll/doc1", Map("key", BsonBinaryData(1, {1, 2, 3}))); + AddDoc("coll/doc2", Map("key", BsonBinaryData(1, {1, 2, 4}))); + AddDoc("coll/doc3", Map("key", BsonBinaryData(1, {2, 1, 2}))); + + auto base_query = Query("coll").AddingOrderBy(OrderBy("key")); + + { + SCOPED_TRACE("no filter"); + VerifyResults(base_query, {"coll/doc1", "coll/doc2", "coll/doc3"}); + } + { + SCOPED_TRACE("Query BsonBinaryData with EqualTo filter"); + auto query = base_query.AddingFilter( + Filter("key", "==", BsonBinaryData(1, {1, 2, 3}))); + VerifyResults(query, {"coll/doc1"}); + } + { + SCOPED_TRACE("Query BsonBinaryData with NotEqualTo filter"); + auto query = base_query.AddingFilter( + Filter("key", "!=", BsonBinaryData(1, {1, 2, 3}))); + VerifyResults(query, {"coll/doc2", "coll/doc3"}); + } + { + 
SCOPED_TRACE("Query BsonBinaryData with GreaterThanOrEqualTo filter"); + auto query = base_query.AddingFilter( + Filter("key", ">=", BsonBinaryData(1, {1, 2, 4}))); + VerifyResults(query, {"coll/doc2", "coll/doc3"}); + } + { + SCOPED_TRACE("Query BsonBinaryData with LessThanOrEqualTo filter"); + auto query = base_query.AddingFilter( + Filter("key", "<=", BsonBinaryData(1, {1, 2, 4}))); + VerifyResults(query, {"coll/doc1", "coll/doc2"}); + } + { + SCOPED_TRACE("Query BsonBinaryData with GreaterThan filter"); + auto query = base_query.AddingFilter( + Filter("key", ">", BsonBinaryData(1, {1, 2, 4}))); + VerifyResults(query, {"coll/doc3"}); + } + { + SCOPED_TRACE("Query BsonBinaryData with LessThan filter"); + auto query = base_query.AddingFilter( + Filter("key", "<", BsonBinaryData(1, {1, 2, 4}))); + VerifyResults(query, {"coll/doc1"}); + } + { + SCOPED_TRACE( + "Query BsonBinaryData with GreaterThan filter and empty result set"); + auto query = base_query.AddingFilter( + Filter("key", ">", BsonBinaryData(1, {2, 1, 2}))); + VerifyResults(query, {}); + } + { + SCOPED_TRACE( + "Query BsonBinaryData with LessThan filter and empty result set"); + auto query = base_query.AddingFilter( + Filter("key", "<", BsonBinaryData(1, {1, 2, 3}))); + VerifyResults(query, {}); + } + }); +} + +TEST_F(LevelDbIndexManagerTest, IndexBsonTimestampFields) { + persistence_->Run("TestIndexBsonTimestampFields", [&]() { + index_manager_->Start(); + index_manager_->AddFieldIndex( + MakeFieldIndex("coll", "key", model::Segment::kAscending)); + + AddDoc("coll/doc1", Map("key", BsonTimestamp(1, 1))); + AddDoc("coll/doc2", Map("key", BsonTimestamp(1, 2))); + AddDoc("coll/doc3", Map("key", BsonTimestamp(2, 1))); + + auto base_query = Query("coll").AddingOrderBy(OrderBy("key")); + + { + SCOPED_TRACE("no filter"); + VerifyResults(base_query, {"coll/doc1", "coll/doc2", "coll/doc3"}); + } + { + SCOPED_TRACE("Query BsonTimestamp with EqualTo filter"); + auto query = + base_query.AddingFilter(Filter("key", "==", BsonTimestamp(1, 1))); + VerifyResults(query, {"coll/doc1"}); + } + { + SCOPED_TRACE("Query BsonTimestamp with NotEqualTo filter"); + auto query = + base_query.AddingFilter(Filter("key", "!=", BsonTimestamp(1, 1))); + VerifyResults(query, {"coll/doc2", "coll/doc3"}); + } + { + SCOPED_TRACE("Query BsonTimestamp with GreaterThanOrEqualTo filter"); + auto query = + base_query.AddingFilter(Filter("key", ">=", BsonTimestamp(1, 2))); + VerifyResults(query, {"coll/doc2", "coll/doc3"}); + } + { + SCOPED_TRACE("Query BsonTimestamp with LessThanOrEqualTo filter"); + auto query = + base_query.AddingFilter(Filter("key", "<=", BsonTimestamp(1, 2))); + VerifyResults(query, {"coll/doc1", "coll/doc2"}); + } + { + SCOPED_TRACE("Query BsonTimestamp with GreaterThan filter"); + auto query = + base_query.AddingFilter(Filter("key", ">", BsonTimestamp(1, 2))); + VerifyResults(query, {"coll/doc3"}); + } + { + SCOPED_TRACE("Query BsonTimestamp with LessThan filter"); + auto query = + base_query.AddingFilter(Filter("key", "<", BsonTimestamp(1, 2))); + VerifyResults(query, {"coll/doc1"}); + } + { + SCOPED_TRACE( + "Query BsonTimestamp with GreaterThan filter and empty result set"); + auto query = + base_query.AddingFilter(Filter("key", ">", BsonTimestamp(2, 1))); + VerifyResults(query, {}); + } + { + SCOPED_TRACE( + "Query BsonTimestamp with LessThan filter and empty result set"); + auto query = + base_query.AddingFilter(Filter("key", "<", BsonTimestamp(1, 1))); + VerifyResults(query, {}); + } + }); +} + +TEST_F(LevelDbIndexManagerTest, 
IndexInt32Fields) { + persistence_->Run("TestIndexInt32Fields", [&]() { + index_manager_->Start(); + index_manager_->AddFieldIndex( + MakeFieldIndex("coll", "key", model::Segment::kAscending)); + + AddDoc("coll/doc1", Map("key", Int32(1))); + AddDoc("coll/doc2", Map("key", Int32(2))); + AddDoc("coll/doc3", Map("key", Int32(3))); + + auto base_query = Query("coll").AddingOrderBy(OrderBy("key")); + + { + SCOPED_TRACE("no filter"); + VerifyResults(base_query, {"coll/doc1", "coll/doc2", "coll/doc3"}); + } + { + SCOPED_TRACE("Query Int32 with EqualTo filter"); + auto query = base_query.AddingFilter(Filter("key", "==", Int32(1))); + VerifyResults(query, {"coll/doc1"}); + } + { + SCOPED_TRACE("Query Int32 with NotEqualTo filter"); + auto query = base_query.AddingFilter(Filter("key", "!=", Int32(1))); + VerifyResults(query, {"coll/doc2", "coll/doc3"}); + } + { + SCOPED_TRACE("Query Int32 with GreaterThanOrEqualTo filter"); + auto query = base_query.AddingFilter(Filter("key", ">=", Int32(2))); + VerifyResults(query, {"coll/doc2", "coll/doc3"}); + } + { + SCOPED_TRACE("Query Int32 with LessThanOrEqualTo filter"); + auto query = base_query.AddingFilter(Filter("key", "<=", Int32(2))); + VerifyResults(query, {"coll/doc1", "coll/doc2"}); + } + { + SCOPED_TRACE("Query Int32 with GreaterThan filter"); + auto query = base_query.AddingFilter(Filter("key", ">", Int32(2))); + VerifyResults(query, {"coll/doc3"}); + } + { + SCOPED_TRACE("Query Int32 with LessThan filter"); + auto query = base_query.AddingFilter(Filter("key", "<", Int32(2))); + VerifyResults(query, {"coll/doc1"}); + } + { + SCOPED_TRACE("Query Int32 with GreaterThan filter and empty result set"); + auto query = base_query.AddingFilter(Filter("key", ">", Int32(3))); + VerifyResults(query, {}); + } + { + SCOPED_TRACE("Query Int32 with LessThan filter and empty result set"); + auto query = base_query.AddingFilter(Filter("key", "<", Int32(1))); + VerifyResults(query, {}); + } + }); +} + +TEST_F(LevelDbIndexManagerTest, IndexRegexFields) { + persistence_->Run("TestIndexRegexFields", [&]() { + index_manager_->Start(); + index_manager_->AddFieldIndex( + MakeFieldIndex("coll", "key", model::Segment::kAscending)); + + AddDoc("coll/doc1", Map("key", Regex("a", "i"))); + AddDoc("coll/doc2", Map("key", Regex("a", "m"))); + AddDoc("coll/doc3", Map("key", Regex("b", "i"))); + + auto base_query = Query("coll").AddingOrderBy(OrderBy("key")); + + { + SCOPED_TRACE("no filter"); + VerifyResults(base_query, {"coll/doc1", "coll/doc2", "coll/doc3"}); + } + { + SCOPED_TRACE("Query Regex with EqualTo filter"); + auto query = + base_query.AddingFilter(Filter("key", "==", Regex("a", "i"))); + VerifyResults(query, {"coll/doc1"}); + } + { + SCOPED_TRACE("Query Regex with NotEqualTo filter"); + auto query = + base_query.AddingFilter(Filter("key", "!=", Regex("a", "i"))); + VerifyResults(query, {"coll/doc2", "coll/doc3"}); + } + { + SCOPED_TRACE("Query Regex with GreaterThanOrEqualTo filter"); + auto query = + base_query.AddingFilter(Filter("key", ">=", Regex("a", "m"))); + VerifyResults(query, {"coll/doc2", "coll/doc3"}); + } + { + SCOPED_TRACE("Query Regex with LessThanOrEqualTo filter"); + auto query = + base_query.AddingFilter(Filter("key", "<=", Regex("a", "m"))); + VerifyResults(query, {"coll/doc1", "coll/doc2"}); + } + { + SCOPED_TRACE("Query Regex with GreaterThan filter"); + auto query = base_query.AddingFilter(Filter("key", ">", Regex("a", "m"))); + VerifyResults(query, {"coll/doc3"}); + } + { + SCOPED_TRACE("Query Regex with LessThan filter"); + auto query = 
base_query.AddingFilter(Filter("key", "<", Regex("a", "m"))); + VerifyResults(query, {"coll/doc1"}); + } + { + SCOPED_TRACE("Query Regex with GreaterThan filter and empty result set"); + auto query = base_query.AddingFilter(Filter("key", ">", Regex("b", "i"))); + VerifyResults(query, {}); + } + { + SCOPED_TRACE("Query Regex with LessThan filter and empty result set"); + auto query = base_query.AddingFilter(Filter("key", "<", Regex("a", "i"))); + VerifyResults(query, {}); + } + }); +} + +TEST_F(LevelDbIndexManagerTest, IndexMinKeyFields) { + persistence_->Run("TestIndexMinKeyFields", [&]() { + index_manager_->Start(); + index_manager_->AddFieldIndex( + MakeFieldIndex("coll", "key", model::Segment::kAscending)); + + AddDoc("coll/doc1", Map("key", MinKey())); + AddDoc("coll/doc2", Map("key", MinKey())); + AddDoc("coll/doc3", Map("key", nullptr)); + AddDoc("coll/doc4", Map("key", 1)); + AddDoc("coll/doc5", Map("key", MaxKey())); + + auto base_query = Query("coll").AddingOrderBy(OrderBy("key")); + + { + SCOPED_TRACE("no filter"); + VerifyResults(base_query, {"coll/doc3", "coll/doc1", "coll/doc2", + "coll/doc4", "coll/doc5"}); + } + { + SCOPED_TRACE("Query MinKey with EqualTo filter"); + auto query = base_query.AddingFilter(Filter("key", "==", MinKey())); + VerifyResults(query, {"coll/doc1", "coll/doc2"}); + } + { + SCOPED_TRACE("Query MinKey with NotEqualTo filter"); + auto query = base_query.AddingFilter(Filter("key", "!=", MinKey())); + VerifyResults(query, {"coll/doc4", "coll/doc5"}); + } + { + SCOPED_TRACE("Query MinKey with GreaterThanOrEqualTo filter"); + auto query = base_query.AddingFilter(Filter("key", ">=", MinKey())); + VerifyResults(query, {"coll/doc1", "coll/doc2"}); + } + { + SCOPED_TRACE("Query MinKey with LessThanOrEqualTo filter"); + auto query = base_query.AddingFilter(Filter("key", "<=", MinKey())); + VerifyResults(query, {"coll/doc1", "coll/doc2"}); + } + { + SCOPED_TRACE("Query MinKey with GreaterThan filter"); + auto query = base_query.AddingFilter(Filter("key", ">", MinKey())); + VerifyResults(query, {}); + } + { + SCOPED_TRACE("Query MinKey with LessThan filter"); + auto query = base_query.AddingFilter(Filter("key", "<", MinKey())); + VerifyResults(query, {}); + } + }); +} + +TEST_F(LevelDbIndexManagerTest, IndexMaxKeyFields) { + persistence_->Run("TestIndexMaxKeyFields", [&]() { + index_manager_->Start(); + index_manager_->AddFieldIndex( + MakeFieldIndex("coll", "key", model::Segment::kAscending)); + + AddDoc("coll/doc1", Map("key", MinKey())); + AddDoc("coll/doc2", Map("key", 1)); + AddDoc("coll/doc3", Map("key", MaxKey())); + AddDoc("coll/doc4", Map("key", MaxKey())); + AddDoc("coll/doc5", Map("key", nullptr)); + + auto base_query = Query("coll").AddingOrderBy(OrderBy("key")); + + { + SCOPED_TRACE("no filter"); + VerifyResults(base_query, {"coll/doc5", "coll/doc1", "coll/doc2", + "coll/doc3", "coll/doc4"}); + } + { + SCOPED_TRACE("Query MaxKey with EqualTo filter"); + auto query = base_query.AddingFilter(Filter("key", "==", MaxKey())); + VerifyResults(query, {"coll/doc3", "coll/doc4"}); + } + { + SCOPED_TRACE("Query MaxKey with NotEqualTo filter"); + auto query = base_query.AddingFilter(Filter("key", "!=", MaxKey())); + VerifyResults(query, {"coll/doc1", "coll/doc2"}); + } + { + SCOPED_TRACE("Query MaxKey with GreaterThanOrEqualTo filter"); + auto query = base_query.AddingFilter(Filter("key", ">=", MaxKey())); + VerifyResults(query, {"coll/doc3", "coll/doc4"}); + } + { + SCOPED_TRACE("Query MaxKey with LessThanOrEqualTo filter"); + auto query = 
base_query.AddingFilter(Filter("key", "<=", MaxKey())); + VerifyResults(query, {"coll/doc3", "coll/doc4"}); + } + { + SCOPED_TRACE("Query MaxKey with GreaterThan filter"); + auto query = base_query.AddingFilter(Filter("key", ">", MaxKey())); + VerifyResults(query, {}); + } + { + SCOPED_TRACE("Query MaxKey with LessThan filter"); + auto query = base_query.AddingFilter(Filter("key", "<", MaxKey())); + VerifyResults(query, {}); + } + }); +} + +TEST_F(LevelDbIndexManagerTest, IndexBsonTypesTogether) { + persistence_->Run("TestIndexBsonTypesTogether", [&]() { + index_manager_->Start(); + index_manager_->AddFieldIndex( + MakeFieldIndex("coll", "key", model::Segment::kDescending)); + + AddDoc("coll/doc1", Map("key", MinKey())); + AddDoc("coll/doc2", Map("key", Int32(2))); + AddDoc("coll/doc3", Map("key", Int32(1))); + AddDoc("coll/doc4", Map("key", BsonTimestamp(1, 2))); + AddDoc("coll/doc5", Map("key", BsonTimestamp(1, 1))); + AddDoc("coll/doc6", Map("key", BsonBinaryData(1, {1, 2, 4}))); + AddDoc("coll/doc7", Map("key", BsonBinaryData(1, {1, 2, 3}))); + AddDoc("coll/doc8", Map("key", BsonObjectId("507f191e810c19729de860eb"))); + AddDoc("coll/doc9", Map("key", BsonObjectId("507f191e810c19729de860ea"))); + AddDoc("coll/doc10", Map("key", Regex("a", "m"))); + AddDoc("coll/doc11", Map("key", Regex("a", "i"))); + AddDoc("coll/doc12", Map("key", MaxKey())); + + auto query = Query("coll").AddingOrderBy(OrderBy("key", "desc")); + + VerifyResults(query, {"coll/doc12", "coll/doc10", "coll/doc11", "coll/doc8", + "coll/doc9", "coll/doc6", "coll/doc7", "coll/doc4", + "coll/doc5", "coll/doc2", "coll/doc3", "coll/doc1"}); + }); +} + +TEST_F(LevelDbIndexManagerTest, IndexAllTypesTogether) { + persistence_->Run("TestIndexAllTypesTogether", [&]() { + index_manager_->Start(); + index_manager_->AddFieldIndex( + MakeFieldIndex("coll", "key", model::Segment::kDescending)); + + AddDoc("coll/a", Map("key", nullptr)); + AddDoc("coll/b", Map("key", MinKey())); + AddDoc("coll/c", Map("key", true)); + AddDoc("coll/d", Map("key", std::numeric_limits::quiet_NaN())); + AddDoc("coll/e", Map("key", Int32(1))); + AddDoc("coll/f", Map("key", 2.0)); + AddDoc("coll/g", Map("key", 3)); + AddDoc("coll/h", Map("key", Timestamp(100, 123456000))); + AddDoc("coll/i", Map("key", BsonTimestamp(1, 2))); + AddDoc("coll/j", Map("key", "string")); + AddDoc("coll/k", Map("key", BlobValue(0, 1, 255))); + AddDoc("coll/l", Map("key", BsonBinaryData(1, {1, 2, 3}))); + AddDoc("coll/m", Map("key", Ref("project", "coll/doc"))); + AddDoc("coll/n", Map("key", BsonObjectId("507f191e810c19729de860ea"))); + AddDoc("coll/o", Map("key", GeoPoint(0, 1))); + AddDoc("coll/p", Map("key", Regex("^foo", "i"))); + AddDoc("coll/q", Map("key", Array(1, 2))); + AddDoc("coll/r", Map("key", VectorType(1, 2))); + AddDoc("coll/s", Map("key", Map("a", 1))); + AddDoc("coll/t", Map("key", MaxKey())); + + auto query = Query("coll").AddingOrderBy(OrderBy("key", "desc")); + + VerifyResults(query, {"coll/t", "coll/s", "coll/r", "coll/q", "coll/p", + "coll/o", "coll/n", "coll/m", "coll/l", "coll/k", + "coll/j", "coll/i", "coll/h", "coll/g", "coll/f", + "coll/e", "coll/d", "coll/c", "coll/b", "coll/a"}); + }); +} + TEST_F(LevelDbIndexManagerTest, AdvancedQueries) { // This test compares local query results with those received from the Java // Server SDK. 
diff --git a/Firestore/core/test/unit/local/leveldb_local_store_test.cc b/Firestore/core/test/unit/local/leveldb_local_store_test.cc index 85e4286698b..8f53cbbee03 100644 --- a/Firestore/core/test/unit/local/leveldb_local_store_test.cc +++ b/Firestore/core/test/unit/local/leveldb_local_store_test.cc @@ -14,6 +14,7 @@ * limitations under the License. */ +#include "Firestore/core/include/firebase/firestore/geo_point.h" #include "Firestore/core/src/core/filter.h" #include "Firestore/core/src/core/query.h" #include "Firestore/core/src/local/leveldb_persistence.h" @@ -37,20 +38,30 @@ using model::IndexState; using testutil::AddedRemoteEvent; using testutil::Array; +using testutil::BlobValue; +using testutil::BsonBinaryData; +using testutil::BsonObjectId; +using testutil::BsonTimestamp; using testutil::DeletedDoc; using testutil::DeleteMutation; using testutil::Doc; using testutil::Field; using testutil::Filter; +using testutil::Int32; using testutil::Key; using testutil::MakeFieldIndex; using testutil::Map; +using testutil::MaxKey; +using testutil::MinKey; using testutil::OrderBy; using testutil::OrFilters; using testutil::OverlayTypeMap; +using testutil::Ref; +using testutil::Regex; using testutil::SetMutation; using testutil::UpdateRemoteEvent; using testutil::Vector; +using testutil::VectorType; using testutil::Version; class TestHelper : public LocalStoreTestHelper { @@ -309,6 +320,680 @@ TEST_F(LevelDbLocalStoreTest, UsesIndexForLimitQueryWhenIndexIsUpdated) { FSTAssertQueryReturned("coll/a", "coll/c"); } +TEST_F(LevelDbLocalStoreTest, IndexesBsonObjectId) { + FieldIndex index = MakeFieldIndex("coll", 0, FieldIndex::InitialState(), + "key", model::Segment::Kind::kAscending); + ConfigureFieldIndexes({index}); + + WriteMutation(SetMutation( + "coll/doc1", Map("key", BsonObjectId("507f191e810c19729de860ea")))); + WriteMutation(SetMutation( + "coll/doc2", Map("key", BsonObjectId("507f191e810c19729de860eb")))); + WriteMutation(SetMutation( + "coll/doc3", Map("key", BsonObjectId("507f191e810c19729de860ec")))); + + BackfillIndexes(); + + core::Query query = + testutil::Query("coll").AddingOrderBy(OrderBy("key", "asc")); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 3, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}, + {Key("coll/doc2"), model::Mutation::Type::Set}, + {Key("coll/doc3"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc1", "coll/doc2", "coll/doc3"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "==", BsonObjectId("507f191e810c19729de860ea"))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 1, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc1"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "!=", BsonObjectId("507f191e810c19729de860ea"))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc2"), model::Mutation::Type::Set}, + {Key("coll/doc3"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc2", "coll/doc3"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", ">=", BsonObjectId("507f191e810c19729de860eb"))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc2"), model::Mutation::Type::Set}, + {Key("coll/doc3"), 
model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc2", "coll/doc3"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "<=", BsonObjectId("507f191e810c19729de860eb"))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}, + {Key("coll/doc2"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc1", "coll/doc2"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", ">", BsonObjectId("507f191e810c19729de860ec"))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + FSTAssertOverlayTypes(OverlayTypeMap()); + FSTAssertQueryReturned(); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "<", BsonObjectId("507f191e810c19729de860ea"))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + FSTAssertOverlayTypes(OverlayTypeMap()); + FSTAssertQueryReturned(); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "in", + Array(BsonObjectId("507f191e810c19729de860ea"), + BsonObjectId("507f191e810c19729de860eb")))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}, + {Key("coll/doc2"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc1", "coll/doc2"); +} + +TEST_F(LevelDbLocalStoreTest, IndexesBsonTimestamp) { + FieldIndex index = MakeFieldIndex("coll", 0, FieldIndex::InitialState(), + "key", model::Segment::Kind::kAscending); + ConfigureFieldIndexes({index}); + + WriteMutation( + SetMutation("coll/doc1", Map("key", BsonTimestamp(1000, 1000)))); + WriteMutation( + SetMutation("coll/doc2", Map("key", BsonTimestamp(1001, 1000)))); + WriteMutation( + SetMutation("coll/doc3", Map("key", BsonTimestamp(1000, 1001)))); + + BackfillIndexes(); + + core::Query query = + testutil::Query("coll").AddingOrderBy(OrderBy("key", "asc")); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 3, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}, + {Key("coll/doc2"), model::Mutation::Type::Set}, + {Key("coll/doc3"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc1", "coll/doc3", "coll/doc2"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "==", BsonTimestamp(1000, 1000))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 1, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc1"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "!=", BsonTimestamp(1000, 1000))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc2"), model::Mutation::Type::Set}, + {Key("coll/doc3"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc3", "coll/doc2"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", ">=", BsonTimestamp(1000, 1001))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc2"), model::Mutation::Type::Set}, + {Key("coll/doc3"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc3", "coll/doc2"); + + query = testutil::Query("coll").AddingFilter( + 
Filter("key", "<=", BsonTimestamp(1000, 1001))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}, + {Key("coll/doc3"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc1", "coll/doc3"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", ">", BsonTimestamp(1001, 1000))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + FSTAssertOverlayTypes(OverlayTypeMap()); + FSTAssertQueryReturned(); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "<", BsonTimestamp(1000, 1000))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + FSTAssertOverlayTypes(OverlayTypeMap()); + FSTAssertQueryReturned(); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "in", + Array(BsonTimestamp(1000, 1000), BsonTimestamp(1000, 1001)))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}, + {Key("coll/doc3"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc1", "coll/doc3"); +} + +TEST_F(LevelDbLocalStoreTest, IndexesBsonBinary) { + FieldIndex index = MakeFieldIndex("coll", 0, FieldIndex::InitialState(), + "key", model::Segment::Kind::kAscending); + ConfigureFieldIndexes({index}); + + WriteMutation( + SetMutation("coll/doc1", Map("key", BsonBinaryData(1, {1, 2, 3})))); + WriteMutation( + SetMutation("coll/doc2", Map("key", BsonBinaryData(1, {1, 2})))); + WriteMutation( + SetMutation("coll/doc3", Map("key", BsonBinaryData(1, {1, 2, 4})))); + WriteMutation( + SetMutation("coll/doc4", Map("key", BsonBinaryData(2, {1, 2})))); + + BackfillIndexes(); + + core::Query query = + testutil::Query("coll").AddingOrderBy(OrderBy("key", "asc")); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 4, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}, + {Key("coll/doc2"), model::Mutation::Type::Set}, + {Key("coll/doc3"), model::Mutation::Type::Set}, + {Key("coll/doc4"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc2", "coll/doc1", "coll/doc3", "coll/doc4"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "==", BsonBinaryData(1, {1, 2, 3}))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 1, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc1"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "!=", BsonBinaryData(1, {1, 2, 3}))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 3, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc2"), model::Mutation::Type::Set}, + {Key("coll/doc3"), model::Mutation::Type::Set}, + {Key("coll/doc4"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc2", "coll/doc3", "coll/doc4"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", ">=", BsonBinaryData(1, {1, 2, 3}))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 3, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}, + {Key("coll/doc3"), model::Mutation::Type::Set}, + {Key("coll/doc4"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc1", 
"coll/doc3", "coll/doc4"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "<=", BsonBinaryData(1, {1, 2, 3}))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}, + {Key("coll/doc2"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc2", "coll/doc1"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", ">", BsonBinaryData(2, {1, 2}))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + FSTAssertOverlayTypes(OverlayTypeMap()); + FSTAssertQueryReturned(); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "<", BsonBinaryData(1, {1, 2}))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + FSTAssertOverlayTypes(OverlayTypeMap()); + FSTAssertQueryReturned(); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "in", + Array(BsonBinaryData(1, {1, 2, 3}), BsonBinaryData(1, {1, 2})))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}, + {Key("coll/doc2"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc1", "coll/doc2"); +} + +TEST_F(LevelDbLocalStoreTest, IndexesRegex) { + FieldIndex index = MakeFieldIndex("coll", 0, FieldIndex::InitialState(), + "key", model::Segment::Kind::kAscending); + ConfigureFieldIndexes({index}); + + WriteMutation(SetMutation("coll/doc1", Map("key", Regex("^bar", "i")))); + WriteMutation(SetMutation("coll/doc2", Map("key", Regex("^bar", "m")))); + WriteMutation(SetMutation("coll/doc3", Map("key", Regex("^foo", "i")))); + + BackfillIndexes(); + + core::Query query = + testutil::Query("coll").AddingOrderBy(OrderBy("key", "asc")); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 3, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}, + {Key("coll/doc2"), model::Mutation::Type::Set}, + {Key("coll/doc3"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc1", "coll/doc2", "coll/doc3"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "==", Regex("^bar", "i"))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 1, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc1"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "!=", Regex("^bar", "i"))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc2"), model::Mutation::Type::Set}, + {Key("coll/doc3"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc2", "coll/doc3"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", ">", Regex("^foo", "i"))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + FSTAssertOverlayTypes(OverlayTypeMap()); + FSTAssertQueryReturned(); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "<", Regex("^bar", "i"))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + FSTAssertOverlayTypes(OverlayTypeMap()); + FSTAssertQueryReturned(); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "in", Array(Regex("^bar", "i"), Regex("^foo", 
"i")))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}, + {Key("coll/doc3"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc1", "coll/doc3"); +} + +TEST_F(LevelDbLocalStoreTest, IndexesInt32) { + FieldIndex index = MakeFieldIndex("coll", 0, FieldIndex::InitialState(), + "key", model::Segment::Kind::kAscending); + ConfigureFieldIndexes({index}); + + WriteMutation(SetMutation("coll/doc1", Map("key", Int32(-1)))); + WriteMutation(SetMutation("coll/doc2", Map("key", Int32(0)))); + WriteMutation(SetMutation("coll/doc3", Map("key", Int32(1)))); + + BackfillIndexes(); + + core::Query query = + testutil::Query("coll").AddingOrderBy(OrderBy("key", "asc")); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 3, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}, + {Key("coll/doc2"), model::Mutation::Type::Set}, + {Key("coll/doc3"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc1", "coll/doc2", "coll/doc3"); + + query = testutil::Query("coll").AddingFilter(Filter("key", "==", Int32(-1))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 1, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc1"); + + query = testutil::Query("coll").AddingFilter(Filter("key", "!=", Int32(-1))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc2"), model::Mutation::Type::Set}, + {Key("coll/doc3"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc2", "coll/doc3"); + + query = testutil::Query("coll").AddingFilter(Filter("key", ">=", Int32(0))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc2"), model::Mutation::Type::Set}, + {Key("coll/doc3"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc2", "coll/doc3"); + + query = testutil::Query("coll").AddingFilter(Filter("key", "<=", Int32(0))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}, + {Key("coll/doc2"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc1", "coll/doc2"); + + query = testutil::Query("coll").AddingFilter(Filter("key", ">", Int32(1))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + FSTAssertOverlayTypes(OverlayTypeMap()); + FSTAssertQueryReturned(); + + query = testutil::Query("coll").AddingFilter(Filter("key", "<", Int32(-1))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + FSTAssertOverlayTypes(OverlayTypeMap()); + FSTAssertQueryReturned(); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "in", Array(Int32(-1), Int32(0)))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}, + {Key("coll/doc2"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc1", "coll/doc2"); +} + +TEST_F(LevelDbLocalStoreTest, IndexesMinKey) { + FieldIndex index = MakeFieldIndex("coll", 0, FieldIndex::InitialState(), + "key", 
model::Segment::Kind::kAscending); + ConfigureFieldIndexes({index}); + + WriteMutation(SetMutation("coll/doc1", Map("key", nullptr))); + WriteMutation(SetMutation("coll/doc2", Map("key", MinKey()))); + WriteMutation(SetMutation("coll/doc3", Map("key", MinKey()))); + WriteMutation(SetMutation("coll/doc4", Map("key", Int32(1)))); + WriteMutation(SetMutation("coll/doc5", Map("key", MaxKey()))); + + BackfillIndexes(); + + core::Query query = + testutil::Query("coll").AddingOrderBy(OrderBy("key", "asc")); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 5, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}, + {Key("coll/doc2"), model::Mutation::Type::Set}, + {Key("coll/doc3"), model::Mutation::Type::Set}, + {Key("coll/doc4"), model::Mutation::Type::Set}, + {Key("coll/doc5"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc1", "coll/doc2", "coll/doc3", "coll/doc4", + "coll/doc5"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "==", testutil::MinKey())); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc2"), model::Mutation::Type::Set}, + {Key("coll/doc3"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc2", "coll/doc3"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "!=", testutil::MinKey())); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc4"), model::Mutation::Type::Set}, + {Key("coll/doc5"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc4", "coll/doc5"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", ">=", testutil::MinKey())); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc2"), model::Mutation::Type::Set}, + {Key("coll/doc3"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc2", "coll/doc3"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "<=", testutil::MinKey())); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc2"), model::Mutation::Type::Set}, + {Key("coll/doc3"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc2", "coll/doc3"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", ">", testutil::MinKey())); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + FSTAssertOverlayTypes(OverlayTypeMap()); + FSTAssertQueryReturned(); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "<", testutil::MinKey())); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + FSTAssertOverlayTypes(OverlayTypeMap()); + FSTAssertQueryReturned(); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "in", Array(testutil::MinKey(), testutil::MaxKey()))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 3, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc2"), model::Mutation::Type::Set}, + {Key("coll/doc3"), model::Mutation::Type::Set}, + {Key("coll/doc5"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc2", "coll/doc3", "coll/doc5"); +} + +TEST_F(LevelDbLocalStoreTest, IndexesMaxKey) { + FieldIndex index = MakeFieldIndex("coll", 
0, FieldIndex::InitialState(), + "key", model::Segment::Kind::kAscending); + ConfigureFieldIndexes({index}); + + WriteMutation(SetMutation("coll/doc1", Map("key", nullptr))); + WriteMutation(SetMutation("coll/doc2", Map("key", MinKey()))); + WriteMutation(SetMutation("coll/doc3", Map("key", Int32(1)))); + WriteMutation(SetMutation("coll/doc4", Map("key", MaxKey()))); + WriteMutation(SetMutation("coll/doc5", Map("key", MaxKey()))); + + BackfillIndexes(); + + core::Query query = + testutil::Query("coll").AddingOrderBy(OrderBy("key", "asc")); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 5, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}, + {Key("coll/doc2"), model::Mutation::Type::Set}, + {Key("coll/doc3"), model::Mutation::Type::Set}, + {Key("coll/doc4"), model::Mutation::Type::Set}, + {Key("coll/doc5"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc1", "coll/doc2", "coll/doc3", "coll/doc4", + "coll/doc5"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "==", testutil::MaxKey())); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc4"), model::Mutation::Type::Set}, + {Key("coll/doc5"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc4", "coll/doc5"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "!=", testutil::MaxKey())); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc2"), model::Mutation::Type::Set}, + {Key("coll/doc3"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc2", "coll/doc3"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", ">=", testutil::MaxKey())); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc4"), model::Mutation::Type::Set}, + {Key("coll/doc5"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc4", "coll/doc5"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "<=", testutil::MaxKey())); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc4"), model::Mutation::Type::Set}, + {Key("coll/doc5"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc4", "coll/doc5"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", ">", testutil::MaxKey())); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + FSTAssertOverlayTypes(OverlayTypeMap()); + FSTAssertQueryReturned(); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "<", testutil::MaxKey())); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + FSTAssertOverlayTypes(OverlayTypeMap()); + FSTAssertQueryReturned(); +} + +TEST_F(LevelDbLocalStoreTest, IndexesAllBsonTypesTogether) { + FieldIndex index = MakeFieldIndex("coll", 0, FieldIndex::InitialState(), + "key", model::Segment::Kind::kDescending); + ConfigureFieldIndexes({index}); + + WriteMutation(SetMutation("coll/doc1", Map("key", MinKey()))); + WriteMutation(SetMutation("coll/doc2", Map("key", Int32(2)))); + WriteMutation(SetMutation("coll/doc3", Map("key", Int32(1)))); + WriteMutation( + SetMutation("coll/doc4", Map("key", BsonTimestamp(1000, 1001)))); + WriteMutation( + 
SetMutation("coll/doc5", Map("key", BsonTimestamp(1000, 1000)))); + WriteMutation( + SetMutation("coll/doc6", Map("key", BsonBinaryData(1, {1, 2, 4})))); + WriteMutation( + SetMutation("coll/doc7", Map("key", BsonBinaryData(1, {1, 2, 3})))); + WriteMutation(SetMutation( + "coll/doc8", Map("key", BsonObjectId("507f191e810c19729de860eb")))); + WriteMutation(SetMutation( + "coll/doc9", Map("key", BsonObjectId("507f191e810c19729de860ea")))); + WriteMutation(SetMutation("coll/doc10", Map("key", Regex("^bar", "m")))); + WriteMutation(SetMutation("coll/doc11", Map("key", Regex("^bar", "i")))); + WriteMutation(SetMutation("coll/doc12", Map("key", MaxKey()))); + + BackfillIndexes(); + + core::Query query = + testutil::Query("coll").AddingOrderBy(OrderBy("key", "desc")); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 12, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}, + {Key("coll/doc2"), model::Mutation::Type::Set}, + {Key("coll/doc3"), model::Mutation::Type::Set}, + {Key("coll/doc4"), model::Mutation::Type::Set}, + {Key("coll/doc5"), model::Mutation::Type::Set}, + {Key("coll/doc6"), model::Mutation::Type::Set}, + {Key("coll/doc7"), model::Mutation::Type::Set}, + {Key("coll/doc8"), model::Mutation::Type::Set}, + {Key("coll/doc9"), model::Mutation::Type::Set}, + {Key("coll/doc10"), model::Mutation::Type::Set}, + {Key("coll/doc11"), model::Mutation::Type::Set}, + {Key("coll/doc12"), model::Mutation::Type::Set}})); + + FSTAssertQueryReturned("coll/doc12", "coll/doc10", "coll/doc11", "coll/doc8", + "coll/doc9", "coll/doc6", "coll/doc7", "coll/doc4", + "coll/doc5", "coll/doc2", "coll/doc3", "coll/doc1"); +} + +TEST_F(LevelDbLocalStoreTest, IndexesAllTypesTogether) { + FieldIndex index = MakeFieldIndex("coll", 0, FieldIndex::InitialState(), + "key", model::Segment::Kind::kAscending); + ConfigureFieldIndexes({index}); + + WriteMutation(SetMutation("coll/doc1", Map("key", nullptr))); + WriteMutation(SetMutation("coll/doc2", Map("key", MinKey()))); + WriteMutation(SetMutation("coll/doc3", Map("key", true))); + WriteMutation(SetMutation("coll/doc4", Map("key", NAN))); + WriteMutation(SetMutation("coll/doc5", Map("key", Int32(1)))); + WriteMutation(SetMutation("coll/doc6", Map("key", 2.0))); + WriteMutation(SetMutation("coll/doc7", Map("key", 3L))); + WriteMutation( + SetMutation("coll/doc8", Map("key", Timestamp(100, 123456000)))); + WriteMutation(SetMutation("coll/doc9", Map("key", BsonTimestamp(1, 2)))); + WriteMutation(SetMutation("coll/doc10", Map("key", "string"))); + WriteMutation(SetMutation("coll/doc11", Map("key", BlobValue(1, 2, 3)))); + WriteMutation( + SetMutation("coll/doc12", Map("key", BsonBinaryData(1, {1, 2, 3})))); + WriteMutation( + SetMutation("coll/doc13", Map("key", Ref("project/db", "col/doc")))); + WriteMutation(SetMutation( + "coll/doc14", Map("key", BsonObjectId("507f191e810c19729de860ea")))); + WriteMutation(SetMutation("coll/doc15", Map("key", GeoPoint(1, 2)))); + WriteMutation(SetMutation("coll/doc16", Map("key", Regex("^bar", "m")))); + WriteMutation(SetMutation("coll/doc17", Map("key", Array(2L, "foo")))); + WriteMutation( + SetMutation("coll/doc18", Map("key", VectorType(1.0, 2.0, 3.0)))); + WriteMutation( + SetMutation("coll/doc19", Map("key", Map("bar", 1L, "foo", 2L)))); + WriteMutation(SetMutation("coll/doc20", Map("key", MaxKey()))); + + BackfillIndexes(); + + core::Query query = + testutil::Query("coll").AddingOrderBy(OrderBy("key", "asc")); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* 
byKey= */ 20, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}, + {Key("coll/doc2"), model::Mutation::Type::Set}, + {Key("coll/doc3"), model::Mutation::Type::Set}, + {Key("coll/doc4"), model::Mutation::Type::Set}, + {Key("coll/doc5"), model::Mutation::Type::Set}, + {Key("coll/doc6"), model::Mutation::Type::Set}, + {Key("coll/doc7"), model::Mutation::Type::Set}, + {Key("coll/doc8"), model::Mutation::Type::Set}, + {Key("coll/doc9"), model::Mutation::Type::Set}, + {Key("coll/doc10"), model::Mutation::Type::Set}, + {Key("coll/doc11"), model::Mutation::Type::Set}, + {Key("coll/doc12"), model::Mutation::Type::Set}, + {Key("coll/doc13"), model::Mutation::Type::Set}, + {Key("coll/doc14"), model::Mutation::Type::Set}, + {Key("coll/doc15"), model::Mutation::Type::Set}, + {Key("coll/doc16"), model::Mutation::Type::Set}, + {Key("coll/doc17"), model::Mutation::Type::Set}, + {Key("coll/doc18"), model::Mutation::Type::Set}, + {Key("coll/doc19"), model::Mutation::Type::Set}, + {Key("coll/doc20"), model::Mutation::Type::Set}})); + + FSTAssertQueryReturned( + "coll/doc1", "coll/doc2", "coll/doc3", "coll/doc4", "coll/doc5", + "coll/doc6", "coll/doc7", "coll/doc8", "coll/doc9", "coll/doc10", + "coll/doc11", "coll/doc12", "coll/doc13", "coll/doc14", "coll/doc15", + "coll/doc16", "coll/doc17", "coll/doc18", "coll/doc19", "coll/doc20"); +} + TEST_F(LevelDbLocalStoreTest, IndexesServerTimestamps) { FieldIndex index = MakeFieldIndex("coll", 0, FieldIndex::InitialState(), "time", model::Segment::Kind::kAscending); @@ -426,7 +1111,7 @@ TEST_F(LevelDbLocalStoreTest, DoesNotAutoCreateIndexesForSmallCollections) { // SDK will not create indexes since collection size is too small. ExecuteQuery(query); FSTAssertRemoteDocumentsRead(/* byKey= */ 0, /* byCollection= */ 2); - FSTAssertQueryReturned("coll/a", "coll/e"); + FSTAssertQueryReturned("coll/e", "coll/a"); BackfillIndexes(); @@ -435,7 +1120,7 @@ TEST_F(LevelDbLocalStoreTest, DoesNotAutoCreateIndexesForSmallCollections) { ExecuteQuery(query); FSTAssertRemoteDocumentsRead(/* byKey= */ 0, /* byCollection= */ 3); - FSTAssertQueryReturned("coll/a", "coll/e", "coll/f"); + FSTAssertQueryReturned("coll/e", "coll/a", "coll/f"); } TEST_F(LevelDbLocalStoreTest, @@ -541,7 +1226,7 @@ TEST_F(LevelDbLocalStoreTest, // (2). Full matched index should be created. 
ExecuteQuery(query); FSTAssertRemoteDocumentsRead(/* byKey= */ 0, /* byCollection= */ 2); - FSTAssertQueryReturned("coll/a", "coll/e"); + FSTAssertQueryReturned("coll/e", "coll/a"); SetIndexAutoCreationEnabled(false); @@ -552,7 +1237,7 @@ TEST_F(LevelDbLocalStoreTest, ExecuteQuery(query); FSTAssertRemoteDocumentsRead(/* byKey= */ 2, /* byCollection= */ 1); - FSTAssertQueryReturned("coll/a", "coll/e", "coll/f"); + FSTAssertQueryReturned("coll/e", "coll/a", "coll/f"); } TEST_F(LevelDbLocalStoreTest, DisableIndexAutoCreationWorks) { diff --git a/Firestore/core/test/unit/local/local_store_test.cc b/Firestore/core/test/unit/local/local_store_test.cc index 2c0affe91ee..65012b63248 100644 --- a/Firestore/core/test/unit/local/local_store_test.cc +++ b/Firestore/core/test/unit/local/local_store_test.cc @@ -67,6 +67,7 @@ using model::Document; using model::DocumentKey; using model::DocumentKeySet; using model::DocumentMap; +using model::DocumentSet; using model::ListenSequenceNumber; using model::MutableDocument; using model::MutableDocumentMap; @@ -105,6 +106,16 @@ using testutil::UpdateRemoteEventWithLimboTargets; using testutil::Value; using testutil::Vector; +std::vector DocSetToVector(const absl::optional& docs) { + std::vector result; + if (docs.has_value()) { + for (const auto& doc : *docs) { + result.push_back(doc); + } + } + return result; +} + std::vector DocMapToVector(const DocumentMap& docs) { std::vector result; for (const auto& kv : docs) { @@ -273,10 +284,19 @@ TargetData LocalStoreTestBase::GetTargetData(const core::Query& query) { }); } -QueryResult LocalStoreTestBase::ExecuteQuery(const core::Query& query) { +absl::optional LocalStoreTestBase::ExecuteQuery( + const core::Query& query) { ResetPersistenceStats(); - last_query_result_ = + local::QueryResult query_result = local_store_.ExecuteQuery(query, /* use_previous_results= */ true); + + // Start from an empty set. Use the query's comparator which is what + // ultimately gets used to order documents. 
+ last_query_result_ = DocumentSet(query.Comparator()); + for (const auto& document : query_result.documents()) { + last_query_result_ = last_query_result_->insert(document.second); + } + return last_query_result_; } @@ -880,8 +900,8 @@ TEST_P(LocalStoreTest, CanExecuteDocumentQueries) { testutil::SetMutation("foo/baz", Map("foo", "baz")), testutil::SetMutation("foo/bar/Foo/Bar", Map("Foo", "Bar"))}); core::Query query = Query("foo/bar"); - QueryResult query_result = ExecuteQuery(query); - ASSERT_EQ(DocMapToVector(query_result.documents()), + auto query_result = ExecuteQuery(query); + ASSERT_EQ(DocSetToVector(query_result), Vector(Document{ Doc("foo/bar", 0, Map("foo", "bar")).SetHasLocalMutations()})); } @@ -894,9 +914,9 @@ TEST_P(LocalStoreTest, CanExecuteCollectionQueries) { testutil::SetMutation("foo/bar/Foo/Bar", Map("Foo", "Bar")), testutil::SetMutation("fooo/blah", Map("fooo", "blah"))}); core::Query query = Query("foo"); - QueryResult query_result = ExecuteQuery(query); + auto query_result = ExecuteQuery(query); ASSERT_EQ( - DocMapToVector(query_result.documents()), + DocSetToVector(query_result), Vector( Document{Doc("foo/bar", 0, Map("foo", "bar")).SetHasLocalMutations()}, Document{ @@ -915,9 +935,9 @@ TEST_P(LocalStoreTest, CanExecuteMixedCollectionQueries) { local_store_.WriteLocally({testutil::SetMutation("foo/bonk", Map("a", "b"))}); - QueryResult query_result = ExecuteQuery(query); + auto query_result = ExecuteQuery(query); ASSERT_EQ( - DocMapToVector(query_result.documents()), + DocSetToVector(query_result), Vector( Document{Doc("foo/bar", 20, Map("a", "b"))}, Document{Doc("foo/baz", 10, Map("a", "b"))}, diff --git a/Firestore/core/test/unit/local/local_store_test.h b/Firestore/core/test/unit/local/local_store_test.h index 1271bc4fa1b..5213b6276a7 100644 --- a/Firestore/core/test/unit/local/local_store_test.h +++ b/Firestore/core/test/unit/local/local_store_test.h @@ -25,6 +25,7 @@ #include "Firestore/core/src/local/local_store.h" #include "Firestore/core/src/local/query_engine.h" #include "Firestore/core/src/local/query_result.h" +#include "Firestore/core/src/model/document_set.h" #include "Firestore/core/src/model/mutation_batch.h" #include "Firestore/core/test/unit/local/counting_query_engine.h" #include "gtest/gtest.h" @@ -89,7 +90,7 @@ class LocalStoreTestBase : public testing::Test { std::vector&& new_field_indexes); model::TargetId AllocateQuery(core::Query query); local::TargetData GetTargetData(const core::Query& query); - local::QueryResult ExecuteQuery(const core::Query& query); + absl::optional ExecuteQuery(const core::Query& query); void SetIndexAutoCreationEnabled(bool is_enabled); void DeleteAllIndexes() const; void SetMinCollectionSizeToAutoCreateIndex(size_t new_min); @@ -112,7 +113,7 @@ class LocalStoreTestBase : public testing::Test { model::DocumentMap last_changes_; model::TargetId last_target_id_ = 0; - local::QueryResult last_query_result_; + absl::optional last_query_result_; }; /** @@ -151,20 +152,19 @@ class LocalStoreTest : public LocalStoreTestBase, /** * Asserts that the last ExecuteQuery results contain the docs in the given - * array. + * array in the same order. */ -#define FSTAssertQueryReturned(...) 
\ - do { \ - std::vector expected_keys = {__VA_ARGS__}; \ - ASSERT_EQ(last_query_result_.documents().size(), expected_keys.size()); \ - auto expected_keys_iterator = expected_keys.begin(); \ - for (const auto& kv : last_query_result_.documents()) { \ - const DocumentKey& actual_key = kv.first; \ - DocumentKey expected_key = Key(*expected_keys_iterator); \ - ASSERT_EQ(actual_key, expected_key); \ - ++expected_keys_iterator; \ - } \ - last_query_result_ = QueryResult{}; \ +#define FSTAssertQueryReturned(...) \ + do { \ + std::vector expected_keys = {__VA_ARGS__}; \ + ASSERT_EQ(last_query_result_->size(), expected_keys.size()); \ + auto expected_keys_iterator = expected_keys.begin(); \ + for (const auto& doc : *last_query_result_) { \ + const DocumentKey& actual_key = doc.get().key(); \ + DocumentKey expected_key = Key(*expected_keys_iterator); \ + ASSERT_EQ(actual_key, expected_key); \ + ++expected_keys_iterator; \ + } \ } while (0) /** Asserts that the given keys were removed. */ diff --git a/Firestore/core/test/unit/model/value_util_test.cc b/Firestore/core/test/unit/model/value_util_test.cc index acb6b6f6607..4f528e78575 100644 --- a/Firestore/core/test/unit/model/value_util_test.cc +++ b/Firestore/core/test/unit/model/value_util_test.cc @@ -194,9 +194,9 @@ TEST(FieldValueTest, ValueHelpers) { ASSERT_EQ(GetTypeOrder(*map_value), TypeOrder::kMap); ASSERT_EQ(DetectMapType(*map_value), MapType::kNormal); - auto max_value = DeepClone(MaxValue()); - ASSERT_EQ(GetTypeOrder(*max_value), TypeOrder::kMaxValue); - ASSERT_EQ(DetectMapType(*max_value), MapType::kMaxValue); + auto max_value = DeepClone(InternalMaxValue()); + ASSERT_EQ(GetTypeOrder(*max_value), TypeOrder::kInternalMaxValue); + ASSERT_EQ(DetectMapType(*max_value), MapType::kInternalMaxValue); auto server_timestamp = EncodeServerTimestamp(kTimestamp1, absl::nullopt); ASSERT_EQ(GetTypeOrder(*server_timestamp), TypeOrder::kServerTimestamp); @@ -463,7 +463,7 @@ TEST_F(ValueUtilTest, StrictOrdering) { // MaxKey Add(comparison_groups, MaxKey()); - Add(comparison_groups, DeepClone(MaxValue())); + Add(comparison_groups, DeepClone(InternalMaxValue())); for (size_t i = 0; i < comparison_groups.size(); ++i) { for (size_t j = i; j < comparison_groups.size(); ++j) { @@ -627,7 +627,7 @@ TEST_F(ValueUtilTest, RelaxedOrdering) { Add(comparison_groups, MaxKey()); // MaxValue (internal) - Add(comparison_groups, DeepClone(MaxValue())); + Add(comparison_groups, DeepClone(InternalMaxValue())); for (size_t i = 0; i < comparison_groups.size(); ++i) { for (size_t j = i; j < comparison_groups.size(); ++j) { @@ -636,6 +636,187 @@ TEST_F(ValueUtilTest, RelaxedOrdering) { } } +TEST_F(ValueUtilTest, ComputesLowerBound) { + auto GetLowerBoundMessage = [](Message value) { + return DeepClone(GetLowerBound(*value)); + }; + + std::vector> groups; + + // Lower bound of null is null + Add(groups, DeepClone(NullValue()), + GetLowerBoundMessage(DeepClone(NullValue()))); + + // Lower bound of MinKey is MinKey + Add(groups, MinKey(), GetLowerBoundMessage(DeepClone(MinKeyValue())), + DeepClone(MinKeyValue())); + + // Booleans + Add(groups, false, GetLowerBoundMessage(Value(true))); + Add(groups, true); + + // Numbers + Add(groups, GetLowerBoundMessage(Value(0.0)), GetLowerBoundMessage(Value(0L)), + GetLowerBoundMessage(Int32(0)), std::nan(""), DeepClone(MinNumber())); + Add(groups, INT_MIN); + + // Timestamps + Add(groups, GetLowerBoundMessage(Value(kTimestamp1)), + DeepClone(MinTimestamp())); + Add(groups, kTimestamp1); + + // BSON Timestamps + Add(groups, 
GetLowerBoundMessage(BsonTimestamp(500, 600)), + BsonTimestamp(0, 0), DeepClone(MinBsonTimestamp())); + Add(groups, BsonTimestamp(1, 1)); + + // Strings + Add(groups, GetLowerBoundMessage(Value("Z")), "", DeepClone(MinString())); + Add(groups, "\u0000"); + + // Blobs + Add(groups, GetLowerBoundMessage(BlobValue(1, 2, 3)), BlobValue(), + DeepClone(MinBytes())); + Add(groups, BlobValue(0)); + + // BSON Binary Data + Add(groups, GetLowerBoundMessage(BsonBinaryData(128, {128, 128})), + DeepClone(MinBsonBinaryData())); + Add(groups, BsonBinaryData(0, {0})); + + // References + Add(groups, GetLowerBoundMessage(RefValue(DbId("p1/d1"), Key("c1/doc1"))), + DeepClone(MinReference())); + Add(groups, RefValue(DbId(), Key("a/a"))); + + // BSON Object Ids + Add(groups, GetLowerBoundMessage(BsonObjectId("ZZZ")), BsonObjectId(""), + DeepClone(MinBsonObjectId())); + Add(groups, BsonObjectId("a")); + + // GeoPoints + Add(groups, GetLowerBoundMessage(Value(GeoPoint(30, 60))), + GeoPoint(-90, -180), DeepClone(MinGeoPoint())); + Add(groups, GeoPoint(-90, 0)); + + // Regular Expressions + Add(groups, GetLowerBoundMessage(Regex("ZZZ", "i")), Regex("", ""), + DeepClone(MinRegex())); + Add(groups, Regex("a", "i")); + + // Arrays + Add(groups, GetLowerBoundMessage(Value(Array())), Array(), + DeepClone(MinArray())); + Add(groups, Array(false)); + + // Vectors + Add(groups, GetLowerBoundMessage(VectorType(1.0)), VectorType(), + DeepClone(MinVector())); + Add(groups, VectorType(1.0)); + + // Maps + Add(groups, GetLowerBoundMessage(Map()), Map(), DeepClone(MinMap())); + Add(groups, Map("a", "b")); + + // MaxKey + Add(groups, MaxKey(), GetLowerBoundMessage(DeepClone(MaxKeyValue())), + DeepClone(MaxKeyValue())); + + for (size_t i = 0; i < groups.size(); ++i) { + for (size_t j = i; j < groups.size(); ++j) { + VerifyRelaxedAscending(groups[i], groups[j]); + } + } +} + +TEST_F(ValueUtilTest, ComputesUpperBound) { + auto GetUpperBoundMessage = [](Message value) { + return DeepClone(GetUpperBound(*value)); + }; + + std::vector> groups; + + // Null first + Add(groups, DeepClone(NullValue())); + + // The upper bound of null is MinKey + Add(groups, MinKey(), GetUpperBoundMessage(DeepClone(NullValue()))); + + // The upper bound of MinKey is boolean `false` + Add(groups, false, GetUpperBoundMessage(MinKey())); + + // Booleans + Add(groups, true); + Add(groups, GetUpperBoundMessage(Value(false))); + + // Numbers + Add(groups, INT_MAX); + Add(groups, GetUpperBoundMessage(Value(INT_MAX)), + GetUpperBoundMessage(Value(0L)), GetUpperBoundMessage(Int32(0)), + GetUpperBoundMessage(Value(std::nan("")))); + + // Timestamps + Add(groups, kTimestamp1); + Add(groups, GetUpperBoundMessage(Value(kTimestamp1))); + + // BSON Timestamps + Add(groups, BsonTimestamp(4294967295, 4294967295)); // largest BSON Timestamp + Add(groups, GetUpperBoundMessage(DeepClone(MinBsonTimestamp()))); + + // Strings + Add(groups, "\u0000"); + Add(groups, GetUpperBoundMessage(DeepClone(MinString()))); + + // Blobs + Add(groups, BlobValue(255)); + Add(groups, GetUpperBoundMessage(BlobValue())); + + // BSON Binary Data + Add(groups, BsonBinaryData(255, {255, 255})); + Add(groups, GetUpperBoundMessage(DeepClone(MinBsonBinaryData()))); + + // References + Add(groups, DeepClone(MinReference())); + Add(groups, RefValue(DbId(), Key("c/d"))); + Add(groups, GetUpperBoundMessage(RefValue(DbId(), Key("a/b")))); + + // BSON Object Ids + Add(groups, BsonObjectId("foo")); + Add(groups, GetUpperBoundMessage(DeepClone(MinBsonObjectId()))); + + // GeoPoints + Add(groups, GeoPoint(90, 
180)); + Add(groups, GetUpperBoundMessage(DeepClone(MinGeoPoint()))); + + // Regular Expressions + Add(groups, Regex("a", "i")); + Add(groups, GetUpperBoundMessage(DeepClone(MinRegex()))); + + // Arrays + Add(groups, Array(false)); + Add(groups, GetUpperBoundMessage(DeepClone(MinArray()))); + + // Vectors + Add(groups, VectorType(1.0, 2.0, 3.0)); + Add(groups, GetUpperBoundMessage(DeepClone(MinVector()))); + + // Maps + Add(groups, Map("a", "b")); + Add(groups, GetUpperBoundMessage(DeepClone(MinMap()))); + + // MaxKey + Add(groups, MaxKey()); + + // The upper bound of MaxKey is internal max value. + Add(groups, GetUpperBoundMessage(DeepClone(MaxKeyValue()))); + + for (size_t i = 0; i < groups.size(); ++i) { + for (size_t j = i; j < groups.size(); ++j) { + VerifyRelaxedAscending(groups[i], groups[j]); + } + } +} + TEST_F(ValueUtilTest, CanonicalId) { VerifyCanonicalId(Value(nullptr), "null"); VerifyCanonicalId(Value(true), "true"); From 2d4fd68d915a0377cd4931f1397deb423309ebaa Mon Sep 17 00:00:00 2001 From: Ehsan Nasiri Date: Thu, 1 May 2025 15:12:57 -0700 Subject: [PATCH 03/16] Remove temporary test code. --- Firestore/Example/Tests/Util/FSTIntegrationTestCase.mm | 6 ------ 1 file changed, 6 deletions(-) diff --git a/Firestore/Example/Tests/Util/FSTIntegrationTestCase.mm b/Firestore/Example/Tests/Util/FSTIntegrationTestCase.mm index 0a9949457b4..4b7c7b9f034 100644 --- a/Firestore/Example/Tests/Util/FSTIntegrationTestCase.mm +++ b/Firestore/Example/Tests/Util/FSTIntegrationTestCase.mm @@ -198,12 +198,6 @@ + (void)setUpDefaults { // SSL certs. NSString *project = [[NSProcessInfo processInfo] environment][@"PROJECT_ID"]; NSString *targetBackend = [[NSProcessInfo processInfo] environment][@"TARGET_BACKEND"]; - - // Forcing use of nightly. - // TODO(types/ehsann): remove this before merging into main. - targetBackend = @"nightly"; - project = @"firestore-sdk-nightly"; - NSString *host; if (targetBackend) { if ([targetBackend isEqualToString:@"emulator"]) { From a33a3879aab5958afbda6c4c9296b59d823002c3 Mon Sep 17 00:00:00 2001 From: Ehsan Nasiri Date: Thu, 1 May 2025 15:17:03 -0700 Subject: [PATCH 04/16] Add CHANGELOG. --- Firestore/CHANGELOG.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/Firestore/CHANGELOG.md b/Firestore/CHANGELOG.md index a006d39ab7b..b0cebc50911 100644 --- a/Firestore/CHANGELOG.md +++ b/Firestore/CHANGELOG.md @@ -1,3 +1,7 @@ +# Unreleased +- [feature] Adds support for the following new types: MinKey, MaxKey, RegexValue, Int32Value, BsonObjectId, + BsonTimestamp, and BsonBinaryData. (#14800) + # 11.9.0 - [fixed] Fixed memory leak in `Query.whereField()`. (#13978) From 2333e8d77971d0576fbaf0a8fb4df8a4a8c4cac5 Mon Sep 17 00:00:00 2001 From: Ehsan Nasiri Date: Thu, 1 May 2025 15:28:36 -0700 Subject: [PATCH 05/16] Fix the copyright header. 
--- Firestore/Swift/Source/Codable/BsonBinaryData+Codable.swift | 2 +- Firestore/Swift/Source/Codable/BsonObjectId+Codable.swift | 2 +- Firestore/Swift/Source/Codable/BsonTimestamp+Codable.swift | 2 +- Firestore/Swift/Source/Codable/Int32Value+Codable.swift | 2 +- Firestore/Swift/Source/Codable/MaxKey+Codable.swift | 2 +- Firestore/Swift/Source/Codable/MinKey+Codable.swift | 2 +- Firestore/Swift/Source/Codable/RegexValue+Codable.swift | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/Firestore/Swift/Source/Codable/BsonBinaryData+Codable.swift b/Firestore/Swift/Source/Codable/BsonBinaryData+Codable.swift index ad0114e3180..5ebb6e04554 100644 --- a/Firestore/Swift/Source/Codable/BsonBinaryData+Codable.swift +++ b/Firestore/Swift/Source/Codable/BsonBinaryData+Codable.swift @@ -1,5 +1,5 @@ /* - * Copyright 2025 Google + * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/Firestore/Swift/Source/Codable/BsonObjectId+Codable.swift b/Firestore/Swift/Source/Codable/BsonObjectId+Codable.swift index 3669b6e9c1b..aa2901582d9 100644 --- a/Firestore/Swift/Source/Codable/BsonObjectId+Codable.swift +++ b/Firestore/Swift/Source/Codable/BsonObjectId+Codable.swift @@ -1,5 +1,5 @@ /* - * Copyright 2025 Google + * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/Firestore/Swift/Source/Codable/BsonTimestamp+Codable.swift b/Firestore/Swift/Source/Codable/BsonTimestamp+Codable.swift index e1fa1f1695d..73941bc7ae1 100644 --- a/Firestore/Swift/Source/Codable/BsonTimestamp+Codable.swift +++ b/Firestore/Swift/Source/Codable/BsonTimestamp+Codable.swift @@ -1,5 +1,5 @@ /* - * Copyright 2025 Google + * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/Firestore/Swift/Source/Codable/Int32Value+Codable.swift b/Firestore/Swift/Source/Codable/Int32Value+Codable.swift index 170f4b30c29..798cdcb9efb 100644 --- a/Firestore/Swift/Source/Codable/Int32Value+Codable.swift +++ b/Firestore/Swift/Source/Codable/Int32Value+Codable.swift @@ -1,5 +1,5 @@ /* - * Copyright 2025 Google + * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/Firestore/Swift/Source/Codable/MaxKey+Codable.swift b/Firestore/Swift/Source/Codable/MaxKey+Codable.swift index f164bd53e37..cf3e840903c 100644 --- a/Firestore/Swift/Source/Codable/MaxKey+Codable.swift +++ b/Firestore/Swift/Source/Codable/MaxKey+Codable.swift @@ -1,5 +1,5 @@ /* - * Copyright 2025 Google + * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/Firestore/Swift/Source/Codable/MinKey+Codable.swift b/Firestore/Swift/Source/Codable/MinKey+Codable.swift index efb4b1da5eb..9f388eceadb 100644 --- a/Firestore/Swift/Source/Codable/MinKey+Codable.swift +++ b/Firestore/Swift/Source/Codable/MinKey+Codable.swift @@ -1,5 +1,5 @@ /* - * Copyright 2025 Google + * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
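Editorial note, not part of the patch: the Codable shims touched by this commit are what let the new types appear directly in Codable models. A minimal sketch follows, assuming a made-up Ticket model; only the Codable conformances for BsonObjectId, Int32Value, and RegexValue come from this patch, while Firestore.Encoder and Firestore.Decoder are the pre-existing Codable entry points.

import FirebaseFirestore

// Hypothetical model; the type and field names are illustrative, not from the patch.
struct Ticket: Codable {
  var id: BsonObjectId
  var priority: Int32Value
  var titleFilter: RegexValue
}

// Round-trips a Ticket through the Firestore Codable machinery using the new conformances.
func roundTrip(_ ticket: Ticket) throws -> Ticket {
  let encoded: [String: Any] = try Firestore.Encoder().encode(ticket)
  return try Firestore.Decoder().decode(Ticket.self, from: encoded)
}
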
diff --git a/Firestore/Swift/Source/Codable/RegexValue+Codable.swift b/Firestore/Swift/Source/Codable/RegexValue+Codable.swift index db145ea0bfd..fc750f49495 100644 --- a/Firestore/Swift/Source/Codable/RegexValue+Codable.swift +++ b/Firestore/Swift/Source/Codable/RegexValue+Codable.swift @@ -1,5 +1,5 @@ /* - * Copyright 2025 Google + * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. From 927106718504a9190ea19886b272bee4867d1958 Mon Sep 17 00:00:00 2001 From: Ehsan Nasiri Date: Thu, 1 May 2025 15:42:06 -0700 Subject: [PATCH 06/16] Remove remaining FieldValue factory method usages. --- .../Example/Tests/API/FIRFieldValueTests.mm | 40 +------------------ .../src/index/firestore_index_value_writer.cc | 1 + 2 files changed, 2 insertions(+), 39 deletions(-) diff --git a/Firestore/Example/Tests/API/FIRFieldValueTests.mm b/Firestore/Example/Tests/API/FIRFieldValueTests.mm index 0849538a94e..a9b288a0e46 100644 --- a/Firestore/Example/Tests/API/FIRFieldValueTests.mm +++ b/Firestore/Example/Tests/API/FIRFieldValueTests.mm @@ -14,14 +14,8 @@ * limitations under the License. */ -#import -#import -#import #import -#import -#import #import -#import "Firestore/Example/Tests/Util/FSTHelpers.h" #import @@ -48,38 +42,6 @@ - (void)testEquals { XCTAssertNotEqual([deleted hash], [serverTimestamp hash]); } -- (void)testCanCreateRegexValue { - FIRRegexValue *regex = [FIRFieldValue regexWithPattern:@"^foo" options:@"x"]; - XCTAssertEqual(regex.pattern, @"^foo"); - XCTAssertEqual(regex.options, @"x"); -} - -- (void)testCanCreateInt32Value { - FIRInt32Value *int1 = [FIRFieldValue int32WithValue:1234]; - XCTAssertEqual(int1.value, 1234); - - FIRInt32Value *int2 = [FIRFieldValue int32WithValue:-1234]; - XCTAssertEqual(int2.value, -1234); -} - -- (void)testCanCreateBsonObjectId { - FIRBsonObjectId *objectId = [FIRFieldValue bsonObjectIdWithValue:@"foo"]; - XCTAssertEqual(objectId.value, @"foo"); -} - -- (void)testCanCreateBsonTimestamp { - FIRBsonTimestamp *timestamp = [FIRFieldValue bsonTimestampWithSeconds:123 increment:456]; - XCTAssertEqual(timestamp.seconds, 123U); - XCTAssertEqual(timestamp.increment, 456U); -} - -- (void)testCanCreateBsonBinaryData { - FIRBsonBinaryData *binData = [FIRFieldValue bsonBinaryDataWithSubtype:128 - data:FSTTestData(1, 2, 3, -1)]; - XCTAssertEqual(binData.subtype, 128); - XCTAssertTrue([binData.data isEqualToData:FSTTestData(1, 2, 3, -1)]); -} - @end -NS_ASSUME_NONNULL_END +NS_ASSUME_NONNULL_END \ No newline at end of file diff --git a/Firestore/core/src/index/firestore_index_value_writer.cc b/Firestore/core/src/index/firestore_index_value_writer.cc index 9ad2f614bf2..bfeaa4dfbef 100644 --- a/Firestore/core/src/index/firestore_index_value_writer.cc +++ b/Firestore/core/src/index/firestore_index_value_writer.cc @@ -19,6 +19,7 @@ #include #include #include +#include #include "Firestore/core/src/model/resource_path.h" #include "Firestore/core/src/model/value_util.h" From 5fd1c1c46c7df2c3380ba6ba40240b75ad1aa127 Mon Sep 17 00:00:00 2001 From: Ehsan Nasiri Date: Fri, 9 May 2025 15:44:31 -0700 Subject: [PATCH 07/16] Fix merge conflicts. 
--- .../BsonTypesIntegrationTests.swift | 73 +++++++++++++------ 1 file changed, 51 insertions(+), 22 deletions(-) diff --git a/Firestore/Swift/Tests/Integration/BsonTypesIntegrationTests.swift b/Firestore/Swift/Tests/Integration/BsonTypesIntegrationTests.swift index 19213756779..371721cd868 100644 --- a/Firestore/Swift/Tests/Integration/BsonTypesIntegrationTests.swift +++ b/Firestore/Swift/Tests/Integration/BsonTypesIntegrationTests.swift @@ -77,6 +77,7 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { // 2. Performing the given query using source=server. // 3. Performing the given query using source=cache. func assertSdkQueryResultsConsistentWithBackend(_ documentDataMap: [String: [String: Any]], + collection: CollectionReference, query: Query, expectedResult: [String]) async throws { let watchSnapshot = try await Future() { promise in @@ -97,7 +98,7 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { description: "snapshot listener" ) - checkOnlineAndOfflineQuery(query, matchesResult: expectedResult) + checkOnlineAndOfflineCollection(collection, query: query, matchesResult: expectedResult) } func testCanWriteAndReadBsonTypes() async throws { @@ -216,6 +217,7 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { try await assertSdkQueryResultsConsistentWithBackend( testDocs, + collection: collection, query: query, expectedResult: ["c", "b"] ) @@ -229,6 +231,7 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { .order(by: "key", descending: true) try await assertSdkQueryResultsConsistentWithBackend( testDocs, + collection: collection, query: query, expectedResult: ["b", "a"] ) @@ -249,6 +252,7 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { .order(by: "key", descending: true) try await assertSdkQueryResultsConsistentWithBackend( testDocs, + collection: collection, query: query, expectedResult: ["c", "b"] ) @@ -258,6 +262,7 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { .order(by: "key", descending: true) try await assertSdkQueryResultsConsistentWithBackend( testDocs, + collection: collection, query: query, expectedResult: ["c", "a"] ) @@ -278,6 +283,7 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { .order(by: "key", descending: true) try await assertSdkQueryResultsConsistentWithBackend( testDocs, + collection: collection, query: query, expectedResult: ["c", "b"] ) @@ -287,6 +293,7 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { .order(by: "key", descending: true) try await assertSdkQueryResultsConsistentWithBackend( testDocs, + collection: collection, query: query, expectedResult: ["c", "b"] ) @@ -310,6 +317,7 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { .order(by: "key", descending: true) try await assertSdkQueryResultsConsistentWithBackend( testDocs, + collection: collection, query: query, expectedResult: ["c", "b"] ) @@ -326,6 +334,7 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { .order(by: "key", descending: true) try await assertSdkQueryResultsConsistentWithBackend( testDocs, + collection: collection, query: query, expectedResult: ["b", "a"] ) @@ -350,6 +359,7 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { ).order(by: "key", descending: true) try await assertSdkQueryResultsConsistentWithBackend( testDocs, + collection: collection, query: query, expectedResult: ["c", "a"] ) @@ -372,6 +382,7 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { .order(by: "key", descending: true) try await assertSdkQueryResultsConsistentWithBackend( testDocs, + 
collection: collection, query: query, expectedResult: ["b", "a"] ) @@ -383,6 +394,7 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { // .order(by: "key") // try await assertSdkQueryResultsConsistentWithBackend( // testDocs, + // collection: collection, // query: query2, // expectedResult: ["d", "e"] // ) @@ -392,6 +404,7 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { .order(by: "key") try await assertSdkQueryResultsConsistentWithBackend( testDocs, + collection: collection, query: query, expectedResult: ["a", "b"] ) @@ -401,6 +414,7 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { .order(by: "key") try await assertSdkQueryResultsConsistentWithBackend( testDocs, + collection: collection, query: query, expectedResult: ["a", "b"] ) @@ -410,6 +424,7 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { .order(by: "key") try await assertSdkQueryResultsConsistentWithBackend( testDocs, + collection: collection, query: query, expectedResult: [] ) @@ -419,6 +434,7 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { .order(by: "key") try await assertSdkQueryResultsConsistentWithBackend( testDocs, + collection: collection, query: query, expectedResult: [] ) @@ -428,6 +444,7 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { .order(by: "key") try await assertSdkQueryResultsConsistentWithBackend( testDocs, + collection: collection, query: query, expectedResult: [] ) @@ -450,6 +467,7 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { .order(by: "key") try await assertSdkQueryResultsConsistentWithBackend( testDocs, + collection: collection, query: query, expectedResult: ["c", "d"] ) @@ -461,6 +479,7 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { // .order(by: "key") // try await assertSdkQueryResultsConsistentWithBackend( // testDocs, + // collection: collection, // query: query, // expectedResult: ["a", "b"] // ) @@ -470,6 +489,7 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { .order(by: "key") try await assertSdkQueryResultsConsistentWithBackend( testDocs, + collection: collection, query: query, expectedResult: ["c", "d"] ) @@ -479,6 +499,7 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { .order(by: "key") try await assertSdkQueryResultsConsistentWithBackend( testDocs, + collection: collection, query: query, expectedResult: ["c", "d"] ) @@ -488,6 +509,7 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { .order(by: "key") try await assertSdkQueryResultsConsistentWithBackend( testDocs, + collection: collection, query: query, expectedResult: [] ) @@ -497,6 +519,7 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { .order(by: "key") try await assertSdkQueryResultsConsistentWithBackend( testDocs, + collection: collection, query: query, expectedResult: [] ) @@ -506,6 +529,7 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { .order(by: "key") try await assertSdkQueryResultsConsistentWithBackend( testDocs, + collection: collection, query: query, expectedResult: [] ) @@ -528,6 +552,7 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { .order(by: "key") try await assertSdkQueryResultsConsistentWithBackend( testDocs, + collection: collection, query: query, expectedResult: ["b", "c"] ) @@ -537,6 +562,7 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { .order(by: "key") try await assertSdkQueryResultsConsistentWithBackend( testDocs, + collection: collection, query: query, expectedResult: ["a", "d", "e"] ) @@ -571,27 +597,29 @@ class 
BsonTypesIntegrationTests: FSTIntegrationTestCase { await setDocumentData(testDocs, toCollection: collection) let query = collection.order(by: "key", descending: true) - try await assertSdkQueryResultsConsistentWithBackend(testDocs, query: query, expectedResult: [ - "maxKey2", - "maxKey1", - "regex3", - "regex1", - "regex2", - "bsonObjectId2", - "bsonObjectId1", - "bsonObjectId3", - "bsonBinary3", - "bsonBinary2", - "bsonBinary1", - "bsonTimestamp1", - "bsonTimestamp2", - "bsonTimestamp3", - "int32Value2", - "int32Value3", - "int32Value1", - "minKey2", - "minKey1", - ]) + try await assertSdkQueryResultsConsistentWithBackend(testDocs, + collection: collection, + query: query, expectedResult: [ + "maxKey2", + "maxKey1", + "regex3", + "regex1", + "regex2", + "bsonObjectId2", + "bsonObjectId1", + "bsonObjectId3", + "bsonBinary3", + "bsonBinary2", + "bsonBinary1", + "bsonTimestamp1", + "bsonTimestamp2", + "bsonTimestamp3", + "int32Value2", + "int32Value3", + "int32Value1", + "minKey2", + "minKey1", + ]) } func testCanOrderValuesOfDifferentTypes() async throws { @@ -652,6 +680,7 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { try await assertSdkQueryResultsConsistentWithBackend( testDocs, + collection: collection, query: orderedQuery, expectedResult: expectedOrder ) From fcb6ebb5fa082d032a2026290f0907cd1332f91f Mon Sep 17 00:00:00 2001 From: Ehsan Nasiri Date: Fri, 9 May 2025 16:31:06 -0700 Subject: [PATCH 08/16] Address API proposal feedback. --- .../Tests/API/FIRBsonTypesUnitTests.mm | 16 ++--- Firestore/Source/API/FIRFieldValue.mm | 4 +- Firestore/Source/API/FIRMaxKey.mm | 2 +- Firestore/Source/API/FIRMinKey.mm | 2 +- Firestore/Source/API/FSTUserDataWriter.mm | 4 +- .../FirebaseFirestore/FIRBsonBinaryData.h | 3 +- .../FirebaseFirestore/FIRBsonObjectId.h | 3 +- .../FirebaseFirestore/FIRBsonTimestamp.h | 3 +- .../Public/FirebaseFirestore/FIRInt32Value.h | 3 +- .../Public/FirebaseFirestore/FIRMaxKey.h | 3 +- .../Public/FirebaseFirestore/FIRMinKey.h | 5 +- .../Public/FirebaseFirestore/FIRRegexValue.h | 3 +- .../BsonTypesIntegrationTests.swift | 68 +++++++++---------- .../Integration/CodableIntegrationTests.swift | 8 +-- .../SnapshotListenerSourceTests.swift | 4 +- .../Swift/Tests/Integration/TypeTest.swift | 24 +++---- 16 files changed, 81 insertions(+), 74 deletions(-) diff --git a/Firestore/Example/Tests/API/FIRBsonTypesUnitTests.mm b/Firestore/Example/Tests/API/FIRBsonTypesUnitTests.mm index 0aeee4e2969..ff1ce7561d7 100644 --- a/Firestore/Example/Tests/API/FIRBsonTypesUnitTests.mm +++ b/Firestore/Example/Tests/API/FIRBsonTypesUnitTests.mm @@ -33,15 +33,15 @@ @interface FIRBsonTypesUnitTests : XCTestCase @implementation FIRBsonTypesUnitTests - (void)testMinKeySingleton { - FIRMinKey *minKey1 = [FIRMinKey instance]; - FIRMinKey *minKey2 = [FIRMinKey instance]; + FIRMinKey *minKey1 = [FIRMinKey shared]; + FIRMinKey *minKey2 = [FIRMinKey shared]; XCTAssertEqual(minKey1, minKey2); XCTAssertTrue([minKey1 isEqual:minKey2]); } - (void)testMaxKeySingleton { - FIRMaxKey *maxKey1 = [FIRMaxKey instance]; - FIRMaxKey *maxKey2 = [FIRMaxKey instance]; + FIRMaxKey *maxKey1 = [FIRMaxKey shared]; + FIRMaxKey *maxKey2 = [FIRMaxKey shared]; XCTAssertEqual(maxKey1, maxKey2); XCTAssertTrue([maxKey1 isEqual:maxKey2]); } @@ -128,15 +128,15 @@ - (void)testCreateAndReadAndCompareBsonBinaryData { } - (void)testFieldValueMinKey { - FIRMinKey *minKey1 = [FIRMinKey instance]; - FIRMinKey *minKey2 = [FIRMinKey instance]; + FIRMinKey *minKey1 = [FIRMinKey shared]; + FIRMinKey *minKey2 = [FIRMinKey shared]; 
XCTAssertEqual(minKey1, minKey2); XCTAssertTrue([minKey1 isEqual:minKey2]); } - (void)testFieldValueMaxKey { - FIRMaxKey *maxKey1 = [FIRMaxKey instance]; - FIRMaxKey *maxKey2 = [FIRMaxKey instance]; + FIRMaxKey *maxKey1 = [FIRMaxKey shared]; + FIRMaxKey *maxKey2 = [FIRMaxKey shared]; XCTAssertEqual(maxKey1, maxKey2); XCTAssertTrue([maxKey1 isEqual:maxKey2]); } diff --git a/Firestore/Source/API/FIRFieldValue.mm b/Firestore/Source/API/FIRFieldValue.mm index 5dfe298e859..25a60517744 100644 --- a/Firestore/Source/API/FIRFieldValue.mm +++ b/Firestore/Source/API/FIRFieldValue.mm @@ -189,11 +189,11 @@ + (nonnull FIRVectorValue *)vectorWithArray:(nonnull NSArray *)array } + (nonnull FIRMinKey *)minKey { - return [FIRMinKey instance]; + return [FIRMinKey shared]; } + (nonnull FIRMaxKey *)maxKey { - return [FIRMaxKey instance]; + return [FIRMaxKey shared]; } + (nonnull FIRRegexValue *)regexWithPattern:(nonnull NSString *)pattern diff --git a/Firestore/Source/API/FIRMaxKey.mm b/Firestore/Source/API/FIRMaxKey.mm index 5eabc9aa731..84c970b9bd7 100644 --- a/Firestore/Source/API/FIRMaxKey.mm +++ b/Firestore/Source/API/FIRMaxKey.mm @@ -22,7 +22,7 @@ @implementation FIRMaxKey static FIRMaxKey *sharedInstance = nil; static dispatch_once_t onceToken; -+ (FIRMaxKey *)instance { ++ (FIRMaxKey *)shared { dispatch_once(&onceToken, ^{ sharedInstance = [[self alloc] init]; }); diff --git a/Firestore/Source/API/FIRMinKey.mm b/Firestore/Source/API/FIRMinKey.mm index 3b662031558..ee0489bfdbc 100644 --- a/Firestore/Source/API/FIRMinKey.mm +++ b/Firestore/Source/API/FIRMinKey.mm @@ -22,7 +22,7 @@ @implementation FIRMinKey static FIRMinKey *sharedInstance = nil; static dispatch_once_t onceToken; -+ (FIRMinKey *)instance { ++ (FIRMinKey *)shared { dispatch_once(&onceToken, ^{ sharedInstance = [[self alloc] init]; }); diff --git a/Firestore/Source/API/FSTUserDataWriter.mm b/Firestore/Source/API/FSTUserDataWriter.mm index 2b0c4d31cab..44cd90e079a 100644 --- a/Firestore/Source/API/FSTUserDataWriter.mm +++ b/Firestore/Source/API/FSTUserDataWriter.mm @@ -122,9 +122,9 @@ - (id)convertedValue:(const google_firestore_v1_Value &)value { return MakeFIRGeoPoint( GeoPoint(value.geo_point_value.latitude, value.geo_point_value.longitude)); case TypeOrder::kMinKey: - return [FIRMinKey instance]; + return [FIRMinKey shared]; case TypeOrder::kMaxKey: - return [FIRMaxKey instance]; + return [FIRMaxKey shared]; case TypeOrder::kRegex: return [self convertedRegex:value.map_value]; case TypeOrder::kBsonObjectId: diff --git a/Firestore/Source/Public/FirebaseFirestore/FIRBsonBinaryData.h b/Firestore/Source/Public/FirebaseFirestore/FIRBsonBinaryData.h index 55be1d51f52..dd4f25475c5 100644 --- a/Firestore/Source/Public/FirebaseFirestore/FIRBsonBinaryData.h +++ b/Firestore/Source/Public/FirebaseFirestore/FIRBsonBinaryData.h @@ -23,7 +23,8 @@ NS_ASSUME_NONNULL_BEGIN */ NS_SWIFT_SENDABLE NS_SWIFT_NAME(BsonBinaryData) -@interface FIRBsonBinaryData : NSObject +__attribute__((objc_subclassing_restricted)) +@interface FIRBsonBinaryData : NSObject /** An 8-bit unsigned integer denoting the subtype of the data. 
*/ @property(atomic, readonly) uint8_t subtype; diff --git a/Firestore/Source/Public/FirebaseFirestore/FIRBsonObjectId.h b/Firestore/Source/Public/FirebaseFirestore/FIRBsonObjectId.h index 708a40285b5..f674ffdd8ba 100644 --- a/Firestore/Source/Public/FirebaseFirestore/FIRBsonObjectId.h +++ b/Firestore/Source/Public/FirebaseFirestore/FIRBsonObjectId.h @@ -23,7 +23,8 @@ NS_ASSUME_NONNULL_BEGIN */ NS_SWIFT_SENDABLE NS_SWIFT_NAME(BsonObjectId) -@interface FIRBsonObjectId : NSObject +__attribute__((objc_subclassing_restricted)) +@interface FIRBsonObjectId : NSObject /** The 24-character hex string representation of the ObjectId. */ @property(atomic, copy, readonly) NSString *value; diff --git a/Firestore/Source/Public/FirebaseFirestore/FIRBsonTimestamp.h b/Firestore/Source/Public/FirebaseFirestore/FIRBsonTimestamp.h index f281528da04..2f631644e04 100644 --- a/Firestore/Source/Public/FirebaseFirestore/FIRBsonTimestamp.h +++ b/Firestore/Source/Public/FirebaseFirestore/FIRBsonTimestamp.h @@ -24,7 +24,8 @@ NS_ASSUME_NONNULL_BEGIN */ NS_SWIFT_SENDABLE NS_SWIFT_NAME(BsonTimestamp) -@interface FIRBsonTimestamp : NSObject +__attribute__((objc_subclassing_restricted)) +@interface FIRBsonTimestamp : NSObject /** The underlying unsigned 32-bit integer for seconds */ @property(atomic, readonly) uint32_t seconds; diff --git a/Firestore/Source/Public/FirebaseFirestore/FIRInt32Value.h b/Firestore/Source/Public/FirebaseFirestore/FIRInt32Value.h index a8bfb3b08d7..1c019f238c8 100644 --- a/Firestore/Source/Public/FirebaseFirestore/FIRInt32Value.h +++ b/Firestore/Source/Public/FirebaseFirestore/FIRInt32Value.h @@ -23,7 +23,8 @@ NS_ASSUME_NONNULL_BEGIN */ NS_SWIFT_SENDABLE NS_SWIFT_NAME(Int32Value) -@interface FIRInt32Value : NSObject +__attribute__((objc_subclassing_restricted)) +@interface FIRInt32Value : NSObject /** The 32-bit integer value. */ @property(atomic, assign, readonly) int32_t value; diff --git a/Firestore/Source/Public/FirebaseFirestore/FIRMaxKey.h b/Firestore/Source/Public/FirebaseFirestore/FIRMaxKey.h index e33c73ddb6f..d6c3c0e3385 100644 --- a/Firestore/Source/Public/FirebaseFirestore/FIRMaxKey.h +++ b/Firestore/Source/Public/FirebaseFirestore/FIRMaxKey.h @@ -23,10 +23,11 @@ NS_ASSUME_NONNULL_BEGIN */ NS_SWIFT_SENDABLE NS_SWIFT_NAME(MaxKey) +__attribute__((objc_subclassing_restricted)) @interface FIRMaxKey : NSObject /** Returns the only instance of MaxKey. */ -+ (FIRMaxKey *)instance; +@property(class, readonly) FIRMaxKey *shared; /** Returns true if the given object is equal to this, and false otherwise. */ - (BOOL)isEqual:(id)object; diff --git a/Firestore/Source/Public/FirebaseFirestore/FIRMinKey.h b/Firestore/Source/Public/FirebaseFirestore/FIRMinKey.h index a6b8540dde8..d82520574b6 100644 --- a/Firestore/Source/Public/FirebaseFirestore/FIRMinKey.h +++ b/Firestore/Source/Public/FirebaseFirestore/FIRMinKey.h @@ -23,10 +23,11 @@ NS_ASSUME_NONNULL_BEGIN */ NS_SWIFT_SENDABLE NS_SWIFT_NAME(MinKey) +__attribute__((objc_subclassing_restricted)) @interface FIRMinKey : NSObject -/** Returns the only instance of MinKey. */ -+ (FIRMinKey *)instance; +/** The only instance of MinKey. */ +@property(class, readonly) FIRMinKey *shared; /** Returns true if the given object is equal to this, and false otherwise. 
*/ - (BOOL)isEqual:(id)object; diff --git a/Firestore/Source/Public/FirebaseFirestore/FIRRegexValue.h b/Firestore/Source/Public/FirebaseFirestore/FIRRegexValue.h index 5ce99f7c35a..fcfb6608de7 100644 --- a/Firestore/Source/Public/FirebaseFirestore/FIRRegexValue.h +++ b/Firestore/Source/Public/FirebaseFirestore/FIRRegexValue.h @@ -23,7 +23,8 @@ NS_ASSUME_NONNULL_BEGIN */ NS_SWIFT_SENDABLE NS_SWIFT_NAME(RegexValue) -@interface FIRRegexValue : NSObject +__attribute__((objc_subclassing_restricted)) +@interface FIRRegexValue : NSObject /** The regular expression pattern */ @property(atomic, copy, readonly) NSString *pattern; diff --git a/Firestore/Swift/Tests/Integration/BsonTypesIntegrationTests.swift b/Firestore/Swift/Tests/Integration/BsonTypesIntegrationTests.swift index 371721cd868..910514078dd 100644 --- a/Firestore/Swift/Tests/Integration/BsonTypesIntegrationTests.swift +++ b/Firestore/Swift/Tests/Integration/BsonTypesIntegrationTests.swift @@ -107,8 +107,8 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { "binary": BsonBinaryData(subtype: 1, data: Data([1, 2, 3])), "objectId": BsonObjectId("507f191e810c19729de860ea"), "int32": Int32Value(1), - "min": MinKey.instance(), - "max": MaxKey.instance(), + "min": MinKey.shared, + "max": MaxKey.shared, "regex": RegexValue(pattern: "^foo", options: "i"), ]) @@ -129,11 +129,11 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { ) XCTAssertEqual( snapshot.get("min") as? MinKey, - MinKey.instance() + MinKey.shared ) XCTAssertEqual( snapshot.get("max") as? MaxKey, - MaxKey.instance() + MaxKey.shared ) XCTAssertEqual( snapshot.get("binary") as? BsonBinaryData, @@ -160,8 +160,8 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { "binary": BsonBinaryData(subtype: 1, data: Data([1, 2, 3])), "objectId": BsonObjectId("507f191e810c19729de860ea"), "int32": Int32Value(1), - "min": MinKey.instance(), - "max": MaxKey.instance(), + "min": MinKey.shared, + "max": MaxKey.shared, "regex": RegexValue(pattern: "^foo", options: "i"), ]) ref.updateData([ @@ -181,11 +181,11 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { ) XCTAssertEqual( snapshot.get("min") as? MinKey, - MinKey.instance() + MinKey.shared ) XCTAssertEqual( snapshot.get("max") as? MaxKey, - MaxKey.instance() + MaxKey.shared ) XCTAssertEqual( snapshot.get("binary") as? BsonBinaryData, @@ -367,18 +367,18 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { func testCanFilterAndOrderMinKeyValues() async throws { let testDocs: [String: [String: Any]] = [ - "a": ["key": MinKey.instance()], - "b": ["key": MinKey.instance()], + "a": ["key": MinKey.shared], + "b": ["key": MinKey.shared], "c": ["key": NSNull()], "d": ["key": 1], - "e": ["key": MaxKey.instance()], + "e": ["key": MaxKey.shared], ] let collection = collectionRef() await setDocumentData(testDocs, toCollection: collection) var query = collection - .whereField("key", isEqualTo: MinKey.instance()) + .whereField("key", isEqualTo: MinKey.shared) .order(by: "key", descending: true) try await assertSdkQueryResultsConsistentWithBackend( testDocs, @@ -390,7 +390,7 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { // TODO(b/410032145): This currently fails, and is fixed by // PR #14704. Uncomment this when moving to the main branch. 
// var query2 = collection - // .whereField("key", isNotEqualTo: MinKey.instance()) + // .whereField("key", isNotEqualTo: MinKey.shared)) // .order(by: "key") // try await assertSdkQueryResultsConsistentWithBackend( // testDocs, @@ -400,7 +400,7 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { // ) query = collection - .whereField("key", isGreaterThanOrEqualTo: MinKey.instance()) + .whereField("key", isGreaterThanOrEqualTo: MinKey.shared) .order(by: "key") try await assertSdkQueryResultsConsistentWithBackend( testDocs, @@ -410,7 +410,7 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { ) query = collection - .whereField("key", isLessThanOrEqualTo: MinKey.instance()) + .whereField("key", isLessThanOrEqualTo: MinKey.shared) .order(by: "key") try await assertSdkQueryResultsConsistentWithBackend( testDocs, @@ -420,7 +420,7 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { ) query = collection - .whereField("key", isGreaterThan: MinKey.instance()) + .whereField("key", isGreaterThan: MinKey.shared) .order(by: "key") try await assertSdkQueryResultsConsistentWithBackend( testDocs, @@ -430,7 +430,7 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { ) query = collection - .whereField("key", isLessThan: MinKey.instance()) + .whereField("key", isLessThan: MinKey.shared) .order(by: "key") try await assertSdkQueryResultsConsistentWithBackend( testDocs, @@ -452,10 +452,10 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { func testCanFilterAndOrderMaxKeyValues() async throws { let testDocs: [String: [String: Any]] = [ - "a": ["key": MinKey.instance()], + "a": ["key": MinKey.shared], "b": ["key": 1], - "c": ["key": MaxKey.instance()], - "d": ["key": MaxKey.instance()], + "c": ["key": MaxKey.shared], + "d": ["key": MaxKey.shared], "e": ["key": NSNull()], ] @@ -463,7 +463,7 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { await setDocumentData(testDocs, toCollection: collection) var query = collection - .whereField("key", isEqualTo: MaxKey.instance()) + .whereField("key", isEqualTo: MaxKey.shared) .order(by: "key") try await assertSdkQueryResultsConsistentWithBackend( testDocs, @@ -475,7 +475,7 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { // TODO(b/410032145): This currently fails, and is fixed by // PR #14704. Uncomment this when moving to the main branch. 
// query = collection - // .whereField("key", isNotEqualTo: MaxKey.instance()) + // .whereField("key", isNotEqualTo: MaxKey.shared)) // .order(by: "key") // try await assertSdkQueryResultsConsistentWithBackend( // testDocs, @@ -485,7 +485,7 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { // ) query = collection - .whereField("key", isGreaterThanOrEqualTo: MaxKey.instance()) + .whereField("key", isGreaterThanOrEqualTo: MaxKey.shared) .order(by: "key") try await assertSdkQueryResultsConsistentWithBackend( testDocs, @@ -495,7 +495,7 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { ) query = collection - .whereField("key", isLessThanOrEqualTo: MaxKey.instance()) + .whereField("key", isLessThanOrEqualTo: MaxKey.shared) .order(by: "key") try await assertSdkQueryResultsConsistentWithBackend( testDocs, @@ -505,7 +505,7 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { ) query = collection - .whereField("key", isGreaterThan: MaxKey.instance()) + .whereField("key", isGreaterThan: MaxKey.shared) .order(by: "key") try await assertSdkQueryResultsConsistentWithBackend( testDocs, @@ -515,7 +515,7 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { ) query = collection - .whereField("key", isLessThan: MaxKey.instance()) + .whereField("key", isLessThan: MaxKey.shared) .order(by: "key") try await assertSdkQueryResultsConsistentWithBackend( testDocs, @@ -537,11 +537,11 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { func testCanHandleNullWithBsonValues() async throws { let testDocs: [String: [String: Any]] = [ - "a": ["key": MinKey.instance()], + "a": ["key": MinKey.shared], "b": ["key": NSNull()], "c": ["key": NSNull()], "d": ["key": 1], - "e": ["key": MaxKey.instance()], + "e": ["key": MaxKey.shared], ] let collection = collectionRef() @@ -587,10 +587,10 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { "int32Value1": ["key": Int32Value(-1)], "int32Value2": ["key": Int32Value(1)], "int32Value3": ["key": Int32Value(0)], - "minKey1": ["key": MinKey.instance()], - "minKey2": ["key": MinKey.instance()], - "maxKey1": ["key": MaxKey.instance()], - "maxKey2": ["key": MaxKey.instance()], + "minKey1": ["key": MinKey.shared], + "minKey2": ["key": MinKey.shared], + "maxKey1": ["key": MaxKey.shared], + "maxKey2": ["key": MaxKey.shared], ] let collection = collectionRef() @@ -628,7 +628,7 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { let collection = collectionRef() let testDocs: [String: [String: Any]] = [ "nullValue": ["key": NSNull()], - "minValue": ["key": MinKey.instance()], + "minValue": ["key": MinKey.shared], "booleanValue": ["key": true], "nanValue": ["key": Double.nan], "int32Value": ["key": Int32Value(1)], @@ -646,7 +646,7 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { "arrayValue": ["key": [1, 2]], "vectorValue": ["key": VectorValue([1.0, 2.0])], "objectValue": ["key": ["a": 1]], - "maxValue": ["key": MaxKey.instance()], + "maxValue": ["key": MaxKey.shared], ] for (docId, data) in testDocs { diff --git a/Firestore/Swift/Tests/Integration/CodableIntegrationTests.swift b/Firestore/Swift/Tests/Integration/CodableIntegrationTests.swift index 75a66e94852..fe9e95466ad 100644 --- a/Firestore/Swift/Tests/Integration/CodableIntegrationTests.swift +++ b/Firestore/Swift/Tests/Integration/CodableIntegrationTests.swift @@ -128,8 +128,8 @@ class CodableIntegrationTests: FSTIntegrationTestCase { vector: FieldValue.vector([0.7, 0.6]), regex: RegexValue(pattern: "^foo", options: "i"), int32: Int32Value(1), - minKey: 
MinKey.instance(), - maxKey: MaxKey.instance(), + minKey: MinKey.shared, + maxKey: MaxKey.shared, bsonOjectId: BsonObjectId("507f191e810c19729de860ec"), bsonTimestamp: BsonTimestamp(seconds: 123, increment: 456), bsonBinaryData: BsonBinaryData(subtype: 128, data: Data([1, 2]))) @@ -233,11 +233,11 @@ class CodableIntegrationTests: FSTIntegrationTestCase { } func testMinKey() throws { - try assertCanWriteAndReadCodableValueWithAllFlavors(value: MinKey.instance()) + try assertCanWriteAndReadCodableValueWithAllFlavors(value: MinKey.shared) } func testMaxKey() throws { - try assertCanWriteAndReadCodableValueWithAllFlavors(value: MaxKey.instance()) + try assertCanWriteAndReadCodableValueWithAllFlavors(value: MaxKey.shared) } func testRegexValue() throws { diff --git a/Firestore/Swift/Tests/Integration/SnapshotListenerSourceTests.swift b/Firestore/Swift/Tests/Integration/SnapshotListenerSourceTests.swift index 494423d8599..1eae723a4c6 100644 --- a/Firestore/Swift/Tests/Integration/SnapshotListenerSourceTests.swift +++ b/Firestore/Swift/Tests/Integration/SnapshotListenerSourceTests.swift @@ -757,8 +757,8 @@ class SnapshotListenerSourceTests: FSTIntegrationTestCase { func testCanListenToDocumentsWithBsonTypes() throws { let collection = collectionRef() let testData = [ - "a": ["key": MaxKey.instance()], - "b": ["key": MinKey.instance()], + "a": ["key": MaxKey.shared], + "b": ["key": MinKey.shared], "c": ["key": BsonTimestamp(seconds: 1, increment: 2)], "d": ["key": BsonObjectId("507f191e810c19729de860ea")], "e": ["key": BsonBinaryData(subtype: 1, data: Data([1, 2, 3]))], diff --git a/Firestore/Swift/Tests/Integration/TypeTest.swift b/Firestore/Swift/Tests/Integration/TypeTest.swift index 32d1aac96d5..e31edfce737 100644 --- a/Firestore/Swift/Tests/Integration/TypeTest.swift +++ b/Firestore/Swift/Tests/Integration/TypeTest.swift @@ -97,15 +97,15 @@ class TypeTest: FSTIntegrationTestCase { */ func testMinKeyEquality() { - let k1 = MinKey.instance() - let k2 = MinKey.instance() + let k1 = MinKey.shared + let k2 = MinKey.shared XCTAssertTrue(k1 == k2) XCTAssertFalse(k1 != k2) } func testMaxKeyEquality() { - let k1 = MaxKey.instance() - let k2 = MaxKey.instance() + let k1 = MaxKey.shared + let k2 = MaxKey.shared XCTAssertTrue(k1 == k2) XCTAssertFalse(k1 != k2) } @@ -182,14 +182,14 @@ class TypeTest: FSTIntegrationTestCase { func testCanReadAndWriteMinKeyFields() async throws { _ = try await expectRoundtrip( coll: collectionRef(), - data: ["min": MinKey.instance()] + data: ["min": MinKey.shared] ) } func testCanReadAndWriteMaxKeyFields() async throws { _ = try await expectRoundtrip( coll: collectionRef(), - data: ["max": MaxKey.instance()] + data: ["max": MaxKey.shared] ) } @@ -244,8 +244,8 @@ class TypeTest: FSTIntegrationTestCase { BsonObjectId("507f191e810c19729de860ea"), BsonTimestamp(seconds: 123, increment: 456), Int32Value(1), - MinKey.instance(), - MaxKey.instance(), + MinKey.shared, + MaxKey.shared, RegexValue(pattern: "^foo", options: "i"), ]] ) @@ -259,8 +259,8 @@ class TypeTest: FSTIntegrationTestCase { "objectId": BsonObjectId("507f191e810c19729de860ea"), "bsonTimestamp": BsonTimestamp(seconds: 123, increment: 456), "int32": Int32Value(1), - "min": MinKey.instance(), - "max": MaxKey.instance(), + "min": MinKey.shared, + "max": MaxKey.shared, "regex": RegexValue(pattern: "^foo", options: "i"), ]] ) @@ -307,7 +307,7 @@ class TypeTest: FSTIntegrationTestCase { let collection = collectionRef() let testDocs: [String: [String: Any?]] = [ "nullValue": ["key": NSNull()], - "minValue": ["key": 
MinKey.instance()], + "minValue": ["key": MinKey.shared], "booleanValue": ["key": true], "nanValue": ["key": Double.nan], "int32Value": ["key": Int32Value(1)], @@ -325,7 +325,7 @@ class TypeTest: FSTIntegrationTestCase { "arrayValue": ["key": [1, 2]], "vectorValue": ["key": VectorValue([1.0, 2.0])], "objectValue": ["key": ["a": 1]], - "maxValue": ["key": MaxKey.instance()], + "maxValue": ["key": MaxKey.shared], ] for (docId, data) in testDocs { From d7deaf86a2b5c10e621cb469762015650f95932f Mon Sep 17 00:00:00 2001 From: Ehsan Nasiri Date: Thu, 15 May 2025 15:07:24 -0700 Subject: [PATCH 09/16] Address API feedback (2). --- Firestore/Source/Public/FirebaseFirestore/FIRBsonBinaryData.h | 4 ++-- Firestore/Source/Public/FirebaseFirestore/FIRBsonObjectId.h | 2 +- Firestore/Source/Public/FirebaseFirestore/FIRBsonTimestamp.h | 4 ++-- Firestore/Source/Public/FirebaseFirestore/FIRInt32Value.h | 2 +- Firestore/Source/Public/FirebaseFirestore/FIRRegexValue.h | 4 ++-- 5 files changed, 8 insertions(+), 8 deletions(-) diff --git a/Firestore/Source/Public/FirebaseFirestore/FIRBsonBinaryData.h b/Firestore/Source/Public/FirebaseFirestore/FIRBsonBinaryData.h index dd4f25475c5..8b1d39cae62 100644 --- a/Firestore/Source/Public/FirebaseFirestore/FIRBsonBinaryData.h +++ b/Firestore/Source/Public/FirebaseFirestore/FIRBsonBinaryData.h @@ -27,10 +27,10 @@ __attribute__((objc_subclassing_restricted)) @interface FIRBsonBinaryData : NSObject /** An 8-bit unsigned integer denoting the subtype of the data. */ -@property(atomic, readonly) uint8_t subtype; +@property(nonatomic, readonly) uint8_t subtype; /** The binary data. */ -@property(atomic, copy, readonly) NSData *data; +@property(nonatomic, copy, readonly) NSData *data; /** :nodoc: */ - (instancetype)init NS_UNAVAILABLE; diff --git a/Firestore/Source/Public/FirebaseFirestore/FIRBsonObjectId.h b/Firestore/Source/Public/FirebaseFirestore/FIRBsonObjectId.h index f674ffdd8ba..da7ab0c27c4 100644 --- a/Firestore/Source/Public/FirebaseFirestore/FIRBsonObjectId.h +++ b/Firestore/Source/Public/FirebaseFirestore/FIRBsonObjectId.h @@ -27,7 +27,7 @@ __attribute__((objc_subclassing_restricted)) @interface FIRBsonObjectId : NSObject /** The 24-character hex string representation of the ObjectId. 
*/ -@property(atomic, copy, readonly) NSString *value; +@property(nonatomic, copy, readonly) NSString *value; /** :nodoc: */ - (instancetype)init NS_UNAVAILABLE; diff --git a/Firestore/Source/Public/FirebaseFirestore/FIRBsonTimestamp.h b/Firestore/Source/Public/FirebaseFirestore/FIRBsonTimestamp.h index 2f631644e04..e9453044324 100644 --- a/Firestore/Source/Public/FirebaseFirestore/FIRBsonTimestamp.h +++ b/Firestore/Source/Public/FirebaseFirestore/FIRBsonTimestamp.h @@ -28,10 +28,10 @@ __attribute__((objc_subclassing_restricted)) @interface FIRBsonTimestamp : NSObject /** The underlying unsigned 32-bit integer for seconds */ -@property(atomic, readonly) uint32_t seconds; +@property(nonatomic, readonly) uint32_t seconds; /** The underlying unsigned 32-bit integer for increment */ -@property(atomic, readonly) uint32_t increment; +@property(nonatomic, readonly) uint32_t increment; /** :nodoc: */ - (instancetype)init NS_UNAVAILABLE; diff --git a/Firestore/Source/Public/FirebaseFirestore/FIRInt32Value.h b/Firestore/Source/Public/FirebaseFirestore/FIRInt32Value.h index 1c019f238c8..6c583dcf4e5 100644 --- a/Firestore/Source/Public/FirebaseFirestore/FIRInt32Value.h +++ b/Firestore/Source/Public/FirebaseFirestore/FIRInt32Value.h @@ -27,7 +27,7 @@ __attribute__((objc_subclassing_restricted)) @interface FIRInt32Value : NSObject /** The 32-bit integer value. */ -@property(atomic, assign, readonly) int32_t value; +@property(nonatomic, readonly) int32_t value; /** :nodoc: */ - (instancetype)init NS_UNAVAILABLE; diff --git a/Firestore/Source/Public/FirebaseFirestore/FIRRegexValue.h b/Firestore/Source/Public/FirebaseFirestore/FIRRegexValue.h index fcfb6608de7..a13ac2e57c9 100644 --- a/Firestore/Source/Public/FirebaseFirestore/FIRRegexValue.h +++ b/Firestore/Source/Public/FirebaseFirestore/FIRRegexValue.h @@ -27,10 +27,10 @@ __attribute__((objc_subclassing_restricted)) @interface FIRRegexValue : NSObject /** The regular expression pattern */ -@property(atomic, copy, readonly) NSString *pattern; +@property(nonatomic, copy, readonly) NSString *pattern; /** The regular expression options */ -@property(atomic, copy, readonly) NSString *options; +@property(nonatomic, copy, readonly) NSString *options; /** :nodoc: */ - (instancetype)init NS_UNAVAILABLE; From ba3dada7dfd810ba160b1c7e57f2683bc5c09531 Mon Sep 17 00:00:00 2001 From: Ehsan Nasiri Date: Thu, 15 May 2025 16:22:46 -0700 Subject: [PATCH 10/16] Rename to BSON* and _actually_ remove the FIRFieldValue APIs. 
--- ...IRBsonBinaryData.h => FIRBSONBinaryData.h} | 2 +- .../{FIRBsonObjectId.h => FIRBSONObjectId.h} | 2 +- ...{FIRBsonTimestamp.h => FIRBSONTimestamp.h} | 2 +- Firestore/CHANGELOG.md | 4 +- .../Tests/API/FIRBsonTypesUnitTests.mm | 40 +++---- ...BsonBinaryData.mm => FIRBSONBinaryData.mm} | 12 +- ...{FIRBsonObjectId.mm => FIRBSONObjectId.mm} | 12 +- ...IRBsonTimestamp.mm => FIRBSONTimestamp.mm} | 12 +- Firestore/Source/API/FIRFieldValue.mm | 37 ------- Firestore/Source/API/FSTUserDataReader.mm | 24 ++-- Firestore/Source/API/FSTUserDataWriter.mm | 18 +-- ...IRBsonBinaryData.h => FIRBSONBinaryData.h} | 6 +- .../{FIRBsonObjectId.h => FIRBSONObjectId.h} | 6 +- ...{FIRBsonTimestamp.h => FIRBSONTimestamp.h} | 6 +- .../Public/FirebaseFirestore/FIRFieldValue.h | 68 ------------ ...ble.swift => BSONBinaryData+Codable.swift} | 22 ++-- ...dable.swift => BSONObjectId+Codable.swift} | 22 ++-- ...able.swift => BSONTimestamp+Codable.swift} | 22 ++-- .../Codable/CodablePassThroughTypes.swift | 6 +- .../Source/SwiftAPI/FieldValue+Swift.swift | 50 --------- .../BsonTypesIntegrationTests.swift | 104 +++++++++--------- .../Integration/CodableIntegrationTests.swift | 18 +-- .../SnapshotListenerSourceTests.swift | 18 +-- .../Swift/Tests/Integration/TypeTest.swift | 54 ++++----- 24 files changed, 206 insertions(+), 361 deletions(-) rename FirebaseFirestoreInternal/FirebaseFirestore/{FIRBsonBinaryData.h => FIRBSONBinaryData.h} (91%) rename FirebaseFirestoreInternal/FirebaseFirestore/{FIRBsonObjectId.h => FIRBSONObjectId.h} (91%) rename FirebaseFirestoreInternal/FirebaseFirestore/{FIRBsonTimestamp.h => FIRBSONTimestamp.h} (91%) rename Firestore/Source/API/{FIRBsonBinaryData.mm => FIRBSONBinaryData.mm} (77%) rename Firestore/Source/API/{FIRBsonObjectId.mm => FIRBSONObjectId.mm} (75%) rename Firestore/Source/API/{FIRBsonTimestamp.mm => FIRBSONTimestamp.mm} (78%) rename Firestore/Source/Public/FirebaseFirestore/{FIRBsonBinaryData.h => FIRBSONBinaryData.h} (90%) rename Firestore/Source/Public/FirebaseFirestore/{FIRBsonObjectId.h => FIRBSONObjectId.h} (90%) rename Firestore/Source/Public/FirebaseFirestore/{FIRBsonTimestamp.h => FIRBSONTimestamp.h} (91%) rename Firestore/Swift/Source/Codable/{BsonBinaryData+Codable.swift => BSONBinaryData+Codable.swift} (72%) rename Firestore/Swift/Source/Codable/{BsonObjectId+Codable.swift => BSONObjectId+Codable.swift} (71%) rename Firestore/Swift/Source/Codable/{BsonTimestamp+Codable.swift => BSONTimestamp+Codable.swift} (73%) diff --git a/FirebaseFirestoreInternal/FirebaseFirestore/FIRBsonBinaryData.h b/FirebaseFirestoreInternal/FirebaseFirestore/FIRBSONBinaryData.h similarity index 91% rename from FirebaseFirestoreInternal/FirebaseFirestore/FIRBsonBinaryData.h rename to FirebaseFirestoreInternal/FirebaseFirestore/FIRBSONBinaryData.h index 688333d71d5..5b6a92cb444 100644 --- a/FirebaseFirestoreInternal/FirebaseFirestore/FIRBsonBinaryData.h +++ b/FirebaseFirestoreInternal/FirebaseFirestore/FIRBSONBinaryData.h @@ -12,4 +12,4 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-#import +#import diff --git a/FirebaseFirestoreInternal/FirebaseFirestore/FIRBsonObjectId.h b/FirebaseFirestoreInternal/FirebaseFirestore/FIRBSONObjectId.h similarity index 91% rename from FirebaseFirestoreInternal/FirebaseFirestore/FIRBsonObjectId.h rename to FirebaseFirestoreInternal/FirebaseFirestore/FIRBSONObjectId.h index 485f6356f89..46b0097587a 100644 --- a/FirebaseFirestoreInternal/FirebaseFirestore/FIRBsonObjectId.h +++ b/FirebaseFirestoreInternal/FirebaseFirestore/FIRBSONObjectId.h @@ -12,4 +12,4 @@ // See the License for the specific language governing permissions and // limitations under the License. -#import +#import diff --git a/FirebaseFirestoreInternal/FirebaseFirestore/FIRBsonTimestamp.h b/FirebaseFirestoreInternal/FirebaseFirestore/FIRBSONTimestamp.h similarity index 91% rename from FirebaseFirestoreInternal/FirebaseFirestore/FIRBsonTimestamp.h rename to FirebaseFirestoreInternal/FirebaseFirestore/FIRBSONTimestamp.h index bc116101b82..ee940379f10 100644 --- a/FirebaseFirestoreInternal/FirebaseFirestore/FIRBsonTimestamp.h +++ b/FirebaseFirestoreInternal/FirebaseFirestore/FIRBSONTimestamp.h @@ -12,4 +12,4 @@ // See the License for the specific language governing permissions and // limitations under the License. -#import +#import diff --git a/Firestore/CHANGELOG.md b/Firestore/CHANGELOG.md index f33001c7619..fa8af8f9e0f 100644 --- a/Firestore/CHANGELOG.md +++ b/Firestore/CHANGELOG.md @@ -1,6 +1,6 @@ # Unreleased -- [feature] Adds support for the following new types: MinKey, MaxKey, RegexValue, Int32Value, BsonObjectId, - BsonTimestamp, and BsonBinaryData. (#14800) +- [feature] Adds support for the following new types: MinKey, MaxKey, RegexValue, Int32Value, BSONObjectId, + BSONTimestamp, and BSONBinaryData. (#14800) # 11.12.0 - [fixed] Fixed the `null` value handling in `isNotEqualTo` and `notIn` filters. diff --git a/Firestore/Example/Tests/API/FIRBsonTypesUnitTests.mm b/Firestore/Example/Tests/API/FIRBsonTypesUnitTests.mm index ff1ce7561d7..7fe6d9226c6 100644 --- a/Firestore/Example/Tests/API/FIRBsonTypesUnitTests.mm +++ b/Firestore/Example/Tests/API/FIRBsonTypesUnitTests.mm @@ -14,9 +14,9 @@ * limitations under the License. 
*/ -#import -#import -#import +#import +#import +#import #import #import #import @@ -76,9 +76,9 @@ - (void)testCreateAndReadAndCompareInt32Value { } - (void)testCreateAndReadAndCompareBsonObjectId { - FIRBsonObjectId *val1 = [[FIRBsonObjectId alloc] initWithValue:@"abcd"]; - FIRBsonObjectId *val2 = [[FIRBsonObjectId alloc] initWithValue:@"abcd"]; - FIRBsonObjectId *val3 = [[FIRBsonObjectId alloc] initWithValue:@"efgh"]; + FIRBSONObjectId *val1 = [[FIRBSONObjectId alloc] initWithValue:@"abcd"]; + FIRBSONObjectId *val2 = [[FIRBSONObjectId alloc] initWithValue:@"abcd"]; + FIRBSONObjectId *val3 = [[FIRBSONObjectId alloc] initWithValue:@"efgh"]; // Test reading the value back XCTAssertEqual(@"abcd", val1.value); @@ -89,10 +89,10 @@ - (void)testCreateAndReadAndCompareBsonObjectId { } - (void)testCreateAndReadAndCompareBsonTimestamp { - FIRBsonTimestamp *val1 = [[FIRBsonTimestamp alloc] initWithSeconds:1234 increment:100]; - FIRBsonTimestamp *val2 = [[FIRBsonTimestamp alloc] initWithSeconds:1234 increment:100]; - FIRBsonTimestamp *val3 = [[FIRBsonTimestamp alloc] initWithSeconds:4444 increment:100]; - FIRBsonTimestamp *val4 = [[FIRBsonTimestamp alloc] initWithSeconds:1234 increment:444]; + FIRBSONTimestamp *val1 = [[FIRBSONTimestamp alloc] initWithSeconds:1234 increment:100]; + FIRBSONTimestamp *val2 = [[FIRBSONTimestamp alloc] initWithSeconds:1234 increment:100]; + FIRBSONTimestamp *val3 = [[FIRBSONTimestamp alloc] initWithSeconds:4444 increment:100]; + FIRBSONTimestamp *val4 = [[FIRBSONTimestamp alloc] initWithSeconds:1234 increment:444]; // Test reading the values back. XCTAssertEqual(1234U, val1.seconds); @@ -111,10 +111,10 @@ - (void)testCreateAndReadAndCompareBsonBinaryData { NSData *data2 = [NSData dataWithBytes:byteArray1 length:sizeof(byteArray1)]; NSData *data3 = [NSData dataWithBytes:byteArray2 length:sizeof(byteArray2)]; - FIRBsonBinaryData *val1 = [[FIRBsonBinaryData alloc] initWithSubtype:128 data:data1]; - FIRBsonBinaryData *val2 = [[FIRBsonBinaryData alloc] initWithSubtype:128 data:data2]; - FIRBsonBinaryData *val3 = [[FIRBsonBinaryData alloc] initWithSubtype:128 data:data3]; - FIRBsonBinaryData *val4 = [[FIRBsonBinaryData alloc] initWithSubtype:1 data:data1]; + FIRBSONBinaryData *val1 = [[FIRBSONBinaryData alloc] initWithSubtype:128 data:data1]; + FIRBSONBinaryData *val2 = [[FIRBSONBinaryData alloc] initWithSubtype:128 data:data2]; + FIRBSONBinaryData *val3 = [[FIRBSONBinaryData alloc] initWithSubtype:128 data:data3]; + FIRBSONBinaryData *val4 = [[FIRBSONBinaryData alloc] initWithSubtype:1 data:data1]; // Test reading the values back. 
XCTAssertEqual(128, val1.subtype); @@ -157,15 +157,15 @@ - (void)testFieldValueInt32 { } - (void)testFieldValueObjectId { - FIRBsonObjectId *oid1 = [[FIRBsonObjectId alloc] initWithValue:@"abcd"]; - FIRBsonObjectId *oid2 = [[FIRBsonObjectId alloc] initWithValue:@"abcd"]; + FIRBSONObjectId *oid1 = [[FIRBSONObjectId alloc] initWithValue:@"abcd"]; + FIRBSONObjectId *oid2 = [[FIRBSONObjectId alloc] initWithValue:@"abcd"]; XCTAssertTrue([oid1 isEqual:oid2]); XCTAssertEqual(@"abcd", oid2.value); } - (void)testFieldValueBsonTimestamp { - FIRBsonTimestamp *val1 = [[FIRBsonTimestamp alloc] initWithSeconds:1234 increment:100]; - FIRBsonTimestamp *val2 = [[FIRBsonTimestamp alloc] initWithSeconds:1234 increment:100]; + FIRBSONTimestamp *val1 = [[FIRBSONTimestamp alloc] initWithSeconds:1234 increment:100]; + FIRBSONTimestamp *val2 = [[FIRBSONTimestamp alloc] initWithSeconds:1234 increment:100]; XCTAssertTrue([val1 isEqual:val2]); XCTAssertEqual(1234U, val2.seconds); XCTAssertEqual(100U, val2.increment); @@ -174,8 +174,8 @@ - (void)testFieldValueBsonTimestamp { - (void)testFieldValueBsonBinaryData { uint8_t byteArray[] = {0x01, 0x02, 0x03, 0x04, 0x05}; NSData *data = [NSData dataWithBytes:byteArray length:sizeof(byteArray)]; - FIRBsonBinaryData *val1 = [[FIRBsonBinaryData alloc] initWithSubtype:128 data:data]; - FIRBsonBinaryData *val2 = [[FIRBsonBinaryData alloc] initWithSubtype:128 data:data]; + FIRBSONBinaryData *val1 = [[FIRBSONBinaryData alloc] initWithSubtype:128 data:data]; + FIRBSONBinaryData *val2 = [[FIRBSONBinaryData alloc] initWithSubtype:128 data:data]; XCTAssertTrue([val1 isEqual:val2]); XCTAssertEqual(128, val2.subtype); XCTAssertEqual(data, val2.data); diff --git a/Firestore/Source/API/FIRBsonBinaryData.mm b/Firestore/Source/API/FIRBSONBinaryData.mm similarity index 77% rename from Firestore/Source/API/FIRBsonBinaryData.mm rename to Firestore/Source/API/FIRBSONBinaryData.mm index c656d5dd3ee..cb53b0ab54f 100644 --- a/Firestore/Source/API/FIRBsonBinaryData.mm +++ b/Firestore/Source/API/FIRBSONBinaryData.mm @@ -14,11 +14,11 @@ * limitations under the License. 
*/ -#include "Firestore/Source/Public/FirebaseFirestore/FIRBsonBinaryData.h" +#include "Firestore/Source/Public/FirebaseFirestore/FIRBSONBinaryData.h" NS_ASSUME_NONNULL_BEGIN -@implementation FIRBsonBinaryData +@implementation FIRBSONBinaryData - (instancetype)initWithSubtype:(uint8_t)subtype data:(NSData *)data { self = [super init]; @@ -34,20 +34,20 @@ - (BOOL)isEqual:(id)object { return YES; } - if (![object isKindOfClass:[FIRBsonBinaryData class]]) { + if (![object isKindOfClass:[FIRBSONBinaryData class]]) { return NO; } - FIRBsonBinaryData *other = (FIRBsonBinaryData *)object; + FIRBSONBinaryData *other = (FIRBSONBinaryData *)object; return self.subtype == other.subtype && [self.data isEqualToData:other.data]; } - (id)copyWithZone:(__unused NSZone *_Nullable)zone { - return [[FIRBsonBinaryData alloc] initWithSubtype:self.subtype data:self.data]; + return [[FIRBSONBinaryData alloc] initWithSubtype:self.subtype data:self.data]; } - (NSString *)description { - return [NSString stringWithFormat:@"", + return [NSString stringWithFormat:@"", (unsigned int)self.subtype, self.data]; } diff --git a/Firestore/Source/API/FIRBsonObjectId.mm b/Firestore/Source/API/FIRBSONObjectId.mm similarity index 75% rename from Firestore/Source/API/FIRBsonObjectId.mm rename to Firestore/Source/API/FIRBSONObjectId.mm index defd0d64528..470e5f18fce 100644 --- a/Firestore/Source/API/FIRBsonObjectId.mm +++ b/Firestore/Source/API/FIRBSONObjectId.mm @@ -14,11 +14,11 @@ * limitations under the License. */ -#include "Firestore/Source/Public/FirebaseFirestore/FIRBsonObjectId.h" +#include "Firestore/Source/Public/FirebaseFirestore/FIRBSONObjectId.h" NS_ASSUME_NONNULL_BEGIN -@implementation FIRBsonObjectId +@implementation FIRBSONObjectId - (instancetype)initWithValue:(NSString *)value { self = [super init]; @@ -33,20 +33,20 @@ - (BOOL)isEqual:(id)object { return YES; } - if (![object isKindOfClass:[FIRBsonObjectId class]]) { + if (![object isKindOfClass:[FIRBSONObjectId class]]) { return NO; } - FIRBsonObjectId *other = (FIRBsonObjectId *)object; + FIRBSONObjectId *other = (FIRBSONObjectId *)object; return [self.value isEqualToString:other.value]; } - (id)copyWithZone:(__unused NSZone *_Nullable)zone { - return [[FIRBsonObjectId alloc] initWithValue:self.value]; + return [[FIRBSONObjectId alloc] initWithValue:self.value]; } - (NSString *)description { - return [NSString stringWithFormat:@"", self.value]; + return [NSString stringWithFormat:@"", self.value]; } @end diff --git a/Firestore/Source/API/FIRBsonTimestamp.mm b/Firestore/Source/API/FIRBSONTimestamp.mm similarity index 78% rename from Firestore/Source/API/FIRBsonTimestamp.mm rename to Firestore/Source/API/FIRBSONTimestamp.mm index c75567f4495..9ae6735692d 100644 --- a/Firestore/Source/API/FIRBsonTimestamp.mm +++ b/Firestore/Source/API/FIRBSONTimestamp.mm @@ -14,11 +14,11 @@ * limitations under the License. 
*/ -#include "Firestore/Source/Public/FirebaseFirestore/FIRBsonTimestamp.h" +#include "Firestore/Source/Public/FirebaseFirestore/FIRBSONTimestamp.h" NS_ASSUME_NONNULL_BEGIN -@implementation FIRBsonTimestamp +@implementation FIRBSONTimestamp - (instancetype)initWithSeconds:(uint32_t)seconds increment:(uint32_t)increment { self = [super init]; @@ -34,20 +34,20 @@ - (BOOL)isEqual:(id)object { return YES; } - if (![object isKindOfClass:[FIRBsonTimestamp class]]) { + if (![object isKindOfClass:[FIRBSONTimestamp class]]) { return NO; } - FIRBsonTimestamp *other = (FIRBsonTimestamp *)object; + FIRBSONTimestamp *other = (FIRBSONTimestamp *)object; return self.seconds == other.seconds && self.increment == other.increment; } - (id)copyWithZone:(__unused NSZone *_Nullable)zone { - return [[FIRBsonTimestamp alloc] initWithSeconds:self.seconds increment:self.increment]; + return [[FIRBSONTimestamp alloc] initWithSeconds:self.seconds increment:self.increment]; } - (NSString *)description { - return [NSString stringWithFormat:@"", self.seconds, + return [NSString stringWithFormat:@"", self.seconds, self.increment]; } diff --git a/Firestore/Source/API/FIRFieldValue.mm b/Firestore/Source/API/FIRFieldValue.mm index 25a60517744..23c5060a8ee 100644 --- a/Firestore/Source/API/FIRFieldValue.mm +++ b/Firestore/Source/API/FIRFieldValue.mm @@ -15,13 +15,6 @@ */ #import "Firestore/Source/API/FIRFieldValue+Internal.h" -#import "Firestore/Source/Public/FirebaseFirestore/FIRBsonBinaryData.h" -#import "Firestore/Source/Public/FirebaseFirestore/FIRBsonObjectId.h" -#import "Firestore/Source/Public/FirebaseFirestore/FIRBsonTimestamp.h" -#import "Firestore/Source/Public/FirebaseFirestore/FIRInt32Value.h" -#import "Firestore/Source/Public/FirebaseFirestore/FIRMaxKey.h" -#import "Firestore/Source/Public/FirebaseFirestore/FIRMinKey.h" -#import "Firestore/Source/Public/FirebaseFirestore/FIRRegexValue.h" #import "Firestore/Source/Public/FirebaseFirestore/FIRVectorValue.h" NS_ASSUME_NONNULL_BEGIN @@ -188,36 +181,6 @@ + (nonnull FIRVectorValue *)vectorWithArray:(nonnull NSArray *)array return [[FIRVectorValue alloc] initWithArray:array]; } -+ (nonnull FIRMinKey *)minKey { - return [FIRMinKey shared]; -} - -+ (nonnull FIRMaxKey *)maxKey { - return [FIRMaxKey shared]; -} - -+ (nonnull FIRRegexValue *)regexWithPattern:(nonnull NSString *)pattern - options:(nonnull NSString *)options { - return [[FIRRegexValue alloc] initWithPattern:pattern options:options]; -} - -+ (nonnull FIRInt32Value *)int32WithValue:(int)value { - return [[FIRInt32Value alloc] initWithValue:value]; -} - -+ (nonnull FIRBsonObjectId *)bsonObjectIdWithValue:(NSString *)value { - return [[FIRBsonObjectId alloc] initWithValue:value]; -} - -+ (nonnull FIRBsonTimestamp *)bsonTimestampWithSeconds:(uint32_t)seconds - increment:(uint32_t)increment { - return [[FIRBsonTimestamp alloc] initWithSeconds:seconds increment:increment]; -} - -+ (nonnull FIRBsonBinaryData *)bsonBinaryDataWithSubtype:(uint8_t)subtype data:(NSData *)data { - return [[FIRBsonBinaryData alloc] initWithSubtype:subtype data:data]; -} - @end NS_ASSUME_NONNULL_END diff --git a/Firestore/Source/API/FSTUserDataReader.mm b/Firestore/Source/API/FSTUserDataReader.mm index 35cb68721e4..eac2e83f655 100644 --- a/Firestore/Source/API/FSTUserDataReader.mm +++ b/Firestore/Source/API/FSTUserDataReader.mm @@ -24,9 +24,9 @@ #import "Firestore/Source/API/FSTUserDataReader.h" -#import "FIRBsonBinaryData.h" -#import "FIRBsonObjectId.h" -#import "FIRBsonTimestamp.h" +#import "FIRBSONBinaryData.h" +#import 
"FIRBSONObjectId.h" +#import "FIRBSONTimestamp.h" #import "FIRGeoPoint.h" #import "FIRInt32Value.h" #import "FIRMaxKey.h" @@ -452,7 +452,7 @@ - (ParsedUpdateData)parsedUpdateData:(id)input { return std::move(result); } -- (Message)parseBsonObjectId:(FIRBsonObjectId *)oid +- (Message)parseBsonObjectId:(FIRBSONObjectId *)oid context:(ParseContext &&)context { __block Message result; result->which_value_type = google_firestore_v1_Value_map_value_tag; @@ -465,7 +465,7 @@ - (ParsedUpdateData)parsedUpdateData:(id)input { return std::move(result); } -- (Message)parseBsonTimestamp:(FIRBsonTimestamp *)timestamp +- (Message)parseBsonTimestamp:(FIRBSONTimestamp *)timestamp context:(ParseContext &&)context { uint32_t seconds = timestamp.seconds; uint32_t increment = timestamp.increment; @@ -500,7 +500,7 @@ - (ParsedUpdateData)parsedUpdateData:(id)input { return std::move(result); } -- (Message)parseBsonBinaryData:(FIRBsonBinaryData *)binaryData +- (Message)parseBsonBinaryData:(FIRBSONBinaryData *)binaryData context:(ParseContext &&)context { uint8_t subtypeByte = binaryData.subtype; NSData *data = binaryData.data; @@ -723,14 +723,14 @@ - (void)parseSentinelFieldValue:(FIRFieldValue *)fieldValue context:(ParseContex } else if ([input isKindOfClass:[FIRInt32Value class]]) { FIRInt32Value *value = input; return [self parseInt32Value:value context:std::move(context)]; - } else if ([input isKindOfClass:[FIRBsonObjectId class]]) { - FIRBsonObjectId *oid = input; + } else if ([input isKindOfClass:[FIRBSONObjectId class]]) { + FIRBSONObjectId *oid = input; return [self parseBsonObjectId:oid context:std::move(context)]; - } else if ([input isKindOfClass:[FIRBsonTimestamp class]]) { - FIRBsonTimestamp *timestamp = input; + } else if ([input isKindOfClass:[FIRBSONTimestamp class]]) { + FIRBSONTimestamp *timestamp = input; return [self parseBsonTimestamp:timestamp context:std::move(context)]; - } else if ([input isKindOfClass:[FIRBsonBinaryData class]]) { - FIRBsonBinaryData *binaryData = input; + } else if ([input isKindOfClass:[FIRBSONBinaryData class]]) { + FIRBSONBinaryData *binaryData = input; return [self parseBsonBinaryData:binaryData context:std::move(context)]; } else { ThrowInvalidArgument("Unsupported type: %s%s", NSStringFromClass([input class]), diff --git a/Firestore/Source/API/FSTUserDataWriter.mm b/Firestore/Source/API/FSTUserDataWriter.mm index 44cd90e079a..5cc6eaf1bc7 100644 --- a/Firestore/Source/API/FSTUserDataWriter.mm +++ b/Firestore/Source/API/FSTUserDataWriter.mm @@ -23,9 +23,9 @@ #include "Firestore/Source/API/FIRDocumentReference+Internal.h" #include "Firestore/Source/API/FIRFieldValue+Internal.h" #include "Firestore/Source/API/converters.h" -#include "Firestore/Source/Public/FirebaseFirestore/FIRBsonBinaryData.h" -#include "Firestore/Source/Public/FirebaseFirestore/FIRBsonObjectId.h" -#include "Firestore/Source/Public/FirebaseFirestore/FIRBsonTimestamp.h" +#include "Firestore/Source/Public/FirebaseFirestore/FIRBSONBinaryData.h" +#include "Firestore/Source/Public/FirebaseFirestore/FIRBSONObjectId.h" +#include "Firestore/Source/Public/FirebaseFirestore/FIRBSONTimestamp.h" #include "Firestore/Source/Public/FirebaseFirestore/FIRInt32Value.h" #include "Firestore/Source/Public/FirebaseFirestore/FIRMaxKey.h" #include "Firestore/Source/Public/FirebaseFirestore/FIRMinKey.h" @@ -198,7 +198,7 @@ - (FIRInt32Value *)convertedInt32:(const google_firestore_v1_MapValue &)mapValue return [[FIRInt32Value alloc] initWithValue:value]; } -- (FIRBsonObjectId *)convertedBsonObjectId:(const 
google_firestore_v1_MapValue &)mapValue { +- (FIRBSONObjectId *)convertedBsonObjectId:(const google_firestore_v1_MapValue &)mapValue { NSString *oid = @""; if (mapValue.fields_count == 1) { const google_firestore_v1_Value &oidValue = mapValue.fields[0].value; @@ -207,10 +207,10 @@ - (FIRBsonObjectId *)convertedBsonObjectId:(const google_firestore_v1_MapValue & } } - return [[FIRBsonObjectId alloc] initWithValue:oid]; + return [[FIRBSONObjectId alloc] initWithValue:oid]; } -- (FIRBsonTimestamp *)convertedBsonTimestamp:(const google_firestore_v1_MapValue &)mapValue { +- (FIRBSONTimestamp *)convertedBsonTimestamp:(const google_firestore_v1_MapValue &)mapValue { uint32_t seconds = 0; uint32_t increment = 0; if (mapValue.fields_count == 1) { @@ -233,10 +233,10 @@ - (FIRBsonTimestamp *)convertedBsonTimestamp:(const google_firestore_v1_MapValue } } - return [[FIRBsonTimestamp alloc] initWithSeconds:seconds increment:increment]; + return [[FIRBSONTimestamp alloc] initWithSeconds:seconds increment:increment]; } -- (FIRBsonBinaryData *)convertedBsonBinaryData:(const google_firestore_v1_MapValue &)mapValue { +- (FIRBSONBinaryData *)convertedBsonBinaryData:(const google_firestore_v1_MapValue &)mapValue { uint8_t subtype = 0; NSData *data = [[NSData alloc] init]; @@ -255,7 +255,7 @@ - (FIRBsonBinaryData *)convertedBsonBinaryData:(const google_firestore_v1_MapVal } } - return [[FIRBsonBinaryData alloc] initWithSubtype:subtype data:data]; + return [[FIRBSONBinaryData alloc] initWithSubtype:subtype data:data]; } - (NSArray *)convertedArray:(const google_firestore_v1_ArrayValue &)arrayValue { diff --git a/Firestore/Source/Public/FirebaseFirestore/FIRBsonBinaryData.h b/Firestore/Source/Public/FirebaseFirestore/FIRBSONBinaryData.h similarity index 90% rename from Firestore/Source/Public/FirebaseFirestore/FIRBsonBinaryData.h rename to Firestore/Source/Public/FirebaseFirestore/FIRBSONBinaryData.h index 8b1d39cae62..69282eea806 100644 --- a/Firestore/Source/Public/FirebaseFirestore/FIRBsonBinaryData.h +++ b/Firestore/Source/Public/FirebaseFirestore/FIRBSONBinaryData.h @@ -22,9 +22,9 @@ NS_ASSUME_NONNULL_BEGIN * Represents a BSON Binary Data type in Firestore documents. */ NS_SWIFT_SENDABLE -NS_SWIFT_NAME(BsonBinaryData) +NS_SWIFT_NAME(BSONBinaryData) __attribute__((objc_subclassing_restricted)) -@interface FIRBsonBinaryData : NSObject +@interface FIRBSONBinaryData : NSObject /** An 8-bit unsigned integer denoting the subtype of the data. */ @property(nonatomic, readonly) uint8_t subtype; @@ -36,7 +36,7 @@ __attribute__((objc_subclassing_restricted)) - (instancetype)init NS_UNAVAILABLE; /** - * Creates a `BsonBinaryData` constructed with the given subtype and data. + * Creates a `BSONBinaryData` constructed with the given subtype and data. * @param subtype An 8-bit unsigned integer denoting the subtype of the data. * @param data The binary data. */ diff --git a/Firestore/Source/Public/FirebaseFirestore/FIRBsonObjectId.h b/Firestore/Source/Public/FirebaseFirestore/FIRBSONObjectId.h similarity index 90% rename from Firestore/Source/Public/FirebaseFirestore/FIRBsonObjectId.h rename to Firestore/Source/Public/FirebaseFirestore/FIRBSONObjectId.h index da7ab0c27c4..cea2c1066be 100644 --- a/Firestore/Source/Public/FirebaseFirestore/FIRBsonObjectId.h +++ b/Firestore/Source/Public/FirebaseFirestore/FIRBSONObjectId.h @@ -22,9 +22,9 @@ NS_ASSUME_NONNULL_BEGIN * Represents a BSON ObjectId type in Firestore documents. 
*/ NS_SWIFT_SENDABLE -NS_SWIFT_NAME(BsonObjectId) +NS_SWIFT_NAME(BSONObjectId) __attribute__((objc_subclassing_restricted)) -@interface FIRBsonObjectId : NSObject +@interface FIRBSONObjectId : NSObject /** The 24-character hex string representation of the ObjectId. */ @property(nonatomic, copy, readonly) NSString *value; @@ -33,7 +33,7 @@ __attribute__((objc_subclassing_restricted)) - (instancetype)init NS_UNAVAILABLE; /** - * Creates a `BsonObjectId` constructed with the given value. + * Creates a `BSONObjectId` constructed with the given value. * @param value The 24-character hex string representation of the ObjectId. */ - (instancetype)initWithValue:(nonnull NSString *)value NS_SWIFT_NAME(init(_:)); diff --git a/Firestore/Source/Public/FirebaseFirestore/FIRBsonTimestamp.h b/Firestore/Source/Public/FirebaseFirestore/FIRBSONTimestamp.h similarity index 91% rename from Firestore/Source/Public/FirebaseFirestore/FIRBsonTimestamp.h rename to Firestore/Source/Public/FirebaseFirestore/FIRBSONTimestamp.h index e9453044324..cded36b8d7e 100644 --- a/Firestore/Source/Public/FirebaseFirestore/FIRBsonTimestamp.h +++ b/Firestore/Source/Public/FirebaseFirestore/FIRBSONTimestamp.h @@ -23,9 +23,9 @@ NS_ASSUME_NONNULL_BEGIN * Represents a BSON timestamp type in Firestore documents. */ NS_SWIFT_SENDABLE -NS_SWIFT_NAME(BsonTimestamp) +NS_SWIFT_NAME(BSONTimestamp) __attribute__((objc_subclassing_restricted)) -@interface FIRBsonTimestamp : NSObject +@interface FIRBSONTimestamp : NSObject /** The underlying unsigned 32-bit integer for seconds */ @property(nonatomic, readonly) uint32_t seconds; @@ -37,7 +37,7 @@ __attribute__((objc_subclassing_restricted)) - (instancetype)init NS_UNAVAILABLE; /** - * Creates a `BsonTimestamp` with the given seconds and increment values. + * Creates a `BSONTimestamp` with the given seconds and increment values. * @param seconds The underlying unsigned 32-bit integer for seconds. * @param increment The underlying unsigned 32-bit integer for increment. */ diff --git a/Firestore/Source/Public/FirebaseFirestore/FIRFieldValue.h b/Firestore/Source/Public/FirebaseFirestore/FIRFieldValue.h index e81158fb506..9defa3a0569 100644 --- a/Firestore/Source/Public/FirebaseFirestore/FIRFieldValue.h +++ b/Firestore/Source/Public/FirebaseFirestore/FIRFieldValue.h @@ -18,13 +18,6 @@ NS_ASSUME_NONNULL_BEGIN @class FIRVectorValue; -@class FIRMinKey; -@class FIRMaxKey; -@class FIRRegexValue; -@class FIRInt32Value; -@class FIRBsonObjectId; -@class FIRBsonTimestamp; -@class FIRBsonBinaryData; /** * Sentinel values that can be used when writing document fields with `setData()` or `updateData()`. @@ -107,67 +100,6 @@ NS_SWIFT_NAME(FieldValue) */ + (FIRVectorValue *)vectorWithArray:(NSArray *)array NS_REFINED_FOR_SWIFT; -/** - * Returns a `MinKey` value instance. - * - * @return A `MinKey` value instance. - */ -+ (nonnull FIRMinKey *)minKey NS_REFINED_FOR_SWIFT; - -/** - * Returns a `MaxKey` value instance. - * - * @return A `MaxKey` value instance. - */ -+ (nonnull FIRMaxKey *)maxKey NS_REFINED_FOR_SWIFT; - -/** - * Creates a new `RegexValue` constructed with the given pattern and options. - * - * @param pattern The pattern to use for the regular expression. - * @param options The options to use for the regular expression. - * @return A new `RegexValue` constructed with the given pattern and options. 
- */ -+ (nonnull FIRRegexValue *)regexWithPattern:(nonnull NSString *)pattern - options:(nonnull NSString *)options NS_REFINED_FOR_SWIFT; - -/** - * Creates a new `Int32Value` with the given signed 32-bit integer value. - * - * @param value The 32-bit number to be used for constructing the Int32Value. - * @return A new `Int32Value` instance. - */ -+ (nonnull FIRInt32Value *)int32WithValue:(int)value NS_REFINED_FOR_SWIFT; - -/** - * Creates a new `BsonObjectId` with the given value. - * - * @param value The 24-character hex string representation of the ObjectId. - * @return A new `BsonObjectId` instance constructed with the given value. - */ -+ (nonnull FIRBsonObjectId *)bsonObjectIdWithValue:(nonnull NSString *)value NS_REFINED_FOR_SWIFT; - -/** - * Creates a new `BsonTimestamp` with the given values. - * - * @param seconds The underlying unsigned 32-bit integer for seconds. - * @param increment The underlying unsigned 32-bit integer for increment. - * @return A new `BsonTimestamp` instance constructed with the given values. - */ -+ (nonnull FIRBsonTimestamp *)bsonTimestampWithSeconds:(uint32_t)seconds - increment:(uint32_t)increment NS_REFINED_FOR_SWIFT; - -/** - * Creates a new `BsonBinaryData` object with the given subtype and data. - * - * @param subtype An 8-bit unsigned integer denoting the subtype of the data. - * @param data The binary data. - * @return A new `BsonBinaryData` instance constructed with the given values. - */ -+ (nonnull FIRBsonBinaryData *)bsonBinaryDataWithSubtype:(uint8_t)subtype - data:(nonnull NSData *)data - NS_REFINED_FOR_SWIFT; - @end NS_ASSUME_NONNULL_END diff --git a/Firestore/Swift/Source/Codable/BsonBinaryData+Codable.swift b/Firestore/Swift/Source/Codable/BSONBinaryData+Codable.swift similarity index 72% rename from Firestore/Swift/Source/Codable/BsonBinaryData+Codable.swift rename to Firestore/Swift/Source/Codable/BSONBinaryData+Codable.swift index 5ebb6e04554..4a61ca4f812 100644 --- a/Firestore/Swift/Source/Codable/BsonBinaryData+Codable.swift +++ b/Firestore/Swift/Source/Codable/BSONBinaryData+Codable.swift @@ -21,46 +21,46 @@ #endif // SWIFT_PACKAGE /** - * A protocol describing the encodable properties of an BsonBinaryData. + * A protocol describing the encodable properties of an BSONBinaryData. * - * Note: this protocol exists as a workaround for the Swift compiler: if the BsonBinaryData class + * Note: this protocol exists as a workaround for the Swift compiler: if the BSONBinaryData class * was extended directly to conform to Codable, the methods implementing the protocol would be need * to be marked required but that can't be done in an extension. Declaring the extension on the * protocol sidesteps this issue. */ -private protocol CodableBsonBinaryData: Codable { +private protocol CodableBSONBinaryData: Codable { var subtype: UInt8 { get } var data: Data { get } init(subtype: UInt8, data: Data) } -/** The keys in an BsonBinaryData. Must match the properties of CodableBsonBinaryData. */ -private enum BsonBinaryDataKeys: String, CodingKey { +/** The keys in an BSONBinaryData. Must match the properties of CodableBSONBinaryData. */ +private enum BSONBinaryDataKeys: String, CodingKey { case subtype case data } /** - * An extension of BsonBinaryData that implements the behavior of the Codable protocol. + * An extension of BSONBinaryData that implements the behavior of the Codable protocol. * * Note: this is implemented manually here because the Swift compiler can't synthesize these methods * when declaring an extension to conform to Codable. 
*/ -extension CodableBsonBinaryData { +extension CodableBSONBinaryData { public init(from decoder: Decoder) throws { - let container = try decoder.container(keyedBy: BsonBinaryDataKeys.self) + let container = try decoder.container(keyedBy: BSONBinaryDataKeys.self) let subtype = try container.decode(UInt8.self, forKey: .subtype) let data = try container.decode(Data.self, forKey: .data) self.init(subtype: subtype, data: data) } public func encode(to encoder: Encoder) throws { - var container = encoder.container(keyedBy: BsonBinaryDataKeys.self) + var container = encoder.container(keyedBy: BSONBinaryDataKeys.self) try container.encode(subtype, forKey: .subtype) try container.encode(data, forKey: .data) } } -/** Extends BsonBinaryData to conform to Codable. */ -extension FirebaseFirestore.BsonBinaryData: FirebaseFirestore.CodableBsonBinaryData {} +/** Extends BSONBinaryData to conform to Codable. */ +extension FirebaseFirestore.BSONBinaryData: FirebaseFirestore.CodableBSONBinaryData {} diff --git a/Firestore/Swift/Source/Codable/BsonObjectId+Codable.swift b/Firestore/Swift/Source/Codable/BSONObjectId+Codable.swift similarity index 71% rename from Firestore/Swift/Source/Codable/BsonObjectId+Codable.swift rename to Firestore/Swift/Source/Codable/BSONObjectId+Codable.swift index aa2901582d9..2f7761bb5c0 100644 --- a/Firestore/Swift/Source/Codable/BsonObjectId+Codable.swift +++ b/Firestore/Swift/Source/Codable/BSONObjectId+Codable.swift @@ -21,42 +21,42 @@ #endif // SWIFT_PACKAGE /** - * A protocol describing the encodable properties of an BsonObjectId. + * A protocol describing the encodable properties of an BSONObjectId. * - * Note: this protocol exists as a workaround for the Swift compiler: if the BsonObjectId class + * Note: this protocol exists as a workaround for the Swift compiler: if the BSONObjectId class * was extended directly to conform to Codable, the methods implementing the protocol would be need * to be marked required but that can't be done in an extension. Declaring the extension on the * protocol sidesteps this issue. */ -private protocol CodableBsonObjectId: Codable { +private protocol CodableBSONObjectId: Codable { var value: String { get } init(_ value: String) } -/** The keys in an BsonObjectId. Must match the properties of CodableBsonObjectId. */ -private enum BsonObjectIdKeys: String, CodingKey { +/** The keys in an BSONObjectId. Must match the properties of CodableBSONObjectId. */ +private enum BSONObjectIdKeys: String, CodingKey { case value } /** - * An extension of BsonObjectId that implements the behavior of the Codable protocol. + * An extension of BSONObjectId that implements the behavior of the Codable protocol. * * Note: this is implemented manually here because the Swift compiler can't synthesize these methods * when declaring an extension to conform to Codable. */ -extension CodableBsonObjectId { +extension CodableBSONObjectId { public init(from decoder: Decoder) throws { - let container = try decoder.container(keyedBy: BsonObjectIdKeys.self) + let container = try decoder.container(keyedBy: BSONObjectIdKeys.self) let value = try container.decode(String.self, forKey: .value) self.init(value) } public func encode(to encoder: Encoder) throws { - var container = encoder.container(keyedBy: BsonObjectIdKeys.self) + var container = encoder.container(keyedBy: BSONObjectIdKeys.self) try container.encode(value, forKey: .value) } } -/** Extends BsonObjectId to conform to Codable. 
*/ -extension FirebaseFirestore.BsonObjectId: FirebaseFirestore.CodableBsonObjectId {} +/** Extends BSONObjectId to conform to Codable. */ +extension FirebaseFirestore.BSONObjectId: FirebaseFirestore.CodableBSONObjectId {} diff --git a/Firestore/Swift/Source/Codable/BsonTimestamp+Codable.swift b/Firestore/Swift/Source/Codable/BSONTimestamp+Codable.swift similarity index 73% rename from Firestore/Swift/Source/Codable/BsonTimestamp+Codable.swift rename to Firestore/Swift/Source/Codable/BSONTimestamp+Codable.swift index 73941bc7ae1..a3ab054a5fc 100644 --- a/Firestore/Swift/Source/Codable/BsonTimestamp+Codable.swift +++ b/Firestore/Swift/Source/Codable/BSONTimestamp+Codable.swift @@ -21,46 +21,46 @@ #endif // SWIFT_PACKAGE /** - * A protocol describing the encodable properties of an BsonTimestamp. + * A protocol describing the encodable properties of an BSONTimestamp. * - * Note: this protocol exists as a workaround for the Swift compiler: if the BsonTimestamp class + * Note: this protocol exists as a workaround for the Swift compiler: if the BSONTimestamp class * was extended directly to conform to Codable, the methods implementing the protocol would be need * to be marked required but that can't be done in an extension. Declaring the extension on the * protocol sidesteps this issue. */ -private protocol CodableBsonTimestamp: Codable { +private protocol CodableBSONTimestamp: Codable { var seconds: UInt32 { get } var increment: UInt32 { get } init(seconds: UInt32, increment: UInt32) } -/** The keys in an BsonTimestamp. Must match the properties of CodableBsonTimestamp. */ -private enum BsonTimestampKeys: String, CodingKey { +/** The keys in an BSONTimestamp. Must match the properties of CodableBSONTimestamp. */ +private enum BSONTimestampKeys: String, CodingKey { case seconds case increment } /** - * An extension of BsonTimestamp that implements the behavior of the Codable protocol. + * An extension of BSONTimestamp that implements the behavior of the Codable protocol. * * Note: this is implemented manually here because the Swift compiler can't synthesize these methods * when declaring an extension to conform to Codable. */ -extension CodableBsonTimestamp { +extension CodableBSONTimestamp { public init(from decoder: Decoder) throws { - let container = try decoder.container(keyedBy: BsonTimestampKeys.self) + let container = try decoder.container(keyedBy: BSONTimestampKeys.self) let seconds = try container.decode(UInt32.self, forKey: .seconds) let increment = try container.decode(UInt32.self, forKey: .increment) self.init(seconds: seconds, increment: increment) } public func encode(to encoder: Encoder) throws { - var container = encoder.container(keyedBy: BsonTimestampKeys.self) + var container = encoder.container(keyedBy: BSONTimestampKeys.self) try container.encode(seconds, forKey: .seconds) try container.encode(increment, forKey: .increment) } } -/** Extends BsonTimestamp to conform to Codable. */ -extension FirebaseFirestore.BsonTimestamp: FirebaseFirestore.CodableBsonTimestamp {} +/** Extends BSONTimestamp to conform to Codable. 
*/ +extension FirebaseFirestore.BSONTimestamp: FirebaseFirestore.CodableBSONTimestamp {} diff --git a/Firestore/Swift/Source/Codable/CodablePassThroughTypes.swift b/Firestore/Swift/Source/Codable/CodablePassThroughTypes.swift index c2b4790d624..65b19af0517 100644 --- a/Firestore/Swift/Source/Codable/CodablePassThroughTypes.swift +++ b/Firestore/Swift/Source/Codable/CodablePassThroughTypes.swift @@ -37,8 +37,8 @@ struct FirestorePassthroughTypes: StructureCodingPassthroughTypeResolver { t is MaxKey || t is RegexValue || t is Int32Value || - t is BsonObjectId || - t is BsonTimestamp || - t is BsonBinaryData + t is BSONObjectId || + t is BSONTimestamp || + t is BSONBinaryData } } diff --git a/Firestore/Swift/Source/SwiftAPI/FieldValue+Swift.swift b/Firestore/Swift/Source/SwiftAPI/FieldValue+Swift.swift index 4f7e3700263..ccab6238267 100644 --- a/Firestore/Swift/Source/SwiftAPI/FieldValue+Swift.swift +++ b/Firestore/Swift/Source/SwiftAPI/FieldValue+Swift.swift @@ -40,54 +40,4 @@ public extension FieldValue { } return FieldValue.__vector(with: nsNumbers) } - - /// Returns a `MinKey` instance. - /// - Returns: A `MinKey` instance. - static func minKey() -> MinKey { - return FieldValue.__minKey() - } - - /// Returns a `MaxKey` instance. - /// - Returns: A `MaxKey` instance. - static func maxKey() -> MaxKey { - return FieldValue.__maxKey() - } - - /// Creates a new `RegexValue` constructed with the given pattern and options. - /// - Parameter pattern: The pattern of the regular expression. - /// - Parameter options: The options of the regular expression. - /// - Returns: A new `RegexValue` constructed with the given pattern and options. - static func regex(pattern: String, options: String) -> RegexValue { - return FieldValue.__regex(withPattern: pattern, options: options) - } - - /// Creates a new `Int32Value` with the given signed 32-bit integer value. - /// - Parameter value: The 32-bit number to be used for constructing the Int32Value. - /// - Returns: A new `Int32Value` instance. - static func int32(_ value: Int32) -> Int32Value { - return FieldValue.__int32(withValue: value) - } - - /// Creates a new `BsonObjectId` with the given value. - /// - Parameter value: The 24-character hex string representation of the ObjectId. - /// - Returns: A new `BsonObjectId` instance constructed with the given value. - static func bsonObjectId(_ value: String) -> BsonObjectId { - return FieldValue.__bsonObjectId(withValue: value) - } - - /// Creates a new `BsonTimestamp` with the given values. - /// @param seconds The underlying unsigned 32-bit integer for seconds. - /// @param increment The underlying unsigned 32-bit integer for increment. - /// @return A new `BsonTimestamp` instance constructed with the given values. - static func bsonTimestamp(seconds: UInt32, increment: UInt32) -> BsonTimestamp { - return FieldValue.__bsonTimestamp(withSeconds: seconds, increment: increment) - } - - /// Creates a new `BsonBinaryData` object with the given subtype and data. - /// @param subtype The subtype of the data. - /// @param data The binary data. - /// @return A new `BsonBinaryData` instance constructed with the given values. 
- static func bsonBinaryData(subtype: UInt8, data: Data) -> BsonBinaryData { - return FieldValue.__bsonBinaryData(withSubtype: subtype, data: data) - } } diff --git a/Firestore/Swift/Tests/Integration/BsonTypesIntegrationTests.swift b/Firestore/Swift/Tests/Integration/BsonTypesIntegrationTests.swift index 910514078dd..5354b856e88 100644 --- a/Firestore/Swift/Tests/Integration/BsonTypesIntegrationTests.swift +++ b/Firestore/Swift/Tests/Integration/BsonTypesIntegrationTests.swift @@ -104,8 +104,8 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { func testCanWriteAndReadBsonTypes() async throws { let collection = collectionRef() let ref = try await collection.addDocument(data: [ - "binary": BsonBinaryData(subtype: 1, data: Data([1, 2, 3])), - "objectId": BsonObjectId("507f191e810c19729de860ea"), + "binary": BSONBinaryData(subtype: 1, data: Data([1, 2, 3])), + "objectId": BSONObjectId("507f191e810c19729de860ea"), "int32": Int32Value(1), "min": MinKey.shared, "max": MaxKey.shared, @@ -113,15 +113,15 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { ]) try await ref.updateData([ - "binary": BsonBinaryData(subtype: 1, data: Data([1, 2, 3])), - "timestamp": BsonTimestamp(seconds: 1, increment: 2), + "binary": BSONBinaryData(subtype: 1, data: Data([1, 2, 3])), + "timestamp": BSONTimestamp(seconds: 1, increment: 2), "int32": Int32Value(2), ]) let snapshot = try await ref.getDocument() XCTAssertEqual( - snapshot.get("objectId") as? BsonObjectId, - BsonObjectId("507f191e810c19729de860ea") + snapshot.get("objectId") as? BSONObjectId, + BSONObjectId("507f191e810c19729de860ea") ) XCTAssertEqual( snapshot.get("int32") as? Int32Value, @@ -136,12 +136,12 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { MaxKey.shared ) XCTAssertEqual( - snapshot.get("binary") as? BsonBinaryData, - BsonBinaryData(subtype: 1, data: Data([1, 2, 3])) + snapshot.get("binary") as? BSONBinaryData, + BSONBinaryData(subtype: 1, data: Data([1, 2, 3])) ) XCTAssertEqual( - snapshot.get("timestamp") as? BsonTimestamp, - BsonTimestamp(seconds: 1, increment: 2) + snapshot.get("timestamp") as? BSONTimestamp, + BSONTimestamp(seconds: 1, increment: 2) ) XCTAssertEqual( snapshot.get("regex") as? RegexValue, @@ -157,23 +157,23 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { // Adding docs to cache, do not wait for promise to resolve. ref.setData([ - "binary": BsonBinaryData(subtype: 1, data: Data([1, 2, 3])), - "objectId": BsonObjectId("507f191e810c19729de860ea"), + "binary": BSONBinaryData(subtype: 1, data: Data([1, 2, 3])), + "objectId": BSONObjectId("507f191e810c19729de860ea"), "int32": Int32Value(1), "min": MinKey.shared, "max": MaxKey.shared, "regex": RegexValue(pattern: "^foo", options: "i"), ]) ref.updateData([ - "binary": BsonBinaryData(subtype: 128, data: Data([1, 2, 3])), - "timestamp": BsonTimestamp(seconds: 1, increment: 2), + "binary": BSONBinaryData(subtype: 128, data: Data([1, 2, 3])), + "timestamp": BSONTimestamp(seconds: 1, increment: 2), "int32": Int32Value(2), ]) let snapshot = readDocument(forRef: ref, source: FirestoreSource.cache) XCTAssertEqual( - snapshot.get("objectId") as? BsonObjectId, - BsonObjectId("507f191e810c19729de860ea") + snapshot.get("objectId") as? BSONObjectId, + BSONObjectId("507f191e810c19729de860ea") ) XCTAssertEqual( snapshot.get("int32") as? Int32Value, @@ -188,12 +188,12 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { MaxKey.shared ) XCTAssertEqual( - snapshot.get("binary") as? 
BsonBinaryData, - BsonBinaryData(subtype: 128, data: Data([1, 2, 3])) + snapshot.get("binary") as? BSONBinaryData, + BSONBinaryData(subtype: 128, data: Data([1, 2, 3])) ) XCTAssertEqual( - snapshot.get("timestamp") as? BsonTimestamp, - BsonTimestamp(seconds: 1, increment: 2) + snapshot.get("timestamp") as? BSONTimestamp, + BSONTimestamp(seconds: 1, increment: 2) ) XCTAssertEqual( snapshot.get("regex") as? RegexValue, @@ -203,16 +203,16 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { func testCanFilterAndOrderObjectIds() async throws { let testDocs = [ - "a": ["key": BsonObjectId("507f191e810c19729de860ea")], - "b": ["key": BsonObjectId("507f191e810c19729de860eb")], - "c": ["key": BsonObjectId("507f191e810c19729de860ec")], + "a": ["key": BSONObjectId("507f191e810c19729de860ea")], + "b": ["key": BSONObjectId("507f191e810c19729de860eb")], + "c": ["key": BSONObjectId("507f191e810c19729de860ec")], ] let collection = collectionRef() await setDocumentData(testDocs, toCollection: collection) var query = collection - .whereField("key", isGreaterThan: BsonObjectId("507f191e810c19729de860ea")) + .whereField("key", isGreaterThan: BSONObjectId("507f191e810c19729de860ea")) .order(by: "key", descending: true) try await assertSdkQueryResultsConsistentWithBackend( @@ -225,8 +225,8 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { query = collection .whereField("key", in: [ - BsonObjectId("507f191e810c19729de860ea"), - BsonObjectId("507f191e810c19729de860eb"), + BSONObjectId("507f191e810c19729de860ea"), + BSONObjectId("507f191e810c19729de860eb"), ]) .order(by: "key", descending: true) try await assertSdkQueryResultsConsistentWithBackend( @@ -270,16 +270,16 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { func testCanFilterAndOrderTimestampValues() async throws { let testDocs: [String: [String: Any]] = [ - "a": ["key": BsonTimestamp(seconds: 1, increment: 1)], - "b": ["key": BsonTimestamp(seconds: 1, increment: 2)], - "c": ["key": BsonTimestamp(seconds: 2, increment: 1)], + "a": ["key": BSONTimestamp(seconds: 1, increment: 1)], + "b": ["key": BSONTimestamp(seconds: 1, increment: 2)], + "c": ["key": BSONTimestamp(seconds: 2, increment: 1)], ] let collection = collectionRef() await setDocumentData(testDocs, toCollection: collection) var query = collection - .whereField("key", isGreaterThan: BsonTimestamp(seconds: 1, increment: 1)) + .whereField("key", isGreaterThan: BSONTimestamp(seconds: 1, increment: 1)) .order(by: "key", descending: true) try await assertSdkQueryResultsConsistentWithBackend( testDocs, @@ -289,7 +289,7 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { ) query = collection - .whereField("key", isNotEqualTo: BsonTimestamp(seconds: 1, increment: 1)) + .whereField("key", isNotEqualTo: BSONTimestamp(seconds: 1, increment: 1)) .order(by: "key", descending: true) try await assertSdkQueryResultsConsistentWithBackend( testDocs, @@ -301,9 +301,9 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { func testCanFilterAndOrderBinaryValues() async throws { let testDocs: [String: [String: Any]] = [ - "a": ["key": BsonBinaryData(subtype: 1, data: Data([1, 2, 3]))], - "b": ["key": BsonBinaryData(subtype: 1, data: Data([1, 2, 4]))], - "c": ["key": BsonBinaryData(subtype: 2, data: Data([1, 2, 3]))], + "a": ["key": BSONBinaryData(subtype: 1, data: Data([1, 2, 3]))], + "b": ["key": BSONBinaryData(subtype: 1, data: Data([1, 2, 4]))], + "c": ["key": BSONBinaryData(subtype: 2, data: Data([1, 2, 3]))], ] let collection = collectionRef() @@ -312,7 +312,7 @@ class 
BsonTypesIntegrationTests: FSTIntegrationTestCase { var query = collection .whereField( "key", - isGreaterThan: BsonBinaryData(subtype: 1, data: Data([1, 2, 3])) + isGreaterThan: BSONBinaryData(subtype: 1, data: Data([1, 2, 3])) ) .order(by: "key", descending: true) try await assertSdkQueryResultsConsistentWithBackend( @@ -325,11 +325,11 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { query = collection .whereField( "key", - isGreaterThanOrEqualTo: BsonBinaryData(subtype: 1, data: Data([1, 2, 3])) + isGreaterThanOrEqualTo: BSONBinaryData(subtype: 1, data: Data([1, 2, 3])) ) .whereField( "key", - isLessThan: BsonBinaryData(subtype: 2, data: Data([1, 2, 3])) + isLessThan: BSONBinaryData(subtype: 2, data: Data([1, 2, 3])) ) .order(by: "key", descending: true) try await assertSdkQueryResultsConsistentWithBackend( @@ -572,18 +572,18 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { // This test includes several BSON values of different types and ensures // correct inter-type and intra-type order for BSON values. let testDocs: [String: [String: Any]] = [ - "bsonObjectId1": ["key": BsonObjectId("507f191e810c19729de860ea")], - "bsonObjectId2": ["key": BsonObjectId("507f191e810c19729de860eb")], - "bsonObjectId3": ["key": BsonObjectId("407f191e810c19729de860ea")], + "bsonObjectId1": ["key": BSONObjectId("507f191e810c19729de860ea")], + "bsonObjectId2": ["key": BSONObjectId("507f191e810c19729de860eb")], + "bsonObjectId3": ["key": BSONObjectId("407f191e810c19729de860ea")], "regex1": ["key": RegexValue(pattern: "^bar", options: "m")], "regex2": ["key": RegexValue(pattern: "^bar", options: "i")], "regex3": ["key": RegexValue(pattern: "^baz", options: "i")], - "bsonTimestamp1": ["key": BsonTimestamp(seconds: 2, increment: 0)], - "bsonTimestamp2": ["key": BsonTimestamp(seconds: 1, increment: 2)], - "bsonTimestamp3": ["key": BsonTimestamp(seconds: 1, increment: 1)], - "bsonBinary1": ["key": BsonBinaryData(subtype: 1, data: Data([1, 2, 3]))], - "bsonBinary2": ["key": BsonBinaryData(subtype: 1, data: Data([1, 2, 4]))], - "bsonBinary3": ["key": BsonBinaryData(subtype: 2, data: Data([1, 2, 2]))], + "bsonTimestamp1": ["key": BSONTimestamp(seconds: 2, increment: 0)], + "bsonTimestamp2": ["key": BSONTimestamp(seconds: 1, increment: 2)], + "bsonTimestamp3": ["key": BSONTimestamp(seconds: 1, increment: 1)], + "bsonBinary1": ["key": BSONBinaryData(subtype: 1, data: Data([1, 2, 3]))], + "bsonBinary2": ["key": BSONBinaryData(subtype: 1, data: Data([1, 2, 4]))], + "bsonBinary3": ["key": BSONBinaryData(subtype: 2, data: Data([1, 2, 2]))], "int32Value1": ["key": Int32Value(-1)], "int32Value2": ["key": Int32Value(1)], "int32Value3": ["key": Int32Value(0)], @@ -635,12 +635,12 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { "doubleValue": ["key": 2.0], "integerValue": ["key": 3], "timestampValue": ["key": Timestamp(seconds: 100, nanoseconds: 123_456_000)], - "bsonTimestampValue": ["key": BsonTimestamp(seconds: 1, increment: 2)], + "bsonTimestampValue": ["key": BSONTimestamp(seconds: 1, increment: 2)], "stringValue": ["key": "string"], "bytesValue": ["key": Data([0, 1, 255])], - "bsonBinaryValue": ["key": BsonBinaryData(subtype: 1, data: Data([1, 2, 3]))], + "bsonBinaryValue": ["key": BSONBinaryData(subtype: 1, data: Data([1, 2, 3]))], "referenceValue": ["key": collection.document("doc")], - "objectIdValue": ["key": BsonObjectId("507f191e810c19729de860ea")], + "objectIdValue": ["key": BSONObjectId("507f191e810c19729de860ea")], "geoPointValue": ["key": GeoPoint(latitude: 0, longitude: 0)], 
"regexValue": ["key": RegexValue(pattern: "^foo", options: "i")], "arrayValue": ["key": [1, 2]], @@ -688,9 +688,9 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { func testCanRunTransactionsOnDocumentsWithBsonTypes() async throws { let testDocs = [ - "a": ["key": BsonTimestamp(seconds: 1, increment: 2)], + "a": ["key": BSONTimestamp(seconds: 1, increment: 2)], "b": ["key": "placeholder"], - "c": ["key": BsonBinaryData(subtype: 1, data: Data([1, 2, 3]))], + "c": ["key": BSONBinaryData(subtype: 1, data: Data([1, 2, 3]))], ] let collection = collectionRef() @@ -712,7 +712,7 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { XCTAssertEqual( toDataArray(snapshot) as? [[String: RegexValue]], [ - ["key": BsonTimestamp(seconds: 1, increment: 2)], + ["key": BSONTimestamp(seconds: 1, increment: 2)], ["key": RegexValue(pattern: "^foo", options: "i")], ] as? [[String: RegexValue]] ) diff --git a/Firestore/Swift/Tests/Integration/CodableIntegrationTests.swift b/Firestore/Swift/Tests/Integration/CodableIntegrationTests.swift index fe9e95466ad..c060dfb109f 100644 --- a/Firestore/Swift/Tests/Integration/CodableIntegrationTests.swift +++ b/Firestore/Swift/Tests/Integration/CodableIntegrationTests.swift @@ -115,9 +115,9 @@ class CodableIntegrationTests: FSTIntegrationTestCase { var int32: Int32Value var minKey: MinKey var maxKey: MaxKey - var bsonOjectId: BsonObjectId - var bsonTimestamp: BsonTimestamp - var bsonBinaryData: BsonBinaryData + var bsonOjectId: BSONObjectId + var bsonTimestamp: BSONTimestamp + var bsonBinaryData: BSONBinaryData } let docToWrite = documentRef() let model = Model(name: "test", @@ -130,9 +130,9 @@ class CodableIntegrationTests: FSTIntegrationTestCase { int32: Int32Value(1), minKey: MinKey.shared, maxKey: MaxKey.shared, - bsonOjectId: BsonObjectId("507f191e810c19729de860ec"), - bsonTimestamp: BsonTimestamp(seconds: 123, increment: 456), - bsonBinaryData: BsonBinaryData(subtype: 128, data: Data([1, 2]))) + bsonOjectId: BSONObjectId("507f191e810c19729de860ec"), + bsonTimestamp: BSONTimestamp(seconds: 123, increment: 456), + bsonBinaryData: BSONBinaryData(subtype: 128, data: Data([1, 2]))) for flavor in allFlavors { try setData(from: model, forDocument: docToWrite, withFlavor: flavor) @@ -253,19 +253,19 @@ class CodableIntegrationTests: FSTIntegrationTestCase { func testBsonObjectId() throws { try assertCanWriteAndReadCodableValueWithAllFlavors( - value: BsonObjectId("507f191e810c19729de860ec") + value: BSONObjectId("507f191e810c19729de860ec") ) } func testBsonTimestamp() throws { try assertCanWriteAndReadCodableValueWithAllFlavors( - value: BsonTimestamp(seconds: 123, increment: 456) + value: BSONTimestamp(seconds: 123, increment: 456) ) } func testBsonBinaryData() throws { try assertCanWriteAndReadCodableValueWithAllFlavors( - value: BsonBinaryData(subtype: 128, data: Data([1, 2, 3])) + value: BSONBinaryData(subtype: 128, data: Data([1, 2, 3])) ) } diff --git a/Firestore/Swift/Tests/Integration/SnapshotListenerSourceTests.swift b/Firestore/Swift/Tests/Integration/SnapshotListenerSourceTests.swift index 1eae723a4c6..260772b5270 100644 --- a/Firestore/Swift/Tests/Integration/SnapshotListenerSourceTests.swift +++ b/Firestore/Swift/Tests/Integration/SnapshotListenerSourceTests.swift @@ -759,9 +759,9 @@ class SnapshotListenerSourceTests: FSTIntegrationTestCase { let testData = [ "a": ["key": MaxKey.shared], "b": ["key": MinKey.shared], - "c": ["key": BsonTimestamp(seconds: 1, increment: 2)], - "d": ["key": BsonObjectId("507f191e810c19729de860ea")], - "e": ["key": 
BsonBinaryData(subtype: 1, data: Data([1, 2, 3]))], + "c": ["key": BSONTimestamp(seconds: 1, increment: 2)], + "d": ["key": BSONObjectId("507f191e810c19729de860ea")], + "e": ["key": BSONBinaryData(subtype: 1, data: Data([1, 2, 3]))], "f": ["key": RegexValue(pattern: "^foo", options: "i")], ] @@ -780,15 +780,15 @@ class SnapshotListenerSourceTests: FSTIntegrationTestCase { testData["b"]!["key"] ) XCTAssertEqual( - querySnap.documents[1].data()["key"] as! BsonTimestamp, + querySnap.documents[1].data()["key"] as! BSONTimestamp, testData["c"]!["key"] ) XCTAssertEqual( - querySnap.documents[2].data()["key"] as! BsonBinaryData, + querySnap.documents[2].data()["key"] as! BSONBinaryData, testData["e"]!["key"] ) XCTAssertEqual( - querySnap.documents[3].data()["key"] as! BsonObjectId, + querySnap.documents[3].data()["key"] as! BSONObjectId, testData["d"]!["key"] ) XCTAssertEqual( @@ -814,15 +814,15 @@ class SnapshotListenerSourceTests: FSTIntegrationTestCase { newData["key"]! ) XCTAssertEqual( - querySnap.documents[2].data()["key"] as! BsonTimestamp, + querySnap.documents[2].data()["key"] as! BSONTimestamp, testData["c"]!["key"] ) XCTAssertEqual( - querySnap.documents[3].data()["key"] as! BsonBinaryData, + querySnap.documents[3].data()["key"] as! BSONBinaryData, testData["e"]!["key"] ) XCTAssertEqual( - querySnap.documents[4].data()["key"] as! BsonObjectId, + querySnap.documents[4].data()["key"] as! BSONObjectId, testData["d"]!["key"] ) XCTAssertEqual( diff --git a/Firestore/Swift/Tests/Integration/TypeTest.swift b/Firestore/Swift/Tests/Integration/TypeTest.swift index e31edfce737..a7ef1804e16 100644 --- a/Firestore/Swift/Tests/Integration/TypeTest.swift +++ b/Firestore/Swift/Tests/Integration/TypeTest.swift @@ -138,10 +138,10 @@ class TypeTest: FSTIntegrationTestCase { } func testBsonTimestampEquality() { - let v1 = BsonTimestamp(seconds: 1, increment: 1) - let v2 = BsonTimestamp(seconds: 1, increment: 1) - let v3 = BsonTimestamp(seconds: 1, increment: 2) - let v4 = BsonTimestamp(seconds: 2, increment: 1) + let v1 = BSONTimestamp(seconds: 1, increment: 1) + let v2 = BSONTimestamp(seconds: 1, increment: 1) + let v3 = BSONTimestamp(seconds: 1, increment: 2) + let v4 = BSONTimestamp(seconds: 2, increment: 1) XCTAssertTrue(v1 == v2) XCTAssertFalse(v1 == v3) @@ -153,9 +153,9 @@ class TypeTest: FSTIntegrationTestCase { } func testBsonObjectIdEquality() { - let v1 = BsonObjectId("foo") - let v2 = BsonObjectId("foo") - let v3 = BsonObjectId("bar") + let v1 = BSONObjectId("foo") + let v2 = BSONObjectId("foo") + let v3 = BSONObjectId("bar") XCTAssertTrue(v1 == v2) XCTAssertFalse(v1 == v3) @@ -165,10 +165,10 @@ class TypeTest: FSTIntegrationTestCase { } func testBsonBinaryDataEquality() { - let v1 = BsonBinaryData(subtype: 1, data: Data([1, 2, 3])) - let v2 = BsonBinaryData(subtype: 1, data: Data([1, 2, 3])) - let v3 = BsonBinaryData(subtype: 128, data: Data([1, 2, 3])) - let v4 = BsonBinaryData(subtype: 1, data: Data([1, 2, 3, 4])) + let v1 = BSONBinaryData(subtype: 1, data: Data([1, 2, 3])) + let v2 = BSONBinaryData(subtype: 1, data: Data([1, 2, 3])) + let v3 = BSONBinaryData(subtype: 128, data: Data([1, 2, 3])) + let v4 = BSONBinaryData(subtype: 1, data: Data([1, 2, 3, 4])) XCTAssertTrue(v1 == v2) XCTAssertFalse(v1 == v3) @@ -210,29 +210,29 @@ class TypeTest: FSTIntegrationTestCase { func testCanReadAndWriteBsonTimestampFields() async throws { _ = try await expectRoundtrip( coll: collectionRef(), - data: ["bsonTimestamp": BsonTimestamp(seconds: 1, increment: 2)] + data: ["bsonTimestamp": 
BSONTimestamp(seconds: 1, increment: 2)] ) } func testCanReadAndWriteBsonObjectIdFields() async throws { _ = try await expectRoundtrip( coll: collectionRef(), - data: ["bsonObjectId": BsonObjectId("507f191e810c19729de860ea")] + data: ["bsonObjectId": BSONObjectId("507f191e810c19729de860ea")] ) } func testCanReadAndWriteBsonBinaryDataFields() async throws { _ = try await expectRoundtrip( coll: collectionRef(), - data: ["bsonBinaryData": BsonBinaryData(subtype: 1, data: Data([1, 2, 3]))] + data: ["bsonBinaryData": BSONBinaryData(subtype: 1, data: Data([1, 2, 3]))] ) _ = try await expectRoundtrip( coll: collectionRef(), - data: ["bsonBinaryData": BsonBinaryData(subtype: 128, data: Data([1, 2, 3]))] + data: ["bsonBinaryData": BSONBinaryData(subtype: 128, data: Data([1, 2, 3]))] ) _ = try await expectRoundtrip( coll: collectionRef(), - data: ["bsonBinaryData": BsonBinaryData(subtype: 255, data: Data([]))] + data: ["bsonBinaryData": BSONBinaryData(subtype: 255, data: Data([]))] ) } @@ -240,9 +240,9 @@ class TypeTest: FSTIntegrationTestCase { _ = try await expectRoundtrip( coll: collectionRef(), data: ["array": [ - BsonBinaryData(subtype: 1, data: Data([1, 2, 3])), - BsonObjectId("507f191e810c19729de860ea"), - BsonTimestamp(seconds: 123, increment: 456), + BSONBinaryData(subtype: 1, data: Data([1, 2, 3])), + BSONObjectId("507f191e810c19729de860ea"), + BSONTimestamp(seconds: 123, increment: 456), Int32Value(1), MinKey.shared, MaxKey.shared, @@ -255,9 +255,9 @@ class TypeTest: FSTIntegrationTestCase { _ = try await expectRoundtrip( coll: collectionRef(), data: ["array": [ - "binary": BsonBinaryData(subtype: 1, data: Data([1, 2, 3])), - "objectId": BsonObjectId("507f191e810c19729de860ea"), - "bsonTimestamp": BsonTimestamp(seconds: 123, increment: 456), + "binary": BSONBinaryData(subtype: 1, data: Data([1, 2, 3])), + "objectId": BSONObjectId("507f191e810c19729de860ea"), + "bsonTimestamp": BSONTimestamp(seconds: 123, increment: 456), "int32": Int32Value(1), "min": MinKey.shared, "max": MaxKey.shared, @@ -290,8 +290,8 @@ class TypeTest: FSTIntegrationTestCase { var errorMessage: String? do { - // BsonObjectId with string length not equal to 24 - try await docRef.setData(["key": BsonObjectId("foo")]) + // BSONObjectId with string length not equal to 24 + try await docRef.setData(["key": BSONObjectId("foo")]) XCTFail("Expected error for invalid BSON Object ID string length") } catch { errorMessage = (error as NSError).userInfo[NSLocalizedDescriptionKey] as? 
String @@ -314,12 +314,12 @@ class TypeTest: FSTIntegrationTestCase { "doubleValue": ["key": 2.0], "integerValue": ["key": 3], "timestampValue": ["key": Timestamp(seconds: 100, nanoseconds: 123_456_000)], - "bsonTimestampValue": ["key": BsonTimestamp(seconds: 1, increment: 2)], + "bsonTimestampValue": ["key": BSONTimestamp(seconds: 1, increment: 2)], "stringValue": ["key": "string"], "bytesValue": ["key": Data([0, 1, 255])], - "bsonBinaryValue": ["key": BsonBinaryData(subtype: 1, data: Data([1, 2, 3]))], + "bsonBinaryValue": ["key": BSONBinaryData(subtype: 1, data: Data([1, 2, 3]))], "referenceValue": ["key": collection.document("doc")], - "objectIdValue": ["key": BsonObjectId("507f191e810c19729de860ea")], + "objectIdValue": ["key": BSONObjectId("507f191e810c19729de860ea")], "geoPointValue": ["key": GeoPoint(latitude: 0, longitude: 0)], "regexValue": ["key": RegexValue(pattern: "^foo", options: "i")], "arrayValue": ["key": [1, 2]], From 32f35a63369dfe4d00491c8d516f6853184ff286 Mon Sep 17 00:00:00 2001 From: Ehsan Nasiri Date: Fri, 16 May 2025 14:45:57 -0700 Subject: [PATCH 11/16] Address feedback (3). --- Firestore/CHANGELOG.md | 4 ++-- Firestore/Source/API/FIRBSONBinaryData.mm | 8 ++------ Firestore/Source/API/FIRBSONObjectId.mm | 4 ---- Firestore/Source/API/FIRBSONTimestamp.mm | 4 ---- Firestore/Source/API/FIRInt32Value.mm | 4 ---- Firestore/Source/API/FIRMaxKey.mm | 4 ---- Firestore/Source/API/FIRMinKey.mm | 4 ---- Firestore/Source/API/FIRRegexValue.mm | 4 ---- .../Source/Public/FirebaseFirestore/FIRBSONBinaryData.h | 4 ++-- 9 files changed, 6 insertions(+), 34 deletions(-) diff --git a/Firestore/CHANGELOG.md b/Firestore/CHANGELOG.md index fa8af8f9e0f..5a47d7eb2db 100644 --- a/Firestore/CHANGELOG.md +++ b/Firestore/CHANGELOG.md @@ -1,6 +1,6 @@ # Unreleased -- [feature] Adds support for the following new types: MinKey, MaxKey, RegexValue, Int32Value, BSONObjectId, - BSONTimestamp, and BSONBinaryData. (#14800) +- [feature] Adds support for the following new types: `MinKey`, `MaxKey`, `RegexValue`, + `Int32Value`, `BSONObjectId`, `BSONTimestamp`, and `BSONBinaryData`. (#14800) # 11.12.0 - [fixed] Fixed the `null` value handling in `isNotEqualTo` and `notIn` filters. 
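
The changelog entry above lists the seven new field types introduced by this patch series. For orientation, here is a minimal Swift usage sketch assembled from the integration tests earlier in the series; the collection and document names are placeholders, and only constructors that the patch itself exercises are used.

import FirebaseFirestore
import Foundation

// Illustrative sketch based on the patch's integration tests: writes one field
// of each new type and reads the values back. Names here are placeholders.
func roundTripNewBsonTypes() async throws {
  let doc = Firestore.firestore().collection("bson-types-demo").document("sample")

  try await doc.setData([
    "min": MinKey.shared,
    "max": MaxKey.shared,
    "regex": RegexValue(pattern: "^foo", options: "i"),
    "int32": Int32Value(1),
    "objectId": BSONObjectId("507f191e810c19729de860ea"),  // must be a 24-character hex string
    "timestamp": BSONTimestamp(seconds: 1, increment: 2),
    "binary": BSONBinaryData(subtype: 1, data: Data([1, 2, 3])),
  ])

  let snapshot = try await doc.getDocument()
  // Values round-trip as the same concrete types, so plain casts recover them.
  let objectId = snapshot.get("objectId") as? BSONObjectId  // BSONObjectId("507f191e810c19729de860ea")
  let binary = snapshot.get("binary") as? BSONBinaryData    // subtype 1, bytes [1, 2, 3]
  _ = (objectId, binary)
}
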
diff --git a/Firestore/Source/API/FIRBSONBinaryData.mm b/Firestore/Source/API/FIRBSONBinaryData.mm index cb53b0ab54f..8af843127c7 100644 --- a/Firestore/Source/API/FIRBSONBinaryData.mm +++ b/Firestore/Source/API/FIRBSONBinaryData.mm @@ -16,20 +16,18 @@ #include "Firestore/Source/Public/FirebaseFirestore/FIRBSONBinaryData.h" -NS_ASSUME_NONNULL_BEGIN - @implementation FIRBSONBinaryData - (instancetype)initWithSubtype:(uint8_t)subtype data:(NSData *)data { self = [super init]; if (self) { _subtype = subtype; - _data = data; + _data = [data copy]; } return self; } -- (BOOL)isEqual:(id)object { +- (BOOL)isEqual:(nullable id)object { if (self == object) { return YES; } @@ -52,5 +50,3 @@ - (NSString *)description { } @end - -NS_ASSUME_NONNULL_END diff --git a/Firestore/Source/API/FIRBSONObjectId.mm b/Firestore/Source/API/FIRBSONObjectId.mm index 470e5f18fce..1ea9b65d0c9 100644 --- a/Firestore/Source/API/FIRBSONObjectId.mm +++ b/Firestore/Source/API/FIRBSONObjectId.mm @@ -16,8 +16,6 @@ #include "Firestore/Source/Public/FirebaseFirestore/FIRBSONObjectId.h" -NS_ASSUME_NONNULL_BEGIN - @implementation FIRBSONObjectId - (instancetype)initWithValue:(NSString *)value { @@ -50,5 +48,3 @@ - (NSString *)description { } @end - -NS_ASSUME_NONNULL_END diff --git a/Firestore/Source/API/FIRBSONTimestamp.mm b/Firestore/Source/API/FIRBSONTimestamp.mm index 9ae6735692d..4b8b8de3f9e 100644 --- a/Firestore/Source/API/FIRBSONTimestamp.mm +++ b/Firestore/Source/API/FIRBSONTimestamp.mm @@ -16,8 +16,6 @@ #include "Firestore/Source/Public/FirebaseFirestore/FIRBSONTimestamp.h" -NS_ASSUME_NONNULL_BEGIN - @implementation FIRBSONTimestamp - (instancetype)initWithSeconds:(uint32_t)seconds increment:(uint32_t)increment { @@ -52,5 +50,3 @@ - (NSString *)description { } @end - -NS_ASSUME_NONNULL_END diff --git a/Firestore/Source/API/FIRInt32Value.mm b/Firestore/Source/API/FIRInt32Value.mm index 8e61a07a856..a6a03d09c28 100644 --- a/Firestore/Source/API/FIRInt32Value.mm +++ b/Firestore/Source/API/FIRInt32Value.mm @@ -16,8 +16,6 @@ #include "Firestore/Source/Public/FirebaseFirestore/FIRInt32Value.h" -NS_ASSUME_NONNULL_BEGIN - @implementation FIRInt32Value - (instancetype)initWithValue:(int)value { @@ -50,5 +48,3 @@ - (NSString *)description { } @end - -NS_ASSUME_NONNULL_END diff --git a/Firestore/Source/API/FIRMaxKey.mm b/Firestore/Source/API/FIRMaxKey.mm index 84c970b9bd7..79d82122a55 100644 --- a/Firestore/Source/API/FIRMaxKey.mm +++ b/Firestore/Source/API/FIRMaxKey.mm @@ -16,8 +16,6 @@ #include "Firestore/Source/Public/FirebaseFirestore/FIRMaxKey.h" -NS_ASSUME_NONNULL_BEGIN - @implementation FIRMaxKey static FIRMaxKey *sharedInstance = nil; static dispatch_once_t onceToken; @@ -53,5 +51,3 @@ - (NSString *)description { } @end - -NS_ASSUME_NONNULL_END diff --git a/Firestore/Source/API/FIRMinKey.mm b/Firestore/Source/API/FIRMinKey.mm index ee0489bfdbc..f200dd084a4 100644 --- a/Firestore/Source/API/FIRMinKey.mm +++ b/Firestore/Source/API/FIRMinKey.mm @@ -16,8 +16,6 @@ #include "Firestore/Source/Public/FirebaseFirestore/FIRMinKey.h" -NS_ASSUME_NONNULL_BEGIN - @implementation FIRMinKey static FIRMinKey *sharedInstance = nil; static dispatch_once_t onceToken; @@ -53,5 +51,3 @@ - (NSString *)description { } @end - -NS_ASSUME_NONNULL_END diff --git a/Firestore/Source/API/FIRRegexValue.mm b/Firestore/Source/API/FIRRegexValue.mm index cde5b3cf462..9177e345930 100644 --- a/Firestore/Source/API/FIRRegexValue.mm +++ b/Firestore/Source/API/FIRRegexValue.mm @@ -16,8 +16,6 @@ #include "Firestore/Source/Public/FirebaseFirestore/FIRRegexValue.h" 
-NS_ASSUME_NONNULL_BEGIN - @implementation FIRRegexValue - (instancetype)initWithPattern:(NSString *)pattern options:(NSString *)options { @@ -53,5 +51,3 @@ - (NSString *)description { } @end - -NS_ASSUME_NONNULL_END diff --git a/Firestore/Source/Public/FirebaseFirestore/FIRBSONBinaryData.h b/Firestore/Source/Public/FirebaseFirestore/FIRBSONBinaryData.h index 69282eea806..e27f8150f74 100644 --- a/Firestore/Source/Public/FirebaseFirestore/FIRBSONBinaryData.h +++ b/Firestore/Source/Public/FirebaseFirestore/FIRBSONBinaryData.h @@ -40,10 +40,10 @@ __attribute__((objc_subclassing_restricted)) * @param subtype An 8-bit unsigned integer denoting the subtype of the data. * @param data The binary data. */ -- (instancetype)initWithSubtype:(uint8_t)subtype data:(nonnull NSData *)data; +- (instancetype)initWithSubtype:(uint8_t)subtype data:(NSData *)data; /** Returns true if the given object is equal to this, and false otherwise. */ -- (BOOL)isEqual:(id)object; +- (BOOL)isEqual:(nullable id)object; @end From 31a1bd8b5c4c681b0f41dfcc380bcf0f6c4f60ba Mon Sep 17 00:00:00 2001 From: Ehsan Nasiri Date: Fri, 16 May 2025 16:18:59 -0700 Subject: [PATCH 12/16] Address feedback (4). --- Firestore/Source/API/FIRBSONObjectId.mm | 4 ++-- Firestore/Source/API/FIRBSONTimestamp.mm | 2 +- Firestore/Source/API/FIRInt32Value.mm | 2 +- Firestore/Source/API/FIRMaxKey.mm | 2 +- Firestore/Source/API/FIRMinKey.mm | 2 +- Firestore/Source/API/FIRRegexValue.mm | 6 +++--- Firestore/Source/Public/FirebaseFirestore/FIRBSONObjectId.h | 2 +- .../Source/Public/FirebaseFirestore/FIRBSONTimestamp.h | 2 +- Firestore/Source/Public/FirebaseFirestore/FIRInt32Value.h | 2 +- Firestore/Source/Public/FirebaseFirestore/FIRMaxKey.h | 2 +- Firestore/Source/Public/FirebaseFirestore/FIRMinKey.h | 2 +- Firestore/Source/Public/FirebaseFirestore/FIRRegexValue.h | 2 +- 12 files changed, 15 insertions(+), 15 deletions(-) diff --git a/Firestore/Source/API/FIRBSONObjectId.mm b/Firestore/Source/API/FIRBSONObjectId.mm index 1ea9b65d0c9..d37d3001cca 100644 --- a/Firestore/Source/API/FIRBSONObjectId.mm +++ b/Firestore/Source/API/FIRBSONObjectId.mm @@ -21,12 +21,12 @@ @implementation FIRBSONObjectId - (instancetype)initWithValue:(NSString *)value { self = [super init]; if (self) { - _value = value; + _value = [value copy]; } return self; } -- (BOOL)isEqual:(id)object { +- (BOOL)isEqual:(nullable id)object { if (self == object) { return YES; } diff --git a/Firestore/Source/API/FIRBSONTimestamp.mm b/Firestore/Source/API/FIRBSONTimestamp.mm index 4b8b8de3f9e..1c812b0218c 100644 --- a/Firestore/Source/API/FIRBSONTimestamp.mm +++ b/Firestore/Source/API/FIRBSONTimestamp.mm @@ -27,7 +27,7 @@ - (instancetype)initWithSeconds:(uint32_t)seconds increment:(uint32_t)increment return self; } -- (BOOL)isEqual:(id)object { +- (BOOL)isEqual:(nullable id)object { if (self == object) { return YES; } diff --git a/Firestore/Source/API/FIRInt32Value.mm b/Firestore/Source/API/FIRInt32Value.mm index a6a03d09c28..5cb080b14bb 100644 --- a/Firestore/Source/API/FIRInt32Value.mm +++ b/Firestore/Source/API/FIRInt32Value.mm @@ -26,7 +26,7 @@ - (instancetype)initWithValue:(int)value { return self; } -- (BOOL)isEqual:(id)object { +- (BOOL)isEqual:(nullable id)object { if (self == object) { return YES; } diff --git a/Firestore/Source/API/FIRMaxKey.mm b/Firestore/Source/API/FIRMaxKey.mm index 79d82122a55..9d72782898a 100644 --- a/Firestore/Source/API/FIRMaxKey.mm +++ b/Firestore/Source/API/FIRMaxKey.mm @@ -36,7 +36,7 @@ - (id)copyWithZone:(__unused NSZone *_Nullable)zone { return self; } -- 
(BOOL)isEqual:(id)object { +- (BOOL)isEqual:(nullable id)object { if (self == object) { return YES; } diff --git a/Firestore/Source/API/FIRMinKey.mm b/Firestore/Source/API/FIRMinKey.mm index f200dd084a4..2612ad65f93 100644 --- a/Firestore/Source/API/FIRMinKey.mm +++ b/Firestore/Source/API/FIRMinKey.mm @@ -36,7 +36,7 @@ - (id)copyWithZone:(__unused NSZone *_Nullable)zone { return self; } -- (BOOL)isEqual:(id)object { +- (BOOL)isEqual:(nullable id)object { if (self == object) { return YES; } diff --git a/Firestore/Source/API/FIRRegexValue.mm b/Firestore/Source/API/FIRRegexValue.mm index 9177e345930..ff248a7818f 100644 --- a/Firestore/Source/API/FIRRegexValue.mm +++ b/Firestore/Source/API/FIRRegexValue.mm @@ -21,13 +21,13 @@ @implementation FIRRegexValue - (instancetype)initWithPattern:(NSString *)pattern options:(NSString *)options { self = [super init]; if (self) { - _pattern = pattern; - _options = options; + _pattern = [pattern copy]; + _options = [options copy]; } return self; } -- (BOOL)isEqual:(id)object { +- (BOOL)isEqual:(nullable id)object { if (self == object) { return YES; } diff --git a/Firestore/Source/Public/FirebaseFirestore/FIRBSONObjectId.h b/Firestore/Source/Public/FirebaseFirestore/FIRBSONObjectId.h index cea2c1066be..3a9655b090e 100644 --- a/Firestore/Source/Public/FirebaseFirestore/FIRBSONObjectId.h +++ b/Firestore/Source/Public/FirebaseFirestore/FIRBSONObjectId.h @@ -39,7 +39,7 @@ __attribute__((objc_subclassing_restricted)) - (instancetype)initWithValue:(nonnull NSString *)value NS_SWIFT_NAME(init(_:)); /** Returns true if the given object is equal to this, and false otherwise. */ -- (BOOL)isEqual:(id)object; +- (BOOL)isEqual:(nullable id)object; @end diff --git a/Firestore/Source/Public/FirebaseFirestore/FIRBSONTimestamp.h b/Firestore/Source/Public/FirebaseFirestore/FIRBSONTimestamp.h index cded36b8d7e..e04d25cba80 100644 --- a/Firestore/Source/Public/FirebaseFirestore/FIRBSONTimestamp.h +++ b/Firestore/Source/Public/FirebaseFirestore/FIRBSONTimestamp.h @@ -44,7 +44,7 @@ __attribute__((objc_subclassing_restricted)) - (instancetype)initWithSeconds:(uint32_t)seconds increment:(uint32_t)increment; /** Returns true if the given object is equal to this, and false otherwise. */ -- (BOOL)isEqual:(id)object; +- (BOOL)isEqual:(nullable id)object; @end diff --git a/Firestore/Source/Public/FirebaseFirestore/FIRInt32Value.h b/Firestore/Source/Public/FirebaseFirestore/FIRInt32Value.h index 6c583dcf4e5..df430957f63 100644 --- a/Firestore/Source/Public/FirebaseFirestore/FIRInt32Value.h +++ b/Firestore/Source/Public/FirebaseFirestore/FIRInt32Value.h @@ -39,7 +39,7 @@ __attribute__((objc_subclassing_restricted)) - (instancetype)initWithValue:(int)value NS_SWIFT_NAME(init(_:)); /** Returns true if the given object is equal to this, and false otherwise. */ -- (BOOL)isEqual:(id)object; +- (BOOL)isEqual:(nullable id)object; @end diff --git a/Firestore/Source/Public/FirebaseFirestore/FIRMaxKey.h b/Firestore/Source/Public/FirebaseFirestore/FIRMaxKey.h index d6c3c0e3385..9a6872fbddf 100644 --- a/Firestore/Source/Public/FirebaseFirestore/FIRMaxKey.h +++ b/Firestore/Source/Public/FirebaseFirestore/FIRMaxKey.h @@ -30,7 +30,7 @@ __attribute__((objc_subclassing_restricted)) @property(class, readonly) FIRMaxKey *shared; /** Returns true if the given object is equal to this, and false otherwise. 
*/ -- (BOOL)isEqual:(id)object; +- (BOOL)isEqual:(nullable id)object; @end diff --git a/Firestore/Source/Public/FirebaseFirestore/FIRMinKey.h b/Firestore/Source/Public/FirebaseFirestore/FIRMinKey.h index d82520574b6..04d164b9093 100644 --- a/Firestore/Source/Public/FirebaseFirestore/FIRMinKey.h +++ b/Firestore/Source/Public/FirebaseFirestore/FIRMinKey.h @@ -30,7 +30,7 @@ __attribute__((objc_subclassing_restricted)) @property(class, readonly) FIRMinKey *shared; /** Returns true if the given object is equal to this, and false otherwise. */ -- (BOOL)isEqual:(id)object; +- (BOOL)isEqual:(nullable id)object; @end diff --git a/Firestore/Source/Public/FirebaseFirestore/FIRRegexValue.h b/Firestore/Source/Public/FirebaseFirestore/FIRRegexValue.h index a13ac2e57c9..07e50a156fc 100644 --- a/Firestore/Source/Public/FirebaseFirestore/FIRRegexValue.h +++ b/Firestore/Source/Public/FirebaseFirestore/FIRRegexValue.h @@ -43,7 +43,7 @@ __attribute__((objc_subclassing_restricted)) - (instancetype)initWithPattern:(nonnull NSString *)pattern options:(nonnull NSString *)options; /** Returns true if the given object is equal to this, and false otherwise. */ -- (BOOL)isEqual:(id)object; +- (BOOL)isEqual:(nullable id)object; @end From fe873849eaeec07901800a09a48d8f8259c0a7ff Mon Sep 17 00:00:00 2001 From: Ehsan Nasiri Date: Tue, 3 Jun 2025 11:59:52 -0700 Subject: [PATCH 13/16] Address feedback (5). --- .../Source/API/{FIRBSONBinaryData.mm => FIRBSONBinaryData.m} | 0 .../Source/API/{FIRBSONObjectId.mm => FIRBSONObjectId.m} | 0 .../Source/API/{FIRBSONTimestamp.mm => FIRBSONTimestamp.m} | 0 Firestore/Source/API/{FIRInt32Value.mm => FIRInt32Value.m} | 0 Firestore/Source/API/{FIRMaxKey.mm => FIRMaxKey.m} | 0 Firestore/Source/API/{FIRMinKey.mm => FIRMinKey.m} | 0 Firestore/Source/API/{FIRRegexValue.mm => FIRRegexValue.m} | 0 Firestore/Source/Public/FirebaseFirestore/FIRMaxKey.h | 4 +++- Firestore/Source/Public/FirebaseFirestore/FIRMinKey.h | 4 +++- 9 files changed, 6 insertions(+), 2 deletions(-) rename Firestore/Source/API/{FIRBSONBinaryData.mm => FIRBSONBinaryData.m} (100%) rename Firestore/Source/API/{FIRBSONObjectId.mm => FIRBSONObjectId.m} (100%) rename Firestore/Source/API/{FIRBSONTimestamp.mm => FIRBSONTimestamp.m} (100%) rename Firestore/Source/API/{FIRInt32Value.mm => FIRInt32Value.m} (100%) rename Firestore/Source/API/{FIRMaxKey.mm => FIRMaxKey.m} (100%) rename Firestore/Source/API/{FIRMinKey.mm => FIRMinKey.m} (100%) rename Firestore/Source/API/{FIRRegexValue.mm => FIRRegexValue.m} (100%) diff --git a/Firestore/Source/API/FIRBSONBinaryData.mm b/Firestore/Source/API/FIRBSONBinaryData.m similarity index 100% rename from Firestore/Source/API/FIRBSONBinaryData.mm rename to Firestore/Source/API/FIRBSONBinaryData.m diff --git a/Firestore/Source/API/FIRBSONObjectId.mm b/Firestore/Source/API/FIRBSONObjectId.m similarity index 100% rename from Firestore/Source/API/FIRBSONObjectId.mm rename to Firestore/Source/API/FIRBSONObjectId.m diff --git a/Firestore/Source/API/FIRBSONTimestamp.mm b/Firestore/Source/API/FIRBSONTimestamp.m similarity index 100% rename from Firestore/Source/API/FIRBSONTimestamp.mm rename to Firestore/Source/API/FIRBSONTimestamp.m diff --git a/Firestore/Source/API/FIRInt32Value.mm b/Firestore/Source/API/FIRInt32Value.m similarity index 100% rename from Firestore/Source/API/FIRInt32Value.mm rename to Firestore/Source/API/FIRInt32Value.m diff --git a/Firestore/Source/API/FIRMaxKey.mm b/Firestore/Source/API/FIRMaxKey.m similarity index 100% rename from Firestore/Source/API/FIRMaxKey.mm rename to 
Firestore/Source/API/FIRMaxKey.m diff --git a/Firestore/Source/API/FIRMinKey.mm b/Firestore/Source/API/FIRMinKey.m similarity index 100% rename from Firestore/Source/API/FIRMinKey.mm rename to Firestore/Source/API/FIRMinKey.m diff --git a/Firestore/Source/API/FIRRegexValue.mm b/Firestore/Source/API/FIRRegexValue.m similarity index 100% rename from Firestore/Source/API/FIRRegexValue.mm rename to Firestore/Source/API/FIRRegexValue.m diff --git a/Firestore/Source/Public/FirebaseFirestore/FIRMaxKey.h b/Firestore/Source/Public/FirebaseFirestore/FIRMaxKey.h index 9a6872fbddf..7fd20da9088 100644 --- a/Firestore/Source/Public/FirebaseFirestore/FIRMaxKey.h +++ b/Firestore/Source/Public/FirebaseFirestore/FIRMaxKey.h @@ -26,12 +26,14 @@ NS_SWIFT_NAME(MaxKey) __attribute__((objc_subclassing_restricted)) @interface FIRMaxKey : NSObject -/** Returns the only instance of MaxKey. */ +/** The shared singleton `MaxKey` instance. */ @property(class, readonly) FIRMaxKey *shared; /** Returns true if the given object is equal to this, and false otherwise. */ - (BOOL)isEqual:(nullable id)object; +- (instancetype)init NS_UNAVAILABLE; + @end NS_ASSUME_NONNULL_END diff --git a/Firestore/Source/Public/FirebaseFirestore/FIRMinKey.h b/Firestore/Source/Public/FirebaseFirestore/FIRMinKey.h index 04d164b9093..1a2c778b524 100644 --- a/Firestore/Source/Public/FirebaseFirestore/FIRMinKey.h +++ b/Firestore/Source/Public/FirebaseFirestore/FIRMinKey.h @@ -26,12 +26,14 @@ NS_SWIFT_NAME(MinKey) __attribute__((objc_subclassing_restricted)) @interface FIRMinKey : NSObject -/** The only instance of MinKey. */ +/** The shared singleton `MinKey` instance. */ @property(class, readonly) FIRMinKey *shared; /** Returns true if the given object is equal to this, and false otherwise. */ - (BOOL)isEqual:(nullable id)object; +- (instancetype)init NS_UNAVAILABLE; + @end NS_ASSUME_NONNULL_END From 8a4075e179bbddf1d68361b2dbfc0b0783afbea8 Mon Sep 17 00:00:00 2001 From: Ehsan Date: Fri, 27 Jun 2025 16:52:40 -0700 Subject: [PATCH 14/16] feat: Add Decimal128Value (#799) * WIP: Decimal128Value. * WIP: decimal128 values. next: add quadruple + compare. next: add tests. * Add quadruple and quadruple_builder. * Implement comparison logic. * Fix bug and add integration tests. * Add unit tests. * clang-format. * Fix the NumberEquals logic. * Port the missing tests from Android. * Port more integration tests from Android. * Address feedback. * Update Quadruple library and re-enable tests (passing now). * Fix FIRDecimal128Value.isEqual to handle -/+0. 
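
The fixes listed above pin down the equality semantics of the new Decimal128Value wrapper, which the Objective-C unit tests later in this patch drive through initWithValue:. The Swift sketch below restates those expectations; the Decimal128Value(_:) initializer spelling is an assumption made by analogy with the other wrappers such as Int32Value(_:).

import FirebaseFirestore

// Illustrative sketch: equality follows the numeric value, not the literal string.
// The Swift initializer spelling is assumed; the patch's Obj-C tests use initWithValue:.
func decimal128EqualityExpectations() {
  let a = Decimal128Value("1.2e3")
  let b = Decimal128Value("12e2")
  let c = Decimal128Value("1.2")
  assert(a.isEqual(b))   // same numeric value written two ways
  assert(!a.isEqual(c))  // different numeric value

  // Special values, mirroring FIRBsonTypesUnitTests below: NaN compares equal to
  // NaN, +0 and -0 compare equal, but +Infinity and -Infinity do not.
  assert(Decimal128Value("NaN").isEqual(Decimal128Value("NaN")))
  assert(Decimal128Value("-0").isEqual(Decimal128Value("0.0")))
  assert(!Decimal128Value("Infinity").isEqual(Decimal128Value("-Infinity")))
}
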
--- .../FirebaseFirestore/FIRDecimal128Value.h | 15 + Firestore/CHANGELOG.md | 2 +- .../Tests/API/FIRBsonTypesUnitTests.mm | 34 + Firestore/Source/API/FIRDecimal128Value.mm | 66 ++ Firestore/Source/API/FSTUserDataReader.mm | 18 + Firestore/Source/API/FSTUserDataWriter.mm | 32 +- .../FirebaseFirestore/FIRDecimal128Value.h | 46 + .../Codable/CodablePassThroughTypes.swift | 1 + .../Codable/Decimal128Value+Codable.swift | 62 ++ .../BsonTypesIntegrationTests.swift | 277 +++++- .../Integration/CodableIntegrationTests.swift | 6 + .../SnapshotListenerSourceTests.swift | 40 + .../Swift/Tests/Integration/TypeTest.swift | 95 +- .../src/index/firestore_index_value_writer.cc | 45 +- Firestore/core/src/model/value_util.cc | 102 +- Firestore/core/src/model/value_util.h | 16 +- Firestore/core/src/util/quadruple.cc | 244 +++++ Firestore/core/src/util/quadruple.h | 100 ++ Firestore/core/src/util/quadruple_builder.cc | 913 ++++++++++++++++++ Firestore/core/src/util/quadruple_builder.h | 98 ++ .../unit/bundle/bundle_serializer_test.cc | 10 + .../unit/index/index_value_writer_test.cc | 109 +++ .../unit/local/leveldb_index_manager_test.cc | 250 ++++- .../unit/local/leveldb_local_store_test.cc | 263 ++++- .../core/test/unit/model/document_test.cc | 7 +- .../core/test/unit/model/object_value_test.cc | 52 +- .../core/test/unit/model/value_util_test.cc | 74 +- .../core/test/unit/remote/serializer_test.cc | 12 + Firestore/core/test/unit/testutil/testutil.cc | 4 + Firestore/core/test/unit/testutil/testutil.h | 1 + 30 files changed, 2843 insertions(+), 151 deletions(-) create mode 100644 FirebaseFirestoreInternal/FirebaseFirestore/FIRDecimal128Value.h create mode 100644 Firestore/Source/API/FIRDecimal128Value.mm create mode 100644 Firestore/Source/Public/FirebaseFirestore/FIRDecimal128Value.h create mode 100644 Firestore/Swift/Source/Codable/Decimal128Value+Codable.swift create mode 100644 Firestore/core/src/util/quadruple.cc create mode 100644 Firestore/core/src/util/quadruple.h create mode 100644 Firestore/core/src/util/quadruple_builder.cc create mode 100644 Firestore/core/src/util/quadruple_builder.h diff --git a/FirebaseFirestoreInternal/FirebaseFirestore/FIRDecimal128Value.h b/FirebaseFirestoreInternal/FirebaseFirestore/FIRDecimal128Value.h new file mode 100644 index 00000000000..30376feff31 --- /dev/null +++ b/FirebaseFirestoreInternal/FirebaseFirestore/FIRDecimal128Value.h @@ -0,0 +1,15 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#import diff --git a/Firestore/CHANGELOG.md b/Firestore/CHANGELOG.md index 5a47d7eb2db..4ac0e7f1c78 100644 --- a/Firestore/CHANGELOG.md +++ b/Firestore/CHANGELOG.md @@ -1,6 +1,6 @@ # Unreleased - [feature] Adds support for the following new types: `MinKey`, `MaxKey`, `RegexValue`, - `Int32Value`, `BSONObjectId`, `BSONTimestamp`, and `BSONBinaryData`. (#14800) + `Int32Value`, `Decimal128Value`, `BSONObjectId`, `BSONTimestamp`, and `BSONBinaryData`. 
(#14800) # 11.12.0 - [fixed] Fixed the `null` value handling in `isNotEqualTo` and `notIn` filters. diff --git a/Firestore/Example/Tests/API/FIRBsonTypesUnitTests.mm b/Firestore/Example/Tests/API/FIRBsonTypesUnitTests.mm index 7fe6d9226c6..e9cc0e66a37 100644 --- a/Firestore/Example/Tests/API/FIRBsonTypesUnitTests.mm +++ b/Firestore/Example/Tests/API/FIRBsonTypesUnitTests.mm @@ -17,6 +17,7 @@ #import #import #import +#import #import #import #import @@ -75,6 +76,39 @@ - (void)testCreateAndReadAndCompareInt32Value { XCTAssertFalse([val1 isEqual:val3]); } +- (void)testCreateAndReadAndCompareDecimal128Value { + FIRDecimal128Value *val1 = [[FIRDecimal128Value alloc] initWithValue:@"1.2e3"]; + FIRDecimal128Value *val2 = [[FIRDecimal128Value alloc] initWithValue:@"12e2"]; + FIRDecimal128Value *val3 = [[FIRDecimal128Value alloc] initWithValue:@"0.12e4"]; + FIRDecimal128Value *val4 = [[FIRDecimal128Value alloc] initWithValue:@"12000e-1"]; + FIRDecimal128Value *val5 = [[FIRDecimal128Value alloc] initWithValue:@"1.2"]; + FIRDecimal128Value *val6 = [[FIRDecimal128Value alloc] initWithValue:@"NaN"]; + FIRDecimal128Value *val7 = [[FIRDecimal128Value alloc] initWithValue:@"Infinity"]; + FIRDecimal128Value *val8 = [[FIRDecimal128Value alloc] initWithValue:@"-Infinity"]; + FIRDecimal128Value *val9 = [[FIRDecimal128Value alloc] initWithValue:@"NaN"]; + FIRDecimal128Value *val10 = [[FIRDecimal128Value alloc] initWithValue:@"-0"]; + FIRDecimal128Value *val11 = [[FIRDecimal128Value alloc] initWithValue:@"0"]; + FIRDecimal128Value *val12 = [[FIRDecimal128Value alloc] initWithValue:@"-0.0"]; + FIRDecimal128Value *val13 = [[FIRDecimal128Value alloc] initWithValue:@"0.0"]; + + // Test reading the value back + XCTAssertEqual(@"1.2e3", val1.value); + + // Test isEqual + XCTAssertTrue([val1 isEqual:val2]); + XCTAssertTrue([val1 isEqual:val3]); + XCTAssertTrue([val1 isEqual:val4]); + XCTAssertFalse([val1 isEqual:val5]); + + // Test isEqual for special values. + XCTAssertTrue([val6 isEqual:val9]); + XCTAssertFalse([val7 isEqual:val8]); + XCTAssertFalse([val7 isEqual:val9]); + XCTAssertTrue([val10 isEqual:val11]); + XCTAssertTrue([val10 isEqual:val12]); + XCTAssertTrue([val10 isEqual:val13]); +} + - (void)testCreateAndReadAndCompareBsonObjectId { FIRBSONObjectId *val1 = [[FIRBSONObjectId alloc] initWithValue:@"abcd"]; FIRBSONObjectId *val2 = [[FIRBSONObjectId alloc] initWithValue:@"abcd"]; diff --git a/Firestore/Source/API/FIRDecimal128Value.mm b/Firestore/Source/API/FIRDecimal128Value.mm new file mode 100644 index 00000000000..36fe4ddf04a --- /dev/null +++ b/Firestore/Source/API/FIRDecimal128Value.mm @@ -0,0 +1,66 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "Firestore/Source/Public/FirebaseFirestore/FIRDecimal128Value.h" + +#include "Firestore/core/src/util/quadruple.h" +#include "Firestore/core/src/util/string_apple.h" + +using firebase::firestore::util::MakeString; +using firebase::firestore::util::Quadruple; + +@implementation FIRDecimal128Value + +- (instancetype)initWithValue:(NSString *)value { + self = [super init]; + if (self) { + _value = [value copy]; + } + return self; +} + +- (BOOL)isEqual:(nullable id)object { + if (self == object) { + return YES; + } + + if (![object isKindOfClass:[FIRDecimal128Value class]]) { + return NO; + } + + FIRDecimal128Value *other = (FIRDecimal128Value *)object; + + Quadruple lhs = Quadruple(); + Quadruple rhs = Quadruple(); + lhs.Parse(MakeString(self.value)); + rhs.Parse(MakeString(other.value)); + + // Firestore considers +0 and -0 to be equal, but `Quadruple::Compare()` does not. + if (lhs.Compare(Quadruple(-0.0)) == 0) lhs = Quadruple(); + if (rhs.Compare(Quadruple(-0.0)) == 0) rhs = Quadruple(); + + return lhs.Compare(rhs) == 0; +} + +- (id)copyWithZone:(__unused NSZone *_Nullable)zone { + return [[FIRDecimal128Value alloc] initWithValue:self.value]; +} + +- (NSString *)description { + return [NSString stringWithFormat:@"", self.value]; +} + +@end diff --git a/Firestore/Source/API/FSTUserDataReader.mm b/Firestore/Source/API/FSTUserDataReader.mm index eac2e83f655..5928a5b713e 100644 --- a/Firestore/Source/API/FSTUserDataReader.mm +++ b/Firestore/Source/API/FSTUserDataReader.mm @@ -27,6 +27,7 @@ #import "FIRBSONBinaryData.h" #import "FIRBSONObjectId.h" #import "FIRBSONTimestamp.h" +#import "FIRDecimal128Value.h" #import "FIRGeoPoint.h" #import "FIRInt32Value.h" #import "FIRMaxKey.h" @@ -452,6 +453,20 @@ - (ParsedUpdateData)parsedUpdateData:(id)input { return std::move(result); } +- (Message)parseDecimal128Value:(FIRDecimal128Value *)decimal128 + context:(ParseContext &&)context { + __block Message result; + result->which_value_type = google_firestore_v1_Value_map_value_tag; + result->map_value = {}; + result->map_value.fields_count = 1; + result->map_value.fields = nanopb::MakeArray(1); + result->map_value.fields[0].key = nanopb::CopyBytesArray(model::kDecimal128TypeFieldValue); + result->map_value.fields[0].value = + *[self encodeStringValue:MakeString(decimal128.value)].release(); + + return std::move(result); +} + - (Message)parseBsonObjectId:(FIRBSONObjectId *)oid context:(ParseContext &&)context { __block Message result; @@ -723,6 +738,9 @@ - (void)parseSentinelFieldValue:(FIRFieldValue *)fieldValue context:(ParseContex } else if ([input isKindOfClass:[FIRInt32Value class]]) { FIRInt32Value *value = input; return [self parseInt32Value:value context:std::move(context)]; + } else if ([input isKindOfClass:[FIRDecimal128Value class]]) { + FIRDecimal128Value *value = input; + return [self parseDecimal128Value:value context:std::move(context)]; } else if ([input isKindOfClass:[FIRBSONObjectId class]]) { FIRBSONObjectId *oid = input; return [self parseBsonObjectId:oid context:std::move(context)]; diff --git a/Firestore/Source/API/FSTUserDataWriter.mm b/Firestore/Source/API/FSTUserDataWriter.mm index 5cc6eaf1bc7..df91bef48be 100644 --- a/Firestore/Source/API/FSTUserDataWriter.mm +++ b/Firestore/Source/API/FSTUserDataWriter.mm @@ -26,6 +26,7 @@ #include "Firestore/Source/Public/FirebaseFirestore/FIRBSONBinaryData.h" #include "Firestore/Source/Public/FirebaseFirestore/FIRBSONObjectId.h" #include "Firestore/Source/Public/FirebaseFirestore/FIRBSONTimestamp.h" +#include 
"Firestore/Source/Public/FirebaseFirestore/FIRDecimal128Value.h" #include "Firestore/Source/Public/FirebaseFirestore/FIRInt32Value.h" #include "Firestore/Source/Public/FirebaseFirestore/FIRMaxKey.h" #include "Firestore/Source/Public/FirebaseFirestore/FIRMinKey.h" @@ -58,6 +59,8 @@ using firebase::firestore::google_protobuf_Timestamp; using firebase::firestore::model::kRawBsonTimestampTypeIncrementFieldValue; using firebase::firestore::model::kRawBsonTimestampTypeSecondsFieldValue; +using firebase::firestore::model::kRawDecimal128TypeFieldValue; +using firebase::firestore::model::kRawInt32TypeFieldValue; using firebase::firestore::model::kRawRegexTypeOptionsFieldValue; using firebase::firestore::model::kRawRegexTypePatternFieldValue; using firebase::firestore::model::kRawVectorValueFieldKey; @@ -109,7 +112,12 @@ - (id)convertedValue:(const google_firestore_v1_Value &)value { return value.boolean_value ? @YES : @NO; case TypeOrder::kNumber: if (value.which_value_type == google_firestore_v1_Value_map_value_tag) { - return [self convertedInt32:value.map_value]; + absl::string_view key = MakeStringView(value.map_value.fields[0].key); + if (key.compare(absl::string_view(kRawInt32TypeFieldValue)) == 0) { + return [self convertedInt32:value.map_value]; + } else if (key.compare(absl::string_view(kRawDecimal128TypeFieldValue)) == 0) { + return [self convertedDecimal128Value:value.map_value]; + } } return value.which_value_type == google_firestore_v1_Value_integer_value_tag ? @(value.integer_value) @@ -157,7 +165,7 @@ - (FIRVectorValue *)convertedVector:(const google_firestore_v1_MapValue &)mapVal for (pb_size_t i = 0; i < mapValue.fields_count; ++i) { absl::string_view key = MakeStringView(mapValue.fields[i].key); const google_firestore_v1_Value &value = mapValue.fields[i].value; - if ((0 == key.compare(absl::string_view(kRawVectorValueFieldKey))) && + if ((key.compare(absl::string_view(kRawVectorValueFieldKey)) == 0) && value.which_value_type == google_firestore_v1_Value_array_value_tag) { return [FIRFieldValue vectorWithArray:[self convertedArray:value.array_value]]; } @@ -174,11 +182,11 @@ - (FIRRegexValue *)convertedRegex:(const google_firestore_v1_MapValue &)mapValue for (pb_size_t i = 0; i < innerValue.map_value.fields_count; ++i) { absl::string_view key = MakeStringView(innerValue.map_value.fields[i].key); const google_firestore_v1_Value &value = innerValue.map_value.fields[i].value; - if ((0 == key.compare(absl::string_view(kRawRegexTypePatternFieldValue))) && + if ((key.compare(absl::string_view(kRawRegexTypePatternFieldValue)) == 0) && value.which_value_type == google_firestore_v1_Value_string_value_tag) { pattern = MakeNSString(MakeStringView(value.string_value)); } - if ((0 == key.compare(absl::string_view(kRawRegexTypeOptionsFieldValue))) && + if ((key.compare(absl::string_view(kRawRegexTypeOptionsFieldValue)) == 0) && value.which_value_type == google_firestore_v1_Value_string_value_tag) { options = MakeNSString(MakeStringView(value.string_value)); } @@ -198,6 +206,18 @@ - (FIRInt32Value *)convertedInt32:(const google_firestore_v1_MapValue &)mapValue return [[FIRInt32Value alloc] initWithValue:value]; } +- (FIRDecimal128Value *)convertedDecimal128Value:(const google_firestore_v1_MapValue &)mapValue { + NSString *decimalString = @""; + if (mapValue.fields_count == 1) { + const google_firestore_v1_Value &decimalValue = mapValue.fields[0].value; + if (decimalValue.which_value_type == google_firestore_v1_Value_string_value_tag) { + decimalString = 
MakeNSString(MakeStringView(decimalValue.string_value)); + } + } + + return [[FIRDecimal128Value alloc] initWithValue:decimalString]; +} + - (FIRBSONObjectId *)convertedBsonObjectId:(const google_firestore_v1_MapValue &)mapValue { NSString *oid = @""; if (mapValue.fields_count == 1) { @@ -219,12 +239,12 @@ - (FIRBSONTimestamp *)convertedBsonTimestamp:(const google_firestore_v1_MapValue for (pb_size_t i = 0; i < innerValue.map_value.fields_count; ++i) { absl::string_view key = MakeStringView(innerValue.map_value.fields[i].key); const google_firestore_v1_Value &value = innerValue.map_value.fields[i].value; - if ((0 == key.compare(absl::string_view(kRawBsonTimestampTypeSecondsFieldValue))) && + if ((key.compare(absl::string_view(kRawBsonTimestampTypeSecondsFieldValue)) == 0) && value.which_value_type == google_firestore_v1_Value_integer_value_tag) { // The value from the server is guaranteed to fit in a 32-bit unsigned integer. seconds = static_cast(value.integer_value); } - if ((0 == key.compare(absl::string_view(kRawBsonTimestampTypeIncrementFieldValue))) && + if ((key.compare(absl::string_view(kRawBsonTimestampTypeIncrementFieldValue)) == 0) && value.which_value_type == google_firestore_v1_Value_integer_value_tag) { // The value from the server is guaranteed to fit in a 32-bit unsigned integer. increment = static_cast(value.integer_value); diff --git a/Firestore/Source/Public/FirebaseFirestore/FIRDecimal128Value.h b/Firestore/Source/Public/FirebaseFirestore/FIRDecimal128Value.h new file mode 100644 index 00000000000..b3c30bd980b --- /dev/null +++ b/Firestore/Source/Public/FirebaseFirestore/FIRDecimal128Value.h @@ -0,0 +1,46 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import + +NS_ASSUME_NONNULL_BEGIN + +/** + * Represents a 128-bit decimal number type in Firestore documents. + */ +NS_SWIFT_SENDABLE +NS_SWIFT_NAME(Decimal128Value) +__attribute__((objc_subclassing_restricted)) +@interface FIRDecimal128Value : NSObject + +/** The string representation of the 128-bit decimal value. */ +@property(nonatomic, copy, readonly) NSString *value; + +/** :nodoc: */ +- (instancetype)init NS_UNAVAILABLE; + +/** + * Creates a `Decimal128Value` with the given value. + * @param value The string representation of the number to be stored. + */ +- (instancetype)initWithValue:(NSString *)value NS_SWIFT_NAME(init(_:)); + +/** Returns true if the given object is equal to this, and false otherwise. 
*/ +- (BOOL)isEqual:(nullable id)object; + +@end + +NS_ASSUME_NONNULL_END diff --git a/Firestore/Swift/Source/Codable/CodablePassThroughTypes.swift b/Firestore/Swift/Source/Codable/CodablePassThroughTypes.swift index 65b19af0517..388622734a7 100644 --- a/Firestore/Swift/Source/Codable/CodablePassThroughTypes.swift +++ b/Firestore/Swift/Source/Codable/CodablePassThroughTypes.swift @@ -37,6 +37,7 @@ struct FirestorePassthroughTypes: StructureCodingPassthroughTypeResolver { t is MaxKey || t is RegexValue || t is Int32Value || + t is Decimal128Value || t is BSONObjectId || t is BSONTimestamp || t is BSONBinaryData diff --git a/Firestore/Swift/Source/Codable/Decimal128Value+Codable.swift b/Firestore/Swift/Source/Codable/Decimal128Value+Codable.swift new file mode 100644 index 00000000000..3766487f07f --- /dev/null +++ b/Firestore/Swift/Source/Codable/Decimal128Value+Codable.swift @@ -0,0 +1,62 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#if SWIFT_PACKAGE + @_exported import FirebaseFirestoreInternalWrapper +#else + @_exported import FirebaseFirestoreInternal +#endif // SWIFT_PACKAGE + +/** + * A protocol describing the encodable properties of a Decimal128Value. + * + * Note: this protocol exists as a workaround for the Swift compiler: if the Decimal128Value class + * was extended directly to conform to Codable, the methods implementing the protocol would need + * to be marked required, but that can't be done in an extension. Declaring the extension on the + * protocol sidesteps this issue. + */ +private protocol CodableDecimal128Value: Codable { + var value: String { get } + + init(_ value: String) +} + +/** The keys in a Decimal128Value. Must match the properties of Decimal128Value. */ +private enum Decimal128ValueKeys: String, CodingKey { + case value +} + +/** + * An extension of Decimal128Value that implements the behavior of the Codable protocol. + * + * Note: this is implemented manually here because the Swift compiler can't synthesize these methods + * when declaring an extension to conform to Codable. + */ +extension CodableDecimal128Value { + public init(from decoder: Decoder) throws { + let container = try decoder.container(keyedBy: Decimal128ValueKeys.self) + let value = try container.decode(String.self, forKey: .value) + self.init(value) + } + + public func encode(to encoder: Encoder) throws { + var container = encoder.container(keyedBy: Decimal128ValueKeys.self) + try container.encode(value, forKey: .value) + } +} + +/** Extends Decimal128Value to conform to Codable. 
*/ +extension FirebaseFirestore.Decimal128Value: FirebaseFirestore.CodableDecimal128Value {} diff --git a/Firestore/Swift/Tests/Integration/BsonTypesIntegrationTests.swift b/Firestore/Swift/Tests/Integration/BsonTypesIntegrationTests.swift index 5354b856e88..584f9a15fcd 100644 --- a/Firestore/Swift/Tests/Integration/BsonTypesIntegrationTests.swift +++ b/Firestore/Swift/Tests/Integration/BsonTypesIntegrationTests.swift @@ -107,6 +107,7 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { "binary": BSONBinaryData(subtype: 1, data: Data([1, 2, 3])), "objectId": BSONObjectId("507f191e810c19729de860ea"), "int32": Int32Value(1), + "decimal128": Decimal128Value("1.2e3"), "min": MinKey.shared, "max": MaxKey.shared, "regex": RegexValue(pattern: "^foo", options: "i"), @@ -127,6 +128,10 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { snapshot.get("int32") as? Int32Value, Int32Value(2) ) + XCTAssertEqual( + snapshot.get("decimal128") as? Decimal128Value, + Decimal128Value("1.2e3") + ) XCTAssertEqual( snapshot.get("min") as? MinKey, MinKey.shared @@ -160,6 +165,7 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { "binary": BSONBinaryData(subtype: 1, data: Data([1, 2, 3])), "objectId": BSONObjectId("507f191e810c19729de860ea"), "int32": Int32Value(1), + "decimal128": Decimal128Value("-1.23e-4"), "min": MinKey.shared, "max": MaxKey.shared, "regex": RegexValue(pattern: "^foo", options: "i"), @@ -179,6 +185,10 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { snapshot.get("int32") as? Int32Value, Int32Value(2) ) + XCTAssertEqual( + snapshot.get("decimal128") as? Decimal128Value, + Decimal128Value("-1.23e-4") + ) XCTAssertEqual( snapshot.get("min") as? MinKey, MinKey.shared @@ -268,6 +278,209 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { ) } + func testCanFilterAndOrderDecimal128Values() async throws { + let testDocs: [String: [String: Any]] = [ + "a": ["key": Decimal128Value("-Infinity")], + "b": ["key": Decimal128Value("NaN")], + "c": ["key": Decimal128Value("-0")], + "d": ["key": Decimal128Value("0")], + "e": ["key": Decimal128Value("0.0")], + "f": ["key": Decimal128Value("-01.23e-4")], + "g": ["key": Decimal128Value("1.5e6")], + "h": ["key": Decimal128Value("Infinity")], + ] + + let collection = collectionRef() + await setDocumentData(testDocs, toCollection: collection) + + var query = collection + .whereField("key", isGreaterThanOrEqualTo: Decimal128Value("0")) + .order(by: "key", descending: true) + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + collection: collection, + query: query, + expectedResult: ["h", "g", "e", "d", "c"] + ) + + query = collection + .whereField("key", isNotEqualTo: Decimal128Value("0")) + .order(by: "key") + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + collection: collection, + query: query, + expectedResult: ["b", "a", "f", "g", "h"] + ) + + query = collection + .whereField("key", isNotEqualTo: Decimal128Value("NaN")) + .order(by: "key") + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + collection: collection, + query: query, + expectedResult: ["a", "f", "c", "d", "e", "g", "h"] + ) + + query = collection + .whereField("key", isEqualTo: Decimal128Value("-01.23e-4")) + .order(by: "key") + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + collection: collection, + query: query, + expectedResult: ["f"] + ) + + query = collection + .whereField("key", isNotEqualTo: Decimal128Value("-01.23e-4")) + .order(by: "key") + try await 
assertSdkQueryResultsConsistentWithBackend( + testDocs, + collection: collection, + query: query, + expectedResult: ["b", "a", "c", "d", "e", "g", "h"] + ) + + query = collection + .whereField("key", in: [Decimal128Value("0")]) + .order(by: "key", descending: true) + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + collection: collection, + query: query, + expectedResult: ["e", "d", "c"] + ) + + // Note: The server sends document `b` incorrectly, but the client filters + // it out. Currently `FieldFilter.NOT_IN` with `NaN` in the list does not + // behave the same as `UnaryFilter.IS_NOT_NAN`. + query = collection + .whereField("key", notIn: [Decimal128Value("NaN"), Decimal128Value("Infinity")]) + .order(by: "key", descending: true) + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + collection: collection, + query: query, + expectedResult: ["g", "e", "d", "c", "f", "a"] + ) + } + + func testCanFilterAndOrderNumericalValues() async throws { + let testDocs: [String: [String: Any]] = [ + "a": ["key": Decimal128Value("-1.2e3")], + "b": ["key": Int32Value(0)], + "c": ["key": Decimal128Value("1")], + "d": ["key": Int32Value(1)], + "e": ["key": 1], + "f": ["key": 1.0], + "g": ["key": Decimal128Value("1.2e-3")], + "h": ["key": Int32Value(2)], + "i": ["key": Decimal128Value("NaN")], + "j": ["key": Decimal128Value("-Infinity")], + "k": ["key": Double.nan], + "l": ["key": Decimal128Value("Infinity")], + ] + + let collection = collectionRef() + await setDocumentData(testDocs, toCollection: collection) + + let query = collection.order(by: "key", descending: true) + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + collection: collection, + query: query, + expectedResult: [ + "l", // Infinity + "h", // 2 + "f", // 1.0 + "e", // 1 + "d", // 1 + "c", // 1 + "g", // 0.0012 + "b", // 0 + "a", // -1200 + "j", // -Infinity + "k", // NaN + "i", // NaN + ] + ) + + let query2 = collection + .whereField("key", isNotEqualTo: Decimal128Value("1.0")) + .order(by: "key", descending: true) + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + collection: collection, + query: query2, + expectedResult: ["l", "h", "g", "b", "a", "j", "k", "i"] + ) + + let query3 = collection + .whereField("key", isEqualTo: 1) + .order(by: "key", descending: true) + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + collection: collection, + query: query3, + expectedResult: ["f", "e", "d", "c"] + ) + } + + func testDecimal128ValuesWithNo2sComplementRepresentation() async throws { + let testDocs: [String: [String: Any]] = [ + "a": ["key": Decimal128Value("-1.1e-3")], // -0.0011 + "b": ["key": Decimal128Value("1.1")], + "c": ["key": 1.1], + "d": ["key": 1.0], + "e": ["key": Decimal128Value("1.1e-3")], // 0.0011 + ] + + let collection = collectionRef() + await setDocumentData(testDocs, toCollection: collection) + + let query = collection + .whereField("key", isEqualTo: Decimal128Value("1.1")) + .order(by: "key", descending: true) + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + collection: collection, + query: query, + expectedResult: ["b"] + ) + + let query2 = collection + .whereField("key", isNotEqualTo: Decimal128Value("1.1")) + .order(by: "key", descending: false) + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + collection: collection, + query: query2, + expectedResult: ["a", "e", "d", "c"] + ) + + let query3 = collection + .whereField("key", isEqualTo: 1.1) + .order(by: "key", descending: false) + try await 
assertSdkQueryResultsConsistentWithBackend( + testDocs, + collection: collection, + query: query3, + expectedResult: ["c"] + ) + + let query4 = collection + .whereField("key", isNotEqualTo: 1.1) + .order(by: "key", descending: false) + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + collection: collection, + query: query4, + expectedResult: ["a", "e", "d", "b"] + ) + } + func testCanFilterAndOrderTimestampValues() async throws { let testDocs: [String: [String: Any]] = [ "a": ["key": BSONTimestamp(seconds: 1, increment: 1)], @@ -387,17 +600,15 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { expectedResult: ["b", "a"] ) - // TODO(b/410032145): This currently fails, and is fixed by - // PR #14704. Uncomment this when moving to the main branch. - // var query2 = collection - // .whereField("key", isNotEqualTo: MinKey.shared)) - // .order(by: "key") - // try await assertSdkQueryResultsConsistentWithBackend( - // testDocs, - // collection: collection, - // query: query2, - // expectedResult: ["d", "e"] - // ) + query = collection + .whereField("key", isNotEqualTo: MinKey.shared) + .order(by: "key") + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + collection: collection, + query: query, + expectedResult: ["d", "e"] + ) query = collection .whereField("key", isGreaterThanOrEqualTo: MinKey.shared) @@ -472,17 +683,15 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { expectedResult: ["c", "d"] ) - // TODO(b/410032145): This currently fails, and is fixed by - // PR #14704. Uncomment this when moving to the main branch. - // query = collection - // .whereField("key", isNotEqualTo: MaxKey.shared)) - // .order(by: "key") - // try await assertSdkQueryResultsConsistentWithBackend( - // testDocs, - // collection: collection, - // query: query, - // expectedResult: ["a", "b"] - // ) + query = collection + .whereField("key", isNotEqualTo: MaxKey.shared) + .order(by: "key") + try await assertSdkQueryResultsConsistentWithBackend( + testDocs, + collection: collection, + query: query, + expectedResult: ["a", "b"] + ) query = collection .whereField("key", isGreaterThanOrEqualTo: MaxKey.shared) @@ -584,9 +793,17 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { "bsonBinary1": ["key": BSONBinaryData(subtype: 1, data: Data([1, 2, 3]))], "bsonBinary2": ["key": BSONBinaryData(subtype: 1, data: Data([1, 2, 4]))], "bsonBinary3": ["key": BSONBinaryData(subtype: 2, data: Data([1, 2, 2]))], + "decimal128Value1": ["key": Decimal128Value("NaN")], + "decimal128Value2": ["key": Decimal128Value("-Infinity")], + "decimal128Value3": ["key": Decimal128Value("-1.0")], "int32Value1": ["key": Int32Value(-1)], + "decimal128Value4": ["key": Decimal128Value("1.0")], "int32Value2": ["key": Int32Value(1)], + "decimal128Value5": ["key": Decimal128Value("-0.0")], + "decimal128Value6": ["key": Decimal128Value("0.0")], "int32Value3": ["key": Int32Value(0)], + "decimal128Value7": ["key": Decimal128Value("1.23e-4")], + "decimal128Value8": ["key": Decimal128Value("Infinity")], "minKey1": ["key": MinKey.shared], "minKey2": ["key": MinKey.shared], "maxKey1": ["key": MaxKey.shared], @@ -614,9 +831,17 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { "bsonTimestamp1", "bsonTimestamp2", "bsonTimestamp3", + "decimal128Value8", "int32Value2", + "decimal128Value4", + "decimal128Value7", "int32Value3", + "decimal128Value6", + "decimal128Value5", "int32Value1", + "decimal128Value3", + "decimal128Value2", + "decimal128Value1", "minKey2", "minKey1", ]) @@ -631,9 +856,13 @@ 
class BsonTypesIntegrationTests: FSTIntegrationTestCase { "minValue": ["key": MinKey.shared], "booleanValue": ["key": true], "nanValue": ["key": Double.nan], + "nanValue2": ["key": Decimal128Value("NaN")], + "negativeInfinity": ["key": Decimal128Value("-Infinity")], "int32Value": ["key": Int32Value(1)], "doubleValue": ["key": 2.0], "integerValue": ["key": 3], + "decimal128Value": ["key": Decimal128Value("3.4e-5")], + "infinity": ["key": Decimal128Value("Infinity")], "timestampValue": ["key": Timestamp(seconds: 100, nanoseconds: 123_456_000)], "bsonTimestampValue": ["key": BSONTimestamp(seconds: 1, increment: 2)], "stringValue": ["key": "string"], @@ -660,9 +889,13 @@ class BsonTypesIntegrationTests: FSTIntegrationTestCase { "minValue", "booleanValue", "nanValue", + "nanValue2", + "negativeInfinity", + "decimal128Value", "int32Value", "doubleValue", "integerValue", + "infinity", "timestampValue", "bsonTimestampValue", "stringValue", diff --git a/Firestore/Swift/Tests/Integration/CodableIntegrationTests.swift b/Firestore/Swift/Tests/Integration/CodableIntegrationTests.swift index c060dfb109f..edc402b895e 100644 --- a/Firestore/Swift/Tests/Integration/CodableIntegrationTests.swift +++ b/Firestore/Swift/Tests/Integration/CodableIntegrationTests.swift @@ -113,6 +113,7 @@ class CodableIntegrationTests: FSTIntegrationTestCase { var vector: VectorValue var regex: RegexValue var int32: Int32Value + var decimal128: Decimal128Value var minKey: MinKey var maxKey: MaxKey var bsonOjectId: BSONObjectId @@ -128,6 +129,7 @@ class CodableIntegrationTests: FSTIntegrationTestCase { vector: FieldValue.vector([0.7, 0.6]), regex: RegexValue(pattern: "^foo", options: "i"), int32: Int32Value(1), + decimal128: Decimal128Value("1.5"), minKey: MinKey.shared, maxKey: MaxKey.shared, bsonOjectId: BSONObjectId("507f191e810c19729de860ec"), @@ -251,6 +253,10 @@ class CodableIntegrationTests: FSTIntegrationTestCase { try assertCanWriteAndReadCodableValueWithAllFlavors(value: Int32Value(123)) } + func testDecimal128Value() throws { + try assertCanWriteAndReadCodableValueWithAllFlavors(value: Decimal128Value("1.2e3")) + } + func testBsonObjectId() throws { try assertCanWriteAndReadCodableValueWithAllFlavors( value: BSONObjectId("507f191e810c19729de860ec") diff --git a/Firestore/Swift/Tests/Integration/SnapshotListenerSourceTests.swift b/Firestore/Swift/Tests/Integration/SnapshotListenerSourceTests.swift index 260772b5270..ffcc95841da 100644 --- a/Firestore/Swift/Tests/Integration/SnapshotListenerSourceTests.swift +++ b/Firestore/Swift/Tests/Integration/SnapshotListenerSourceTests.swift @@ -800,6 +800,7 @@ class SnapshotListenerSourceTests: FSTIntegrationTestCase { testData["a"]!["key"] ) + // Add a 32-bit int value. let newData = ["key": Int32Value(2)] collection.document("g").setData(newData) @@ -834,6 +835,45 @@ class SnapshotListenerSourceTests: FSTIntegrationTestCase { testData["a"]!["key"] ) + // Add a 128-bit decimal value. + let decimalData = ["key": Decimal128Value("-4.123e-5")] + collection.document("h").setData(decimalData) + + querySnap = eventAccumulator.awaitEvent(withName: "snapshot") as! QuerySnapshot + XCTAssertEqual(querySnap.isEmpty, false) + XCTAssertEqual( + querySnap.documents[0].data()["key"] as! MinKey, + testData["b"]!["key"] + ) + XCTAssertEqual( + querySnap.documents[1].data()["key"] as! Decimal128Value, + decimalData["key"]! + ) + XCTAssertEqual( + querySnap.documents[2].data()["key"] as! Int32Value, + newData["key"]! + ) + XCTAssertEqual( + querySnap.documents[3].data()["key"] as! 
BSONTimestamp, + testData["c"]!["key"] + ) + XCTAssertEqual( + querySnap.documents[4].data()["key"] as! BSONBinaryData, + testData["e"]!["key"] + ) + XCTAssertEqual( + querySnap.documents[5].data()["key"] as! BSONObjectId, + testData["d"]!["key"] + ) + XCTAssertEqual( + querySnap.documents[6].data()["key"] as! RegexValue, + testData["f"]!["key"] + ) + XCTAssertEqual( + querySnap.documents[7].data()["key"] as! MaxKey, + testData["a"]!["key"] + ) + registration.remove() } } diff --git a/Firestore/Swift/Tests/Integration/TypeTest.swift b/Firestore/Swift/Tests/Integration/TypeTest.swift index a7ef1804e16..3f737fe1624 100644 --- a/Firestore/Swift/Tests/Integration/TypeTest.swift +++ b/Firestore/Swift/Tests/Integration/TypeTest.swift @@ -137,6 +137,24 @@ class TypeTest: FSTIntegrationTestCase { XCTAssertTrue(v1 != v3) } + func testDecimal128ValueEquality() { + let v1 = Decimal128Value("1.2e3") + let v2 = Decimal128Value("12e2") + let v3 = Decimal128Value("0.12e4") + let v4 = Decimal128Value("12000e-1") + let v5 = Decimal128Value("1.2") + + XCTAssertTrue(v1 == v2) + XCTAssertTrue(v1 == v3) + XCTAssertTrue(v1 == v4) + XCTAssertFalse(v1 == v5) + + XCTAssertFalse(v1 != v2) + XCTAssertFalse(v1 != v3) + XCTAssertFalse(v1 != v4) + XCTAssertTrue(v1 != v5) + } + func testBsonTimestampEquality() { let v1 = BSONTimestamp(seconds: 1, increment: 1) let v2 = BSONTimestamp(seconds: 1, increment: 1) @@ -207,6 +225,27 @@ class TypeTest: FSTIntegrationTestCase { ) } + func testCanReadAndWriteDecimal128Fields() async throws { + _ = try await expectRoundtrip( + coll: collectionRef(), + data: ["map": [ + "decimalSciPositive": Decimal128Value("1.2e3"), + "decimalSciNegative": Decimal128Value("-1.2e3"), + "decimalSciNegativeExponent": Decimal128Value("1.2e-3"), + "decimalSciNegativeValueAndExponent": Decimal128Value("-1.2e-3"), + "decimalSciExplicitPositiveExponent": Decimal128Value("1.2e+3"), + "decimalFloatPositive": Decimal128Value("1.1"), + "decimalIntNegative": Decimal128Value("-1"), + "decimalZeroNegative": Decimal128Value("-0"), + "decimalZeroInt": Decimal128Value("0"), + "decimalZeroFloat": Decimal128Value("0.0"), + "decimalNaN": Decimal128Value("NaN"), + "decimalInfinityPositive": Decimal128Value("Infinity"), + "decimalInfinityNegative": Decimal128Value("-Infinity"), + ]] + ) + } + func testCanReadAndWriteBsonTimestampFields() async throws { _ = try await expectRoundtrip( coll: collectionRef(), @@ -244,6 +283,7 @@ class TypeTest: FSTIntegrationTestCase { BSONObjectId("507f191e810c19729de860ea"), BSONTimestamp(seconds: 123, increment: 456), Int32Value(1), + Decimal128Value("1.2e3"), MinKey.shared, MaxKey.shared, RegexValue(pattern: "^foo", options: "i"), @@ -254,11 +294,12 @@ class TypeTest: FSTIntegrationTestCase { func testCanReadAndWriteBsonFieldsInAnObject() async throws { _ = try await expectRoundtrip( coll: collectionRef(), - data: ["array": [ + data: ["map": [ "binary": BSONBinaryData(subtype: 1, data: Data([1, 2, 3])), "objectId": BSONObjectId("507f191e810c19729de860ea"), "bsonTimestamp": BSONTimestamp(seconds: 123, increment: 456), "int32": Int32Value(1), + "decimal128": Decimal128Value("-Infinity"), "min": MinKey.shared, "max": MaxKey.shared, "regex": RegexValue(pattern: "^foo", options: "i"), @@ -303,6 +344,50 @@ class TypeTest: FSTIntegrationTestCase { } } + func testInvalidDecimal128ValuesGetsRejected() async throws { + let docRef = collectionRef().document("test-doc") + var errorMessage: String? 
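+    // Each write below uses an invalid decimal128 string (empty input, non-numeric text,
+    // embedded whitespace, and an exponent longer than nine digits) and is expected to be
+    // rejected with an error message containing "Invalid decimal128 string".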
+ + do { + try await docRef.setData(["key": Decimal128Value("")]) + XCTFail("Expected error for invalid Decimal128Value") + } catch { + errorMessage = (error as NSError).userInfo[NSLocalizedDescriptionKey] as? String + XCTAssertNotNil(errorMessage) + XCTAssertTrue(errorMessage!.contains("Invalid decimal128 string")) + } + + errorMessage = nil + do { + try await docRef.setData(["key": Decimal128Value("abc")]) + XCTFail("Expected error for invalid Decimal128Value") + } catch { + errorMessage = (error as NSError).userInfo[NSLocalizedDescriptionKey] as? String + XCTAssertNotNil(errorMessage) + XCTAssertTrue(errorMessage!.contains("Invalid decimal128 string")) + } + + errorMessage = nil + do { + try await docRef.setData(["key": Decimal128Value("1 23.45")]) + XCTFail("Expected error for invalid Decimal128Value") + } catch { + errorMessage = (error as NSError).userInfo[NSLocalizedDescriptionKey] as? String + XCTAssertNotNil(errorMessage) + XCTAssertTrue(errorMessage!.contains("Invalid decimal128 string")) + } + + errorMessage = nil + do { + try await docRef.setData(["key": Decimal128Value("1e1234567890")]) + XCTFail("Expected error for invalid Decimal128Value") + } catch { + errorMessage = (error as NSError).userInfo[NSLocalizedDescriptionKey] as? String + XCTAssertNotNil(errorMessage) + XCTAssertTrue(errorMessage!.contains("Invalid decimal128 string")) + } + } + func testCanOrderValuesOfDifferentTypeOrderTogether() async throws { let collection = collectionRef() let testDocs: [String: [String: Any?]] = [ @@ -310,9 +395,13 @@ class TypeTest: FSTIntegrationTestCase { "minValue": ["key": MinKey.shared], "booleanValue": ["key": true], "nanValue": ["key": Double.nan], + "nanValue2": ["key": Decimal128Value("NaN")], + "negativeInfinity": ["key": Decimal128Value("-Infinity")], "int32Value": ["key": Int32Value(1)], "doubleValue": ["key": 2.0], "integerValue": ["key": 3], + "decimal128Value": ["key": Decimal128Value("345e-2")], + "infinity": ["key": Decimal128Value("Infinity")], "timestampValue": ["key": Timestamp(seconds: 100, nanoseconds: 123_456_000)], "bsonTimestampValue": ["key": BSONTimestamp(seconds: 1, increment: 2)], "stringValue": ["key": "string"], @@ -340,9 +429,13 @@ class TypeTest: FSTIntegrationTestCase { "minValue", "booleanValue", "nanValue", + "nanValue2", + "negativeInfinity", "int32Value", "doubleValue", "integerValue", + "decimal128Value", + "infinity", "timestampValue", "bsonTimestampValue", "stringValue", diff --git a/Firestore/core/src/index/firestore_index_value_writer.cc b/Firestore/core/src/index/firestore_index_value_writer.cc index bfeaa4dfbef..808553ae2e1 100644 --- a/Firestore/core/src/index/firestore_index_value_writer.cc +++ b/Firestore/core/src/index/firestore_index_value_writer.cc @@ -202,6 +202,35 @@ void WriteIndexInt32Value(const google_firestore_v1_MapValue& map_index_value, encoder->WriteDouble(map_index_value.fields[0].value.integer_value); } +void WriteIndexDoubleValue(double number, + DirectionalIndexByteEncoder* encoder) { + if (std::isnan(number)) { + WriteValueTypeLabel(encoder, IndexType::kNan); + return; + } + + WriteValueTypeLabel(encoder, IndexType::kNumber); + if (number == -0.0) { + // -0.0, 0 and 0.0 are all considered the same + encoder->WriteDouble(0.0); + } else { + encoder->WriteDouble(number); + } +} + +void WriteIndexDecimal128Value( + const google_firestore_v1_MapValue& map_index_value, + DirectionalIndexByteEncoder* encoder) { + // Note: We currently give up some precision and store the 128-bit decimal as + // a 64-bit double for client-side 
indexing purposes. We could consider + // improving this in the future. + // Note: std::stod is able to parse 'NaN', '-NaN', 'Infinity' and '-Infinity', + // with different string cases. + const double number = std::stod( + nanopb::MakeString(map_index_value.fields[0].value.string_value)); + WriteIndexDoubleValue(number, encoder); +} + void WriteIndexValueAux(const google_firestore_v1_Value& index_value, DirectionalIndexByteEncoder* encoder) { switch (index_value.which_value_type) { @@ -215,18 +244,7 @@ void WriteIndexValueAux(const google_firestore_v1_Value& index_value, break; } case google_firestore_v1_Value_double_value_tag: { - double number = index_value.double_value; - if (std::isnan(number)) { - WriteValueTypeLabel(encoder, IndexType::kNan); - break; - } - WriteValueTypeLabel(encoder, IndexType::kNumber); - if (number == -0.0) { - // -0.0, 0 and 0.0 are all considered the same - encoder->WriteDouble(0.0); - } else { - encoder->WriteDouble(number); - } + WriteIndexDoubleValue(index_value.double_value, encoder); break; } case google_firestore_v1_Value_integer_value_tag: { @@ -292,6 +310,9 @@ void WriteIndexValueAux(const google_firestore_v1_Value& index_value, } else if (model::IsBsonObjectId(index_value)) { WriteIndexBsonObjectId(index_value.map_value, encoder); break; + } else if (model::IsDecimal128Value(index_value)) { + WriteIndexDecimal128Value(index_value.map_value, encoder); + break; } else if (model::IsInt32Value(index_value)) { WriteIndexInt32Value(index_value.map_value, encoder); break; diff --git a/Firestore/core/src/model/value_util.cc b/Firestore/core/src/model/value_util.cc index e96cfb8707b..ee4f4101ff0 100644 --- a/Firestore/core/src/model/value_util.cc +++ b/Firestore/core/src/model/value_util.cc @@ -30,6 +30,7 @@ #include "Firestore/core/src/nanopb/nanopb_util.h" #include "Firestore/core/src/util/comparison.h" #include "Firestore/core/src/util/hard_assert.h" +#include "Firestore/core/src/util/quadruple.h" #include "absl/strings/escaping.h" #include "absl/strings/str_format.h" #include "absl/strings/str_join.h" @@ -41,6 +42,7 @@ namespace model { using nanopb::Message; using util::ComparisonResult; +using util::Quadruple; /** The smallest reference value. */ pb_bytes_array_s* kMinimumReferenceValue = @@ -96,6 +98,11 @@ const char* kRawInt32TypeFieldValue = "__int__"; pb_bytes_array_s* kInt32TypeFieldValue = nanopb::MakeBytesArray(kRawInt32TypeFieldValue); +/** The key of a decimal128 in a map proto. */ +const char* kRawDecimal128TypeFieldValue = "__decimal128__"; +pb_bytes_array_s* kDecimal128TypeFieldValue = + nanopb::MakeBytesArray(kRawDecimal128TypeFieldValue); + /** The key of a BSON ObjectId in a map proto. 
*/ const char* kRawBsonObjectIdTypeFieldValue = "__oid__"; pb_bytes_array_s* kBsonObjectIdTypeFieldValue = @@ -148,6 +155,8 @@ MapType DetectMapType(const google_firestore_v1_Value& value) { return MapType::kMaxKey; } else if (IsRegexValue(value)) { return MapType::kRegex; + } else if (IsDecimal128Value(value)) { + return MapType::kDecimal128; } else if (IsInt32Value(value)) { return MapType::kInt32; } else if (IsBsonObjectId(value)) { @@ -206,6 +215,7 @@ TypeOrder GetTypeOrder(const google_firestore_v1_Value& value) { case MapType::kRegex: return TypeOrder::kRegex; case MapType::kInt32: + case MapType::kDecimal128: return TypeOrder::kNumber; case MapType::kBsonObjectId: return TypeOrder::kBsonObjectId; @@ -251,8 +261,54 @@ void SortFields(google_firestore_v1_Value& value) { } } +Quadruple ConvertNumericValueToQuadruple( + const google_firestore_v1_Value& value) { + if (value.which_value_type == google_firestore_v1_Value_double_value_tag) { + return Quadruple(value.double_value); + } else if (value.which_value_type == + google_firestore_v1_Value_integer_value_tag) { + return Quadruple(value.integer_value); + } else if (IsInt32Value(value)) { + return Quadruple(value.map_value.fields[0].value.integer_value); + } else if (IsDecimal128Value(value)) { + Quadruple result; + result.Parse( + nanopb::MakeString(value.map_value.fields[0].value.string_value)); + return result; + } + + HARD_FAIL( + "ConvertNumericValueToQuadruple was called with non-numeric value: %s", + value.ToString()); +} + +ComparisonResult Compare128BitNumbers(const google_firestore_v1_Value& left, + const google_firestore_v1_Value& right) { + Quadruple lhs = ConvertNumericValueToQuadruple(left); + Quadruple rhs = ConvertNumericValueToQuadruple(right); + if (lhs.is_nan()) { + return rhs.is_nan() ? ComparisonResult::Same : ComparisonResult::Ascending; + } else if (rhs.is_nan()) { + // rhs is NaN, but lhs is not. + return ComparisonResult::Descending; + } + + // Firestore considers +0 and -0 equal, but `Quadruple.Compare()` does not. + // SO, override negative zero to positive zero. + if (lhs.Compare(Quadruple(-0.0)) == 0) lhs = Quadruple(); + if (rhs.Compare(Quadruple(-0.0)) == 0) rhs = Quadruple(); + + // Since `Compare` returns `-1`, `0`, and `1` with the same semantics as the + // `ComparisonResult` enum, we can safely cast it. + return static_cast(lhs.Compare(rhs)); +} + ComparisonResult CompareNumbers(const google_firestore_v1_Value& left, const google_firestore_v1_Value& right) { + if (IsDecimal128Value(left) || IsDecimal128Value(right)) { + return Compare128BitNumbers(left, right); + } + if (left.which_value_type == google_firestore_v1_Value_double_value_tag) { double left_double = left.double_value; if (right.which_value_type == google_firestore_v1_Value_double_value_tag) { @@ -655,7 +711,10 @@ bool NumberEquals(const google_firestore_v1_Value& left, } else if (IsInt32Value(left) && IsInt32Value(right)) { return left.map_value.fields[0].value.integer_value == right.map_value.fields[0].value.integer_value; + } else if (IsDecimal128Value(left) && IsDecimal128Value(right)) { + return Compare128BitNumbers(left, right) == util::ComparisonResult::Same; } + return false; } @@ -906,8 +965,9 @@ google_firestore_v1_Value GetLowerBound( return MinBsonBinaryData(); } else if (IsRegexValue(value)) { return MinRegex(); - } else if (IsInt32Value(value)) { - // int32Value is treated the same as integerValue and doubleValue. 
+ } else if (IsInt32Value(value) || IsDecimal128Value(value)) { + // Int32Value and Decimal128Value are treated the same as integerValue + // and doubleValue. return MinNumber(); } else if (IsMinKeyValue(value)) { return MinKeyValue(); @@ -950,8 +1010,9 @@ google_firestore_v1_Value GetUpperBound( return MinMap(); } else if (IsMinKeyValue(value)) { return MinBoolean(); - } else if (IsInt32Value(value)) { - // int32Value is treated the same as integerValue and doubleValue. + } else if (IsInt32Value(value) || IsDecimal128Value(value)) { + // Int32Value and Decimal128Value are treated the same as integerValue + // and doubleValue. return MinTimestamp(); } else if (IsBsonTimestamp(value)) { return MinString(); @@ -1372,11 +1433,40 @@ bool IsInt32Value(const google_firestore_v1_Value& value) { return true; } +bool IsDecimal128Value(const google_firestore_v1_Value& value) { + // A Decimal128Value is expected to be a map as follows: + // { + // "__decimal128__": 12345 + // } + + // Must be a map with 1 field. + if (value.which_value_type != google_firestore_v1_Value_map_value_tag || + value.map_value.fields_count != 1) { + return false; + } + + // Must have a "__decimal128__" key. + absl::optional field_index = IndexOfKey( + value.map_value, kRawDecimal128TypeFieldValue, kDecimal128TypeFieldValue); + if (!field_index.has_value()) { + return false; + } + + // Must have a string value. + google_firestore_v1_Value& decimal_str = value.map_value.fields[0].value; + if (decimal_str.which_value_type != + google_firestore_v1_Value_string_value_tag) { + return false; + } + + return true; +} + bool IsBsonType(const google_firestore_v1_Value& value) { - MapType mapType = DetectMapType(value); + const MapType mapType = DetectMapType(value); return mapType == MapType::kMinKey || mapType == MapType::kMaxKey || mapType == MapType::kRegex || mapType == MapType::kInt32 || - mapType == MapType::kBsonObjectId || + mapType == MapType::kDecimal128 || mapType == MapType::kBsonObjectId || mapType == MapType::kBsonTimestamp || mapType == MapType::kBsonBinaryData; } diff --git a/Firestore/core/src/model/value_util.h b/Firestore/core/src/model/value_util.h index fbea964a8ec..4e01f102107 100644 --- a/Firestore/core/src/model/value_util.h +++ b/Firestore/core/src/model/value_util.h @@ -81,6 +81,10 @@ extern pb_bytes_array_s* kRegexTypeOptionsFieldValue; extern const char* kRawInt32TypeFieldValue; extern pb_bytes_array_s* kInt32TypeFieldValue; +/** The key of a decimal128 in a map proto. */ +extern const char* kRawDecimal128TypeFieldValue; +extern pb_bytes_array_s* kDecimal128TypeFieldValue; + /** The key of a BSON ObjectId in a map proto. */ extern const char* kRawBsonObjectIdTypeFieldValue; extern pb_bytes_array_s* kBsonObjectIdTypeFieldValue; @@ -145,9 +149,10 @@ enum class MapType { kMaxKey = 5, kRegex = 6, kInt32 = 7, - kBsonObjectId = 8, - kBsonTimestamp = 9, - kBsonBinaryData = 10 + kDecimal128 = 8, + kBsonObjectId = 9, + kBsonTimestamp = 10, + kBsonBinaryData = 11 }; /** Returns the Map type for the given value. */ @@ -280,6 +285,11 @@ bool IsRegexValue(const google_firestore_v1_Value& value); */ bool IsInt32Value(const google_firestore_v1_Value& value); +/** + * Returns `true` if `value` represents a Decimal128Value. + */ +bool IsDecimal128Value(const google_firestore_v1_Value& value); + /** * Returns `true` if `value` represents a BsonObjectId. 
*/ diff --git a/Firestore/core/src/util/quadruple.cc b/Firestore/core/src/util/quadruple.cc new file mode 100644 index 00000000000..5a311624eb7 --- /dev/null +++ b/Firestore/core/src/util/quadruple.cc @@ -0,0 +1,244 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "quadruple.h" +#include +#include +#include +#include +#include "quadruple_builder.h" + +namespace firebase { +namespace firestore { +namespace util { + +namespace { +constexpr int64_t kHashCodeOfNan = 7652541255; +} + +Quadruple::Quadruple(double x) { + negative_ = signbit(x); + switch (fpclassify(x)) { + case FP_NAN: + negative_ = false; + exponent_ = kInfiniteExponent; + mantissa_hi_ = 1ULL << 63; + mantissa_lo_ = 0; + break; + case FP_INFINITE: + exponent_ = kInfiniteExponent; + mantissa_hi_ = 0; + mantissa_lo_ = 0; + break; + case FP_ZERO: + exponent_ = 0; + mantissa_hi_ = 0; + mantissa_lo_ = 0; + break; + case FP_SUBNORMAL: + case FP_NORMAL: + negative_ = x < 0; + int x_exponent; + double small = frexp(std::abs(x), &x_exponent); + exponent_ = static_cast(x_exponent - 1) + kExponentBias; + // Scale 'small' to its 53-bit mantissa value as a long, then left-justify + // it with the leading 1 bit dropped in mantissa_hi (65-53=12). + mantissa_hi_ = static_cast(ldexp(small, 53)) << 12; + mantissa_lo_ = 0; + break; + } +} +Quadruple::Quadruple(int64_t x) { + if (x == 0) { + negative_ = false; + exponent_ = 0; + mantissa_hi_ = 0; + mantissa_lo_ = 0; + } else if (x == std::numeric_limits::min()) { + // -2^63 cannot be negated, so special-case it. + negative_ = true; + exponent_ = 63 + kExponentBias; + mantissa_hi_ = 0; + mantissa_lo_ = 0; + } else { + negative_ = x < 0; + if (negative_) { + x = -x; + } + if (x == 1) { + // The shift below wraps around for x=1, so special-case it. + exponent_ = kExponentBias; + mantissa_hi_ = 0; + mantissa_lo_ = 0; + } else { + uint64_t ux = static_cast(x); + int leading_zeros = __builtin_clzll(ux); + // Left-justify with the leading 1 dropped. 
+ mantissa_hi_ = ux << (leading_zeros + 1); + mantissa_lo_ = 0; + exponent_ = static_cast(63 - leading_zeros) + kExponentBias; + } + } +} +bool Quadruple::Parse(std::string s) { + if (s == "NaN") { + negative_ = false; + exponent_ = kInfiniteExponent; + mantissa_hi_ = 1LL << 63; + mantissa_lo_ = 0; + return true; + } + if (s == "-Infinity") { + negative_ = true; + exponent_ = kInfiniteExponent; + mantissa_hi_ = 0; + mantissa_lo_ = 0; + return true; + } + if (s == "Infinity" || s == "+Infinity") { + negative_ = false; + exponent_ = kInfiniteExponent; + mantissa_hi_ = 0; + mantissa_lo_ = 0; + return true; + } + bool negative = false; + int len = s.size(); + uint8_t* digits = new uint8_t[len]; + int i = 0; + int j = 0; + int64_t exponent = 0; + if (i < len) { + if (s[i] == '-') { + negative = true; + i++; + } else if (s[i] == '+') { + i++; + } + } + while (i < len && isdigit(s[i])) { + digits[j++] = static_cast(s[i++] - '0'); + } + if (i < len && s[i] == '.') { + int decimal = ++i; + while (i < len && isdigit(s[i])) { + digits[j++] = static_cast(s[i++] - '0'); + } + exponent = decimal - i; + } + if (i < len && (s[i] == 'e' || s[i] == 'E')) { + int64_t exponentValue = 0; + i++; + int exponentSign = 1; + if (i < len) { + if (s[i] == '-') { + exponentSign = -1; + i++; + } else if (s[i] == '+') { + i++; + } + } + int firstExponent = i; + while (i < len && isdigit(s[i])) { + exponentValue = exponentValue * 10 + s[i++] - '0'; + if (i - firstExponent > 9) { + return false; + } + } + if (i == firstExponent) { + return false; + } + exponent += exponentValue * exponentSign; + } + if (j == 0 || i != len) { + return false; + } + std::vector digits_copy(j); + for (int k = 0; k < j; k++) { + digits_copy[k] = digits[k]; + } + QuadrupleBuilder parsed; + parsed.parseDecimal(digits_copy, exponent); + negative_ = negative; + exponent_ = parsed.exponent; + mantissa_hi_ = parsed.mantHi; + mantissa_lo_ = parsed.mantLo; + return true; +} +// Compare two quadruples, with -0 < 0, and NaNs larger than all numbers. +int Quadruple::Compare(const Quadruple& other) const { + int lessThan; + int greaterThan; + if (negative_) { + if (!other.negative_) { + return -1; + } + lessThan = 1; + greaterThan = -1; + } else { + if (other.negative_) { + return 1; + } + lessThan = -1; + greaterThan = 1; + } + if (exponent_ < other.exponent_) { + return lessThan; + } else if (exponent_ > other.exponent_) { + return greaterThan; + } else if (mantissa_hi_ < other.mantissa_hi_) { + return lessThan; + } else if (mantissa_hi_ > other.mantissa_hi_) { + return greaterThan; + } else if (mantissa_lo_ < other.mantissa_lo_) { + return lessThan; + } else if (mantissa_lo_ > other.mantissa_lo_) { + return greaterThan; + } else { + return 0; + } +} +Quadruple::operator double() const { + switch (exponent_) { + case 0: + // zero or Quadruple subnormal + return negative_ ? -0.0 : 0.0; + case kInfiniteExponent: { + if (is_nan()) { + return NAN; + } + return negative_ ? -INFINITY : INFINITY; + } + default: + int32_t unbiased_exp = static_cast(exponent_ - kExponentBias); + return scalb((1LL << 52) | (mantissa_hi_ >> 12), -52 + unbiased_exp) * + (negative_ ? -1 : 1); + } +} +int64_t Quadruple::HashValue() const { + if (is_nan()) { + return kHashCodeOfNan; + } + const int64_t prime = 31; + int64_t result = 1; + result = prime * result + static_cast(exponent_); + result = prime * result + static_cast(mantissa_hi_); + result = prime * result + static_cast(mantissa_lo_); + result = prime * result + (negative_ ? 
1231 : 1237); + return result; +} + +} // namespace util +} // namespace firestore +} // namespace firebase \ No newline at end of file diff --git a/Firestore/core/src/util/quadruple.h b/Firestore/core/src/util/quadruple.h new file mode 100644 index 00000000000..eebb3371f6c --- /dev/null +++ b/Firestore/core/src/util/quadruple.h @@ -0,0 +1,100 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include +#include +#include +#include +#include + +#ifndef FIRESTORE_CORE_UTIL_QUADRUPLE_H_ +#define FIRESTORE_CORE_UTIL_QUADRUPLE_H_ + +namespace firebase { +namespace firestore { +namespace util { + +// A minimal C++ implementation of a 128-bit mantissa / 32-bit exponent binary +// floating point number equivalent to https://github.com/m-vokhm/Quadruple +// +// Supports: +// - creation from string +// - creation from serialised format (3 longs), long and double +// - comparisons +class Quadruple { + public: + // Initialises a Quadruple to +0.0 + Quadruple() : Quadruple(0, 0, 0) { + } + + Quadruple(uint64_t exponent_and_sign, + uint64_t mantissa_hi, + uint64_t mantissa_lo) + : negative_(exponent_and_sign >> 63), + exponent_(static_cast(exponent_and_sign)), + mantissa_hi_(mantissa_hi), + mantissa_lo_(mantissa_lo) { + } + explicit Quadruple(double x); + explicit Quadruple(int64_t x); + // Updates this Quadruple with the decimal number specified in s. + // Returns true for valid numbers, false for invalid numbers. + // The Quadruple is unchanged if the result is false. + // + // The supported format (no whitespace allowed) is: + // - NaN, Infinity, +Infinity, -Infinity for the corresponding constants + // - a string matching [+-]?[0-9]*(.[0-9]*)?([eE][+-]?[0-9]+)? + // with the exponent at most 9 characters, and the whole string not empty + bool Parse(std::string s); + // Rounds out-of-range numbers to +/- 0/HUGE_VAL. Rounds towards 0. + explicit operator double() const; + // Compare two quadruples, with -0 < 0, and NaNs larger than all numbers. + int Compare(const Quadruple& other) const; + bool operator==(const Quadruple& other) const { + return Compare(other) == 0; + } + bool is_nan() const { + return (exponent_ == kInfiniteExponent) && + !(mantissa_hi_ == 0 && mantissa_lo_ == 0); + } + // The actual exponent is exponent_-kExponentBias. 
+ static const uint32_t kExponentBias = 0x7FFFFFFF; + int64_t HashValue() const; + std::string DebugString() { + std::stringstream out; + if (negative_) { + out << "-"; + } + out << "1x" << std::hex << std::setfill('0'); + out << std::setw(16) << mantissa_hi_; + out << std::setw(16) << mantissa_lo_; + out << "*2^" << std::dec << exponent_ - static_cast(kExponentBias); + out << " =~ " << static_cast(*this); + + return out.str(); + } + + private: + static const uint32_t kInfiniteExponent = 0xFFFFFFFF; // including its bias + bool negative_; + uint32_t exponent_; + uint64_t mantissa_hi_; + uint64_t mantissa_lo_; +}; + +} // namespace util +} // namespace firestore +} // namespace firebase + +#endif // FIRESTORE_CORE_UTIL_QUADRUPLE_H_ \ No newline at end of file diff --git a/Firestore/core/src/util/quadruple_builder.cc b/Firestore/core/src/util/quadruple_builder.cc new file mode 100644 index 00000000000..c723842717a --- /dev/null +++ b/Firestore/core/src/util/quadruple_builder.cc @@ -0,0 +1,913 @@ +// Copyright 2025 Google LLC +// Copyright 2021 M.Vokhmentsev +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "quadruple_builder.h" + +#include +#include +#include + +namespace firebase { +namespace firestore { +namespace util { +// 2^192 = 6.277e57, so the 58-th digit after point may affect the result +static constexpr int32_t MAX_MANTISSA_LENGTH = 59; +// Max value of the decimal exponent, corresponds to EXPONENT_OF_MAX_VALUE +static constexpr int32_t MAX_EXP10 = 646456993; +// Min value of the decimal exponent, corresponds to EXPONENT_OF_MIN_NORMAL +static constexpr int32_t MIN_EXP10 = -646457032; +// (2^63) / 10 =~ 9.223372e17 +static constexpr double TWO_POW_63_DIV_10 = 922337203685477580.0; +// Just for convenience: 0x8000_0000_0000_0000L +// static constexpr uint64_t HIGH_BIT = 0x8000000000000000L; +// Just for convenience: 0x8000_0000L, 2^31 +static constexpr double POW_2_31 = 2147483648.0; +// Just for convenience: 0x0000_0000_FFFF_FFFFL +static constexpr uint64_t LOWER_32_BITS = 0x00000000FFFFFFFFL; +// Just for convenience: 0xFFFF_FFFF_0000_0000L; +static constexpr uint64_t HIGHER_32_BITS = 0xFFFFFFFF00000000L; +// Approximate value of log2(10) +static const double LOG2_10 = log(10) / log(2); +// Approximate value of log2(e) +static const double LOG2_E = 1 / log(2.0); +// The value of the exponent (biased) corresponding to {@code 1.0 == 2^0}; +// equals to 2_147_483_647 +// ({@code 0x7FFF_FFFF}). 
+static constexpr int32_t EXPONENT_BIAS = 0x7FFFFFFF; +// The value of the exponent (biased), corresponding to {@code Infinity}, {@code +// _Infinty}, and +// {@code NaN} +static constexpr uint64_t EXPONENT_OF_INFINITY = 0xFFFFFFFFL; +// An array of positive powers of two, each value consists of 4 longs: decimal +// exponent and 3 x 64 bits of mantissa, divided by ten Used to find an +// arbitrary power of 2 (by powerOfTwo(long exp)) +static std::array, 33> POS_POWERS_OF_2 = { + {// 0: 2^0 = 1 = 0.1e1 + {{static_cast(1), 0x1999999999999999LL, 0x9999999999999999LL, + 0x999999999999999aLL}}, // 1: 2^(2^0) = 2^1 = 2 = 0.2e1 + {{static_cast(1), 0x3333333333333333LL, 0x3333333333333333LL, + 0x3333333333333334LL}}, // *** + // 2: 2^(2^1) = 2^2 = 4 = 0.4e1 + {{static_cast(1), 0x6666666666666666LL, 0x6666666666666666LL, + 0x6666666666666667LL}}, // *** + // 3: 2^(2^2) = 2^4 = 16 = 0.16e2 + {{static_cast(2), 0x28f5c28f5c28f5c2LL, 0x8f5c28f5c28f5c28LL, + 0xf5c28f5c28f5c290LL}}, // *** + // 4: 2^(2^3) = 2^8 = 256 = 0.256e3 + {{static_cast(3), 0x4189374bc6a7ef9dLL, 0xb22d0e5604189374LL, + 0xbc6a7ef9db22d0e6LL}}, // *** + // 5: 2^(2^4) = 2^16 = 65536 = 0.65536e5 + {{static_cast(5), 0xa7c5ac471b478423LL, 0x0fcf80dc33721d53LL, + 0xcddd6e04c0592104LL}}, // 6: 2^(2^5) = 2^32 = 4294967296 = + // 0.4294967296e10 + {{static_cast(10), 0x6df37f675ef6eadfLL, 0x5ab9a2072d44268dLL, + 0x97df837e6748956eLL}}, // 7: 2^(2^6) = 2^64 = 18446744073709551616 + // = 0.18446744073709551616e20 + {{static_cast(20), 0x2f394219248446baLL, 0xa23d2ec729af3d61LL, + 0x0607aa0167dd94cbLL}}, // 8: 2^(2^7) = 2^128 = + // 340282366920938463463374607431768211456 = + // 0.340282366920938463463374607431768211456e39 + {{static_cast(39), 0x571cbec554b60dbbLL, 0xd5f64baf0506840dLL, + 0x451db70d5904029bLL}}, // 9: 2^(2^8) = 2^256 = + // 1.1579208923731619542357098500868790785326998466564056403945758401E+77 + // = + // 0.11579208923731619542357098500868790785326998466564056403945758401e78 + {{static_cast(78), 0x1da48ce468e7c702LL, 0x6520247d3556476dLL, + 0x1469caf6db224cfaLL}}, // *** + // 10: 2^(2^9) = 2^512 = + // 1.3407807929942597099574024998205846127479365820592393377723561444E+154 + // = + // 0.13407807929942597099574024998205846127479365820592393377723561444e155 + {{static_cast(155), 0x2252f0e5b39769dcLL, 0x9ae2eea30ca3ade0LL, + 0xeeaa3c08dfe84e30LL}}, // 11: 2^(2^10) = 2^1024 = + // 1.7976931348623159077293051907890247336179769789423065727343008116E+308 + // = + // 0.17976931348623159077293051907890247336179769789423065727343008116e309 + {{static_cast(309), 0x2e055c9a3f6ba793LL, 0x16583a816eb60a59LL, + 0x22c4b0826cf1ebf7LL}}, // 12: 2^(2^11) = 2^2048 = + // 3.2317006071311007300714876688669951960444102669715484032130345428E+616 + // = + // 0.32317006071311007300714876688669951960444102669715484032130345428e617 + {{static_cast(617), 0x52bb45e9cf23f17fLL, 0x7688c07606e50364LL, + 0xb34479aa9d449a57LL}}, // 13: 2^(2^12) = 2^4096 = + // 1.0443888814131525066917527107166243825799642490473837803842334833E+1233 + // = + // 0.10443888814131525066917527107166243825799642490473837803842334833e1234 + {{static_cast(1234), 0x1abc81c8ff5f846cLL, 0x8f5e3c9853e38c97LL, + 0x45060097f3bf9296LL}}, // 14: 2^(2^13) = 2^8192 = + // 1.0907481356194159294629842447337828624482641619962326924318327862E+2466 + // = + // 0.10907481356194159294629842447337828624482641619962326924318327862e2467 + {{static_cast(2467), 0x1bec53b510daa7b4LL, 0x48369ed77dbb0eb1LL, + 0x3b05587b2187b41eLL}}, // 15: 2^(2^14) = 2^16384 = + // 
1.1897314953572317650857593266280071307634446870965102374726748212E+4932 + // = + // 0.11897314953572317650857593266280071307634446870965102374726748212e4933 + {{static_cast(4933), 0x1e75063a5ba91326LL, 0x8abfb8e460016ae3LL, + 0x28008702d29e8a3cLL}}, // 16: 2^(2^15) = 2^32768 = + // 1.4154610310449547890015530277449516013481307114723881672343857483E+9864 + // = + // 0.14154610310449547890015530277449516013481307114723881672343857483e9865 + {{static_cast(9865), 0x243c5d8bb5c5fa55LL, 0x40c6d248c5881915LL, + 0x4c0fd99fd5befc22LL}}, // 17: 2^(2^16) = 2^65536 = + // 2.0035299304068464649790723515602557504478254755697514192650169737E+19728 + // = + // 0.20035299304068464649790723515602557504478254755697514192650169737e19729 + {{static_cast(19729), 0x334a5570c3f4ef3cLL, 0xa13c36c43f979c90LL, + 0xda7ac473555fb7a8LL}}, // 18: 2^(2^17) = 2^131072 = + // 4.0141321820360630391660606060388767343771510270414189955825538065E+39456 + // = + // 0.40141321820360630391660606060388767343771510270414189955825538065e39457 + {{static_cast(39457), 0x66c304445dd98f3bLL, 0xa8c293a20e47a41bLL, + 0x4c5b03dc12604964LL}}, // 19: 2^(2^18) = 2^262144 = + // 1.6113257174857604736195721184520050106440238745496695174763712505E+78913 + // = + // 0.16113257174857604736195721184520050106440238745496695174763712505e78914 + {{static_cast(78914), 0x293ffbf5fb028cc4LL, 0x89d3e5ff44238406LL, + 0x369a339e1bfe8c9bLL}}, // 20: 2^(2^19) = 2^524288 = + // 2.5963705678310007761265964957268828277447343763484560463573654868E+157826 + // = + // 0.25963705678310007761265964957268828277447343763484560463573654868e157827 + {{static_cast(157827), 0x427792fbb68e5d20LL, + 0x7b297cd9fc154b62LL, + 0xf09142114aa9a20cLL}}, // 21: 2^(2^20) = 2^1048576 = + // 6.7411401254990734022690651047042454376201859485326882846944915676E+315652 + // = + // 0.67411401254990734022690651047042454376201859485326882846944915676e315653 + {{static_cast(315653), 0xac92bc65ad5c08fcLL, + 0x00beeb115a566c19LL, + 0x4ba882d8a4622437LL}}, // 22: 2^(2^21) = 2^2097152 = + // 4.5442970191613663099961595907970650433180103994591456270882095573E+631305 + // = + // 0.45442970191613663099961595907970650433180103994591456270882095573e631306 + {{static_cast(631306), 0x745581440f92e80eLL, + 0x4da822cf7f896f41LL, + 0x509d598678164ecdLL}}, // 23: 2^(2^22) = 2^4194304 = + // 2.0650635398358879243991194945816501695274360493029670347841664177E+1262611 + // = + // 0.20650635398358879243991194945816501695274360493029670347841664177e1262612 + {{static_cast(1262612), 0x34dd99b4c69523a5LL, + 0x64bc2e8f0d8b1044LL, + 0xb03b1c96da5dd349LL}}, // 24: 2^(2^23) = 2^8388608 = + // 4.2644874235595278724327289260856157547554200794957122157246170406E+2525222 + // = + // 0.42644874235595278724327289260856157547554200794957122157246170406e2525223 + {{static_cast(2525223), 0x6d2bbea9d6d25a08LL, + 0xa0a4606a88e96b70LL, + 0x182063bbc2fe8520LL}}, // 25: 2^(2^24) = 2^16777216 = + // 1.8185852985697380078927713277749906189248596809789408311078112486E+5050445 + // = + // 0.18185852985697380078927713277749906189248596809789408311078112486e5050446 + {{static_cast(5050446), 0x2e8e47d63bfdd6e3LL, + 0x2b55fa8976eaa3e9LL, + 0x1a6b9d3086412a73LL}}, // 26: 2^(2^25) = 2^33554432 = + // 3.3072524881739831340558051919726975471129152081195558970611353362E+10100890 + // = + // 0.33072524881739831340558051919726975471129152081195558970611353362e10100891 + {{static_cast(10100891), 0x54aa68efa1d719dfLL, + 0xd8505806612c5c8fLL, + 0xad068837fee8b43aLL}}, // 27: 2^(2^26) = 2^67108864 = + // 
1.0937919020533002449982468634925923461910249420785622990340704603E+20201781 + // = + // 0.10937919020533002449982468634925923461910249420785622990340704603e20201782 + {{static_cast(20201782), 0x1c00464ccb7bae77LL, + 0x9e3877784c77982cLL, + 0xd94af3b61717404fLL}}, // 28: 2^(2^27) = 2^134217728 = + // 1.1963807249973763567102377630870670302911237824129274789063323723E+40403562 + // = + // 0.11963807249973763567102377630870670302911237824129274789063323723e40403563 + {{static_cast(40403563), 0x1ea099c8be2b6cd0LL, + 0x8bfb6d539fa50466LL, + 0x6d3bc37e69a84218LL}}, // 29: 2^(2^28) = 2^268435456 = + // 1.4313268391452478724777126233530788980596273340675193575004129517E+80807124 + // = + // 0.14313268391452478724777126233530788980596273340675193575004129517e80807125 + {{static_cast(80807125), 0x24a457f466ce8d18LL, + 0xf2c8f3b81bc6bb59LL, + 0xa78c757692e02d49LL}}, // 30: 2^(2^29) = 2^536870912 = + // 2.0486965204575262773910959587280218683219330308711312100181276813E+161614248 + // = + // 0.20486965204575262773910959587280218683219330308711312100181276813e161614249 + {{static_cast(161614249), 0x347256677aba6b53LL, + 0x3fbf90d30611a67cLL, + 0x1e039d87e0bdb32bLL}}, // 31: 2^(2^30) = 2^1073741824 = + // 4.1971574329347753848087162337676781412761959309467052555732924370E+323228496 + // = + // 0.41971574329347753848087162337676781412761959309467052555732924370e323228497 + {{static_cast(323228497), 0x6b727daf0fd3432aLL, + 0x71f71121f9e4200fLL, + 0x8fcd9942d486c10cLL}}, // 32: 2^(2^31) = 2^2147483648 = + // 1.7616130516839633532074931497918402856671115581881347960233679023E+646456993 + // = + // 0.17616130516839633532074931497918402856671115581881347960233679023e646456994 + {{static_cast(646456994), 0x2d18e84484d91f78LL, + 0x4079bfe7829dec6fLL, 0x21551643e365abc6LL}}}}; +// An array of negative powers of two, each value consists of 4 longs: decimal +// exponent and 3 x 64 bits of mantissa, divided by ten. 
Used to find an +// arbitrary power of 2 (by powerOfTwo(long exp)) +static std::array, 33> NEG_POWERS_OF_2 = { + {// v18 + // 0: 2^0 = 1 = 0.1e1 + {{static_cast(1), 0x1999999999999999LL, 0x9999999999999999LL, + 0x999999999999999aLL}}, // 1: 2^-(2^0) = 2^-1 = 0.5 = 0.5e0 + {{static_cast(0), 0x8000000000000000LL, 0x0000000000000000LL, + 0x0000000000000000LL}}, // 2: 2^-(2^1) = 2^-2 = 0.25 = 0.25e0 + // {0, 0x4000_0000_0000_0000L, + // 0x0000_0000_0000_0000L, + // 0x0000_0000_0000_0000L}, + {{static_cast(0), 0x4000000000000000LL, 0x0000000000000000LL, + 0x0000000000000001LL}}, // *** + // 3: 2^-(2^2) = 2^-4 = 0.0625 = 0.625e-1 + {{static_cast(-1), 0xa000000000000000LL, 0x0000000000000000LL, + 0x0000000000000000LL}}, // 4: 2^-(2^3) = 2^-8 = 0.00390625 = + // 0.390625e-2 + {{static_cast(-2), 0x6400000000000000LL, 0x0000000000000000LL, + 0x0000000000000000LL}}, // 5: 2^-(2^4) = 2^-16 = 0.0000152587890625 + // = 0.152587890625e-4 + {{static_cast(-4), 0x2710000000000000LL, 0x0000000000000000LL, + 0x0000000000000001LL}}, // *** + // 6: 2^-(2^5) = 2^-32 + // = 2.3283064365386962890625E-10 = + // 0.23283064365386962890625e-9 + {{static_cast(-9), 0x3b9aca0000000000LL, 0x0000000000000000LL, + 0x0000000000000001LL}}, // *** + // 7: 2^-(2^6) = 2^-64 + // = 5.42101086242752217003726400434970855712890625E-20 + // = + // 0.542101086242752217003726400434970855712890625e-19 + {{static_cast(-19), 0x8ac7230489e80000LL, 0x0000000000000000LL, + 0x0000000000000000LL}}, // 8: 2^-(2^7) = 2^-128 = + // 2.9387358770557187699218413430556141945466638919302188037718792657E-39 + // = + // 0.29387358770557187699218413430556141945466638919302188037718792657e-38 + {{static_cast(-38), 0x4b3b4ca85a86c47aLL, 0x098a224000000000LL, + 0x0000000000000001LL}}, // *** + // 9: 2^-(2^8) = 2^-256 = + // 8.6361685550944446253863518628003995711160003644362813850237034700E-78 + // = + // 0.86361685550944446253863518628003995711160003644362813850237034700e-77 + {{static_cast(-77), 0xdd15fe86affad912LL, 0x49ef0eb713f39ebeLL, + 0xaa987b6e6fd2a002LL}}, // 10: 2^-(2^9) = 2^-512 = + // 7.4583407312002067432909653154629338373764715346004068942715183331E-155 + // = + // 0.74583407312002067432909653154629338373764715346004068942715183331e-154 + {{static_cast(-154), 0xbeeefb584aff8603LL, 0xaafb550ffacfd8faLL, + 0x5ca47e4f88d45371LL}}, // 11: 2^-(2^10) = 2^-1024 = + // 5.5626846462680034577255817933310101605480399511558295763833185421E-309 + // = + // 0.55626846462680034577255817933310101605480399511558295763833185421e-308 + {{static_cast(-308), 0x8e679c2f5e44ff8fLL, 0x570f09eaa7ea7648LL, + 0x5961db50c6d2b888LL}}, // *** + // 12: 2^-(2^11) = 2^-2048 = + // 3.0943460473825782754801833699711978538925563038849690459540984582E-617 + // = + // 0.30943460473825782754801833699711978538925563038849690459540984582e-616 + {{static_cast(-616), 0x4f371b3399fc2ab0LL, 0x8170041c9feb05aaLL, + 0xc7c343447c75bcf6LL}}, // 13: 2^-(2^12) = 2^-4096 = + // 9.5749774609521853579467310122804202420597417413514981491308464986E-1234 + // = + // 0.95749774609521853579467310122804202420597417413514981491308464986e-1233 + {{static_cast(-1233), 0xf51e928179013fd3LL, 0xde4bd12cde4d985cLL, + 0x4a573ca6f94bff14LL}}, // 14: 2^-(2^13) = 2^-8192 = + // 9.1680193377742358281070619602424158297818248567928361864131947526E-2467 + // = + // 0.91680193377742358281070619602424158297818248567928361864131947526e-2466 + {{static_cast(-2466), 0xeab388127bccaff7LL, 0x1667639142b9fbaeLL, + 0x775ec9995e1039fbLL}}, // 15: 2^-(2^14) = 2^-16384 = + // 
8.4052578577802337656566945433043815064951983621161781002720680748E-4933 + // = + // 0.84052578577802337656566945433043815064951983621161781002720680748e-4932 + {{static_cast(-4932), 0xd72cb2a95c7ef6ccLL, 0xe81bf1e825ba7515LL, + 0xc2feb521d6cb5dcdLL}}, // 16: 2^-(2^15) = 2^-32768 = + // 7.0648359655776364427774021878587184537374439102725065590941425796E-9865 + // = + // 0.70648359655776364427774021878587184537374439102725065590941425796e-9864 + {{static_cast(-9864), 0xb4dc1be6604502dcLL, 0xd491079b8eef6535LL, + 0x578d3965d24de84dLL}}, // *** + // 17: 2^-(2^16) = 2^-65536 = + // 4.9911907220519294656590574792132451973746770423207674161425040336E-19729 + // = + // 0.49911907220519294656590574792132451973746770423207674161425040336e-19728 + {{static_cast(-19728), 0x7fc6447bee60ea43LL, + 0x2548da5c8b125b27LL, + 0x5f42d1142f41d349LL}}, // *** + // 18: 2^-(2^17) = 2^-131072 = + // 2.4911984823897261018394507280431349807329035271689521242878455599E-39457 + // = + // 0.24911984823897261018394507280431349807329035271689521242878455599e-39456 + {{static_cast(-39456), 0x3fc65180f88af8fbLL, + 0x6a6915f383349413LL, + 0x063c3708b6ceb291LL}}, // *** + // 19: 2^-(2^18) = 2^-262144 = + // 6.2060698786608744707483205572846793091942192651991171731773832448E-78914 + // = + // 0.62060698786608744707483205572846793091942192651991171731773832448e-78913 + {{static_cast(-78913), 0x9ee0197c8dcd55bfLL, + 0x2b2b9b942c38f4a2LL, + 0x0f8ba634e9c706aeLL}}, // 20: 2^-(2^19) = 2^-524288 = + // 3.8515303338821801176537443725392116267291403078581314096728076497E-157827 + // = + // 0.38515303338821801176537443725392116267291403078581314096728076497e-157826 + {{static_cast(-157826), 0x629963a25b8b2d79LL, + 0xd00b9d2286f70876LL, + 0xe97004700c3644fcLL}}, // *** + // 21: 2^-(2^20) = 2^-1048576 = + // 1.4834285912814577854404052243709225888043963245995136935174170977E-315653 + // = + // 0.14834285912814577854404052243709225888043963245995136935174170977e-315652 + {{static_cast(-315652), 0x25f9cc308ceef4f3LL, + 0x40f19543911a4546LL, + 0xa2cd389452cfc366LL}}, // 22: 2^-(2^21) = 2^-2097152 = + // 2.2005603854312903332428997579002102976620485709683755186430397089E-631306 + // = + // 0.22005603854312903332428997579002102976620485709683755186430397089e-631305 + {{static_cast(-631305), 0x385597b0d47e76b8LL, + 0x1b9f67e103bf2329LL, + 0xc3119848595985f7LL}}, // 23: 2^-(2^22) = 2^-4194304 = + // 4.8424660099295090687215589310713586524081268589231053824420510106E-1262612 + // = + // 0.48424660099295090687215589310713586524081268589231053824420510106e-1262611 + {{static_cast(-1262611), 0x7bf795d276c12f66LL, + 0x66a61d62a446659aLL, + 0xa1a4d73bebf093d5LL}}, // *** + // 24: 2^-(2^23) = 2^-8388608 = + // 2.3449477057322620222546775527242476219043877555386221929831430440E-2525223 + // = + // 0.23449477057322620222546775527242476219043877555386221929831430440e-2525222 + {{static_cast(-2525222), 0x3c07d96ab1ed7799LL, + 0xcb7355c22cc05ac0LL, + 0x4ffc0ab73b1f6a49LL}}, // *** + // 25: 2^-(2^24) = 2^-16777216 = + // 5.4987797426189993226257377747879918011694025935111951649826798628E-5050446 + // = + // 0.54987797426189993226257377747879918011694025935111951649826798628e-5050445 + {{static_cast(-5050445), 0x8cc4cd8c3edefb9aLL, + 0x6c8ff86a90a97e0cLL, + 0x166cfddbf98b71bfLL}}, // *** + // 26: 2^-(2^25) = 2^-33554432 = + // 3.0236578657837068435515418409027857523343464783010706819696074665E-10100891 + // = + // 0.30236578657837068435515418409027857523343464783010706819696074665e-10100890 + {{static_cast(-10100890), 0x4d67d81cc88e1228LL, + 
0x1d7cfb06666b79b3LL, + 0x7b916728aaa4e70dLL}}, // *** + // 27: 2^-(2^26) = 2^-67108864 = + // 9.1425068893156809483320844568740945600482370635012633596231964471E-20201782 + // = + // 0.91425068893156809483320844568740945600482370635012633596231964471e-20201781 + {{static_cast(-20201781), 0xea0c55494e7a552dLL, + 0xb88cb9484bb86c61LL, + 0x8d44893c610bb7dFLL}}, // *** + // 28: 2^-(2^27) = 2^-134217728 = + // 8.3585432221184688810803924874542310018191301711943564624682743545E-40403563 + // = + // 0.83585432221184688810803924874542310018191301711943564624682743545e-40403562 + {{static_cast(-40403562), 0xd5fa8c821ec0c24aLL, + 0xa80e46e764e0f8b0LL, + 0xa7276bfa432fac7eLL}}, // 29: 2^-(2^28) = 2^-268435456 = + // 6.9865244796022595809958912202005005328020601847785697028605460277E-80807125 + // = + // 0.69865244796022595809958912202005005328020601847785697028605460277e-80807124 + {{static_cast(-80807124), 0xb2dae307426f6791LL, + 0xc970b82f58b12918LL, + 0x0472592f7f39190eLL}}, // 30: 2^-(2^29) = 2^-536870912 = + // 4.8811524304081624052042871019605298977947353140996212667810837790E-161614249 + // = + // 0.48811524304081624052042871019605298977947353140996212667810837790e-161614248 + // {-161614248, 0x7cf5_1edd_8a15_f1c9L, + // 0x656d_ab34_98f8_e697L, + // 0x12da_a2a8_0e53_c809L}, + {{static_cast(-161614248), 0x7cf51edd8a15f1c9LL, + 0x656dab3498f8e697LL, + 0x12daa2a80e53c807LL}}, // 31: 2^-(2^30) = 2^-1073741824 = + // 2.3825649048879510732161697817326745204151961255592397879550237608E-323228497 + // = + // 0.23825649048879510732161697817326745204151961255592397879550237608e-323228496 + {{static_cast(-323228496), 0x3cfe609ab5883c50LL, + 0xbec8b5d22b198871LL, + 0xe18477703b4622b4LL}}, // 32: 2^-(2^31) = 2^-2147483648 = + // 5.6766155260037313438164181629489689531186932477276639365773003794E-646456994 + // = + // 0.56766155260037313438164181629489689531186932477276639365773003794e-646456993 + {{static_cast(-646456993), 0x9152447b9d7cda9aLL, + 0x3b4d3f6110d77aadLL, 0xfa81bad1c394adb4LL}}}}; +// Buffers used internally +// The order of words in the arrays is big-endian: the highest part is in +// buff[0] (in buff[1] for buffers of 10 words) + +void QuadrupleBuilder::parse(std::vector& digits, int32_t exp10) { + exp10 += static_cast((digits).size()) - + 1; // digits is viewed as x.yyy below. + this->exponent = 0; + this->mantHi = 0LL; + this->mantLo = 0LL; + // Finds numeric value of the decimal mantissa + std::array& mantissa = this->buffer6x32C; + int32_t exp10Corr = parseMantissa(digits, mantissa); + if (exp10Corr == 0 && isEmpty(mantissa)) { + // Mantissa == 0 + return; + } + // takes account of the point position in the mant string and possible carry + // as a result of round-up (like 9.99e1 -> 1.0e2) + exp10 += exp10Corr; + if (exp10 < MIN_EXP10) { + return; + } + if (exp10 > MAX_EXP10) { + this->exponent = (static_cast(EXPONENT_OF_INFINITY)); + return; + } + double exp2 = findBinaryExponent(exp10, mantissa); + // Finds binary mantissa and possible exponent correction. Fills the fields. 
+ findBinaryMantissa(exp10, exp2, mantissa); +} +int32_t QuadrupleBuilder::parseMantissa(std::vector& digits, + std::array& mantissa) { + for (int32_t i = (0); i < (6); i++) { + mantissa[i] = 0LL; + } + // Skip leading zeroes + int32_t firstDigit = 0; + while (firstDigit < static_cast((digits).size()) && + digits[firstDigit] == 0) { + firstDigit += 1; + } + if (firstDigit == static_cast((digits).size())) { + return 0; // All zeroes + } + int32_t expCorr = -firstDigit; + // Limit the string length to avoid unnecessary fuss + if (static_cast((digits).size()) - firstDigit > + MAX_MANTISSA_LENGTH) { + bool carry = + digits[MAX_MANTISSA_LENGTH] >= 5; // The highest digit to be truncated + std::vector truncated(MAX_MANTISSA_LENGTH); + for (int32_t i = (0); i < (MAX_MANTISSA_LENGTH); i++) { + truncated[i] = digits[i + firstDigit]; + } + if (carry) { // Round-up: add carry + expCorr += addCarry( + truncated); // May add an extra digit in front of it (99..99 -> 100) + } + digits = truncated; + firstDigit = 0; + } + for (int32_t i = (static_cast((digits).size())) - 1; + i >= (firstDigit); i--) { // digits, starting from the last + mantissa[0] |= (static_cast(digits[i])) << 32LL; + divBuffBy10(mantissa); + } + return expCorr; +} +// Divides the unpacked value stored in the given buffer by 10 +// @param buffer contains the unpacked value to divide (32 least significant +// bits are used) +template +void QuadrupleBuilder::divBuffBy10(std::array& buffer) { + int32_t maxIdx = static_cast((buffer).size()); + // big/endian + for (int32_t i = (0); i < (maxIdx); i++) { + uint64_t r = buffer[i] % 10LL; + buffer[i] = ((buffer[i]) / (10LL)); + if (i + 1 < maxIdx) { + buffer[i + 1] += r << 32LL; + } + } +} +// Checks if the buffer is empty (contains nothing but zeros) +// @param buffer the buffer to check +// @return {@code true} if the buffer is empty, {@code false} otherwise +template +bool QuadrupleBuilder::isEmpty(std::array& buffer) { + for (int32_t i = (0); i < (static_cast((buffer).size())); i++) { + if (buffer[i] != 0LL) { + return false; + } + } + return true; +} +// Adds one to a decimal number represented as a sequence of decimal digits. +// propagates carry as needed, so that {@code addCarryTo("6789") = "6790", +// addCarryTo("9999") = "10000"} etc. +// @return 1 if an additional higher "1" was added in front of the number as a +// result of +// rounding-up, 0 otherwise +int32_t QuadrupleBuilder::addCarry(std::vector& digits) { + for (int32_t i = (static_cast((digits).size())) - 1; i >= (0); + i--) { // starting with the lowest digit + uint8_t c = digits[i]; + if (c == 9) { + digits[i] = 0; + } else { + digits[i] = (static_cast(digits[i] + 1)); + return 0; + } + } + digits[0] = 1; + return 1; +} +// Finds binary exponent, using decimal exponent and mantissa.
+// exp2 = exp10 * log2(10) + log2(mant)
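+// (Illustrative check of the estimate: for 1.2e3, exp10 = 3 and mant = 1.2,
+// so exp2 = floor(3 * 3.3219... + 0.2630...) = floor(10.228...) = 10,
+// and indeed 1200 = 1.171875 * 2^10.)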
+// @param exp10 decimal exponent +// @param mantissa array of longs containing decimal mantissa (divided by 10) +// @return found value of binary exponent +double QuadrupleBuilder::findBinaryExponent(int32_t exp10, + std::array& mantissa) { + uint64_t mant10 = + mantissa[0] << 31LL | + ((mantissa[1]) >> (1LL)); // Higher 63 bits of the mantissa, in range + // 0x0CC..CCC -- 0x7FF..FFF (2^63/10 -- 2^63-1) + // decimal value of the mantissa in range 1.0..9.9999... + double mant10d = (static_cast(mant10)) / TWO_POW_63_DIV_10; + return floor((static_cast(exp10)) * LOG2_10 + + log2(mant10d)); // Binary exponent +} +// Calculates log2 of the given x +// @param x argument that can't be 0 +// @return the value of log2(x) +double QuadrupleBuilder::log2(double x) { + // x can't be 0 + return LOG2_E * log(x); +} +void QuadrupleBuilder::findBinaryMantissa(int32_t exp10, + double exp2, + std::array& mantissa) { + // pow(2, -exp2): division by 2^exp2 is multiplication by 2^(-exp2) actually + std::array& powerOf2 = this->buffer4x64B; + powerOfTwo(-exp2, powerOf2); + std::array& product = + this->buffer12x32; // use it for the product (M * 10^E / 2^e) + multUnpacked6x32byPacked(mantissa, powerOf2, + product); // product in buff_12x32 + multBuffBy10(product); // "Quasidecimals" are numbers divided by 10 + // The powerOf2[0] is stored as an unsigned value + if ((static_cast(powerOf2[0])) != (static_cast(-exp10))) { + // For some combinations of exp2 and exp10, additional multiplication needed + // (see mant2_from_M_E_e.xls) + multBuffBy10(product); + } + // compensate possible inaccuracy of logarithms used to compute exp2 + exp2 += normalizeMant(product); + exp2 += EXPONENT_BIAS; // add bias + // For subnormal values, exp2 <= 0. We just return 0 for them, as they are + // far from any range we are interested in. + if (exp2 <= 0) { + return; + } + exp2 += roundUp(product); // round up, may require exponent correction + if ((static_cast(exp2)) >= EXPONENT_OF_INFINITY) { + this->exponent = (static_cast(EXPONENT_OF_INFINITY)); + } else { + this->exponent = (static_cast(exp2)); + this->mantHi = (static_cast((product[0] << 32LL) + product[1])); + this->mantLo = (static_cast((product[2] << 32LL) + product[3])); + } +} +// Calculates the required power and returns the result in the quasidecimal +// format (an array of longs, where result[0] is the decimal exponent of the +// resulting value, and result[1] -- result[3] contain 192 bits of the mantissa +// divided by ten (so that 8 looks like
{@code {1, 0xCCCC_.._CCCCL,
+// 0xCCCC_.._CCCCL, 0xCCCC_.._CCCDL}}}
uses arrays buffer4x64B, +// buffer6x32A, buffer6x32B, buffer12x32, +// @param exp the power to raise 2 to +// @param power (result) the value of {@code2^exp} +void QuadrupleBuilder::powerOfTwo(double exp, std::array& power) { + if (exp == 0) { + array_copy(POS_POWERS_OF_2[0], power); + return; + } + // positive powers of 2 (2^0, 2^1, 2^2, 2^4, 2^8 ... 2^(2^31) ) + std::array, 33>* powers = (&(POS_POWERS_OF_2)); + if (exp < 0) { + exp = -exp; + powers = (&(NEG_POWERS_OF_2)); // positive powers of 2 (2^0, 2^-1, 2^-2, + // 2^-4, 2^-8 ... 2^30) + } + // 2^31 = 0x8000_0000L; a single bit that will be shifted right at every + // iteration + double currPowOf2 = POW_2_31; + int32_t idx = 32; // Index in the table of powers + bool first_power = true; + // if exp = b31 * 2^31 + b30 * 2^30 + .. + b0 * 2^0, where b0..b31 are the + // values of the bits in exp, then 2^exp = 2^b31 * 2^b30 ... * 2^b0. Find the + // product, using a table of powers of 2. + while (exp > 0) { + if (exp >= currPowOf2) { // the current bit in the exponent is 1 + if (first_power) { + // 4 longs, power[0] -- decimal (?) exponent, power[1..3] -- 192 bits of + // mantissa + array_copy((*(powers))[idx], power); + first_power = false; + } else { + // Multiply by the corresponding power of 2 + multPacked3x64_AndAdjustExponent(power, (*(powers))[idx], power); + } + exp -= currPowOf2; + } + idx -= 1; + currPowOf2 = currPowOf2 * 0.5; // Note: this is exact + } +} +// Copies from into to. +template +void QuadrupleBuilder::array_copy(std::array& source, + std::array& dest) { + for (int32_t i = (0); i < (static_cast((dest).size())); i++) { + dest[i] = source[i]; + } +} +// Multiplies two quasidecimal numbers contained in buffers of 3 x 64 bits with +// exponents, puts the product to buffer4x64B
and returns it. +// Both each of the buffers and the product contain 4 longs - exponent and 3 x +// 64 bits of mantissa. If the higher word of mantissa of the product is less +// than 0x1999_9999_9999_9999L (i.e. mantissa is less than 0.1) multiplies +// mantissa by 10 and adjusts the exponent respectively. +void QuadrupleBuilder::multPacked3x64_AndAdjustExponent( + std::array& factor1, + std::array& factor2, + std::array& result) { + multPacked3x64_simply(factor1, factor2, this->buffer12x32); + int32_t expCorr = correctPossibleUnderflow(this->buffer12x32); + pack_6x32_to_3x64(this->buffer12x32, result); + // result[0] is a signed int64 value stored in an uint64 + result[0] = + factor1[0] + factor2[0] + + (static_cast(expCorr)); // product.exp = f1.exp + f2.exp +} +// Multiplies mantissas of two packed quasidecimal values (each is an array of 4 +// longs, exponent + 3 x 64 bits of mantissa) Returns the product as unpacked +// buffer of 12 x 32 (12 x 32 bits of product) uses arrays buffer6x32A, +// buffer6x32B +// @param factor1 an array of longs containing factor 1 as packed quasidecimal +// @param factor2 an array of longs containing factor 2 as packed quasidecimal +// @param result an array of 12 longs filled with the product of mantissas +void QuadrupleBuilder::multPacked3x64_simply(std::array& factor1, + std::array& factor2, + std::array& result) { + for (int32_t i = (0); i < (static_cast((result).size())); i++) { + result[i] = 0LL; + } + // TODO2 19.01.16 21:23:06 for the next version -- rebuild the table of powers + // to make the numbers unpacked, to avoid packing/unpacking + unpack_3x64_to_6x32(factor1, this->buffer6x32A); + unpack_3x64_to_6x32(factor2, this->buffer6x32B); + for (int32_t i = (6) - 1; i >= (0); i--) { // compute partial 32-bit products + for (int32_t j = (6) - 1; j >= (0); j--) { + uint64_t part = this->buffer6x32A[i] * this->buffer6x32B[j]; + result[j + i + 1] = + (static_cast(result[j + i + 1] + (part & LOWER_32_BITS))); + result[j + i] = + (static_cast(result[j + i] + ((part) >> (32LL)))); + } + } + // Carry higher bits of the product to the lower bits of the next word + for (int32_t i = (12) - 1; i >= (1); i--) { + result[i - 1] = + (static_cast(result[i - 1] + ((result[i]) >> (32LL)))); + result[i] &= LOWER_32_BITS; + } +} +// Corrects possible underflow of the decimal mantissa, passed in in the {@code +// mantissa}, by multiplying it by a power of ten. 
The corresponding value to +// adjust the decimal exponent is returned as the result +// @param mantissa a buffer containing the mantissa to be corrected +// @return a corrective (addition) that is needed to adjust the decimal exponent +// of the number +template +int32_t QuadrupleBuilder::correctPossibleUnderflow( + std::array& mantissa) { + int32_t expCorr = 0; + while (isLessThanOne(mantissa)) { // Underflow + multBuffBy10(mantissa); + expCorr -= 1; + } + return expCorr; +} +// Checks if the unpacked quasidecimal value held in the given buffer is less +// than one (in this format, one is represented as { 0x1999_9999L, 0x9999_9999L, +// 0x9999_9999L,...} +// @param buffer a buffer containing the value to check +// @return {@code true}, if the value is less than one +template +bool QuadrupleBuilder::isLessThanOne(std::array& buffer) { + if (buffer[0] < 0x19999999LL) { + return true; + } + if (buffer[0] > 0x19999999LL) { + return false; + } + // A note regarding the coverage: + // Multiplying a 128-bit number by another 192-bit number, + // as well as multiplying of two 192-bit numbers, + // can never produce 320 (or 384 bits, respectively) of 0x1999_9999L, + // 0x9999_9999L, + for (int32_t i = (1); i < (static_cast((buffer).size())); i++) { + // so this loop can't be covered entirely + if (buffer[i] < 0x99999999LL) { + return true; + } + if (buffer[i] > 0x99999999LL) { + return false; + } + } + // and it can never reach this point in real life. + return false; // Still Java requires the return statement here. +} +// Multiplies unpacked 192-bit value by a packed 192-bit factor
+// uses static arrays buffer6x32B +// @param factor1 a buffer containing unpacked quasidecimal mantissa (6 x 32 +// bits) +// @param factor2 an array of 4 longs containing packed quasidecimal power of +// two +// @param product a buffer of at least 12 longs to hold the product +void QuadrupleBuilder::multUnpacked6x32byPacked( + std::array& factor1, + std::array& factor2, + std::array& product) { + for (int32_t i = (0); i < (static_cast((product).size())); i++) { + product[i] = 0LL; + } + std::array& unpacked2 = this->buffer6x32B; + unpack_3x64_to_6x32( + factor2, unpacked2); // It's the powerOf2, with exponent in 0'th word + int32_t maxFactIdx = static_cast((factor1).size()); + for (int32_t i = (maxFactIdx)-1; i >= (0); + i--) { // compute partial 32-bit products + for (int32_t j = (maxFactIdx)-1; j >= (0); j--) { + uint64_t part = factor1[i] * unpacked2[j]; + product[j + i + 1] = + (static_cast(product[j + i + 1] + (part & LOWER_32_BITS))); + product[j + i] = + (static_cast(product[j + i] + ((part) >> (32LL)))); + } + } + // Carry higher bits of the product to the lower bits of the next word + for (int32_t i = (12) - 1; i >= (1); i--) { + product[i - 1] = + (static_cast(product[i - 1] + ((product[i]) >> (32LL)))); + product[i] &= LOWER_32_BITS; + } +} +// Multiplies the unpacked value stored in the given buffer by 10 +// @param buffer contains the unpacked value to multiply (32 least significant +// bits are used) +template +void QuadrupleBuilder::multBuffBy10(std::array& buffer) { + int32_t maxIdx = static_cast((buffer).size()) - 1; + buffer[0] &= LOWER_32_BITS; + buffer[maxIdx] *= 10LL; + for (int32_t i = (maxIdx)-1; i >= (0); i--) { + buffer[i] = + (static_cast(buffer[i] * 10LL + ((buffer[i + 1]) >> (32LL)))); + buffer[i + 1] &= LOWER_32_BITS; + } +} +// Makes sure that the (unpacked) mantissa is normalized, +// i.e. buff[0] contains 1 in bit 32 (the implied integer part) and higher 32 of +// mantissa in bits 31..0, and buff[1]..buff[4] contain other 96 bits of +// mantissa in their lower halves:
0x0000_0001_XXXX_XXXXL,
+// 0x0000_0000_XXXX_XXXXL...
If necessary, divides the mantissa by +// appropriate power of 2 to make it normal. +// @param mantissa a buffer containing unpacked mantissa +// @return if the mantissa was not normal initially, a correction that should be +// added to the result's exponent, or 0 otherwise +template +int32_t QuadrupleBuilder::normalizeMant(std::array& mantissa) { + int32_t expCorr = 31 - __builtin_clzll(mantissa[0]); + if (expCorr != 0) { + divBuffByPower2(mantissa, expCorr); + } + return expCorr; +} +// Rounds up the contents of the unpacked buffer to 128 bits by adding unity one +// bit lower than the lowest of these 128 bits. If carry propagates up to bit 33 +// of buff[0], shifts the buffer rightwards to keep it normalized. +// @param mantissa the buffer to get rounded +// @return 1 if the buffer was shifted, 0 otherwise +template +int32_t QuadrupleBuilder::roundUp(std::array& mantissa) { + // due to the limited precision of the power of 2, a number with exactly half + // LSB in its mantissa (i.e that would have 0x8000_0000_0000_0000L in bits + // 128..191 if it were computed precisely), after multiplication by this power + // of 2, may get erroneous bits 185..191 (counting from the MSB), taking a + // value from 0xXXXX_XXXX_XXXX_XXXXL 0xXXXX_XXXX_XXXX_XXXXL + // 0x7FFF_FFFF_FFFF_FFD8L. to 0xXXXX_XXXX_XXXX_XXXXL 0xXXXX_XXXX_XXXX_XXXXL + // 0x8000_0000_0000_0014L, or something alike. To round it up, we first add + // 0x0000_0000_0000_0000L 0x0000_0000_0000_0000L 0x0000_0000_0000_0028L, to + // turn it into 0xXXXX_XXXX_XXXX_XXXXL 0xXXXX_XXXX_XXXX_XXXXL + // 0x8000_0000_0000_00XXL, and then add 0x0000_0000_0000_0000L + // 0x0000_0000_0000_0000L 0x8000_0000_0000_0000L, to provide carry to higher + // bits. + addToBuff(mantissa, 5, 100LL); // to compensate possible inaccuracy + addToBuff(mantissa, 4, + 0x80000000LL); // round-up, if bits 128..159 >= 0x8000_0000L + if ((mantissa[0] & (HIGHER_32_BITS << 1LL)) != 0LL) { + // carry's got propagated beyond the highest bit + divBuffByPower2(mantissa, 1); + return 1; + } + return 0; +} +// converts 192 most significant bits of the mantissa of a number from an +// unpacked quasidecimal form (where 32 least significant bits only used) to a +// packed quasidecimal form (where buff[0] contains the exponent and +// buff[1]..buff[3] contain 3 x 64 = 192 bits of mantissa) +// @param unpackedMant a buffer of at least 6 longs containing an unpacked value +// @param result a buffer of at least 4 long to hold the packed value +// @return packedQD192 with words 1..3 filled with the packed mantissa. +// packedQD192[0] is not +// affected. 
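+// Layout sketch (illustrative): the unpacked form keeps 32 significant bits
+// per word, e.g. {0x0000_0000_AAAA_AAAAL, 0x0000_0000_BBBB_BBBBL,
+// 0x0000_0000_CCCC_CCCCL, 0x0000_0000_DDDD_DDDDL, 0x0000_0000_EEEE_EEEEL,
+// 0x0000_0000_FFFF_FFFFL}; the packed form stores the same 192 bits as
+// result[1] = 0xAAAA_AAAA_BBBB_BBBBL, result[2] = 0xCCCC_CCCC_DDDD_DDDDL,
+// result[3] = 0xEEEE_EEEE_FFFF_FFFFL.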
+template <int size>
+void QuadrupleBuilder::pack_6x32_to_3x64(std::array<uint64_t, size>& unpackedMant,
+                                         std::array<uint64_t, 4>& result) {
+  result[1] = (unpackedMant[0] << 32LL) + unpackedMant[1];
+  result[2] = (unpackedMant[2] << 32LL) + unpackedMant[3];
+  result[3] = (unpackedMant[4] << 32LL) + unpackedMant[5];
+}
+// Unpacks the mantissa of a 192-bit quasidecimal (4 longs: exp10, mantHi,
+// mantMid, mantLo) to a buffer of 6 longs, where the least significant 32 bits
+// of each long contain the respective 32 bits of the mantissa
+// @param qd192 array of 4 longs containing the number to unpack
+// @param buff_6x32 buffer of 6 longs to hold the unpacked mantissa
+void QuadrupleBuilder::unpack_3x64_to_6x32(std::array<uint64_t, 4>& qd192,
+                                           std::array<uint64_t, 6>& buff_6x32) {
+  buff_6x32[0] = ((qd192[1]) >> (32LL));
+  buff_6x32[1] = qd192[1] & LOWER_32_BITS;
+  buff_6x32[2] = ((qd192[2]) >> (32LL));
+  buff_6x32[3] = qd192[2] & LOWER_32_BITS;
+  buff_6x32[4] = ((qd192[3]) >> (32LL));
+  buff_6x32[5] = qd192[3] & LOWER_32_BITS;
+}
+// Divides the contents of the buffer by 2^exp2
+// (shifts the buffer rightwards by exp2 if the exp2 is positive, and leftwards +// if it's negative), keeping it unpacked (only lower 32 bits of each element +// are used, except the buff[0] whose higher half is intended to contain integer +// part) +// @param buffer the buffer to divide +// @param exp2 the exponent of the power of two to divide by, expected to be +template +void QuadrupleBuilder::divBuffByPower2(std::array& buffer, + int32_t exp2) { + int32_t maxIdx = static_cast((buffer).size()) - 1; + uint64_t backShift = + (static_cast(32 - static_cast(labs(exp2)))); + if (exp2 > 0) { // Shift to the right + uint64_t exp2Shift = (static_cast(exp2)); + for (int32_t i = (maxIdx + 1) - 1; i >= (1); i--) { + buffer[i] = ((buffer[i]) >> (exp2Shift)) | + ((buffer[i - 1] << backShift) & LOWER_32_BITS); + } + buffer[0] = + ((buffer[0]) >> (exp2Shift)); // Preserve the high half of buff[0] + } else if (exp2 < 0) { // Shift to the left + uint64_t exp2Shift = (static_cast(-exp2)); + buffer[0] = (static_cast( + (buffer[0] << exp2Shift) | + ((buffer[1]) >> (backShift)))); // Preserve the high half of buff[0] + for (int32_t i = (1); i < (maxIdx); i++) { + buffer[i] = + (static_cast(((buffer[i] << exp2Shift) & LOWER_32_BITS) | + ((buffer[i + 1]) >> (backShift)))); + } + buffer[maxIdx] = (buffer[maxIdx] << exp2Shift) & LOWER_32_BITS; + } +} +// Adds the summand to the idx'th word of the unpacked value stored in the +// buffer and propagates carry as necessary +// @param buff the buffer to add the summand to +// @param idx the index of the element to which the summand is to be added +// @param summand the summand to add to the idx'th element of the buffer +template +void QuadrupleBuilder::addToBuff(std::array& buff, + int32_t idx, + uint64_t summand) { + int32_t maxIdx = idx; + buff[maxIdx] = (static_cast( + buff[maxIdx] + summand)); // Big-endian, the lowest word + for (int32_t i = (maxIdx + 1) - 1; i >= (1); + i--) { // from the lowest word upwards, except the highest + if ((buff[i] & HIGHER_32_BITS) != 0LL) { + buff[i] &= LOWER_32_BITS; + buff[i - 1] += 1LL; + } else { + break; + } + } +} + +} // namespace util +} // namespace firestore +} // namespace firebase diff --git a/Firestore/core/src/util/quadruple_builder.h b/Firestore/core/src/util/quadruple_builder.h new file mode 100644 index 00000000000..d5ae34238c6 --- /dev/null +++ b/Firestore/core/src/util/quadruple_builder.h @@ -0,0 +1,98 @@ +// Copyright 2025 Google LLC +// Copyright 2021 M.Vokhmentsev +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#ifndef FIRESTORE_CORE_SRC_UTIL_QUADRUPLE_BUILDER_H_ +#define FIRESTORE_CORE_SRC_UTIL_QUADRUPLE_BUILDER_H_ + +#include +#include +#include +#include +#include +#include +#include + +namespace firebase { +namespace firestore { +namespace util { + +class QuadrupleBuilder { + public: + void parseDecimal(std::vector& digits, int64_t exp10) { + parse(digits, exp10); + } + // The fields containing the value of the instance + uint32_t exponent; + uint64_t mantHi; + uint64_t mantLo; + + private: + std::array buffer4x64B; + std::array buffer6x32A; + std::array buffer6x32B; + std::array buffer6x32C; + std::array buffer12x32; + void parse(std::vector& digits, int32_t exp10); + int32_t parseMantissa(std::vector& digits, + std::array& mantissa); + template + void divBuffBy10(std::array& buffer); + template + bool isEmpty(std::array& buffer); + int32_t addCarry(std::vector& digits); + double findBinaryExponent(int32_t exp10, std::array& mantissa); + double log2(double x); + void findBinaryMantissa(int32_t exp10, + double exp2, + std::array& mantissa); + void powerOfTwo(double exp, std::array& power); + template + void array_copy(std::array& source, + std::array& dest); + void multPacked3x64_AndAdjustExponent(std::array& factor1, + std::array& factor2, + std::array& result); + void multPacked3x64_simply(std::array& factor1, + std::array& factor2, + std::array& result); + template + int32_t correctPossibleUnderflow(std::array& mantissa); + template + bool isLessThanOne(std::array& buffer); + void multUnpacked6x32byPacked(std::array& factor1, + std::array& factor2, + std::array& product); + template + void multBuffBy10(std::array& buffer); + template + int32_t normalizeMant(std::array& mantissa); + template + int32_t roundUp(std::array& mantissa); + template + void pack_6x32_to_3x64(std::array& unpackedMant, + std::array& result); + void unpack_3x64_to_6x32(std::array& qd192, + std::array& buff_6x32); + template + void divBuffByPower2(std::array& buffer, int32_t exp2); + template + void addToBuff(std::array& buff, int32_t idx, uint64_t summand); +}; + +} // namespace util +} // namespace firestore +} // namespace firebase + +#endif \ No newline at end of file diff --git a/Firestore/core/test/unit/bundle/bundle_serializer_test.cc b/Firestore/core/test/unit/bundle/bundle_serializer_test.cc index 8d384de40ea..44c6bed3a7f 100644 --- a/Firestore/core/test/unit/bundle/bundle_serializer_test.cc +++ b/Firestore/core/test/unit/bundle/bundle_serializer_test.cc @@ -653,6 +653,16 @@ TEST_F(BundleSerializerTest, DecodesInt32Value) { VerifyFieldValueRoundtrip(object); } +TEST_F(BundleSerializerTest, DecodesDecimal128Value) { + ProtoValue decimal_value; + decimal_value.set_string_value("1.2e3"); + ProtoValue object; + object.mutable_map_value()->mutable_fields()->insert( + {model::kRawDecimal128TypeFieldValue, decimal_value}); + + VerifyFieldValueRoundtrip(object); +} + TEST_F(BundleSerializerTest, DecodesRegexValue) { ProtoValue pattern_value; ProtoValue options_value; diff --git a/Firestore/core/test/unit/index/index_value_writer_test.cc b/Firestore/core/test/unit/index/index_value_writer_test.cc index b66bd182a54..4341220998f 100644 --- a/Firestore/core/test/unit/index/index_value_writer_test.cc +++ b/Firestore/core/test/unit/index/index_value_writer_test.cc @@ -29,6 +29,7 @@ namespace { using testutil::BsonBinaryData; using testutil::BsonObjectId; using testutil::BsonTimestamp; +using testutil::Decimal128; using testutil::Int32; using testutil::MaxKey; using testutil::MinKey; @@ -296,6 +297,114 @@ 
TEST(IndexValueWriterTest, writeIndexValueSupportsLargestInt32) { EXPECT_EQ(actual_bytes, expected_bytes); } +TEST(IndexValueWriterTest, writeIndexValueSupportsDecimal128) { + // Value + auto value = Decimal128("1.2e3"); + + // Actual + IndexEncodingBuffer encoder; + WriteIndexValue(*value, encoder.ForKind(model::Segment::Kind::kAscending)); + auto& actual_bytes = encoder.GetEncodedBytes(); + + // Expected + IndexEncodingBuffer expected_encoder; + DirectionalIndexByteEncoder* index_byte_encoder = + expected_encoder.ForKind(model::Segment::Kind::kAscending); + index_byte_encoder->WriteLong(IndexType::kNumber); + // We currently store a 64-bit double representation in the client-side index. + index_byte_encoder->WriteDouble(1200.0); + index_byte_encoder->WriteInfinity(); + auto& expected_bytes = expected_encoder.GetEncodedBytes(); + + EXPECT_EQ(actual_bytes, expected_bytes); +} + +TEST(IndexValueWriterTest, writeIndexValueSupportsNegativeDecimal128) { + // Value + auto value = Decimal128("-1.2e3"); + + // Actual + IndexEncodingBuffer encoder; + WriteIndexValue(*value, encoder.ForKind(model::Segment::Kind::kAscending)); + auto& actual_bytes = encoder.GetEncodedBytes(); + + // Expected + IndexEncodingBuffer expected_encoder; + DirectionalIndexByteEncoder* index_byte_encoder = + expected_encoder.ForKind(model::Segment::Kind::kAscending); + index_byte_encoder->WriteLong(IndexType::kNumber); + // We currently store a 64-bit double representation in the client-side index. + index_byte_encoder->WriteDouble(-1200.0); + index_byte_encoder->WriteInfinity(); + auto& expected_bytes = expected_encoder.GetEncodedBytes(); + + EXPECT_EQ(actual_bytes, expected_bytes); +} + +TEST(IndexValueWriterTest, writeIndexValueSupportsNaNDecimal128) { + // Value + auto value = Decimal128("NaN"); + + // Actual + IndexEncodingBuffer encoder; + WriteIndexValue(*value, encoder.ForKind(model::Segment::Kind::kAscending)); + auto& actual_bytes = encoder.GetEncodedBytes(); + + // Expected + IndexEncodingBuffer expected_encoder; + DirectionalIndexByteEncoder* index_byte_encoder = + expected_encoder.ForKind(model::Segment::Kind::kAscending); + index_byte_encoder->WriteLong(IndexType::kNan); + index_byte_encoder->WriteInfinity(); + auto& expected_bytes = expected_encoder.GetEncodedBytes(); + + EXPECT_EQ(actual_bytes, expected_bytes); +} + +TEST(IndexValueWriterTest, writeIndexValueSupportsDecimal128Infinity) { + // Value + auto value = Decimal128("Infinity"); + + // Actual + IndexEncodingBuffer encoder; + WriteIndexValue(*value, encoder.ForKind(model::Segment::Kind::kAscending)); + auto& actual_bytes = encoder.GetEncodedBytes(); + + // Expected + IndexEncodingBuffer expected_encoder; + DirectionalIndexByteEncoder* index_byte_encoder = + expected_encoder.ForKind(model::Segment::Kind::kAscending); + index_byte_encoder->WriteLong(IndexType::kNumber); + // We currently store a 64-bit double representation in the client-side index. 
+ index_byte_encoder->WriteDouble(std::stod("Infinity")); + index_byte_encoder->WriteInfinity(); + auto& expected_bytes = expected_encoder.GetEncodedBytes(); + + EXPECT_EQ(actual_bytes, expected_bytes); +} + +TEST(IndexValueWriterTest, writeIndexValueSupportsDecimal128NegativeInfinity) { + // Value + auto value = Decimal128("-Infinity"); + + // Actual + IndexEncodingBuffer encoder; + WriteIndexValue(*value, encoder.ForKind(model::Segment::Kind::kAscending)); + auto& actual_bytes = encoder.GetEncodedBytes(); + + // Expected + IndexEncodingBuffer expected_encoder; + DirectionalIndexByteEncoder* index_byte_encoder = + expected_encoder.ForKind(model::Segment::Kind::kAscending); + index_byte_encoder->WriteLong(IndexType::kNumber); + // We currently store a 64-bit double representation in the client-side index. + index_byte_encoder->WriteDouble(std::stod("-Infinity")); + index_byte_encoder->WriteInfinity(); + auto& expected_bytes = expected_encoder.GetEncodedBytes(); + + EXPECT_EQ(actual_bytes, expected_bytes); +} + TEST(IndexValueWriterTest, writeIndexValueSupportsSmallestInt32) { // Value auto value = Int32(-2147483648); diff --git a/Firestore/core/test/unit/local/leveldb_index_manager_test.cc b/Firestore/core/test/unit/local/leveldb_index_manager_test.cc index 895d8acb2e0..560a4c66284 100644 --- a/Firestore/core/test/unit/local/leveldb_index_manager_test.cc +++ b/Firestore/core/test/unit/local/leveldb_index_manager_test.cc @@ -45,6 +45,7 @@ using testutil::BsonBinaryData; using testutil::BsonObjectId; using testutil::BsonTimestamp; using testutil::CollectionGroupQuery; +using testutil::Decimal128; using testutil::DeletedDoc; using testutil::Doc; using testutil::Filter; @@ -1276,6 +1277,161 @@ TEST_F(LevelDbIndexManagerTest, IndexInt32Fields) { }); } +TEST_F(LevelDbIndexManagerTest, IndexDecimal128Fields) { + persistence_->Run("TestIndexDecimal128Fields", [&]() { + index_manager_->Start(); + index_manager_->AddFieldIndex( + MakeFieldIndex("coll", "key", model::Segment::kAscending)); + + AddDoc("coll/doc1", Map("key", Decimal128("-Infinity"))); + AddDoc("coll/doc2", Map("key", Decimal128("-0.0"))); + AddDoc("coll/doc3", Map("key", Decimal128("0"))); + AddDoc("coll/doc4", Map("key", Decimal128("1.3e-3"))); + AddDoc("coll/doc5", Map("key", Decimal128("1.2e3"))); + AddDoc("coll/doc6", Map("key", Decimal128("Infinity"))); + AddDoc("coll/doc7", Map("key", Decimal128("NaN"))); + + auto base_query = Query("coll").AddingOrderBy(OrderBy("key")); + + { + SCOPED_TRACE("no filter"); + VerifyResults(base_query, + {"coll/doc7", "coll/doc1", "coll/doc2", "coll/doc3", + "coll/doc4", "coll/doc5", "coll/doc6"}); + } + { + SCOPED_TRACE("Query Decimal128 with EqualTo filter"); + auto query = + base_query.AddingFilter(Filter("key", "==", Decimal128("1200"))); + VerifyResults(query, {"coll/doc5"}); + } + { + SCOPED_TRACE("Query Decimal128 with NotEqualTo filter"); + auto query = + base_query.AddingFilter(Filter("key", "!=", Decimal128("0"))); + VerifyResults(query, {"coll/doc7", "coll/doc1", "coll/doc4", "coll/doc5", + "coll/doc6"}); + } + { + SCOPED_TRACE("Query Decimal128 with GreaterThanOrEqualTo filter"); + auto query = + base_query.AddingFilter(Filter("key", ">=", Decimal128("0.12e4"))); + VerifyResults(query, {"coll/doc5", "coll/doc6"}); + } + { + SCOPED_TRACE("Query Decimal128 with LessThanOrEqualTo filter"); + auto query = + base_query.AddingFilter(Filter("key", "<=", Decimal128("0.0"))); + VerifyResults(query, + {"coll/doc7", "coll/doc1", "coll/doc2", "coll/doc3"}); + } + { + SCOPED_TRACE("Query Decimal128 
with GreaterThan filter"); + auto query = + base_query.AddingFilter(Filter("key", ">", Decimal128("1200"))); + VerifyResults(query, {"coll/doc6"}); + } + { + SCOPED_TRACE("Query Decimal128 with LessThan filter"); + auto query = + base_query.AddingFilter(Filter("key", "<", Decimal128("-Infinity"))); + VerifyResults(query, {"coll/doc7"}); + } + { + SCOPED_TRACE( + "Query Decimal128 with GreaterThan filter and empty result set"); + auto query = + base_query.AddingFilter(Filter("key", ">", Decimal128("Infinity"))); + VerifyResults(query, {}); + } + { + SCOPED_TRACE( + "Query Decimal128 with LessThan filter and empty result set"); + auto query = + base_query.AddingFilter(Filter("key", "<", Decimal128("NaN"))); + VerifyResults(query, {}); + } + { + SCOPED_TRACE("Query Decimal128 with GreaterThan NaN filter"); + auto query = + base_query.AddingFilter(Filter("key", ">", Decimal128("NaN"))); + VerifyResults(query, {"coll/doc1", "coll/doc2", "coll/doc3", "coll/doc4", + "coll/doc5", "coll/doc6"}); + } + }); +} + +TEST_F(LevelDbIndexManagerTest, IndexDecimal128FieldsWithPrecisionLoss) { + persistence_->Run("TestIndexDecimal128Fields", [&]() { + index_manager_->Start(); + index_manager_->AddFieldIndex( + MakeFieldIndex("coll", "key", model::Segment::kAscending)); + + AddDoc("coll/doc1", + Map("key", + Decimal128("-0.1234567890123456789"))); // will be rounded to + // -0.12345678901234568 + AddDoc("coll/doc2", Map("key", Decimal128("0"))); + AddDoc("coll/doc3", + Map("key", + Decimal128("0.1234567890123456789"))); // will be rounded to + // 0.12345678901234568 + + auto base_query = Query("coll").AddingOrderBy(OrderBy("key")); + + { + SCOPED_TRACE("Query Decimal128 with EqualTo filter"); + auto query = base_query.AddingFilter( + Filter("key", "==", Decimal128("0.1234567890123456789"))); + VerifyResults(query, {"coll/doc3"}); + } + { + SCOPED_TRACE("Query Decimal128 with EqualTo filter with rounding error"); + // Mismatch behaviour caused by rounding error. Firestore fetches the doc3 + // from LevelDb as doc3 rounds to the same number, even though the actual + // number in doc3 is different. + auto query = base_query.AddingFilter( + Filter("key", "==", Decimal128("0.12345678901234568"))); + VerifyResults(query, {"coll/doc3"}); + } + + // Operations that doesn't go up to 17 decimal digits of precision wouldn't + // be affected by this rounding errors. 
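+    // (Illustrative note: a 64-bit double round-trips any decimal value with
+    // at most 15 significant digits, so the filter values used below, which
+    // need far fewer digits, are not affected by the index's double
+    // representation.)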
+ { + SCOPED_TRACE("Query Decimal128 with NotEqualTo filter"); + auto query = + base_query.AddingFilter(Filter("key", "!=", Decimal128("0.0"))); + VerifyResults(query, {"coll/doc1", "coll/doc3"}); + } + { + SCOPED_TRACE("Query Decimal128 with GreaterThanOrEqualTo filter"); + auto query = + base_query.AddingFilter(Filter("key", ">=", Decimal128("1.23e-1"))); + VerifyResults(query, {"coll/doc3"}); + } + { + SCOPED_TRACE("Query Decimal128 with LessThanOrEqualTo filter"); + auto query = + base_query.AddingFilter(Filter("key", "<=", Decimal128("-1.23e-1"))); + VerifyResults(query, {"coll/doc1"}); + } + { + SCOPED_TRACE( + "Query Decimal128 with GreaterThan filter and empty result set"); + auto query = + base_query.AddingFilter(Filter("key", ">", Decimal128("1.2e3"))); + VerifyResults(query, {}); + } + { + SCOPED_TRACE( + "Query Decimal128 with LessThan filter and empty result set"); + auto query = + base_query.AddingFilter(Filter("key", "<", Decimal128("-1.2e3"))); + VerifyResults(query, {}); + } + }); +} + TEST_F(LevelDbIndexManagerTest, IndexRegexFields) { persistence_->Run("TestIndexRegexFields", [&]() { index_manager_->Start(); @@ -1450,23 +1606,31 @@ TEST_F(LevelDbIndexManagerTest, IndexBsonTypesTogether) { MakeFieldIndex("coll", "key", model::Segment::kDescending)); AddDoc("coll/doc1", Map("key", MinKey())); - AddDoc("coll/doc2", Map("key", Int32(2))); - AddDoc("coll/doc3", Map("key", Int32(1))); - AddDoc("coll/doc4", Map("key", BsonTimestamp(1, 2))); - AddDoc("coll/doc5", Map("key", BsonTimestamp(1, 1))); - AddDoc("coll/doc6", Map("key", BsonBinaryData(1, {1, 2, 4}))); - AddDoc("coll/doc7", Map("key", BsonBinaryData(1, {1, 2, 3}))); - AddDoc("coll/doc8", Map("key", BsonObjectId("507f191e810c19729de860eb"))); - AddDoc("coll/doc9", Map("key", BsonObjectId("507f191e810c19729de860ea"))); - AddDoc("coll/doc10", Map("key", Regex("a", "m"))); - AddDoc("coll/doc11", Map("key", Regex("a", "i"))); - AddDoc("coll/doc12", Map("key", MaxKey())); + AddDoc("coll/doc2", Map("key", Decimal128("NaN"))); + AddDoc("coll/doc3", Map("key", Decimal128("-Infinity"))); + AddDoc("coll/doc4", Map("key", Decimal128("Infinity"))); + AddDoc("coll/doc5", Map("key", Decimal128("0"))); + AddDoc("coll/doc6", Map("key", Decimal128("-1.2e3"))); + AddDoc("coll/doc7", Map("key", Decimal128("2.3e-4"))); + AddDoc("coll/doc8", Map("key", Int32(2))); + AddDoc("coll/doc9", Map("key", Int32(1))); + AddDoc("coll/doc10", Map("key", BsonTimestamp(1, 2))); + AddDoc("coll/doc11", Map("key", BsonTimestamp(1, 1))); + AddDoc("coll/doc12", Map("key", BsonBinaryData(1, {1, 2, 4}))); + AddDoc("coll/doc13", Map("key", BsonBinaryData(1, {1, 2, 3}))); + AddDoc("coll/doc14", Map("key", BsonObjectId("507f191e810c19729de860eb"))); + AddDoc("coll/doc15", Map("key", BsonObjectId("507f191e810c19729de860ea"))); + AddDoc("coll/doc16", Map("key", Regex("a", "m"))); + AddDoc("coll/doc17", Map("key", Regex("a", "i"))); + AddDoc("coll/doc18", Map("key", MaxKey())); auto query = Query("coll").AddingOrderBy(OrderBy("key", "desc")); - VerifyResults(query, {"coll/doc12", "coll/doc10", "coll/doc11", "coll/doc8", - "coll/doc9", "coll/doc6", "coll/doc7", "coll/doc4", - "coll/doc5", "coll/doc2", "coll/doc3", "coll/doc1"}); + VerifyResults(query, {"coll/doc18", "coll/doc16", "coll/doc17", + "coll/doc14", "coll/doc15", "coll/doc12", + "coll/doc13", "coll/doc10", "coll/doc11", "coll/doc4", + "coll/doc8", "coll/doc9", "coll/doc7", "coll/doc5", + "coll/doc6", "coll/doc3", "coll/doc2", "coll/doc1"}); }); } @@ -1476,33 +1640,43 @@ TEST_F(LevelDbIndexManagerTest, 
IndexAllTypesTogether) { index_manager_->AddFieldIndex( MakeFieldIndex("coll", "key", model::Segment::kDescending)); - AddDoc("coll/a", Map("key", nullptr)); - AddDoc("coll/b", Map("key", MinKey())); - AddDoc("coll/c", Map("key", true)); - AddDoc("coll/d", Map("key", std::numeric_limits::quiet_NaN())); - AddDoc("coll/e", Map("key", Int32(1))); - AddDoc("coll/f", Map("key", 2.0)); - AddDoc("coll/g", Map("key", 3)); - AddDoc("coll/h", Map("key", Timestamp(100, 123456000))); - AddDoc("coll/i", Map("key", BsonTimestamp(1, 2))); - AddDoc("coll/j", Map("key", "string")); - AddDoc("coll/k", Map("key", BlobValue(0, 1, 255))); - AddDoc("coll/l", Map("key", BsonBinaryData(1, {1, 2, 3}))); - AddDoc("coll/m", Map("key", Ref("project", "coll/doc"))); - AddDoc("coll/n", Map("key", BsonObjectId("507f191e810c19729de860ea"))); - AddDoc("coll/o", Map("key", GeoPoint(0, 1))); - AddDoc("coll/p", Map("key", Regex("^foo", "i"))); - AddDoc("coll/q", Map("key", Array(1, 2))); - AddDoc("coll/r", Map("key", VectorType(1, 2))); - AddDoc("coll/s", Map("key", Map("a", 1))); - AddDoc("coll/t", Map("key", MaxKey())); + AddDoc("coll/doc1", Map("key", nullptr)); + AddDoc("coll/doc2", Map("key", MinKey())); + AddDoc("coll/doc3", Map("key", true)); + AddDoc("coll/doc4", Map("key", std::numeric_limits::quiet_NaN())); + AddDoc("coll/doc5", Map("key", Decimal128("NaN"))); + AddDoc("coll/doc6", Map("key", Decimal128("-Infinifty"))); + AddDoc("coll/doc7", Map("key", Decimal128("-1.2e-3"))); + AddDoc("coll/doc8", Map("key", Decimal128("0"))); + AddDoc("coll/doc9", Map("key", Int32(1))); + AddDoc("coll/doc10", Map("key", 2.0)); + AddDoc("coll/doc11", Map("key", 3)); + AddDoc("coll/doc12", Map("key", Decimal128("2.3e4"))); + AddDoc("coll/doc13", Map("key", Decimal128("Infinifty"))); + AddDoc("coll/doc14", Map("key", Timestamp(100, 123456000))); + AddDoc("coll/doc15", Map("key", BsonTimestamp(1, 2))); + AddDoc("coll/doc16", Map("key", "string")); + AddDoc("coll/doc17", Map("key", BlobValue(0, 1, 255))); + AddDoc("coll/doc18", Map("key", BsonBinaryData(1, {1, 2, 3}))); + AddDoc("coll/doc19", Map("key", Ref("project", "coll/doc"))); + AddDoc("coll/doc20", Map("key", BsonObjectId("507f191e810c19729de860ea"))); + AddDoc("coll/doc21", Map("key", GeoPoint(0, 1))); + AddDoc("coll/doc22", Map("key", Regex("^foo", "i"))); + AddDoc("coll/doc23", Map("key", Array(1, 2))); + AddDoc("coll/doc24", Map("key", VectorType(1, 2))); + AddDoc("coll/doc25", Map("key", Map("a", 1))); + AddDoc("coll/doc26", Map("key", MaxKey())); auto query = Query("coll").AddingOrderBy(OrderBy("key", "desc")); - VerifyResults(query, {"coll/t", "coll/s", "coll/r", "coll/q", "coll/p", - "coll/o", "coll/n", "coll/m", "coll/l", "coll/k", - "coll/j", "coll/i", "coll/h", "coll/g", "coll/f", - "coll/e", "coll/d", "coll/c", "coll/b", "coll/a"}); + VerifyResults( + query, + {"coll/doc26", "coll/doc25", "coll/doc24", "coll/doc23", "coll/doc22", + "coll/doc21", "coll/doc20", "coll/doc19", "coll/doc18", "coll/doc17", + "coll/doc16", "coll/doc15", "coll/doc14", "coll/doc13", "coll/doc12", + "coll/doc11", "coll/doc10", "coll/doc9", "coll/doc8", "coll/doc7", + "coll/doc6", "coll/doc5", "coll/doc4", "coll/doc3", "coll/doc2", + "coll/doc1"}); }); } diff --git a/Firestore/core/test/unit/local/leveldb_local_store_test.cc b/Firestore/core/test/unit/local/leveldb_local_store_test.cc index 8f53cbbee03..1203245cb56 100644 --- a/Firestore/core/test/unit/local/leveldb_local_store_test.cc +++ b/Firestore/core/test/unit/local/leveldb_local_store_test.cc @@ -42,6 +42,7 @@ using testutil::BlobValue; 
using testutil::BsonBinaryData; using testutil::BsonObjectId; using testutil::BsonTimestamp; +using testutil::Decimal128; using testutil::DeletedDoc; using testutil::DeleteMutation; using testutil::Doc; @@ -714,6 +715,188 @@ TEST_F(LevelDbLocalStoreTest, IndexesInt32) { FSTAssertQueryReturned("coll/doc1", "coll/doc2"); } +TEST_F(LevelDbLocalStoreTest, IndexesDecimal128) { + FieldIndex index = MakeFieldIndex("coll", 0, FieldIndex::InitialState(), + "key", model::Segment::Kind::kAscending); + ConfigureFieldIndexes({index}); + + WriteMutation(SetMutation("coll/doc1", Map("key", Decimal128("NaN")))); + WriteMutation(SetMutation("coll/doc2", Map("key", Decimal128("-Infinity")))); + WriteMutation(SetMutation("coll/doc3", Map("key", Decimal128("-1.2e3")))); + WriteMutation(SetMutation("coll/doc4", Map("key", Decimal128("0")))); + WriteMutation(SetMutation("coll/doc5", Map("key", Decimal128("2.3e-4")))); + WriteMutation(SetMutation("coll/doc6", Map("key", Decimal128("Infinity")))); + + BackfillIndexes(); + + core::Query query = + testutil::Query("coll").AddingOrderBy(OrderBy("key", "asc")); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 6, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}, + {Key("coll/doc2"), model::Mutation::Type::Set}, + {Key("coll/doc3"), model::Mutation::Type::Set}, + {Key("coll/doc4"), model::Mutation::Type::Set}, + {Key("coll/doc5"), model::Mutation::Type::Set}, + {Key("coll/doc6"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc1", "coll/doc2", "coll/doc3", "coll/doc4", + "coll/doc5", "coll/doc6"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "==", Decimal128("-1200"))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 1, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc3"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc3"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "!=", Decimal128("0.0"))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 5, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}, + {Key("coll/doc2"), model::Mutation::Type::Set}, + {Key("coll/doc3"), model::Mutation::Type::Set}, + {Key("coll/doc5"), model::Mutation::Type::Set}, + {Key("coll/doc6"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc1", "coll/doc2", "coll/doc3", "coll/doc5", + "coll/doc6"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", ">=", Decimal128("1e-5"))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc5"), model::Mutation::Type::Set}, + {Key("coll/doc6"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc5", "coll/doc6"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "<=", Decimal128("-1.2e3"))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 3, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}, + {Key("coll/doc2"), model::Mutation::Type::Set}, + {Key("coll/doc3"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc1", "coll/doc2", "coll/doc3"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", ">", Decimal128("Infinity"))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + 
FSTAssertOverlayTypes(OverlayTypeMap());
+  FSTAssertQueryReturned();
+
+  query = testutil::Query("coll").AddingFilter(
+      Filter("key", "<", Decimal128("NaN")));
+  ExecuteQuery(query);
+  FSTAssertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0);
+  FSTAssertOverlayTypes(OverlayTypeMap());
+  FSTAssertQueryReturned();
+
+  query = testutil::Query("coll").AddingFilter(
+      Filter("key", "in", Array(Decimal128("0"), Decimal128("2.3e-4"))));
+  ExecuteQuery(query);
+  FSTAssertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0);
+  FSTAssertOverlayTypes(
+      OverlayTypeMap({{Key("coll/doc4"), model::Mutation::Type::Set},
+                      {Key("coll/doc5"), model::Mutation::Type::Set}}));
+  FSTAssertQueryReturned("coll/doc4", "coll/doc5");
+}
+
+TEST_F(LevelDbLocalStoreTest, IndexesDecimal128WithPrecisionLoss) {
+  FieldIndex index = MakeFieldIndex("coll", 0, FieldIndex::InitialState(),
+                                    "key", model::Segment::Kind::kAscending);
+  ConfigureFieldIndexes({index});
+
+  WriteMutation(SetMutation(
+      "coll/doc1",
+      Map("key",
+          Decimal128("-0.1234567890123456789"))));  // will be rounded to
+                                                    // -0.12345678901234568
+  WriteMutation(SetMutation("coll/doc2", Map("key", Decimal128("0"))));
+  WriteMutation(SetMutation(
+      "coll/doc3",
+      Map("key", Decimal128("0.1234567890123456789"))));  // will be rounded to
+                                                          // 0.12345678901234568
+
+  BackfillIndexes();
+
+  core::Query query =
+      testutil::Query("coll").AddingOrderBy(OrderBy("key", "asc"));
+  ExecuteQuery(query);
+  FSTAssertOverlaysRead(/* byKey= */ 3, /* byCollection= */ 0);
+  FSTAssertOverlayTypes(
+      OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set},
+                      {Key("coll/doc2"), model::Mutation::Type::Set},
+                      {Key("coll/doc3"), model::Mutation::Type::Set}}));
+  FSTAssertQueryReturned("coll/doc1", "coll/doc2", "coll/doc3");
+
+  query = testutil::Query("coll").AddingFilter(
+      Filter("key", "==", Decimal128("0.1234567890123456789")));
+  ExecuteQuery(query);
+  FSTAssertOverlaysRead(/* byKey= */ 1, /* byCollection= */ 0);
+  FSTAssertOverlayTypes(
+      OverlayTypeMap({{Key("coll/doc3"), model::Mutation::Type::Set}}));
+  FSTAssertQueryReturned("coll/doc3");
+
+  // Mismatched behaviour caused by a rounding error: Firestore fetches doc3
+  // from LevelDB because its indexed value rounds to the same number, but
+  // doc3 is not present in the final query result.
+  query = testutil::Query("coll").AddingFilter(
+      Filter("key", "==", Decimal128("0.12345678901234568")));
+  ExecuteQuery(query);
+  FSTAssertOverlaysRead(/* byKey= */ 1, /* byCollection= */ 0);
+  FSTAssertOverlayTypes(
+      OverlayTypeMap({{Key("coll/doc3"), model::Mutation::Type::Set}}));
+  FSTAssertQueryReturned();
+
+  // Operations that don't go up to 17 decimal digits of precision are not
+  // affected by rounding errors.
+ + query = testutil::Query("coll").AddingFilter( + Filter("key", "!=", Decimal128("0"))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}, + {Key("coll/doc3"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc1", "coll/doc3"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", ">=", Decimal128("1.23e-1"))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 1, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc3"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc3"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "<=", Decimal128("-1.23e-1"))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 1, /* byCollection= */ 0); + FSTAssertOverlayTypes( + OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}})); + FSTAssertQueryReturned("coll/doc1"); + + query = testutil::Query("coll").AddingFilter( + Filter("key", ">", Decimal128("1.2e3"))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + FSTAssertOverlayTypes(OverlayTypeMap()); + FSTAssertQueryReturned(); + + query = testutil::Query("coll").AddingFilter( + Filter("key", "<", Decimal128("-1.2e3"))); + ExecuteQuery(query); + FSTAssertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + FSTAssertOverlayTypes(OverlayTypeMap()); + FSTAssertQueryReturned(); +} + TEST_F(LevelDbLocalStoreTest, IndexesMinKey) { FieldIndex index = MakeFieldIndex("coll", 0, FieldIndex::InitialState(), "key", model::Segment::Kind::kAscending); @@ -901,13 +1084,19 @@ TEST_F(LevelDbLocalStoreTest, IndexesAllBsonTypesTogether) { WriteMutation(SetMutation("coll/doc10", Map("key", Regex("^bar", "m")))); WriteMutation(SetMutation("coll/doc11", Map("key", Regex("^bar", "i")))); WriteMutation(SetMutation("coll/doc12", Map("key", MaxKey()))); + WriteMutation(SetMutation("coll/doc13", Map("key", Decimal128("NaN")))); + WriteMutation(SetMutation("coll/doc14", Map("key", Decimal128("-Infinity")))); + WriteMutation(SetMutation("coll/doc15", Map("key", Decimal128("Infinity")))); + WriteMutation(SetMutation("coll/doc16", Map("key", Decimal128("0")))); + WriteMutation(SetMutation("coll/doc17", Map("key", Decimal128("-1.2e-3")))); + WriteMutation(SetMutation("coll/doc18", Map("key", Decimal128("1.2e3")))); BackfillIndexes(); core::Query query = testutil::Query("coll").AddingOrderBy(OrderBy("key", "desc")); ExecuteQuery(query); - FSTAssertOverlaysRead(/* byKey= */ 12, /* byCollection= */ 0); + FSTAssertOverlaysRead(/* byKey= */ 18, /* byCollection= */ 0); FSTAssertOverlayTypes( OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}, {Key("coll/doc2"), model::Mutation::Type::Set}, @@ -920,11 +1109,19 @@ TEST_F(LevelDbLocalStoreTest, IndexesAllBsonTypesTogether) { {Key("coll/doc9"), model::Mutation::Type::Set}, {Key("coll/doc10"), model::Mutation::Type::Set}, {Key("coll/doc11"), model::Mutation::Type::Set}, - {Key("coll/doc12"), model::Mutation::Type::Set}})); + {Key("coll/doc12"), model::Mutation::Type::Set}, + {Key("coll/doc13"), model::Mutation::Type::Set}, + {Key("coll/doc14"), model::Mutation::Type::Set}, + {Key("coll/doc15"), model::Mutation::Type::Set}, + {Key("coll/doc16"), model::Mutation::Type::Set}, + {Key("coll/doc17"), model::Mutation::Type::Set}, + {Key("coll/doc18"), model::Mutation::Type::Set}})); FSTAssertQueryReturned("coll/doc12", "coll/doc10", "coll/doc11", "coll/doc8", 
"coll/doc9", "coll/doc6", "coll/doc7", "coll/doc4", - "coll/doc5", "coll/doc2", "coll/doc3", "coll/doc1"); + "coll/doc5", "coll/doc15", "coll/doc18", "coll/doc2", + "coll/doc3", "coll/doc16", "coll/doc17", "coll/doc14", + "coll/doc13", "coll/doc1"); } TEST_F(LevelDbLocalStoreTest, IndexesAllTypesTogether) { @@ -936,35 +1133,41 @@ TEST_F(LevelDbLocalStoreTest, IndexesAllTypesTogether) { WriteMutation(SetMutation("coll/doc2", Map("key", MinKey()))); WriteMutation(SetMutation("coll/doc3", Map("key", true))); WriteMutation(SetMutation("coll/doc4", Map("key", NAN))); - WriteMutation(SetMutation("coll/doc5", Map("key", Int32(1)))); - WriteMutation(SetMutation("coll/doc6", Map("key", 2.0))); - WriteMutation(SetMutation("coll/doc7", Map("key", 3L))); + WriteMutation(SetMutation("coll/doc5", Map("key", Decimal128("NaN")))); + WriteMutation(SetMutation("coll/doc6", Map("key", Decimal128("-Infinity")))); + WriteMutation(SetMutation("coll/doc7", Map("key", Decimal128("-1.2e-3")))); + WriteMutation(SetMutation("coll/doc8", Map("key", Decimal128("0")))); + WriteMutation(SetMutation("coll/doc9", Map("key", Int32(1)))); + WriteMutation(SetMutation("coll/doc10", Map("key", 2.0))); + WriteMutation(SetMutation("coll/doc11", Map("key", 3L))); + WriteMutation(SetMutation("coll/doc12", Map("key", Decimal128("1.2e3")))); + WriteMutation(SetMutation("coll/doc13", Map("key", Decimal128("Infinity")))); WriteMutation( - SetMutation("coll/doc8", Map("key", Timestamp(100, 123456000)))); - WriteMutation(SetMutation("coll/doc9", Map("key", BsonTimestamp(1, 2)))); - WriteMutation(SetMutation("coll/doc10", Map("key", "string"))); - WriteMutation(SetMutation("coll/doc11", Map("key", BlobValue(1, 2, 3)))); + SetMutation("coll/doc14", Map("key", Timestamp(100, 123456000)))); + WriteMutation(SetMutation("coll/doc15", Map("key", BsonTimestamp(1, 2)))); + WriteMutation(SetMutation("coll/doc16", Map("key", "string"))); + WriteMutation(SetMutation("coll/doc17", Map("key", BlobValue(1, 2, 3)))); WriteMutation( - SetMutation("coll/doc12", Map("key", BsonBinaryData(1, {1, 2, 3})))); + SetMutation("coll/doc18", Map("key", BsonBinaryData(1, {1, 2, 3})))); WriteMutation( - SetMutation("coll/doc13", Map("key", Ref("project/db", "col/doc")))); + SetMutation("coll/doc19", Map("key", Ref("project/db", "col/doc")))); WriteMutation(SetMutation( - "coll/doc14", Map("key", BsonObjectId("507f191e810c19729de860ea")))); - WriteMutation(SetMutation("coll/doc15", Map("key", GeoPoint(1, 2)))); - WriteMutation(SetMutation("coll/doc16", Map("key", Regex("^bar", "m")))); - WriteMutation(SetMutation("coll/doc17", Map("key", Array(2L, "foo")))); + "coll/doc20", Map("key", BsonObjectId("507f191e810c19729de860ea")))); + WriteMutation(SetMutation("coll/doc21", Map("key", GeoPoint(1, 2)))); + WriteMutation(SetMutation("coll/doc22", Map("key", Regex("^bar", "m")))); + WriteMutation(SetMutation("coll/doc23", Map("key", Array(2L, "foo")))); WriteMutation( - SetMutation("coll/doc18", Map("key", VectorType(1.0, 2.0, 3.0)))); + SetMutation("coll/doc24", Map("key", VectorType(1.0, 2.0, 3.0)))); WriteMutation( - SetMutation("coll/doc19", Map("key", Map("bar", 1L, "foo", 2L)))); - WriteMutation(SetMutation("coll/doc20", Map("key", MaxKey()))); + SetMutation("coll/doc25", Map("key", Map("bar", 1L, "foo", 2L)))); + WriteMutation(SetMutation("coll/doc26", Map("key", MaxKey()))); BackfillIndexes(); core::Query query = testutil::Query("coll").AddingOrderBy(OrderBy("key", "asc")); ExecuteQuery(query); - FSTAssertOverlaysRead(/* byKey= */ 20, /* byCollection= */ 0); + 
FSTAssertOverlaysRead(/* byKey= */ 26, /* byCollection= */ 0); FSTAssertOverlayTypes( OverlayTypeMap({{Key("coll/doc1"), model::Mutation::Type::Set}, {Key("coll/doc2"), model::Mutation::Type::Set}, @@ -985,13 +1188,21 @@ TEST_F(LevelDbLocalStoreTest, IndexesAllTypesTogether) { {Key("coll/doc17"), model::Mutation::Type::Set}, {Key("coll/doc18"), model::Mutation::Type::Set}, {Key("coll/doc19"), model::Mutation::Type::Set}, - {Key("coll/doc20"), model::Mutation::Type::Set}})); + {Key("coll/doc20"), model::Mutation::Type::Set}, + {Key("coll/doc21"), model::Mutation::Type::Set}, + {Key("coll/doc22"), model::Mutation::Type::Set}, + {Key("coll/doc23"), model::Mutation::Type::Set}, + {Key("coll/doc24"), model::Mutation::Type::Set}, + {Key("coll/doc25"), model::Mutation::Type::Set}, + {Key("coll/doc26"), model::Mutation::Type::Set}})); - FSTAssertQueryReturned( - "coll/doc1", "coll/doc2", "coll/doc3", "coll/doc4", "coll/doc5", - "coll/doc6", "coll/doc7", "coll/doc8", "coll/doc9", "coll/doc10", - "coll/doc11", "coll/doc12", "coll/doc13", "coll/doc14", "coll/doc15", - "coll/doc16", "coll/doc17", "coll/doc18", "coll/doc19", "coll/doc20"); + FSTAssertQueryReturned("coll/doc1", "coll/doc2", "coll/doc3", "coll/doc4", + "coll/doc5", "coll/doc6", "coll/doc7", "coll/doc8", + "coll/doc9", "coll/doc10", "coll/doc11", "coll/doc12", + "coll/doc13", "coll/doc14", "coll/doc15", "coll/doc16", + "coll/doc17", "coll/doc18", "coll/doc19", "coll/doc20", + "coll/doc21", "coll/doc22", "coll/doc23", "coll/doc24", + "coll/doc25", "coll/doc26"); } TEST_F(LevelDbLocalStoreTest, IndexesServerTimestamps) { diff --git a/Firestore/core/test/unit/model/document_test.cc b/Firestore/core/test/unit/model/document_test.cc index 31e7aec0c07..afa680087c4 100644 --- a/Firestore/core/test/unit/model/document_test.cc +++ b/Firestore/core/test/unit/model/document_test.cc @@ -28,6 +28,7 @@ namespace model { using testutil::BsonBinaryData; using testutil::BsonObjectId; using testutil::BsonTimestamp; +using testutil::Decimal128; using testutil::DeletedDoc; using testutil::Doc; using testutil::Field; @@ -81,8 +82,9 @@ TEST(DocumentTest, ExtractsFields) { TEST(DocumentTest, CanContainBsonTypes) { auto data = WrapObject( Map("minKey", MinKey(), "maxKey", MaxKey(), "regex", Regex("^foo", "i"), - "int32", Int32(1234), "objectId", BsonObjectId("foo"), "timestamp", - BsonTimestamp(123, 456), "binary", BsonBinaryData(128, {7, 8, 9}))); + "int32", Int32(1234), "decimal128", Decimal128("1.234e2"), "objectId", + BsonObjectId("foo"), "timestamp", BsonTimestamp(123, 456), "binary", + BsonBinaryData(128, {7, 8, 9}))); auto doc = MutableDocument::FoundDocument(Key("col/doc"), Version(1), data); @@ -92,6 +94,7 @@ TEST(DocumentTest, CanContainBsonTypes) { EXPECT_EQ(doc.field(Field("maxKey")), *MaxKey()); EXPECT_EQ(doc.field(Field("regex")), *Regex("^foo", "i")); EXPECT_EQ(doc.field(Field("int32")), *Int32(1234)); + EXPECT_EQ(doc.field(Field("decimal128")), *Decimal128("1.234e2")); EXPECT_EQ(doc.field(Field("objectId")), *BsonObjectId("foo")); EXPECT_EQ(doc.field(Field("timestamp")), *BsonTimestamp(123, 456)); EXPECT_EQ(doc.field(Field("binary")), *BsonBinaryData(128, {7, 8, 9})); diff --git a/Firestore/core/test/unit/model/object_value_test.cc b/Firestore/core/test/unit/model/object_value_test.cc index 594322fdc11..ac32a000bf2 100644 --- a/Firestore/core/test/unit/model/object_value_test.cc +++ b/Firestore/core/test/unit/model/object_value_test.cc @@ -35,6 +35,7 @@ using testutil::BsonBinaryData; using testutil::BsonObjectId; using testutil::BsonTimestamp; 
using testutil::DbId; +using testutil::Decimal128; using testutil::Field; using testutil::Int32; using testutil::Map; @@ -53,8 +54,9 @@ TEST_F(ObjectValueTest, ExtractsFields) { ObjectValue value = WrapObject( "foo", Map("a", 1, "b", true, "c", "string"), "bson", Map("minKey", MinKey(), "maxKey", MaxKey(), "regex", Regex("^foo", "i"), - "int32", Int32(1234), "objectId", BsonObjectId("foo"), "timestamp", - BsonTimestamp(123, 456), "binary", BsonBinaryData(128, {7, 8, 9}))); + "int32", Int32(1234), "decimal128", Decimal128("1.234e5"), "objectId", + BsonObjectId("foo"), "timestamp", BsonTimestamp(123, 456), "binary", + BsonBinaryData(128, {7, 8, 9}))); ASSERT_EQ(google_firestore_v1_Value_map_value_tag, value.Get(Field("foo"))->which_value_type); @@ -62,12 +64,12 @@ TEST_F(ObjectValueTest, ExtractsFields) { EXPECT_EQ(*Value(1), *value.Get(Field("foo.a"))); EXPECT_EQ(*Value(true), *value.Get(Field("foo.b"))); EXPECT_EQ(*Value("string"), *value.Get(Field("foo.c"))); - EXPECT_EQ( - *Value(Map("minKey", MinKey(), "maxKey", MaxKey(), "regex", - Regex("^foo", "i"), "int32", Int32(1234), "objectId", - BsonObjectId("foo"), "timestamp", BsonTimestamp(123, 456), - "binary", BsonBinaryData(128, {7, 8, 9}))), - *value.Get(Field("bson"))); + EXPECT_EQ(*Value(Map("minKey", MinKey(), "maxKey", MaxKey(), "regex", + Regex("^foo", "i"), "int32", Int32(1234), "decimal128", + Decimal128("1.234e5"), "objectId", BsonObjectId("foo"), + "timestamp", BsonTimestamp(123, 456), "binary", + BsonBinaryData(128, {7, 8, 9}))), + *value.Get(Field("bson"))); EXPECT_EQ(nullopt, value.Get(Field("foo.a.b"))); EXPECT_EQ(nullopt, value.Get(Field("bar"))); EXPECT_EQ(nullopt, value.Get(Field("bar.a"))); @@ -79,15 +81,17 @@ TEST_F(ObjectValueTest, ExtractsFieldMask) { Map("a", 1, "b", true, "c", "string", "nested", Map("d", "e")), "emptymap", Map(), "bson", Value(Map("minKey", MinKey(), "maxKey", MaxKey(), "regex", - Regex("^foo", "i"), "int32", Int32(1234), "objectId", - BsonObjectId("foo"), "timestamp", BsonTimestamp(123, 456), - "binary", BsonBinaryData(128, {7, 8, 9})))); - - FieldMask expected_mask = FieldMask( - {Field("a"), Field("Map.a"), Field("Map.b"), Field("Map.c"), - Field("Map.nested.d"), Field("emptymap"), Field("bson.minKey"), - Field("bson.maxKey"), Field("bson.regex"), Field("bson.int32"), - Field("bson.objectId"), Field("bson.timestamp"), Field("bson.binary")}); + Regex("^foo", "i"), "int32", Int32(1234), "decimal128", + Decimal128("1.234e5"), "objectId", BsonObjectId("foo"), + "timestamp", BsonTimestamp(123, 456), "binary", + BsonBinaryData(128, {7, 8, 9})))); + + FieldMask expected_mask = + FieldMask({Field("a"), Field("Map.a"), Field("Map.b"), Field("Map.c"), + Field("Map.nested.d"), Field("emptymap"), Field("bson.minKey"), + Field("bson.maxKey"), Field("bson.regex"), Field("bson.int32"), + Field("bson.decimal128"), Field("bson.objectId"), + Field("bson.timestamp"), Field("bson.binary")}); FieldMask actual_mask = value.ToFieldMask(); EXPECT_EQ(expected_mask, actual_mask); @@ -363,15 +367,17 @@ TEST_F(ObjectValueTest, CanHandleBsonTypesInObjectValue) { object_value.Set(Field("maxKey"), MaxKey()); object_value.Set(Field("regex"), Regex("^foo", "i")); object_value.Set(Field("int32"), Int32(1234)); + object_value.Set(Field("decimal128"), Decimal128("1.2e3")); object_value.Set(Field("objectId"), BsonObjectId("foo")); object_value.Set(Field("timestamp"), BsonTimestamp(123, 456)); object_value.Set(Field("binary"), BsonBinaryData(128, {7, 8, 9})); EXPECT_EQ( WrapObject(Map("minKey", MinKey(), "maxKey", MaxKey(), "regex", 
- Regex("^foo", "i"), "int32", Int32(1234), "objectId", - BsonObjectId("foo"), "timestamp", BsonTimestamp(123, 456), - "binary", BsonBinaryData(128, {7, 8, 9}))), + Regex("^foo", "i"), "int32", Int32(1234), "decimal128", + Decimal128("1.2e3"), "objectId", BsonObjectId("foo"), + "timestamp", BsonTimestamp(123, 456), "binary", + BsonBinaryData(128, {7, 8, 9}))), object_value); // Overwrite existing fields @@ -392,9 +398,9 @@ TEST_F(ObjectValueTest, CanHandleBsonTypesInObjectValue) { EXPECT_EQ( WrapObject(Map( "minKey", MinKey(), "maxKey", MaxKey(), "regex", Regex("^baz", "g"), - "int32", Int32(1234), "objectId", BsonObjectId("new-foo-value"), - "timestamp", BsonTimestamp(123, 456), "binary", - BsonBinaryData(128, {7, 8, 9}), "foo", + "int32", Int32(1234), "decimal128", Decimal128("1.2e3"), "objectId", + BsonObjectId("new-foo-value"), "timestamp", BsonTimestamp(123, 456), + "binary", BsonBinaryData(128, {7, 8, 9}), "foo", Map("regex2", Regex("^bar", "x"), "timestamp", BsonTimestamp(2, 1)))), object_value); } diff --git a/Firestore/core/test/unit/model/value_util_test.cc b/Firestore/core/test/unit/model/value_util_test.cc index 4f528e78575..048c4cdfbb0 100644 --- a/Firestore/core/test/unit/model/value_util_test.cc +++ b/Firestore/core/test/unit/model/value_util_test.cc @@ -44,6 +44,7 @@ using testutil::BsonBinaryData; using testutil::BsonObjectId; using testutil::BsonTimestamp; using testutil::DbId; +using testutil::Decimal128; using testutil::Int32; using testutil::kCanonicalNanBits; using testutil::Key; @@ -222,6 +223,10 @@ TEST(FieldValueTest, ValueHelpers) { ASSERT_EQ(GetTypeOrder(*int32_value), TypeOrder::kNumber); ASSERT_EQ(DetectMapType(*int32_value), MapType::kInt32); + auto decimal128_value = Decimal128("1.2e3"); + ASSERT_EQ(GetTypeOrder(*decimal128_value), TypeOrder::kNumber); + ASSERT_EQ(DetectMapType(*decimal128_value), MapType::kDecimal128); + auto bson_object_id_value = BsonObjectId("foo"); ASSERT_EQ(GetTypeOrder(*bson_object_id_value), TypeOrder::kBsonObjectId); ASSERT_EQ(DetectMapType(*bson_object_id_value), MapType::kBsonObjectId); @@ -251,6 +256,35 @@ TEST(FieldValueTest, CanonicalBitsAreCanonical) { } #endif // __APPLE__ +TEST_F(ValueUtilTest, Decimal128Comparison) { + EXPECT_EQ(Compare(*Decimal128("NaN"), *Value(std::nan("1"))), + ComparisonResult::Same); + EXPECT_EQ(Compare(*Decimal128("NaN"), *Decimal128("NaN")), + ComparisonResult::Same); + EXPECT_EQ(Compare(*Decimal128("NaN"), *Decimal128("-Infinity")), + ComparisonResult::Ascending); + EXPECT_EQ(Compare(*Decimal128("NaN"), *Decimal128("+Infinity")), + ComparisonResult::Ascending); + EXPECT_EQ(Compare(*Decimal128("NaN"), *Decimal128("-1.2e-3")), + ComparisonResult::Ascending); + EXPECT_EQ(Compare(*Decimal128("1"), *Decimal128("2e-4")), + ComparisonResult::Descending); + EXPECT_EQ(Compare(*Decimal128("-1"), *Decimal128("-2e-4")), + ComparisonResult::Ascending); + EXPECT_EQ(Compare(*Decimal128("-0.0"), *Decimal128("0.0")), + ComparisonResult::Same); + EXPECT_EQ(Compare(*Decimal128("-0.0"), *Decimal128("+0.0")), + ComparisonResult::Same); + EXPECT_EQ(Compare(*Decimal128("-0"), *Decimal128("0")), + ComparisonResult::Same); + EXPECT_EQ(Compare(*Decimal128("1500e-3"), *Value(1.5F)), + ComparisonResult::Same); + EXPECT_EQ(Compare(*Decimal128("-1000e-3"), *Value(-1L)), + ComparisonResult::Same); + EXPECT_EQ(Compare(*Decimal128("3.4e-5"), *Value(2.0F)), + ComparisonResult::Ascending); +} + TEST_F(ValueUtilTest, Equality) { // Create a matrix that defines an equality group. 
The outer vector has
   // multiple rows and each row can have an arbitrary number of entries.
@@ -265,6 +299,8 @@ TEST_F(ValueUtilTest, Equality) {
   Add(equals_group, std::numeric_limits<double>::quiet_NaN(),
       ToDouble(kCanonicalNanBits), ToDouble(kAlternateNanBits), std::nan("1"),
       std::nan("2"));
+
+  Add(equals_group, Decimal128("-Infinity"));
   // -0.0 and 0.0 compare the same but are not equal.
   Add(equals_group, -0.0);
   Add(equals_group, 0.0);
@@ -272,8 +308,11 @@
   // Doubles and Longs aren't equal (even though they compare same).
   Add(equals_group, 1.0, 1.0);
   Add(equals_group, 1.1, 1.1);
+  Add(equals_group, Decimal128("-1.2"), Decimal128("-12e-1"));
   Add(equals_group, Int32(-1), Int32(-1));
   Add(equals_group, Int32(1), Int32(1));
+  Add(equals_group, Decimal128("1.2"), Decimal128("12e-1"));
+  Add(equals_group, Decimal128("Infinity"));
   Add(equals_group, BlobValue(0, 1, 1));
   Add(equals_group, BlobValue(0, 1));
   Add(equals_group, "string", "string");
@@ -343,18 +382,19 @@ TEST_F(ValueUtilTest, StrictOrdering) {
   // numbers
   Add(comparison_groups, DeepClone(MinNumber()));
-  Add(comparison_groups, -1e20);
+  Add(comparison_groups, -1e20, Decimal128("-1e20"));
   Add(comparison_groups, std::numeric_limits<int64_t>::min());
   Add(comparison_groups, -0.1);
   // Zeros all compare the same.
-  Add(comparison_groups, -0.0, 0.0, 0L, Int32(0));
+  Add(comparison_groups, -0.0, 0.0, 0L, Int32(0), Decimal128("0"),
+      Decimal128("-0.0"));
   Add(comparison_groups, 0.1);
   // Doubles, longs, and Int32 Compare() the same.
-  Add(comparison_groups, 1.0, 1L, Int32(1));
-  Add(comparison_groups, Int32(2));
+  Add(comparison_groups, 1.0, 1L, Int32(1), Decimal128("1.0e0"));
+  Add(comparison_groups, Int32(2), Decimal128("0.2e1"));
   Add(comparison_groups, Int32(2147483647));
   Add(comparison_groups, std::numeric_limits<int64_t>::max());
-  Add(comparison_groups, 1e20);
+  Add(comparison_groups, 1e20, Decimal128("1e20"));

   // dates
   Add(comparison_groups, DeepClone(MinTimestamp()));
@@ -500,18 +540,20 @@ TEST_F(ValueUtilTest, RelaxedOrdering) {
   // numbers
   Add(comparison_groups, DeepClone(MinNumber()));
   Add(comparison_groups, DeepClone(MinNumber()));
-  Add(comparison_groups, -1e20);
+  Add(comparison_groups, -1e20, Decimal128("-1e20"));
   Add(comparison_groups, std::numeric_limits<int64_t>::min());
   Add(comparison_groups, -0.1);
   // Zeros all compare the same.
-  Add(comparison_groups, -0.0, 0.0, 0L, Int32(0));
+  Add(comparison_groups, -0.0, 0.0, 0L, Int32(0), Decimal128("-0.0"),
+      Decimal128("0.0"));
   Add(comparison_groups, 0.1);
   // Doubles and longs Compare() the same.
-  Add(comparison_groups, 1.0, 1L, Int32(1));
-  Add(comparison_groups, Int32(2));
+  Add(comparison_groups, 1.0, 1L, Int32(1), Decimal128("100.0e-2"),
+      Decimal128("1"));
+  Add(comparison_groups, Int32(2), Decimal128("2"));
   Add(comparison_groups, Int32(2147483647));
   Add(comparison_groups, std::numeric_limits<int64_t>::max());
-  Add(comparison_groups, 1e20);
+  Add(comparison_groups, 1e20, Decimal128("1e20"));

   Add(comparison_groups, DeepClone(MinTimestamp()));
   Add(comparison_groups, DeepClone(MinTimestamp()));
@@ -657,7 +699,11 @@ TEST_F(ValueUtilTest, ComputesLowerBound) {
   // Numbers
   Add(groups, GetLowerBoundMessage(Value(0.0)), GetLowerBoundMessage(Value(0L)),
-      GetLowerBoundMessage(Int32(0)), std::nan(""), DeepClone(MinNumber()));
+      GetLowerBoundMessage(Decimal128("0.0")),
+      GetLowerBoundMessage(Decimal128("-Infinity")),
+      GetLowerBoundMessage(Decimal128("Infinity")),
+      GetLowerBoundMessage(Decimal128("NaN")), GetLowerBoundMessage(Int32(0)),
+      std::nan(""), DeepClone(MinNumber()));
   Add(groups, INT_MIN);

   // Timestamps
@@ -753,6 +799,11 @@ TEST_F(ValueUtilTest, ComputesUpperBound) {
   Add(groups, INT_MAX);
   Add(groups, GetUpperBoundMessage(Value(INT_MAX)),
       GetUpperBoundMessage(Value(0L)), GetUpperBoundMessage(Int32(0)),
+      GetUpperBoundMessage(Decimal128("0")),
+      GetUpperBoundMessage(Decimal128("NaN")),
+      GetUpperBoundMessage(Decimal128("Infinity")),
+      GetUpperBoundMessage(Decimal128("-Infinity")),
+      GetUpperBoundMessage(Decimal128("2.0e5")),
       GetUpperBoundMessage(Value(std::nan(""))));

   // Timestamps
@@ -839,6 +890,7 @@ TEST_F(ValueUtilTest, CanonicalId) {
   VerifyCanonicalId(MaxKey(), "{__max__:null}");
   VerifyCanonicalId(Regex("^foo", "x"), "{__regex__:{pattern:^foo,options:x}}");
   VerifyCanonicalId(Int32(123), "{__int__:123}");
+  VerifyCanonicalId(Decimal128("1.2e3"), "{__decimal128__:1.2e3}");
   VerifyCanonicalId(BsonObjectId("foo"), "{__oid__:foo}");
   VerifyCanonicalId(BsonTimestamp(1, 2),
                     "{__request_timestamp__:{seconds:1,increment:2}}");
diff --git a/Firestore/core/test/unit/remote/serializer_test.cc b/Firestore/core/test/unit/remote/serializer_test.cc
index 5bf758098b6..389644dec6b 100644
--- a/Firestore/core/test/unit/remote/serializer_test.cc
+++ b/Firestore/core/test/unit/remote/serializer_test.cc
@@ -115,6 +115,7 @@ using testutil::BsonBinaryData;
 using testutil::BsonObjectId;
 using testutil::BsonTimestamp;
 using testutil::Bytes;
+using testutil::Decimal128;
 using testutil::DeletedDoc;
 using testutil::Doc;
 using testutil::Filter;
@@ -878,6 +879,17 @@ TEST_F(SerializerTest, EncodesInt32Value) {
   ExpectRoundTrip(model, proto, TypeOrder::kNumber);
 }

+TEST_F(SerializerTest, EncodesDecimal128Value) {
+  Message<google_firestore_v1_Value> model = Decimal128("1.2e3");
+
+  v1::Value proto;
+  google::protobuf::Map<std::string, v1::Value>* fields =
+      proto.mutable_map_value()->mutable_fields();
+  (*fields)["__decimal128__"] = ValueProto("1.2e3");
+
+  ExpectRoundTrip(model, proto, TypeOrder::kNumber);
+}
+
 TEST_F(SerializerTest, EncodesBsonObjectId) {
   Message<google_firestore_v1_Value> model = BsonObjectId("foo");

diff --git a/Firestore/core/test/unit/testutil/testutil.cc b/Firestore/core/test/unit/testutil/testutil.cc
index 7709abd33a3..a34458f1475 100644
--- a/Firestore/core/test/unit/testutil/testutil.cc
+++ b/Firestore/core/test/unit/testutil/testutil.cc
@@ -209,6 +209,10 @@ nanopb::Message<google_firestore_v1_Value> Int32(int32_t value) {
   return Map("__int__", Value(value));
 }

+nanopb::Message<google_firestore_v1_Value> Decimal128(std::string value) {
+  return Map("__decimal128__", Value(value));
+}
+
 nanopb::Message<google_firestore_v1_Value> BsonObjectId(std::string oid) {
   return Map("__oid__", Value(oid));
 }

diff --git
a/Firestore/core/test/unit/testutil/testutil.h b/Firestore/core/test/unit/testutil/testutil.h
index dd3924434dc..be3e463bca2 100644
--- a/Firestore/core/test/unit/testutil/testutil.h
+++ b/Firestore/core/test/unit/testutil/testutil.h
@@ -299,6 +299,7 @@ nanopb::Message<google_firestore_v1_Value> MaxKey();
 nanopb::Message<google_firestore_v1_Value> Regex(std::string pattern,
                                                  std::string options);
 nanopb::Message<google_firestore_v1_Value> Int32(int32_t value);
+nanopb::Message<google_firestore_v1_Value> Decimal128(std::string value);
 nanopb::Message<google_firestore_v1_Value> BsonObjectId(std::string oid);
 nanopb::Message<google_firestore_v1_Value> BsonTimestamp(uint32_t seconds,
                                                          uint32_t increment);

From c2f9b80bb0fe6141c2f29c8266be6c5520bd66c3 Mon Sep 17 00:00:00 2001
From: Ehsan Nasiri
Date: Tue, 1 Jul 2025 15:51:55 -0700
Subject: [PATCH 15/16] allow `init` for minkey/maxkey needed by
 *+Codable.swift.

---
 Firestore/Source/Public/FirebaseFirestore/FIRMaxKey.h | 3 ---
 Firestore/Source/Public/FirebaseFirestore/FIRMinKey.h | 3 ---
 Firestore/Swift/Source/Codable/MaxKey+Codable.swift   | 2 +-
 Firestore/Swift/Source/Codable/MinKey+Codable.swift   | 2 +-
 4 files changed, 2 insertions(+), 8 deletions(-)

diff --git a/Firestore/Source/Public/FirebaseFirestore/FIRMaxKey.h b/Firestore/Source/Public/FirebaseFirestore/FIRMaxKey.h
index 7fd20da9088..cdf96108c62 100644
--- a/Firestore/Source/Public/FirebaseFirestore/FIRMaxKey.h
+++ b/Firestore/Source/Public/FirebaseFirestore/FIRMaxKey.h
@@ -31,9 +31,6 @@ __attribute__((objc_subclassing_restricted))
 /** Returns true if the given object is equal to this, and false otherwise. */
 - (BOOL)isEqual:(nullable id)object;
-
-- (instancetype)init NS_UNAVAILABLE;
-
 @end

 NS_ASSUME_NONNULL_END
diff --git a/Firestore/Source/Public/FirebaseFirestore/FIRMinKey.h b/Firestore/Source/Public/FirebaseFirestore/FIRMinKey.h
index 1a2c778b524..4aaef128031 100644
--- a/Firestore/Source/Public/FirebaseFirestore/FIRMinKey.h
+++ b/Firestore/Source/Public/FirebaseFirestore/FIRMinKey.h
@@ -31,9 +31,6 @@ __attribute__((objc_subclassing_restricted))
 /** Returns true if the given object is equal to this, and false otherwise. */
 - (BOOL)isEqual:(nullable id)object;
-
-- (instancetype)init NS_UNAVAILABLE;
-
 @end

 NS_ASSUME_NONNULL_END
diff --git a/Firestore/Swift/Source/Codable/MaxKey+Codable.swift b/Firestore/Swift/Source/Codable/MaxKey+Codable.swift
index cf3e840903c..bccef6da28c 100644
--- a/Firestore/Swift/Source/Codable/MaxKey+Codable.swift
+++ b/Firestore/Swift/Source/Codable/MaxKey+Codable.swift
@@ -61,5 +61,5 @@ extension CodableMaxKey {
   }
 }

-/** Extends RegexValue to conform to Codable. */
+/** Extends MaxKey to conform to Codable. */
 extension FirebaseFirestore.MaxKey: FirebaseFirestore.CodableMaxKey {}
diff --git a/Firestore/Swift/Source/Codable/MinKey+Codable.swift b/Firestore/Swift/Source/Codable/MinKey+Codable.swift
index 9f388eceadb..6f4232fa3ca 100644
--- a/Firestore/Swift/Source/Codable/MinKey+Codable.swift
+++ b/Firestore/Swift/Source/Codable/MinKey+Codable.swift
@@ -61,5 +61,5 @@ extension CodableMinKey {
   }
 }

-/** Extends RegexValue to conform to Codable. */
+/** Extends MinKey to conform to Codable. */
 extension FirebaseFirestore.MinKey: FirebaseFirestore.CodableMinKey {}

From 422f14fc93e86e53c962539287530c4e2b7fca9d Mon Sep 17 00:00:00 2001
From: Ehsan Nasiri
Date: Tue, 1 Jul 2025 16:23:03 -0700
Subject: [PATCH 16/16] Fix lint errors.
---
 .../Example/Tests/Util/FSTIntegrationTestCase.mm |  4 ++++
 Firestore/core/src/util/quadruple.cc             | 16 +++++++++++-----
 Firestore/core/src/util/quadruple.h              |  8 ++++----
 Firestore/core/src/util/quadruple_builder.cc     |  7 +++----
 Firestore/core/src/util/quadruple_builder.h      |  7 ++++---
 5 files changed, 26 insertions(+), 16 deletions(-)

diff --git a/Firestore/Example/Tests/Util/FSTIntegrationTestCase.mm b/Firestore/Example/Tests/Util/FSTIntegrationTestCase.mm
index c64b1e80706..b64ffca0b5e 100644
--- a/Firestore/Example/Tests/Util/FSTIntegrationTestCase.mm
+++ b/Firestore/Example/Tests/Util/FSTIntegrationTestCase.mm
@@ -198,6 +198,10 @@ + (void)setUpDefaults {
   // SSL certs.
   NSString *project = [[NSProcessInfo processInfo] environment][@"PROJECT_ID"];
   NSString *targetBackend = [[NSProcessInfo processInfo] environment][@"TARGET_BACKEND"];
+  // Forcing use of nightly.
+  // TODO(types/ehsann): remove this before merging into main.
+  targetBackend = @"nightly";
+  project = @"firestore-sdk-nightly";
   NSString *host;
   if (targetBackend) {
     if ([targetBackend isEqualToString:@"emulator"]) {
diff --git a/Firestore/core/src/util/quadruple.cc b/Firestore/core/src/util/quadruple.cc
index 5a311624eb7..901ff041cd1 100644
--- a/Firestore/core/src/util/quadruple.cc
+++ b/Firestore/core/src/util/quadruple.cc
@@ -12,17 +12,23 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.

-#include "quadruple.h"
-#include
-#include
+#include "Firestore/core/src/util/quadruple.h"
+
+#include
 #include
+#include
 #include

-#include "quadruple_builder.h"
+#include
+
+#include "Firestore/core/src/util/quadruple_builder.h"

 namespace firebase {
 namespace firestore {
 namespace util {

+using std::fpclassify;
+using std::signbit;
+
 namespace {
 constexpr int64_t kHashCodeOfNan = 7652541255;
 }
@@ -241,4 +247,4 @@ int64_t Quadruple::HashValue() const {

 }  // namespace util
 }  // namespace firestore
-}  // namespace firebase
\ No newline at end of file
+}  // namespace firebase
diff --git a/Firestore/core/src/util/quadruple.h b/Firestore/core/src/util/quadruple.h
index eebb3371f6c..5376cb862a3 100644
--- a/Firestore/core/src/util/quadruple.h
+++ b/Firestore/core/src/util/quadruple.h
@@ -12,14 +12,14 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.

-#include
+#include
 #include
 #include
 #include
 #include

-#ifndef FIRESTORE_CORE_UTIL_QUADRUPLE_H_
-#define FIRESTORE_CORE_UTIL_QUADRUPLE_H_
+#ifndef FIRESTORE_CORE_SRC_UTIL_QUADRUPLE_H_
+#define FIRESTORE_CORE_SRC_UTIL_QUADRUPLE_H_

 namespace firebase {
 namespace firestore {
@@ -97,4 +97,4 @@ class Quadruple {
 }  // namespace firestore
 }  // namespace firebase

-#endif  // FIRESTORE_CORE_UTIL_QUADRUPLE_H_
\ No newline at end of file
+#endif  // FIRESTORE_CORE_SRC_UTIL_QUADRUPLE_H_
diff --git a/Firestore/core/src/util/quadruple_builder.cc b/Firestore/core/src/util/quadruple_builder.cc
index c723842717a..81ae46094f1 100644
--- a/Firestore/core/src/util/quadruple_builder.cc
+++ b/Firestore/core/src/util/quadruple_builder.cc
@@ -13,7 +13,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
-#include "quadruple_builder.h" +#include "Firestore/core/src/util/quadruple_builder.h" #include #include @@ -244,7 +244,6 @@ static std::array, 33> NEG_POWERS_OF_2 = { {{static_cast(-9), 0x3b9aca0000000000LL, 0x0000000000000000LL, 0x0000000000000001LL}}, // *** // 7: 2^-(2^6) = 2^-64 - // = 5.42101086242752217003726400434970855712890625E-20 // = // 0.542101086242752217003726400434970855712890625e-19 {{static_cast(-19), 0x8ac7230489e80000LL, 0x0000000000000000LL, @@ -660,8 +659,8 @@ void QuadrupleBuilder::multPacked3x64_simply(std::array& factor1, for (int32_t i = (0); i < (static_cast((result).size())); i++) { result[i] = 0LL; } - // TODO2 19.01.16 21:23:06 for the next version -- rebuild the table of powers - // to make the numbers unpacked, to avoid packing/unpacking + // TODO(dgay): 19.01.16 21:23:06 for the next version -- rebuild the table of + // powers to make the numbers unpacked, to avoid packing/unpacking unpack_3x64_to_6x32(factor1, this->buffer6x32A); unpack_3x64_to_6x32(factor2, this->buffer6x32B); for (int32_t i = (6) - 1; i >= (0); i--) { // compute partial 32-bit products diff --git a/Firestore/core/src/util/quadruple_builder.h b/Firestore/core/src/util/quadruple_builder.h index d5ae34238c6..6d67b389f10 100644 --- a/Firestore/core/src/util/quadruple_builder.h +++ b/Firestore/core/src/util/quadruple_builder.h @@ -16,11 +16,12 @@ #ifndef FIRESTORE_CORE_SRC_UTIL_QUADRUPLE_BUILDER_H_ #define FIRESTORE_CORE_SRC_UTIL_QUADRUPLE_BUILDER_H_ -#include -#include #include #include #include + +#include +#include #include #include @@ -95,4 +96,4 @@ class QuadrupleBuilder { } // namespace firestore } // namespace firebase -#endif \ No newline at end of file +#endif // FIRESTORE_CORE_SRC_UTIL_QUADRUPLE_BUILDER_H_