From 0716bd7511dba2b068aa6b31800772b6b387b9ef Mon Sep 17 00:00:00 2001 From: Mila <107142260+milaGGL@users.noreply.github.com> Date: Mon, 10 Mar 2025 13:17:54 -0400 Subject: [PATCH 1/9] Add new BSON types to public API (#330) --- common/api-review/firestore-lite.api.md | 79 ++++ common/api-review/firestore.api.md | 79 ++++ packages/firestore/lite/index.ts | 23 +- packages/firestore/src/api.ts | 23 +- .../firestore/src/api/field_value_impl.ts | 9 +- packages/firestore/src/core/target.ts | 20 +- .../src/index/firestore_index_value_writer.ts | 1 + .../src/lite-api/bson_binary_data.ts | 59 +++ .../firestore/src/lite-api/bson_object_Id.ts | 35 ++ .../src/lite-api/bson_timestamp_value.ts | 35 ++ .../src/lite-api/field_value_impl.ts | 89 ++++ .../firestore/src/lite-api/int32_value.ts | 35 ++ packages/firestore/src/lite-api/max_key.ts | 36 ++ packages/firestore/src/lite-api/min_key.ts | 36 ++ .../firestore/src/lite-api/regex_value.ts | 35 ++ .../src/lite-api/user_data_reader.ts | 149 +++++- .../src/lite-api/user_data_writer.ts | 90 +++- packages/firestore/src/model/object_value.ts | 5 +- packages/firestore/src/model/type_order.ts | 27 +- packages/firestore/src/model/values.ts | 332 ++++++++++--- .../test/integration/api/database.test.ts | 440 +++++++++++++++++- .../test/integration/api/type.test.ts | 395 +++++++++++++++- .../firestore/test/lite/integration.test.ts | 48 ++ .../test/unit/model/document.test.ts | 37 ++ .../test/unit/model/object_value.test.ts | 153 +++++- .../firestore/test/unit/model/target.test.ts | 12 +- .../firestore/test/unit/model/values.test.ts | 158 ++++++- .../test/unit/remote/serializer.helper.ts | 62 ++- 28 files changed, 2386 insertions(+), 116 deletions(-) create mode 100644 packages/firestore/src/lite-api/bson_binary_data.ts create mode 100644 packages/firestore/src/lite-api/bson_object_Id.ts create mode 100644 packages/firestore/src/lite-api/bson_timestamp_value.ts create mode 100644 packages/firestore/src/lite-api/int32_value.ts create mode 
100644 packages/firestore/src/lite-api/max_key.ts create mode 100644 packages/firestore/src/lite-api/min_key.ts create mode 100644 packages/firestore/src/lite-api/regex_value.ts diff --git a/common/api-review/firestore-lite.api.md b/common/api-review/firestore-lite.api.md index 4a9ef4c0171..04faa9c47c6 100644 --- a/common/api-review/firestore-lite.api.md +++ b/common/api-review/firestore-lite.api.md @@ -65,6 +65,41 @@ export function arrayUnion(...elements: unknown[]): FieldValue; // @public export function average(field: string | FieldPath): AggregateField; +// @public +export class BsonBinaryData { + constructor(subtype: number, data: Uint8Array); + readonly data: Uint8Array; + isEqual(other: BsonBinaryData): boolean; + readonly subtype: number; +} + +// @public +export function bsonBinaryData(subtype: number, data: Uint8Array): BsonBinaryData; + +// @public +export class BsonObjectId { + constructor(value: string); + isEqual(other: BsonObjectId): boolean; + // (undocumented) + readonly value: string; +} + +// @public +export function bsonObjectId(value: string): BsonObjectId; + +// @public +export function bsonTimestamp(seconds: number, increment: number): BsonTimestampValue; + +// @public +export class BsonTimestampValue { + constructor(seconds: number, increment: number); + // (undocumented) + readonly increment: number; + isEqual(other: BsonTimestampValue): boolean; + // (undocumented) + readonly seconds: number; +} + // @public export class Bytes { static fromBase64String(base64: string): Bytes; @@ -249,6 +284,17 @@ export function initializeFirestore(app: FirebaseApp, settings: Settings): Fires // @beta export function initializeFirestore(app: FirebaseApp, settings: Settings, databaseId?: string): Firestore; +// @public +export function int32(value: number): Int32Value; + +// @public +export class Int32Value { + constructor(value: number); + isEqual(other: Int32Value): boolean; + // (undocumented) + readonly value: number; +} + // @public export function 
limit(limit: number): QueryLimitConstraint; @@ -257,6 +303,26 @@ export function limitToLast(limit: number): QueryLimitConstraint; export { LogLevel } +// @public +export class MaxKey { + // (undocumented) + static instance(): MaxKey; + readonly type = "MaxKey"; +} + +// @public +export function maxKey(): MaxKey; + +// @public +export class MinKey { + // (undocumented) + static instance(): MinKey; + readonly type = "MinKey"; +} + +// @public +export function minKey(): MinKey; + // @public export type NestedUpdateFields> = UnionToIntersection<{ [K in keyof T & string]: ChildUpdateFields; @@ -360,6 +426,19 @@ export class QueryStartAtConstraint extends QueryConstraint { // @public export function refEqual(left: DocumentReference | CollectionReference, right: DocumentReference | CollectionReference): boolean; +// @public +export function regex(pattern: string, options: string): RegexValue; + +// @public +export class RegexValue { + constructor(pattern: string, options: string); + isEqual(other: RegexValue): boolean; + // (undocumented) + readonly options: string; + // (undocumented) + readonly pattern: string; +} + // @public export function runTransaction(firestore: Firestore, updateFunction: (transaction: Transaction) => Promise, options?: TransactionOptions): Promise; diff --git a/common/api-review/firestore.api.md b/common/api-review/firestore.api.md index 34b56b97f21..5d3c2286859 100644 --- a/common/api-review/firestore.api.md +++ b/common/api-review/firestore.api.md @@ -65,6 +65,41 @@ export function arrayUnion(...elements: unknown[]): FieldValue; // @public export function average(field: string | FieldPath): AggregateField; +// @public +export class BsonBinaryData { + constructor(subtype: number, data: Uint8Array); + readonly data: Uint8Array; + isEqual(other: BsonBinaryData): boolean; + readonly subtype: number; +} + +// @public +export function bsonBinaryData(subtype: number, data: Uint8Array): BsonBinaryData; + +// @public +export class BsonObjectId { + 
constructor(value: string); + isEqual(other: BsonObjectId): boolean; + // (undocumented) + readonly value: string; +} + +// @public +export function bsonObjectId(value: string): BsonObjectId; + +// @public +export function bsonTimestamp(seconds: number, increment: number): BsonTimestampValue; + +// @public +export class BsonTimestampValue { + constructor(seconds: number, increment: number); + // (undocumented) + readonly increment: number; + isEqual(other: BsonTimestampValue): boolean; + // (undocumented) + readonly seconds: number; +} + // @public export class Bytes { static fromBase64String(base64: string): Bytes; @@ -344,6 +379,17 @@ export interface IndexField { // @public export function initializeFirestore(app: FirebaseApp, settings: FirestoreSettings, databaseId?: string): Firestore; +// @public +export function int32(value: number): Int32Value; + +// @public +export class Int32Value { + constructor(value: number); + isEqual(other: Int32Value): boolean; + // (undocumented) + readonly value: number; +} + // @public export function limit(limit: number): QueryLimitConstraint; @@ -374,6 +420,16 @@ export interface LoadBundleTaskProgress { export { LogLevel } +// @public +export class MaxKey { + // (undocumented) + static instance(): MaxKey; + readonly type = "MaxKey"; +} + +// @public +export function maxKey(): MaxKey; + // @public export interface MemoryCacheSettings { garbageCollector?: MemoryGarbageCollector; @@ -411,6 +467,16 @@ export function memoryLruGarbageCollector(settings?: { cacheSizeBytes?: number; }): MemoryLruGarbageCollector; +// @public +export class MinKey { + // (undocumented) + static instance(): MinKey; + readonly type = "MinKey"; +} + +// @public +export function minKey(): MinKey; + // @public export function namedQuery(firestore: Firestore, name: string): Promise; @@ -620,6 +686,19 @@ export class QueryStartAtConstraint extends QueryConstraint { // @public export function refEqual(left: DocumentReference | CollectionReference, right: 
DocumentReference | CollectionReference): boolean; +// @public +export function regex(pattern: string, options: string): RegexValue; + +// @public +export class RegexValue { + constructor(pattern: string, options: string); + isEqual(other: RegexValue): boolean; + // (undocumented) + readonly options: string; + // (undocumented) + readonly pattern: string; +} + // @public export function runTransaction(firestore: Firestore, updateFunction: (transaction: Transaction) => Promise, options?: TransactionOptions): Promise; diff --git a/packages/firestore/lite/index.ts b/packages/firestore/lite/index.ts index b751f0a8254..6d1d6c01998 100644 --- a/packages/firestore/lite/index.ts +++ b/packages/firestore/lite/index.ts @@ -128,7 +128,14 @@ export { arrayUnion, serverTimestamp, deleteField, - vector + vector, + int32, + regex, + bsonBinaryData, + bsonObjectId, + bsonTimestamp, + minKey, + maxKey } from '../src/lite-api/field_value_impl'; export { @@ -141,6 +148,20 @@ export { export { VectorValue } from '../src/lite-api/vector_value'; +export { Int32Value } from '../src/lite-api/int32_value'; + +export { RegexValue } from '../src/lite-api/regex_value'; + +export { BsonBinaryData } from '../src/lite-api/bson_binary_data'; + +export { BsonObjectId } from '../src/lite-api/bson_object_Id'; + +export { BsonTimestampValue } from '../src/lite-api/bson_timestamp_value'; + +export { MinKey } from '../src/lite-api/min_key'; + +export { MaxKey } from '../src/lite-api/max_key'; + export { WriteBatch, writeBatch } from '../src/lite-api/write_batch'; export { TransactionOptions } from '../src/lite-api/transaction_options'; diff --git a/packages/firestore/src/api.ts b/packages/firestore/src/api.ts index ea969c6b94c..46fb1b3bba3 100644 --- a/packages/firestore/src/api.ts +++ b/packages/firestore/src/api.ts @@ -173,11 +173,32 @@ export { deleteField, increment, serverTimestamp, - vector + vector, + int32, + regex, + bsonBinaryData, + bsonObjectId, + bsonTimestamp, + minKey, + maxKey } from 
'./api/field_value_impl'; export { VectorValue } from './lite-api/vector_value'; +export { Int32Value } from './lite-api/int32_value'; + +export { RegexValue } from './lite-api/regex_value'; + +export { BsonBinaryData } from './lite-api/bson_binary_data'; + +export { BsonObjectId } from './lite-api/bson_object_Id'; + +export { BsonTimestampValue } from './lite-api/bson_timestamp_value'; + +export { MinKey } from './lite-api/min_key'; + +export { MaxKey } from './lite-api/max_key'; + export { LogLevelString as LogLevel, setLogLevel } from './util/log'; export { Bytes } from './api/bytes'; diff --git a/packages/firestore/src/api/field_value_impl.ts b/packages/firestore/src/api/field_value_impl.ts index 1b1283a3543..6e65d273259 100644 --- a/packages/firestore/src/api/field_value_impl.ts +++ b/packages/firestore/src/api/field_value_impl.ts @@ -21,5 +21,12 @@ export { arrayUnion, serverTimestamp, deleteField, - vector + vector, + int32, + regex, + bsonBinaryData, + bsonObjectId, + bsonTimestamp, + minKey, + maxKey } from '../lite-api/field_value_impl'; diff --git a/packages/firestore/src/core/target.ts b/packages/firestore/src/core/target.ts index 4b12857fc2a..664a2ef9a08 100644 --- a/packages/firestore/src/core/target.ts +++ b/packages/firestore/src/core/target.ts @@ -25,8 +25,8 @@ import { import { FieldPath, ResourcePath } from '../model/path'; import { canonicalId, - MAX_VALUE, - MIN_VALUE, + INTERNAL_MAX_VALUE, + INTERNAL_MIN_VALUE, lowerBoundCompare, upperBoundCompare, valuesGetLowerBound, @@ -302,7 +302,7 @@ export function targetGetNotInValues( /** * Returns a lower bound of field values that can be used as a starting point to - * scan the index defined by `fieldIndex`. Returns `MIN_VALUE` if no lower bound + * scan the index defined by `fieldIndex`. Returns `INTERNAL_MIN_VALUE` if no lower bound * exists. 
*/ export function targetGetLowerBound( @@ -328,7 +328,7 @@ export function targetGetLowerBound( /** * Returns an upper bound of field values that can be used as an ending point - * when scanning the index defined by `fieldIndex`. Returns `MAX_VALUE` if no + * when scanning the index defined by `fieldIndex`. Returns `INTERNAL_MAX_VALUE` if no * upper bound exists. */ export function targetGetUpperBound( @@ -362,13 +362,13 @@ function targetGetAscendingBound( fieldPath: FieldPath, bound: Bound | null ): { value: ProtoValue; inclusive: boolean } { - let value: ProtoValue = MIN_VALUE; + let value: ProtoValue = INTERNAL_MIN_VALUE; let inclusive = true; // Process all filters to find a value for the current field segment for (const fieldFilter of targetGetFieldFiltersForPath(target, fieldPath)) { - let filterValue: ProtoValue = MIN_VALUE; + let filterValue: ProtoValue = INTERNAL_MIN_VALUE; let filterInclusive = true; switch (fieldFilter.op) { @@ -387,7 +387,7 @@ function targetGetAscendingBound( break; case Operator.NOT_EQUAL: case Operator.NOT_IN: - filterValue = MIN_VALUE; + filterValue = INTERNAL_MIN_VALUE; break; default: // Remaining filters cannot be used as lower bounds. @@ -437,12 +437,12 @@ function targetGetDescendingBound( fieldPath: FieldPath, bound: Bound | null ): { value: ProtoValue; inclusive: boolean } { - let value: ProtoValue = MAX_VALUE; + let value: ProtoValue = INTERNAL_MAX_VALUE; let inclusive = true; // Process all filters to find a value for the current field segment for (const fieldFilter of targetGetFieldFiltersForPath(target, fieldPath)) { - let filterValue: ProtoValue = MAX_VALUE; + let filterValue: ProtoValue = INTERNAL_MAX_VALUE; let filterInclusive = true; switch (fieldFilter.op) { @@ -462,7 +462,7 @@ function targetGetDescendingBound( break; case Operator.NOT_EQUAL: case Operator.NOT_IN: - filterValue = MAX_VALUE; + filterValue = INTERNAL_MAX_VALUE; break; default: // Remaining filters cannot be used as upper bounds. 
diff --git a/packages/firestore/src/index/firestore_index_value_writer.ts b/packages/firestore/src/index/firestore_index_value_writer.ts index dfdb3836578..f831862a0de 100644 --- a/packages/firestore/src/index/firestore_index_value_writer.ts +++ b/packages/firestore/src/index/firestore_index_value_writer.ts @@ -124,6 +124,7 @@ export class FirestoreIndexValueWriter { encoder.writeNumber(geoPoint.latitude || 0); encoder.writeNumber(geoPoint.longitude || 0); } else if ('mapValue' in indexValue) { + // TODO(Mila/BSON): add bson types for indexing if (isMaxValue(indexValue)) { this.writeValueTypeLabel(encoder, Number.MAX_SAFE_INTEGER); } else if (isVectorValue(indexValue)) { diff --git a/packages/firestore/src/lite-api/bson_binary_data.ts b/packages/firestore/src/lite-api/bson_binary_data.ts new file mode 100644 index 00000000000..233dd790aec --- /dev/null +++ b/packages/firestore/src/lite-api/bson_binary_data.ts @@ -0,0 +1,59 @@ +/** + * @license + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { ByteString } from '../util/byte_string'; +import { Code, FirestoreError } from '../util/error'; + +/** + * Represents a BSON Binary Data type in Firestore documents. 
+ * + * @class BsonBinaryData + */ +export class BsonBinaryData { + /** The subtype for the data */ + readonly subtype: number; + + /** The binary data as a byte array */ + readonly data: Uint8Array; + + constructor(subtype: number, data: Uint8Array) { + if (subtype < 0 || subtype > 255) { + throw new FirestoreError( + Code.INVALID_ARGUMENT, + 'The subtype for BsonBinaryData must be a value in the inclusive [0, 255] range.' + ); + } + this.subtype = subtype; + // Make a copy of the data. + this.data = Uint8Array.from(data); + } + + /** + * Returns true if this `BsonBinaryData` is equal to the provided one. + * + * @param other - The `BsonBinaryData` to compare against. + * @return 'true' if this `BsonBinaryData` is equal to the provided one. + */ + isEqual(other: BsonBinaryData): boolean { + return ( + this.subtype === other.subtype && + ByteString.fromUint8Array(this.data).isEqual( + ByteString.fromUint8Array(other.data) + ) + ); + } +} diff --git a/packages/firestore/src/lite-api/bson_object_Id.ts b/packages/firestore/src/lite-api/bson_object_Id.ts new file mode 100644 index 00000000000..71ee13d8860 --- /dev/null +++ b/packages/firestore/src/lite-api/bson_object_Id.ts @@ -0,0 +1,35 @@ +/** + * @license + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Represents a BSON ObjectId type in Firestore documents. 
+ * + * @class BsonObjectId + */ +export class BsonObjectId { + constructor(readonly value: string) {} + + /** + * Returns true if this `BsonObjectId` is equal to the provided one. + * + * @param other - The `BsonObjectId` to compare against. + * @return 'true' if this `BsonObjectId` is equal to the provided one. + */ + isEqual(other: BsonObjectId): boolean { + return this.value === other.value; + } +} diff --git a/packages/firestore/src/lite-api/bson_timestamp_value.ts b/packages/firestore/src/lite-api/bson_timestamp_value.ts new file mode 100644 index 00000000000..60b48157906 --- /dev/null +++ b/packages/firestore/src/lite-api/bson_timestamp_value.ts @@ -0,0 +1,35 @@ +/** + * @license + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Represents a BSON Timestamp type in Firestore documents. + * + * @class BsonTimestampValue + */ +export class BsonTimestampValue { + constructor(readonly seconds: number, readonly increment: number) {} + + /** + * Returns true if this `BsonTimestampValue` is equal to the provided one. + * + * @param other - The `BsonTimestampValue` to compare against. + * @return 'true' if this `BsonTimestampValue` is equal to the provided one. 
 + */ + isEqual(other: BsonTimestampValue): boolean { + return this.seconds === other.seconds && this.increment === other.increment; + } +} diff --git a/packages/firestore/src/lite-api/field_value_impl.ts b/packages/firestore/src/lite-api/field_value_impl.ts index 2c910bdace5..2cc1e3522b0 100644 --- a/packages/firestore/src/lite-api/field_value_impl.ts +++ b/packages/firestore/src/lite-api/field_value_impl.ts @@ -15,7 +15,14 @@ * limitations under the License. */ +import { BsonBinaryData } from './bson_binary_data'; +import { BsonObjectId } from './bson_object_Id'; +import { BsonTimestampValue } from './bson_timestamp_value'; import { FieldValue } from './field_value'; +import { Int32Value } from './int32_value'; +import { MaxKey } from './max_key'; +import { MinKey } from './min_key'; +import { RegexValue } from './regex_value'; import { ArrayRemoveFieldValueImpl, ArrayUnionFieldValueImpl, @@ -109,3 +116,85 @@ export function increment(n: number): FieldValue { export function vector(values?: number[]): VectorValue { return new VectorValue(values); } + +/** + * Creates a new `Int32Value` constructed with the given number. + * + * @param value - The 32-bit number to be used for constructing the Int32Value + * + * @returns A new `Int32Value` constructed with the given number. + */ +export function int32(value: number): Int32Value { + return new Int32Value(value); +} + +/** + * Creates a new `RegexValue` constructed with the given pattern and options. + * + * @param pattern - The pattern to use for the regular expression. + * @param options - The options to use for the regular expression. + * + * @returns A new `RegexValue` constructed with the given pattern and options. + */ +export function regex(pattern: string, options: string): RegexValue { + return new RegexValue(pattern, options); +} + +/** + * Creates a new `BsonBinaryData` constructed with the given subtype and data. + * + * @param subtype - Create a `BsonBinaryData` instance with the given subtype.
 + * @param data - Create a `BsonBinaryData` instance with a copy of this array of numbers. + * + * @returns A new `BsonBinaryData` constructed with the given subtype and data. + */ +export function bsonBinaryData( + subtype: number, + data: Uint8Array +): BsonBinaryData { + return new BsonBinaryData(subtype, data); +} + +/** + * Creates a new `BsonObjectId` constructed with the given string. + * + * @param value - The 24-character hex string representing the ObjectId. + * + * @returns A new `BsonObjectId` constructed with the given string. + */ +export function bsonObjectId(value: string): BsonObjectId { + return new BsonObjectId(value); +} + +/** + * Creates a new `BsonTimestampValue` constructed with the given seconds and increment. + * + * @param seconds - The underlying unsigned 32-bit integer for seconds. + * @param increment - The underlying unsigned 32-bit integer for increment. + * + * @returns A new `BsonTimestampValue` constructed with the given seconds and increment. + */ +export function bsonTimestamp( + seconds: number, + increment: number +): BsonTimestampValue { + return new BsonTimestampValue(seconds, increment); +} + +/** + * Creates or returns a `MinKey` instance. + * + * @returns A `MinKey` instance. + */ +export function minKey(): MinKey { + return MinKey.instance(); +} + +/** + * Creates or returns a `MaxKey` instance. + * + * @returns A `MaxKey` instance. + */ +export function maxKey(): MaxKey { + return MaxKey.instance(); +} diff --git a/packages/firestore/src/lite-api/int32_value.ts b/packages/firestore/src/lite-api/int32_value.ts new file mode 100644 index 00000000000..cfa0003c0f6 --- /dev/null +++ b/packages/firestore/src/lite-api/int32_value.ts @@ -0,0 +1,35 @@ +/** + * @license + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Represents a 32-bit integer type in Firestore documents. + * + * @class Int32Value + */ +export class Int32Value { + constructor(readonly value: number) {} + + /** + * Returns true if this `Int32Value` is equal to the provided one. + * + * @param other - The `Int32Value` to compare against. + * @return 'true' if this `Int32Value` is equal to the provided one. + */ + isEqual(other: Int32Value): boolean { + return this.value === other.value; + } +} diff --git a/packages/firestore/src/lite-api/max_key.ts b/packages/firestore/src/lite-api/max_key.ts new file mode 100644 index 00000000000..3f37986315e --- /dev/null +++ b/packages/firestore/src/lite-api/max_key.ts @@ -0,0 +1,36 @@ +/** + * @license + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Represent a "Max Key" type in Firestore documents. + * + * @class MaxKey + */ +export class MaxKey { + private static MAX_KEY_VALUE_INSTANCE: MaxKey | null = null; + /** A type string to uniquely identify instances of this class. 
*/ + readonly type = 'MaxKey'; + + private constructor() {} + + static instance(): MaxKey { + if (!MaxKey.MAX_KEY_VALUE_INSTANCE) { + MaxKey.MAX_KEY_VALUE_INSTANCE = new MaxKey(); + } + return MaxKey.MAX_KEY_VALUE_INSTANCE; + } +} diff --git a/packages/firestore/src/lite-api/min_key.ts b/packages/firestore/src/lite-api/min_key.ts new file mode 100644 index 00000000000..a901b9611a5 --- /dev/null +++ b/packages/firestore/src/lite-api/min_key.ts @@ -0,0 +1,36 @@ +/** + * @license + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Represent a "Min Key" type in Firestore documents. + * + * @class MinKey + */ +export class MinKey { + private static MIN_KEY_VALUE_INSTANCE: MinKey | null = null; + /** A type string to uniquely identify instances of this class. */ + readonly type = 'MinKey'; + + private constructor() {} + + static instance(): MinKey { + if (!MinKey.MIN_KEY_VALUE_INSTANCE) { + MinKey.MIN_KEY_VALUE_INSTANCE = new MinKey(); + } + return MinKey.MIN_KEY_VALUE_INSTANCE; + } +} diff --git a/packages/firestore/src/lite-api/regex_value.ts b/packages/firestore/src/lite-api/regex_value.ts new file mode 100644 index 00000000000..b4d4f70962b --- /dev/null +++ b/packages/firestore/src/lite-api/regex_value.ts @@ -0,0 +1,35 @@ +/** + * @license + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Represents a regular expression type in Firestore documents. + * + * @class RegexValue + */ +export class RegexValue { + constructor(readonly pattern: string, readonly options: string) {} + + /** + * Returns true if this `RegexValue` is equal to the provided one. + * + * @param other - The `RegexValue` to compare against. + * @return 'true' if this `RegexValue` is equal to the provided one. + */ + isEqual(other: RegexValue): boolean { + return this.pattern === other.pattern && this.options === other.options; + } +} diff --git a/packages/firestore/src/lite-api/user_data_reader.ts b/packages/firestore/src/lite-api/user_data_reader.ts index ebd4b49085f..3d0ce031599 100644 --- a/packages/firestore/src/lite-api/user_data_reader.ts +++ b/packages/firestore/src/lite-api/user_data_reader.ts @@ -44,14 +44,25 @@ import { import { TYPE_KEY, VECTOR_MAP_VECTORS_KEY, - VECTOR_VALUE_SENTINEL + RESERVED_VECTOR_KEY, + RESERVED_REGEX_KEY, + RESERVED_REGEX_PATTERN_KEY, + RESERVED_REGEX_OPTIONS_KEY, + RESERVED_BSON_OBJECT_ID_KEY, + RESERVED_INT32_KEY, + RESERVED_BSON_TIMESTAMP_KEY, + RESERVED_BSON_TIMESTAMP_SECONDS_KEY, + RESERVED_BSON_TIMESTAMP_INCREMENT_KEY, + RESERVED_BSON_BINARY_KEY, + RESERVED_MIN_KEY, + RESERVED_MAX_KEY } from '../model/values'; import { newSerializer } from '../platform/serializer'; import { MapValue as ProtoMapValue, Value as ProtoValue } from '../protos/firestore_proto_api'; -import { toDouble, toNumber } from '../remote/number_serializer'; +import { toDouble, toInteger, toNumber } from '../remote/number_serializer'; import { 
JsonProtoSerializer, toBytes, @@ -59,20 +70,28 @@ import { toTimestamp } from '../remote/serializer'; import { debugAssert, fail } from '../util/assert'; +import { ByteString } from '../util/byte_string'; import { Code, FirestoreError } from '../util/error'; import { isPlainObject, valueDescription } from '../util/input_validation'; import { Dict, forEach, isEmpty } from '../util/obj'; +import { BsonBinaryData } from './bson_binary_data'; +import { BsonObjectId } from './bson_object_Id'; +import { BsonTimestampValue } from './bson_timestamp_value'; import { Bytes } from './bytes'; import { Firestore } from './database'; import { FieldPath } from './field_path'; import { FieldValue } from './field_value'; import { GeoPoint } from './geo_point'; +import { Int32Value } from './int32_value'; +import { MaxKey } from './max_key'; +import { MinKey } from './min_key'; import { DocumentReference, PartialWithFieldValue, WithFieldValue } from './reference'; +import { RegexValue } from './regex_value'; import { Timestamp } from './timestamp'; import { VectorValue } from './vector_value'; @@ -909,6 +928,20 @@ function parseScalarValue( }; } else if (value instanceof VectorValue) { return parseVectorValue(value, context); + } else if (value instanceof RegexValue) { + return parseRegexValue(value); + } else if (value instanceof BsonObjectId) { + return parseBsonObjectId(value); + } else if (value instanceof Int32Value) { + return parseInt32Value(value); + } else if (value instanceof BsonTimestampValue) { + return parseBsonTimestamp(value); + } else if (value instanceof BsonBinaryData) { + return parseBsonBinaryData(context.serializer, value); + } else if (value instanceof MinKey) { + return parseMinKey(); + } else if (value instanceof MaxKey) { + return parseMaxKey(); } else { throw context.createError( `Unsupported field value: ${valueDescription(value)}` @@ -926,7 +959,7 @@ export function parseVectorValue( const mapValue: ProtoMapValue = { fields: { [TYPE_KEY]: { - 
stringValue: VECTOR_VALUE_SENTINEL + stringValue: RESERVED_VECTOR_KEY }, [VECTOR_MAP_VECTORS_KEY]: { arrayValue: { @@ -947,6 +980,107 @@ export function parseVectorValue( return { mapValue }; } +export function parseRegexValue(value: RegexValue): ProtoValue { + const mapValue: ProtoMapValue = { + fields: { + [RESERVED_REGEX_KEY]: { + mapValue: { + fields: { + [RESERVED_REGEX_PATTERN_KEY]: { + stringValue: value.pattern + }, + [RESERVED_REGEX_OPTIONS_KEY]: { + stringValue: value.options + } + } + } + } + } + }; + + return { mapValue }; +} + +export function parseMinKey(): ProtoValue { + const mapValue: ProtoMapValue = { + fields: { + [RESERVED_MIN_KEY]: { + nullValue: 'NULL_VALUE' + } + } + }; + return { mapValue }; +} + +export function parseMaxKey(): ProtoValue { + const mapValue: ProtoMapValue = { + fields: { + [RESERVED_MAX_KEY]: { + nullValue: 'NULL_VALUE' + } + } + }; + return { mapValue }; +} + +export function parseBsonObjectId(value: BsonObjectId): ProtoValue { + const mapValue: ProtoMapValue = { + fields: { + [RESERVED_BSON_OBJECT_ID_KEY]: { + stringValue: value.value + } + } + }; + return { mapValue }; +} + +export function parseInt32Value(value: Int32Value): ProtoValue { + const mapValue: ProtoMapValue = { + fields: { + [RESERVED_INT32_KEY]: toInteger(value.value) + } + }; + return { mapValue }; +} + +export function parseBsonTimestamp(value: BsonTimestampValue): ProtoValue { + const mapValue: ProtoMapValue = { + fields: { + [RESERVED_BSON_TIMESTAMP_KEY]: { + mapValue: { + fields: { + [RESERVED_BSON_TIMESTAMP_SECONDS_KEY]: toInteger(value.seconds), + [RESERVED_BSON_TIMESTAMP_INCREMENT_KEY]: toInteger(value.increment) + } + } + } + } + }; + return { mapValue }; +} + +export function parseBsonBinaryData( + serializer: JsonProtoSerializer, + value: BsonBinaryData +): ProtoValue { + const subtypeAndData = new Uint8Array(value.data.length + 1); + // This converts the subtype from `number` to a byte. 
+ subtypeAndData[0] = value.subtype; + // Concatenate the rest of the data starting at index 1. + subtypeAndData.set(value.data, /* offset */ 1); + + const mapValue: ProtoMapValue = { + fields: { + [RESERVED_BSON_BINARY_KEY]: { + bytesValue: toBytes( + serializer, + ByteString.fromUint8Array(subtypeAndData) + ) + } + } + }; + return { mapValue }; +} /** * Checks whether an object looks like a JSON object that should be converted * into a struct. Normal class/prototype instances are considered to look like @@ -965,7 +1099,14 @@ function looksLikeJsonObject(input: unknown): boolean { !(input instanceof Bytes) && !(input instanceof DocumentReference) && !(input instanceof FieldValue) && - !(input instanceof VectorValue) + !(input instanceof VectorValue) && + !(input instanceof MinKey) && + !(input instanceof MaxKey) && + !(input instanceof Int32Value) && + !(input instanceof RegexValue) && + !(input instanceof BsonObjectId) && + !(input instanceof BsonTimestampValue) && + !(input instanceof BsonBinaryData) ); } diff --git a/packages/firestore/src/lite-api/user_data_writer.ts b/packages/firestore/src/lite-api/user_data_writer.ts index e903991cb58..0de02b822b2 100644 --- a/packages/firestore/src/lite-api/user_data_writer.ts +++ b/packages/firestore/src/lite-api/user_data_writer.ts @@ -30,7 +30,19 @@ import { getPreviousValue } from '../model/server_timestamps'; import { TypeOrder } from '../model/type_order'; -import { VECTOR_MAP_VECTORS_KEY, typeOrder } from '../model/values'; +import { + RESERVED_BSON_BINARY_KEY, + RESERVED_INT32_KEY, + RESERVED_BSON_OBJECT_ID_KEY, + RESERVED_REGEX_KEY, + RESERVED_REGEX_OPTIONS_KEY, + RESERVED_REGEX_PATTERN_KEY, + RESERVED_BSON_TIMESTAMP_INCREMENT_KEY, + RESERVED_BSON_TIMESTAMP_KEY, + RESERVED_BSON_TIMESTAMP_SECONDS_KEY, + typeOrder, + VECTOR_MAP_VECTORS_KEY +} from '../model/values'; import { ApiClientObjectMap, ArrayValue as ProtoArrayValue, @@ -46,7 +58,13 @@ import { ByteString } from '../util/byte_string'; import { logError } 
from '../util/log'; import { forEach } from '../util/obj'; +import { BsonBinaryData } from './bson_binary_data'; +import { BsonObjectId } from './bson_object_Id'; +import { BsonTimestampValue } from './bson_timestamp_value'; +import { maxKey, minKey } from './field_value_impl'; import { GeoPoint } from './geo_point'; +import { Int32Value } from './int32_value'; +import { RegexValue } from './regex_value'; import { Timestamp } from './timestamp'; import { VectorValue } from './vector_value'; @@ -65,10 +83,16 @@ export abstract class AbstractUserDataWriter { ): unknown { switch (typeOrder(value)) { case TypeOrder.NullValue: + if ('mapValue' in value) { + return minKey(); + } return null; case TypeOrder.BooleanValue: return value.booleanValue!; case TypeOrder.NumberValue: + if ('mapValue' in value) { + return this.convertToInt32Value(value.mapValue!); + } return normalizeNumber(value.integerValue || value.doubleValue); case TypeOrder.TimestampValue: return this.convertTimestamp(value.timestampValue!); @@ -88,6 +112,16 @@ export abstract class AbstractUserDataWriter { return this.convertObject(value.mapValue!, serverTimestampBehavior); case TypeOrder.VectorValue: return this.convertVectorValue(value.mapValue!); + case TypeOrder.RegexValue: + return this.convertToRegexValue(value.mapValue!); + case TypeOrder.BsonObjectIdValue: + return this.convertToBsonObjectIdValue(value.mapValue!); + case TypeOrder.BsonBinaryValue: + return this.convertToBsonBinaryValue(value.mapValue!); + case TypeOrder.BsonTimestampValue: + return this.convertToBsonTimestampValue(value.mapValue!); + case TypeOrder.MaxKeyValue: + return maxKey(); default: throw fail('Invalid value type: ' + JSON.stringify(value)); } @@ -127,6 +161,60 @@ export abstract class AbstractUserDataWriter { return new VectorValue(values); } + private convertToBsonObjectIdValue(mapValue: ProtoMapValue): BsonObjectId { + const oid = + mapValue!.fields?.[RESERVED_BSON_OBJECT_ID_KEY]?.stringValue ?? 
''; + return new BsonObjectId(oid); + } + + private convertToBsonBinaryValue(mapValue: ProtoMapValue): BsonBinaryData { + const fields = mapValue!.fields?.[RESERVED_BSON_BINARY_KEY]; + const subtypeAndData = fields?.bytesValue; + if (!subtypeAndData) { + throw new Error('Received incorrect bytesValue for BsonBinaryData'); + } + + const bytes = normalizeByteString(subtypeAndData).toUint8Array(); + if (bytes.length === 0) { + throw new Error('Received empty bytesValue for BsonBinaryData'); + } + const subtype = bytes.at(0); + const data = bytes.slice(1); + return new BsonBinaryData(Number(subtype), data); + } + + private convertToBsonTimestampValue( + mapValue: ProtoMapValue + ): BsonTimestampValue { + const fields = mapValue!.fields?.[RESERVED_BSON_TIMESTAMP_KEY]; + const seconds = Number( + fields?.mapValue?.fields?.[RESERVED_BSON_TIMESTAMP_SECONDS_KEY] + ?.integerValue + ); + const increment = Number( + fields?.mapValue?.fields?.[RESERVED_BSON_TIMESTAMP_INCREMENT_KEY] + ?.integerValue + ); + return new BsonTimestampValue(seconds, increment); + } + + private convertToRegexValue(mapValue: ProtoMapValue): RegexValue { + const pattern = + mapValue!.fields?.[RESERVED_REGEX_KEY]?.mapValue?.fields?.[ + RESERVED_REGEX_PATTERN_KEY + ]?.stringValue ?? ''; + const options = + mapValue!.fields?.[RESERVED_REGEX_KEY]?.mapValue?.fields?.[ + RESERVED_REGEX_OPTIONS_KEY + ]?.stringValue ?? 
''; + return new RegexValue(pattern, options); + } + + private convertToInt32Value(mapValue: ProtoMapValue): Int32Value { + const value = Number(mapValue!.fields?.[RESERVED_INT32_KEY]?.integerValue); + return new Int32Value(value); + } + private convertGeoPoint(value: ProtoLatLng): GeoPoint { return new GeoPoint( normalizeNumber(value.latitude), diff --git a/packages/firestore/src/model/object_value.ts b/packages/firestore/src/model/object_value.ts index d5cb273eb9d..35d8733e40a 100644 --- a/packages/firestore/src/model/object_value.ts +++ b/packages/firestore/src/model/object_value.ts @@ -25,7 +25,7 @@ import { forEach } from '../util/obj'; import { FieldMask } from './field_mask'; import { FieldPath } from './path'; import { isServerTimestamp } from './server_timestamps'; -import { deepClone, isMapValue, valueEquals } from './values'; +import { deepClone, isBsonType, isMapValue, valueEquals } from './values'; export interface JsonObject { [name: string]: T; @@ -188,7 +188,8 @@ export function extractFieldMask(value: ProtoMapValue): FieldMask { const fields: FieldPath[] = []; forEach(value!.fields, (key, value) => { const currentPath = new FieldPath([key]); - if (isMapValue(value)) { + // BSON types do not need to extract reserved keys, ie,__regex__. + if (isMapValue(value) && !isBsonType(value)) { const nestedMask = extractFieldMask(value.mapValue!); const nestedFields = nestedMask.fields; if (nestedFields.length === 0) { diff --git a/packages/firestore/src/model/type_order.ts b/packages/firestore/src/model/type_order.ts index 749b8e8036d..7ac6ed11e47 100644 --- a/packages/firestore/src/model/type_order.ts +++ b/packages/firestore/src/model/type_order.ts @@ -24,18 +24,27 @@ */ export const enum TypeOrder { // This order is based on the backend's ordering, but modified to support - // server timestamps and `MAX_VALUE`. + // server timestamps and `MAX_VALUE` inside the SDK. + // NULL and MIN_KEY sort the same. 
NullValue = 0, + MinKeyValue = 0, BooleanValue = 1, NumberValue = 2, TimestampValue = 3, - ServerTimestampValue = 4, - StringValue = 5, - BlobValue = 6, - RefValue = 7, - GeoPointValue = 8, - ArrayValue = 9, - VectorValue = 10, - ObjectValue = 11, + // TODO(Mila/BSON): which should come first considering indexes? + BsonTimestampValue = 4, + ServerTimestampValue = 5, + StringValue = 6, + BlobValue = 7, + BsonBinaryValue = 8, + RefValue = 9, + BsonObjectIdValue = 10, + GeoPointValue = 11, + RegexValue = 12, + ArrayValue = 13, + VectorValue = 14, + ObjectValue = 15, + // TODO(Mila/BSON):should MaxKeyValue and MaxValue combined? how would this affect indexes? + MaxKeyValue = 16, MaxValue = 9007199254740991 // Number.MAX_SAFE_INTEGER } diff --git a/packages/firestore/src/model/values.ts b/packages/firestore/src/model/values.ts index 1977767515e..d6344409ed9 100644 --- a/packages/firestore/src/model/values.ts +++ b/packages/firestore/src/model/values.ts @@ -35,28 +35,54 @@ import { normalizeNumber, normalizeTimestamp } from './normalize'; -import { - getLocalWriteTime, - getPreviousValue, - isServerTimestamp -} from './server_timestamps'; +import { getLocalWriteTime, getPreviousValue } from './server_timestamps'; import { TypeOrder } from './type_order'; export const TYPE_KEY = '__type__'; -const MAX_VALUE_TYPE = '__max__'; -export const MAX_VALUE: Value = { + +export const RESERVED_VECTOR_KEY = '__vector__'; +export const VECTOR_MAP_VECTORS_KEY = 'value'; + +const RESERVED_SERVER_TIMESTAMP_KEY = 'server_timestamp'; + +export const RESERVED_MIN_KEY = '__min__'; +export const RESERVED_MAX_KEY = '__max__'; + +export const RESERVED_REGEX_KEY = '__regex__'; +export const RESERVED_REGEX_PATTERN_KEY = 'pattern'; +export const RESERVED_REGEX_OPTIONS_KEY = 'options'; + +export const RESERVED_BSON_OBJECT_ID_KEY = '__oid__'; + +export const RESERVED_INT32_KEY = '__int__'; + +export const RESERVED_BSON_TIMESTAMP_KEY = '__request_timestamp__'; +export const 
RESERVED_BSON_TIMESTAMP_SECONDS_KEY = 'seconds'; +export const RESERVED_BSON_TIMESTAMP_INCREMENT_KEY = 'increment'; + +export const RESERVED_BSON_BINARY_KEY = '__binary__'; + +export const INTERNAL_MIN_VALUE: Value = { + nullValue: 'NULL_VALUE' +}; + +export const INTERNAL_MAX_VALUE: Value = { mapValue: { fields: { - '__type__': { stringValue: MAX_VALUE_TYPE } + '__type__': { stringValue: RESERVED_MAX_KEY } } } }; -export const VECTOR_VALUE_SENTINEL = '__vector__'; -export const VECTOR_MAP_VECTORS_KEY = 'value'; - -export const MIN_VALUE: Value = { - nullValue: 'NULL_VALUE' +export const MIN_VECTOR_VALUE = { + mapValue: { + fields: { + [TYPE_KEY]: { stringValue: RESERVED_VECTOR_KEY }, + [VECTOR_MAP_VECTORS_KEY]: { + arrayValue: {} + } + } + } }; /** Extracts the backend's type order for the provided value. */ @@ -80,14 +106,31 @@ export function typeOrder(value: Value): TypeOrder { } else if ('arrayValue' in value) { return TypeOrder.ArrayValue; } else if ('mapValue' in value) { - if (isServerTimestamp(value)) { - return TypeOrder.ServerTimestampValue; - } else if (isMaxValue(value)) { - return TypeOrder.MaxValue; - } else if (isVectorValue(value)) { - return TypeOrder.VectorValue; + const valueType = detectSpecialMapType(value); + switch (valueType) { + case 'serverTimestampValue': + return TypeOrder.ServerTimestampValue; + case 'maxValue': + return TypeOrder.MaxValue; + case 'vectorValue': + return TypeOrder.VectorValue; + case 'regexValue': + return TypeOrder.RegexValue; + case 'bsonObjectIdValue': + return TypeOrder.BsonObjectIdValue; + case 'int32Value': + return TypeOrder.NumberValue; + case 'bsonTimestampValue': + return TypeOrder.BsonTimestampValue; + case 'bsonBinaryValue': + return TypeOrder.BsonBinaryValue; + case 'minKeyValue': + return TypeOrder.MinKeyValue; + case 'maxKeyValue': + return TypeOrder.MaxKeyValue; + default: + return TypeOrder.ObjectValue; } - return TypeOrder.ObjectValue; } else { return fail('Invalid value type: ' + 
JSON.stringify(value)); } @@ -107,6 +150,11 @@ export function valueEquals(left: Value, right: Value): boolean { switch (leftType) { case TypeOrder.NullValue: + case TypeOrder.MaxValue: + // MaxKeys are all equal. + case TypeOrder.MaxKeyValue: + // MinKeys are all equal. + case TypeOrder.MinKeyValue: return true; case TypeOrder.BooleanValue: return left.booleanValue === right.booleanValue; @@ -133,8 +181,14 @@ export function valueEquals(left: Value, right: Value): boolean { case TypeOrder.VectorValue: case TypeOrder.ObjectValue: return objectEquals(left, right); - case TypeOrder.MaxValue: - return true; + case TypeOrder.BsonBinaryValue: + return compareBsonBinaryData(left, right) === 0; + case TypeOrder.BsonTimestampValue: + return compareBsonTimestamps(left, right) === 0; + case TypeOrder.RegexValue: + return compareRegex(left, right) === 0; + case TypeOrder.BsonObjectIdValue: + return compareBsonObjectIds(left, right) === 0; default: return fail('Unexpected value type: ' + JSON.stringify(left)); } @@ -174,10 +228,12 @@ function blobEquals(left: Value, right: Value): boolean { } export function numberEquals(left: Value, right: Value): boolean { - if ('integerValue' in left && 'integerValue' in right) { - return ( - normalizeNumber(left.integerValue) === normalizeNumber(right.integerValue) - ); + if ( + ('integerValue' in left && 'integerValue' in right) || + (detectSpecialMapType(left) === 'int32Value' && + detectSpecialMapType(right) === 'int32Value') + ) { + return extractNumber(left) === extractNumber(right); } else if ('doubleValue' in left && 'doubleValue' in right) { const n1 = normalizeNumber(left.doubleValue!); const n2 = normalizeNumber(right.doubleValue!); @@ -237,6 +293,8 @@ export function valueCompare(left: Value, right: Value): number { switch (leftType) { case TypeOrder.NullValue: + case TypeOrder.MinKeyValue: + case TypeOrder.MaxKeyValue: case TypeOrder.MaxValue: return 0; case TypeOrder.BooleanValue: @@ -264,14 +322,33 @@ export function 
valueCompare(left: Value, right: Value): number { return compareVectors(left.mapValue!, right.mapValue!); case TypeOrder.ObjectValue: return compareMaps(left.mapValue!, right.mapValue!); + case TypeOrder.BsonTimestampValue: + return compareBsonTimestamps(left, right); + case TypeOrder.BsonBinaryValue: + return compareBsonBinaryData(left, right); + case TypeOrder.RegexValue: + return compareRegex(left, right); + case TypeOrder.BsonObjectIdValue: + return compareBsonObjectIds(left, right); + default: throw fail('Invalid value type: ' + leftType); } } +export function extractNumber(value: Value): number { + let numberValue; + if (detectSpecialMapType(value) === 'int32Value') { + numberValue = value.mapValue!.fields![RESERVED_INT32_KEY].integerValue!; + } else { + numberValue = value.integerValue || value.doubleValue; + } + return normalizeNumber(numberValue); +} + function compareNumbers(left: Value, right: Value): number { - const leftNumber = normalizeNumber(left.integerValue || left.doubleValue); - const rightNumber = normalizeNumber(right.integerValue || right.doubleValue); + const leftNumber = extractNumber(left); + const rightNumber = extractNumber(right); if (leftNumber < rightNumber) { return -1; @@ -379,11 +456,14 @@ function compareVectors(left: MapValue, right: MapValue): number { } function compareMaps(left: MapValue, right: MapValue): number { - if (left === MAX_VALUE.mapValue && right === MAX_VALUE.mapValue) { + if ( + left === INTERNAL_MAX_VALUE.mapValue && + right === INTERNAL_MAX_VALUE.mapValue + ) { return 0; - } else if (left === MAX_VALUE.mapValue) { + } else if (left === INTERNAL_MAX_VALUE.mapValue) { return 1; - } else if (right === MAX_VALUE.mapValue) { + } else if (right === INTERNAL_MAX_VALUE.mapValue) { return -1; } @@ -413,6 +493,80 @@ function compareMaps(left: MapValue, right: MapValue): number { return primitiveComparator(leftKeys.length, rightKeys.length); } +function compareBsonTimestamps(left: Value, right: Value): number { + const 
leftSecondField = + left.mapValue!.fields?.[RESERVED_BSON_TIMESTAMP_KEY].mapValue?.fields?.[ + RESERVED_BSON_TIMESTAMP_SECONDS_KEY + ]; + const rightSecondField = + right.mapValue!.fields?.[RESERVED_BSON_TIMESTAMP_KEY].mapValue?.fields?.[ + RESERVED_BSON_TIMESTAMP_SECONDS_KEY + ]; + + const leftIncrementField = + left.mapValue!.fields?.[RESERVED_BSON_TIMESTAMP_KEY].mapValue?.fields?.[ + RESERVED_BSON_TIMESTAMP_INCREMENT_KEY + ]; + const rightIncrementField = + right.mapValue!.fields?.[RESERVED_BSON_TIMESTAMP_KEY].mapValue?.fields?.[ + RESERVED_BSON_TIMESTAMP_INCREMENT_KEY + ]; + + const secondsDiff = compareNumbers(leftSecondField!, rightSecondField!); + return secondsDiff !== 0 + ? secondsDiff + : compareNumbers(leftIncrementField!, rightIncrementField!); +} + +function compareBsonBinaryData(left: Value, right: Value): number { + const leftBytes = + left.mapValue!.fields?.[RESERVED_BSON_BINARY_KEY]?.bytesValue; + const rightBytes = + right.mapValue!.fields?.[RESERVED_BSON_BINARY_KEY]?.bytesValue; + if (!rightBytes || !leftBytes) { + throw new Error('Received incorrect bytesValue for BsonBinaryData'); + } + return compareBlobs(leftBytes, rightBytes); +} + +function compareRegex(left: Value, right: Value): number { + const leftFields = left.mapValue!.fields; + const leftPattern = + leftFields?.[RESERVED_REGEX_KEY]?.mapValue?.fields?.[ + RESERVED_REGEX_PATTERN_KEY + ]?.stringValue ?? ''; + const leftOptions = + leftFields?.[RESERVED_REGEX_KEY]?.mapValue?.fields?.[ + RESERVED_REGEX_OPTIONS_KEY + ]?.stringValue ?? ''; + + const rightFields = right.mapValue!.fields; + const rightPattern = + rightFields?.[RESERVED_REGEX_KEY]?.mapValue?.fields?.[ + RESERVED_REGEX_PATTERN_KEY + ]?.stringValue ?? ''; + const rightOptions = + rightFields?.[RESERVED_REGEX_KEY]?.mapValue?.fields?.[ + RESERVED_REGEX_OPTIONS_KEY + ]?.stringValue ?? ''; + + // First order by patterns, and then options. 
+ const patternDiff = primitiveComparator(leftPattern, rightPattern); + return patternDiff !== 0 + ? patternDiff + : primitiveComparator(leftOptions, rightOptions); +} + +function compareBsonObjectIds(left: Value, right: Value): number { + const leftOid = + left.mapValue!.fields?.[RESERVED_BSON_OBJECT_ID_KEY]?.stringValue ?? ''; + const rightOid = + right.mapValue!.fields?.[RESERVED_BSON_OBJECT_ID_KEY]?.stringValue ?? ''; + + // TODO(Mila/BSON): use compareUtf8Strings once the bug fix is merged. + return primitiveComparator(leftOid, rightOid); +} + /** * Generates the canonical ID for the provided field value (as used in Target * serialization). @@ -443,6 +597,10 @@ function canonifyValue(value: Value): string { } else if ('arrayValue' in value) { return canonifyArray(value.arrayValue!); } else if ('mapValue' in value) { + // BsonBinaryValue contains an array of bytes, and needs to extract `subtype` and `data` from it before canonifying. + if (detectSpecialMapType(value) === 'bsonBinaryValue') { + return canonifyBsonBinaryData(value.mapValue!); + } return canonifyMap(value.mapValue!); } else { return fail('Invalid value type: ' + JSON.stringify(value)); @@ -466,6 +624,19 @@ function canonifyReference(referenceValue: string): string { return DocumentKey.fromName(referenceValue).toString(); } +function canonifyBsonBinaryData(mapValue: MapValue): string { + const fields = mapValue!.fields?.[RESERVED_BSON_BINARY_KEY]; + const subtypeAndData = fields?.bytesValue; + if (!subtypeAndData) { + throw new Error('Received incorrect bytesValue for BsonBinaryData'); + } + // Normalize the bytesValue to Uint8Array before extracting subtype and data. + const bytes = normalizeByteString(subtypeAndData).toUint8Array(); + return `{__binary__:{subType:${bytes.at(0)},data:${canonifyByteString( + bytes.slice(1) + )}}}`; +} + function canonifyMap(mapValue: MapValue): string { // Iteration order in JavaScript is not guaranteed. 
To ensure that we generate // matching canonical IDs for identical maps, we need to sort the keys. @@ -508,10 +679,16 @@ function canonifyArray(arrayValue: ArrayValue): string { export function estimateByteSize(value: Value): number { switch (typeOrder(value)) { case TypeOrder.NullValue: + // MinKeyValue and NullValue has same TypeOrder number, but MinKeyValue is encoded as MapValue + // and its size should be estimated differently. + if ('mapValue' in value) { + return estimateMapByteSize(value.mapValue!); + } return 4; case TypeOrder.BooleanValue: return 4; case TypeOrder.NumberValue: + // TODO(Mila/BSON): return 16 if the value is 128 decimal value return 8; case TypeOrder.TimestampValue: // Timestamps are made up of two distinct numbers (seconds + nanoseconds) @@ -535,6 +712,11 @@ export function estimateByteSize(value: Value): number { return estimateArrayByteSize(value.arrayValue!); case TypeOrder.VectorValue: case TypeOrder.ObjectValue: + case TypeOrder.RegexValue: + case TypeOrder.BsonObjectIdValue: + case TypeOrder.BsonBinaryValue: + case TypeOrder.BsonTimestampValue: + case TypeOrder.MaxKeyValue: return estimateMapByteSize(value.mapValue!); default: throw fail('Invalid value type: ' + JSON.stringify(value)); @@ -619,10 +801,67 @@ export function isMapValue( return !!value && 'mapValue' in value; } -/** Returns true if `value` is a VetorValue. */ +/** Returns true if `value` is a VectorValue. */ export function isVectorValue(value: ProtoValue | null): boolean { - const type = (value?.mapValue?.fields || {})[TYPE_KEY]?.stringValue; - return type === VECTOR_VALUE_SENTINEL; + return !!value && detectSpecialMapType(value) === 'vectorValue'; +} + +/** Returns true if the `Value` represents the canonical {@link #INTERNAL_MAX_VALUE} . 
*/ +export function isMaxValue(value: Value): boolean { + return detectSpecialMapType(value) === 'maxValue'; +} + +function detectSpecialMapType(value: Value): string { + if (!value || !value.mapValue || !value.mapValue.fields) { + return ''; // Not a special map type + } + + const fields = value.mapValue.fields; + + // Check for type-based mappings + const type = fields[TYPE_KEY]?.stringValue; + if (type) { + const typeMap: Record = { + [RESERVED_VECTOR_KEY]: 'vectorValue', + [RESERVED_MAX_KEY]: 'maxValue', + [RESERVED_SERVER_TIMESTAMP_KEY]: 'serverTimestampValue' + }; + if (typeMap[type]) { + return typeMap[type]; + } + } + + // Check for BSON-related mappings + const bsonMap: Record = { + [RESERVED_REGEX_KEY]: 'regexValue', + [RESERVED_BSON_OBJECT_ID_KEY]: 'bsonObjectIdValue', + [RESERVED_INT32_KEY]: 'int32Value', + [RESERVED_BSON_TIMESTAMP_KEY]: 'bsonTimestampValue', + [RESERVED_BSON_BINARY_KEY]: 'bsonBinaryValue', + [RESERVED_MIN_KEY]: 'minKeyValue', + [RESERVED_MAX_KEY]: 'maxKeyValue' + }; + + for (const key in bsonMap) { + if (fields[key]) { + return bsonMap[key]; + } + } + + return ''; +} + +export function isBsonType(value: Value): boolean { + const bsonTypes = new Set([ + 'regexValue', + 'bsonObjectIdValue', + 'int32Value', + 'bsonTimestampValue', + 'bsonBinaryValue', + 'minKeyValue', + 'maxKeyValue' + ]); + return bsonTypes.has(detectSpecialMapType(value)); } /** Creates a deep copy of `source`. */ @@ -652,29 +891,10 @@ export function deepClone(source: Value): Value { } } -/** Returns true if the Value represents the canonical {@link #MAX_VALUE} . 
*/ -export function isMaxValue(value: Value): boolean { - return ( - (((value.mapValue || {}).fields || {})['__type__'] || {}).stringValue === - MAX_VALUE_TYPE - ); -} - -export const MIN_VECTOR_VALUE = { - mapValue: { - fields: { - [TYPE_KEY]: { stringValue: VECTOR_VALUE_SENTINEL }, - [VECTOR_MAP_VECTORS_KEY]: { - arrayValue: {} - } - } - } -}; - /** Returns the lowest value for the given value type (inclusive). */ export function valuesGetLowerBound(value: Value): Value { if ('nullValue' in value) { - return MIN_VALUE; + return INTERNAL_MIN_VALUE; } else if ('booleanValue' in value) { return { booleanValue: false }; } else if ('integerValue' in value || 'doubleValue' in value) { @@ -692,6 +912,7 @@ export function valuesGetLowerBound(value: Value): Value { } else if ('arrayValue' in value) { return { arrayValue: {} }; } else if ('mapValue' in value) { + // TODO(Mila/BSON): add lower bound for bson types for indexing if (isVectorValue(value)) { return MIN_VECTOR_VALUE; } @@ -722,10 +943,11 @@ export function valuesGetUpperBound(value: Value): Value { } else if ('arrayValue' in value) { return MIN_VECTOR_VALUE; } else if ('mapValue' in value) { + // TODO(Mila/BSON): add upper bound for bson types for indexing if (isVectorValue(value)) { return { mapValue: {} }; } - return MAX_VALUE; + return INTERNAL_MAX_VALUE; } else { return fail('Invalid value type: ' + JSON.stringify(value)); } diff --git a/packages/firestore/test/integration/api/database.test.ts b/packages/firestore/test/integration/api/database.test.ts index 1cda49d9229..2856862f6de 100644 --- a/packages/firestore/test/integration/api/database.test.ts +++ b/packages/firestore/test/integration/api/database.test.ts @@ -20,6 +20,7 @@ import { Deferred } from '@firebase/util'; import { expect, use } from 'chai'; import chaiAsPromised from 'chai-as-promised'; +import { AutoId } from '../../../src/util/misc'; import { EventsAccumulator } from '../util/events_accumulator'; import { addDoc, @@ -67,7 +68,15 @@ import 
{ FirestoreError, QuerySnapshot, vector, - getDocsFromServer + getDocsFromServer, + bsonBinaryData, + bsonObjectId, + bsonTimestamp, + int32, + maxKey, + minKey, + regex, + or } from '../util/firebase_export'; import { apiDescribe, @@ -2424,4 +2433,433 @@ apiDescribe('Database', persistence => { }); }); }); + + describe('BSON types', () => { + // TODO(Mila/BSON): simplify the test setup once prod support BSON + const NIGHTLY_PROJECT_ID = 'firestore-sdk-nightly'; + const settings = { + ...DEFAULT_SETTINGS, + host: 'test-firestore.sandbox.googleapis.com' + }; + + it('can write and read BSON types', async () => { + return withTestDbsSettings( + persistence, + NIGHTLY_PROJECT_ID, + settings, + 1, + async dbs => { + const coll = collection(dbs[0], AutoId.newId()); + + const docRef = await addDoc(coll, { + binary: bsonBinaryData(1, new Uint8Array([1, 2, 3])), + objectId: bsonObjectId('507f191e810c19729de860ea'), + int32: int32(1), + min: minKey(), + max: maxKey(), + regex: regex('^foo', 'i') + }); + + await setDoc( + docRef, + { + binary: bsonBinaryData(1, new Uint8Array([1, 2, 3])), + timestamp: bsonTimestamp(1, 2), + int32: int32(2) + }, + { merge: true } + ); + + const snapshot = await getDoc(docRef); + expect( + snapshot + .get('objectId') + .isEqual(bsonObjectId('507f191e810c19729de860ea')) + ).to.be.true; + expect(snapshot.get('int32').isEqual(int32(2))).to.be.true; + expect(snapshot.get('min') === minKey()).to.be.true; + expect(snapshot.get('max') === maxKey()).to.be.true; + expect( + snapshot + .get('binary') + .isEqual(bsonBinaryData(1, new Uint8Array([1, 2, 3]))) + ).to.be.true; + expect(snapshot.get('timestamp').isEqual(bsonTimestamp(1, 2))).to.be + .true; + expect(snapshot.get('regex').isEqual(regex('^foo', 'i'))).to.be.true; + } + ); + }); + + it('can filter and order objectIds', async () => { + const testDocs = { + a: { key: bsonObjectId('507f191e810c19729de860ea') }, + b: { key: bsonObjectId('507f191e810c19729de860eb') }, + c: { key: 
bsonObjectId('507f191e810c19729de860ec') } + }; + + return withTestDbsSettings( + persistence, + NIGHTLY_PROJECT_ID, + settings, + 1, + async dbs => { + const coll = collection(dbs[0], AutoId.newId()); + await addDoc(coll, testDocs['a']); + await addDoc(coll, testDocs['b']); + await addDoc(coll, testDocs['c']); + + let orderedQuery = query( + coll, + where('key', '>', bsonObjectId('507f191e810c19729de860ea')), + orderBy('key', 'desc') + ); + + let snapshot = await getDocs(orderedQuery); + expect(toDataArray(snapshot)).to.deep.equal([ + testDocs['c'], + testDocs['b'] + ]); + + orderedQuery = query( + coll, + where('key', 'in', [ + bsonObjectId('507f191e810c19729de860ea'), + bsonObjectId('507f191e810c19729de860eb') + ]), + orderBy('key', 'desc') + ); + + snapshot = await getDocs(orderedQuery); + expect(toDataArray(snapshot)).to.deep.equal([ + testDocs['b'], + testDocs['a'] + ]); + } + ); + }); + + it('can filter and order Int32 values', async () => { + const testDocs = { + a: { key: int32(-1) }, + b: { key: int32(1) }, + c: { key: int32(2) } + }; + return withTestDbsSettings( + persistence, + NIGHTLY_PROJECT_ID, + settings, + 1, + async dbs => { + const coll = collection(dbs[0], AutoId.newId()); + await addDoc(coll, testDocs['a']); + await addDoc(coll, testDocs['b']); + await addDoc(coll, testDocs['c']); + + let orderedQuery = query( + coll, + where('key', '>=', int32(1)), + orderBy('key', 'desc') + ); + + let snapshot = await getDocs(orderedQuery); + expect(toDataArray(snapshot)).to.deep.equal([ + testDocs['c'], + testDocs['b'] + ]); + + orderedQuery = query( + coll, + where('key', 'not-in', [int32(1)]), + orderBy('key', 'desc') + ); + + snapshot = await getDocs(orderedQuery); + expect(toDataArray(snapshot)).to.deep.equal([ + testDocs['c'], + testDocs['a'] + ]); + } + ); + }); + + it('can filter and order Timestamp values', async () => { + const testDocs = { + a: { key: bsonTimestamp(1, 1) }, + b: { key: bsonTimestamp(1, 2) }, + c: { key: bsonTimestamp(2, 1) } + }; 
+ return withTestDbsSettings( + persistence, + NIGHTLY_PROJECT_ID, + settings, + 1, + async dbs => { + const coll = collection(dbs[0], AutoId.newId()); + await addDoc(coll, testDocs['a']); + await addDoc(coll, testDocs['b']); + await addDoc(coll, testDocs['c']); + + let orderedQuery = query( + coll, + where('key', '>', bsonTimestamp(1, 1)), + orderBy('key', 'desc') + ); + + let snapshot = await getDocs(orderedQuery); + expect(toDataArray(snapshot)).to.deep.equal([ + testDocs['c'], + testDocs['b'] + ]); + + orderedQuery = query( + coll, + where('key', '!=', bsonTimestamp(1, 1)), + orderBy('key', 'desc') + ); + + snapshot = await getDocs(orderedQuery); + expect(toDataArray(snapshot)).to.deep.equal([ + testDocs['c'], + testDocs['b'] + ]); + } + ); + }); + + it('can filter and order Binary values', async () => { + const testDocs = { + a: { key: bsonBinaryData(1, new Uint8Array([1, 2, 3])) }, + b: { key: bsonBinaryData(1, new Uint8Array([1, 2, 4])) }, + c: { key: bsonBinaryData(2, new Uint8Array([1, 2, 3])) } + }; + return withTestDbsSettings( + persistence, + NIGHTLY_PROJECT_ID, + settings, + 1, + async dbs => { + const coll = collection(dbs[0], AutoId.newId()); + await addDoc(coll, testDocs['a']); + await addDoc(coll, testDocs['b']); + await addDoc(coll, testDocs['c']); + + let orderedQuery = query( + coll, + where('key', '>', bsonBinaryData(1, new Uint8Array([1, 2, 3]))), + orderBy('key', 'desc') + ); + + let snapshot = await getDocs(orderedQuery); + expect(toDataArray(snapshot)).to.deep.equal([ + testDocs['c'], + testDocs['b'] + ]); + + orderedQuery = query( + coll, + where('key', '>=', bsonBinaryData(1, new Uint8Array([1, 2, 3]))), + where('key', '<', bsonBinaryData(2, new Uint8Array([1, 2, 3]))), + orderBy('key', 'desc') + ); + + snapshot = await getDocs(orderedQuery); + expect(toDataArray(snapshot)).to.deep.equal([ + testDocs['b'], + testDocs['a'] + ]); + } + ); + }); + + it('can filter and order Regex values', async () => { + const testDocs = { + a: { key: 
regex('^bar', 'i') }, + b: { key: regex('^bar', 'x') }, + c: { key: regex('^baz', 'i') } + }; + return withTestDbsSettings( + persistence, + NIGHTLY_PROJECT_ID, + settings, + 1, + async dbs => { + const coll = collection(dbs[0], AutoId.newId()); + await addDoc(coll, testDocs['a']); + await addDoc(coll, testDocs['b']); + await addDoc(coll, testDocs['c']); + + const orderedQuery = query( + coll, + or( + where('key', '>', regex('^bar', 'x')), + where('key', '!=', regex('^bar', 'x')) + ), + orderBy('key', 'desc') + ); + + const snapshot = await getDocs(orderedQuery); + expect(toDataArray(snapshot)).to.deep.equal([ + testDocs['c'], + testDocs['a'] + ]); + } + ); + }); + + it('can filter and order minKey values', async () => { + const testDocs = { + a: { key: minKey() }, + b: { key: minKey() }, + c: { key: maxKey() } + }; + return withTestDbsSettings( + persistence, + NIGHTLY_PROJECT_ID, + settings, + 1, + async dbs => { + const coll = collection(dbs[0], AutoId.newId()); + await addDoc(coll, testDocs['a']); + await addDoc(coll, testDocs['b']); + await addDoc(coll, testDocs['c']); + + const orderedQuery = query( + coll, + where('key', '==', minKey()), + orderBy('key', 'desc') // minKeys are equal, would sort by documentId as secondary order + ); + const snapshot = await getDocs(orderedQuery); + expect(toDataArray(snapshot)).to.deep.equal([ + testDocs['b'], + testDocs['a'] + ]); + } + ); + }); + + it('can filter and order maxKey values', async () => { + const testDocs = { + a: { key: minKey() }, + b: { key: maxKey() }, + c: { key: maxKey() } + }; + return withTestDbsSettings( + persistence, + NIGHTLY_PROJECT_ID, + settings, + 1, + async dbs => { + const coll = collection(dbs[0], AutoId.newId()); + await addDoc(coll, testDocs['a']); + await addDoc(coll, testDocs['b']); + await addDoc(coll, testDocs['c']); + + const orderedQuery = query( + coll, + where('key', '==', maxKey()), + orderBy('key', 'desc') // maxKeys are equal, would sort by documentId as secondary order + ); + 
const snapshot = await getDocs(orderedQuery); + expect(toDataArray(snapshot)).to.deep.equal([ + testDocs['c'], + testDocs['b'] + ]); + } + ); + }); + + it('can listen to documents with bson types', async () => { + const testDocs = { + a: { key: maxKey() }, + b: { key: minKey() }, + c: { key: bsonTimestamp(1, 2) }, + d: { key: bsonObjectId('507f191e810c19729de860ea') }, + e: { key: bsonBinaryData(1, new Uint8Array([1, 2, 3])) }, + f: { key: regex('^foo', 'i') } + }; + return withTestDbsSettings( + persistence, + NIGHTLY_PROJECT_ID, + settings, + 1, + async dbs => { + const coll = collection(dbs[0], AutoId.newId()); + await addDoc(coll, testDocs['a']); + await addDoc(coll, testDocs['b']); + await addDoc(coll, testDocs['c']); + await addDoc(coll, testDocs['d']); + await addDoc(coll, testDocs['e']); + await addDoc(coll, testDocs['f']); + + const orderedQuery = query(coll, orderBy('key', 'asc')); + + const storeEvent = new EventsAccumulator(); + const unsubscribe = onSnapshot(orderedQuery, storeEvent.storeEvent); + + let listenSnapshot = await storeEvent.awaitEvent(); + expect(toDataArray(listenSnapshot)).to.deep.equal([ + testDocs['b'], + testDocs['c'], + testDocs['e'], + testDocs['d'], + testDocs['f'], + testDocs['a'] + ]); + + const newData = { key: int32(2) }; + await setDoc(doc(coll, 'g'), newData); + listenSnapshot = await storeEvent.awaitEvent(); + expect(toDataArray(listenSnapshot)).to.deep.equal([ + testDocs['b'], + newData, + testDocs['c'], + testDocs['e'], + testDocs['d'], + testDocs['f'], + testDocs['a'] + ]); + + unsubscribe(); + } + ); + }); + + // TODO(Mila/BSON): Skip the runTransaction tests against nightly when running on browsers. 
remove when it is supported by prod + // eslint-disable-next-line no-restricted-properties + it.skip('can run transactions on documents with bson types', async () => { + const testDocs = { + a: { key: bsonTimestamp(1, 2) }, + b: { key: regex('^foo', 'i') }, + c: { key: bsonBinaryData(1, new Uint8Array([1, 2, 3])) } + }; + return withTestDbsSettings( + persistence, + NIGHTLY_PROJECT_ID, + settings, + 1, + async dbs => { + const coll = collection(dbs[0], AutoId.newId()); + const docA = await addDoc(coll, testDocs['a']); + const docB = await addDoc(coll, { key: 'place holder' }); + const docC = await addDoc(coll, testDocs['c']); + + await runTransaction(dbs[0], async transaction => { + const docSnapshot = await transaction.get(docA); + expect(docSnapshot.data()).to.deep.equal(testDocs['a']); + transaction.set(docB, testDocs['b']); + transaction.delete(docC); + }); + + const orderedQuery = query(coll, orderBy('key', 'asc')); + const snapshot = await getDocs(orderedQuery); + + expect(toDataArray(snapshot)).to.deep.equal([ + testDocs['a'], + testDocs['b'] + ]); + } + ); + }); + }); }); diff --git a/packages/firestore/test/integration/api/type.test.ts b/packages/firestore/test/integration/api/type.test.ts index 0fd9c19ccad..a6218f6a1ad 100644 --- a/packages/firestore/test/integration/api/type.test.ts +++ b/packages/firestore/test/integration/api/type.test.ts @@ -17,25 +17,46 @@ import { expect } from 'chai'; +import { AutoId } from '../../../src/util/misc'; import { addEqualityMatcher } from '../../util/equality_matcher'; import { EventsAccumulator } from '../util/events_accumulator'; import { + bsonBinaryData, + bsonObjectId, + bsonTimestamp, Bytes, collection, doc, + DocumentData, + DocumentReference, DocumentSnapshot, Firestore, + FirestoreError, GeoPoint, getDoc, getDocs, + int32, + maxKey, + minKey, onSnapshot, + orderBy, + query, QuerySnapshot, + refEqual, + regex, runTransaction, setDoc, Timestamp, - updateDoc + updateDoc, + vector } from '../util/firebase_export'; 
-import { apiDescribe, withTestDb, withTestDoc } from '../util/helpers'; +import { + apiDescribe, + withTestDb, + withTestDbsSettings, + withTestDoc +} from '../util/helpers'; +import { DEFAULT_SETTINGS } from '../util/settings'; apiDescribe('Firestore', persistence => { addEqualityMatcher(); @@ -82,6 +103,43 @@ apiDescribe('Firestore', persistence => { return docSnapshot; } + // TODO(Mila/BSON): Transactions against nightly is having issue, remove this after prod supports BSON + async function expectRoundtripWithoutTransaction( + db: Firestore, + data: {}, + validateSnapshots = true, + expectedData?: {} + ): Promise { + expectedData = expectedData ?? data; + + const collRef = collection(db, doc(collection(db, 'a')).id); + const docRef = doc(collRef); + + await setDoc(docRef, data); + let docSnapshot = await getDoc(docRef); + expect(docSnapshot.data()).to.deep.equal(expectedData); + + await updateDoc(docRef, data); + docSnapshot = await getDoc(docRef); + expect(docSnapshot.data()).to.deep.equal(expectedData); + + if (validateSnapshots) { + let querySnapshot = await getDocs(collRef); + docSnapshot = querySnapshot.docs[0]; + expect(docSnapshot.data()).to.deep.equal(expectedData); + + const eventsAccumulator = new EventsAccumulator(); + const unlisten = onSnapshot(collRef, eventsAccumulator.storeEvent); + querySnapshot = await eventsAccumulator.awaitEvent(); + docSnapshot = querySnapshot.docs[0]; + expect(docSnapshot.data()).to.deep.equal(expectedData); + + unlisten(); + } + + return docSnapshot; + } + it('can read and write null fields', () => { return withTestDb(persistence, async db => { await expectRoundtrip(db, { a: 1, b: null }); @@ -177,4 +235,337 @@ apiDescribe('Firestore', persistence => { await expectRoundtrip(db, { a: 42, refs: [doc] }); }); }); + + it('can read and write vector fields', () => { + return withTestDoc(persistence, async (doc, db) => { + await expectRoundtrip(db, { vector: vector([1, 2, 3]) }); + }); + }); + + // TODO(Mila/BSON): simplify the 
test setup once prod support BSON + const NIGHTLY_PROJECT_ID = 'firestore-sdk-nightly'; + const settings = { + ...DEFAULT_SETTINGS, + host: 'test-firestore.sandbox.googleapis.com' + }; + + it('can read and write minKey fields', () => { + return withTestDbsSettings( + persistence, + NIGHTLY_PROJECT_ID, + settings, + 1, + async dbs => { + await expectRoundtripWithoutTransaction(dbs[0], { min: minKey() }); + } + ); + }); + + it('can read and write maxKey fields', () => { + return withTestDbsSettings( + persistence, + NIGHTLY_PROJECT_ID, + settings, + 1, + async dbs => { + await expectRoundtripWithoutTransaction(dbs[0], { max: maxKey() }); + } + ); + }); + + it('can read and write regex fields', () => { + return withTestDbsSettings( + persistence, + NIGHTLY_PROJECT_ID, + settings, + 1, + async dbs => { + await expectRoundtripWithoutTransaction(dbs[0], { + regex: regex('^foo', 'i') + }); + } + ); + }); + + it('can read and write int32 fields', () => { + return withTestDbsSettings( + persistence, + NIGHTLY_PROJECT_ID, + settings, + 1, + async dbs => { + await expectRoundtripWithoutTransaction(dbs[0], { int32: int32(1) }); + } + ); + }); + + it('can read and write bsonTimestamp fields', () => { + return withTestDbsSettings( + persistence, + NIGHTLY_PROJECT_ID, + settings, + 1, + async dbs => { + await expectRoundtripWithoutTransaction(dbs[0], { + bsonTimestamp: bsonTimestamp(1, 2) + }); + } + ); + }); + + it('can read and write bsonObjectId fields', () => { + return withTestDbsSettings( + persistence, + NIGHTLY_PROJECT_ID, + settings, + 1, + async dbs => { + await expectRoundtripWithoutTransaction(dbs[0], { + objectId: bsonObjectId('507f191e810c19729de860ea') + }); + } + ); + }); + + it('can read and write bsonBinaryData fields', () => { + return withTestDbsSettings( + persistence, + NIGHTLY_PROJECT_ID, + settings, + 1, + async dbs => { + await expectRoundtripWithoutTransaction(dbs[0], { + binary: bsonBinaryData(1, new Uint8Array([1, 2, 3])) + }); + } + ); + }); + + 
it('can read and write bson fields in an array', () => { + return withTestDbsSettings( + persistence, + NIGHTLY_PROJECT_ID, + settings, + 1, + async dbs => { + await expectRoundtripWithoutTransaction(dbs[0], { + array: [ + bsonBinaryData(1, new Uint8Array([1, 2, 3])), + bsonObjectId('507f191e810c19729de860ea'), + int32(1), + minKey(), + maxKey(), + regex('^foo', 'i') + ] + }); + } + ); + }); + + it('can read and write bson fields in an object', () => { + return withTestDbsSettings( + persistence, + NIGHTLY_PROJECT_ID, + settings, + 1, + async dbs => { + await expectRoundtripWithoutTransaction(dbs[0], { + object: { + binary: bsonBinaryData(1, new Uint8Array([1, 2, 3])), + objectId: bsonObjectId('507f191e810c19729de860ea'), + int32: int32(1), + min: minKey(), + max: maxKey(), + regex: regex('^foo', 'i') + } + }); + } + ); + }); + + it('invalid 32-bit integer gets rejected', async () => { + return withTestDbsSettings( + persistence, + NIGHTLY_PROJECT_ID, + settings, + 1, + async dbs => { + const docRef = doc(dbs[0], 'test-collection/test-doc'); + let errorMessage; + try { + await setDoc(docRef, { key: int32(2147483648) }); + } catch (err) { + errorMessage = (err as FirestoreError)?.message; + } + expect(errorMessage).to.contains( + "The field '__int__' value (2,147,483,648) is too large to be converted to a 32-bit integer." + ); + + try { + await setDoc(docRef, { key: int32(-2147483650) }); + } catch (err) { + errorMessage = (err as FirestoreError)?.message; + } + expect(errorMessage).to.contains( + "The field '__int__' value (-2,147,483,650) is too large to be converted to a 32-bit integer." 
+ ); + } + ); + }); + + it('invalid BSON timestamp gets rejected', async () => { + return withTestDbsSettings( + persistence, + NIGHTLY_PROJECT_ID, + settings, + 1, + async dbs => { + const docRef = doc(dbs[0], 'test-collection/test-doc'); + let errorMessage; + try { + // BSON timestamp larger than 32-bit integer gets rejected + await setDoc(docRef, { key: bsonTimestamp(4294967296, 2) }); + } catch (err) { + errorMessage = (err as FirestoreError)?.message; + } + expect(errorMessage).to.contains( + "The field 'seconds' value (4,294,967,296) does not represent an unsigned 32-bit integer." + ); + + try { + // negative BSON timestamp gets rejected + await setDoc(docRef, { key: bsonTimestamp(-1, 2) }); + } catch (err) { + errorMessage = (err as FirestoreError)?.message; + } + expect(errorMessage).to.contains( + "The field 'seconds' value (-1) does not represent an unsigned 32-bit integer." + ); + } + ); + }); + + it('invalid regex value gets rejected', async () => { + return withTestDbsSettings( + persistence, + NIGHTLY_PROJECT_ID, + settings, + 1, + async dbs => { + const docRef = doc(dbs[0], 'test-collection/test-doc'); + let errorMessage; + try { + await setDoc(docRef, { key: regex('foo', 'a') }); + } catch (err) { + errorMessage = (err as FirestoreError)?.message; + } + expect(errorMessage).to.contains( + "Invalid regex option 'a'. Supported options are 'i', 'm', 's', 'u', and 'x'." + ); + } + ); + }); + + it('invalid bsonObjectId value gets rejected', async () => { + return withTestDbsSettings( + persistence, + NIGHTLY_PROJECT_ID, + settings, + 1, + async dbs => { + const docRef = doc(dbs[0], 'test-collection/test-doc'); + + let errorMessage; + try { + // bsonObjectId with length not equal to 24 gets rejected + await setDoc(docRef, { key: bsonObjectId('foo') }); + } catch (err) { + errorMessage = (err as FirestoreError)?.message; + } + expect(errorMessage).to.contains( + 'Object ID hex string has incorrect length.' 
+ ); + } + ); + }); + + it('invalid bsonBinaryData value gets rejected', async () => { + return withTestDbsSettings( + persistence, + NIGHTLY_PROJECT_ID, + settings, + 1, + async dbs => { + const docRef = doc(dbs[0], 'test-collection/test-doc'); + let errorMessage; + try { + await setDoc(docRef, { + key: bsonBinaryData(1234, new Uint8Array([1, 2, 3])) + }); + } catch (err) { + errorMessage = (err as FirestoreError)?.message; + } + expect(errorMessage).to.contains( + 'The subtype for BsonBinaryData must be a value in the inclusive [0, 255] range.' + ); + } + ); + }); + + it('can order values of different TypeOrder together', async () => { + const testDocs: { [key: string]: DocumentData } = { + nullValue: { key: null }, + minValue: { key: minKey() }, + booleanValue: { key: true }, + nanValue: { key: NaN }, + int32Value: { key: int32(1) }, + doubleValue: { key: 2.0 }, + integerValue: { key: 3 }, + timestampValue: { key: new Timestamp(100, 123456000) }, + bsonTimestampValue: { key: bsonTimestamp(1, 2) }, + stringValue: { key: 'string' }, + bytesValue: { key: Bytes.fromUint8Array(new Uint8Array([0, 1, 255])) }, + bsonBinaryValue: { key: bsonBinaryData(1, new Uint8Array([1, 2, 3])) }, + // referenceValue: {key: ref('coll/doc')}, + referenceValue: { key: 'placeholder' }, + objectIdValue: { key: bsonObjectId('507f191e810c19729de860ea') }, + geoPointValue: { key: new GeoPoint(0, 0) }, + regexValue: { key: regex('^foo', 'i') }, + arrayValue: { key: [1, 2] }, + vectorValue: { key: vector([1, 2]) }, + objectValue: { key: { a: 1 } }, + maxValue: { key: maxKey() } + }; + + return withTestDbsSettings( + persistence, + NIGHTLY_PROJECT_ID, + settings, + 1, + async dbs => { + const coll = collection(dbs[0], AutoId.newId()); + for (const key of Object.keys(testDocs)) { + await setDoc(doc(coll, key), testDocs[key]); + } + + // TODO(Mila/BSON): replace after prod supports bson + const docRef = doc(coll, 'doc'); + await setDoc(doc(coll, 'referenceValue'), { key: docRef }); + + const 
orderedQuery = query(coll, orderBy('key')); + const snapshot = await getDocs(orderedQuery); + for (let i = 0; i < snapshot.docs.length; i++) { + const actualDoc = snapshot.docs[i].data().key; + const expectedDoc = + testDocs[snapshot.docs[i].id as keyof typeof testDocs].key; + if (actualDoc instanceof DocumentReference) { + // deep.equal doesn't work with DocumentReference + expect(refEqual(actualDoc, docRef)).to.be.true; + } else { + expect(actualDoc).to.deep.equal(expectedDoc); + } + } + } + ); + }); }); diff --git a/packages/firestore/test/lite/integration.test.ts b/packages/firestore/test/lite/integration.test.ts index 780db5f4f9c..9b647587503 100644 --- a/packages/firestore/test/lite/integration.test.ts +++ b/packages/firestore/test/lite/integration.test.ts @@ -40,8 +40,15 @@ import { FieldValue } from '../../src/lite-api/field_value'; import { arrayRemove, arrayUnion, + bsonBinaryData, + bsonObjectId, + bsonTimestamp, deleteField, increment, + int32, + maxKey, + minKey, + regex, serverTimestamp, vector } from '../../src/lite-api/field_value_impl'; @@ -2960,3 +2967,44 @@ describe('Vectors', () => { }); }); }); + +// eslint-disable-next-line no-restricted-properties +describe.skip('BSON types', () => { + // TODO(Mila/BSON): enable this test once prod supports bson + it('can be read and written using the lite SDK', async () => { + return withTestCollection(async coll => { + const ref = await addDoc(coll, { + objectId: bsonObjectId('507f191e810c19729de860ea'), + int32: int32(1), + min: minKey(), + max: maxKey(), + regex: regex('^foo', 'i') + }); + + await setDoc( + ref, + { + binary: bsonBinaryData(1, new Uint8Array([1, 2, 3])), + timestamp: bsonTimestamp(1, 2), + int32: int32(2) + }, + { merge: true } + ); + + const snap1 = await getDoc(ref); + expect( + snap1.get('objectId').isEqual(bsonObjectId('507f191e810c19729de860ea')) + ).to.be.true; + expect(snap1.get('int32').isEqual(int32(2))).to.be.true; + expect(snap1.get('min') === minKey()).to.be.true; + 
expect(snap1.get('max') === maxKey()).to.be.true; + expect( + snap1 + .get('binary') + .isEqual(bsonBinaryData(1, new Uint8Array([1, 2, 3]))) + ).to.be.true; + expect(snap1.get('timestamp').isEqual(bsonTimestamp(1, 2))).to.be.true; + expect(snap1.get('regex').isEqual(regex('^foo', 'i'))).to.be.true; + }); + }); +}); diff --git a/packages/firestore/test/unit/model/document.test.ts b/packages/firestore/test/unit/model/document.test.ts index cfb93d15e6f..2c2387cca63 100644 --- a/packages/firestore/test/unit/model/document.test.ts +++ b/packages/firestore/test/unit/model/document.test.ts @@ -17,6 +17,15 @@ import { expect } from 'chai'; +import { + bsonBinaryData, + bsonObjectId, + bsonTimestamp, + int32, + maxKey, + minKey, + regex +} from '../../../src/lite-api/field_value_impl'; import { doc, expectEqual, @@ -44,6 +53,34 @@ describe('Document', () => { expect(document.hasLocalMutations).to.equal(false); }); + it('can be constructed with bson types', () => { + const data = { + objectId: bsonObjectId('foo'), + binary: bsonBinaryData(1, new Uint8Array([1, 2, 3])), + timestamp: bsonTimestamp(1, 2), + min: minKey(), + max: maxKey(), + regex: regex('a', 'b'), + int32: int32(1) + }; + const document = doc('rooms/Eros', 1, data); + + const value = document.data; + expect(value.value).to.deep.equal( + wrap({ + objectId: bsonObjectId('foo'), + binary: bsonBinaryData(1, new Uint8Array([1, 2, 3])), + timestamp: bsonTimestamp(1, 2), + min: minKey(), + max: maxKey(), + regex: regex('a', 'b'), + int32: int32(1) + }) + ); + expect(value).not.to.equal(data); + expect(document.hasLocalMutations).to.equal(false); + }); + it('returns fields correctly', () => { const data = { desc: 'Discuss all the project related stuff', diff --git a/packages/firestore/test/unit/model/object_value.test.ts b/packages/firestore/test/unit/model/object_value.test.ts index 9e96056d957..13cfa02131b 100644 --- a/packages/firestore/test/unit/model/object_value.test.ts +++ 
b/packages/firestore/test/unit/model/object_value.test.ts @@ -17,7 +17,16 @@ import { expect } from 'chai'; -import { vector } from '../../../src/lite-api/field_value_impl'; +import { + vector, + bsonObjectId, + bsonBinaryData, + bsonTimestamp, + int32, + regex, + minKey, + maxKey +} from '../../../src/lite-api/field_value_impl'; import { extractFieldMask, ObjectValue } from '../../../src/model/object_value'; import { TypeOrder } from '../../../src/model/type_order'; import { typeOrder } from '../../../src/model/values'; @@ -27,7 +36,16 @@ describe('ObjectValue', () => { it('can extract fields', () => { const objValue = wrapObject({ foo: { a: 1, b: true, c: 'string' }, - embedding: vector([1]) + embedding: vector([1]), + bson: { + objectId: bsonObjectId('foo'), + binary: bsonBinaryData(1, new Uint8Array([1, 2, 3])), + timestamp: bsonTimestamp(1, 2), + min: minKey(), + max: maxKey(), + regex: regex('a', 'b'), + int32: int32(1) + } }); expect(typeOrder(objValue.field(field('foo'))!)).to.equal( @@ -45,6 +63,27 @@ describe('ObjectValue', () => { expect(typeOrder(objValue.field(field('embedding'))!)).to.equal( TypeOrder.VectorValue ); + expect(typeOrder(objValue.field(field('bson.objectId'))!)).to.equal( + TypeOrder.BsonObjectIdValue + ); + expect(typeOrder(objValue.field(field('bson.binary'))!)).to.equal( + TypeOrder.BsonBinaryValue + ); + expect(typeOrder(objValue.field(field('bson.timestamp'))!)).to.equal( + TypeOrder.BsonTimestampValue + ); + expect(typeOrder(objValue.field(field('bson.min'))!)).to.equal( + TypeOrder.MinKeyValue + ); + expect(typeOrder(objValue.field(field('bson.max'))!)).to.equal( + TypeOrder.MaxKeyValue + ); + expect(typeOrder(objValue.field(field('bson.regex'))!)).to.equal( + TypeOrder.RegexValue + ); + expect(typeOrder(objValue.field(field('bson.int32'))!)).to.equal( + TypeOrder.NumberValue + ); expect(objValue.field(field('foo.a.b'))).to.be.null; expect(objValue.field(field('bar'))).to.be.null; @@ -60,13 +99,42 @@ describe('ObjectValue', () => 
{ expect(objValue.field(field('foo.a'))).to.deep.equal(wrap(1)); expect(objValue.field(field('foo.b'))).to.deep.equal(wrap(true)); expect(objValue.field(field('foo.c'))).to.deep.equal(wrap('string')); + + expect(objValue.field(field('bson'))!).to.deep.equal( + wrap({ + objectId: bsonObjectId('foo'), + binary: bsonBinaryData(1, new Uint8Array([1, 2, 3])), + timestamp: bsonTimestamp(1, 2), + min: minKey(), + max: maxKey(), + regex: regex('a', 'b'), + int32: int32(1) + }) + ); + expect(objValue.field(field('bson.objectId'))!).to.deep.equal( + wrap(bsonObjectId('foo')) + ); + expect(objValue.field(field('bson.binary'))!).to.deep.equal( + wrap(bsonBinaryData(1, new Uint8Array([1, 2, 3]))) + ); + expect(objValue.field(field('bson.timestamp'))!).to.deep.equal( + wrap(bsonTimestamp(1, 2)) + ); + expect(objValue.field(field('bson.min'))!).to.deep.equal(wrap(minKey())); + expect(objValue.field(field('bson.max'))!).to.deep.equal(wrap(maxKey())); + expect(objValue.field(field('bson.regex'))!).to.deep.equal( + wrap(regex('a', 'b')) + ); + expect(objValue.field(field('bson.int32'))!).to.deep.equal(wrap(int32(1))); }); it('can overwrite existing fields', () => { const objValue = wrapObject({ foo: 'foo-value' }); objValue.set(field('foo'), wrap('new-foo-value')); - assertObjectEquals(objValue, { foo: 'new-foo-value' }); + assertObjectEquals(objValue, { + foo: 'new-foo-value' + }); }); it('can add new fields', () => { @@ -163,11 +231,77 @@ describe('ObjectValue', () => { assertObjectEquals(objValue, {}); }); + it('can handle bson types in ObjectValue', () => { + const objValue = ObjectValue.empty(); + // Add new fields + objValue.set(field('objectId'), wrap(bsonObjectId('foo-value'))); + objValue.set( + field('binary'), + wrap(bsonBinaryData(1, new Uint8Array([1, 2, 3]))) + ); + objValue.set(field('timestamp'), wrap(bsonTimestamp(1, 2))); + objValue.set(field('regex'), wrap(regex('a', 'b'))); + objValue.set(field('int32'), wrap(int32(1))); + objValue.set(field('min'), 
wrap(minKey())); + objValue.set(field('max'), wrap(maxKey())); + + assertObjectEquals(objValue, { + objectId: bsonObjectId('foo-value'), + binary: bsonBinaryData(1, new Uint8Array([1, 2, 3])), + timestamp: bsonTimestamp(1, 2), + regex: regex('a', 'b'), + int32: int32(1), + min: minKey(), + max: maxKey() + }); + + // Overwrite existing fields + objValue.set(field('objectId'), wrap(bsonObjectId('new-foo-value'))); + + // Create nested objects + objValue.set( + field('foo.binary'), + wrap(bsonBinaryData(2, new Uint8Array([1, 2, 3]))) + ); + objValue.set(field('foo.timestamp'), wrap(bsonTimestamp(1, 2))); + + // Delete fields + objValue.delete(field('binary')); + + // overwrite nested objects + objValue.set(field('foo.timestamp'), wrap(bsonTimestamp(2, 1))); + + // Overwrite primitive values to create objects + objValue.set(field('min'), wrap(null)); + + assertObjectEquals(objValue, { + objectId: bsonObjectId('new-foo-value'), + timestamp: bsonTimestamp(1, 2), + regex: regex('a', 'b'), + int32: int32(1), + min: null, + max: maxKey(), + foo: { + binary: bsonBinaryData(2, new Uint8Array([1, 2, 3])), + timestamp: bsonTimestamp(2, 1) + } + }); + }); + it('provides field mask', () => { const objValue = wrapObject({ a: 'b', map: { a: 1, b: true, c: 'string', nested: { d: 'e' } }, - emptymap: {} + emptymap: {}, + bar: { + objectId: bsonObjectId('foo'), + binary: bsonBinaryData(1, new Uint8Array([1, 2, 3])), + timestamp: bsonTimestamp(1, 2), + min: minKey(), + max: maxKey(), + regex: regex('a', 'b'), + int32: int32(1) + } }); const expectedMask = mask( 'a', @@ -175,7 +309,14 @@ describe('ObjectValue', () => { 'map.b', 'map.c', 'map.nested.d', - 'emptymap' + 'emptymap', + 'bar.objectId', + 'bar.binary', + 'bar.timestamp', + 'bar.min', + 'bar.max', + 'bar.regex', + 'bar.int32' ); const actualMask = extractFieldMask(objValue.value.mapValue); expect(actualMask.isEqual(expectedMask)).to.be.true; @@ -185,6 +326,6 @@ describe('ObjectValue', () => { objValue: ObjectValue, data: { [k: 
string]: unknown } ): void { - expect(objValue.isEqual(wrapObject(data))); + expect(objValue.isEqual(wrapObject(data))).to.be.true; } }); diff --git a/packages/firestore/test/unit/model/target.test.ts b/packages/firestore/test/unit/model/target.test.ts index bbeea5dec83..1fa2e58b298 100644 --- a/packages/firestore/test/unit/model/target.test.ts +++ b/packages/firestore/test/unit/model/target.test.ts @@ -31,8 +31,8 @@ import { import { IndexKind } from '../../../src/model/field_index'; import { canonicalId, - MAX_VALUE, - MIN_VALUE, + INTERNAL_MAX_VALUE, + INTERNAL_MIN_VALUE, valueEquals } from '../../../src/model/values'; import { @@ -207,11 +207,11 @@ describe('Target Bounds', () => { const index = fieldIndex('c', { fields: [['foo', IndexKind.ASCENDING]] }); const lowerBound = targetGetLowerBound(target, index); - expect(lowerBound?.position[0]).to.equal(MIN_VALUE); + expect(lowerBound?.position[0]).to.equal(INTERNAL_MIN_VALUE); expect(lowerBound?.inclusive).to.be.true; const upperBound = targetGetUpperBound(target, index); - expect(upperBound?.position[0]).to.equal(MAX_VALUE); + expect(upperBound?.position[0]).to.equal(INTERNAL_MAX_VALUE); expect(upperBound?.inclusive).to.be.true; }); @@ -241,7 +241,7 @@ describe('Target Bounds', () => { verifyBound(lowerBound, true, 'bar'); const upperBound = targetGetUpperBound(target, index); - expect(upperBound?.position[0]).to.equal(MAX_VALUE); + expect(upperBound?.position[0]).to.equal(INTERNAL_MAX_VALUE); expect(upperBound?.inclusive).to.be.true; }); @@ -337,7 +337,7 @@ describe('Target Bounds', () => { const index = fieldIndex('c', { fields: [['foo', IndexKind.ASCENDING]] }); const lowerBound = targetGetLowerBound(target, index); - expect(lowerBound?.position[0]).to.equal(MIN_VALUE); + expect(lowerBound?.position[0]).to.equal(INTERNAL_MIN_VALUE); expect(lowerBound?.inclusive).to.be.true; const upperBound = targetGetUpperBound(target, index); diff --git a/packages/firestore/test/unit/model/values.test.ts 
b/packages/firestore/test/unit/model/values.test.ts index 722d2db6fa5..bf46386c800 100644 --- a/packages/firestore/test/unit/model/values.test.ts +++ b/packages/firestore/test/unit/model/values.test.ts @@ -19,7 +19,23 @@ import { expect } from 'chai'; import { GeoPoint, Timestamp } from '../../../src'; import { DatabaseId } from '../../../src/core/database_info'; -import { vector } from '../../../src/lite-api/field_value_impl'; +import { BsonBinaryData } from '../../../src/lite-api/bson_binary_data'; +import { BsonObjectId } from '../../../src/lite-api/bson_object_Id'; +import { BsonTimestampValue } from '../../../src/lite-api/bson_timestamp_value'; +import { + vector, + regex, + bsonTimestamp, + int32, + bsonBinaryData, + bsonObjectId, + minKey, + maxKey +} from '../../../src/lite-api/field_value_impl'; +import { Int32Value } from '../../../src/lite-api/int32_value'; +import { MaxKey } from '../../../src/lite-api/max_key'; +import { MinKey } from '../../../src/lite-api/min_key'; +import { RegexValue } from '../../../src/lite-api/regex_value'; import { serverTimestamp } from '../../../src/model/server_timestamps'; import { canonicalId, @@ -31,7 +47,7 @@ import { valuesGetLowerBound, valuesGetUpperBound, TYPE_KEY, - VECTOR_VALUE_SENTINEL, + RESERVED_VECTOR_KEY, VECTOR_MAP_VECTORS_KEY } from '../../../src/model/values'; import * as api from '../../../src/protos/firestore_proto_api'; @@ -55,7 +71,14 @@ describe('Values', () => { const values: api.Value[][] = [ [wrap(true), wrap(true)], [wrap(false), wrap(false)], - [wrap(null), wrap(null)], + // MinKeys are all equal, and sort the same as null. + [ + wrap(null), + wrap(null), + wrap(minKey()), + wrap(minKey()), + wrap(MinKey.instance()) + ], [wrap(0 / 0), wrap(Number.NaN), wrap(NaN)], // -0.0 and 0.0 order the same but are not considered equal. 
[wrap(-0.0)], @@ -92,7 +115,21 @@ describe('Values', () => { [wrap({ bar: 1, foo: 1 })], [wrap({ foo: 1 })], [wrap(vector([]))], - [wrap(vector([1, 2.3, -4.0]))] + [wrap(vector([1, 2.3, -4.0]))], + [wrap(regex('^foo', 'i')), wrap(new RegexValue('^foo', 'i'))], + [wrap(bsonTimestamp(57, 4)), wrap(new BsonTimestampValue(57, 4))], + [ + wrap(bsonBinaryData(128, Uint8Array.from([7, 8, 9]))), + wrap(new BsonBinaryData(128, Uint8Array.from([7, 8, 9]))), + wrap(bsonBinaryData(128, Buffer.from([7, 8, 9]))), + wrap(new BsonBinaryData(128, Buffer.from([7, 8, 9]))) + ], + [ + wrap(bsonObjectId('123456789012')), + wrap(new BsonObjectId('123456789012')) + ], + [wrap(int32(255)), wrap(new Int32Value(255))], + [wrap(maxKey()), wrap(maxKey()), wrap(MaxKey.instance())] ]; expectEqualitySets(values, (v1, v2) => valueEquals(v1, v2)); }); @@ -129,7 +166,7 @@ describe('Values', () => { it('orders types correctly', () => { const groups = [ // null first - [wrap(null)], + [wrap(null), wrap(minKey())], // booleans [wrap(false)], @@ -141,15 +178,24 @@ describe('Values', () => { [wrap(-Number.MAX_VALUE)], [wrap(Number.MIN_SAFE_INTEGER - 1)], [wrap(Number.MIN_SAFE_INTEGER)], + // 64-bit and 32-bit integers order together numerically. + [{ integerValue: -2147483648 }, wrap(int32(-2147483648))], [wrap(-1.1)], - // Integers and Doubles order the same. - [{ integerValue: -1 }, { doubleValue: -1 }], + // Integers, Int32Values and Doubles order the same. + [{ integerValue: -1 }, { doubleValue: -1 }, wrap(int32(-1))], [wrap(-Number.MIN_VALUE)], // zeros all compare the same. 
- [{ integerValue: 0 }, { doubleValue: 0 }, { doubleValue: -0 }], + [ + { integerValue: 0 }, + { doubleValue: 0 }, + { doubleValue: -0 }, + wrap(int32(0)) + ], [wrap(Number.MIN_VALUE)], - [{ integerValue: 1 }, { doubleValue: 1 }], + [{ integerValue: 1 }, { doubleValue: 1.0 }, wrap(int32(1))], [wrap(1.1)], + [wrap(int32(2))], + [wrap(int32(2147483647))], [wrap(Number.MAX_SAFE_INTEGER)], [wrap(Number.MAX_SAFE_INTEGER + 1)], [wrap(Infinity)], @@ -164,6 +210,11 @@ describe('Values', () => { { timestampValue: '2020-04-05T14:30:01.000000000Z' } ], + // request timestamp + [wrap(bsonTimestamp(123, 4))], + [wrap(bsonTimestamp(123, 5))], + [wrap(bsonTimestamp(124, 0))], + // server timestamps come after all concrete timestamps. [serverTimestamp(Timestamp.fromDate(date1), null)], [serverTimestamp(Timestamp.fromDate(date2), null)], @@ -187,6 +238,13 @@ describe('Values', () => { [wrap(blob(0, 1, 2, 4, 3))], [wrap(blob(255))], + [ + wrap(bsonBinaryData(5, Buffer.from([1, 2, 3]))), + wrap(bsonBinaryData(5, new Uint8Array([1, 2, 3]))) + ], + [wrap(bsonBinaryData(7, Buffer.from([1])))], + [wrap(bsonBinaryData(7, new Uint8Array([2])))], + // reference values [refValue(dbId('p1', 'd1'), key('c1/doc1'))], [refValue(dbId('p1', 'd1'), key('c1/doc2'))], @@ -195,6 +253,13 @@ describe('Values', () => { [refValue(dbId('p1', 'd2'), key('c1/doc1'))], [refValue(dbId('p2', 'd1'), key('c1/doc1'))], + // ObjectId + [wrap(bsonObjectId('foo')), wrap(bsonObjectId('foo'))], + // TODO(Mila/BSON): uncomment after string sort bug is fixed + // [wrap(bsonObjectId('Ḟoo'))], // with latin capital letter f with dot above + // [wrap(bsonObjectId('foo\u0301'))], // with combining acute accent + [wrap(bsonObjectId('xyz'))], + // geo points [wrap(new GeoPoint(-90, -180))], [wrap(new GeoPoint(-90, 0))], @@ -209,6 +274,12 @@ describe('Values', () => { [wrap(new GeoPoint(90, 0))], [wrap(new GeoPoint(90, 180))], + // regular expressions + [wrap(regex('a', 'bar1'))], + [wrap(regex('foo', 'bar1'))], + 
[wrap(regex('foo', 'bar2'))], + [wrap(regex('go', 'bar1'))], + // arrays [wrap([])], [wrap(['bar'])], @@ -227,7 +298,10 @@ describe('Values', () => { [wrap({ bar: 0, foo: 1 })], [wrap({ foo: 1 })], [wrap({ foo: 2 })], - [wrap({ foo: '0' })] + [wrap({ foo: '0' })], + + // MaxKey + [wrap(maxKey())] ]; expectCorrectComparisonGroups( @@ -331,7 +405,31 @@ describe('Values', () => { { expectedByteSize: 49, elements: [wrap(vector([1, 2])), wrap(vector([-100, 20000098.123445]))] - } + }, + { + expectedByteSize: 27, + elements: [wrap(regex('a', 'b')), wrap(regex('c', 'd'))] + }, + { + expectedByteSize: 13, + elements: [wrap(bsonObjectId('foo')), wrap(bsonObjectId('bar'))] + }, + { + expectedByteSize: 53, + elements: [wrap(bsonTimestamp(1, 2)), wrap(bsonTimestamp(3, 4))] + }, + { + expectedByteSize: 8, + elements: [wrap(int32(1)), wrap(int32(2147483647))] + }, + { + expectedByteSize: 16, + elements: [ + wrap(bsonBinaryData(1, new Uint8Array([127, 128]))), + wrap(bsonBinaryData(128, new Uint8Array([1, 2]))) + ] + }, + { expectedByteSize: 11, elements: [wrap(minKey()), wrap(maxKey())] } ]; for (const group of equalityGroups) { @@ -361,7 +459,13 @@ describe('Values', () => { [wrap({ a: 'a', b: 'b' }), wrap({ a: 'a', bc: 'b' })], [wrap({ a: 'a', b: 'b' }), wrap({ a: 'a', b: 'b', c: 'c' })], [wrap({ a: 'a', b: 'b' }), wrap({ a: 'a', b: 'b', c: 'c' })], - [wrap(vector([2, 3])), wrap(vector([1, 2, 3]))] + [wrap(vector([2, 3])), wrap(vector([1, 2, 3]))], + [wrap(regex('a', 'b')), wrap(regex('cc', 'dd'))], + [wrap(bsonObjectId('foo')), wrap(bsonObjectId('foobar'))], + [ + wrap(bsonBinaryData(128, new Uint8Array([127, 128]))), + wrap(bsonBinaryData(1, new Uint8Array([1, 2, 3]))) + ] ]; for (const group of relativeGroups) { @@ -376,9 +480,14 @@ describe('Values', () => { }); it('computes lower bound', () => { + // TODO(Mila/BSON):add cases for bson types const groups = [ - // null first - [valuesGetLowerBound({ nullValue: 'NULL_VALUE' }), wrap(null)], + // null and minKey first + [ + 
valuesGetLowerBound({ nullValue: 'NULL_VALUE' }), + wrap(null), + wrap(minKey()) + ], // booleans [valuesGetLowerBound({ booleanValue: true }), wrap(false)], @@ -420,7 +529,7 @@ describe('Values', () => { valuesGetLowerBound({ mapValue: { fields: { - [TYPE_KEY]: { stringValue: VECTOR_VALUE_SENTINEL }, + [TYPE_KEY]: { stringValue: RESERVED_VECTOR_KEY }, [VECTOR_MAP_VECTORS_KEY]: { arrayValue: { values: [{ doubleValue: 1 }] @@ -433,7 +542,10 @@ describe('Values', () => { ], // objects - [valuesGetLowerBound({ mapValue: {} }), wrap({})] + [valuesGetLowerBound({ mapValue: {} }), wrap({})], + + // MaxKey + [wrap(maxKey())] ]; expectCorrectComparisonGroups( @@ -445,6 +557,7 @@ describe('Values', () => { }); it('computes upper bound', () => { + // TODO(Mila/BSON):add cases for bson types const groups = [ // null first [wrap(null)], @@ -526,6 +639,19 @@ describe('Values', () => { expect( canonicalId(wrap({ 'a': ['b', { 'c': new GeoPoint(30, 60) }] })) ).to.equal('{a:[b,{c:geo(30,60)}]}'); + expect(canonicalId(wrap(regex('a', 'b')))).to.equal( + '{__regex__:{options:b,pattern:a}}' + ); + expect(canonicalId(wrap(bsonObjectId('foo')))).to.equal('{__oid__:foo}'); + expect(canonicalId(wrap(bsonTimestamp(1, 2)))).to.equal( + '{__request_timestamp__:{increment:2,seconds:1}}' + ); + expect(canonicalId(wrap(int32(1)))).to.equal('{__int__:1}'); + expect( + canonicalId(wrap(bsonBinaryData(1, new Uint8Array([1, 2, 3])))) + ).to.equal('{__binary__:{subType:1,data:AQID}}'); + expect(canonicalId(wrap(minKey()))).to.equal('{__min__:null}'); + expect(canonicalId(wrap(maxKey()))).to.equal('{__max__:null}'); }); it('canonical IDs ignore sort order', () => { diff --git a/packages/firestore/test/unit/remote/serializer.helper.ts b/packages/firestore/test/unit/remote/serializer.helper.ts index d523c8fab83..9c116549928 100644 --- a/packages/firestore/test/unit/remote/serializer.helper.ts +++ b/packages/firestore/test/unit/remote/serializer.helper.ts @@ -52,7 +52,16 @@ import { } from 
'../../../src/core/query'; import { SnapshotVersion } from '../../../src/core/snapshot_version'; import { Target, targetEquals, TargetImpl } from '../../../src/core/target'; -import { vector } from '../../../src/lite-api/field_value_impl'; +import { + bsonBinaryData, + bsonObjectId, + bsonTimestamp, + int32, + maxKey, + minKey, + regex, + vector +} from '../../../src/lite-api/field_value_impl'; import { parseQueryValue } from '../../../src/lite-api/user_data_reader'; import { TargetData, TargetPurpose } from '../../../src/local/target_data'; import { FieldMask } from '../../../src/model/field_mask'; @@ -565,6 +574,57 @@ export function serializerTest( jsonValue: expectedJson.mapValue }); }); + + it('converts BSON types in mapValue', () => { + const examples = [ + bsonObjectId('foo'), + bsonTimestamp(1, 2), + minKey(), + maxKey(), + regex('a', 'b'), + int32(1) + ]; + + for (const example of examples) { + expect(userDataWriter.convertValue(wrap(example))).to.deep.equal( + example + ); + + verifyFieldValueRoundTrip({ + value: example, + valueType: 'mapValue', + jsonValue: wrap(example).mapValue + }); + } + + // BsonBinaryData will be serialized differently Proto3Json VS. 
regular Protobuf format + const bsonBinary = bsonBinaryData(1, new Uint8Array([1, 2, 3])); + const expectedJson: api.Value = { + mapValue: { + fields: { + '__binary__': { + 'bytesValue': 'AQECAw==' + } + } + } + }; + + const expectedProtoJson: api.Value = { + mapValue: { + fields: { + '__binary__': { + 'bytesValue': new Uint8Array([1, 1, 2, 3]) + } + } + } + }; + verifyFieldValueRoundTrip({ + value: bsonBinary, + valueType: 'mapValue', + jsonValue: expectedJson.mapValue, + protoJsValue: expectedProtoJson.mapValue + }); + }); }); describe('toKey', () => { From 3c743b25c77fa076f6bc4db254e5e02036773de3 Mon Sep 17 00:00:00 2001 From: Mila <107142260+milaGGL@users.noreply.github.com> Date: Tue, 11 Mar 2025 13:38:22 -0400 Subject: [PATCH 2/9] Implement indexing for bson types (#331) --- packages/firestore/src/core/target.ts | 6 +- .../src/index/firestore_index_value_writer.ts | 102 ++- packages/firestore/src/lite-api/query.ts | 27 + .../src/lite-api/user_data_writer.ts | 5 +- packages/firestore/src/model/type_order.ts | 39 +- packages/firestore/src/model/values.ts | 231 +++++-- .../test/integration/api/database.test.ts | 424 +++++++++--- .../test/integration/api/query.test.ts | 14 - .../test/integration/api/type.test.ts | 55 +- .../test/integration/api/validation.test.ts | 14 + .../test/integration/util/helpers.ts | 74 ++- .../firestore_index_value_writer.test.ts | 366 ++++++++++- .../test/unit/local/index_manager.test.ts | 607 +++++++++++++++++- .../firestore/test/unit/model/values.test.ts | 117 +++- 14 files changed, 1823 insertions(+), 258 deletions(-) diff --git a/packages/firestore/src/core/target.ts b/packages/firestore/src/core/target.ts index 664a2ef9a08..cc2732e8f8a 100644 --- a/packages/firestore/src/core/target.ts +++ b/packages/firestore/src/core/target.ts @@ -28,6 +28,8 @@ import { INTERNAL_MAX_VALUE, INTERNAL_MIN_VALUE, lowerBoundCompare, + MAX_KEY_VALUE, + MIN_KEY_VALUE, upperBoundCompare, valuesGetLowerBound, valuesGetUpperBound @@ -387,7 +389,7 @@ 
function targetGetAscendingBound( break; case Operator.NOT_EQUAL: case Operator.NOT_IN: - filterValue = INTERNAL_MIN_VALUE; + filterValue = MIN_KEY_VALUE; break; default: // Remaining filters cannot be used as lower bounds. @@ -462,7 +464,7 @@ function targetGetDescendingBound( break; case Operator.NOT_EQUAL: case Operator.NOT_IN: - filterValue = INTERNAL_MAX_VALUE; + filterValue = MAX_KEY_VALUE; break; default: // Remaining filters cannot be used as upper bounds. diff --git a/packages/firestore/src/index/firestore_index_value_writer.ts b/packages/firestore/src/index/firestore_index_value_writer.ts index f831862a0de..d02a07313fe 100644 --- a/packages/firestore/src/index/firestore_index_value_writer.ts +++ b/packages/firestore/src/index/firestore_index_value_writer.ts @@ -22,9 +22,16 @@ import { normalizeTimestamp } from '../model/normalize'; import { - isVectorValue, VECTOR_MAP_VECTORS_KEY, - isMaxValue + detectSpecialMapType, + RESERVED_BSON_TIMESTAMP_KEY, + RESERVED_REGEX_KEY, + RESERVED_BSON_OBJECT_ID_KEY, + RESERVED_BSON_BINARY_KEY, + SpecialMapValueType, + RESERVED_REGEX_PATTERN_KEY, + RESERVED_REGEX_OPTIONS_KEY, + RESERVED_INT32_KEY } from '../model/values'; import { ArrayValue, MapValue, Value } from '../protos/firestore_proto_api'; import { fail } from '../util/assert'; @@ -32,22 +39,28 @@ import { isNegativeZero } from '../util/types'; import { DirectionalIndexByteEncoder } from './directional_index_byte_encoder'; -// Note: This code is copied from the backend. Code that is not used by -// Firestore was removed. +// Note: This file is copied from the backend. Code that is not used by +// Firestore was removed. Code that has different behavior was modified. 
const INDEX_TYPE_NULL = 5; +const INDEX_TYPE_MIN_KEY = 7; const INDEX_TYPE_BOOLEAN = 10; const INDEX_TYPE_NAN = 13; const INDEX_TYPE_NUMBER = 15; const INDEX_TYPE_TIMESTAMP = 20; +const INDEX_TYPE_BSON_TIMESTAMP = 22; const INDEX_TYPE_STRING = 25; const INDEX_TYPE_BLOB = 30; +const INDEX_TYPE_BSON_BINARY = 31; const INDEX_TYPE_REFERENCE = 37; +const INDEX_TYPE_BSON_OBJECT_ID = 43; const INDEX_TYPE_GEOPOINT = 45; +const INDEX_TYPE_REGEX = 47; const INDEX_TYPE_ARRAY = 50; const INDEX_TYPE_VECTOR = 53; const INDEX_TYPE_MAP = 55; const INDEX_TYPE_REFERENCE_SEGMENT = 60; +const INDEX_TYPE_MAX_VALUE = 999; // A terminator that indicates that a truncatable value was not truncated. // This must be smaller than all other type labels. @@ -124,11 +137,30 @@ export class FirestoreIndexValueWriter { encoder.writeNumber(geoPoint.latitude || 0); encoder.writeNumber(geoPoint.longitude || 0); } else if ('mapValue' in indexValue) { - // TODO(Mila/BSON): add bson types for indexing - if (isMaxValue(indexValue)) { + const type = detectSpecialMapType(indexValue); + if (type === SpecialMapValueType.INTERNAL_MAX) { this.writeValueTypeLabel(encoder, Number.MAX_SAFE_INTEGER); - } else if (isVectorValue(indexValue)) { + } else if (type === SpecialMapValueType.VECTOR) { this.writeIndexVector(indexValue.mapValue!, encoder); + } else if (type === SpecialMapValueType.MAX_KEY) { + this.writeValueTypeLabel(encoder, INDEX_TYPE_MAX_VALUE); + } else if (type === SpecialMapValueType.MIN_KEY) { + this.writeValueTypeLabel(encoder, INDEX_TYPE_MIN_KEY); + } else if (type === SpecialMapValueType.BSON_BINARY) { + this.writeIndexBsonBinaryData(indexValue.mapValue!, encoder); + } else if (type === SpecialMapValueType.REGEX) { + this.writeIndexRegex(indexValue.mapValue!, encoder); + } else if (type === SpecialMapValueType.BSON_TIMESTAMP) { + this.writeIndexBsonTimestamp(indexValue.mapValue!, encoder); + } else if (type === SpecialMapValueType.BSON_OBJECT_ID) { + 
this.writeIndexBsonObjectId(indexValue.mapValue!, encoder); + } else if (type === SpecialMapValueType.INT32) { + this.writeValueTypeLabel(encoder, INDEX_TYPE_NUMBER); + encoder.writeNumber( + normalizeNumber( + indexValue.mapValue!.fields![RESERVED_INT32_KEY]!.integerValue! + ) + ); } else { this.writeIndexMap(indexValue.mapValue!, encoder); this.writeTruncationMarker(encoder); @@ -202,7 +234,10 @@ export class FirestoreIndexValueWriter { encoder: DirectionalIndexByteEncoder ): void { this.writeValueTypeLabel(encoder, INDEX_TYPE_REFERENCE); - const path = DocumentKey.fromName(referenceValue).path; + const segments: string[] = referenceValue + .split('/') + .filter(segment => segment.length > 0); + const path = DocumentKey.fromSegments(segments.slice(5)).path; path.forEach(segment => { this.writeValueTypeLabel(encoder, INDEX_TYPE_REFERENCE_SEGMENT); this.writeUnlabeledIndexString(segment, encoder); @@ -222,4 +257,55 @@ export class FirestoreIndexValueWriter { // references, arrays and maps). encoder.writeNumber(NOT_TRUNCATED); } + + private writeIndexBsonTimestamp( + mapValue: MapValue, + encoder: DirectionalIndexByteEncoder + ): void { + this.writeValueTypeLabel(encoder, INDEX_TYPE_BSON_TIMESTAMP); + const fields = mapValue.fields || {}; + if (fields) { + // The JS SDK encodes BSON timestamps differently than the backend. + // This is due to the limitation of `number` in JS which handles up to 53-bit precision. 
+ this.writeIndexMap( + fields[RESERVED_BSON_TIMESTAMP_KEY].mapValue!, + encoder + ); + } + } + + private writeIndexBsonObjectId( + mapValue: MapValue, + encoder: DirectionalIndexByteEncoder + ): void { + this.writeValueTypeLabel(encoder, INDEX_TYPE_BSON_OBJECT_ID); + const fields = mapValue.fields || {}; + const oid = fields[RESERVED_BSON_OBJECT_ID_KEY]?.stringValue || ''; + encoder.writeBytes(normalizeByteString(oid)); + } + + private writeIndexBsonBinaryData( + mapValue: MapValue, + encoder: DirectionalIndexByteEncoder + ): void { + this.writeValueTypeLabel(encoder, INDEX_TYPE_BSON_BINARY); + const fields = mapValue.fields || {}; + const binary = fields[RESERVED_BSON_BINARY_KEY]?.bytesValue || ''; + encoder.writeBytes(normalizeByteString(binary)); + this.writeTruncationMarker(encoder); + } + + private writeIndexRegex( + mapValue: MapValue, + encoder: DirectionalIndexByteEncoder + ): void { + this.writeValueTypeLabel(encoder, INDEX_TYPE_REGEX); + const fields = mapValue.fields || {}; + const regex = fields[RESERVED_REGEX_KEY]?.mapValue?.fields || {}; + if (regex) { + encoder.writeString(regex[RESERVED_REGEX_PATTERN_KEY]?.stringValue || ''); + encoder.writeString(regex[RESERVED_REGEX_OPTIONS_KEY]?.stringValue || ''); + } + this.writeTruncationMarker(encoder); + } } diff --git a/packages/firestore/src/lite-api/query.ts b/packages/firestore/src/lite-api/query.ts index f0a357b828c..67245f96d07 100644 --- a/packages/firestore/src/lite-api/query.ts +++ b/packages/firestore/src/lite-api/query.ts @@ -811,6 +811,8 @@ export function newQueryFilter( value: unknown ): FieldFilter { let fieldValue: ProtoValue; + validateQueryOperator(value, op); + if (fieldPath.isKeyField()) { if (op === Operator.ARRAY_CONTAINS || op === Operator.ARRAY_CONTAINS_ANY) { throw new FirestoreError( @@ -1064,6 +1066,31 @@ function validateDisjunctiveFilterElements( } } +/** + * Validates the input string as a field comparison operator. 
+ */ +export function validateQueryOperator( + value: unknown, + operator: Operator +): void { + if ( + typeof value === 'number' && + isNaN(value) && + operator !== '==' && + operator !== '!=' + ) { + throw new Error( + "Invalid query. You can only perform '==' and '!=' comparisons on NaN." + ); + } + + if (value === null && operator !== '==' && operator !== '!=') { + throw new Error( + "Invalid query. You can only perform '==' and '!=' comparisons on Null." + ); + } +} + /** * Given an operator, returns the set of operators that cannot be used with it. * diff --git a/packages/firestore/src/lite-api/user_data_writer.ts b/packages/firestore/src/lite-api/user_data_writer.ts index 0de02b822b2..012b04874c3 100644 --- a/packages/firestore/src/lite-api/user_data_writer.ts +++ b/packages/firestore/src/lite-api/user_data_writer.ts @@ -83,9 +83,6 @@ export abstract class AbstractUserDataWriter { ): unknown { switch (typeOrder(value)) { case TypeOrder.NullValue: - if ('mapValue' in value) { - return minKey(); - } return null; case TypeOrder.BooleanValue: return value.booleanValue!; @@ -122,6 +119,8 @@ export abstract class AbstractUserDataWriter { return this.convertToBsonTimestampValue(value.mapValue!); case TypeOrder.MaxKeyValue: return maxKey(); + case TypeOrder.MinKeyValue: + return minKey(); default: throw fail('Invalid value type: ' + JSON.stringify(value)); } diff --git a/packages/firestore/src/model/type_order.ts b/packages/firestore/src/model/type_order.ts index 7ac6ed11e47..a13e16f4211 100644 --- a/packages/firestore/src/model/type_order.ts +++ b/packages/firestore/src/model/type_order.ts @@ -27,24 +27,25 @@ export const enum TypeOrder { // server timestamps and `MAX_VALUE` inside the SDK. // NULL and MIN_KEY sort the same. NullValue = 0, - MinKeyValue = 0, - BooleanValue = 1, - NumberValue = 2, - TimestampValue = 3, - // TODO(Mila/BSON): which should come first considering indexes? 
- BsonTimestampValue = 4, - ServerTimestampValue = 5, - StringValue = 6, - BlobValue = 7, - BsonBinaryValue = 8, - RefValue = 9, - BsonObjectIdValue = 10, - GeoPointValue = 11, - RegexValue = 12, - ArrayValue = 13, - VectorValue = 14, - ObjectValue = 15, - // TODO(Mila/BSON):should MaxKeyValue and MaxValue combined? how would this affect indexes? - MaxKeyValue = 16, + MinKeyValue = 1, + BooleanValue = 2, + // Note: all numbers (32-bit int, 64-bit int, 64-bit double, 128-bit decimal, + // etc.) are sorted together numerically. The `numberEquals` function + // distinguishes between different number types and compares them accordingly. + NumberValue = 3, + TimestampValue = 4, + BsonTimestampValue = 5, + ServerTimestampValue = 6, + StringValue = 7, + BlobValue = 8, + BsonBinaryValue = 9, + RefValue = 10, + BsonObjectIdValue = 11, + GeoPointValue = 12, + RegexValue = 13, + ArrayValue = 14, + VectorValue = 15, + ObjectValue = 16, + MaxKeyValue = 17, MaxValue = 9007199254740991 // Number.MAX_SAFE_INTEGER } diff --git a/packages/firestore/src/model/values.ts b/packages/firestore/src/model/values.ts index d6344409ed9..02404130a77 100644 --- a/packages/firestore/src/model/values.ts +++ b/packages/firestore/src/model/values.ts @@ -21,7 +21,6 @@ import { LatLng, MapValue, Timestamp, - Value as ProtoValue, Value } from '../protos/firestore_proto_api'; import { fail } from '../util/assert'; @@ -74,7 +73,7 @@ export const INTERNAL_MAX_VALUE: Value = { } }; -export const MIN_VECTOR_VALUE = { +export const MIN_VECTOR_VALUE: Value = { mapValue: { fields: { [TYPE_KEY]: { stringValue: RESERVED_VECTOR_KEY }, @@ -85,6 +84,96 @@ export const MIN_VECTOR_VALUE = { } }; +export const MIN_KEY_VALUE: Value = { + mapValue: { + fields: { + [RESERVED_MIN_KEY]: { + nullValue: 'NULL_VALUE' + } + } + } +}; + +export const MAX_KEY_VALUE: Value = { + mapValue: { + fields: { + [RESERVED_MAX_KEY]: { + nullValue: 'NULL_VALUE' + } + } + } +}; + +export const MIN_BSON_OBJECT_ID_VALUE: Value = { + 
mapValue: { + fields: { + [RESERVED_BSON_OBJECT_ID_KEY]: { + stringValue: '' + } + } + } +}; + +export const MIN_BSON_TIMESTAMP_VALUE: Value = { + mapValue: { + fields: { + [RESERVED_BSON_TIMESTAMP_KEY]: { + mapValue: { + fields: { + // Both seconds and increment are 32 bit unsigned integers + [RESERVED_BSON_TIMESTAMP_SECONDS_KEY]: { + integerValue: 0 + }, + [RESERVED_BSON_TIMESTAMP_INCREMENT_KEY]: { + integerValue: 0 + } + } + } + } + } + } +}; + +export const MIN_REGEX_VALUE: Value = { + mapValue: { + fields: { + [RESERVED_REGEX_KEY]: { + mapValue: { + fields: { + [RESERVED_REGEX_PATTERN_KEY]: { stringValue: '' }, + [RESERVED_REGEX_OPTIONS_KEY]: { stringValue: '' } + } + } + } + } + } +}; + +export const MIN_BSON_BINARY_VALUE: Value = { + mapValue: { + fields: { + [RESERVED_BSON_BINARY_KEY]: { + // bsonBinaryValue should have at least one byte as subtype + bytesValue: Uint8Array.from([0]) + } + } + } +}; + +export enum SpecialMapValueType { + REGEX = 'regexValue', + BSON_OBJECT_ID = 'bsonObjectIdValue', + INT32 = 'int32Value', + BSON_TIMESTAMP = 'bsonTimestampValue', + BSON_BINARY = 'bsonBinaryValue', + MIN_KEY = 'minKeyValue', + MAX_KEY = 'maxKeyValue', + INTERNAL_MAX = 'maxValue', + VECTOR = 'vectorValue', + SERVER_TIMESTAMP = 'serverTimestampValue', + REGULAR_MAP = 'regularMapValue' +} + /** Extracts the backend's type order for the provided value. 
*/ export function typeOrder(value: Value): TypeOrder { if ('nullValue' in value) { @@ -108,25 +197,25 @@ export function typeOrder(value: Value): TypeOrder { } else if ('mapValue' in value) { const valueType = detectSpecialMapType(value); switch (valueType) { - case 'serverTimestampValue': + case SpecialMapValueType.SERVER_TIMESTAMP: return TypeOrder.ServerTimestampValue; - case 'maxValue': + case SpecialMapValueType.INTERNAL_MAX: return TypeOrder.MaxValue; - case 'vectorValue': + case SpecialMapValueType.VECTOR: return TypeOrder.VectorValue; - case 'regexValue': + case SpecialMapValueType.REGEX: return TypeOrder.RegexValue; - case 'bsonObjectIdValue': + case SpecialMapValueType.BSON_OBJECT_ID: return TypeOrder.BsonObjectIdValue; - case 'int32Value': + case SpecialMapValueType.INT32: return TypeOrder.NumberValue; - case 'bsonTimestampValue': + case SpecialMapValueType.BSON_TIMESTAMP: return TypeOrder.BsonTimestampValue; - case 'bsonBinaryValue': + case SpecialMapValueType.BSON_BINARY: return TypeOrder.BsonBinaryValue; - case 'minKeyValue': + case SpecialMapValueType.MIN_KEY: return TypeOrder.MinKeyValue; - case 'maxKeyValue': + case SpecialMapValueType.MAX_KEY: return TypeOrder.MaxKeyValue; default: return TypeOrder.ObjectValue; @@ -230,8 +319,8 @@ function blobEquals(left: Value, right: Value): boolean { export function numberEquals(left: Value, right: Value): boolean { if ( ('integerValue' in left && 'integerValue' in right) || - (detectSpecialMapType(left) === 'int32Value' && - detectSpecialMapType(right) === 'int32Value') + (detectSpecialMapType(left) === SpecialMapValueType.INT32 && + detectSpecialMapType(right) === SpecialMapValueType.INT32) ) { return extractNumber(left) === extractNumber(right); } else if ('doubleValue' in left && 'doubleValue' in right) { @@ -338,7 +427,7 @@ export function valueCompare(left: Value, right: Value): number { export function extractNumber(value: Value): number { let numberValue; - if (detectSpecialMapType(value) === 
'int32Value') { + if (detectSpecialMapType(value) === SpecialMapValueType.INT32) { numberValue = value.mapValue!.fields![RESERVED_INT32_KEY].integerValue!; } else { numberValue = value.integerValue || value.doubleValue; @@ -598,7 +687,7 @@ function canonifyValue(value: Value): string { return canonifyArray(value.arrayValue!); } else if ('mapValue' in value) { // BsonBinaryValue contains an array of bytes, and needs to extract `subtype` and `data` from it before canonifying. - if (detectSpecialMapType(value) === 'bsonBinaryValue') { + if (detectSpecialMapType(value) === SpecialMapValueType.BSON_BINARY) { return canonifyBsonBinaryData(value.mapValue!); } return canonifyMap(value.mapValue!); @@ -679,11 +768,6 @@ function canonifyArray(arrayValue: ArrayValue): string { export function estimateByteSize(value: Value): number { switch (typeOrder(value)) { case TypeOrder.NullValue: - // MinKeyValue and NullValue has same TypeOrder number, but MinKeyValue is encoded as MapValue - // and its size should be estimated differently. - if ('mapValue' in value) { - return estimateMapByteSize(value.mapValue!); - } return 4; case TypeOrder.BooleanValue: return 4; @@ -716,6 +800,7 @@ export function estimateByteSize(value: Value): number { case TypeOrder.BsonObjectIdValue: case TypeOrder.BsonBinaryValue: case TypeOrder.BsonTimestampValue: + case TypeOrder.MinKeyValue: case TypeOrder.MaxKeyValue: return estimateMapByteSize(value.mapValue!); default: @@ -801,19 +886,9 @@ export function isMapValue( return !!value && 'mapValue' in value; } -/** Returns true if `value` is a VectorValue. */ -export function isVectorValue(value: ProtoValue | null): boolean { - return !!value && detectSpecialMapType(value) === 'vectorValue'; -} - -/** Returns true if the `Value` represents the canonical {@link #INTERNAL_MAX_VALUE} . 
*/ -export function isMaxValue(value: Value): boolean { - return detectSpecialMapType(value) === 'maxValue'; -} - -function detectSpecialMapType(value: Value): string { +export function detectSpecialMapType(value: Value): SpecialMapValueType { if (!value || !value.mapValue || !value.mapValue.fields) { - return ''; // Not a special map type + return SpecialMapValueType.REGULAR_MAP; // Not a special map type } const fields = value.mapValue.fields; @@ -821,10 +896,10 @@ function detectSpecialMapType(value: Value): string { // Check for type-based mappings const type = fields[TYPE_KEY]?.stringValue; if (type) { - const typeMap: Record = { - [RESERVED_VECTOR_KEY]: 'vectorValue', - [RESERVED_MAX_KEY]: 'maxValue', - [RESERVED_SERVER_TIMESTAMP_KEY]: 'serverTimestampValue' + const typeMap: Record = { + [RESERVED_VECTOR_KEY]: SpecialMapValueType.VECTOR, + [RESERVED_MAX_KEY]: SpecialMapValueType.INTERNAL_MAX, + [RESERVED_SERVER_TIMESTAMP_KEY]: SpecialMapValueType.SERVER_TIMESTAMP }; if (typeMap[type]) { return typeMap[type]; @@ -832,14 +907,14 @@ function detectSpecialMapType(value: Value): string { } // Check for BSON-related mappings - const bsonMap: Record = { - [RESERVED_REGEX_KEY]: 'regexValue', - [RESERVED_BSON_OBJECT_ID_KEY]: 'bsonObjectIdValue', - [RESERVED_INT32_KEY]: 'int32Value', - [RESERVED_BSON_TIMESTAMP_KEY]: 'bsonTimestampValue', - [RESERVED_BSON_BINARY_KEY]: 'bsonBinaryValue', - [RESERVED_MIN_KEY]: 'minKeyValue', - [RESERVED_MAX_KEY]: 'maxKeyValue' + const bsonMap: Record = { + [RESERVED_REGEX_KEY]: SpecialMapValueType.REGEX, + [RESERVED_BSON_OBJECT_ID_KEY]: SpecialMapValueType.BSON_OBJECT_ID, + [RESERVED_INT32_KEY]: SpecialMapValueType.INT32, + [RESERVED_BSON_TIMESTAMP_KEY]: SpecialMapValueType.BSON_TIMESTAMP, + [RESERVED_BSON_BINARY_KEY]: SpecialMapValueType.BSON_BINARY, + [RESERVED_MIN_KEY]: SpecialMapValueType.MIN_KEY, + [RESERVED_MAX_KEY]: SpecialMapValueType.MAX_KEY }; for (const key in bsonMap) { @@ -848,18 +923,18 @@ function 
detectSpecialMapType(value: Value): string { } } - return ''; + return SpecialMapValueType.REGULAR_MAP; } export function isBsonType(value: Value): boolean { const bsonTypes = new Set([ - 'regexValue', - 'bsonObjectIdValue', - 'int32Value', - 'bsonTimestampValue', - 'bsonBinaryValue', - 'minKeyValue', - 'maxKeyValue' + SpecialMapValueType.REGEX, + SpecialMapValueType.BSON_OBJECT_ID, + SpecialMapValueType.INT32, + SpecialMapValueType.BSON_TIMESTAMP, + SpecialMapValueType.BSON_BINARY, + SpecialMapValueType.MIN_KEY, + SpecialMapValueType.MAX_KEY ]); return bsonTypes.has(detectSpecialMapType(value)); } @@ -912,9 +987,24 @@ export function valuesGetLowerBound(value: Value): Value { } else if ('arrayValue' in value) { return { arrayValue: {} }; } else if ('mapValue' in value) { - // TODO(Mila/BSON): add lower bound for bson types for indexing - if (isVectorValue(value)) { + const type = detectSpecialMapType(value); + if (type === SpecialMapValueType.VECTOR) { return MIN_VECTOR_VALUE; + } else if (type === SpecialMapValueType.BSON_OBJECT_ID) { + return MIN_BSON_OBJECT_ID_VALUE; + } else if (type === SpecialMapValueType.BSON_TIMESTAMP) { + return MIN_BSON_TIMESTAMP_VALUE; + } else if (type === SpecialMapValueType.BSON_BINARY) { + return MIN_BSON_BINARY_VALUE; + } else if (type === SpecialMapValueType.REGEX) { + return MIN_REGEX_VALUE; + } else if (type === SpecialMapValueType.INT32) { + // int32Value is treated the same as integerValue and doubleValue + return { doubleValue: NaN }; + } else if (type === SpecialMapValueType.MIN_KEY) { + return MIN_KEY_VALUE; + } else if (type === SpecialMapValueType.MAX_KEY) { + return MAX_KEY_VALUE; } return { mapValue: {} }; } else { @@ -925,29 +1015,44 @@ export function valuesGetLowerBound(value: Value): Value { /** Returns the largest value for the given value type (exclusive). 
*/ export function valuesGetUpperBound(value: Value): Value { if ('nullValue' in value) { - return { booleanValue: false }; + return MIN_KEY_VALUE; } else if ('booleanValue' in value) { return { doubleValue: NaN }; } else if ('integerValue' in value || 'doubleValue' in value) { return { timestampValue: { seconds: Number.MIN_SAFE_INTEGER } }; } else if ('timestampValue' in value) { - return { stringValue: '' }; + return MIN_BSON_TIMESTAMP_VALUE; } else if ('stringValue' in value) { return { bytesValue: '' }; } else if ('bytesValue' in value) { - return refValue(DatabaseId.empty(), DocumentKey.empty()); + return MIN_BSON_BINARY_VALUE; } else if ('referenceValue' in value) { - return { geoPointValue: { latitude: -90, longitude: -180 } }; + return MIN_BSON_OBJECT_ID_VALUE; } else if ('geoPointValue' in value) { - return { arrayValue: {} }; + return MIN_REGEX_VALUE; } else if ('arrayValue' in value) { return MIN_VECTOR_VALUE; } else if ('mapValue' in value) { - // TODO(Mila/BSON): add upper bound for bson types for indexing - if (isVectorValue(value)) { + const type = detectSpecialMapType(value); + if (type === SpecialMapValueType.VECTOR) { return { mapValue: {} }; + } else if (type === SpecialMapValueType.BSON_OBJECT_ID) { + return { geoPointValue: { latitude: -90, longitude: -180 } }; + } else if (type === SpecialMapValueType.BSON_TIMESTAMP) { + return { stringValue: '' }; + } else if (type === SpecialMapValueType.BSON_BINARY) { + return refValue(DatabaseId.empty(), DocumentKey.empty()); + } else if (type === SpecialMapValueType.REGEX) { + return { arrayValue: {} }; + } else if (type === SpecialMapValueType.INT32) { + // int32Value is treated the same as integerValue and doubleValue + return { timestampValue: { seconds: Number.MIN_SAFE_INTEGER } }; + } else if (type === SpecialMapValueType.MIN_KEY) { + return { booleanValue: false }; + } else if (type === SpecialMapValueType.MAX_KEY) { + return INTERNAL_MAX_VALUE; } - return INTERNAL_MAX_VALUE; + return MAX_KEY_VALUE; 
} else { return fail('Invalid value type: ' + JSON.stringify(value)); } diff --git a/packages/firestore/test/integration/api/database.test.ts b/packages/firestore/test/integration/api/database.test.ts index 2856862f6de..d4091768e7c 100644 --- a/packages/firestore/test/integration/api/database.test.ts +++ b/packages/firestore/test/integration/api/database.test.ts @@ -62,7 +62,6 @@ import { WithFieldValue, Timestamp, FieldPath, - newTestFirestore, SnapshotOptions, newTestApp, FirestoreError, @@ -76,7 +75,10 @@ import { maxKey, minKey, regex, - or + or, + newTestFirestore, + GeoPoint, + Bytes } from '../util/firebase_export'; import { apiDescribe, @@ -89,7 +91,9 @@ import { withNamedTestDbsOrSkipUnlessUsingEmulator, toDataArray, checkOnlineAndOfflineResultsMatch, - toIds + toIds, + withTestProjectIdAndCollectionSettings, + checkCacheRoundTrip } from '../util/helpers'; import { DEFAULT_SETTINGS, DEFAULT_PROJECT_ID } from '../util/settings'; @@ -2443,14 +2447,12 @@ apiDescribe('Database', persistence => { }; it('can write and read BSON types', async () => { - return withTestDbsSettings( + return withTestProjectIdAndCollectionSettings( persistence, NIGHTLY_PROJECT_ID, settings, - 1, - async dbs => { - const coll = collection(dbs[0], AutoId.newId()); - + {}, + async coll => { const docRef = await addDoc(coll, { binary: bsonBinaryData(1, new Uint8Array([1, 2, 3])), objectId: bsonObjectId('507f191e810c19729de860ea'), @@ -2491,6 +2493,49 @@ apiDescribe('Database', persistence => { ); }); + it('can write and read BSON types offline', async () => { + return withTestProjectIdAndCollectionSettings( + persistence, + NIGHTLY_PROJECT_ID, + settings, + {}, + async (coll, db) => { + await disableNetwork(db); + const docRef = doc(coll, 'testDoc'); + + // Adding docs to cache, do not wait for promise to resolve. 
+ // eslint-disable-next-line @typescript-eslint/no-floating-promises + setDoc(docRef, { + binary: bsonBinaryData(1, new Uint8Array([1, 2, 3])), + objectId: bsonObjectId('507f191e810c19729de860ea'), + int32: int32(1), + regex: regex('^foo', 'i'), + timestamp: bsonTimestamp(1, 2), + min: minKey(), + max: maxKey() + }); + + const snapshot = await getDocFromCache(docRef); + expect( + snapshot + .get('binary') + .isEqual(bsonBinaryData(1, new Uint8Array([1, 2, 3]))) + ).to.be.true; + expect( + snapshot + .get('objectId') + .isEqual(bsonObjectId('507f191e810c19729de860ea')) + ).to.be.true; + expect(snapshot.get('int32').isEqual(int32(1))).to.be.true; + expect(snapshot.get('regex').isEqual(regex('^foo', 'i'))).to.be.true; + expect(snapshot.get('timestamp').isEqual(bsonTimestamp(1, 2))).to.be + .true; + expect(snapshot.get('min') === minKey()).to.be.true; + expect(snapshot.get('max') === maxKey()).to.be.true; + } + ); + }); + it('can filter and order objectIds', async () => { const testDocs = { a: { key: bsonObjectId('507f191e810c19729de860ea') }, @@ -2498,17 +2543,12 @@ apiDescribe('Database', persistence => { c: { key: bsonObjectId('507f191e810c19729de860ec') } }; - return withTestDbsSettings( + return withTestProjectIdAndCollectionSettings( persistence, NIGHTLY_PROJECT_ID, settings, - 1, - async dbs => { - const coll = collection(dbs[0], AutoId.newId()); - await addDoc(coll, testDocs['a']); - await addDoc(coll, testDocs['b']); - await addDoc(coll, testDocs['c']); - + testDocs, + async (coll, db) => { let orderedQuery = query( coll, where('key', '>', bsonObjectId('507f191e810c19729de860ea')), @@ -2520,6 +2560,7 @@ apiDescribe('Database', persistence => { testDocs['c'], testDocs['b'] ]); + await checkCacheRoundTrip(orderedQuery, db, toDataArray(snapshot)); orderedQuery = query( coll, @@ -2535,6 +2576,7 @@ apiDescribe('Database', persistence => { testDocs['b'], testDocs['a'] ]); + await checkCacheRoundTrip(orderedQuery, db, toDataArray(snapshot)); } ); }); @@ -2545,17 
+2587,12 @@ apiDescribe('Database', persistence => { b: { key: int32(1) }, c: { key: int32(2) } }; - return withTestDbsSettings( + return withTestProjectIdAndCollectionSettings( persistence, NIGHTLY_PROJECT_ID, settings, - 1, - async dbs => { - const coll = collection(dbs[0], AutoId.newId()); - await addDoc(coll, testDocs['a']); - await addDoc(coll, testDocs['b']); - await addDoc(coll, testDocs['c']); - + testDocs, + async (coll, db) => { let orderedQuery = query( coll, where('key', '>=', int32(1)), @@ -2567,6 +2604,7 @@ apiDescribe('Database', persistence => { testDocs['c'], testDocs['b'] ]); + await checkCacheRoundTrip(orderedQuery, db, toDataArray(snapshot)); orderedQuery = query( coll, @@ -2579,6 +2617,7 @@ apiDescribe('Database', persistence => { testDocs['c'], testDocs['a'] ]); + await checkCacheRoundTrip(orderedQuery, db, toDataArray(snapshot)); } ); }); @@ -2589,17 +2628,12 @@ apiDescribe('Database', persistence => { b: { key: bsonTimestamp(1, 2) }, c: { key: bsonTimestamp(2, 1) } }; - return withTestDbsSettings( + return withTestProjectIdAndCollectionSettings( persistence, NIGHTLY_PROJECT_ID, settings, - 1, - async dbs => { - const coll = collection(dbs[0], AutoId.newId()); - await addDoc(coll, testDocs['a']); - await addDoc(coll, testDocs['b']); - await addDoc(coll, testDocs['c']); - + testDocs, + async (coll, db) => { let orderedQuery = query( coll, where('key', '>', bsonTimestamp(1, 1)), @@ -2611,6 +2645,7 @@ apiDescribe('Database', persistence => { testDocs['c'], testDocs['b'] ]); + await checkCacheRoundTrip(orderedQuery, db, toDataArray(snapshot)); orderedQuery = query( coll, @@ -2623,6 +2658,7 @@ apiDescribe('Database', persistence => { testDocs['c'], testDocs['b'] ]); + await checkCacheRoundTrip(orderedQuery, db, toDataArray(snapshot)); } ); }); @@ -2633,17 +2669,12 @@ apiDescribe('Database', persistence => { b: { key: bsonBinaryData(1, new Uint8Array([1, 2, 4])) }, c: { key: bsonBinaryData(2, new Uint8Array([1, 2, 3])) } }; - return 
withTestDbsSettings( + return withTestProjectIdAndCollectionSettings( persistence, NIGHTLY_PROJECT_ID, settings, - 1, - async dbs => { - const coll = collection(dbs[0], AutoId.newId()); - await addDoc(coll, testDocs['a']); - await addDoc(coll, testDocs['b']); - await addDoc(coll, testDocs['c']); - + testDocs, + async (coll, db) => { let orderedQuery = query( coll, where('key', '>', bsonBinaryData(1, new Uint8Array([1, 2, 3]))), @@ -2655,6 +2686,7 @@ apiDescribe('Database', persistence => { testDocs['c'], testDocs['b'] ]); + await checkCacheRoundTrip(orderedQuery, db, toDataArray(snapshot)); orderedQuery = query( coll, @@ -2668,6 +2700,7 @@ apiDescribe('Database', persistence => { testDocs['b'], testDocs['a'] ]); + await checkCacheRoundTrip(orderedQuery, db, toDataArray(snapshot)); } ); }); @@ -2678,17 +2711,12 @@ apiDescribe('Database', persistence => { b: { key: regex('^bar', 'x') }, c: { key: regex('^baz', 'i') } }; - return withTestDbsSettings( + return withTestProjectIdAndCollectionSettings( persistence, NIGHTLY_PROJECT_ID, settings, - 1, - async dbs => { - const coll = collection(dbs[0], AutoId.newId()); - await addDoc(coll, testDocs['a']); - await addDoc(coll, testDocs['b']); - await addDoc(coll, testDocs['c']); - + testDocs, + async (coll, db) => { const orderedQuery = query( coll, or( @@ -2703,6 +2731,7 @@ apiDescribe('Database', persistence => { testDocs['c'], testDocs['a'] ]); + await checkCacheRoundTrip(orderedQuery, db, toDataArray(snapshot)); } ); }); @@ -2711,29 +2740,62 @@ apiDescribe('Database', persistence => { const testDocs = { a: { key: minKey() }, b: { key: minKey() }, - c: { key: maxKey() } + c: { key: null }, + d: { key: 1 }, + e: { key: maxKey() } }; - return withTestDbsSettings( + return withTestProjectIdAndCollectionSettings( persistence, NIGHTLY_PROJECT_ID, settings, - 1, - async dbs => { - const coll = collection(dbs[0], AutoId.newId()); - await addDoc(coll, testDocs['a']); - await addDoc(coll, testDocs['b']); - await addDoc(coll, 
testDocs['c']); + testDocs, + async (coll, db) => { + let filteredQuery = query(coll, where('key', '==', minKey())); + let snapshot = await getDocs(filteredQuery); + expect(toDataArray(snapshot)).to.deep.equal([ + testDocs['a'], + testDocs['b'] + ]); + await checkCacheRoundTrip(filteredQuery, db, toDataArray(snapshot)); - const orderedQuery = query( - coll, - where('key', '==', minKey()), - orderBy('key', 'desc') // minKeys are equal, would sort by documentId as secondary order - ); - const snapshot = await getDocs(orderedQuery); + filteredQuery = query(coll, where('key', '!=', minKey())); + snapshot = await getDocs(filteredQuery); expect(toDataArray(snapshot)).to.deep.equal([ - testDocs['b'], - testDocs['a'] + testDocs['d'], + testDocs['e'] ]); + await checkCacheRoundTrip(filteredQuery, db, toDataArray(snapshot)); + + filteredQuery = query(coll, where('key', '>=', minKey())); + snapshot = await getDocs(filteredQuery); + expect(toDataArray(snapshot)).to.deep.equal([ + testDocs['a'], + testDocs['b'] + ]); + await checkCacheRoundTrip(filteredQuery, db, toDataArray(snapshot)); + + filteredQuery = query(coll, where('key', '<=', minKey())); + snapshot = await getDocs(filteredQuery); + expect(toDataArray(snapshot)).to.deep.equal([ + testDocs['a'], + testDocs['b'] + ]); + await checkCacheRoundTrip(filteredQuery, db, toDataArray(snapshot)); + + filteredQuery = query(coll, where('key', '>', minKey())); + snapshot = await getDocs(filteredQuery); + expect(toDataArray(snapshot)).to.deep.equal([]); + await checkCacheRoundTrip(filteredQuery, db, toDataArray(snapshot)); + + filteredQuery = query(coll, where('key', '<', minKey())); + snapshot = await getDocs(filteredQuery); + expect(toDataArray(snapshot)).to.deep.equal([]); + await checkCacheRoundTrip(filteredQuery, db, toDataArray(snapshot)); + + filteredQuery = query(coll, where('key', '<', 1)); + snapshot = await getDocs(filteredQuery); + expect(toDataArray(snapshot)).to.deep.equal([]); + await 
checkCacheRoundTrip(filteredQuery, db, toDataArray(snapshot)); } ); }); @@ -2741,30 +2803,98 @@ apiDescribe('Database', persistence => { it('can filter and order maxKey values', async () => { const testDocs = { a: { key: minKey() }, - b: { key: maxKey() }, - c: { key: maxKey() } + b: { key: 1 }, + c: { key: maxKey() }, + d: { key: maxKey() }, + e: { key: null } }; - return withTestDbsSettings( + return withTestProjectIdAndCollectionSettings( persistence, NIGHTLY_PROJECT_ID, settings, - 1, - async dbs => { - const coll = collection(dbs[0], AutoId.newId()); - await addDoc(coll, testDocs['a']); - await addDoc(coll, testDocs['b']); - await addDoc(coll, testDocs['c']); - - const orderedQuery = query( - coll, - where('key', '==', maxKey()), - orderBy('key', 'desc') // maxKeys are equal, would sort by documentId as secondary order - ); - const snapshot = await getDocs(orderedQuery); + testDocs, + async (coll, db) => { + let filteredQuery = query(coll, where('key', '==', maxKey())); + let snapshot = await getDocs(filteredQuery); expect(toDataArray(snapshot)).to.deep.equal([ testDocs['c'], + testDocs['d'] + ]); + await checkCacheRoundTrip(filteredQuery, db, toDataArray(snapshot)); + + filteredQuery = query(coll, where('key', '!=', maxKey())); + snapshot = await getDocs(filteredQuery); + expect(toDataArray(snapshot)).to.deep.equal([ + testDocs['a'], testDocs['b'] ]); + await checkCacheRoundTrip(filteredQuery, db, toDataArray(snapshot)); + + filteredQuery = query(coll, where('key', '>=', maxKey())); + snapshot = await getDocs(filteredQuery); + expect(toDataArray(snapshot)).to.deep.equal([ + testDocs['c'], + testDocs['d'] + ]); + await checkCacheRoundTrip(filteredQuery, db, toDataArray(snapshot)); + + filteredQuery = query(coll, where('key', '<=', maxKey())); + snapshot = await getDocs(filteredQuery); + expect(toDataArray(snapshot)).to.deep.equal([ + testDocs['c'], + testDocs['d'] + ]); + await checkCacheRoundTrip(filteredQuery, db, toDataArray(snapshot)); + + filteredQuery = 
query(coll, where('key', '>', maxKey())); + snapshot = await getDocs(filteredQuery); + expect(toDataArray(snapshot)).to.deep.equal([]); + await checkCacheRoundTrip(filteredQuery, db, toDataArray(snapshot)); + + filteredQuery = query(coll, where('key', '<', maxKey())); + snapshot = await getDocs(filteredQuery); + expect(toDataArray(snapshot)).to.deep.equal([]); + await checkCacheRoundTrip(filteredQuery, db, toDataArray(snapshot)); + + filteredQuery = query(coll, where('key', '>', 1)); + snapshot = await getDocs(filteredQuery); + expect(toDataArray(snapshot)).to.deep.equal([]); + await checkCacheRoundTrip(filteredQuery, db, toDataArray(snapshot)); + } + ); + }); + + it('can handle null with bson values', async () => { + const testDocs = { + a: { key: minKey() }, + b: { key: null }, + c: { key: null }, + d: { key: 1 }, + e: { key: maxKey() } + }; + + return withTestProjectIdAndCollectionSettings( + persistence, + NIGHTLY_PROJECT_ID, + settings, + testDocs, + async (coll, db) => { + let filteredQuery = query(coll, where('key', '==', null)); + let snapshot = await getDocs(filteredQuery); + expect(toDataArray(snapshot)).to.deep.equal([ + testDocs['b'], + testDocs['c'] + ]); + await checkCacheRoundTrip(filteredQuery, db, toDataArray(snapshot)); + + filteredQuery = query(coll, where('key', '!=', null)); + snapshot = await getDocs(filteredQuery); + expect(toDataArray(snapshot)).to.deep.equal([ + testDocs['a'], + testDocs['d'], + testDocs['e'] + ]); + await checkCacheRoundTrip(filteredQuery, db, toDataArray(snapshot)); } ); }); @@ -2778,20 +2908,12 @@ apiDescribe('Database', persistence => { e: { key: bsonBinaryData(1, new Uint8Array([1, 2, 3])) }, f: { key: regex('^foo', 'i') } }; - return withTestDbsSettings( + return withTestProjectIdAndCollectionSettings( persistence, NIGHTLY_PROJECT_ID, settings, - 1, - async dbs => { - const coll = collection(dbs[0], AutoId.newId()); - await addDoc(coll, testDocs['a']); - await addDoc(coll, testDocs['b']); - await addDoc(coll, 
testDocs['c']); - await addDoc(coll, testDocs['d']); - await addDoc(coll, testDocs['e']); - await addDoc(coll, testDocs['f']); - + testDocs, + async (coll, db) => { const orderedQuery = query(coll, orderBy('key', 'asc')); const storeEvent = new EventsAccumulator(); @@ -2861,5 +2983,125 @@ apiDescribe('Database', persistence => { } ); }); + + // eslint-disable-next-line no-restricted-properties + (persistence.gc === 'lru' ? describe : describe.skip)('From Cache', () => { + it('SDK orders different value types together the same way online and offline', async () => { + const testDocs: { [key: string]: DocumentData } = { + a: { key: null }, + b: { key: minKey() }, + c: { key: true }, + d: { key: NaN }, + e: { key: int32(1) }, + f: { key: 2.0 }, + g: { key: 3 }, + h: { key: new Timestamp(100, 123456000) }, + i: { key: bsonTimestamp(1, 2) }, + j: { key: 'string' }, + k: { key: Bytes.fromUint8Array(new Uint8Array([0, 1, 255])) }, + l: { key: bsonBinaryData(1, new Uint8Array([1, 2, 3])) }, + n: { key: bsonObjectId('507f191e810c19729de860ea') }, + o: { key: new GeoPoint(0, 0) }, + p: { key: regex('^foo', 'i') }, + q: { key: [1, 2] }, + r: { key: vector([1, 2]) }, + s: { key: { a: 1 } }, + t: { key: maxKey() } + }; + + return withTestProjectIdAndCollectionSettings( + persistence, + NIGHTLY_PROJECT_ID, + settings, + testDocs, + async coll => { + // TODO(Mila/BSON): remove after prod supports bson, and use `ref` helper function instead + const docRef = doc(coll, 'doc'); + await setDoc(doc(coll, 'm'), { key: docRef }); + + const orderedQuery = query(coll, orderBy('key', 'desc')); + await checkOnlineAndOfflineResultsMatch( + orderedQuery, + 't', + 's', + 'r', + 'q', + 'p', + 'o', + 'n', + 'm', + 'l', + 'k', + 'j', + 'i', + 'h', + 'g', + 'f', + 'e', + 'd', + 'c', + 'b', + 'a' + ); + } + ); + }); + + it('SDK orders bson types the same way online and offline', async () => { + const testDocs: { [key: string]: DocumentData } = { + a: { key: maxKey() }, // maxKeys are all equal + b: { 
key: maxKey() }, + c: { key: int32(1) }, + d: { key: int32(-1) }, + e: { key: int32(0) }, + f: { key: bsonTimestamp(1, 1) }, + g: { key: bsonTimestamp(2, 1) }, + h: { key: bsonTimestamp(1, 2) }, + i: { key: bsonBinaryData(1, new Uint8Array([1, 2, 3])) }, + j: { key: bsonBinaryData(1, new Uint8Array([1, 1, 4])) }, + k: { key: bsonBinaryData(2, new Uint8Array([1, 0, 0])) }, + l: { key: bsonObjectId('507f191e810c19729de860eb') }, + m: { key: bsonObjectId('507f191e810c19729de860ea') }, + n: { key: bsonObjectId('407f191e810c19729de860ea') }, + o: { key: regex('^foo', 'i') }, + p: { key: regex('^foo', 'm') }, + q: { key: regex('^bar', 'i') }, + r: { key: minKey() }, // minKeys are all equal + s: { key: minKey() } + }; + + return withTestProjectIdAndCollectionSettings( + persistence, + NIGHTLY_PROJECT_ID, + settings, + testDocs, + async coll => { + const orderedQuery = query(coll, orderBy('key')); + await checkOnlineAndOfflineResultsMatch( + orderedQuery, + 'r', + 's', + 'd', + 'e', + 'c', + 'f', + 'h', + 'g', + 'j', + 'i', + 'k', + 'n', + 'm', + 'l', + 'q', + 'o', + 'p', + 'a', + 'b' + ); + } + ); + }); + }); }); }); diff --git a/packages/firestore/test/integration/api/query.test.ts b/packages/firestore/test/integration/api/query.test.ts index 01fd0e47e35..91090d04fa9 100644 --- a/packages/firestore/test/integration/api/query.test.ts +++ b/packages/firestore/test/integration/api/query.test.ts @@ -925,20 +925,6 @@ apiDescribe('Queries', persistence => { { array: ['a', 42, 'c'] }, { array: [42], array2: ['bingo'] } ]); - - // NOTE: The backend doesn't currently support null, NaN, objects, or - // arrays, so there isn't much of anything else interesting to test. - // With null. - const snapshot3 = await getDocs( - query(coll, where('zip', 'array-contains', null)) - ); - expect(toDataArray(snapshot3)).to.deep.equal([]); - - // With NaN. 
- const snapshot4 = await getDocs( - query(coll, where('zip', 'array-contains', Number.NaN)) - ); - expect(toDataArray(snapshot4)).to.deep.equal([]); }); }); diff --git a/packages/firestore/test/integration/api/type.test.ts b/packages/firestore/test/integration/api/type.test.ts index a6218f6a1ad..156eba426f8 100644 --- a/packages/firestore/test/integration/api/type.test.ts +++ b/packages/firestore/test/integration/api/type.test.ts @@ -17,7 +17,6 @@ import { expect } from 'chai'; -import { AutoId } from '../../../src/util/misc'; import { addEqualityMatcher } from '../../util/equality_matcher'; import { EventsAccumulator } from '../util/events_accumulator'; import { @@ -52,6 +51,7 @@ import { } from '../util/firebase_export'; import { apiDescribe, + withTestProjectIdAndCollectionSettings, withTestDb, withTestDbsSettings, withTestDoc @@ -384,13 +384,13 @@ apiDescribe('Firestore', persistence => { }); it('invalid 32-bit integer gets rejected', async () => { - return withTestDbsSettings( + return withTestProjectIdAndCollectionSettings( persistence, NIGHTLY_PROJECT_ID, settings, - 1, - async dbs => { - const docRef = doc(dbs[0], 'test-collection/test-doc'); + {}, + async coll => { + const docRef = doc(coll, 'test-doc'); let errorMessage; try { await setDoc(docRef, { key: int32(2147483648) }); @@ -414,13 +414,13 @@ apiDescribe('Firestore', persistence => { }); it('invalid BSON timestamp gets rejected', async () => { - return withTestDbsSettings( + return withTestProjectIdAndCollectionSettings( persistence, NIGHTLY_PROJECT_ID, settings, - 1, - async dbs => { - const docRef = doc(dbs[0], 'test-collection/test-doc'); + {}, + async coll => { + const docRef = doc(coll, 'test-doc'); let errorMessage; try { // BSON timestamp larger than 32-bit integer gets rejected @@ -446,13 +446,13 @@ apiDescribe('Firestore', persistence => { }); it('invalid regex value gets rejected', async () => { - return withTestDbsSettings( + return withTestProjectIdAndCollectionSettings( persistence, 
NIGHTLY_PROJECT_ID, settings, - 1, - async dbs => { - const docRef = doc(dbs[0], 'test-collection/test-doc'); + {}, + async coll => { + const docRef = doc(coll, 'test-doc'); let errorMessage; try { await setDoc(docRef, { key: regex('foo', 'a') }); @@ -467,13 +467,13 @@ apiDescribe('Firestore', persistence => { }); it('invalid bsonObjectId value gets rejected', async () => { - return withTestDbsSettings( + return withTestProjectIdAndCollectionSettings( persistence, NIGHTLY_PROJECT_ID, settings, - 1, - async dbs => { - const docRef = doc(dbs[0], 'test-collection/test-doc'); + {}, + async coll => { + const docRef = doc(coll, 'test-doc'); let errorMessage; try { @@ -490,13 +490,13 @@ apiDescribe('Firestore', persistence => { }); it('invalid bsonBinaryData value gets rejected', async () => { - return withTestDbsSettings( + return withTestProjectIdAndCollectionSettings( persistence, NIGHTLY_PROJECT_ID, settings, - 1, - async dbs => { - const docRef = doc(dbs[0], 'test-collection/test-doc'); + {}, + async coll => { + const docRef = doc(coll, 'test-doc'); let errorMessage; try { await setDoc(docRef, { @@ -537,18 +537,13 @@ apiDescribe('Firestore', persistence => { maxValue: { key: maxKey() } }; - return withTestDbsSettings( + return withTestProjectIdAndCollectionSettings( persistence, NIGHTLY_PROJECT_ID, settings, - 1, - async dbs => { - const coll = collection(dbs[0], AutoId.newId()); - for (const key of Object.keys(testDocs)) { - await setDoc(doc(coll, key), testDocs[key]); - } - - // TODO(Mila/BSON): replace after prod supports bson + testDocs, + async coll => { + // TODO(Mila/BSON): remove after prod supports bson const docRef = doc(coll, 'doc'); await setDoc(doc(coll, 'referenceValue'), { key: docRef }); diff --git a/packages/firestore/test/integration/api/validation.test.ts b/packages/firestore/test/integration/api/validation.test.ts index 9c74634affa..72978f71fe3 100644 --- a/packages/firestore/test/integration/api/validation.test.ts +++ 
b/packages/firestore/test/integration/api/validation.test.ts @@ -856,6 +856,20 @@ apiDescribe('Validation:', persistence => { ).to.throw("Invalid query. You cannot use more than one '!=' filter."); }); + validationIt(persistence, 'rejects invalid NaN filter', db => { + const coll = collection(db, 'test'); + expect(() => query(coll, where('foo', '>', NaN))).to.throw( + "Invalid query. You can only perform '==' and '!=' comparisons on NaN." + ); + }); + + validationIt(persistence, 'rejects invalid Null filter', db => { + const coll = collection(db, 'test'); + expect(() => query(coll, where('foo', '>', null))).to.throw( + "Invalid query. You can only perform '==' and '!=' comparisons on Null." + ); + }); + validationIt(persistence, 'with != and not-in filters fail', db => { expect(() => query( diff --git a/packages/firestore/test/integration/util/helpers.ts b/packages/firestore/test/integration/util/helpers.ts index 465bc8edd61..2c789ec9151 100644 --- a/packages/firestore/test/integration/util/helpers.ts +++ b/packages/firestore/test/integration/util/helpers.ts @@ -44,7 +44,9 @@ import { Query, getDocsFromServer, getDocsFromCache, - _AutoId + _AutoId, + disableNetwork, + enableNetwork } from './firebase_export'; import { ALT_PROJECT_ID, @@ -444,10 +446,27 @@ export function withTestCollectionSettings( settings: PrivateSettings, docs: { [key: string]: DocumentData }, fn: (collection: CollectionReference, db: Firestore) => Promise +): Promise { + return withTestProjectIdAndCollectionSettings( + persistence, + DEFAULT_PROJECT_ID, + settings, + docs, + fn + ); +} + +export function withTestProjectIdAndCollectionSettings( + persistence: PersistenceMode | typeof PERSISTENCE_MODE_UNSPECIFIED, + projectId: string, + settings: PrivateSettings, + docs: { [key: string]: DocumentData }, + fn: (collection: CollectionReference, db: Firestore) => Promise ): Promise { const collectionId = _AutoId.newId(); - return batchCommitDocsToCollection( + return 
batchCommitDocsToCollectionWithSettings( persistence, + projectId, settings, docs, collectionId, @@ -462,10 +481,28 @@ export function batchCommitDocsToCollection( collectionId: string, fn: (collection: CollectionReference, db: Firestore) => Promise ): Promise { - return withTestDbsSettings( + return batchCommitDocsToCollectionWithSettings( persistence, DEFAULT_PROJECT_ID, settings, + docs, + collectionId, + fn + ); +} + +export function batchCommitDocsToCollectionWithSettings( + persistence: PersistenceMode | typeof PERSISTENCE_MODE_UNSPECIFIED, + projectId: string, + settings: PrivateSettings, + docs: { [key: string]: DocumentData }, + collectionId: string, + fn: (collection: CollectionReference, db: Firestore) => Promise +): Promise { + return withTestDbsSettings( + persistence, + projectId, + settings, 2, ([testDb, setupDb]) => { const testCollection = collection(testDb, collectionId); @@ -557,3 +594,34 @@ export async function checkOnlineAndOfflineResultsMatch( const docsFromCache = await getDocsFromCache(query); expect(toIds(docsFromServer)).to.deep.equal(toIds(docsFromCache)); } + +/** + * Checks that documents fetched from the server and stored in the cache can be + * successfully retrieved from the cache and matches the expected documents. + * + * This function performs the following steps: + * 1. Fetch documents from the server for provided query and populate the cache. + * 2. Disables the network connection to simulate offline mode. + * 3. Retrieves the documents from the cache using the same query. + * 4. Compares the cached documents with the expected documents. + * + * @param query The query to check. + * @param db The Firestore database instance. + * @param expectedDocs Optional ordered list of document data that are expected to be retrieved from the cache. 
+ */ +export async function checkCacheRoundTrip( + query: Query, + db: Firestore, + expectedDocs: DocumentData[] +): Promise { + await getDocsFromServer(query); + + await disableNetwork(db); + const docsFromCache = await getDocsFromCache(query); + + if (expectedDocs.length !== 0) { + expect(expectedDocs).to.deep.equal(toDataArray(docsFromCache)); + } + + await enableNetwork(db); +} diff --git a/packages/firestore/test/unit/index/firestore_index_value_writer.test.ts b/packages/firestore/test/unit/index/firestore_index_value_writer.test.ts index 8daa97eb77d..c646726feeb 100644 --- a/packages/firestore/test/unit/index/firestore_index_value_writer.test.ts +++ b/packages/firestore/test/unit/index/firestore_index_value_writer.test.ts @@ -16,13 +16,35 @@ */ import { expect } from 'chai'; +import { + bsonBinaryData, + bsonObjectId, + bsonTimestamp, + int32, + regex +} from '../../../lite'; import { FirestoreIndexValueWriter } from '../../../src/index/firestore_index_value_writer'; import { IndexByteEncoder } from '../../../src/index/index_byte_encoder'; import { Timestamp } from '../../../src/lite-api/timestamp'; +import { + parseBsonBinaryData, + parseInt32Value, + parseMaxKey, + parseMinKey, + parseBsonObjectId, + parseRegexValue, + parseBsonTimestamp +} from '../../../src/lite-api/user_data_reader'; import { IndexKind } from '../../../src/model/field_index'; import type { Value } from '../../../src/protos/firestore_proto_api'; -import { toTimestamp } from '../../../src/remote/serializer'; -import { JSON_SERIALIZER } from '../local/persistence_test_helpers'; +import { + JsonProtoSerializer, + toTimestamp +} from '../../../src/remote/serializer'; +import { + JSON_SERIALIZER, + TEST_DATABASE_ID +} from '../local/persistence_test_helpers'; import { compare } from './ordered_code_writer.test'; @@ -247,4 +269,344 @@ describe('Firestore Index Value Writer', () => { ).to.equal(1); }); }); + + describe('can gracefully handle BSON types', () => { + it('can compare BSON 
ObjectIds', () => { + const value1 = { + mapValue: { + fields: { + '__oid__': { stringValue: '507f191e810c19729de860ea' } + } + } + }; + const value2 = { + mapValue: { + fields: { + '__oid__': { stringValue: '507f191e810c19729de860eb' } + } + } + }; + const value3 = parseBsonObjectId( + bsonObjectId('507f191e810c19729de860ea') + ); + + expect( + compareIndexEncodedValues(value1, value2, IndexKind.ASCENDING) + ).to.equal(-1); + expect( + compareIndexEncodedValues(value2, value1, IndexKind.ASCENDING) + ).to.equal(1); + expect( + compareIndexEncodedValues(value1, value1, IndexKind.ASCENDING) + ).to.equal(0); + + expect( + compareIndexEncodedValues(value3, value2, IndexKind.ASCENDING) + ).to.equal(-1); + expect( + compareIndexEncodedValues(value2, value3, IndexKind.ASCENDING) + ).to.equal(1); + expect( + compareIndexEncodedValues(value3, value1, IndexKind.ASCENDING) + ).to.equal(0); + }); + + it('can compare BSON Timestamps', () => { + const value1 = { + mapValue: { + fields: { + '__request_timestamp__': { + mapValue: { + fields: { + seconds: { integerValue: 1 }, + increment: { integerValue: 2 } + } + } + } + } + } + }; + const value2 = { + mapValue: { + fields: { + '__request_timestamp__': { + mapValue: { + fields: { + seconds: { integerValue: 1 }, + increment: { integerValue: 3 } + } + } + } + } + } + }; + const value3 = parseBsonTimestamp(bsonTimestamp(1, 2)); + const value4 = parseBsonTimestamp(bsonTimestamp(2, 1)); + + expect( + compareIndexEncodedValues(value1, value2, IndexKind.ASCENDING) + ).to.equal(-1); + expect( + compareIndexEncodedValues(value2, value1, IndexKind.ASCENDING) + ).to.equal(1); + expect( + compareIndexEncodedValues(value1, value1, IndexKind.ASCENDING) + ).to.equal(0); + + expect( + compareIndexEncodedValues(value3, value1, IndexKind.ASCENDING) + ).to.equal(0); + expect( + compareIndexEncodedValues(value3, value2, IndexKind.ASCENDING) + ).to.equal(-1); + expect( + compareIndexEncodedValues(value2, value3, IndexKind.ASCENDING) + ).to.equal(1); + 
+ expect( + compareIndexEncodedValues(value4, value1, IndexKind.ASCENDING) + ).to.equal(1); + expect( + compareIndexEncodedValues(value4, value2, IndexKind.ASCENDING) + ).to.equal(1); + expect( + compareIndexEncodedValues(value4, value3, IndexKind.ASCENDING) + ).to.equal(1); + }); + + it('can compare BSON Binary', () => { + const value1 = { + mapValue: { + fields: { + '__binary__': { + bytesValue: 'AQECAw==' // 1, 1, 2, 3 + } + } + } + }; + const value2 = { + mapValue: { + fields: { + '__binary__': { + bytesValue: 'AQECBA==' // 1, 1, 2, 4 + } + } + } + }; + + const serializer = new JsonProtoSerializer( + TEST_DATABASE_ID, + /* useProto3Json= */ false + ); + const value3 = parseBsonBinaryData( + serializer, + bsonBinaryData(1, new Uint8Array([1, 2, 3])) + ); + + const jsonSerializer = new JsonProtoSerializer( + TEST_DATABASE_ID, + /* useProto3Json= */ true + ); + + const value4 = parseBsonBinaryData( + jsonSerializer, + bsonBinaryData(1, new Uint8Array([1, 2, 3])) + ); + + expect( + compareIndexEncodedValues(value1, value2, IndexKind.ASCENDING) + ).to.equal(-1); + expect( + compareIndexEncodedValues(value2, value1, IndexKind.ASCENDING) + ).to.equal(1); + expect( + compareIndexEncodedValues(value1, value1, IndexKind.ASCENDING) + ).to.equal(0); + + expect( + compareIndexEncodedValues(value3, value2, IndexKind.ASCENDING) + ).to.equal(-1); + expect( + compareIndexEncodedValues(value2, value3, IndexKind.ASCENDING) + ).to.equal(1); + expect( + compareIndexEncodedValues(value3, value1, IndexKind.ASCENDING) + ).to.equal(0); + + expect( + compareIndexEncodedValues(value4, value2, IndexKind.ASCENDING) + ).to.equal(-1); + expect( + compareIndexEncodedValues(value2, value4, IndexKind.ASCENDING) + ).to.equal(1); + expect( + compareIndexEncodedValues(value4, value1, IndexKind.ASCENDING) + ).to.equal(0); + }); + + it('can compare BSON Regex', () => { + const value1 = { + mapValue: { + fields: { + '__regex__': { + mapValue: { + fields: { + 'pattern': { stringValue: '^foo' }, + 
'options': { stringValue: 'i' } + } + } + } + } + } + }; + const value2 = { + mapValue: { + fields: { + '__regex__': { + mapValue: { + fields: { + 'pattern': { stringValue: '^foo' }, + 'options': { stringValue: 'm' } + } + } + } + } + } + }; + const value3 = parseRegexValue(regex('^foo', 'i')); + const value4 = parseRegexValue(regex('^zoo', 'i')); + + expect( + compareIndexEncodedValues(value1, value2, IndexKind.ASCENDING) + ).to.equal(-1); + expect( + compareIndexEncodedValues(value2, value1, IndexKind.ASCENDING) + ).to.equal(1); + expect( + compareIndexEncodedValues(value1, value1, IndexKind.ASCENDING) + ).to.equal(0); + + expect( + compareIndexEncodedValues(value3, value2, IndexKind.ASCENDING) + ).to.equal(-1); + expect( + compareIndexEncodedValues(value2, value3, IndexKind.ASCENDING) + ).to.equal(1); + expect( + compareIndexEncodedValues(value3, value1, IndexKind.ASCENDING) + ).to.equal(0); + + expect( + compareIndexEncodedValues(value4, value1, IndexKind.ASCENDING) + ).to.equal(1); + expect( + compareIndexEncodedValues(value4, value2, IndexKind.ASCENDING) + ).to.equal(1); + expect( + compareIndexEncodedValues(value4, value3, IndexKind.ASCENDING) + ).to.equal(1); + }); + + it('can compare BSON Int32', () => { + const value1 = { + mapValue: { + fields: { + '__int__': { integerValue: 1 } + } + } + }; + const value2 = { + mapValue: { + fields: { + '__int__': { integerValue: 2 } + } + } + }; + const value3 = parseInt32Value(int32(1)); + + expect( + compareIndexEncodedValues(value1, value2, IndexKind.ASCENDING) + ).to.equal(-1); + expect( + compareIndexEncodedValues(value2, value1, IndexKind.ASCENDING) + ).to.equal(1); + expect( + compareIndexEncodedValues(value1, value1, IndexKind.ASCENDING) + ).to.equal(0); + + expect( + compareIndexEncodedValues(value3, value2, IndexKind.ASCENDING) + ).to.equal(-1); + expect( + compareIndexEncodedValues(value2, value3, IndexKind.ASCENDING) + ).to.equal(1); + expect( + compareIndexEncodedValues(value3, value1, IndexKind.ASCENDING) 
+ ).to.equal(0); + }); + + it('can compare BSON MinKey', () => { + const value1 = { + mapValue: { + fields: { + '__min__': { + nullValue: 'NULL_VALUE' as const + } + } + } + }; + const value2 = { + mapValue: { + fields: { + '__min__': { + nullValue: 'NULL_VALUE' as const + } + } + } + }; + const value3 = parseMinKey(); + + expect( + compareIndexEncodedValues(value1, value2, IndexKind.ASCENDING) + ).to.equal(0); + expect( + compareIndexEncodedValues(value1, value3, IndexKind.DESCENDING) + ).to.equal(0); + expect( + compareIndexEncodedValues(value1, value1, IndexKind.ASCENDING) + ).to.equal(0); + }); + + it('can compare BSON MaxKey', () => { + const value1 = { + mapValue: { + fields: { + '__max__': { + nullValue: 'NULL_VALUE' as const + } + } + } + }; + const value2 = { + mapValue: { + fields: { + '__max__': { + nullValue: 'NULL_VALUE' as const + } + } + } + }; + const value3 = parseMaxKey(); + + expect( + compareIndexEncodedValues(value1, value2, IndexKind.ASCENDING) + ).to.equal(0); + expect( + compareIndexEncodedValues(value1, value3, IndexKind.DESCENDING) + ).to.equal(0); + expect( + compareIndexEncodedValues(value1, value1, IndexKind.ASCENDING) + ).to.equal(0); + }); + }); }); diff --git a/packages/firestore/test/unit/local/index_manager.test.ts b/packages/firestore/test/unit/local/index_manager.test.ts index 2521be99bf5..51bef76b31e 100644 --- a/packages/firestore/test/unit/local/index_manager.test.ts +++ b/packages/firestore/test/unit/local/index_manager.test.ts @@ -17,6 +17,7 @@ import { expect } from 'chai'; +import { Bytes, GeoPoint } from '../../../src/'; import { User } from '../../../src/auth/user'; import { FieldFilter } from '../../../src/core/filter'; import { @@ -30,7 +31,16 @@ import { queryWithLimit, queryWithStartAt } from '../../../src/core/query'; -import { vector } from '../../../src/lite-api/field_value_impl'; +import { + bsonBinaryData, + bsonObjectId, + bsonTimestamp, + int32, + maxKey, + minKey, + regex, + vector +} from 
'../../../src/lite-api/field_value_impl'; import { Timestamp } from '../../../src/lite-api/timestamp'; import { displayNameForIndexType, @@ -71,6 +81,7 @@ import { orFilter, path, query, + ref, version, wrap } from '../../util/helpers'; @@ -327,6 +338,14 @@ describe('IndexedDbIndexManager', async () => { await addDoc('coll/doc2', {}); }); + it('adds string', async () => { + await indexManager.addFieldIndex( + fieldIndex('coll', { fields: [['exists', IndexKind.ASCENDING]] }) + ); + await addDoc('coll/doc1', { 'exists': 'a' }); + await addDoc('coll/doc2', { 'exists': 'b' }); + }); + it('applies orderBy', async () => { await indexManager.addFieldIndex( fieldIndex('coll', { fields: [['count', IndexKind.ASCENDING]] }) @@ -1856,6 +1875,592 @@ describe('IndexedDbIndexManager', async () => { await validateIsNoneIndex(query2); }); + describe('BSON type indexing', () => { + it('can index BSON ObjectId fields', async () => { + await indexManager.addFieldIndex( + fieldIndex('coll', { fields: [['key', IndexKind.ASCENDING]] }) + ); + + await addDoc('coll/doc1', { + key: bsonObjectId('507f191e810c19729de860ea') + }); + await addDoc('coll/doc2', { + key: bsonObjectId('507f191e810c19729de860eb') + }); + await addDoc('coll/doc3', { + key: bsonObjectId('507f191e810c19729de860ec') + }); + + const fieldIndexes = await indexManager.getFieldIndexes('coll'); + expect(fieldIndexes).to.have.length(1); + + let q = queryWithAddedOrderBy(query('coll'), orderBy('key')); + await verifyResults(q, 'coll/doc1', 'coll/doc2', 'coll/doc3'); + + q = queryWithAddedFilter( + query('coll'), + filter('key', '==', bsonObjectId('507f191e810c19729de860ea')) + ); + await verifyResults(q, 'coll/doc1'); + + q = queryWithAddedFilter( + query('coll'), + filter('key', '!=', bsonObjectId('507f191e810c19729de860ea')) + ); + await verifyResults(q, 'coll/doc2', 'coll/doc3'); + + q = queryWithAddedFilter( + query('coll'), + filter('key', '>=', bsonObjectId('507f191e810c19729de860eb')) + ); + await verifyResults(q, 
'coll/doc2', 'coll/doc3'); + + q = queryWithAddedFilter( + query('coll'), + filter('key', '<=', bsonObjectId('507f191e810c19729de860eb')) + ); + await verifyResults(q, 'coll/doc1', 'coll/doc2'); + + q = queryWithAddedFilter( + query('coll'), + filter('key', '>', bsonObjectId('507f191e810c19729de860eb')) + ); + await verifyResults(q, 'coll/doc3'); + + q = queryWithAddedFilter( + query('coll'), + filter('key', '<', bsonObjectId('507f191e810c19729de860eb')) + ); + await verifyResults(q, 'coll/doc1'); + + q = queryWithAddedFilter( + query('coll'), + filter('key', '>', bsonObjectId('507f191e810c19729de860ec')) + ); + await verifyResults(q); + + q = queryWithAddedFilter( + query('coll'), + filter('key', '<', bsonObjectId('507f191e810c19729de860ea')) + ); + await verifyResults(q); + }); + + it('can index BSON Binary Data fields', async () => { + await indexManager.addFieldIndex( + fieldIndex('coll', { fields: [['key', IndexKind.ASCENDING]] }) + ); + await addDoc('coll/doc1', { + key: bsonBinaryData(1, new Uint8Array([1, 2, 3])) + }); + await addDoc('coll/doc2', { + key: bsonBinaryData(1, new Uint8Array([1, 2, 4])) + }); + await addDoc('coll/doc3', { + key: bsonBinaryData(1, new Uint8Array([2, 1, 2])) + }); + + const fieldIndexes = await indexManager.getFieldIndexes('coll'); + expect(fieldIndexes).to.have.length(1); + + let q = queryWithAddedOrderBy(query('coll'), orderBy('key')); + await verifyResults(q, 'coll/doc1', 'coll/doc2', 'coll/doc3'); + + q = queryWithAddedFilter( + query('coll'), + filter('key', '==', bsonBinaryData(1, new Uint8Array([1, 2, 3]))) + ); + await verifyResults(q, 'coll/doc1'); + + q = queryWithAddedFilter( + query('coll'), + filter('key', '!=', bsonBinaryData(1, new Uint8Array([1, 2, 3]))) + ); + await verifyResults(q, 'coll/doc2', 'coll/doc3'); + + q = queryWithAddedFilter( + query('coll'), + filter('key', '>=', bsonBinaryData(1, new Uint8Array([1, 2, 4]))) + ); + await verifyResults(q, 'coll/doc2', 'coll/doc3'); + + q = queryWithAddedFilter( + 
query('coll'), + filter('key', '<=', bsonBinaryData(1, new Uint8Array([1, 2, 4]))) + ); + await verifyResults(q, 'coll/doc1', 'coll/doc2'); + + q = queryWithAddedFilter( + query('coll'), + filter('key', '>', bsonBinaryData(1, new Uint8Array([1, 2, 4]))) + ); + await verifyResults(q, 'coll/doc3'); + + q = queryWithAddedFilter( + query('coll'), + filter('key', '<', bsonBinaryData(1, new Uint8Array([1, 2, 4]))) + ); + await verifyResults(q, 'coll/doc1'); + + q = queryWithAddedFilter( + query('coll'), + filter('key', '>', bsonBinaryData(1, new Uint8Array([2, 1, 2]))) + ); + await verifyResults(q); + + q = queryWithAddedFilter( + query('coll'), + filter('key', '<', bsonBinaryData(1, new Uint8Array([1, 2, 3]))) + ); + await verifyResults(q); + }); + + it('can index BSON Timestamp fields', async () => { + await indexManager.addFieldIndex( + fieldIndex('coll', { fields: [['key', IndexKind.ASCENDING]] }) + ); + await addDoc('coll/doc1', { + key: bsonTimestamp(1, 1) + }); + await addDoc('coll/doc2', { + key: bsonTimestamp(1, 2) + }); + await addDoc('coll/doc3', { + key: bsonTimestamp(2, 1) + }); + + const fieldIndexes = await indexManager.getFieldIndexes('coll'); + expect(fieldIndexes).to.have.length(1); + + let q = queryWithAddedOrderBy(query('coll'), orderBy('key')); + await verifyResults(q, 'coll/doc1', 'coll/doc2', 'coll/doc3'); + + q = queryWithAddedFilter( + query('coll'), + filter('key', '==', bsonTimestamp(1, 1)) + ); + await verifyResults(q, 'coll/doc1'); + + q = queryWithAddedFilter( + query('coll'), + filter('key', '!=', bsonTimestamp(1, 1)) + ); + await verifyResults(q, 'coll/doc2', 'coll/doc3'); + + q = queryWithAddedFilter( + query('coll'), + filter('key', '>=', bsonTimestamp(1, 2)) + ); + await verifyResults(q, 'coll/doc2', 'coll/doc3'); + + q = queryWithAddedFilter( + query('coll'), + filter('key', '<=', bsonTimestamp(1, 2)) + ); + await verifyResults(q, 'coll/doc1', 'coll/doc2'); + + q = queryWithAddedFilter( + query('coll'), + filter('key', '>', 
bsonTimestamp(1, 2)) + ); + await verifyResults(q, 'coll/doc3'); + + q = queryWithAddedFilter( + query('coll'), + filter('key', '<', bsonTimestamp(1, 2)) + ); + await verifyResults(q, 'coll/doc1'); + + q = queryWithAddedFilter( + query('coll'), + filter('key', '>', bsonTimestamp(2, 1)) + ); + await verifyResults(q); + + q = queryWithAddedFilter( + query('coll'), + filter('key', '<', bsonTimestamp(1, 1)) + ); + await verifyResults(q); + }); + + it('can index Int32 fields', async () => { + await indexManager.addFieldIndex( + fieldIndex('coll', { fields: [['key', IndexKind.ASCENDING]] }) + ); + await addDoc('coll/doc1', { + key: int32(1) + }); + await addDoc('coll/doc2', { + key: int32(2) + }); + await addDoc('coll/doc3', { + key: int32(3) + }); + const fieldIndexes = await indexManager.getFieldIndexes('coll'); + expect(fieldIndexes).to.have.length(1); + + let q = queryWithAddedOrderBy(query('coll'), orderBy('key')); + await verifyResults(q, 'coll/doc1', 'coll/doc2', 'coll/doc3'); + + q = queryWithAddedFilter(query('coll'), filter('key', '==', int32(1))); + await verifyResults(q, 'coll/doc1'); + + q = queryWithAddedFilter(query('coll'), filter('key', '!=', int32(1))); + await verifyResults(q, 'coll/doc2', 'coll/doc3'); + + q = queryWithAddedFilter(query('coll'), filter('key', '>=', int32(2))); + await verifyResults(q, 'coll/doc2', 'coll/doc3'); + + q = queryWithAddedFilter(query('coll'), filter('key', '<=', int32(2))); + await verifyResults(q, 'coll/doc1', 'coll/doc2'); + + q = queryWithAddedFilter(query('coll'), filter('key', '>', int32(2))); + await verifyResults(q, 'coll/doc3'); + + q = queryWithAddedFilter(query('coll'), filter('key', '<', int32(2))); + await verifyResults(q, 'coll/doc1'); + + q = queryWithAddedFilter(query('coll'), filter('key', '>', int32(3))); + await verifyResults(q); + + q = queryWithAddedFilter(query('coll'), filter('key', '<', int32(1))); + await verifyResults(q); + }); + + it('can index regex fields', async () => { + await 
indexManager.addFieldIndex( + fieldIndex('coll', { fields: [['key', IndexKind.ASCENDING]] }) + ); + await addDoc('coll/doc1', { + key: regex('a', 'i') + }); + await addDoc('coll/doc2', { + key: regex('a', 'm') + }); + await addDoc('coll/doc3', { + key: regex('b', 'i') + }); + const fieldIndexes = await indexManager.getFieldIndexes('coll'); + expect(fieldIndexes).to.have.length(1); + let q = queryWithAddedOrderBy(query('coll'), orderBy('key')); + await verifyResults(q, 'coll/doc1', 'coll/doc2', 'coll/doc3'); + + q = queryWithAddedFilter( + query('coll'), + filter('key', '==', regex('a', 'i')) + ); + await verifyResults(q, 'coll/doc1'); + + q = queryWithAddedFilter( + query('coll'), + filter('key', '!=', regex('a', 'i')) + ); + await verifyResults(q, 'coll/doc2', 'coll/doc3'); + + q = queryWithAddedFilter( + query('coll'), + filter('key', '>=', regex('a', 'm')) + ); + await verifyResults(q, 'coll/doc2', 'coll/doc3'); + + q = queryWithAddedFilter( + query('coll'), + filter('key', '<=', regex('a', 'm')) + ); + await verifyResults(q, 'coll/doc1', 'coll/doc2'); + + q = queryWithAddedFilter( + query('coll'), + filter('key', '>', regex('a', 'm')) + ); + await verifyResults(q, 'coll/doc3'); + + q = queryWithAddedFilter( + query('coll'), + filter('key', '<', regex('a', 'm')) + ); + await verifyResults(q, 'coll/doc1'); + + q = queryWithAddedFilter( + query('coll'), + filter('key', '>', regex('b', 'i')) + ); + await verifyResults(q); + + q = queryWithAddedFilter( + query('coll'), + filter('key', '<', regex('a', 'i')) + ); + await verifyResults(q); + }); + + it('can index minKey fields', async () => { + await indexManager.addFieldIndex( + fieldIndex('coll', { fields: [['key', IndexKind.ASCENDING]] }) + ); + await addDoc('coll/doc1', { + key: minKey() + }); + await addDoc('coll/doc2', { + key: minKey() + }); + await addDoc('coll/doc3', { + key: null + }); + await addDoc('coll/doc4', { + key: 1 + }); + await addDoc('coll/doc5', { + key: maxKey() + }); + + const fieldIndexes = 
await indexManager.getFieldIndexes('coll'); + expect(fieldIndexes).to.have.length(1); + + let q = queryWithAddedOrderBy(query('coll'), orderBy('key')); + await verifyResults( + q, + 'coll/doc3', + 'coll/doc1', + 'coll/doc2', + 'coll/doc4', + 'coll/doc5' + ); + + q = queryWithAddedFilter(query('coll'), filter('key', '==', minKey())); + await verifyResults(q, 'coll/doc1', 'coll/doc2'); + + q = queryWithAddedFilter(query('coll'), filter('key', '!=', minKey())); + await verifyResults(q, 'coll/doc4', 'coll/doc5'); + + q = queryWithAddedFilter(query('coll'), filter('key', '>=', minKey())); + await verifyResults(q, 'coll/doc1', 'coll/doc2'); + + q = queryWithAddedFilter(query('coll'), filter('key', '<=', minKey())); + await verifyResults(q, 'coll/doc1', 'coll/doc2'); + + q = queryWithAddedFilter(query('coll'), filter('key', '>', minKey())); + await verifyResults(q); + + q = queryWithAddedFilter(query('coll'), filter('key', '<', minKey())); + await verifyResults(q); + }); + + it('can index maxKey fields', async () => { + await indexManager.addFieldIndex( + fieldIndex('coll', { fields: [['key', IndexKind.ASCENDING]] }) + ); + await addDoc('coll/doc1', { + key: minKey() + }); + await addDoc('coll/doc2', { + key: 1 + }); + await addDoc('coll/doc3', { + key: maxKey() + }); + await addDoc('coll/doc4', { + key: maxKey() + }); + await addDoc('coll/doc5', { + key: null + }); + + const fieldIndexes = await indexManager.getFieldIndexes('coll'); + expect(fieldIndexes).to.have.length(1); + + let q = queryWithAddedOrderBy(query('coll'), orderBy('key')); + await verifyResults( + q, + 'coll/doc5', + 'coll/doc1', + 'coll/doc2', + 'coll/doc3', + 'coll/doc4' + ); + + q = queryWithAddedFilter(query('coll'), filter('key', '==', maxKey())); + await verifyResults(q, 'coll/doc3', 'coll/doc4'); + + q = queryWithAddedFilter(query('coll'), filter('key', '!=', maxKey())); + await verifyResults(q, 'coll/doc1', 'coll/doc2'); + + q = queryWithAddedFilter(query('coll'), filter('key', '>=', maxKey())); + 
await verifyResults(q, 'coll/doc3', 'coll/doc4'); + + q = queryWithAddedFilter(query('coll'), filter('key', '<=', maxKey())); + await verifyResults(q, 'coll/doc3', 'coll/doc4'); + + q = queryWithAddedFilter(query('coll'), filter('key', '>', maxKey())); + await verifyResults(q); + + q = queryWithAddedFilter(query('coll'), filter('key', '<', maxKey())); + await verifyResults(q); + }); + + it('can index fields of BSON types together', async () => { + await indexManager.addFieldIndex( + fieldIndex('coll', { fields: [['key', IndexKind.DESCENDING]] }) + ); + await addDoc('coll/doc1', { + key: minKey() + }); + + await addDoc('coll/doc2', { + key: int32(2) + }); + await addDoc('coll/doc3', { + key: int32(1) + }); + + await addDoc('coll/doc4', { + key: bsonTimestamp(1, 2) + }); + await addDoc('coll/doc5', { + key: bsonTimestamp(1, 1) + }); + + await addDoc('coll/doc6', { + key: bsonBinaryData(1, new Uint8Array([1, 2, 4])) + }); + await addDoc('coll/doc7', { + key: bsonBinaryData(1, new Uint8Array([1, 2, 3])) + }); + await addDoc('coll/doc8', { + key: bsonObjectId('507f191e810c19729de860eb') + }); + await addDoc('coll/doc9', { + key: bsonObjectId('507f191e810c19729de860ea') + }); + + await addDoc('coll/doc10', { + key: regex('a', 'm') + }); + await addDoc('coll/doc11', { + key: regex('a', 'i') + }); + + await addDoc('coll/doc12', { + key: maxKey() + }); + + const fieldIndexes = await indexManager.getFieldIndexes('coll'); + expect(fieldIndexes).to.have.length(1); + + const q = queryWithAddedOrderBy(query('coll'), orderBy('key', 'desc')); + await verifyResults( + q, + 'coll/doc12', + 'coll/doc10', + 'coll/doc11', + 'coll/doc8', + 'coll/doc9', + 'coll/doc6', + 'coll/doc7', + 'coll/doc4', + 'coll/doc5', + 'coll/doc2', + 'coll/doc3', + 'coll/doc1' + ); + }); + }); + + it('can index fields of all types together', async () => { + await indexManager.addFieldIndex( + fieldIndex('coll', { fields: [['key', IndexKind.DESCENDING]] }) + ); + await addDoc('coll/a', { + key: null + }); + 
await addDoc('coll/b', { + key: minKey() + }); + await addDoc('coll/c', { + key: true + }); + await addDoc('coll/d', { + key: NaN + }); + await addDoc('coll/e', { + key: int32(1) + }); + await addDoc('coll/f', { + key: 2.0 + }); + await addDoc('coll/g', { + key: 3 + }); + await addDoc('coll/h', { + key: new Timestamp(100, 123456000) + }); + await addDoc('coll/i', { + key: bsonTimestamp(1, 2) + }); + await addDoc('coll/j', { + key: 'string' + }); + await addDoc('coll/k', { + key: Bytes.fromUint8Array(new Uint8Array([0, 1, 255])) as Bytes + }); + await addDoc('coll/l', { + key: bsonBinaryData(1, new Uint8Array([1, 2, 3])) + }); + await addDoc('coll/m', { + key: ref('coll/doc') + }); + await addDoc('coll/n', { + key: bsonObjectId('507f191e810c19729de860ea') + }); + await addDoc('coll/o', { + key: new GeoPoint(0, 1) + }); + await addDoc('coll/p', { + key: regex('^foo', 'i') + }); + await addDoc('coll/q', { + key: [1, 2] + }); + await addDoc('coll/r', { + key: vector([1, 2]) + }); + await addDoc('coll/s', { + key: { a: 1 } + }); + await addDoc('coll/t', { + key: maxKey() + }); + + const fieldIndexes = await indexManager.getFieldIndexes('coll'); + expect(fieldIndexes).to.have.length(1); + + const q = queryWithAddedOrderBy(query('coll'), orderBy('key', 'desc')); + await verifyResults( + q, + 'coll/t', + 'coll/s', + 'coll/r', + 'coll/q', + 'coll/p', + 'coll/o', + 'coll/n', + 'coll/m', + 'coll/l', + 'coll/k', + 'coll/j', + 'coll/i', + 'coll/h', + 'coll/g', + 'coll/f', + 'coll/e', + 'coll/d', + 'coll/c', + 'coll/b', + 'coll/a' + ); + }); + async function validateIsPartialIndex(query: Query): Promise { await validateIndexType(query, IndexType.PARTIAL); } diff --git a/packages/firestore/test/unit/model/values.test.ts b/packages/firestore/test/unit/model/values.test.ts index bf46386c800..7a2558d2461 100644 --- a/packages/firestore/test/unit/model/values.test.ts +++ b/packages/firestore/test/unit/model/values.test.ts @@ -48,7 +48,14 @@ import { valuesGetUpperBound, TYPE_KEY, 
RESERVED_VECTOR_KEY, - VECTOR_MAP_VECTORS_KEY + VECTOR_MAP_VECTORS_KEY, + MIN_BSON_TIMESTAMP_VALUE, + MIN_VECTOR_VALUE, + RESERVED_INT32_KEY, + MIN_BSON_BINARY_VALUE, + MIN_KEY_VALUE, + MIN_REGEX_VALUE, + MIN_BSON_OBJECT_ID_VALUE } from '../../../src/model/values'; import * as api from '../../../src/protos/firestore_proto_api'; import { primitiveComparator } from '../../../src/util/misc'; @@ -71,14 +78,8 @@ describe('Values', () => { const values: api.Value[][] = [ [wrap(true), wrap(true)], [wrap(false), wrap(false)], - // MinKeys are all equal, and sort the same as null. - [ - wrap(null), - wrap(null), - wrap(minKey()), - wrap(minKey()), - wrap(MinKey.instance()) - ], + [wrap(null), wrap(null)], + [wrap(minKey()), wrap(minKey()), wrap(MinKey.instance())], [wrap(0 / 0), wrap(Number.NaN), wrap(NaN)], // -0.0 and 0.0 order the same but are not considered equal. [wrap(-0.0)], @@ -166,7 +167,10 @@ describe('Values', () => { it('orders types correctly', () => { const groups = [ // null first - [wrap(null), wrap(minKey())], + [wrap(null)], + + // MinKey is after null + [wrap(minKey())], // booleans [wrap(false)], @@ -480,21 +484,25 @@ describe('Values', () => { }); it('computes lower bound', () => { - // TODO(Mila/BSON):add cases for bson types const groups = [ - // null and minKey first - [ - valuesGetLowerBound({ nullValue: 'NULL_VALUE' }), - wrap(null), - wrap(minKey()) - ], + // lower bound of null is null + [valuesGetLowerBound({ nullValue: 'NULL_VALUE' }), wrap(null)], + + // lower bound of MinKey is MinKey + [valuesGetLowerBound(MIN_KEY_VALUE), wrap(minKey())], // booleans [valuesGetLowerBound({ booleanValue: true }), wrap(false)], [wrap(true)], // numbers - [valuesGetLowerBound({ doubleValue: 0 }), wrap(NaN)], + [ + valuesGetLowerBound({ doubleValue: 0 }), + valuesGetLowerBound({ + mapValue: { fields: { [RESERVED_INT32_KEY]: { integerValue: 0 } } } + }), + wrap(NaN) + ], [wrap(Number.NEGATIVE_INFINITY)], [wrap(Number.MIN_VALUE)], @@ -502,10 +510,31 @@ 
describe('Values', () => { [valuesGetLowerBound({ timestampValue: {} })], [wrap(date1)], + // bson timestamps + [ + valuesGetLowerBound(wrap(bsonTimestamp(4294967295, 4294967295))), + MIN_BSON_TIMESTAMP_VALUE, + wrap(bsonTimestamp(0, 0)) + ], + [wrap(bsonTimestamp(1, 1))], + // strings - [valuesGetLowerBound({ stringValue: '' }), wrap('')], + [valuesGetLowerBound({ stringValue: 'Z' }), wrap('')], [wrap('\u0000')], + // blobs + [valuesGetLowerBound({ bytesValue: 'Z' }), wrap(blob())], + [wrap(blob(0))], + + // bson binary data + [ + valuesGetLowerBound( + wrap(bsonBinaryData(128, new Uint8Array([128, 128]))) + ), + MIN_BSON_BINARY_VALUE + ], + [wrap(bsonBinaryData(0, new Uint8Array([0])))], + // resource names [ valuesGetLowerBound({ referenceValue: '' }), @@ -513,6 +542,14 @@ describe('Values', () => { ], [refValue(DatabaseId.empty(), key('a/a'))], + // bson object ids + [ + valuesGetLowerBound(wrap(bsonObjectId('ZZZ'))), + wrap(bsonObjectId('')), + MIN_BSON_OBJECT_ID_VALUE + ], + [wrap(bsonObjectId('a'))], + // geo points [ valuesGetLowerBound({ geoPointValue: {} }), @@ -520,6 +557,14 @@ describe('Values', () => { ], [wrap(new GeoPoint(-90, 0))], + // regular expressions + [ + valuesGetLowerBound(wrap(regex('ZZZ', 'i'))), + wrap(regex('', '')), + MIN_REGEX_VALUE + ], + [wrap(regex('a', 'i'))], + // arrays [valuesGetLowerBound({ arrayValue: {} }), wrap([])], [wrap([false])], @@ -557,17 +602,22 @@ describe('Values', () => { }); it('computes upper bound', () => { - // TODO(Mila/BSON):add cases for bson types const groups = [ // null first [wrap(null)], - [valuesGetUpperBound({ nullValue: 'NULL_VALUE' })], + + // upper value of null is MinKey + [valuesGetUpperBound({ nullValue: 'NULL_VALUE' }), wrap(minKey())], + + // upper value of MinKey is boolean `false` + [valuesGetUpperBound(MIN_KEY_VALUE), wrap(false)], // booleans [wrap(true)], [valuesGetUpperBound({ booleanValue: false })], // numbers + [wrap(int32(2147483647))], //largest int32 value 
[wrap(Number.MAX_SAFE_INTEGER)], [wrap(Number.POSITIVE_INFINITY)], [valuesGetUpperBound({ doubleValue: NaN })], @@ -576,6 +626,10 @@ describe('Values', () => { [wrap(date1)], [valuesGetUpperBound({ timestampValue: {} })], + // bson timestamps + [wrap(bsonTimestamp(4294967295, 4294967295))], // largest bson timestamp value + [valuesGetUpperBound(MIN_BSON_TIMESTAMP_VALUE)], + // strings [wrap('\u0000')], [valuesGetUpperBound({ stringValue: '' })], @@ -584,20 +638,39 @@ describe('Values', () => { [wrap(blob(255))], [valuesGetUpperBound({ bytesValue: '' })], + // bson binary data + [wrap(bsonBinaryData(128, new Uint8Array([255, 255, 255])))], + [valuesGetUpperBound(MIN_BSON_BINARY_VALUE)], + // resource names [refValue(dbId('', ''), key('a/a'))], [valuesGetUpperBound({ referenceValue: '' })], + // bson object ids + [wrap(bsonObjectId('foo'))], + [valuesGetUpperBound(MIN_BSON_OBJECT_ID_VALUE)], + // geo points [wrap(new GeoPoint(90, 180))], [valuesGetUpperBound({ geoPointValue: {} })], + // regular expressions + [wrap(regex('a', 'i'))], + [valuesGetUpperBound(MIN_REGEX_VALUE)], + // arrays [wrap([false])], [valuesGetUpperBound({ arrayValue: {} })], + // vectors + [wrap(vector([1, 2, 3]))], + [valuesGetUpperBound(MIN_VECTOR_VALUE)], + // objects - [wrap({ 'a': 'b' })] + [wrap({ 'a': 'b' })], + + // MaxKey + [wrap(maxKey())] ]; expectCorrectComparisonGroups( From 82f32ca7de9508352e26be16ad7b1ab2f0eb0d46 Mon Sep 17 00:00:00 2001 From: Mila <107142260+milaGGL@users.noreply.github.com> Date: Fri, 14 Mar 2025 13:44:38 -0400 Subject: [PATCH 3/9] rename BsonTimestampValue class to BsonTimestamp (#333) --- packages/firestore/lite/index.ts | 2 +- .../src/lite-api/bson_timestamp_value.ts | 12 ++++++------ .../firestore/src/lite-api/field_value_impl.ts | 10 +++++----- .../firestore/src/lite-api/user_data_reader.ts | 8 ++++---- .../firestore/src/lite-api/user_data_writer.ts | 18 ++++++++---------- .../firestore/test/unit/model/values.test.ts | 4 ++-- 6 files changed, 26 
insertions(+), 28 deletions(-) diff --git a/packages/firestore/lite/index.ts b/packages/firestore/lite/index.ts index 6d1d6c01998..48e0bdae068 100644 --- a/packages/firestore/lite/index.ts +++ b/packages/firestore/lite/index.ts @@ -156,7 +156,7 @@ export { BsonBinaryData } from '../src/lite-api/bson_binary_data'; export { BsonObjectId } from '../src/lite-api/bson_object_Id'; -export { BsonTimestampValue } from '../src/lite-api/bson_timestamp_value'; +export { BsonTimestamp } from '../src/lite-api/bson_timestamp_value'; export { MinKey } from '../src/lite-api/min_key'; diff --git a/packages/firestore/src/lite-api/bson_timestamp_value.ts b/packages/firestore/src/lite-api/bson_timestamp_value.ts index 60b48157906..0b317f9042c 100644 --- a/packages/firestore/src/lite-api/bson_timestamp_value.ts +++ b/packages/firestore/src/lite-api/bson_timestamp_value.ts @@ -18,18 +18,18 @@ /** * Represents a BSON Timestamp type in Firestore documents. * - * @class BsonTimestampValue + * @class BsonTimestamp */ -export class BsonTimestampValue { +export class BsonTimestamp { constructor(readonly seconds: number, readonly increment: number) {} /** - * Returns true if this `BsonTimestampValue` is equal to the provided one. + * Returns true if this `BsonTimestamp` is equal to the provided one. * - * @param other - The `BsonTimestampValue` to compare against. - * @return 'true' if this `BsonTimestampValue` is equal to the provided one. + * @param other - The `BsonTimestamp` to compare against. + * @return 'true' if this `BsonTimestamp` is equal to the provided one. 
*/ - isEqual(other: BsonTimestampValue): boolean { + isEqual(other: BsonTimestamp): boolean { return this.seconds === other.seconds && this.increment === other.increment; } } diff --git a/packages/firestore/src/lite-api/field_value_impl.ts b/packages/firestore/src/lite-api/field_value_impl.ts index 2cc1e3522b0..ade0656e0d3 100644 --- a/packages/firestore/src/lite-api/field_value_impl.ts +++ b/packages/firestore/src/lite-api/field_value_impl.ts @@ -17,7 +17,7 @@ import { BsonBinaryData } from './bson_binary_data'; import { BsonObjectId } from './bson_object_Id'; -import { BsonTimestampValue } from './bson_timestamp_value'; +import { BsonTimestamp } from './bson_timestamp_value'; import { FieldValue } from './field_value'; import { Int32Value } from './int32_value'; import { MaxKey } from './max_key'; @@ -167,18 +167,18 @@ export function bsonObjectId(value: string): BsonObjectId { } /** - * Creates a new `BsonTimestampValue` constructed with the given seconds and increment. + * Creates a new `BsonTimestamp` constructed with the given seconds and increment. * * @param seconds - The underlying unsigned 32-bit integer for seconds. * @param seconds - The underlying unsigned 32-bit integer for increment. * - * @returns A new `BsonTimestampValue` constructed with the given seconds and increment. + * @returns A new `BsonTimestamp` constructed with the given seconds and increment. 
*/ export function bsonTimestamp( seconds: number, increment: number -): BsonTimestampValue { - return new BsonTimestampValue(seconds, increment); +): BsonTimestamp { + return new BsonTimestamp(seconds, increment); } /** diff --git a/packages/firestore/src/lite-api/user_data_reader.ts b/packages/firestore/src/lite-api/user_data_reader.ts index 3d0ce031599..9d7e6fa79f1 100644 --- a/packages/firestore/src/lite-api/user_data_reader.ts +++ b/packages/firestore/src/lite-api/user_data_reader.ts @@ -77,7 +77,7 @@ import { Dict, forEach, isEmpty } from '../util/obj'; import { BsonBinaryData } from './bson_binary_data'; import { BsonObjectId } from './bson_object_Id'; -import { BsonTimestampValue } from './bson_timestamp_value'; +import { BsonTimestamp } from './bson_timestamp_value'; import { Bytes } from './bytes'; import { Firestore } from './database'; import { FieldPath } from './field_path'; @@ -934,7 +934,7 @@ function parseScalarValue( return parseBsonObjectId(value); } else if (value instanceof Int32Value) { return parseInt32Value(value); - } else if (value instanceof BsonTimestampValue) { + } else if (value instanceof BsonTimestamp) { return parseBsonTimestamp(value); } else if (value instanceof BsonBinaryData) { return parseBsonBinaryData(context.serializer, value); @@ -1043,7 +1043,7 @@ export function parseInt32Value(value: Int32Value): ProtoValue { return { mapValue }; } -export function parseBsonTimestamp(value: BsonTimestampValue): ProtoValue { +export function parseBsonTimestamp(value: BsonTimestamp): ProtoValue { const mapValue: ProtoMapValue = { fields: { [RESERVED_BSON_TIMESTAMP_KEY]: { @@ -1105,7 +1105,7 @@ function looksLikeJsonObject(input: unknown): boolean { !(input instanceof Int32Value) && !(input instanceof RegexValue) && !(input instanceof BsonObjectId) && - !(input instanceof BsonTimestampValue) && + !(input instanceof BsonTimestamp) && !(input instanceof BsonBinaryData) ); } diff --git a/packages/firestore/src/lite-api/user_data_writer.ts 
b/packages/firestore/src/lite-api/user_data_writer.ts index 012b04874c3..e4719591b4c 100644 --- a/packages/firestore/src/lite-api/user_data_writer.ts +++ b/packages/firestore/src/lite-api/user_data_writer.ts @@ -60,7 +60,7 @@ import { forEach } from '../util/obj'; import { BsonBinaryData } from './bson_binary_data'; import { BsonObjectId } from './bson_object_Id'; -import { BsonTimestampValue } from './bson_timestamp_value'; +import { BsonTimestamp } from './bson_timestamp_value'; import { maxKey, minKey } from './field_value_impl'; import { GeoPoint } from './geo_point'; import { Int32Value } from './int32_value'; @@ -112,11 +112,11 @@ export abstract class AbstractUserDataWriter { case TypeOrder.RegexValue: return this.convertToRegexValue(value.mapValue!); case TypeOrder.BsonObjectIdValue: - return this.convertToBsonObjectIdValue(value.mapValue!); + return this.convertToBsonObjectId(value.mapValue!); case TypeOrder.BsonBinaryValue: - return this.convertToBsonBinaryValue(value.mapValue!); + return this.convertToBsonBinaryData(value.mapValue!); case TypeOrder.BsonTimestampValue: - return this.convertToBsonTimestampValue(value.mapValue!); + return this.convertToBsonTimestamp(value.mapValue!); case TypeOrder.MaxKeyValue: return maxKey(); case TypeOrder.MinKeyValue: @@ -160,13 +160,13 @@ export abstract class AbstractUserDataWriter { return new VectorValue(values); } - private convertToBsonObjectIdValue(mapValue: ProtoMapValue): BsonObjectId { + private convertToBsonObjectId(mapValue: ProtoMapValue): BsonObjectId { const oid = mapValue!.fields?.[RESERVED_BSON_OBJECT_ID_KEY]?.stringValue ?? 
''; return new BsonObjectId(oid); } - private convertToBsonBinaryValue(mapValue: ProtoMapValue): BsonBinaryData { + private convertToBsonBinaryData(mapValue: ProtoMapValue): BsonBinaryData { const fields = mapValue!.fields?.[RESERVED_BSON_BINARY_KEY]; const subtypeAndData = fields?.bytesValue; if (!subtypeAndData) { @@ -182,9 +182,7 @@ export abstract class AbstractUserDataWriter { return new BsonBinaryData(Number(subtype), data); } - private convertToBsonTimestampValue( - mapValue: ProtoMapValue - ): BsonTimestampValue { + private convertToBsonTimestamp(mapValue: ProtoMapValue): BsonTimestamp { const fields = mapValue!.fields?.[RESERVED_BSON_TIMESTAMP_KEY]; const seconds = Number( fields?.mapValue?.fields?.[RESERVED_BSON_TIMESTAMP_SECONDS_KEY] @@ -194,7 +192,7 @@ export abstract class AbstractUserDataWriter { fields?.mapValue?.fields?.[RESERVED_BSON_TIMESTAMP_INCREMENT_KEY] ?.integerValue ); - return new BsonTimestampValue(seconds, increment); + return new BsonTimestamp(seconds, increment); } private convertToRegexValue(mapValue: ProtoMapValue): RegexValue { diff --git a/packages/firestore/test/unit/model/values.test.ts b/packages/firestore/test/unit/model/values.test.ts index 7a2558d2461..4054dd6481d 100644 --- a/packages/firestore/test/unit/model/values.test.ts +++ b/packages/firestore/test/unit/model/values.test.ts @@ -21,7 +21,7 @@ import { GeoPoint, Timestamp } from '../../../src'; import { DatabaseId } from '../../../src/core/database_info'; import { BsonBinaryData } from '../../../src/lite-api/bson_binary_data'; import { BsonObjectId } from '../../../src/lite-api/bson_object_Id'; -import { BsonTimestampValue } from '../../../src/lite-api/bson_timestamp_value'; +import { BsonTimestamp } from '../../../src/lite-api/bson_timestamp_value'; import { vector, regex, @@ -118,7 +118,7 @@ describe('Values', () => { [wrap(vector([]))], [wrap(vector([1, 2.3, -4.0]))], [wrap(regex('^foo', 'i')), wrap(new RegexValue('^foo', 'i'))], - [wrap(bsonTimestamp(57, 4)), wrap(new 
BsonTimestampValue(57, 4))], + [wrap(bsonTimestamp(57, 4)), wrap(new BsonTimestamp(57, 4))], [ wrap(bsonBinaryData(128, Uint8Array.from([7, 8, 9]))), wrap(new BsonBinaryData(128, Uint8Array.from([7, 8, 9]))), From fbf72228d6a3fbb9eab9786c0e2fe2605aa9fa35 Mon Sep 17 00:00:00 2001 From: Mila <107142260+milaGGL@users.noreply.github.com> Date: Fri, 25 Apr 2025 14:39:31 -0400 Subject: [PATCH 4/9] Port BSON code updates from other sdks (#341) --- common/api-review/firestore-lite.api.md | 10 +- common/api-review/firestore.api.md | 10 +- packages/firestore/src/api.ts | 2 +- .../src/index/firestore_index_value_writer.ts | 36 +- packages/firestore/src/model/values.ts | 135 ++- .../test/integration/api/database.test.ts | 328 +++++-- .../test/integration/util/helpers.ts | 90 +- .../test/unit/local/index_manager.test.ts | 80 +- .../unit/local/local_store_indexeddb.test.ts | 832 +++++++++++++++++- .../firestore/test/unit/model/values.test.ts | 2 +- 10 files changed, 1226 insertions(+), 299 deletions(-) diff --git a/common/api-review/firestore-lite.api.md b/common/api-review/firestore-lite.api.md index 04faa9c47c6..394fd0b33ce 100644 --- a/common/api-review/firestore-lite.api.md +++ b/common/api-review/firestore-lite.api.md @@ -88,18 +88,18 @@ export class BsonObjectId { export function bsonObjectId(value: string): BsonObjectId; // @public -export function bsonTimestamp(seconds: number, increment: number): BsonTimestampValue; - -// @public -export class BsonTimestampValue { +export class BsonTimestamp { constructor(seconds: number, increment: number); // (undocumented) readonly increment: number; - isEqual(other: BsonTimestampValue): boolean; + isEqual(other: BsonTimestamp): boolean; // (undocumented) readonly seconds: number; } +// @public +export function bsonTimestamp(seconds: number, increment: number): BsonTimestamp; + // @public export class Bytes { static fromBase64String(base64: string): Bytes; diff --git a/common/api-review/firestore.api.md 
b/common/api-review/firestore.api.md index 5d3c2286859..39dcdb6df59 100644 --- a/common/api-review/firestore.api.md +++ b/common/api-review/firestore.api.md @@ -88,18 +88,18 @@ export class BsonObjectId { export function bsonObjectId(value: string): BsonObjectId; // @public -export function bsonTimestamp(seconds: number, increment: number): BsonTimestampValue; - -// @public -export class BsonTimestampValue { +export class BsonTimestamp { constructor(seconds: number, increment: number); // (undocumented) readonly increment: number; - isEqual(other: BsonTimestampValue): boolean; + isEqual(other: BsonTimestamp): boolean; // (undocumented) readonly seconds: number; } +// @public +export function bsonTimestamp(seconds: number, increment: number): BsonTimestamp; + // @public export class Bytes { static fromBase64String(base64: string): Bytes; diff --git a/packages/firestore/src/api.ts b/packages/firestore/src/api.ts index 46fb1b3bba3..ec6fdd2c4ea 100644 --- a/packages/firestore/src/api.ts +++ b/packages/firestore/src/api.ts @@ -193,7 +193,7 @@ export { BsonBinaryData } from './lite-api/bson_binary_data'; export { BsonObjectId } from './lite-api/bson_object_Id'; -export { BsonTimestampValue } from './lite-api/bson_timestamp_value'; +export { BsonTimestamp } from './lite-api/bson_timestamp_value'; export { MinKey } from './lite-api/min_key'; diff --git a/packages/firestore/src/index/firestore_index_value_writer.ts b/packages/firestore/src/index/firestore_index_value_writer.ts index d02a07313fe..6949ad99b31 100644 --- a/packages/firestore/src/index/firestore_index_value_writer.ts +++ b/packages/firestore/src/index/firestore_index_value_writer.ts @@ -23,12 +23,12 @@ import { } from '../model/normalize'; import { VECTOR_MAP_VECTORS_KEY, - detectSpecialMapType, + detectMapRepresentation, RESERVED_BSON_TIMESTAMP_KEY, RESERVED_REGEX_KEY, RESERVED_BSON_OBJECT_ID_KEY, RESERVED_BSON_BINARY_KEY, - SpecialMapValueType, + MapRepresentation, RESERVED_REGEX_PATTERN_KEY, 
RESERVED_REGEX_OPTIONS_KEY, RESERVED_INT32_KEY @@ -42,6 +42,10 @@ import { DirectionalIndexByteEncoder } from './directional_index_byte_encoder'; // Note: This file is copied from the backend. Code that is not used by // Firestore was removed. Code that has different behavior was modified. +// The client SDK only supports references to documents from the same database. We can skip the +// first five segments. +const DOCUMENT_NAME_OFFSET = 5; + const INDEX_TYPE_NULL = 5; const INDEX_TYPE_MIN_KEY = 7; const INDEX_TYPE_BOOLEAN = 10; @@ -60,7 +64,7 @@ const INDEX_TYPE_ARRAY = 50; const INDEX_TYPE_VECTOR = 53; const INDEX_TYPE_MAP = 55; const INDEX_TYPE_REFERENCE_SEGMENT = 60; -const INDEX_TYPE_MAX_VALUE = 999; +const INDEX_TYPE_MAX_KEY = 999; // A terminator that indicates that a truncatable value was not truncated. // This must be smaller than all other type labels. @@ -137,24 +141,24 @@ export class FirestoreIndexValueWriter { encoder.writeNumber(geoPoint.latitude || 0); encoder.writeNumber(geoPoint.longitude || 0); } else if ('mapValue' in indexValue) { - const type = detectSpecialMapType(indexValue); - if (type === SpecialMapValueType.INTERNAL_MAX) { + const type = detectMapRepresentation(indexValue); + if (type === MapRepresentation.INTERNAL_MAX) { this.writeValueTypeLabel(encoder, Number.MAX_SAFE_INTEGER); - } else if (type === SpecialMapValueType.VECTOR) { + } else if (type === MapRepresentation.VECTOR) { this.writeIndexVector(indexValue.mapValue!, encoder); - } else if (type === SpecialMapValueType.MAX_KEY) { - this.writeValueTypeLabel(encoder, INDEX_TYPE_MAX_VALUE); - } else if (type === SpecialMapValueType.MIN_KEY) { + } else if (type === MapRepresentation.MAX_KEY) { + this.writeValueTypeLabel(encoder, INDEX_TYPE_MAX_KEY); + } else if (type === MapRepresentation.MIN_KEY) { this.writeValueTypeLabel(encoder, INDEX_TYPE_MIN_KEY); - } else if (type === SpecialMapValueType.BSON_BINARY) { + } else if (type === MapRepresentation.BSON_BINARY) { 
this.writeIndexBsonBinaryData(indexValue.mapValue!, encoder); - } else if (type === SpecialMapValueType.REGEX) { + } else if (type === MapRepresentation.REGEX) { this.writeIndexRegex(indexValue.mapValue!, encoder); - } else if (type === SpecialMapValueType.BSON_TIMESTAMP) { + } else if (type === MapRepresentation.BSON_TIMESTAMP) { this.writeIndexBsonTimestamp(indexValue.mapValue!, encoder); - } else if (type === SpecialMapValueType.BSON_OBJECT_ID) { + } else if (type === MapRepresentation.BSON_OBJECT_ID) { this.writeIndexBsonObjectId(indexValue.mapValue!, encoder); - } else if (type === SpecialMapValueType.INT32) { + } else if (type === MapRepresentation.INT32) { this.writeValueTypeLabel(encoder, INDEX_TYPE_NUMBER); encoder.writeNumber( normalizeNumber( @@ -237,7 +241,9 @@ export class FirestoreIndexValueWriter { const segments: string[] = referenceValue .split('/') .filter(segment => segment.length > 0); - const path = DocumentKey.fromSegments(segments.slice(5)).path; + const path = DocumentKey.fromSegments( + segments.slice(DOCUMENT_NAME_OFFSET) + ).path; path.forEach(segment => { this.writeValueTypeLabel(encoder, INDEX_TYPE_REFERENCE_SEGMENT); this.writeUnlabeledIndexString(segment, encoder); diff --git a/packages/firestore/src/model/values.ts b/packages/firestore/src/model/values.ts index 02404130a77..15408892725 100644 --- a/packages/firestore/src/model/values.ts +++ b/packages/firestore/src/model/values.ts @@ -160,7 +160,7 @@ export const MIN_BSON_BINARY_VALUE: Value = { } }; -export enum SpecialMapValueType { +export enum MapRepresentation { REGEX = 'regexValue', BSON_OBJECT_ID = 'bsonObjectIdValue', INT32 = 'int32Value', @@ -195,27 +195,27 @@ export function typeOrder(value: Value): TypeOrder { } else if ('arrayValue' in value) { return TypeOrder.ArrayValue; } else if ('mapValue' in value) { - const valueType = detectSpecialMapType(value); + const valueType = detectMapRepresentation(value); switch (valueType) { - case SpecialMapValueType.SERVER_TIMESTAMP: + 
case MapRepresentation.SERVER_TIMESTAMP: return TypeOrder.ServerTimestampValue; - case SpecialMapValueType.INTERNAL_MAX: + case MapRepresentation.INTERNAL_MAX: return TypeOrder.MaxValue; - case SpecialMapValueType.VECTOR: + case MapRepresentation.VECTOR: return TypeOrder.VectorValue; - case SpecialMapValueType.REGEX: + case MapRepresentation.REGEX: return TypeOrder.RegexValue; - case SpecialMapValueType.BSON_OBJECT_ID: + case MapRepresentation.BSON_OBJECT_ID: return TypeOrder.BsonObjectIdValue; - case SpecialMapValueType.INT32: + case MapRepresentation.INT32: return TypeOrder.NumberValue; - case SpecialMapValueType.BSON_TIMESTAMP: + case MapRepresentation.BSON_TIMESTAMP: return TypeOrder.BsonTimestampValue; - case SpecialMapValueType.BSON_BINARY: + case MapRepresentation.BSON_BINARY: return TypeOrder.BsonBinaryValue; - case SpecialMapValueType.MIN_KEY: + case MapRepresentation.MIN_KEY: return TypeOrder.MinKeyValue; - case SpecialMapValueType.MAX_KEY: + case MapRepresentation.MAX_KEY: return TypeOrder.MaxKeyValue; default: return TypeOrder.ObjectValue; @@ -319,8 +319,8 @@ function blobEquals(left: Value, right: Value): boolean { export function numberEquals(left: Value, right: Value): boolean { if ( ('integerValue' in left && 'integerValue' in right) || - (detectSpecialMapType(left) === SpecialMapValueType.INT32 && - detectSpecialMapType(right) === SpecialMapValueType.INT32) + (detectMapRepresentation(left) === MapRepresentation.INT32 && + detectMapRepresentation(right) === MapRepresentation.INT32) ) { return extractNumber(left) === extractNumber(right); } else if ('doubleValue' in left && 'doubleValue' in right) { @@ -427,7 +427,7 @@ export function valueCompare(left: Value, right: Value): number { export function extractNumber(value: Value): number { let numberValue; - if (detectSpecialMapType(value) === SpecialMapValueType.INT32) { + if (detectMapRepresentation(value) === MapRepresentation.INT32) { numberValue = 
value.mapValue!.fields![RESERVED_INT32_KEY].integerValue!; } else { numberValue = value.integerValue || value.doubleValue; @@ -686,10 +686,6 @@ function canonifyValue(value: Value): string { } else if ('arrayValue' in value) { return canonifyArray(value.arrayValue!); } else if ('mapValue' in value) { - // BsonBinaryValue contains an array of bytes, and needs to extract `subtype` and `data` from it before canonifying. - if (detectSpecialMapType(value) === SpecialMapValueType.BSON_BINARY) { - return canonifyBsonBinaryData(value.mapValue!); - } return canonifyMap(value.mapValue!); } else { return fail('Invalid value type: ' + JSON.stringify(value)); @@ -713,19 +709,6 @@ function canonifyReference(referenceValue: string): string { return DocumentKey.fromName(referenceValue).toString(); } -function canonifyBsonBinaryData(mapValue: MapValue): string { - const fields = mapValue!.fields?.[RESERVED_BSON_BINARY_KEY]; - const subtypeAndData = fields?.bytesValue; - if (!subtypeAndData) { - throw new Error('Received incorrect bytesValue for BsonBinaryData'); - } - // Normalize the bytesValue to Uint8Array before extracting subtype and data. - const bytes = normalizeByteString(subtypeAndData).toUint8Array(); - return `{__binary__:{subType:${bytes.at(0)},data:${canonifyByteString( - bytes.slice(1) - )}}}`; -} - function canonifyMap(mapValue: MapValue): string { // Iteration order in JavaScript is not guaranteed. To ensure that we generate // matching canonical IDs for identical maps, we need to sort the keys. 
@@ -886,9 +869,9 @@ export function isMapValue( return !!value && 'mapValue' in value; } -export function detectSpecialMapType(value: Value): SpecialMapValueType { +export function detectMapRepresentation(value: Value): MapRepresentation { if (!value || !value.mapValue || !value.mapValue.fields) { - return SpecialMapValueType.REGULAR_MAP; // Not a special map type + return MapRepresentation.REGULAR_MAP; // Not a special map type } const fields = value.mapValue.fields; @@ -896,25 +879,31 @@ export function detectSpecialMapType(value: Value): SpecialMapValueType { // Check for type-based mappings const type = fields[TYPE_KEY]?.stringValue; if (type) { - const typeMap: Record = { - [RESERVED_VECTOR_KEY]: SpecialMapValueType.VECTOR, - [RESERVED_MAX_KEY]: SpecialMapValueType.INTERNAL_MAX, - [RESERVED_SERVER_TIMESTAMP_KEY]: SpecialMapValueType.SERVER_TIMESTAMP + const typeMap: Record = { + [RESERVED_VECTOR_KEY]: MapRepresentation.VECTOR, + [RESERVED_MAX_KEY]: MapRepresentation.INTERNAL_MAX, + [RESERVED_SERVER_TIMESTAMP_KEY]: MapRepresentation.SERVER_TIMESTAMP }; if (typeMap[type]) { return typeMap[type]; } } + if (objectSize(fields) !== 1) { + // All BSON types have 1 key in the map. To improve performance, we can + // return early if the number of keys in the map is not 1. 
+ return MapRepresentation.REGULAR_MAP; + } + // Check for BSON-related mappings - const bsonMap: Record = { - [RESERVED_REGEX_KEY]: SpecialMapValueType.REGEX, - [RESERVED_BSON_OBJECT_ID_KEY]: SpecialMapValueType.BSON_OBJECT_ID, - [RESERVED_INT32_KEY]: SpecialMapValueType.INT32, - [RESERVED_BSON_TIMESTAMP_KEY]: SpecialMapValueType.BSON_TIMESTAMP, - [RESERVED_BSON_BINARY_KEY]: SpecialMapValueType.BSON_BINARY, - [RESERVED_MIN_KEY]: SpecialMapValueType.MIN_KEY, - [RESERVED_MAX_KEY]: SpecialMapValueType.MAX_KEY + const bsonMap: Record = { + [RESERVED_REGEX_KEY]: MapRepresentation.REGEX, + [RESERVED_BSON_OBJECT_ID_KEY]: MapRepresentation.BSON_OBJECT_ID, + [RESERVED_INT32_KEY]: MapRepresentation.INT32, + [RESERVED_BSON_TIMESTAMP_KEY]: MapRepresentation.BSON_TIMESTAMP, + [RESERVED_BSON_BINARY_KEY]: MapRepresentation.BSON_BINARY, + [RESERVED_MIN_KEY]: MapRepresentation.MIN_KEY, + [RESERVED_MAX_KEY]: MapRepresentation.MAX_KEY }; for (const key in bsonMap) { @@ -923,20 +912,20 @@ export function detectSpecialMapType(value: Value): SpecialMapValueType { } } - return SpecialMapValueType.REGULAR_MAP; + return MapRepresentation.REGULAR_MAP; } export function isBsonType(value: Value): boolean { const bsonTypes = new Set([ - SpecialMapValueType.REGEX, - SpecialMapValueType.BSON_OBJECT_ID, - SpecialMapValueType.INT32, - SpecialMapValueType.BSON_TIMESTAMP, - SpecialMapValueType.BSON_BINARY, - SpecialMapValueType.MIN_KEY, - SpecialMapValueType.MAX_KEY + MapRepresentation.REGEX, + MapRepresentation.BSON_OBJECT_ID, + MapRepresentation.INT32, + MapRepresentation.BSON_TIMESTAMP, + MapRepresentation.BSON_BINARY, + MapRepresentation.MIN_KEY, + MapRepresentation.MAX_KEY ]); - return bsonTypes.has(detectSpecialMapType(value)); + return bsonTypes.has(detectMapRepresentation(value)); } /** Creates a deep copy of `source`. 
*/ @@ -987,23 +976,23 @@ export function valuesGetLowerBound(value: Value): Value { } else if ('arrayValue' in value) { return { arrayValue: {} }; } else if ('mapValue' in value) { - const type = detectSpecialMapType(value); - if (type === SpecialMapValueType.VECTOR) { + const type = detectMapRepresentation(value); + if (type === MapRepresentation.VECTOR) { return MIN_VECTOR_VALUE; - } else if (type === SpecialMapValueType.BSON_OBJECT_ID) { + } else if (type === MapRepresentation.BSON_OBJECT_ID) { return MIN_BSON_OBJECT_ID_VALUE; - } else if (type === SpecialMapValueType.BSON_TIMESTAMP) { + } else if (type === MapRepresentation.BSON_TIMESTAMP) { return MIN_BSON_TIMESTAMP_VALUE; - } else if (type === SpecialMapValueType.BSON_BINARY) { + } else if (type === MapRepresentation.BSON_BINARY) { return MIN_BSON_BINARY_VALUE; - } else if (type === SpecialMapValueType.REGEX) { + } else if (type === MapRepresentation.REGEX) { return MIN_REGEX_VALUE; - } else if (type === SpecialMapValueType.INT32) { + } else if (type === MapRepresentation.INT32) { // int32Value is treated the same as integerValue and doubleValue return { doubleValue: NaN }; - } else if (type === SpecialMapValueType.MIN_KEY) { + } else if (type === MapRepresentation.MIN_KEY) { return MIN_KEY_VALUE; - } else if (type === SpecialMapValueType.MAX_KEY) { + } else if (type === MapRepresentation.MAX_KEY) { return MAX_KEY_VALUE; } return { mapValue: {} }; @@ -1033,23 +1022,23 @@ export function valuesGetUpperBound(value: Value): Value { } else if ('arrayValue' in value) { return MIN_VECTOR_VALUE; } else if ('mapValue' in value) { - const type = detectSpecialMapType(value); - if (type === SpecialMapValueType.VECTOR) { + const type = detectMapRepresentation(value); + if (type === MapRepresentation.VECTOR) { return { mapValue: {} }; - } else if (type === SpecialMapValueType.BSON_OBJECT_ID) { + } else if (type === MapRepresentation.BSON_OBJECT_ID) { return { geoPointValue: { latitude: -90, longitude: -180 } }; - } else 
if (type === SpecialMapValueType.BSON_TIMESTAMP) { + } else if (type === MapRepresentation.BSON_TIMESTAMP) { return { stringValue: '' }; - } else if (type === SpecialMapValueType.BSON_BINARY) { + } else if (type === MapRepresentation.BSON_BINARY) { return refValue(DatabaseId.empty(), DocumentKey.empty()); - } else if (type === SpecialMapValueType.REGEX) { + } else if (type === MapRepresentation.REGEX) { return { arrayValue: {} }; - } else if (type === SpecialMapValueType.INT32) { + } else if (type === MapRepresentation.INT32) { // int32Value is treated the same as integerValue and doubleValue return { timestampValue: { seconds: Number.MIN_SAFE_INTEGER } }; - } else if (type === SpecialMapValueType.MIN_KEY) { + } else if (type === MapRepresentation.MIN_KEY) { return { booleanValue: false }; - } else if (type === SpecialMapValueType.MAX_KEY) { + } else if (type === MapRepresentation.MAX_KEY) { return INTERNAL_MAX_VALUE; } return MAX_KEY_VALUE; diff --git a/packages/firestore/test/integration/api/database.test.ts b/packages/firestore/test/integration/api/database.test.ts index d4091768e7c..2071b786f24 100644 --- a/packages/firestore/test/integration/api/database.test.ts +++ b/packages/firestore/test/integration/api/database.test.ts @@ -20,7 +20,6 @@ import { Deferred } from '@firebase/util'; import { expect, use } from 'chai'; import chaiAsPromised from 'chai-as-promised'; -import { AutoId } from '../../../src/util/misc'; import { EventsAccumulator } from '../util/events_accumulator'; import { addDoc, @@ -93,7 +92,7 @@ import { checkOnlineAndOfflineResultsMatch, toIds, withTestProjectIdAndCollectionSettings, - checkCacheRoundTrip + assertSDKQueryResultsConsistentWithBackend } from '../util/helpers'; import { DEFAULT_SETTINGS, DEFAULT_PROJECT_ID } from '../util/settings'; @@ -2439,7 +2438,8 @@ apiDescribe('Database', persistence => { }); describe('BSON types', () => { - // TODO(Mila/BSON): simplify the test setup once prod support BSON + // TODO(Mila/BSON): simplify 
the test setup once prod support BSON and + // remove the cache population after the test helper is updated const NIGHTLY_PROJECT_ID = 'firestore-sdk-nightly'; const settings = { ...DEFAULT_SETTINGS, @@ -2548,7 +2548,10 @@ apiDescribe('Database', persistence => { NIGHTLY_PROJECT_ID, settings, testDocs, - async (coll, db) => { + async coll => { + // Populate the cache with all docs first + await getDocs(coll); + let orderedQuery = query( coll, where('key', '>', bsonObjectId('507f191e810c19729de860ea')), @@ -2560,7 +2563,11 @@ apiDescribe('Database', persistence => { testDocs['c'], testDocs['b'] ]); - await checkCacheRoundTrip(orderedQuery, db, toDataArray(snapshot)); + await assertSDKQueryResultsConsistentWithBackend( + orderedQuery, + testDocs, + toIds(snapshot) + ); orderedQuery = query( coll, @@ -2576,7 +2583,11 @@ apiDescribe('Database', persistence => { testDocs['b'], testDocs['a'] ]); - await checkCacheRoundTrip(orderedQuery, db, toDataArray(snapshot)); + await assertSDKQueryResultsConsistentWithBackend( + orderedQuery, + testDocs, + toIds(snapshot) + ); } ); }); @@ -2592,7 +2603,10 @@ apiDescribe('Database', persistence => { NIGHTLY_PROJECT_ID, settings, testDocs, - async (coll, db) => { + async coll => { + // Populate the cache with all docs first + await getDocs(coll); + let orderedQuery = query( coll, where('key', '>=', int32(1)), @@ -2604,7 +2618,11 @@ apiDescribe('Database', persistence => { testDocs['c'], testDocs['b'] ]); - await checkCacheRoundTrip(orderedQuery, db, toDataArray(snapshot)); + await assertSDKQueryResultsConsistentWithBackend( + orderedQuery, + testDocs, + toIds(snapshot) + ); orderedQuery = query( coll, @@ -2617,7 +2635,11 @@ apiDescribe('Database', persistence => { testDocs['c'], testDocs['a'] ]); - await checkCacheRoundTrip(orderedQuery, db, toDataArray(snapshot)); + await assertSDKQueryResultsConsistentWithBackend( + orderedQuery, + testDocs, + toIds(snapshot) + ); } ); }); @@ -2633,7 +2655,10 @@ apiDescribe('Database', persistence 
=> { NIGHTLY_PROJECT_ID, settings, testDocs, - async (coll, db) => { + async coll => { + // Populate the cache with all docs first + await getDocs(coll); + let orderedQuery = query( coll, where('key', '>', bsonTimestamp(1, 1)), @@ -2645,7 +2670,11 @@ apiDescribe('Database', persistence => { testDocs['c'], testDocs['b'] ]); - await checkCacheRoundTrip(orderedQuery, db, toDataArray(snapshot)); + await assertSDKQueryResultsConsistentWithBackend( + orderedQuery, + testDocs, + toIds(snapshot) + ); orderedQuery = query( coll, @@ -2658,7 +2687,11 @@ apiDescribe('Database', persistence => { testDocs['c'], testDocs['b'] ]); - await checkCacheRoundTrip(orderedQuery, db, toDataArray(snapshot)); + await assertSDKQueryResultsConsistentWithBackend( + orderedQuery, + testDocs, + toIds(snapshot) + ); } ); }); @@ -2674,7 +2707,10 @@ apiDescribe('Database', persistence => { NIGHTLY_PROJECT_ID, settings, testDocs, - async (coll, db) => { + async coll => { + // Populate the cache with all docs first + await getDocs(coll); + let orderedQuery = query( coll, where('key', '>', bsonBinaryData(1, new Uint8Array([1, 2, 3]))), @@ -2686,7 +2722,11 @@ apiDescribe('Database', persistence => { testDocs['c'], testDocs['b'] ]); - await checkCacheRoundTrip(orderedQuery, db, toDataArray(snapshot)); + await assertSDKQueryResultsConsistentWithBackend( + orderedQuery, + testDocs, + toIds(snapshot) + ); orderedQuery = query( coll, @@ -2700,7 +2740,11 @@ apiDescribe('Database', persistence => { testDocs['b'], testDocs['a'] ]); - await checkCacheRoundTrip(orderedQuery, db, toDataArray(snapshot)); + await assertSDKQueryResultsConsistentWithBackend( + orderedQuery, + testDocs, + toIds(snapshot) + ); } ); }); @@ -2716,7 +2760,10 @@ apiDescribe('Database', persistence => { NIGHTLY_PROJECT_ID, settings, testDocs, - async (coll, db) => { + async coll => { + // Populate the cache with all docs first + await getDocs(coll); + const orderedQuery = query( coll, or( @@ -2731,7 +2778,11 @@ apiDescribe('Database', 
persistence => { testDocs['c'], testDocs['a'] ]); - await checkCacheRoundTrip(orderedQuery, db, toDataArray(snapshot)); + await assertSDKQueryResultsConsistentWithBackend( + orderedQuery, + testDocs, + toIds(snapshot) + ); } ); }); @@ -2749,22 +2800,34 @@ apiDescribe('Database', persistence => { NIGHTLY_PROJECT_ID, settings, testDocs, - async (coll, db) => { + async coll => { + // Populate the cache with all docs first + await getDocs(coll); + let filteredQuery = query(coll, where('key', '==', minKey())); let snapshot = await getDocs(filteredQuery); expect(toDataArray(snapshot)).to.deep.equal([ testDocs['a'], testDocs['b'] ]); - await checkCacheRoundTrip(filteredQuery, db, toDataArray(snapshot)); + await assertSDKQueryResultsConsistentWithBackend( + filteredQuery, + testDocs, + toIds(snapshot) + ); - filteredQuery = query(coll, where('key', '!=', minKey())); - snapshot = await getDocs(filteredQuery); - expect(toDataArray(snapshot)).to.deep.equal([ - testDocs['d'], - testDocs['e'] - ]); - await checkCacheRoundTrip(filteredQuery, db, toDataArray(snapshot)); + // TODO(Mila/BSON): uncomment after the null inclusion bug + // filteredQuery = query(coll, where('key', '!=', minKey())); + // snapshot = await getDocs(filteredQuery); + // expect(toDataArray(snapshot)).to.deep.equal([ + // testDocs['d'], + // testDocs['e'] + // ]); + // await assertSDKQueryResultsConsistentWithBackend( + // filteredQuery, + // testDocs, + // toIds(snapshot) + // ); filteredQuery = query(coll, where('key', '>=', minKey())); snapshot = await getDocs(filteredQuery); @@ -2772,7 +2835,11 @@ apiDescribe('Database', persistence => { testDocs['a'], testDocs['b'] ]); - await checkCacheRoundTrip(filteredQuery, db, toDataArray(snapshot)); + await assertSDKQueryResultsConsistentWithBackend( + filteredQuery, + testDocs, + toIds(snapshot) + ); filteredQuery = query(coll, where('key', '<=', minKey())); snapshot = await getDocs(filteredQuery); @@ -2780,22 +2847,38 @@ apiDescribe('Database', persistence => { 
testDocs['a'], testDocs['b'] ]); - await checkCacheRoundTrip(filteredQuery, db, toDataArray(snapshot)); + await assertSDKQueryResultsConsistentWithBackend( + filteredQuery, + testDocs, + toIds(snapshot) + ); filteredQuery = query(coll, where('key', '>', minKey())); snapshot = await getDocs(filteredQuery); expect(toDataArray(snapshot)).to.deep.equal([]); - await checkCacheRoundTrip(filteredQuery, db, toDataArray(snapshot)); + await assertSDKQueryResultsConsistentWithBackend( + filteredQuery, + testDocs, + toIds(snapshot) + ); filteredQuery = query(coll, where('key', '<', minKey())); snapshot = await getDocs(filteredQuery); expect(toDataArray(snapshot)).to.deep.equal([]); - await checkCacheRoundTrip(filteredQuery, db, toDataArray(snapshot)); + await assertSDKQueryResultsConsistentWithBackend( + filteredQuery, + testDocs, + toIds(snapshot) + ); filteredQuery = query(coll, where('key', '<', 1)); snapshot = await getDocs(filteredQuery); expect(toDataArray(snapshot)).to.deep.equal([]); - await checkCacheRoundTrip(filteredQuery, db, toDataArray(snapshot)); + await assertSDKQueryResultsConsistentWithBackend( + filteredQuery, + testDocs, + toIds(snapshot) + ); } ); }); @@ -2813,22 +2896,34 @@ apiDescribe('Database', persistence => { NIGHTLY_PROJECT_ID, settings, testDocs, - async (coll, db) => { + async coll => { + // Populate the cache with all docs first + await getDocs(coll); + let filteredQuery = query(coll, where('key', '==', maxKey())); let snapshot = await getDocs(filteredQuery); expect(toDataArray(snapshot)).to.deep.equal([ testDocs['c'], testDocs['d'] ]); - await checkCacheRoundTrip(filteredQuery, db, toDataArray(snapshot)); + await assertSDKQueryResultsConsistentWithBackend( + filteredQuery, + testDocs, + toIds(snapshot) + ); - filteredQuery = query(coll, where('key', '!=', maxKey())); - snapshot = await getDocs(filteredQuery); - expect(toDataArray(snapshot)).to.deep.equal([ - testDocs['a'], - testDocs['b'] - ]); - await checkCacheRoundTrip(filteredQuery, db, 
toDataArray(snapshot)); + // TODO(Mila/BSON): uncomment after the null inclusion bug + // filteredQuery = query(coll, where('key', '!=', maxKey())); + // snapshot = await getDocs(filteredQuery); + // expect(toDataArray(snapshot)).to.deep.equal([ + // testDocs['a'], + // testDocs['b'] + // ]); + // await assertSDKQueryResultsConsistentWithBackend( + // filteredQuery, + // testDocs, + // toIds(snapshot) + // ); filteredQuery = query(coll, where('key', '>=', maxKey())); snapshot = await getDocs(filteredQuery); @@ -2836,7 +2931,11 @@ apiDescribe('Database', persistence => { testDocs['c'], testDocs['d'] ]); - await checkCacheRoundTrip(filteredQuery, db, toDataArray(snapshot)); + await assertSDKQueryResultsConsistentWithBackend( + filteredQuery, + testDocs, + toIds(snapshot) + ); filteredQuery = query(coll, where('key', '<=', maxKey())); snapshot = await getDocs(filteredQuery); @@ -2844,22 +2943,38 @@ apiDescribe('Database', persistence => { testDocs['c'], testDocs['d'] ]); - await checkCacheRoundTrip(filteredQuery, db, toDataArray(snapshot)); + await assertSDKQueryResultsConsistentWithBackend( + filteredQuery, + testDocs, + toIds(snapshot) + ); filteredQuery = query(coll, where('key', '>', maxKey())); snapshot = await getDocs(filteredQuery); expect(toDataArray(snapshot)).to.deep.equal([]); - await checkCacheRoundTrip(filteredQuery, db, toDataArray(snapshot)); + await assertSDKQueryResultsConsistentWithBackend( + filteredQuery, + testDocs, + toIds(snapshot) + ); filteredQuery = query(coll, where('key', '<', maxKey())); snapshot = await getDocs(filteredQuery); expect(toDataArray(snapshot)).to.deep.equal([]); - await checkCacheRoundTrip(filteredQuery, db, toDataArray(snapshot)); + await assertSDKQueryResultsConsistentWithBackend( + filteredQuery, + testDocs, + toIds(snapshot) + ); filteredQuery = query(coll, where('key', '>', 1)); snapshot = await getDocs(filteredQuery); expect(toDataArray(snapshot)).to.deep.equal([]); - await checkCacheRoundTrip(filteredQuery, db, 
toDataArray(snapshot)); + await assertSDKQueryResultsConsistentWithBackend( + filteredQuery, + testDocs, + toIds(snapshot) + ); } ); }); @@ -2878,14 +2993,21 @@ apiDescribe('Database', persistence => { NIGHTLY_PROJECT_ID, settings, testDocs, - async (coll, db) => { + async coll => { + // Populate the cache with all docs first + await getDocs(coll); + let filteredQuery = query(coll, where('key', '==', null)); let snapshot = await getDocs(filteredQuery); expect(toDataArray(snapshot)).to.deep.equal([ testDocs['b'], testDocs['c'] ]); - await checkCacheRoundTrip(filteredQuery, db, toDataArray(snapshot)); + await assertSDKQueryResultsConsistentWithBackend( + filteredQuery, + testDocs, + toIds(snapshot) + ); filteredQuery = query(coll, where('key', '!=', null)); snapshot = await getDocs(filteredQuery); @@ -2894,7 +3016,11 @@ apiDescribe('Database', persistence => { testDocs['d'], testDocs['e'] ]); - await checkCacheRoundTrip(filteredQuery, db, toDataArray(snapshot)); + await assertSDKQueryResultsConsistentWithBackend( + filteredQuery, + testDocs, + toIds(snapshot) + ); } ); }); @@ -2913,7 +3039,7 @@ apiDescribe('Database', persistence => { NIGHTLY_PROJECT_ID, settings, testDocs, - async (coll, db) => { + async coll => { const orderedQuery = query(coll, orderBy('key', 'asc')); const storeEvent = new EventsAccumulator(); @@ -2955,18 +3081,17 @@ apiDescribe('Database', persistence => { b: { key: regex('^foo', 'i') }, c: { key: bsonBinaryData(1, new Uint8Array([1, 2, 3])) } }; - return withTestDbsSettings( + return withTestProjectIdAndCollectionSettings( persistence, NIGHTLY_PROJECT_ID, settings, - 1, - async dbs => { - const coll = collection(dbs[0], AutoId.newId()); + {}, + async (coll, db) => { const docA = await addDoc(coll, testDocs['a']); const docB = await addDoc(coll, { key: 'place holder' }); const docC = await addDoc(coll, testDocs['c']); - await runTransaction(dbs[0], async transaction => { + await runTransaction(db, async transaction => { const docSnapshot = await 
transaction.get(docA); expect(docSnapshot.data()).to.deep.equal(testDocs['a']); transaction.set(docB, testDocs['b']); @@ -3018,30 +3143,37 @@ apiDescribe('Database', persistence => { // TODO(Mila/BSON): remove after prod supports bson, and use `ref` helper function instead const docRef = doc(coll, 'doc'); await setDoc(doc(coll, 'm'), { key: docRef }); + testDocs['m'] = { key: docRef }; + + // Populate the cache with all docs first + await getDocs(coll); const orderedQuery = query(coll, orderBy('key', 'desc')); - await checkOnlineAndOfflineResultsMatch( + await assertSDKQueryResultsConsistentWithBackend( orderedQuery, - 't', - 's', - 'r', - 'q', - 'p', - 'o', - 'n', - 'm', - 'l', - 'k', - 'j', - 'i', - 'h', - 'g', - 'f', - 'e', - 'd', - 'c', - 'b', - 'a' + testDocs, + [ + 't', + 's', + 'r', + 'q', + 'p', + 'o', + 'n', + 'm', + 'l', + 'k', + 'j', + 'i', + 'h', + 'g', + 'f', + 'e', + 'd', + 'c', + 'b', + 'a' + ] ); } ); @@ -3076,28 +3208,34 @@ apiDescribe('Database', persistence => { settings, testDocs, async coll => { + // Populate the cache with all docs first + await getDocs(coll); + const orderedQuery = query(coll, orderBy('key')); - await checkOnlineAndOfflineResultsMatch( + await assertSDKQueryResultsConsistentWithBackend( orderedQuery, - 'r', - 's', - 'd', - 'e', - 'c', - 'f', - 'h', - 'g', - 'j', - 'i', - 'k', - 'n', - 'm', - 'l', - 'q', - 'o', - 'p', - 'a', - 'b' + testDocs, + [ + 'r', + 's', + 'd', + 'e', + 'c', + 'f', + 'h', + 'g', + 'j', + 'i', + 'k', + 'n', + 'm', + 'l', + 'q', + 'o', + 'p', + 'a', + 'b' + ] ); } ); diff --git a/packages/firestore/test/integration/util/helpers.ts b/packages/firestore/test/integration/util/helpers.ts index 2c789ec9151..092721e113c 100644 --- a/packages/firestore/test/integration/util/helpers.ts +++ b/packages/firestore/test/integration/util/helpers.ts @@ -18,6 +18,7 @@ import { isIndexedDBAvailable } from '@firebase/util'; import { expect } from 'chai'; +import { EventsAccumulator } from './events_accumulator'; import { 
clearIndexedDbPersistence, collection, @@ -45,8 +46,7 @@ import { getDocsFromServer, getDocsFromCache, _AutoId, - disableNetwork, - enableNetwork + onSnapshot } from './firebase_export'; import { ALT_PROJECT_ID, @@ -596,32 +596,74 @@ export async function checkOnlineAndOfflineResultsMatch( } /** - * Checks that documents fetched from the server and stored in the cache can be - * successfully retrieved from the cache and matches the expected documents. - * - * This function performs the following steps: - * 1. Fetch documents from the server for provided query and populate the cache. - * 2. Disables the network connection to simulate offline mode. - * 3. Retrieves the documents from the cache using the same query. - * 4. Compares the cached documents with the expected documents. - * - * @param query The query to check. - * @param db The Firestore database instance. - * @param expectedDocs Optional ordered list of document data that are expected to be retrieved from the cache. + * Asserts that the given query produces the expected result for all of the + * following scenarios: + * 1. Performing the given query using source=server, compare with expected result and populate + * cache. + * 2. Performing the given query using source=cache, compare with server result and expected + * result. + * 3. Using a snapshot listener to raise snapshots from cache and server, compare them with + * expected result. + * @param {firebase.firestore.Query} query The query to test. + * @param {Object>} allData A map of document IDs to their data. + * @param {string[]} expectedDocIds An array of expected document IDs in the result. + * @returns {Promise} A Promise that resolves when the assertions are complete. 
*/ -export async function checkCacheRoundTrip( +export async function assertSDKQueryResultsConsistentWithBackend( query: Query, - db: Firestore, - expectedDocs: DocumentData[] + allData: { [key: string]: DocumentData }, + expectedDocIds: string[] ): Promise { - await getDocsFromServer(query); + // Check the cache round trip first to make sure cache is properly populated, otherwise the + // snapshot listener below will return partial results from previous + // "assertSDKQueryResultsConsistentWithBackend" calls if it is called multiple times in one test + await checkOnlineAndOfflineResultsMatch(query, ...expectedDocIds); + + const eventAccumulator = new EventsAccumulator(); + const unsubscribe = onSnapshot( + query, + { includeMetadataChanges: true }, + eventAccumulator.storeEvent + ); + let watchSnapshots; + try { + watchSnapshots = await eventAccumulator.awaitEvents(2); + } finally { + unsubscribe(); + } - await disableNetwork(db); - const docsFromCache = await getDocsFromCache(query); + expect(watchSnapshots[0].metadata.fromCache).to.be.true; + verifySnapshot(watchSnapshots[0], allData, expectedDocIds); + expect(watchSnapshots[1].metadata.fromCache).to.be.false; + verifySnapshot(watchSnapshots[1], allData, expectedDocIds); +} - if (expectedDocs.length !== 0) { - expect(expectedDocs).to.deep.equal(toDataArray(docsFromCache)); +/** + * Verifies that a QuerySnapshot matches the expected data and document IDs. + * @param {firebase.firestore.QuerySnapshot} snapshot The QuerySnapshot to verify. + * @param {Object>} allData A map of document IDs to their data. + * @param {string[]} expectedDocIds An array of expected document IDs in the result. + */ +function verifySnapshot( + snapshot: QuerySnapshot, + allData: { [key: string]: DocumentData }, + expectedDocIds: string[] +): void { + const snapshotDocIds = toIds(snapshot); + expect( + expectedDocIds.length === snapshotDocIds.length, + `Did not get the same document size. 
Expected doc size: ${expectedDocIds.length}, Actual doc size: ${snapshotDocIds.length} ` + ).to.be.true; + + expect( + expectedDocIds.every((id, index) => id === snapshotDocIds[index]), + `Did not get the expected document IDs. Expected doc IDs: ${expectedDocIds}, Actual doc IDs: ${snapshotDocIds} ` + ).to.be.true; + + const actualDocs = toDataMap(snapshot); + for (const docId of expectedDocIds) { + const expectedDoc = allData[docId]; + const actualDoc = actualDocs[docId]; + expect(expectedDoc).to.deep.equal(actualDoc); } - - await enableNetwork(db); } diff --git a/packages/firestore/test/unit/local/index_manager.test.ts b/packages/firestore/test/unit/local/index_manager.test.ts index 51bef76b31e..1097f5e682e 100644 --- a/packages/firestore/test/unit/local/index_manager.test.ts +++ b/packages/firestore/test/unit/local/index_manager.test.ts @@ -2371,64 +2371,64 @@ describe('IndexedDbIndexManager', async () => { await indexManager.addFieldIndex( fieldIndex('coll', { fields: [['key', IndexKind.DESCENDING]] }) ); - await addDoc('coll/a', { + await addDoc('coll/doc1', { key: null }); - await addDoc('coll/b', { + await addDoc('coll/doc2', { key: minKey() }); - await addDoc('coll/c', { + await addDoc('coll/doc3', { key: true }); - await addDoc('coll/d', { + await addDoc('coll/doc4', { key: NaN }); - await addDoc('coll/e', { + await addDoc('coll/doc5', { key: int32(1) }); - await addDoc('coll/f', { + await addDoc('coll/doc6', { key: 2.0 }); - await addDoc('coll/g', { + await addDoc('coll/doc7', { key: 3 }); - await addDoc('coll/h', { + await addDoc('coll/doc8', { key: new Timestamp(100, 123456000) }); - await addDoc('coll/i', { + await addDoc('coll/doc9', { key: bsonTimestamp(1, 2) }); - await addDoc('coll/j', { + await addDoc('coll/doc10', { key: 'string' }); - await addDoc('coll/k', { + await addDoc('coll/doc11', { key: Bytes.fromUint8Array(new Uint8Array([0, 1, 255])) as Bytes }); - await addDoc('coll/l', { + await addDoc('coll/doc12', { key: bsonBinaryData(1, new 
Uint8Array([1, 2, 3])) }); - await addDoc('coll/m', { + await addDoc('coll/doc13', { key: ref('coll/doc') }); - await addDoc('coll/n', { + await addDoc('coll/doc14', { key: bsonObjectId('507f191e810c19729de860ea') }); - await addDoc('coll/o', { + await addDoc('coll/doc15', { key: new GeoPoint(0, 1) }); - await addDoc('coll/p', { + await addDoc('coll/doc16', { key: regex('^foo', 'i') }); - await addDoc('coll/q', { + await addDoc('coll/doc17', { key: [1, 2] }); - await addDoc('coll/r', { + await addDoc('coll/doc18', { key: vector([1, 2]) }); - await addDoc('coll/s', { + await addDoc('coll/doc19', { key: { a: 1 } }); - await addDoc('coll/t', { + await addDoc('coll/doc20', { key: maxKey() }); @@ -2438,26 +2438,26 @@ describe('IndexedDbIndexManager', async () => { const q = queryWithAddedOrderBy(query('coll'), orderBy('key', 'desc')); await verifyResults( q, - 'coll/t', - 'coll/s', - 'coll/r', - 'coll/q', - 'coll/p', - 'coll/o', - 'coll/n', - 'coll/m', - 'coll/l', - 'coll/k', - 'coll/j', - 'coll/i', - 'coll/h', - 'coll/g', - 'coll/f', - 'coll/e', - 'coll/d', - 'coll/c', - 'coll/b', - 'coll/a' + 'coll/doc20', + 'coll/doc19', + 'coll/doc18', + 'coll/doc17', + 'coll/doc16', + 'coll/doc15', + 'coll/doc14', + 'coll/doc13', + 'coll/doc12', + 'coll/doc11', + 'coll/doc10', + 'coll/doc9', + 'coll/doc8', + 'coll/doc7', + 'coll/doc6', + 'coll/doc5', + 'coll/doc4', + 'coll/doc3', + 'coll/doc2', + 'coll/doc1' ); }); diff --git a/packages/firestore/test/unit/local/local_store_indexeddb.test.ts b/packages/firestore/test/unit/local/local_store_indexeddb.test.ts index 6f0275ab4ad..0e5afbc2914 100644 --- a/packages/firestore/test/unit/local/local_store_indexeddb.test.ts +++ b/packages/firestore/test/unit/local/local_store_indexeddb.test.ts @@ -18,17 +18,28 @@ import { isIndexedDBAvailable } from '@firebase/util'; import { expect } from 'chai'; -import { serverTimestamp, Timestamp } from '../../../src'; +import { serverTimestamp, Timestamp, GeoPoint } from '../../../src'; import { User } 
from '../../../src/auth/user'; import { BundleConverterImpl } from '../../../src/core/bundle_impl'; import { LimitType, + newQueryComparator, Query, queryToTarget, queryWithLimit } from '../../../src/core/query'; import { Target } from '../../../src/core/target'; import { TargetId } from '../../../src/core/types'; +import { + bsonBinaryData, + bsonObjectId, + bsonTimestamp, + int32, + maxKey, + minKey, + regex, + vector +} from '../../../src/lite-api/field_value_impl'; import { IndexBackfiller } from '../../../src/local/index_backfiller'; import { LocalStore } from '../../../src/local/local_store'; import { @@ -44,6 +55,7 @@ import { } from '../../../src/local/local_store_impl'; import { Persistence } from '../../../src/local/persistence'; import { DocumentMap } from '../../../src/model/collections'; +import { Document } from '../../../src/model/document'; import { DocumentKey } from '../../../src/model/document_key'; import { FieldIndex, @@ -53,6 +65,7 @@ import { import { Mutation, MutationType } from '../../../src/model/mutation'; import { MutationBatch } from '../../../src/model/mutation_batch'; import { RemoteEvent } from '../../../src/remote/remote_event'; +import { SortedSet } from '../../../src/util/sorted_set'; import { deletedDoc, deleteMutation, @@ -65,8 +78,10 @@ import { orderBy, orFilter, query, + ref, setMutation, - version + version, + blob } from '../../util/helpers'; import { CountingQueryEngine } from './counting_query_engine'; @@ -208,11 +223,20 @@ class AsyncLocalStoreTester { } } - assertQueryReturned(...keys: string[]): void { + assertQueryReturned(query: Query, ...keys: string[]): void { expect(this.lastChanges).to.exist; - for (const k of keys) { - expect(this.lastChanges?.get(key(k))).to.exist; - } + expect(this.lastChanges?.size === keys.length).to.be.true; + + // lastChanges is a DocumentMap sorted by document keys. Re-sort the documents by the query comparator. 
+ let returnedDocs = new SortedSet(newQueryComparator(query)); + this.lastChanges!.forEach((key, doc) => { + returnedDocs = returnedDocs.add(doc); + }); + + let i = 0; + returnedDocs.forEach(doc => { + expect(keys[i++]).to.equal(doc.key.path.toString()); + }); } async backfillIndexes(config?: { @@ -331,7 +355,7 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { await test.executeQuery(queryMatches); test.assertRemoteDocumentsRead(1, 0); - test.assertQueryReturned('coll/a'); + test.assertQueryReturned(queryMatches, 'coll/a'); await test.applyRemoteEvent( docUpdateRemoteEvent(deletedDoc('coll/a', 0), [targetId]) @@ -340,7 +364,7 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { // No backfill needed for deleted document. await test.executeQuery(queryMatches); test.assertRemoteDocumentsRead(0, 0); - test.assertQueryReturned(); + test.assertQueryReturned(queryMatches); }); it('Uses Indexes', async () => { @@ -360,7 +384,7 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { await test.executeQuery(queryMatches); test.assertRemoteDocumentsRead(1, 0); - test.assertQueryReturned('coll/a'); + test.assertQueryReturned(queryMatches, 'coll/a'); }); it('Uses Partially Indexed Remote Documents When Available', async () => { @@ -384,7 +408,7 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { await test.executeQuery(queryMatches); test.assertRemoteDocumentsRead(1, 1); - test.assertQueryReturned('coll/a', 'coll/b'); + test.assertQueryReturned(queryMatches, 'coll/a', 'coll/b'); }); it('Uses Partially Indexed Overlays When Available', async () => { @@ -405,7 +429,7 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { [key('coll/a').toString()]: MutationType.Set, [key('coll/b').toString()]: MutationType.Set }); - test.assertQueryReturned('coll/a', 'coll/b'); + test.assertQueryReturned(queryMatches, 'coll/a', 'coll/b'); }); it('Does Not Use Limit When Index Is Outdated', async () 
=> { @@ -443,7 +467,7 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { test.assertOverlaysRead(5, 1, { [key('coll/b').toString()]: MutationType.Delete }); - test.assertQueryReturned('coll/a', 'coll/c'); + test.assertQueryReturned(queryCount, 'coll/a', 'coll/c'); }); it('Uses Index For Limit Query When Index Is Updated', async () => { @@ -476,7 +500,7 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { await test.executeQuery(queryCount); test.assertRemoteDocumentsRead(2, 0); test.assertOverlaysRead(2, 0, {}); - test.assertQueryReturned('coll/a', 'coll/c'); + test.assertQueryReturned(queryCount, 'coll/a', 'coll/c'); }); it('Indexes Server Timestamps', async () => { @@ -496,7 +520,7 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { test.assertOverlaysRead(1, 0, { [key('coll/a').toString()]: MutationType.Set }); - test.assertQueryReturned('coll/a'); + test.assertQueryReturned(queryTime, 'coll/a'); }); it('can auto-create indexes', async () => { @@ -522,7 +546,7 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { // Full matched index should be created. await test.executeQuery(query_); test.assertRemoteDocumentsRead(0, 2); - test.assertQueryReturned('coll/a', 'coll/e'); + test.assertQueryReturned(query_, 'coll/a', 'coll/e'); await test.backfillIndexes(); @@ -532,7 +556,7 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { await test.executeQuery(query_); test.assertRemoteDocumentsRead(2, 1); - test.assertQueryReturned('coll/a', 'coll/e', 'coll/f'); + test.assertQueryReturned(query_, 'coll/a', 'coll/e', 'coll/f'); }); it('can auto-create indexes works with or query', async () => { @@ -561,7 +585,7 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { // Full matched index should be created. 
await test.executeQuery(query_); test.assertRemoteDocumentsRead(0, 2); - test.assertQueryReturned('coll/a', 'coll/e'); + test.assertQueryReturned(query_, 'coll/a', 'coll/e'); await test.backfillIndexes(); @@ -571,7 +595,7 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { await test.executeQuery(query_); test.assertRemoteDocumentsRead(2, 1); - test.assertQueryReturned('coll/a', 'coll/e', 'coll/f'); + test.assertQueryReturned(query_, 'coll/a', 'coll/e', 'coll/f'); }); it('does not auto-create indexes for small collections', async () => { @@ -597,7 +621,7 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { // SDK will not create indexes since collection size is too small. await test.executeQuery(query_); test.assertRemoteDocumentsRead(0, 2); - test.assertQueryReturned('coll/a', 'coll/e'); + test.assertQueryReturned(query_, 'coll/e', 'coll/a'); await test.backfillIndexes(); @@ -607,7 +631,7 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { await test.executeQuery(query_); test.assertRemoteDocumentsRead(0, 3); - test.assertQueryReturned('coll/a', 'coll/e', 'coll/f'); + test.assertQueryReturned(query_, 'coll/e', 'coll/f', 'coll/a'); }); it('does not auto create indexes when index lookup is expensive', async () => { @@ -632,7 +656,7 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { // SDK will not create indexes since relative read cost is too large. 
await test.executeQuery(query_); test.assertRemoteDocumentsRead(0, 2); - test.assertQueryReturned('coll/a', 'coll/e'); + test.assertQueryReturned(query_, 'coll/a', 'coll/e'); await test.backfillIndexes(); @@ -642,7 +666,7 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { await test.executeQuery(query_); test.assertRemoteDocumentsRead(0, 3); - test.assertQueryReturned('coll/a', 'coll/e', 'coll/f'); + test.assertQueryReturned(query_, 'coll/a', 'coll/e', 'coll/f'); }); it('index auto creation works when backfiller runs halfway', async () => { @@ -680,7 +704,7 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { // Full matched index should be created. await test.executeQuery(query_); test.assertRemoteDocumentsRead(0, 2); - test.assertQueryReturned('coll/a', 'coll/e'); + test.assertQueryReturned(query_, 'coll/a', 'coll/e'); await test.backfillIndexes({ maxDocumentsToProcess: 2 }); @@ -692,7 +716,7 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { await test.executeQuery(query_); test.assertRemoteDocumentsRead(1, 2); - test.assertQueryReturned('coll/a', 'coll/e', 'coll/f'); + test.assertQueryReturned(query_, 'coll/a', 'coll/f', 'coll/e'); }); it('index created by index auto creation exists after turn off auto creation', async () => { @@ -718,7 +742,7 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { // Full matched index should be created. 
await test.executeQuery(query_); test.assertRemoteDocumentsRead(0, 2); - test.assertQueryReturned('coll/a', 'coll/e'); + test.assertQueryReturned(query_, 'coll/e', 'coll/a'); test.configureIndexAutoCreation({ isEnabled: false }); await test.backfillIndexes(); @@ -729,7 +753,7 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { await test.executeQuery(query_); test.assertRemoteDocumentsRead(2, 1); - test.assertQueryReturned('coll/a', 'coll/e', 'coll/f'); + test.assertQueryReturned(query_, 'coll/e', 'coll/a', 'coll/f'); }); it('disable index auto creation works', async () => { @@ -757,13 +781,13 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { // Full matched index should be created. await test.executeQuery(query1); test.assertRemoteDocumentsRead(0, 2); - test.assertQueryReturned('coll/a', 'coll/e'); + test.assertQueryReturned(query1, 'coll/a', 'coll/e'); test.configureIndexAutoCreation({ isEnabled: false }); await test.backfillIndexes(); await test.executeQuery(query1); test.assertRemoteDocumentsRead(2, 0); - test.assertQueryReturned('coll/a', 'coll/e'); + test.assertQueryReturned(query1, 'coll/a', 'coll/e'); const targetId2 = await test.allocateQuery(query2); await test.applyRemoteEvents( @@ -776,14 +800,14 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { await test.executeQuery(query2); test.assertRemoteDocumentsRead(0, 2); - test.assertQueryReturned('foo/a', 'foo/e'); + test.assertQueryReturned(query2, 'foo/a', 'foo/e'); await test.backfillIndexes(); // Run the query in second time, test index won't be created await test.executeQuery(query2); test.assertRemoteDocumentsRead(0, 2); - test.assertQueryReturned('foo/a', 'foo/e'); + test.assertQueryReturned(query2, 'foo/a', 'foo/e'); }); it('index auto creation works with mutation', async () => { @@ -811,7 +835,7 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { await test.executeQuery(query_); test.assertRemoteDocumentsRead(0, 
2); - test.assertQueryReturned('coll/a', 'coll/e'); + test.assertQueryReturned(query_, 'coll/a', 'coll/e'); await test.writeMutations(deleteMutation('coll/e')); await test.backfillIndexes(); @@ -820,7 +844,7 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { await test.executeQuery(query_); test.assertRemoteDocumentsRead(1, 0); test.assertOverlaysRead(1, 1); - test.assertQueryReturned('coll/a', 'coll/f'); + test.assertQueryReturned(query_, 'coll/a', 'coll/f'); }); it('delete all indexes works with index auto creation', async () => { @@ -847,24 +871,24 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { // Full matched index should be created. await test.executeQuery(query_); test.assertRemoteDocumentsRead(0, 2); - test.assertQueryReturned('coll/a', 'coll/e'); + test.assertQueryReturned(query_, 'coll/a', 'coll/e'); await test.backfillIndexes(); await test.executeQuery(query_); test.assertRemoteDocumentsRead(2, 0); - test.assertQueryReturned('coll/a', 'coll/e'); + test.assertQueryReturned(query_, 'coll/a', 'coll/e'); await test.deleteAllFieldIndexes(); await test.executeQuery(query_); test.assertRemoteDocumentsRead(0, 2); - test.assertQueryReturned('coll/a', 'coll/e'); + test.assertQueryReturned(query_, 'coll/a', 'coll/e'); // Field index is created again. 
await test.backfillIndexes(); await test.executeQuery(query_); test.assertRemoteDocumentsRead(2, 0); - test.assertQueryReturned('coll/a', 'coll/e'); + test.assertQueryReturned(query_, 'coll/a', 'coll/e'); }); it('delete all indexes works with manual added indexes', async () => { @@ -884,13 +908,13 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { await test.executeQuery(query_); test.assertRemoteDocumentsRead(1, 0); - test.assertQueryReturned('coll/a'); + test.assertQueryReturned(query_, 'coll/a'); await test.deleteAllFieldIndexes(); await test.executeQuery(query_); test.assertRemoteDocumentsRead(0, 1); - test.assertQueryReturned('coll/a'); + test.assertQueryReturned(query_, 'coll/a'); }); it('index auto creation does not work with multiple inequality', async () => { @@ -930,11 +954,739 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { // support multiple inequality. await test.executeQuery(query_); test.assertRemoteDocumentsRead(0, 2); - test.assertQueryReturned('coll/a', 'coll/e'); + test.assertQueryReturned(query_, 'coll/a', 'coll/e'); await test.backfillIndexes(); await test.executeQuery(query_); test.assertRemoteDocumentsRead(0, 2); - test.assertQueryReturned('coll/a', 'coll/e'); + test.assertQueryReturned(query_, 'coll/a', 'coll/e'); + }); + + describe('BSON type indexing', () => { + it('Indexes BSON ObjectId fields', async () => { + const index = fieldIndex('coll', { + id: 1, + fields: [['key', IndexKind.ASCENDING]] + }); + await test.configureFieldsIndexes(index); + + await test.writeMutations( + setMutation('coll/a', { + key: bsonObjectId('507f191e810c19729de860ea') + }), + setMutation('coll/b', { + key: bsonObjectId('507f191e810c19729de860eb') + }), + setMutation('coll/c', { key: bsonObjectId('507f191e810c19729de860ec') }) + ); + await test.backfillIndexes(); + + let query_ = query('coll', orderBy('key', 'asc')); + await test.executeQuery(query_); + test.assertOverlaysRead(3, 0, { + [key('coll/a').toString()]: 
MutationType.Set, + [key('coll/b').toString()]: MutationType.Set, + [key('coll/c').toString()]: MutationType.Set + }); + test.assertQueryReturned(query_, 'coll/a', 'coll/b', 'coll/c'); + + query_ = query( + 'coll', + filter('key', '==', bsonObjectId('507f191e810c19729de860ea')) + ); + await test.executeQuery(query_); + test.assertOverlaysRead(1, 0, { + [key('coll/a').toString()]: MutationType.Set + }); + test.assertQueryReturned(query_, 'coll/a'); + + query_ = query( + 'coll', + filter('key', '!=', bsonObjectId('507f191e810c19729de860ea')) + ); + await test.executeQuery(query_); + test.assertOverlaysRead(2, 0, { + [key('coll/b').toString()]: MutationType.Set, + [key('coll/c').toString()]: MutationType.Set + }); + test.assertQueryReturned(query_, 'coll/b', 'coll/c'); + + query_ = query( + 'coll', + filter('key', '>=', bsonObjectId('507f191e810c19729de860eb')) + ); + await test.executeQuery(query_); + test.assertOverlaysRead(2, 0, { + [key('coll/b').toString()]: MutationType.Set, + [key('coll/c').toString()]: MutationType.Set + }); + test.assertQueryReturned(query_, 'coll/b', 'coll/c'); + + query_ = query( + 'coll', + filter('key', '<', bsonObjectId('507f191e810c19729de860ea')) + ); + await test.executeQuery(query_); + test.assertOverlaysRead(0, 0); + test.assertQueryReturned(query_); + + query_ = query( + 'coll', + filter('key', 'in', [ + bsonObjectId('507f191e810c19729de860ea'), + bsonObjectId('507f191e810c19729de860eb') + ]) + ); + await test.executeQuery(query_); + test.assertOverlaysRead(2, 0, { + [key('coll/a').toString()]: MutationType.Set, + [key('coll/b').toString()]: MutationType.Set + }); + test.assertQueryReturned(query_, 'coll/a', 'coll/b'); + + query_ = query( + 'coll', + filter('key', 'not-in', [ + bsonObjectId('507f191e810c19729de860ea'), + bsonObjectId('507f191e810c19729de860eb') + ]) + ); + await test.executeQuery(query_); + test.assertOverlaysRead(1, 0, { + [key('coll/c').toString()]: MutationType.Set + }); + test.assertQueryReturned(query_, 
'coll/c'); + }); + + it('Indexes BSON Timestamp fields', async () => { + const index = fieldIndex('coll', { + id: 1, + fields: [['key', IndexKind.ASCENDING]] + }); + await test.configureFieldsIndexes(index); + await test.writeMutations( + setMutation('coll/a', { key: bsonTimestamp(1000, 1000) }), + setMutation('coll/b', { key: bsonTimestamp(1001, 1000) }), + setMutation('coll/c', { key: bsonTimestamp(1000, 1001) }) + ); + await test.backfillIndexes(); + + let query_ = query('coll', orderBy('key', 'asc')); + await test.executeQuery(query_); + test.assertOverlaysRead(3, 0, { + [key('coll/a').toString()]: MutationType.Set, + [key('coll/b').toString()]: MutationType.Set, + [key('coll/c').toString()]: MutationType.Set + }); + test.assertQueryReturned(query_, 'coll/a', 'coll/c', 'coll/b'); + + query_ = query('coll', filter('key', '==', bsonTimestamp(1000, 1000))); + await test.executeQuery(query_); + test.assertOverlaysRead(1, 0, { + [key('coll/a').toString()]: MutationType.Set + }); + test.assertQueryReturned(query_, 'coll/a'); + + query_ = query('coll', filter('key', '!=', bsonTimestamp(1000, 1000))); + await test.executeQuery(query_); + test.assertOverlaysRead(2, 0, { + [key('coll/b').toString()]: MutationType.Set, + [key('coll/c').toString()]: MutationType.Set + }); + test.assertQueryReturned(query_, 'coll/c', 'coll/b'); + + query_ = query('coll', filter('key', '>=', bsonTimestamp(1000, 1001))); + await test.executeQuery(query_); + test.assertOverlaysRead(2, 0, { + [key('coll/b').toString()]: MutationType.Set, + [key('coll/c').toString()]: MutationType.Set + }); + test.assertQueryReturned(query_, 'coll/c', 'coll/b'); + + query_ = query('coll', filter('key', '<', bsonTimestamp(1000, 1000))); + await test.executeQuery(query_); + test.assertOverlaysRead(0, 0); + test.assertQueryReturned(query_); + + query_ = query( + 'coll', + filter('key', 'in', [ + bsonTimestamp(1000, 1000), + bsonTimestamp(1001, 1000) + ]) + ); + await test.executeQuery(query_); + 
test.assertOverlaysRead(2, 0, { + [key('coll/a').toString()]: MutationType.Set, + [key('coll/b').toString()]: MutationType.Set + }); + test.assertQueryReturned(query_, 'coll/a', 'coll/b'); + + query_ = query( + 'coll', + filter('key', 'not-in', [ + bsonTimestamp(1000, 1000), + bsonTimestamp(1001, 1000) + ]) + ); + await test.executeQuery(query_); + test.assertOverlaysRead(1, 0, { + [key('coll/c').toString()]: MutationType.Set + }); + test.assertQueryReturned(query_, 'coll/c'); + }); + + it('Indexes BSON Binary Data fields', async () => { + const index = fieldIndex('coll', { + id: 1, + fields: [['key', IndexKind.ASCENDING]] + }); + await test.configureFieldsIndexes(index); + await test.writeMutations( + setMutation('coll/a', { + key: bsonBinaryData(1, new Uint8Array([1, 2, 3])) + }), + setMutation('coll/b', { + key: bsonBinaryData(1, new Uint8Array([1, 2])) + }), + setMutation('coll/c', { + key: bsonBinaryData(1, new Uint8Array([1, 2, 4])) + }), + setMutation('coll/d', { + key: bsonBinaryData(2, new Uint8Array([1, 2])) + }) + ); + await test.backfillIndexes(); + + let query_ = query('coll', orderBy('key', 'asc')); + await test.executeQuery(query_); + test.assertOverlaysRead(4, 0, { + [key('coll/a').toString()]: MutationType.Set, + [key('coll/b').toString()]: MutationType.Set, + [key('coll/c').toString()]: MutationType.Set, + [key('coll/d').toString()]: MutationType.Set + }); + test.assertQueryReturned(query_, 'coll/b', 'coll/a', 'coll/c', 'coll/d'); + + query_ = query( + 'coll', + filter('key', '==', bsonBinaryData(1, new Uint8Array([1, 2, 3]))) + ); + await test.executeQuery(query_); + test.assertOverlaysRead(1, 0, { + [key('coll/a').toString()]: MutationType.Set + }); + test.assertQueryReturned(query_, 'coll/a'); + + query_ = query( + 'coll', + filter('key', '!=', bsonBinaryData(1, new Uint8Array([1, 2, 3]))) + ); + await test.executeQuery(query_); + test.assertOverlaysRead(3, 0, { + [key('coll/b').toString()]: MutationType.Set, + [key('coll/c').toString()]: 
MutationType.Set, + [key('coll/d').toString()]: MutationType.Set + }); + test.assertQueryReturned(query_, 'coll/b', 'coll/c', 'coll/d'); + + query_ = query( + 'coll', + filter('key', '>=', bsonBinaryData(1, new Uint8Array([1, 2, 3]))) + ); + await test.executeQuery(query_); + test.assertOverlaysRead(3, 0, { + [key('coll/a').toString()]: MutationType.Set, + [key('coll/c').toString()]: MutationType.Set, + [key('coll/d').toString()]: MutationType.Set + }); + test.assertQueryReturned(query_, 'coll/a', 'coll/c', 'coll/d'); + + query_ = query( + 'coll', + filter('key', '<', bsonBinaryData(1, new Uint8Array([1, 2]))) + ); + await test.executeQuery(query_); + test.assertOverlaysRead(0, 0); + test.assertQueryReturned(query_); + + query_ = query( + 'coll', + filter('key', 'in', [ + bsonBinaryData(1, new Uint8Array([1, 2, 3])), + bsonBinaryData(1, new Uint8Array([1, 2])) + ]) + ); + await test.executeQuery(query_); + test.assertOverlaysRead(2, 0, { + [key('coll/a').toString()]: MutationType.Set, + [key('coll/b').toString()]: MutationType.Set + }); + // Note that `in` does not add implicit ordering, so the result is ordered by keys + test.assertQueryReturned(query_, 'coll/a', 'coll/b'); + + query_ = query( + 'coll', + filter('key', 'not-in', [ + bsonBinaryData(1, new Uint8Array([1, 2, 3])), + bsonBinaryData(1, new Uint8Array([1, 2])) + ]) + ); + await test.executeQuery(query_); + test.assertOverlaysRead(2, 0, { + [key('coll/c').toString()]: MutationType.Set, + [key('coll/d').toString()]: MutationType.Set + }); + test.assertQueryReturned(query_, 'coll/c', 'coll/d'); + }); + + it('Indexes BSON Int32 fields', async () => { + const index = fieldIndex('coll', { + id: 1, + fields: [['key', IndexKind.ASCENDING]] + }); + await test.configureFieldsIndexes(index); + await test.writeMutations( + setMutation('coll/a', { key: int32(-1) }), + setMutation('coll/b', { key: int32(0) }), + setMutation('coll/c', { key: int32(1) }) + ); + await test.backfillIndexes(); + + let query_ = 
query('coll', orderBy('key', 'asc')); + await test.executeQuery(query_); + test.assertOverlaysRead(3, 0, { + [key('coll/a').toString()]: MutationType.Set, + [key('coll/b').toString()]: MutationType.Set, + [key('coll/c').toString()]: MutationType.Set + }); + test.assertQueryReturned(query_, 'coll/a', 'coll/b', 'coll/c'); + + query_ = query('coll', filter('key', '==', int32(0))); + await test.executeQuery(query_); + test.assertOverlaysRead(1, 0, { + [key('coll/b').toString()]: MutationType.Set + }); + test.assertQueryReturned(query_, 'coll/b'); + + query_ = query('coll', filter('key', '!=', int32(0))); + await test.executeQuery(query_); + test.assertOverlaysRead(2, 0, { + [key('coll/a').toString()]: MutationType.Set, + [key('coll/c').toString()]: MutationType.Set + }); + test.assertQueryReturned(query_, 'coll/a', 'coll/c'); + + query_ = query('coll', filter('key', '>=', int32(0))); + await test.executeQuery(query_); + test.assertOverlaysRead(2, 0, { + [key('coll/b').toString()]: MutationType.Set, + [key('coll/c').toString()]: MutationType.Set + }); + test.assertQueryReturned(query_, 'coll/b', 'coll/c'); + + query_ = query('coll', filter('key', '<', int32(-1))); + await test.executeQuery(query_); + test.assertOverlaysRead(0, 0); + test.assertQueryReturned(query_); + + query_ = query('coll', filter('key', 'in', [int32(0), int32(1)])); + await test.executeQuery(query_); + test.assertOverlaysRead(2, 0, { + [key('coll/b').toString()]: MutationType.Set, + [key('coll/c').toString()]: MutationType.Set + }); + test.assertQueryReturned(query_, 'coll/b', 'coll/c'); + + query_ = query('coll', filter('key', 'not-in', [int32(0), int32(1)])); + await test.executeQuery(query_); + test.assertOverlaysRead(1, 0, { + [key('coll/a').toString()]: MutationType.Set + }); + test.assertQueryReturned(query_, 'coll/a'); + }); + + it('Indexes BSON Regex fields', async () => { + const index = fieldIndex('coll', { + id: 1, + fields: [['key', IndexKind.ASCENDING]] + }); + await 
test.configureFieldsIndexes(index); + await test.writeMutations( + setMutation('coll/a', { key: regex('a', 'i') }), + setMutation('coll/b', { key: regex('a', 'm') }), + setMutation('coll/c', { key: regex('b', 'i') }) + ); + await test.backfillIndexes(); + + let query_ = query('coll', orderBy('key', 'asc')); + await test.executeQuery(query_); + test.assertOverlaysRead(3, 0, { + [key('coll/a').toString()]: MutationType.Set, + [key('coll/b').toString()]: MutationType.Set, + [key('coll/c').toString()]: MutationType.Set + }); + test.assertQueryReturned(query_, 'coll/a', 'coll/b', 'coll/c'); + + query_ = query('coll', filter('key', '==', regex('a', 'i'))); + await test.executeQuery(query_); + test.assertOverlaysRead(1, 0, { + [key('coll/a').toString()]: MutationType.Set + }); + test.assertQueryReturned(query_, 'coll/a'); + + query_ = query('coll', filter('key', '!=', regex('a', 'i'))); + await test.executeQuery(query_); + test.assertOverlaysRead(2, 0, { + [key('coll/b').toString()]: MutationType.Set, + [key('coll/c').toString()]: MutationType.Set + }); + test.assertQueryReturned(query_, 'coll/b', 'coll/c'); + + query_ = query('coll', filter('key', '>=', regex('a', 'm'))); + await test.executeQuery(query_); + test.assertOverlaysRead(2, 0, { + [key('coll/b').toString()]: MutationType.Set, + [key('coll/c').toString()]: MutationType.Set + }); + test.assertQueryReturned(query_, 'coll/b', 'coll/c'); + + query_ = query('coll', filter('key', '<', regex('a', 'i'))); + await test.executeQuery(query_); + test.assertOverlaysRead(0, 0); + test.assertQueryReturned(query_); + + query_ = query( + 'coll', + filter('key', 'in', [regex('a', 'i'), regex('a', 'm')]) + ); + await test.executeQuery(query_); + test.assertOverlaysRead(2, 0, { + [key('coll/a').toString()]: MutationType.Set, + [key('coll/b').toString()]: MutationType.Set + }); + test.assertQueryReturned(query_, 'coll/a', 'coll/b'); + + query_ = query( + 'coll', + filter('key', 'not-in', [regex('a', 'i'), regex('a', 'm')]) + ); + 
await test.executeQuery(query_); + test.assertOverlaysRead(1, 0, { + [key('coll/c').toString()]: MutationType.Set + }); + test.assertQueryReturned(query_, 'coll/c'); + }); + + it('Indexes BSON minKey fields', async () => { + const index = fieldIndex('coll', { + id: 1, + fields: [['key', IndexKind.ASCENDING]] + }); + await test.configureFieldsIndexes(index); + await test.writeMutations( + setMutation('coll/a', { key: minKey() }), + setMutation('coll/b', { key: minKey() }), + setMutation('coll/c', { key: null }), + setMutation('coll/d', { key: 1 }), + setMutation('coll/e', { key: maxKey() }) + ); + await test.backfillIndexes(); + + let query_ = query('coll', orderBy('key', 'asc')); + await test.executeQuery(query_); + test.assertOverlaysRead(5, 0, { + [key('coll/a').toString()]: MutationType.Set, + [key('coll/b').toString()]: MutationType.Set, + [key('coll/c').toString()]: MutationType.Set, + [key('coll/d').toString()]: MutationType.Set, + [key('coll/e').toString()]: MutationType.Set + }); + test.assertQueryReturned( + query_, + 'coll/c', + 'coll/a', + 'coll/b', + 'coll/d', + 'coll/e' + ); + + query_ = query('coll', filter('key', '==', minKey())); + await test.executeQuery(query_); + test.assertOverlaysRead(2, 0, { + [key('coll/a').toString()]: MutationType.Set, + [key('coll/b').toString()]: MutationType.Set + }); + test.assertQueryReturned(query_, 'coll/a', 'coll/b'); + + query_ = query('coll', filter('key', '!=', minKey())); + await test.executeQuery(query_); + test.assertOverlaysRead(2, 0, { + [key('coll/d').toString()]: MutationType.Set, + [key('coll/e').toString()]: MutationType.Set + }); + test.assertQueryReturned(query_, 'coll/d', 'coll/e'); + + query_ = query('coll', filter('key', '>=', minKey())); + await test.executeQuery(query_); + test.assertOverlaysRead(2, 0, { + [key('coll/a').toString()]: MutationType.Set, + [key('coll/b').toString()]: MutationType.Set + }); + test.assertQueryReturned(query_, 'coll/a', 'coll/b'); + + query_ = query('coll', 
filter('key', '<', minKey())); + await test.executeQuery(query_); + test.assertOverlaysRead(0, 0, {}); + test.assertQueryReturned(query_); + + query_ = query('coll', filter('key', 'in', [minKey()])); + await test.executeQuery(query_); + test.assertOverlaysRead(2, 0, { + [key('coll/a').toString()]: MutationType.Set, + [key('coll/b').toString()]: MutationType.Set + }); + test.assertQueryReturned(query_, 'coll/a', 'coll/b'); + + query_ = query('coll', filter('key', 'not-in', [minKey()])); + await test.executeQuery(query_); + test.assertOverlaysRead(2, 0, { + [key('coll/d').toString()]: MutationType.Set, + [key('coll/e').toString()]: MutationType.Set + }); + test.assertQueryReturned(query_, 'coll/d', 'coll/e'); + }); + + it('Indexes BSON maxKey fields', async () => { + const index = fieldIndex('coll', { + id: 1, + fields: [['key', IndexKind.ASCENDING]] + }); + await test.configureFieldsIndexes(index); + await test.writeMutations( + setMutation('coll/a', { key: maxKey() }), + setMutation('coll/b', { key: maxKey() }), + setMutation('coll/c', { key: null }), + setMutation('coll/d', { key: 1 }), + setMutation('coll/e', { key: minKey() }) + ); + await test.backfillIndexes(); + + let query_ = query('coll', orderBy('key', 'asc')); + await test.executeQuery(query_); + test.assertOverlaysRead(5, 0, { + [key('coll/a').toString()]: MutationType.Set, + [key('coll/b').toString()]: MutationType.Set, + [key('coll/c').toString()]: MutationType.Set, + [key('coll/d').toString()]: MutationType.Set, + [key('coll/e').toString()]: MutationType.Set + }); + test.assertQueryReturned( + query_, + 'coll/c', + 'coll/e', + 'coll/d', + 'coll/a', + 'coll/b' + ); + + query_ = query('coll', filter('key', '==', maxKey())); + await test.executeQuery(query_); + test.assertOverlaysRead(2, 0, { + [key('coll/a').toString()]: MutationType.Set, + [key('coll/b').toString()]: MutationType.Set + }); + test.assertQueryReturned(query_, 'coll/a', 'coll/b'); + + query_ = query('coll', filter('key', '!=', maxKey())); 
+ await test.executeQuery(query_); + test.assertOverlaysRead(2, 0, { + [key('coll/d').toString()]: MutationType.Set, + [key('coll/e').toString()]: MutationType.Set + }); + test.assertQueryReturned(query_, 'coll/e', 'coll/d'); + + query_ = query('coll', filter('key', '<=', maxKey())); + await test.executeQuery(query_); + test.assertOverlaysRead(2, 0, { + [key('coll/a').toString()]: MutationType.Set, + [key('coll/b').toString()]: MutationType.Set + }); + test.assertQueryReturned(query_, 'coll/a', 'coll/b'); + + query_ = query('coll', filter('key', '>', maxKey())); + await test.executeQuery(query_); + test.assertOverlaysRead(0, 0, {}); + test.assertQueryReturned(query_); + + query_ = query('coll', filter('key', '<', maxKey())); + await test.executeQuery(query_); + test.assertOverlaysRead(0, 0, {}); + test.assertQueryReturned(query_); + + query_ = query('coll', filter('key', 'in', [maxKey()])); + await test.executeQuery(query_); + test.assertOverlaysRead(2, 0, { + [key('coll/a').toString()]: MutationType.Set, + [key('coll/b').toString()]: MutationType.Set + }); + test.assertQueryReturned(query_, 'coll/a', 'coll/b'); + + query_ = query('coll', filter('key', 'not-in', [maxKey()])); + await test.executeQuery(query_); + test.assertOverlaysRead(2, 0, { + [key('coll/d').toString()]: MutationType.Set, + [key('coll/e').toString()]: MutationType.Set + }); + test.assertQueryReturned(query_, 'coll/e', 'coll/d'); + }); + + it('Indexes multiple BSON types together', async () => { + const index = fieldIndex('coll', { + id: 1, + fields: [['key', IndexKind.DESCENDING]] + }); + await test.configureFieldsIndexes(index); + + await test.writeMutations( + setMutation('coll/a', { key: minKey() }), + setMutation('coll/b', { key: int32(2) }), + setMutation('coll/c', { key: int32(1) }), + setMutation('coll/d', { key: bsonTimestamp(1000, 1001) }), + setMutation('coll/e', { key: bsonTimestamp(1000, 1000) }), + setMutation('coll/f', { + key: bsonBinaryData(1, new Uint8Array([1, 2, 4])) + }), + 
setMutation('coll/g', { + key: bsonBinaryData(1, new Uint8Array([1, 2, 3])) + }), + setMutation('coll/h', { + key: bsonObjectId('507f191e810c19729de860eb') + }), + setMutation('coll/i', { + key: bsonObjectId('507f191e810c19729de860ea') + }), + setMutation('coll/j', { key: regex('^bar', 'm') }), + setMutation('coll/k', { key: regex('^bar', 'i') }), + setMutation('coll/l', { key: maxKey() }) + ); + await test.backfillIndexes(); + + const query_ = query('coll', orderBy('key', 'desc')); + await test.executeQuery(query_); + test.assertOverlaysRead(12, 0, { + [key('coll/a').toString()]: MutationType.Set, + [key('coll/b').toString()]: MutationType.Set, + [key('coll/c').toString()]: MutationType.Set, + [key('coll/d').toString()]: MutationType.Set, + [key('coll/e').toString()]: MutationType.Set, + [key('coll/f').toString()]: MutationType.Set, + [key('coll/g').toString()]: MutationType.Set, + [key('coll/h').toString()]: MutationType.Set, + [key('coll/i').toString()]: MutationType.Set, + [key('coll/j').toString()]: MutationType.Set, + [key('coll/k').toString()]: MutationType.Set, + [key('coll/l').toString()]: MutationType.Set + }); + test.assertQueryReturned( + query_, + 'coll/l', + 'coll/j', + 'coll/k', + 'coll/h', + 'coll/i', + 'coll/f', + 'coll/g', + 'coll/d', + 'coll/e', + 'coll/b', + 'coll/c', + 'coll/a' + ); + }); + + it('Indexes all types together', async () => { + const index = fieldIndex('coll', { + id: 1, + fields: [['key', IndexKind.ASCENDING]] + }); + await test.configureFieldsIndexes(index); + + await test.writeMutations( + setMutation('coll/a', { key: null }), + setMutation('coll/b', { key: minKey() }), + setMutation('coll/c', { key: true }), + setMutation('coll/d', { key: NaN }), + setMutation('coll/e', { key: int32(1) }), + setMutation('coll/f', { key: 2.0 }), + setMutation('coll/g', { key: 3 }), + setMutation('coll/h', { key: new Timestamp(100, 123456000) }), + setMutation('coll/i', { key: bsonTimestamp(1, 2) }), + setMutation('coll/j', { key: 'string' }), + 
setMutation('coll/k', { key: blob(1, 2, 3) }), + setMutation('coll/l', { + key: bsonBinaryData(1, new Uint8Array([1, 2, 3])) + }), + setMutation('coll/m', { key: ref('foo/bar') }), + setMutation('coll/n', { + key: bsonObjectId('507f191e810c19729de860ea') + }), + setMutation('coll/o', { key: new GeoPoint(1, 2) }), + setMutation('coll/p', { key: regex('^bar', 'm') }), + setMutation('coll/q', { key: [2, 'foo'] }), + setMutation('coll/r', { key: vector([1, 2, 3]) }), + setMutation('coll/s', { key: { bar: 1, foo: 2 } }), + setMutation('coll/t', { key: maxKey() }) + ); + await test.backfillIndexes(); + + const query_ = query('coll', orderBy('key', 'asc')); + await test.executeQuery(query_); + test.assertOverlaysRead(20, 0, { + [key('coll/a').toString()]: MutationType.Set, + [key('coll/b').toString()]: MutationType.Set, + [key('coll/c').toString()]: MutationType.Set, + [key('coll/d').toString()]: MutationType.Set, + [key('coll/e').toString()]: MutationType.Set, + [key('coll/f').toString()]: MutationType.Set, + [key('coll/g').toString()]: MutationType.Set, + [key('coll/h').toString()]: MutationType.Set, + [key('coll/i').toString()]: MutationType.Set, + [key('coll/j').toString()]: MutationType.Set, + [key('coll/k').toString()]: MutationType.Set, + [key('coll/l').toString()]: MutationType.Set, + [key('coll/m').toString()]: MutationType.Set, + [key('coll/n').toString()]: MutationType.Set, + [key('coll/o').toString()]: MutationType.Set, + [key('coll/p').toString()]: MutationType.Set, + [key('coll/q').toString()]: MutationType.Set, + [key('coll/r').toString()]: MutationType.Set, + [key('coll/s').toString()]: MutationType.Set, + [key('coll/t').toString()]: MutationType.Set + }); + test.assertQueryReturned( + query_, + 'coll/a', + 'coll/b', + 'coll/c', + 'coll/d', + 'coll/e', + 'coll/f', + 'coll/g', + 'coll/h', + 'coll/i', + 'coll/j', + 'coll/k', + 'coll/l', + 'coll/m', + 'coll/n', + 'coll/o', + 'coll/p', + 'coll/q', + 'coll/r', + 'coll/s', + 'coll/t' + ); + }); }); }); diff 
--git a/packages/firestore/test/unit/model/values.test.ts b/packages/firestore/test/unit/model/values.test.ts index 4054dd6481d..0d93d335ded 100644 --- a/packages/firestore/test/unit/model/values.test.ts +++ b/packages/firestore/test/unit/model/values.test.ts @@ -722,7 +722,7 @@ describe('Values', () => { expect(canonicalId(wrap(int32(1)))).to.equal('{__int__:1}'); expect( canonicalId(wrap(bsonBinaryData(1, new Uint8Array([1, 2, 3])))) - ).to.equal('{__binary__:{subType:1,data:AQID}}'); + ).to.equal('{__binary__:AQECAw==}'); expect(canonicalId(wrap(minKey()))).to.equal('{__min__:null}'); expect(canonicalId(wrap(maxKey()))).to.equal('{__max__:null}'); }); From ac59a1c18d5a5b5fc6e3fb2c0e5647c26f8006c2 Mon Sep 17 00:00:00 2001 From: Mila <107142260+milaGGL@users.noreply.github.com> Date: Thu, 1 May 2025 13:02:18 -0400 Subject: [PATCH 5/9] Remove FieldValue factory methods (#342) --- common/api-review/firestore-lite.api.md | 23 +- common/api-review/firestore.api.md | 23 +- packages/firestore/lite/index.ts | 11 +- packages/firestore/src/api.ts | 11 +- .../firestore/src/api/field_value_impl.ts | 9 +- .../src/lite-api/bson_binary_data.ts | 6 +- ...n_timestamp_value.ts => bson_timestamp.ts} | 14 +- .../src/lite-api/field_value_impl.ts | 90 ------ .../src/lite-api/user_data_reader.ts | 2 +- .../src/lite-api/user_data_writer.ts | 9 +- .../test/integration/api/database.test.ts | 258 +++++++++-------- .../test/integration/api/type.test.ts | 92 +++--- .../firestore/test/lite/integration.test.ts | 48 ++-- .../firestore_index_value_writer.test.ts | 39 ++- .../test/unit/local/index_manager.test.ts | 267 +++++++++++------- .../unit/local/local_store_indexeddb.test.ts | 227 ++++++++------- .../test/unit/model/document.test.ts | 44 +-- .../test/unit/model/object_value.test.ts | 130 +++++---- .../firestore/test/unit/model/values.test.ts | 179 ++++++------ .../test/unit/remote/serializer.helper.ts | 32 +-- 20 files changed, 751 insertions(+), 763 deletions(-) rename 
packages/firestore/src/lite-api/{bson_timestamp_value.ts => bson_timestamp.ts} (72%) diff --git a/common/api-review/firestore-lite.api.md b/common/api-review/firestore-lite.api.md index 394fd0b33ce..f0203c034b3 100644 --- a/common/api-review/firestore-lite.api.md +++ b/common/api-review/firestore-lite.api.md @@ -68,14 +68,13 @@ export function average(field: string | FieldPath): AggregateField> = UnionToIntersection<{ [K in keyof T & string]: ChildUpdateFields; @@ -426,9 +410,6 @@ export class QueryStartAtConstraint extends QueryConstraint { // @public export function refEqual(left: DocumentReference | CollectionReference, right: DocumentReference | CollectionReference): boolean; -// @public -export function regex(pattern: string, options: string): RegexValue; - // @public export class RegexValue { constructor(pattern: string, options: string); diff --git a/common/api-review/firestore.api.md b/common/api-review/firestore.api.md index 39dcdb6df59..90137f78b00 100644 --- a/common/api-review/firestore.api.md +++ b/common/api-review/firestore.api.md @@ -68,14 +68,13 @@ export function average(field: string | FieldPath): AggregateField; @@ -686,9 +670,6 @@ export class QueryStartAtConstraint extends QueryConstraint { // @public export function refEqual(left: DocumentReference | CollectionReference, right: DocumentReference | CollectionReference): boolean; -// @public -export function regex(pattern: string, options: string): RegexValue; - // @public export class RegexValue { constructor(pattern: string, options: string); diff --git a/packages/firestore/lite/index.ts b/packages/firestore/lite/index.ts index 48e0bdae068..7eee71a9893 100644 --- a/packages/firestore/lite/index.ts +++ b/packages/firestore/lite/index.ts @@ -128,14 +128,7 @@ export { arrayUnion, serverTimestamp, deleteField, - vector, - int32, - regex, - bsonBinaryData, - bsonObjectId, - bsonTimestamp, - minKey, - maxKey + vector } from '../src/lite-api/field_value_impl'; export { @@ -156,7 +149,7 @@ export { 
BsonBinaryData } from '../src/lite-api/bson_binary_data'; export { BsonObjectId } from '../src/lite-api/bson_object_Id'; -export { BsonTimestamp } from '../src/lite-api/bson_timestamp_value'; +export { BsonTimestamp } from '../src/lite-api/bson_timestamp'; export { MinKey } from '../src/lite-api/min_key'; diff --git a/packages/firestore/src/api.ts b/packages/firestore/src/api.ts index ec6fdd2c4ea..b9d14923bcd 100644 --- a/packages/firestore/src/api.ts +++ b/packages/firestore/src/api.ts @@ -173,14 +173,7 @@ export { deleteField, increment, serverTimestamp, - vector, - int32, - regex, - bsonBinaryData, - bsonObjectId, - bsonTimestamp, - minKey, - maxKey + vector } from './api/field_value_impl'; export { VectorValue } from './lite-api/vector_value'; @@ -193,7 +186,7 @@ export { BsonBinaryData } from './lite-api/bson_binary_data'; export { BsonObjectId } from './lite-api/bson_object_Id'; -export { BsonTimestamp } from './lite-api/bson_timestamp_value'; +export { BsonTimestamp } from './lite-api/bson_timestamp'; export { MinKey } from './lite-api/min_key'; diff --git a/packages/firestore/src/api/field_value_impl.ts b/packages/firestore/src/api/field_value_impl.ts index 6e65d273259..1b1283a3543 100644 --- a/packages/firestore/src/api/field_value_impl.ts +++ b/packages/firestore/src/api/field_value_impl.ts @@ -21,12 +21,5 @@ export { arrayUnion, serverTimestamp, deleteField, - vector, - int32, - regex, - bsonBinaryData, - bsonObjectId, - bsonTimestamp, - minKey, - maxKey + vector } from '../lite-api/field_value_impl'; diff --git a/packages/firestore/src/lite-api/bson_binary_data.ts b/packages/firestore/src/lite-api/bson_binary_data.ts index 233dd790aec..8b4b1fe0ef0 100644 --- a/packages/firestore/src/lite-api/bson_binary_data.ts +++ b/packages/firestore/src/lite-api/bson_binary_data.ts @@ -24,13 +24,9 @@ import { Code, FirestoreError } from '../util/error'; * @class BsonBinaryData */ export class BsonBinaryData { - /** The subtype for the data */ - readonly subtype: 
number; - - /** The binary data as a byte array */ readonly data: Uint8Array; - constructor(subtype: number, data: Uint8Array) { + constructor(readonly subtype: number, data: Uint8Array) { if (subtype < 0 || subtype > 255) { throw new FirestoreError( Code.INVALID_ARGUMENT, diff --git a/packages/firestore/src/lite-api/bson_timestamp_value.ts b/packages/firestore/src/lite-api/bson_timestamp.ts similarity index 72% rename from packages/firestore/src/lite-api/bson_timestamp_value.ts rename to packages/firestore/src/lite-api/bson_timestamp.ts index 0b317f9042c..dc18db02bb1 100644 --- a/packages/firestore/src/lite-api/bson_timestamp_value.ts +++ b/packages/firestore/src/lite-api/bson_timestamp.ts @@ -21,7 +21,19 @@ * @class BsonTimestamp */ export class BsonTimestamp { - constructor(readonly seconds: number, readonly increment: number) {} + constructor(readonly seconds: number, readonly increment: number) { + // Make sure 'seconds' and 'increment' are in the range of a 32-bit unsigned integer. + if (seconds < 0 || seconds > 4294967295) { + throw new Error( + "BsonTimestamp 'seconds' must be in the range of a 32-bit unsigned integer." + ); + } + if (increment < 0 || increment > 4294967295) { + throw new Error( + "BsonTimestamp 'increment' must be in the range of a 32-bit unsigned integer." + ); + } + } /** * Returns true if this `BsonTimestamp` is equal to the provided one. diff --git a/packages/firestore/src/lite-api/field_value_impl.ts b/packages/firestore/src/lite-api/field_value_impl.ts index ade0656e0d3..11db1005235 100644 --- a/packages/firestore/src/lite-api/field_value_impl.ts +++ b/packages/firestore/src/lite-api/field_value_impl.ts @@ -14,15 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ - -import { BsonBinaryData } from './bson_binary_data'; -import { BsonObjectId } from './bson_object_Id'; -import { BsonTimestamp } from './bson_timestamp_value'; import { FieldValue } from './field_value'; -import { Int32Value } from './int32_value'; -import { MaxKey } from './max_key'; -import { MinKey } from './min_key'; -import { RegexValue } from './regex_value'; import { ArrayRemoveFieldValueImpl, ArrayUnionFieldValueImpl, @@ -116,85 +108,3 @@ export function increment(n: number): FieldValue { export function vector(values?: number[]): VectorValue { return new VectorValue(values); } - -/** - * Creates a new `Int32Value` constructed with the given number. - * - * @param value - The 32-bit number to be used for constructing the Int32Value - * - * @returns A new `Int32Value` constructed with the given number. - */ -export function int32(value: number): Int32Value { - return new Int32Value(value); -} - -/** - * Creates a new `RegexValue` constructed with the given pattern and options. - * - * @param subtype - The subtype of the BSON binary data. - * @param data - The data to use for the BSON binary data. - * - * @returns A new `RegexValue` constructed with the given pattern and options. - */ -export function regex(pattern: string, options: string): RegexValue { - return new RegexValue(pattern, options); -} - -/** - * Creates a new `BsonBinaryData` constructed with the given subtype and data. - * - * @param subtype - Create a `BsonBinaryData` instance with the given subtype. - * @param data - Create a `BsonBinaryData` instance with a copy of this array of numbers. - * - * @returns A new `BsonBinaryData` constructed with the given subtype and data. - */ -export function bsonBinaryData( - subtype: number, - data: Uint8Array -): BsonBinaryData { - return new BsonBinaryData(subtype, data); -} - -/** - * Creates a new `BsonObjectId` constructed with the given string. - * - * @param value - The 24-character hex string representing the ObjectId. 
- * - * @returns A new `BsonObjectId` constructed with the given string. - */ -export function bsonObjectId(value: string): BsonObjectId { - return new BsonObjectId(value); -} - -/** - * Creates a new `BsonTimestamp` constructed with the given seconds and increment. - * - * @param seconds - The underlying unsigned 32-bit integer for seconds. - * @param seconds - The underlying unsigned 32-bit integer for increment. - * - * @returns A new `BsonTimestamp` constructed with the given seconds and increment. - */ -export function bsonTimestamp( - seconds: number, - increment: number -): BsonTimestamp { - return new BsonTimestamp(seconds, increment); -} - -/** - * Creates or returns a `MinKey` instance. - * - * @returns A `MinKey` instance. - */ -export function minKey(): MinKey { - return MinKey.instance(); -} - -/** - * Creates or returns a `MaxKey` instance. - * - * @returns A `MaxKey` instance. - */ -export function maxKey(): MaxKey { - return MaxKey.instance(); -} diff --git a/packages/firestore/src/lite-api/user_data_reader.ts b/packages/firestore/src/lite-api/user_data_reader.ts index 9d7e6fa79f1..008a5225d9d 100644 --- a/packages/firestore/src/lite-api/user_data_reader.ts +++ b/packages/firestore/src/lite-api/user_data_reader.ts @@ -77,7 +77,7 @@ import { Dict, forEach, isEmpty } from '../util/obj'; import { BsonBinaryData } from './bson_binary_data'; import { BsonObjectId } from './bson_object_Id'; -import { BsonTimestamp } from './bson_timestamp_value'; +import { BsonTimestamp } from './bson_timestamp'; import { Bytes } from './bytes'; import { Firestore } from './database'; import { FieldPath } from './field_path'; diff --git a/packages/firestore/src/lite-api/user_data_writer.ts b/packages/firestore/src/lite-api/user_data_writer.ts index e4719591b4c..94e3f96fe12 100644 --- a/packages/firestore/src/lite-api/user_data_writer.ts +++ b/packages/firestore/src/lite-api/user_data_writer.ts @@ -60,10 +60,11 @@ import { forEach } from '../util/obj'; import { 
BsonBinaryData } from './bson_binary_data'; import { BsonObjectId } from './bson_object_Id'; -import { BsonTimestamp } from './bson_timestamp_value'; -import { maxKey, minKey } from './field_value_impl'; +import { BsonTimestamp } from './bson_timestamp'; import { GeoPoint } from './geo_point'; import { Int32Value } from './int32_value'; +import { MaxKey } from './max_key'; +import { MinKey } from './min_key'; import { RegexValue } from './regex_value'; import { Timestamp } from './timestamp'; import { VectorValue } from './vector_value'; @@ -118,9 +119,9 @@ export abstract class AbstractUserDataWriter { case TypeOrder.BsonTimestampValue: return this.convertToBsonTimestamp(value.mapValue!); case TypeOrder.MaxKeyValue: - return maxKey(); + return MaxKey.instance(); case TypeOrder.MinKeyValue: - return minKey(); + return MinKey.instance(); default: throw fail('Invalid value type: ' + JSON.stringify(value)); } diff --git a/packages/firestore/test/integration/api/database.test.ts b/packages/firestore/test/integration/api/database.test.ts index 2071b786f24..6630384bce1 100644 --- a/packages/firestore/test/integration/api/database.test.ts +++ b/packages/firestore/test/integration/api/database.test.ts @@ -67,17 +67,17 @@ import { QuerySnapshot, vector, getDocsFromServer, - bsonBinaryData, - bsonObjectId, - bsonTimestamp, - int32, - maxKey, - minKey, - regex, or, newTestFirestore, GeoPoint, - Bytes + Bytes, + BsonBinaryData, + BsonObjectId, + Int32Value, + MaxKey, + MinKey, + RegexValue, + BsonTimestamp } from '../util/firebase_export'; import { apiDescribe, @@ -2454,20 +2454,20 @@ apiDescribe('Database', persistence => { {}, async coll => { const docRef = await addDoc(coll, { - binary: bsonBinaryData(1, new Uint8Array([1, 2, 3])), - objectId: bsonObjectId('507f191e810c19729de860ea'), - int32: int32(1), - min: minKey(), - max: maxKey(), - regex: regex('^foo', 'i') + binary: new BsonBinaryData(1, new Uint8Array([1, 2, 3])), + objectId: new 
BsonObjectId('507f191e810c19729de860ea'), + int32: new Int32Value(1), + min: MinKey.instance(), + max: MaxKey.instance(), + regex: new RegexValue('^foo', 'i') }); await setDoc( docRef, { - binary: bsonBinaryData(1, new Uint8Array([1, 2, 3])), - timestamp: bsonTimestamp(1, 2), - int32: int32(2) + binary: new BsonBinaryData(1, new Uint8Array([1, 2, 3])), + timestamp: new BsonTimestamp(1, 2), + int32: new Int32Value(2) }, { merge: true } ); @@ -2476,19 +2476,20 @@ apiDescribe('Database', persistence => { expect( snapshot .get('objectId') - .isEqual(bsonObjectId('507f191e810c19729de860ea')) + .isEqual(new BsonObjectId('507f191e810c19729de860ea')) ).to.be.true; - expect(snapshot.get('int32').isEqual(int32(2))).to.be.true; - expect(snapshot.get('min') === minKey()).to.be.true; - expect(snapshot.get('max') === maxKey()).to.be.true; + expect(snapshot.get('int32').isEqual(new Int32Value(2))).to.be.true; + expect(snapshot.get('min') === MinKey.instance()).to.be.true; + expect(snapshot.get('max') === MaxKey.instance()).to.be.true; expect( snapshot .get('binary') - .isEqual(bsonBinaryData(1, new Uint8Array([1, 2, 3]))) + .isEqual(new BsonBinaryData(1, new Uint8Array([1, 2, 3]))) ).to.be.true; - expect(snapshot.get('timestamp').isEqual(bsonTimestamp(1, 2))).to.be - .true; - expect(snapshot.get('regex').isEqual(regex('^foo', 'i'))).to.be.true; + expect(snapshot.get('timestamp').isEqual(new BsonTimestamp(1, 2))).to + .be.true; + expect(snapshot.get('regex').isEqual(new RegexValue('^foo', 'i'))).to + .be.true; } ); }); @@ -2506,41 +2507,42 @@ apiDescribe('Database', persistence => { // Adding docs to cache, do not wait for promise to resolve. 
// eslint-disable-next-line @typescript-eslint/no-floating-promises setDoc(docRef, { - binary: bsonBinaryData(1, new Uint8Array([1, 2, 3])), - objectId: bsonObjectId('507f191e810c19729de860ea'), - int32: int32(1), - regex: regex('^foo', 'i'), - timestamp: bsonTimestamp(1, 2), - min: minKey(), - max: maxKey() + binary: new BsonBinaryData(1, new Uint8Array([1, 2, 3])), + objectId: new BsonObjectId('507f191e810c19729de860ea'), + int32: new Int32Value(1), + regex: new RegexValue('^foo', 'i'), + timestamp: new BsonTimestamp(1, 2), + min: MinKey.instance(), + max: MaxKey.instance() }); const snapshot = await getDocFromCache(docRef); expect( snapshot .get('binary') - .isEqual(bsonBinaryData(1, new Uint8Array([1, 2, 3]))) + .isEqual(new BsonBinaryData(1, new Uint8Array([1, 2, 3]))) ).to.be.true; expect( snapshot .get('objectId') - .isEqual(bsonObjectId('507f191e810c19729de860ea')) + .isEqual(new BsonObjectId('507f191e810c19729de860ea')) ).to.be.true; - expect(snapshot.get('int32').isEqual(int32(1))).to.be.true; - expect(snapshot.get('regex').isEqual(regex('^foo', 'i'))).to.be.true; - expect(snapshot.get('timestamp').isEqual(bsonTimestamp(1, 2))).to.be - .true; - expect(snapshot.get('min') === minKey()).to.be.true; - expect(snapshot.get('max') === maxKey()).to.be.true; + expect(snapshot.get('int32').isEqual(new Int32Value(1))).to.be.true; + expect(snapshot.get('regex').isEqual(new RegexValue('^foo', 'i'))).to + .be.true; + expect(snapshot.get('timestamp').isEqual(new BsonTimestamp(1, 2))).to + .be.true; + expect(snapshot.get('min') === MinKey.instance()).to.be.true; + expect(snapshot.get('max') === MaxKey.instance()).to.be.true; } ); }); it('can filter and order objectIds', async () => { const testDocs = { - a: { key: bsonObjectId('507f191e810c19729de860ea') }, - b: { key: bsonObjectId('507f191e810c19729de860eb') }, - c: { key: bsonObjectId('507f191e810c19729de860ec') } + a: { key: new BsonObjectId('507f191e810c19729de860ea') }, + b: { key: new 
BsonObjectId('507f191e810c19729de860eb') }, + c: { key: new BsonObjectId('507f191e810c19729de860ec') } }; return withTestProjectIdAndCollectionSettings( @@ -2554,7 +2556,7 @@ apiDescribe('Database', persistence => { let orderedQuery = query( coll, - where('key', '>', bsonObjectId('507f191e810c19729de860ea')), + where('key', '>', new BsonObjectId('507f191e810c19729de860ea')), orderBy('key', 'desc') ); @@ -2572,8 +2574,8 @@ apiDescribe('Database', persistence => { orderedQuery = query( coll, where('key', 'in', [ - bsonObjectId('507f191e810c19729de860ea'), - bsonObjectId('507f191e810c19729de860eb') + new BsonObjectId('507f191e810c19729de860ea'), + new BsonObjectId('507f191e810c19729de860eb') ]), orderBy('key', 'desc') ); @@ -2594,9 +2596,9 @@ apiDescribe('Database', persistence => { it('can filter and order Int32 values', async () => { const testDocs = { - a: { key: int32(-1) }, - b: { key: int32(1) }, - c: { key: int32(2) } + a: { key: new Int32Value(-1) }, + b: { key: new Int32Value(1) }, + c: { key: new Int32Value(2) } }; return withTestProjectIdAndCollectionSettings( persistence, @@ -2609,7 +2611,7 @@ apiDescribe('Database', persistence => { let orderedQuery = query( coll, - where('key', '>=', int32(1)), + where('key', '>=', new Int32Value(1)), orderBy('key', 'desc') ); @@ -2626,7 +2628,7 @@ apiDescribe('Database', persistence => { orderedQuery = query( coll, - where('key', 'not-in', [int32(1)]), + where('key', 'not-in', [new Int32Value(1)]), orderBy('key', 'desc') ); @@ -2646,9 +2648,9 @@ apiDescribe('Database', persistence => { it('can filter and order Timestamp values', async () => { const testDocs = { - a: { key: bsonTimestamp(1, 1) }, - b: { key: bsonTimestamp(1, 2) }, - c: { key: bsonTimestamp(2, 1) } + a: { key: new BsonTimestamp(1, 1) }, + b: { key: new BsonTimestamp(1, 2) }, + c: { key: new BsonTimestamp(2, 1) } }; return withTestProjectIdAndCollectionSettings( persistence, @@ -2661,7 +2663,7 @@ apiDescribe('Database', persistence => { let orderedQuery = 
query( coll, - where('key', '>', bsonTimestamp(1, 1)), + where('key', '>', new BsonTimestamp(1, 1)), orderBy('key', 'desc') ); @@ -2678,7 +2680,7 @@ apiDescribe('Database', persistence => { orderedQuery = query( coll, - where('key', '!=', bsonTimestamp(1, 1)), + where('key', '!=', new BsonTimestamp(1, 1)), orderBy('key', 'desc') ); @@ -2698,9 +2700,9 @@ apiDescribe('Database', persistence => { it('can filter and order Binary values', async () => { const testDocs = { - a: { key: bsonBinaryData(1, new Uint8Array([1, 2, 3])) }, - b: { key: bsonBinaryData(1, new Uint8Array([1, 2, 4])) }, - c: { key: bsonBinaryData(2, new Uint8Array([1, 2, 3])) } + a: { key: new BsonBinaryData(1, new Uint8Array([1, 2, 3])) }, + b: { key: new BsonBinaryData(1, new Uint8Array([1, 2, 4])) }, + c: { key: new BsonBinaryData(2, new Uint8Array([1, 2, 3])) } }; return withTestProjectIdAndCollectionSettings( persistence, @@ -2713,7 +2715,7 @@ apiDescribe('Database', persistence => { let orderedQuery = query( coll, - where('key', '>', bsonBinaryData(1, new Uint8Array([1, 2, 3]))), + where('key', '>', new BsonBinaryData(1, new Uint8Array([1, 2, 3]))), orderBy('key', 'desc') ); @@ -2730,8 +2732,12 @@ apiDescribe('Database', persistence => { orderedQuery = query( coll, - where('key', '>=', bsonBinaryData(1, new Uint8Array([1, 2, 3]))), - where('key', '<', bsonBinaryData(2, new Uint8Array([1, 2, 3]))), + where( + 'key', + '>=', + new BsonBinaryData(1, new Uint8Array([1, 2, 3])) + ), + where('key', '<', new BsonBinaryData(2, new Uint8Array([1, 2, 3]))), orderBy('key', 'desc') ); @@ -2751,9 +2757,9 @@ apiDescribe('Database', persistence => { it('can filter and order Regex values', async () => { const testDocs = { - a: { key: regex('^bar', 'i') }, - b: { key: regex('^bar', 'x') }, - c: { key: regex('^baz', 'i') } + a: { key: new RegexValue('^bar', 'i') }, + b: { key: new RegexValue('^bar', 'x') }, + c: { key: new RegexValue('^baz', 'i') } }; return withTestProjectIdAndCollectionSettings( persistence, @@ 
-2767,8 +2773,8 @@ apiDescribe('Database', persistence => { const orderedQuery = query( coll, or( - where('key', '>', regex('^bar', 'x')), - where('key', '!=', regex('^bar', 'x')) + where('key', '>', new RegexValue('^bar', 'x')), + where('key', '!=', new RegexValue('^bar', 'x')) ), orderBy('key', 'desc') ); @@ -2789,11 +2795,11 @@ apiDescribe('Database', persistence => { it('can filter and order minKey values', async () => { const testDocs = { - a: { key: minKey() }, - b: { key: minKey() }, + a: { key: MinKey.instance() }, + b: { key: MinKey.instance() }, c: { key: null }, d: { key: 1 }, - e: { key: maxKey() } + e: { key: MaxKey.instance() } }; return withTestProjectIdAndCollectionSettings( persistence, @@ -2804,7 +2810,10 @@ apiDescribe('Database', persistence => { // Populate the cache with all docs first await getDocs(coll); - let filteredQuery = query(coll, where('key', '==', minKey())); + let filteredQuery = query( + coll, + where('key', '==', MinKey.instance()) + ); let snapshot = await getDocs(filteredQuery); expect(toDataArray(snapshot)).to.deep.equal([ testDocs['a'], @@ -2817,7 +2826,7 @@ apiDescribe('Database', persistence => { ); // TODO(Mila/BSON): uncomment after the null inclusion bug - // filteredQuery = query(coll, where('key', '!=', minKey())); + // filteredQuery = query(coll, where('key', '!=', MinKey.instance())); // snapshot = await getDocs(filteredQuery); // expect(toDataArray(snapshot)).to.deep.equal([ // testDocs['d'], @@ -2829,7 +2838,7 @@ apiDescribe('Database', persistence => { // toIds(snapshot) // ); - filteredQuery = query(coll, where('key', '>=', minKey())); + filteredQuery = query(coll, where('key', '>=', MinKey.instance())); snapshot = await getDocs(filteredQuery); expect(toDataArray(snapshot)).to.deep.equal([ testDocs['a'], @@ -2841,7 +2850,7 @@ apiDescribe('Database', persistence => { toIds(snapshot) ); - filteredQuery = query(coll, where('key', '<=', minKey())); + filteredQuery = query(coll, where('key', '<=', MinKey.instance())); 
snapshot = await getDocs(filteredQuery); expect(toDataArray(snapshot)).to.deep.equal([ testDocs['a'], @@ -2853,7 +2862,7 @@ apiDescribe('Database', persistence => { toIds(snapshot) ); - filteredQuery = query(coll, where('key', '>', minKey())); + filteredQuery = query(coll, where('key', '>', MinKey.instance())); snapshot = await getDocs(filteredQuery); expect(toDataArray(snapshot)).to.deep.equal([]); await assertSDKQueryResultsConsistentWithBackend( @@ -2862,7 +2871,7 @@ apiDescribe('Database', persistence => { toIds(snapshot) ); - filteredQuery = query(coll, where('key', '<', minKey())); + filteredQuery = query(coll, where('key', '<', MinKey.instance())); snapshot = await getDocs(filteredQuery); expect(toDataArray(snapshot)).to.deep.equal([]); await assertSDKQueryResultsConsistentWithBackend( @@ -2885,10 +2894,10 @@ apiDescribe('Database', persistence => { it('can filter and order maxKey values', async () => { const testDocs = { - a: { key: minKey() }, + a: { key: MinKey.instance() }, b: { key: 1 }, - c: { key: maxKey() }, - d: { key: maxKey() }, + c: { key: MaxKey.instance() }, + d: { key: MaxKey.instance() }, e: { key: null } }; return withTestProjectIdAndCollectionSettings( @@ -2900,7 +2909,10 @@ apiDescribe('Database', persistence => { // Populate the cache with all docs first await getDocs(coll); - let filteredQuery = query(coll, where('key', '==', maxKey())); + let filteredQuery = query( + coll, + where('key', '==', MaxKey.instance()) + ); let snapshot = await getDocs(filteredQuery); expect(toDataArray(snapshot)).to.deep.equal([ testDocs['c'], @@ -2913,7 +2925,7 @@ apiDescribe('Database', persistence => { ); // TODO(Mila/BSON): uncomment after the null inclusion bug - // filteredQuery = query(coll, where('key', '!=', maxKey())); + // filteredQuery = query(coll, where('key', '!=', MaxKey.instance())); // snapshot = await getDocs(filteredQuery); // expect(toDataArray(snapshot)).to.deep.equal([ // testDocs['a'], @@ -2925,7 +2937,7 @@ apiDescribe('Database', 
persistence => { // toIds(snapshot) // ); - filteredQuery = query(coll, where('key', '>=', maxKey())); + filteredQuery = query(coll, where('key', '>=', MaxKey.instance())); snapshot = await getDocs(filteredQuery); expect(toDataArray(snapshot)).to.deep.equal([ testDocs['c'], @@ -2937,7 +2949,7 @@ apiDescribe('Database', persistence => { toIds(snapshot) ); - filteredQuery = query(coll, where('key', '<=', maxKey())); + filteredQuery = query(coll, where('key', '<=', MaxKey.instance())); snapshot = await getDocs(filteredQuery); expect(toDataArray(snapshot)).to.deep.equal([ testDocs['c'], @@ -2949,7 +2961,7 @@ apiDescribe('Database', persistence => { toIds(snapshot) ); - filteredQuery = query(coll, where('key', '>', maxKey())); + filteredQuery = query(coll, where('key', '>', MaxKey.instance())); snapshot = await getDocs(filteredQuery); expect(toDataArray(snapshot)).to.deep.equal([]); await assertSDKQueryResultsConsistentWithBackend( @@ -2958,7 +2970,7 @@ apiDescribe('Database', persistence => { toIds(snapshot) ); - filteredQuery = query(coll, where('key', '<', maxKey())); + filteredQuery = query(coll, where('key', '<', MaxKey.instance())); snapshot = await getDocs(filteredQuery); expect(toDataArray(snapshot)).to.deep.equal([]); await assertSDKQueryResultsConsistentWithBackend( @@ -2981,11 +2993,11 @@ apiDescribe('Database', persistence => { it('can handle null with bson values', async () => { const testDocs = { - a: { key: minKey() }, + a: { key: MinKey.instance() }, b: { key: null }, c: { key: null }, d: { key: 1 }, - e: { key: maxKey() } + e: { key: MaxKey.instance() } }; return withTestProjectIdAndCollectionSettings( @@ -3027,12 +3039,12 @@ apiDescribe('Database', persistence => { it('can listen to documents with bson types', async () => { const testDocs = { - a: { key: maxKey() }, - b: { key: minKey() }, - c: { key: bsonTimestamp(1, 2) }, - d: { key: bsonObjectId('507f191e810c19729de860ea') }, - e: { key: bsonBinaryData(1, new Uint8Array([1, 2, 3])) }, - f: { key: 
regex('^foo', 'i') } + a: { key: MaxKey.instance() }, + b: { key: MinKey.instance() }, + c: { key: new BsonTimestamp(1, 2) }, + d: { key: new BsonObjectId('507f191e810c19729de860ea') }, + e: { key: new BsonBinaryData(1, new Uint8Array([1, 2, 3])) }, + f: { key: new RegexValue('^foo', 'i') } }; return withTestProjectIdAndCollectionSettings( persistence, @@ -3055,7 +3067,7 @@ apiDescribe('Database', persistence => { testDocs['a'] ]); - const newData = { key: int32(2) }; + const newData = { key: new Int32Value(2) }; await setDoc(doc(coll, 'g'), newData); listenSnapshot = await storeEvent.awaitEvent(); expect(toDataArray(listenSnapshot)).to.deep.equal([ @@ -3077,9 +3089,9 @@ apiDescribe('Database', persistence => { // eslint-disable-next-line no-restricted-properties it.skip('can run transactions on documents with bson types', async () => { const testDocs = { - a: { key: bsonTimestamp(1, 2) }, - b: { key: regex('^foo', 'i') }, - c: { key: bsonBinaryData(1, new Uint8Array([1, 2, 3])) } + a: { key: new BsonTimestamp(1, 2) }, + b: { key: new RegexValue('^foo', 'i') }, + c: { key: new BsonBinaryData(1, new Uint8Array([1, 2, 3])) } }; return withTestProjectIdAndCollectionSettings( persistence, @@ -3114,24 +3126,24 @@ apiDescribe('Database', persistence => { it('SDK orders different value types together the same way online and offline', async () => { const testDocs: { [key: string]: DocumentData } = { a: { key: null }, - b: { key: minKey() }, + b: { key: MinKey.instance() }, c: { key: true }, d: { key: NaN }, - e: { key: int32(1) }, + e: { key: new Int32Value(1) }, f: { key: 2.0 }, g: { key: 3 }, h: { key: new Timestamp(100, 123456000) }, - i: { key: bsonTimestamp(1, 2) }, + i: { key: new BsonTimestamp(1, 2) }, j: { key: 'string' }, k: { key: Bytes.fromUint8Array(new Uint8Array([0, 1, 255])) }, - l: { key: bsonBinaryData(1, new Uint8Array([1, 2, 3])) }, - n: { key: bsonObjectId('507f191e810c19729de860ea') }, + l: { key: new BsonBinaryData(1, new Uint8Array([1, 2, 3])) }, + 
n: { key: new BsonObjectId('507f191e810c19729de860ea') }, o: { key: new GeoPoint(0, 0) }, - p: { key: regex('^foo', 'i') }, + p: { key: new RegexValue('^foo', 'i') }, q: { key: [1, 2] }, r: { key: vector([1, 2]) }, s: { key: { a: 1 } }, - t: { key: maxKey() } + t: { key: MaxKey.instance() } }; return withTestProjectIdAndCollectionSettings( @@ -3181,25 +3193,25 @@ apiDescribe('Database', persistence => { it('SDK orders bson types the same way online and offline', async () => { const testDocs: { [key: string]: DocumentData } = { - a: { key: maxKey() }, // maxKeys are all equal - b: { key: maxKey() }, - c: { key: int32(1) }, - d: { key: int32(-1) }, - e: { key: int32(0) }, - f: { key: bsonTimestamp(1, 1) }, - g: { key: bsonTimestamp(2, 1) }, - h: { key: bsonTimestamp(1, 2) }, - i: { key: bsonBinaryData(1, new Uint8Array([1, 2, 3])) }, - j: { key: bsonBinaryData(1, new Uint8Array([1, 1, 4])) }, - k: { key: bsonBinaryData(2, new Uint8Array([1, 0, 0])) }, - l: { key: bsonObjectId('507f191e810c19729de860eb') }, - m: { key: bsonObjectId('507f191e810c19729de860ea') }, - n: { key: bsonObjectId('407f191e810c19729de860ea') }, - o: { key: regex('^foo', 'i') }, - p: { key: regex('^foo', 'm') }, - q: { key: regex('^bar', 'i') }, - r: { key: minKey() }, // minKeys are all equal - s: { key: minKey() } + a: { key: MaxKey.instance() }, // maxKeys are all equal + b: { key: MaxKey.instance() }, + c: { key: new Int32Value(1) }, + d: { key: new Int32Value(-1) }, + e: { key: new Int32Value(0) }, + f: { key: new BsonTimestamp(1, 1) }, + g: { key: new BsonTimestamp(2, 1) }, + h: { key: new BsonTimestamp(1, 2) }, + i: { key: new BsonBinaryData(1, new Uint8Array([1, 2, 3])) }, + j: { key: new BsonBinaryData(1, new Uint8Array([1, 1, 4])) }, + k: { key: new BsonBinaryData(2, new Uint8Array([1, 0, 0])) }, + l: { key: new BsonObjectId('507f191e810c19729de860eb') }, + m: { key: new BsonObjectId('507f191e810c19729de860ea') }, + n: { key: new BsonObjectId('407f191e810c19729de860ea') }, + o: { key: 
new RegexValue('^foo', 'i') }, + p: { key: new RegexValue('^foo', 'm') }, + q: { key: new RegexValue('^bar', 'i') }, + r: { key: MinKey.instance() }, // minKeys are all equal + s: { key: MinKey.instance() } }; return withTestProjectIdAndCollectionSettings( diff --git a/packages/firestore/test/integration/api/type.test.ts b/packages/firestore/test/integration/api/type.test.ts index 156eba426f8..2f7cb7f9295 100644 --- a/packages/firestore/test/integration/api/type.test.ts +++ b/packages/firestore/test/integration/api/type.test.ts @@ -20,9 +20,9 @@ import { expect } from 'chai'; import { addEqualityMatcher } from '../../util/equality_matcher'; import { EventsAccumulator } from '../util/events_accumulator'; import { - bsonBinaryData, - bsonObjectId, - bsonTimestamp, + BsonBinaryData, + BsonObjectId, + BsonTimestamp, Bytes, collection, doc, @@ -34,15 +34,15 @@ import { GeoPoint, getDoc, getDocs, - int32, - maxKey, - minKey, + Int32Value, + MaxKey, + MinKey, onSnapshot, orderBy, query, QuerySnapshot, refEqual, - regex, + RegexValue, runTransaction, setDoc, Timestamp, @@ -256,7 +256,9 @@ apiDescribe('Firestore', persistence => { settings, 1, async dbs => { - await expectRoundtripWithoutTransaction(dbs[0], { min: minKey() }); + await expectRoundtripWithoutTransaction(dbs[0], { + min: MinKey.instance() + }); } ); }); @@ -268,7 +270,9 @@ apiDescribe('Firestore', persistence => { settings, 1, async dbs => { - await expectRoundtripWithoutTransaction(dbs[0], { max: maxKey() }); + await expectRoundtripWithoutTransaction(dbs[0], { + max: MaxKey.instance() + }); } ); }); @@ -281,7 +285,7 @@ apiDescribe('Firestore', persistence => { 1, async dbs => { await expectRoundtripWithoutTransaction(dbs[0], { - regex: regex('^foo', 'i') + regex: new RegexValue('^foo', 'i') }); } ); @@ -294,7 +298,9 @@ apiDescribe('Firestore', persistence => { settings, 1, async dbs => { - await expectRoundtripWithoutTransaction(dbs[0], { int32: int32(1) }); + await expectRoundtripWithoutTransaction(dbs[0], { 
+ int32: new Int32Value(1) + }); } ); }); @@ -307,7 +313,7 @@ apiDescribe('Firestore', persistence => { 1, async dbs => { await expectRoundtripWithoutTransaction(dbs[0], { - bsonTimestamp: bsonTimestamp(1, 2) + bsonTimestamp: new BsonTimestamp(1, 2) }); } ); @@ -321,7 +327,7 @@ apiDescribe('Firestore', persistence => { 1, async dbs => { await expectRoundtripWithoutTransaction(dbs[0], { - objectId: bsonObjectId('507f191e810c19729de860ea') + objectId: new BsonObjectId('507f191e810c19729de860ea') }); } ); @@ -335,7 +341,7 @@ apiDescribe('Firestore', persistence => { 1, async dbs => { await expectRoundtripWithoutTransaction(dbs[0], { - binary: bsonBinaryData(1, new Uint8Array([1, 2, 3])) + binary: new BsonBinaryData(1, new Uint8Array([1, 2, 3])) }); } ); @@ -350,12 +356,12 @@ apiDescribe('Firestore', persistence => { async dbs => { await expectRoundtripWithoutTransaction(dbs[0], { array: [ - bsonBinaryData(1, new Uint8Array([1, 2, 3])), - bsonObjectId('507f191e810c19729de860ea'), - int32(1), - minKey(), - maxKey(), - regex('^foo', 'i') + new BsonBinaryData(1, new Uint8Array([1, 2, 3])), + new BsonObjectId('507f191e810c19729de860ea'), + new Int32Value(1), + MinKey.instance(), + MaxKey.instance(), + new RegexValue('^foo', 'i') ] }); } @@ -371,12 +377,12 @@ apiDescribe('Firestore', persistence => { async dbs => { await expectRoundtripWithoutTransaction(dbs[0], { object: { - binary: bsonBinaryData(1, new Uint8Array([1, 2, 3])), - objectId: bsonObjectId('507f191e810c19729de860ea'), - int32: int32(1), - min: minKey(), - max: maxKey(), - regex: regex('^foo', 'i') + binary: new BsonBinaryData(1, new Uint8Array([1, 2, 3])), + objectId: new BsonObjectId('507f191e810c19729de860ea'), + int32: new Int32Value(1), + min: MinKey.instance(), + max: MaxKey.instance(), + regex: new RegexValue('^foo', 'i') } }); } @@ -393,7 +399,7 @@ apiDescribe('Firestore', persistence => { const docRef = doc(coll, 'test-doc'); let errorMessage; try { - await setDoc(docRef, { key: int32(2147483648) }); + 
await setDoc(docRef, { key: new Int32Value(2147483648) }); } catch (err) { errorMessage = (err as FirestoreError)?.message; } @@ -402,7 +408,7 @@ apiDescribe('Firestore', persistence => { ); try { - await setDoc(docRef, { key: int32(-2147483650) }); + await setDoc(docRef, { key: new Int32Value(-2147483650) }); } catch (err) { errorMessage = (err as FirestoreError)?.message; } @@ -424,22 +430,22 @@ apiDescribe('Firestore', persistence => { let errorMessage; try { // BSON timestamp larger than 32-bit integer gets rejected - await setDoc(docRef, { key: bsonTimestamp(4294967296, 2) }); + await setDoc(docRef, { key: new BsonTimestamp(4294967296, 2) }); } catch (err) { errorMessage = (err as FirestoreError)?.message; } expect(errorMessage).to.contains( - "The field 'seconds' value (4,294,967,296) does not represent an unsigned 32-bit integer." + "BsonTimestamp 'seconds' must be in the range of a 32-bit unsigned integer." ); try { // negative BSON timestamp gets rejected - await setDoc(docRef, { key: bsonTimestamp(-1, 2) }); + await setDoc(docRef, { key: new BsonTimestamp(-1, 2) }); } catch (err) { errorMessage = (err as FirestoreError)?.message; } expect(errorMessage).to.contains( - "The field 'seconds' value (-1) does not represent an unsigned 32-bit integer." + "BsonTimestamp 'seconds' must be in the range of a 32-bit unsigned integer." 
); } ); @@ -455,7 +461,7 @@ apiDescribe('Firestore', persistence => { const docRef = doc(coll, 'test-doc'); let errorMessage; try { - await setDoc(docRef, { key: regex('foo', 'a') }); + await setDoc(docRef, { key: new RegexValue('foo', 'a') }); } catch (err) { errorMessage = (err as FirestoreError)?.message; } @@ -478,7 +484,7 @@ apiDescribe('Firestore', persistence => { let errorMessage; try { // bsonObjectId with length not equal to 24 gets rejected - await setDoc(docRef, { key: bsonObjectId('foo') }); + await setDoc(docRef, { key: new BsonObjectId('foo') }); } catch (err) { errorMessage = (err as FirestoreError)?.message; } @@ -500,7 +506,7 @@ apiDescribe('Firestore', persistence => { let errorMessage; try { await setDoc(docRef, { - key: bsonBinaryData(1234, new Uint8Array([1, 2, 3])) + key: new BsonBinaryData(1234, new Uint8Array([1, 2, 3])) }); } catch (err) { errorMessage = (err as FirestoreError)?.message; @@ -515,26 +521,28 @@ apiDescribe('Firestore', persistence => { it('can order values of different TypeOrder together', async () => { const testDocs: { [key: string]: DocumentData } = { nullValue: { key: null }, - minValue: { key: minKey() }, + minValue: { key: MinKey.instance() }, booleanValue: { key: true }, nanValue: { key: NaN }, - int32Value: { key: int32(1) }, + int32Value: { key: new Int32Value(1) }, doubleValue: { key: 2.0 }, integerValue: { key: 3 }, timestampValue: { key: new Timestamp(100, 123456000) }, - bsonTimestampValue: { key: bsonTimestamp(1, 2) }, + bsonTimestampValue: { key: new BsonTimestamp(1, 2) }, stringValue: { key: 'string' }, bytesValue: { key: Bytes.fromUint8Array(new Uint8Array([0, 1, 255])) }, - bsonBinaryValue: { key: bsonBinaryData(1, new Uint8Array([1, 2, 3])) }, + bsonBinaryValue: { + key: new BsonBinaryData(1, new Uint8Array([1, 2, 3])) + }, // referenceValue: {key: ref('coll/doc')}, referenceValue: { key: 'placeholder' }, - objectIdValue: { key: bsonObjectId('507f191e810c19729de860ea') }, + objectIdValue: { key: new 
BsonObjectId('507f191e810c19729de860ea') }, geoPointValue: { key: new GeoPoint(0, 0) }, - regexValue: { key: regex('^foo', 'i') }, + regexValue: { key: new RegexValue('^foo', 'i') }, arrayValue: { key: [1, 2] }, vectorValue: { key: vector([1, 2]) }, objectValue: { key: { a: 1 } }, - maxValue: { key: maxKey() } + maxValue: { key: MaxKey.instance() } }; return withTestProjectIdAndCollectionSettings( diff --git a/packages/firestore/test/lite/integration.test.ts b/packages/firestore/test/lite/integration.test.ts index 9b647587503..25f372bcb95 100644 --- a/packages/firestore/test/lite/integration.test.ts +++ b/packages/firestore/test/lite/integration.test.ts @@ -28,6 +28,9 @@ import { sum, average } from '../../src/lite-api/aggregate'; +import { BsonBinaryData } from '../../src/lite-api/bson_binary_data'; +import { BsonObjectId } from '../../src/lite-api/bson_object_Id'; +import { BsonTimestamp } from '../../src/lite-api/bson_timestamp'; import { Bytes } from '../../src/lite-api/bytes'; import { Firestore, @@ -40,18 +43,14 @@ import { FieldValue } from '../../src/lite-api/field_value'; import { arrayRemove, arrayUnion, - bsonBinaryData, - bsonObjectId, - bsonTimestamp, deleteField, increment, - int32, - maxKey, - minKey, - regex, serverTimestamp, vector } from '../../src/lite-api/field_value_impl'; +import { Int32Value } from '../../src/lite-api/int32_value'; +import { MaxKey } from '../../src/lite-api/max_key'; +import { MinKey } from '../../src/lite-api/min_key'; import { endAt, endBefore, @@ -85,6 +84,7 @@ import { setDoc, updateDoc } from '../../src/lite-api/reference_impl'; +import { RegexValue } from '../../src/lite-api/regex_value'; import { FirestoreDataConverter, snapshotEqual, @@ -2974,37 +2974,41 @@ describe.skip('BSON types', () => { it('can be read and written using the lite SDK', async () => { return withTestCollection(async coll => { const ref = await addDoc(coll, { - objectId: bsonObjectId('507f191e810c19729de860ea'), - int32: int32(1), - min: minKey(), 
- max: maxKey(), - regex: regex('^foo', 'i') + objectId: new BsonObjectId('507f191e810c19729de860ea'), + int32: new Int32Value(1), + min: MinKey.instance(), + max: MaxKey.instance(), + regex: new RegexValue('^foo', 'i') }); await setDoc( ref, { - binary: bsonBinaryData(1, new Uint8Array([1, 2, 3])), - timestamp: bsonTimestamp(1, 2), - int32: int32(2) + binary: new BsonBinaryData(1, new Uint8Array([1, 2, 3])), + timestamp: new BsonTimestamp(1, 2), + int32: new Int32Value(2) }, { merge: true } ); const snap1 = await getDoc(ref); expect( - snap1.get('objectId').isEqual(bsonObjectId('507f191e810c19729de860ea')) + snap1 + .get('objectId') + .isEqual(new BsonObjectId('507f191e810c19729de860ea')) ).to.be.true; - expect(snap1.get('int32').isEqual(int32(2))).to.be.true; - expect(snap1.get('min') === minKey()).to.be.true; - expect(snap1.get('max') === maxKey()).to.be.true; + expect(snap1.get('int32').isEqual(new Int32Value(2))).to.be.true; + expect(snap1.get('min') === MinKey.instance()).to.be.true; + expect(snap1.get('max') === MaxKey.instance()).to.be.true; expect( snap1 .get('binary') - .isEqual(bsonBinaryData(1, new Uint8Array([1, 2, 3]))) + .isEqual(new BsonBinaryData(1, new Uint8Array([1, 2, 3]))) ).to.be.true; - expect(snap1.get('timestamp').isEqual(bsonTimestamp(1, 2))).to.be.true; - expect(snap1.get('regex').isEqual(regex('^foo', 'i'))).to.be.true; + expect(snap1.get('timestamp').isEqual(new BsonTimestamp(1, 2))).to.be + .true; + expect(snap1.get('regex').isEqual(new RegexValue('^foo', 'i'))).to.be + .true; }); }); }); diff --git a/packages/firestore/test/unit/index/firestore_index_value_writer.test.ts b/packages/firestore/test/unit/index/firestore_index_value_writer.test.ts index c646726feeb..907881c262c 100644 --- a/packages/firestore/test/unit/index/firestore_index_value_writer.test.ts +++ b/packages/firestore/test/unit/index/firestore_index_value_writer.test.ts @@ -16,15 +16,13 @@ */ import { expect } from 'chai'; -import { - bsonBinaryData, - bsonObjectId, - 
bsonTimestamp, - int32, - regex -} from '../../../lite'; import { FirestoreIndexValueWriter } from '../../../src/index/firestore_index_value_writer'; import { IndexByteEncoder } from '../../../src/index/index_byte_encoder'; +import { BsonBinaryData } from '../../../src/lite-api/bson_binary_data'; +import { BsonObjectId } from '../../../src/lite-api/bson_object_Id'; +import { BsonTimestamp } from '../../../src/lite-api/bson_timestamp'; +import { Int32Value } from '../../../src/lite-api/int32_value'; +import { RegexValue } from '../../../src/lite-api/regex_value'; import { Timestamp } from '../../../src/lite-api/timestamp'; import { parseBsonBinaryData, @@ -287,7 +285,7 @@ describe('Firestore Index Value Writer', () => { } }; const value3 = parseBsonObjectId( - bsonObjectId('507f191e810c19729de860ea') + new BsonObjectId('507f191e810c19729de860ea') ); expect( @@ -340,8 +338,8 @@ describe('Firestore Index Value Writer', () => { } } }; - const value3 = parseBsonTimestamp(bsonTimestamp(1, 2)); - const value4 = parseBsonTimestamp(bsonTimestamp(2, 1)); + const value3 = parseBsonTimestamp(new BsonTimestamp(1, 2)); + const value4 = parseBsonTimestamp(new BsonTimestamp(2, 1)); expect( compareIndexEncodedValues(value1, value2, IndexKind.ASCENDING) @@ -400,7 +398,7 @@ describe('Firestore Index Value Writer', () => { ); const value3 = parseBsonBinaryData( serializer, - bsonBinaryData(1, new Uint8Array([1, 2, 3])) + new BsonBinaryData(1, new Uint8Array([1, 2, 3])) ); const jsonSerializer = new JsonProtoSerializer( @@ -410,7 +408,7 @@ describe('Firestore Index Value Writer', () => { const value4 = parseBsonBinaryData( jsonSerializer, - bsonBinaryData(1, new Uint8Array([1, 2, 3])) + new BsonBinaryData(1, new Uint8Array([1, 2, 3])) ); expect( @@ -473,8 +471,8 @@ describe('Firestore Index Value Writer', () => { } } }; - const value3 = parseRegexValue(regex('^foo', 'i')); - const value4 = parseRegexValue(regex('^zoo', 'i')); + const value3 = parseRegexValue(new RegexValue('^foo', 
'i')); + const value4 = parseRegexValue(new RegexValue('^zoo', 'i')); expect( compareIndexEncodedValues(value1, value2, IndexKind.ASCENDING) @@ -522,7 +520,8 @@ describe('Firestore Index Value Writer', () => { } } }; - const value3 = parseInt32Value(int32(1)); + const value3 = parseInt32Value(new Int32Value(1)); + const value4 = parseInt32Value(new Int32Value(2)); expect( compareIndexEncodedValues(value1, value2, IndexKind.ASCENDING) @@ -543,6 +542,16 @@ describe('Firestore Index Value Writer', () => { expect( compareIndexEncodedValues(value3, value1, IndexKind.ASCENDING) ).to.equal(0); + + expect( + compareIndexEncodedValues(value4, value1, IndexKind.ASCENDING) + ).to.equal(1); + expect( + compareIndexEncodedValues(value4, value2, IndexKind.ASCENDING) + ).to.equal(0); + expect( + compareIndexEncodedValues(value4, value3, IndexKind.ASCENDING) + ).to.equal(1); }); it('can compare BSON MinKey', () => { diff --git a/packages/firestore/test/unit/local/index_manager.test.ts b/packages/firestore/test/unit/local/index_manager.test.ts index 1097f5e682e..b6af448b2db 100644 --- a/packages/firestore/test/unit/local/index_manager.test.ts +++ b/packages/firestore/test/unit/local/index_manager.test.ts @@ -17,7 +17,17 @@ import { expect } from 'chai'; -import { Bytes, GeoPoint } from '../../../src/'; +import { + BsonBinaryData, + BsonObjectId, + BsonTimestamp, + Bytes, + GeoPoint, + Int32Value, + MaxKey, + MinKey, + RegexValue +} from '../../../src/'; import { User } from '../../../src/auth/user'; import { FieldFilter } from '../../../src/core/filter'; import { @@ -31,16 +41,7 @@ import { queryWithLimit, queryWithStartAt } from '../../../src/core/query'; -import { - bsonBinaryData, - bsonObjectId, - bsonTimestamp, - int32, - maxKey, - minKey, - regex, - vector -} from '../../../src/lite-api/field_value_impl'; +import { vector } from '../../../src/lite-api/field_value_impl'; import { Timestamp } from '../../../src/lite-api/timestamp'; import { displayNameForIndexType, @@ -1882,13 
+1883,13 @@ describe('IndexedDbIndexManager', async () => { ); await addDoc('coll/doc1', { - key: bsonObjectId('507f191e810c19729de860ea') + key: new BsonObjectId('507f191e810c19729de860ea') }); await addDoc('coll/doc2', { - key: bsonObjectId('507f191e810c19729de860eb') + key: new BsonObjectId('507f191e810c19729de860eb') }); await addDoc('coll/doc3', { - key: bsonObjectId('507f191e810c19729de860ec') + key: new BsonObjectId('507f191e810c19729de860ec') }); const fieldIndexes = await indexManager.getFieldIndexes('coll'); @@ -1899,49 +1900,49 @@ describe('IndexedDbIndexManager', async () => { q = queryWithAddedFilter( query('coll'), - filter('key', '==', bsonObjectId('507f191e810c19729de860ea')) + filter('key', '==', new BsonObjectId('507f191e810c19729de860ea')) ); await verifyResults(q, 'coll/doc1'); q = queryWithAddedFilter( query('coll'), - filter('key', '!=', bsonObjectId('507f191e810c19729de860ea')) + filter('key', '!=', new BsonObjectId('507f191e810c19729de860ea')) ); await verifyResults(q, 'coll/doc2', 'coll/doc3'); q = queryWithAddedFilter( query('coll'), - filter('key', '>=', bsonObjectId('507f191e810c19729de860eb')) + filter('key', '>=', new BsonObjectId('507f191e810c19729de860eb')) ); await verifyResults(q, 'coll/doc2', 'coll/doc3'); q = queryWithAddedFilter( query('coll'), - filter('key', '<=', bsonObjectId('507f191e810c19729de860eb')) + filter('key', '<=', new BsonObjectId('507f191e810c19729de860eb')) ); await verifyResults(q, 'coll/doc1', 'coll/doc2'); q = queryWithAddedFilter( query('coll'), - filter('key', '>', bsonObjectId('507f191e810c19729de860eb')) + filter('key', '>', new BsonObjectId('507f191e810c19729de860eb')) ); await verifyResults(q, 'coll/doc3'); q = queryWithAddedFilter( query('coll'), - filter('key', '<', bsonObjectId('507f191e810c19729de860eb')) + filter('key', '<', new BsonObjectId('507f191e810c19729de860eb')) ); await verifyResults(q, 'coll/doc1'); q = queryWithAddedFilter( query('coll'), - filter('key', '>', 
bsonObjectId('507f191e810c19729de860ec')) + filter('key', '>', new BsonObjectId('507f191e810c19729de860ec')) ); await verifyResults(q); q = queryWithAddedFilter( query('coll'), - filter('key', '<', bsonObjectId('507f191e810c19729de860ea')) + filter('key', '<', new BsonObjectId('507f191e810c19729de860ea')) ); await verifyResults(q); }); @@ -1951,13 +1952,13 @@ describe('IndexedDbIndexManager', async () => { fieldIndex('coll', { fields: [['key', IndexKind.ASCENDING]] }) ); await addDoc('coll/doc1', { - key: bsonBinaryData(1, new Uint8Array([1, 2, 3])) + key: new BsonBinaryData(1, new Uint8Array([1, 2, 3])) }); await addDoc('coll/doc2', { - key: bsonBinaryData(1, new Uint8Array([1, 2, 4])) + key: new BsonBinaryData(1, new Uint8Array([1, 2, 4])) }); await addDoc('coll/doc3', { - key: bsonBinaryData(1, new Uint8Array([2, 1, 2])) + key: new BsonBinaryData(1, new Uint8Array([2, 1, 2])) }); const fieldIndexes = await indexManager.getFieldIndexes('coll'); @@ -1968,49 +1969,49 @@ describe('IndexedDbIndexManager', async () => { q = queryWithAddedFilter( query('coll'), - filter('key', '==', bsonBinaryData(1, new Uint8Array([1, 2, 3]))) + filter('key', '==', new BsonBinaryData(1, new Uint8Array([1, 2, 3]))) ); await verifyResults(q, 'coll/doc1'); q = queryWithAddedFilter( query('coll'), - filter('key', '!=', bsonBinaryData(1, new Uint8Array([1, 2, 3]))) + filter('key', '!=', new BsonBinaryData(1, new Uint8Array([1, 2, 3]))) ); await verifyResults(q, 'coll/doc2', 'coll/doc3'); q = queryWithAddedFilter( query('coll'), - filter('key', '>=', bsonBinaryData(1, new Uint8Array([1, 2, 4]))) + filter('key', '>=', new BsonBinaryData(1, new Uint8Array([1, 2, 4]))) ); await verifyResults(q, 'coll/doc2', 'coll/doc3'); q = queryWithAddedFilter( query('coll'), - filter('key', '<=', bsonBinaryData(1, new Uint8Array([1, 2, 4]))) + filter('key', '<=', new BsonBinaryData(1, new Uint8Array([1, 2, 4]))) ); await verifyResults(q, 'coll/doc1', 'coll/doc2'); q = queryWithAddedFilter( query('coll'), - 
filter('key', '>', bsonBinaryData(1, new Uint8Array([1, 2, 4]))) + filter('key', '>', new BsonBinaryData(1, new Uint8Array([1, 2, 4]))) ); await verifyResults(q, 'coll/doc3'); q = queryWithAddedFilter( query('coll'), - filter('key', '<', bsonBinaryData(1, new Uint8Array([1, 2, 4]))) + filter('key', '<', new BsonBinaryData(1, new Uint8Array([1, 2, 4]))) ); await verifyResults(q, 'coll/doc1'); q = queryWithAddedFilter( query('coll'), - filter('key', '>', bsonBinaryData(1, new Uint8Array([2, 1, 2]))) + filter('key', '>', new BsonBinaryData(1, new Uint8Array([2, 1, 2]))) ); await verifyResults(q); q = queryWithAddedFilter( query('coll'), - filter('key', '<', bsonBinaryData(1, new Uint8Array([1, 2, 3]))) + filter('key', '<', new BsonBinaryData(1, new Uint8Array([1, 2, 3]))) ); await verifyResults(q); }); @@ -2020,13 +2021,13 @@ describe('IndexedDbIndexManager', async () => { fieldIndex('coll', { fields: [['key', IndexKind.ASCENDING]] }) ); await addDoc('coll/doc1', { - key: bsonTimestamp(1, 1) + key: new BsonTimestamp(1, 1) }); await addDoc('coll/doc2', { - key: bsonTimestamp(1, 2) + key: new BsonTimestamp(1, 2) }); await addDoc('coll/doc3', { - key: bsonTimestamp(2, 1) + key: new BsonTimestamp(2, 1) }); const fieldIndexes = await indexManager.getFieldIndexes('coll'); @@ -2037,49 +2038,49 @@ describe('IndexedDbIndexManager', async () => { q = queryWithAddedFilter( query('coll'), - filter('key', '==', bsonTimestamp(1, 1)) + filter('key', '==', new BsonTimestamp(1, 1)) ); await verifyResults(q, 'coll/doc1'); q = queryWithAddedFilter( query('coll'), - filter('key', '!=', bsonTimestamp(1, 1)) + filter('key', '!=', new BsonTimestamp(1, 1)) ); await verifyResults(q, 'coll/doc2', 'coll/doc3'); q = queryWithAddedFilter( query('coll'), - filter('key', '>=', bsonTimestamp(1, 2)) + filter('key', '>=', new BsonTimestamp(1, 2)) ); await verifyResults(q, 'coll/doc2', 'coll/doc3'); q = queryWithAddedFilter( query('coll'), - filter('key', '<=', bsonTimestamp(1, 2)) + filter('key', 
'<=', new BsonTimestamp(1, 2)) ); await verifyResults(q, 'coll/doc1', 'coll/doc2'); q = queryWithAddedFilter( query('coll'), - filter('key', '>', bsonTimestamp(1, 2)) + filter('key', '>', new BsonTimestamp(1, 2)) ); await verifyResults(q, 'coll/doc3'); q = queryWithAddedFilter( query('coll'), - filter('key', '<', bsonTimestamp(1, 2)) + filter('key', '<', new BsonTimestamp(1, 2)) ); await verifyResults(q, 'coll/doc1'); q = queryWithAddedFilter( query('coll'), - filter('key', '>', bsonTimestamp(2, 1)) + filter('key', '>', new BsonTimestamp(2, 1)) ); await verifyResults(q); q = queryWithAddedFilter( query('coll'), - filter('key', '<', bsonTimestamp(1, 1)) + filter('key', '<', new BsonTimestamp(1, 1)) ); await verifyResults(q); }); @@ -2089,13 +2090,13 @@ describe('IndexedDbIndexManager', async () => { fieldIndex('coll', { fields: [['key', IndexKind.ASCENDING]] }) ); await addDoc('coll/doc1', { - key: int32(1) + key: new Int32Value(1) }); await addDoc('coll/doc2', { - key: int32(2) + key: new Int32Value(2) }); await addDoc('coll/doc3', { - key: int32(3) + key: new Int32Value(3) }); const fieldIndexes = await indexManager.getFieldIndexes('coll'); expect(fieldIndexes).to.have.length(1); @@ -2103,28 +2104,52 @@ describe('IndexedDbIndexManager', async () => { let q = queryWithAddedOrderBy(query('coll'), orderBy('key')); await verifyResults(q, 'coll/doc1', 'coll/doc2', 'coll/doc3'); - q = queryWithAddedFilter(query('coll'), filter('key', '==', int32(1))); + q = queryWithAddedFilter( + query('coll'), + filter('key', '==', new Int32Value(1)) + ); await verifyResults(q, 'coll/doc1'); - q = queryWithAddedFilter(query('coll'), filter('key', '!=', int32(1))); + q = queryWithAddedFilter( + query('coll'), + filter('key', '!=', new Int32Value(1)) + ); await verifyResults(q, 'coll/doc2', 'coll/doc3'); - q = queryWithAddedFilter(query('coll'), filter('key', '>=', int32(2))); + q = queryWithAddedFilter( + query('coll'), + filter('key', '>=', new Int32Value(2)) + ); await 
verifyResults(q, 'coll/doc2', 'coll/doc3'); - q = queryWithAddedFilter(query('coll'), filter('key', '<=', int32(2))); + q = queryWithAddedFilter( + query('coll'), + filter('key', '<=', new Int32Value(2)) + ); await verifyResults(q, 'coll/doc1', 'coll/doc2'); - q = queryWithAddedFilter(query('coll'), filter('key', '>', int32(2))); + q = queryWithAddedFilter( + query('coll'), + filter('key', '>', new Int32Value(2)) + ); await verifyResults(q, 'coll/doc3'); - q = queryWithAddedFilter(query('coll'), filter('key', '<', int32(2))); + q = queryWithAddedFilter( + query('coll'), + filter('key', '<', new Int32Value(2)) + ); await verifyResults(q, 'coll/doc1'); - q = queryWithAddedFilter(query('coll'), filter('key', '>', int32(3))); + q = queryWithAddedFilter( + query('coll'), + filter('key', '>', new Int32Value(3)) + ); await verifyResults(q); - q = queryWithAddedFilter(query('coll'), filter('key', '<', int32(1))); + q = queryWithAddedFilter( + query('coll'), + filter('key', '<', new Int32Value(1)) + ); await verifyResults(q); }); @@ -2133,13 +2158,13 @@ describe('IndexedDbIndexManager', async () => { fieldIndex('coll', { fields: [['key', IndexKind.ASCENDING]] }) ); await addDoc('coll/doc1', { - key: regex('a', 'i') + key: new RegexValue('a', 'i') }); await addDoc('coll/doc2', { - key: regex('a', 'm') + key: new RegexValue('a', 'm') }); await addDoc('coll/doc3', { - key: regex('b', 'i') + key: new RegexValue('b', 'i') }); const fieldIndexes = await indexManager.getFieldIndexes('coll'); expect(fieldIndexes).to.have.length(1); @@ -2148,49 +2173,49 @@ describe('IndexedDbIndexManager', async () => { q = queryWithAddedFilter( query('coll'), - filter('key', '==', regex('a', 'i')) + filter('key', '==', new RegexValue('a', 'i')) ); await verifyResults(q, 'coll/doc1'); q = queryWithAddedFilter( query('coll'), - filter('key', '!=', regex('a', 'i')) + filter('key', '!=', new RegexValue('a', 'i')) ); await verifyResults(q, 'coll/doc2', 'coll/doc3'); q = queryWithAddedFilter( 
query('coll'), - filter('key', '>=', regex('a', 'm')) + filter('key', '>=', new RegexValue('a', 'm')) ); await verifyResults(q, 'coll/doc2', 'coll/doc3'); q = queryWithAddedFilter( query('coll'), - filter('key', '<=', regex('a', 'm')) + filter('key', '<=', new RegexValue('a', 'm')) ); await verifyResults(q, 'coll/doc1', 'coll/doc2'); q = queryWithAddedFilter( query('coll'), - filter('key', '>', regex('a', 'm')) + filter('key', '>', new RegexValue('a', 'm')) ); await verifyResults(q, 'coll/doc3'); q = queryWithAddedFilter( query('coll'), - filter('key', '<', regex('a', 'm')) + filter('key', '<', new RegexValue('a', 'm')) ); await verifyResults(q, 'coll/doc1'); q = queryWithAddedFilter( query('coll'), - filter('key', '>', regex('b', 'i')) + filter('key', '>', new RegexValue('b', 'i')) ); await verifyResults(q); q = queryWithAddedFilter( query('coll'), - filter('key', '<', regex('a', 'i')) + filter('key', '<', new RegexValue('a', 'i')) ); await verifyResults(q); }); @@ -2200,10 +2225,10 @@ describe('IndexedDbIndexManager', async () => { fieldIndex('coll', { fields: [['key', IndexKind.ASCENDING]] }) ); await addDoc('coll/doc1', { - key: minKey() + key: MinKey.instance() }); await addDoc('coll/doc2', { - key: minKey() + key: MinKey.instance() }); await addDoc('coll/doc3', { key: null @@ -2212,7 +2237,7 @@ describe('IndexedDbIndexManager', async () => { key: 1 }); await addDoc('coll/doc5', { - key: maxKey() + key: MaxKey.instance() }); const fieldIndexes = await indexManager.getFieldIndexes('coll'); @@ -2228,22 +2253,40 @@ describe('IndexedDbIndexManager', async () => { 'coll/doc5' ); - q = queryWithAddedFilter(query('coll'), filter('key', '==', minKey())); + q = queryWithAddedFilter( + query('coll'), + filter('key', '==', MinKey.instance()) + ); await verifyResults(q, 'coll/doc1', 'coll/doc2'); - q = queryWithAddedFilter(query('coll'), filter('key', '!=', minKey())); + q = queryWithAddedFilter( + query('coll'), + filter('key', '!=', MinKey.instance()) + ); await 
verifyResults(q, 'coll/doc4', 'coll/doc5'); - q = queryWithAddedFilter(query('coll'), filter('key', '>=', minKey())); + q = queryWithAddedFilter( + query('coll'), + filter('key', '>=', MinKey.instance()) + ); await verifyResults(q, 'coll/doc1', 'coll/doc2'); - q = queryWithAddedFilter(query('coll'), filter('key', '<=', minKey())); + q = queryWithAddedFilter( + query('coll'), + filter('key', '<=', MinKey.instance()) + ); await verifyResults(q, 'coll/doc1', 'coll/doc2'); - q = queryWithAddedFilter(query('coll'), filter('key', '>', minKey())); + q = queryWithAddedFilter( + query('coll'), + filter('key', '>', MinKey.instance()) + ); await verifyResults(q); - q = queryWithAddedFilter(query('coll'), filter('key', '<', minKey())); + q = queryWithAddedFilter( + query('coll'), + filter('key', '<', MinKey.instance()) + ); await verifyResults(q); }); @@ -2252,16 +2295,16 @@ describe('IndexedDbIndexManager', async () => { fieldIndex('coll', { fields: [['key', IndexKind.ASCENDING]] }) ); await addDoc('coll/doc1', { - key: minKey() + key: MinKey.instance() }); await addDoc('coll/doc2', { key: 1 }); await addDoc('coll/doc3', { - key: maxKey() + key: MaxKey.instance() }); await addDoc('coll/doc4', { - key: maxKey() + key: MaxKey.instance() }); await addDoc('coll/doc5', { key: null @@ -2280,22 +2323,40 @@ describe('IndexedDbIndexManager', async () => { 'coll/doc4' ); - q = queryWithAddedFilter(query('coll'), filter('key', '==', maxKey())); + q = queryWithAddedFilter( + query('coll'), + filter('key', '==', MaxKey.instance()) + ); await verifyResults(q, 'coll/doc3', 'coll/doc4'); - q = queryWithAddedFilter(query('coll'), filter('key', '!=', maxKey())); + q = queryWithAddedFilter( + query('coll'), + filter('key', '!=', MaxKey.instance()) + ); await verifyResults(q, 'coll/doc1', 'coll/doc2'); - q = queryWithAddedFilter(query('coll'), filter('key', '>=', maxKey())); + q = queryWithAddedFilter( + query('coll'), + filter('key', '>=', MaxKey.instance()) + ); await verifyResults(q, 
'coll/doc3', 'coll/doc4'); - q = queryWithAddedFilter(query('coll'), filter('key', '<=', maxKey())); + q = queryWithAddedFilter( + query('coll'), + filter('key', '<=', MaxKey.instance()) + ); await verifyResults(q, 'coll/doc3', 'coll/doc4'); - q = queryWithAddedFilter(query('coll'), filter('key', '>', maxKey())); + q = queryWithAddedFilter( + query('coll'), + filter('key', '>', MaxKey.instance()) + ); await verifyResults(q); - q = queryWithAddedFilter(query('coll'), filter('key', '<', maxKey())); + q = queryWithAddedFilter( + query('coll'), + filter('key', '<', MaxKey.instance()) + ); await verifyResults(q); }); @@ -2304,45 +2365,45 @@ describe('IndexedDbIndexManager', async () => { fieldIndex('coll', { fields: [['key', IndexKind.DESCENDING]] }) ); await addDoc('coll/doc1', { - key: minKey() + key: MinKey.instance() }); await addDoc('coll/doc2', { - key: int32(2) + key: new Int32Value(2) }); await addDoc('coll/doc3', { - key: int32(1) + key: new Int32Value(1) }); await addDoc('coll/doc4', { - key: bsonTimestamp(1, 2) + key: new BsonTimestamp(1, 2) }); await addDoc('coll/doc5', { - key: bsonTimestamp(1, 1) + key: new BsonTimestamp(1, 1) }); await addDoc('coll/doc6', { - key: bsonBinaryData(1, new Uint8Array([1, 2, 4])) + key: new BsonBinaryData(1, new Uint8Array([1, 2, 4])) }); await addDoc('coll/doc7', { - key: bsonBinaryData(1, new Uint8Array([1, 2, 3])) + key: new BsonBinaryData(1, new Uint8Array([1, 2, 3])) }); await addDoc('coll/doc8', { - key: bsonObjectId('507f191e810c19729de860eb') + key: new BsonObjectId('507f191e810c19729de860eb') }); await addDoc('coll/doc9', { - key: bsonObjectId('507f191e810c19729de860ea') + key: new BsonObjectId('507f191e810c19729de860ea') }); await addDoc('coll/doc10', { - key: regex('a', 'm') + key: new RegexValue('a', 'm') }); await addDoc('coll/doc11', { - key: regex('a', 'i') + key: new RegexValue('a', 'i') }); await addDoc('coll/doc12', { - key: maxKey() + key: MaxKey.instance() }); const fieldIndexes = await 
indexManager.getFieldIndexes('coll'); @@ -2375,7 +2436,7 @@ describe('IndexedDbIndexManager', async () => { key: null }); await addDoc('coll/doc2', { - key: minKey() + key: MinKey.instance() }); await addDoc('coll/doc3', { key: true @@ -2384,7 +2445,7 @@ describe('IndexedDbIndexManager', async () => { key: NaN }); await addDoc('coll/doc5', { - key: int32(1) + key: new Int32Value(1) }); await addDoc('coll/doc6', { key: 2.0 @@ -2396,7 +2457,7 @@ describe('IndexedDbIndexManager', async () => { key: new Timestamp(100, 123456000) }); await addDoc('coll/doc9', { - key: bsonTimestamp(1, 2) + key: new BsonTimestamp(1, 2) }); await addDoc('coll/doc10', { key: 'string' @@ -2405,19 +2466,19 @@ describe('IndexedDbIndexManager', async () => { key: Bytes.fromUint8Array(new Uint8Array([0, 1, 255])) as Bytes }); await addDoc('coll/doc12', { - key: bsonBinaryData(1, new Uint8Array([1, 2, 3])) + key: new BsonBinaryData(1, new Uint8Array([1, 2, 3])) }); await addDoc('coll/doc13', { key: ref('coll/doc') }); await addDoc('coll/doc14', { - key: bsonObjectId('507f191e810c19729de860ea') + key: new BsonObjectId('507f191e810c19729de860ea') }); await addDoc('coll/doc15', { key: new GeoPoint(0, 1) }); await addDoc('coll/doc16', { - key: regex('^foo', 'i') + key: new RegexValue('^foo', 'i') }); await addDoc('coll/doc17', { key: [1, 2] @@ -2429,7 +2490,7 @@ describe('IndexedDbIndexManager', async () => { key: { a: 1 } }); await addDoc('coll/doc20', { - key: maxKey() + key: MaxKey.instance() }); const fieldIndexes = await indexManager.getFieldIndexes('coll'); diff --git a/packages/firestore/test/unit/local/local_store_indexeddb.test.ts b/packages/firestore/test/unit/local/local_store_indexeddb.test.ts index 0e5afbc2914..5f68684d193 100644 --- a/packages/firestore/test/unit/local/local_store_indexeddb.test.ts +++ b/packages/firestore/test/unit/local/local_store_indexeddb.test.ts @@ -18,7 +18,18 @@ import { isIndexedDBAvailable } from '@firebase/util'; import { expect } from 'chai'; -import { 
serverTimestamp, Timestamp, GeoPoint } from '../../../src'; +import { + serverTimestamp, + Timestamp, + GeoPoint, + BsonObjectId, + BsonBinaryData, + BsonTimestamp, + Int32Value, + RegexValue, + MaxKey, + MinKey +} from '../../../src'; import { User } from '../../../src/auth/user'; import { BundleConverterImpl } from '../../../src/core/bundle_impl'; import { @@ -30,16 +41,7 @@ import { } from '../../../src/core/query'; import { Target } from '../../../src/core/target'; import { TargetId } from '../../../src/core/types'; -import { - bsonBinaryData, - bsonObjectId, - bsonTimestamp, - int32, - maxKey, - minKey, - regex, - vector -} from '../../../src/lite-api/field_value_impl'; +import { vector } from '../../../src/lite-api/field_value_impl'; import { IndexBackfiller } from '../../../src/local/index_backfiller'; import { LocalStore } from '../../../src/local/local_store'; import { @@ -972,12 +974,14 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { await test.writeMutations( setMutation('coll/a', { - key: bsonObjectId('507f191e810c19729de860ea') + key: new BsonObjectId('507f191e810c19729de860ea') }), setMutation('coll/b', { - key: bsonObjectId('507f191e810c19729de860eb') + key: new BsonObjectId('507f191e810c19729de860eb') }), - setMutation('coll/c', { key: bsonObjectId('507f191e810c19729de860ec') }) + setMutation('coll/c', { + key: new BsonObjectId('507f191e810c19729de860ec') + }) ); await test.backfillIndexes(); @@ -992,7 +996,7 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { query_ = query( 'coll', - filter('key', '==', bsonObjectId('507f191e810c19729de860ea')) + filter('key', '==', new BsonObjectId('507f191e810c19729de860ea')) ); await test.executeQuery(query_); test.assertOverlaysRead(1, 0, { @@ -1002,7 +1006,7 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { query_ = query( 'coll', - filter('key', '!=', bsonObjectId('507f191e810c19729de860ea')) + filter('key', '!=', new 
BsonObjectId('507f191e810c19729de860ea')) ); await test.executeQuery(query_); test.assertOverlaysRead(2, 0, { @@ -1013,7 +1017,7 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { query_ = query( 'coll', - filter('key', '>=', bsonObjectId('507f191e810c19729de860eb')) + filter('key', '>=', new BsonObjectId('507f191e810c19729de860eb')) ); await test.executeQuery(query_); test.assertOverlaysRead(2, 0, { @@ -1024,7 +1028,7 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { query_ = query( 'coll', - filter('key', '<', bsonObjectId('507f191e810c19729de860ea')) + filter('key', '<', new BsonObjectId('507f191e810c19729de860ea')) ); await test.executeQuery(query_); test.assertOverlaysRead(0, 0); @@ -1033,8 +1037,8 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { query_ = query( 'coll', filter('key', 'in', [ - bsonObjectId('507f191e810c19729de860ea'), - bsonObjectId('507f191e810c19729de860eb') + new BsonObjectId('507f191e810c19729de860ea'), + new BsonObjectId('507f191e810c19729de860eb') ]) ); await test.executeQuery(query_); @@ -1047,8 +1051,8 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { query_ = query( 'coll', filter('key', 'not-in', [ - bsonObjectId('507f191e810c19729de860ea'), - bsonObjectId('507f191e810c19729de860eb') + new BsonObjectId('507f191e810c19729de860ea'), + new BsonObjectId('507f191e810c19729de860eb') ]) ); await test.executeQuery(query_); @@ -1065,9 +1069,9 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { }); await test.configureFieldsIndexes(index); await test.writeMutations( - setMutation('coll/a', { key: bsonTimestamp(1000, 1000) }), - setMutation('coll/b', { key: bsonTimestamp(1001, 1000) }), - setMutation('coll/c', { key: bsonTimestamp(1000, 1001) }) + setMutation('coll/a', { key: new BsonTimestamp(1000, 1000) }), + setMutation('coll/b', { key: new BsonTimestamp(1001, 1000) }), + setMutation('coll/c', { key: new BsonTimestamp(1000, 1001) 
}) ); await test.backfillIndexes(); @@ -1080,14 +1084,20 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { }); test.assertQueryReturned(query_, 'coll/a', 'coll/c', 'coll/b'); - query_ = query('coll', filter('key', '==', bsonTimestamp(1000, 1000))); + query_ = query( + 'coll', + filter('key', '==', new BsonTimestamp(1000, 1000)) + ); await test.executeQuery(query_); test.assertOverlaysRead(1, 0, { [key('coll/a').toString()]: MutationType.Set }); test.assertQueryReturned(query_, 'coll/a'); - query_ = query('coll', filter('key', '!=', bsonTimestamp(1000, 1000))); + query_ = query( + 'coll', + filter('key', '!=', new BsonTimestamp(1000, 1000)) + ); await test.executeQuery(query_); test.assertOverlaysRead(2, 0, { [key('coll/b').toString()]: MutationType.Set, @@ -1095,7 +1105,10 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { }); test.assertQueryReturned(query_, 'coll/c', 'coll/b'); - query_ = query('coll', filter('key', '>=', bsonTimestamp(1000, 1001))); + query_ = query( + 'coll', + filter('key', '>=', new BsonTimestamp(1000, 1001)) + ); await test.executeQuery(query_); test.assertOverlaysRead(2, 0, { [key('coll/b').toString()]: MutationType.Set, @@ -1103,7 +1116,7 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { }); test.assertQueryReturned(query_, 'coll/c', 'coll/b'); - query_ = query('coll', filter('key', '<', bsonTimestamp(1000, 1000))); + query_ = query('coll', filter('key', '<', new BsonTimestamp(1000, 1000))); await test.executeQuery(query_); test.assertOverlaysRead(0, 0); test.assertQueryReturned(query_); @@ -1111,8 +1124,8 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { query_ = query( 'coll', filter('key', 'in', [ - bsonTimestamp(1000, 1000), - bsonTimestamp(1001, 1000) + new BsonTimestamp(1000, 1000), + new BsonTimestamp(1001, 1000) ]) ); await test.executeQuery(query_); @@ -1125,8 +1138,8 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { 
query_ = query( 'coll', filter('key', 'not-in', [ - bsonTimestamp(1000, 1000), - bsonTimestamp(1001, 1000) + new BsonTimestamp(1000, 1000), + new BsonTimestamp(1001, 1000) ]) ); await test.executeQuery(query_); @@ -1144,16 +1157,16 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { await test.configureFieldsIndexes(index); await test.writeMutations( setMutation('coll/a', { - key: bsonBinaryData(1, new Uint8Array([1, 2, 3])) + key: new BsonBinaryData(1, new Uint8Array([1, 2, 3])) }), setMutation('coll/b', { - key: bsonBinaryData(1, new Uint8Array([1, 2])) + key: new BsonBinaryData(1, new Uint8Array([1, 2])) }), setMutation('coll/c', { - key: bsonBinaryData(1, new Uint8Array([1, 2, 4])) + key: new BsonBinaryData(1, new Uint8Array([1, 2, 4])) }), setMutation('coll/d', { - key: bsonBinaryData(2, new Uint8Array([1, 2])) + key: new BsonBinaryData(2, new Uint8Array([1, 2])) }) ); await test.backfillIndexes(); @@ -1170,7 +1183,7 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { query_ = query( 'coll', - filter('key', '==', bsonBinaryData(1, new Uint8Array([1, 2, 3]))) + filter('key', '==', new BsonBinaryData(1, new Uint8Array([1, 2, 3]))) ); await test.executeQuery(query_); test.assertOverlaysRead(1, 0, { @@ -1180,7 +1193,7 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { query_ = query( 'coll', - filter('key', '!=', bsonBinaryData(1, new Uint8Array([1, 2, 3]))) + filter('key', '!=', new BsonBinaryData(1, new Uint8Array([1, 2, 3]))) ); await test.executeQuery(query_); test.assertOverlaysRead(3, 0, { @@ -1192,7 +1205,7 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { query_ = query( 'coll', - filter('key', '>=', bsonBinaryData(1, new Uint8Array([1, 2, 3]))) + filter('key', '>=', new BsonBinaryData(1, new Uint8Array([1, 2, 3]))) ); await test.executeQuery(query_); test.assertOverlaysRead(3, 0, { @@ -1204,7 +1217,7 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => 
{ query_ = query( 'coll', - filter('key', '<', bsonBinaryData(1, new Uint8Array([1, 2]))) + filter('key', '<', new BsonBinaryData(1, new Uint8Array([1, 2]))) ); await test.executeQuery(query_); test.assertOverlaysRead(0, 0); @@ -1213,8 +1226,8 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { query_ = query( 'coll', filter('key', 'in', [ - bsonBinaryData(1, new Uint8Array([1, 2, 3])), - bsonBinaryData(1, new Uint8Array([1, 2])) + new BsonBinaryData(1, new Uint8Array([1, 2, 3])), + new BsonBinaryData(1, new Uint8Array([1, 2])) ]) ); await test.executeQuery(query_); @@ -1228,8 +1241,8 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { query_ = query( 'coll', filter('key', 'not-in', [ - bsonBinaryData(1, new Uint8Array([1, 2, 3])), - bsonBinaryData(1, new Uint8Array([1, 2])) + new BsonBinaryData(1, new Uint8Array([1, 2, 3])), + new BsonBinaryData(1, new Uint8Array([1, 2])) ]) ); await test.executeQuery(query_); @@ -1247,9 +1260,9 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { }); await test.configureFieldsIndexes(index); await test.writeMutations( - setMutation('coll/a', { key: int32(-1) }), - setMutation('coll/b', { key: int32(0) }), - setMutation('coll/c', { key: int32(1) }) + setMutation('coll/a', { key: new Int32Value(-1) }), + setMutation('coll/b', { key: new Int32Value(0) }), + setMutation('coll/c', { key: new Int32Value(1) }) ); await test.backfillIndexes(); @@ -1262,14 +1275,14 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { }); test.assertQueryReturned(query_, 'coll/a', 'coll/b', 'coll/c'); - query_ = query('coll', filter('key', '==', int32(0))); + query_ = query('coll', filter('key', '==', new Int32Value(0))); await test.executeQuery(query_); test.assertOverlaysRead(1, 0, { [key('coll/b').toString()]: MutationType.Set }); test.assertQueryReturned(query_, 'coll/b'); - query_ = query('coll', filter('key', '!=', int32(0))); + query_ = query('coll', filter('key', 
'!=', new Int32Value(0))); await test.executeQuery(query_); test.assertOverlaysRead(2, 0, { [key('coll/a').toString()]: MutationType.Set, @@ -1277,7 +1290,7 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { }); test.assertQueryReturned(query_, 'coll/a', 'coll/c'); - query_ = query('coll', filter('key', '>=', int32(0))); + query_ = query('coll', filter('key', '>=', new Int32Value(0))); await test.executeQuery(query_); test.assertOverlaysRead(2, 0, { [key('coll/b').toString()]: MutationType.Set, @@ -1285,12 +1298,15 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { }); test.assertQueryReturned(query_, 'coll/b', 'coll/c'); - query_ = query('coll', filter('key', '<', int32(-1))); + query_ = query('coll', filter('key', '<', new Int32Value(-1))); await test.executeQuery(query_); test.assertOverlaysRead(0, 0); test.assertQueryReturned(query_); - query_ = query('coll', filter('key', 'in', [int32(0), int32(1)])); + query_ = query( + 'coll', + filter('key', 'in', [new Int32Value(0), new Int32Value(1)]) + ); await test.executeQuery(query_); test.assertOverlaysRead(2, 0, { [key('coll/b').toString()]: MutationType.Set, @@ -1298,7 +1314,10 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { }); test.assertQueryReturned(query_, 'coll/b', 'coll/c'); - query_ = query('coll', filter('key', 'not-in', [int32(0), int32(1)])); + query_ = query( + 'coll', + filter('key', 'not-in', [new Int32Value(0), new Int32Value(1)]) + ); await test.executeQuery(query_); test.assertOverlaysRead(1, 0, { [key('coll/a').toString()]: MutationType.Set @@ -1313,9 +1332,9 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { }); await test.configureFieldsIndexes(index); await test.writeMutations( - setMutation('coll/a', { key: regex('a', 'i') }), - setMutation('coll/b', { key: regex('a', 'm') }), - setMutation('coll/c', { key: regex('b', 'i') }) + setMutation('coll/a', { key: new RegexValue('a', 'i') }), + 
setMutation('coll/b', { key: new RegexValue('a', 'm') }), + setMutation('coll/c', { key: new RegexValue('b', 'i') }) ); await test.backfillIndexes(); @@ -1328,14 +1347,14 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { }); test.assertQueryReturned(query_, 'coll/a', 'coll/b', 'coll/c'); - query_ = query('coll', filter('key', '==', regex('a', 'i'))); + query_ = query('coll', filter('key', '==', new RegexValue('a', 'i'))); await test.executeQuery(query_); test.assertOverlaysRead(1, 0, { [key('coll/a').toString()]: MutationType.Set }); test.assertQueryReturned(query_, 'coll/a'); - query_ = query('coll', filter('key', '!=', regex('a', 'i'))); + query_ = query('coll', filter('key', '!=', new RegexValue('a', 'i'))); await test.executeQuery(query_); test.assertOverlaysRead(2, 0, { [key('coll/b').toString()]: MutationType.Set, @@ -1343,7 +1362,7 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { }); test.assertQueryReturned(query_, 'coll/b', 'coll/c'); - query_ = query('coll', filter('key', '>=', regex('a', 'm'))); + query_ = query('coll', filter('key', '>=', new RegexValue('a', 'm'))); await test.executeQuery(query_); test.assertOverlaysRead(2, 0, { [key('coll/b').toString()]: MutationType.Set, @@ -1351,14 +1370,17 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { }); test.assertQueryReturned(query_, 'coll/b', 'coll/c'); - query_ = query('coll', filter('key', '<', regex('a', 'i'))); + query_ = query('coll', filter('key', '<', new RegexValue('a', 'i'))); await test.executeQuery(query_); test.assertOverlaysRead(0, 0); test.assertQueryReturned(query_); query_ = query( 'coll', - filter('key', 'in', [regex('a', 'i'), regex('a', 'm')]) + filter('key', 'in', [ + new RegexValue('a', 'i'), + new RegexValue('a', 'm') + ]) ); await test.executeQuery(query_); test.assertOverlaysRead(2, 0, { @@ -1369,7 +1391,10 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { query_ = query( 'coll', - 
filter('key', 'not-in', [regex('a', 'i'), regex('a', 'm')]) + filter('key', 'not-in', [ + new RegexValue('a', 'i'), + new RegexValue('a', 'm') + ]) ); await test.executeQuery(query_); test.assertOverlaysRead(1, 0, { @@ -1385,11 +1410,11 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { }); await test.configureFieldsIndexes(index); await test.writeMutations( - setMutation('coll/a', { key: minKey() }), - setMutation('coll/b', { key: minKey() }), + setMutation('coll/a', { key: MinKey.instance() }), + setMutation('coll/b', { key: MinKey.instance() }), setMutation('coll/c', { key: null }), setMutation('coll/d', { key: 1 }), - setMutation('coll/e', { key: maxKey() }) + setMutation('coll/e', { key: MaxKey.instance() }) ); await test.backfillIndexes(); @@ -1411,7 +1436,7 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { 'coll/e' ); - query_ = query('coll', filter('key', '==', minKey())); + query_ = query('coll', filter('key', '==', MinKey.instance())); await test.executeQuery(query_); test.assertOverlaysRead(2, 0, { [key('coll/a').toString()]: MutationType.Set, @@ -1419,7 +1444,7 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { }); test.assertQueryReturned(query_, 'coll/a', 'coll/b'); - query_ = query('coll', filter('key', '!=', minKey())); + query_ = query('coll', filter('key', '!=', MinKey.instance())); await test.executeQuery(query_); test.assertOverlaysRead(2, 0, { [key('coll/d').toString()]: MutationType.Set, @@ -1427,7 +1452,7 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { }); test.assertQueryReturned(query_, 'coll/d', 'coll/e'); - query_ = query('coll', filter('key', '>=', minKey())); + query_ = query('coll', filter('key', '>=', MinKey.instance())); await test.executeQuery(query_); test.assertOverlaysRead(2, 0, { [key('coll/a').toString()]: MutationType.Set, @@ -1435,12 +1460,12 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { }); 
test.assertQueryReturned(query_, 'coll/a', 'coll/b'); - query_ = query('coll', filter('key', '<', minKey())); + query_ = query('coll', filter('key', '<', MinKey.instance())); await test.executeQuery(query_); test.assertOverlaysRead(0, 0, {}); test.assertQueryReturned(query_); - query_ = query('coll', filter('key', 'in', [minKey()])); + query_ = query('coll', filter('key', 'in', [MinKey.instance()])); await test.executeQuery(query_); test.assertOverlaysRead(2, 0, { [key('coll/a').toString()]: MutationType.Set, @@ -1448,7 +1473,7 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { }); test.assertQueryReturned(query_, 'coll/a', 'coll/b'); - query_ = query('coll', filter('key', 'not-in', [minKey()])); + query_ = query('coll', filter('key', 'not-in', [MinKey.instance()])); await test.executeQuery(query_); test.assertOverlaysRead(2, 0, { [key('coll/d').toString()]: MutationType.Set, @@ -1464,11 +1489,11 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { }); await test.configureFieldsIndexes(index); await test.writeMutations( - setMutation('coll/a', { key: maxKey() }), - setMutation('coll/b', { key: maxKey() }), + setMutation('coll/a', { key: MaxKey.instance() }), + setMutation('coll/b', { key: MaxKey.instance() }), setMutation('coll/c', { key: null }), setMutation('coll/d', { key: 1 }), - setMutation('coll/e', { key: minKey() }) + setMutation('coll/e', { key: MinKey.instance() }) ); await test.backfillIndexes(); @@ -1490,7 +1515,7 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { 'coll/b' ); - query_ = query('coll', filter('key', '==', maxKey())); + query_ = query('coll', filter('key', '==', MaxKey.instance())); await test.executeQuery(query_); test.assertOverlaysRead(2, 0, { [key('coll/a').toString()]: MutationType.Set, @@ -1498,7 +1523,7 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { }); test.assertQueryReturned(query_, 'coll/a', 'coll/b'); - query_ = query('coll', filter('key', 
'!=', maxKey())); + query_ = query('coll', filter('key', '!=', MaxKey.instance())); await test.executeQuery(query_); test.assertOverlaysRead(2, 0, { [key('coll/d').toString()]: MutationType.Set, @@ -1506,7 +1531,7 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { }); test.assertQueryReturned(query_, 'coll/e', 'coll/d'); - query_ = query('coll', filter('key', '<=', maxKey())); + query_ = query('coll', filter('key', '<=', MaxKey.instance())); await test.executeQuery(query_); test.assertOverlaysRead(2, 0, { [key('coll/a').toString()]: MutationType.Set, @@ -1514,17 +1539,17 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { }); test.assertQueryReturned(query_, 'coll/a', 'coll/b'); - query_ = query('coll', filter('key', '>', maxKey())); + query_ = query('coll', filter('key', '>', MaxKey.instance())); await test.executeQuery(query_); test.assertOverlaysRead(0, 0, {}); test.assertQueryReturned(query_); - query_ = query('coll', filter('key', '<', maxKey())); + query_ = query('coll', filter('key', '<', MaxKey.instance())); await test.executeQuery(query_); test.assertOverlaysRead(0, 0, {}); test.assertQueryReturned(query_); - query_ = query('coll', filter('key', 'in', [maxKey()])); + query_ = query('coll', filter('key', 'in', [MaxKey.instance()])); await test.executeQuery(query_); test.assertOverlaysRead(2, 0, { [key('coll/a').toString()]: MutationType.Set, @@ -1532,7 +1557,7 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { }); test.assertQueryReturned(query_, 'coll/a', 'coll/b'); - query_ = query('coll', filter('key', 'not-in', [maxKey()])); + query_ = query('coll', filter('key', 'not-in', [MaxKey.instance()])); await test.executeQuery(query_); test.assertOverlaysRead(2, 0, { [key('coll/d').toString()]: MutationType.Set, @@ -1549,26 +1574,26 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { await test.configureFieldsIndexes(index); await test.writeMutations( - setMutation('coll/a', { 
key: minKey() }), - setMutation('coll/b', { key: int32(2) }), - setMutation('coll/c', { key: int32(1) }), - setMutation('coll/d', { key: bsonTimestamp(1000, 1001) }), - setMutation('coll/e', { key: bsonTimestamp(1000, 1000) }), + setMutation('coll/a', { key: MinKey.instance() }), + setMutation('coll/b', { key: new Int32Value(2) }), + setMutation('coll/c', { key: new Int32Value(1) }), + setMutation('coll/d', { key: new BsonTimestamp(1000, 1001) }), + setMutation('coll/e', { key: new BsonTimestamp(1000, 1000) }), setMutation('coll/f', { - key: bsonBinaryData(1, new Uint8Array([1, 2, 4])) + key: new BsonBinaryData(1, new Uint8Array([1, 2, 4])) }), setMutation('coll/g', { - key: bsonBinaryData(1, new Uint8Array([1, 2, 3])) + key: new BsonBinaryData(1, new Uint8Array([1, 2, 3])) }), setMutation('coll/h', { - key: bsonObjectId('507f191e810c19729de860eb') + key: new BsonObjectId('507f191e810c19729de860eb') }), setMutation('coll/i', { - key: bsonObjectId('507f191e810c19729de860ea') + key: new BsonObjectId('507f191e810c19729de860ea') }), - setMutation('coll/j', { key: regex('^bar', 'm') }), - setMutation('coll/k', { key: regex('^bar', 'i') }), - setMutation('coll/l', { key: maxKey() }) + setMutation('coll/j', { key: new RegexValue('^bar', 'm') }), + setMutation('coll/k', { key: new RegexValue('^bar', 'i') }), + setMutation('coll/l', { key: MaxKey.instance() }) ); await test.backfillIndexes(); @@ -1614,29 +1639,29 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { await test.writeMutations( setMutation('coll/a', { key: null }), - setMutation('coll/b', { key: minKey() }), + setMutation('coll/b', { key: MinKey.instance() }), setMutation('coll/c', { key: true }), setMutation('coll/d', { key: NaN }), - setMutation('coll/e', { key: int32(1) }), + setMutation('coll/e', { key: new Int32Value(1) }), setMutation('coll/f', { key: 2.0 }), setMutation('coll/g', { key: 3 }), setMutation('coll/h', { key: new Timestamp(100, 123456000) }), - setMutation('coll/i', { 
key: bsonTimestamp(1, 2) }), + setMutation('coll/i', { key: new BsonTimestamp(1, 2) }), setMutation('coll/j', { key: 'string' }), setMutation('coll/k', { key: blob(1, 2, 3) }), setMutation('coll/l', { - key: bsonBinaryData(1, new Uint8Array([1, 2, 3])) + key: new BsonBinaryData(1, new Uint8Array([1, 2, 3])) }), setMutation('coll/m', { key: ref('foo/bar') }), setMutation('coll/n', { - key: bsonObjectId('507f191e810c19729de860ea') + key: new BsonObjectId('507f191e810c19729de860ea') }), setMutation('coll/o', { key: new GeoPoint(1, 2) }), - setMutation('coll/p', { key: regex('^bar', 'm') }), + setMutation('coll/p', { key: new RegexValue('^bar', 'm') }), setMutation('coll/q', { key: [2, 'foo'] }), setMutation('coll/r', { key: vector([1, 2, 3]) }), setMutation('coll/s', { key: { bar: 1, foo: 2 } }), - setMutation('coll/t', { key: maxKey() }) + setMutation('coll/t', { key: MaxKey.instance() }) ); await test.backfillIndexes(); diff --git a/packages/firestore/test/unit/model/document.test.ts b/packages/firestore/test/unit/model/document.test.ts index 2c2387cca63..f67e9d971a0 100644 --- a/packages/firestore/test/unit/model/document.test.ts +++ b/packages/firestore/test/unit/model/document.test.ts @@ -18,14 +18,14 @@ import { expect } from 'chai'; import { - bsonBinaryData, - bsonObjectId, - bsonTimestamp, - int32, - maxKey, - minKey, - regex -} from '../../../src/lite-api/field_value_impl'; + BsonBinaryData, + BsonObjectId, + BsonTimestamp, + Int32Value, + MaxKey, + MinKey, + RegexValue +} from '../../../src'; import { doc, expectEqual, @@ -55,26 +55,26 @@ describe('Document', () => { it('can be constructed with bson types', () => { const data = { - objectId: bsonObjectId('foo'), - binary: bsonBinaryData(1, new Uint8Array([1, 2, 3])), - timestamp: bsonTimestamp(1, 2), - min: minKey(), - max: maxKey(), - regex: regex('a', 'b'), - int32: int32(1) + objectId: new BsonObjectId('foo'), + binary: new BsonBinaryData(1, new Uint8Array([1, 2, 3])), + timestamp: new BsonTimestamp(1, 
2), + min: MinKey.instance(), + max: MaxKey.instance(), + regex: new RegexValue('a', 'b'), + int32: new Int32Value(1) }; const document = doc('rooms/Eros', 1, data); const value = document.data; expect(value.value).to.deep.equal( wrap({ - objectId: bsonObjectId('foo'), - binary: bsonBinaryData(1, new Uint8Array([1, 2, 3])), - timestamp: bsonTimestamp(1, 2), - min: minKey(), - max: maxKey(), - regex: regex('a', 'b'), - int32: int32(1) + objectId: new BsonObjectId('foo'), + binary: new BsonBinaryData(1, new Uint8Array([1, 2, 3])), + timestamp: new BsonTimestamp(1, 2), + min: MinKey.instance(), + max: MaxKey.instance(), + regex: new RegexValue('a', 'b'), + int32: new Int32Value(1) }) ); expect(value).not.to.equal(data); diff --git a/packages/firestore/test/unit/model/object_value.test.ts b/packages/firestore/test/unit/model/object_value.test.ts index 13cfa02131b..40b18893e68 100644 --- a/packages/firestore/test/unit/model/object_value.test.ts +++ b/packages/firestore/test/unit/model/object_value.test.ts @@ -18,15 +18,15 @@ import { expect } from 'chai'; import { - vector, - bsonObjectId, - bsonBinaryData, - bsonTimestamp, - int32, - regex, - minKey, - maxKey -} from '../../../src/lite-api/field_value_impl'; + BsonObjectId, + BsonBinaryData, + BsonTimestamp, + RegexValue, + Int32Value, + MaxKey, + MinKey +} from '../../../src'; +import { vector } from '../../../src/lite-api/field_value_impl'; import { extractFieldMask, ObjectValue } from '../../../src/model/object_value'; import { TypeOrder } from '../../../src/model/type_order'; import { typeOrder } from '../../../src/model/values'; @@ -38,13 +38,13 @@ describe('ObjectValue', () => { foo: { a: 1, b: true, c: 'string' }, embedding: vector([1]), bson: { - objectId: bsonObjectId('foo'), - binary: bsonBinaryData(1, new Uint8Array([1, 2, 3])), - timestamp: bsonTimestamp(1, 2), - min: minKey(), - max: maxKey(), - regex: regex('a', 'b'), - int32: int32(1) + objectId: new BsonObjectId('foo'), + binary: new BsonBinaryData(1, 
new Uint8Array([1, 2, 3])), + timestamp: new BsonTimestamp(1, 2), + min: MinKey.instance(), + max: MaxKey.instance(), + regex: new RegexValue('a', 'b'), + int32: new Int32Value(1) } }); @@ -102,30 +102,36 @@ describe('ObjectValue', () => { expect(objValue.field(field('bson'))!).to.deep.equal( wrap({ - objectId: bsonObjectId('foo'), - binary: bsonBinaryData(1, new Uint8Array([1, 2, 3])), - timestamp: bsonTimestamp(1, 2), - min: minKey(), - max: maxKey(), - regex: regex('a', 'b'), - int32: int32(1) + objectId: new BsonObjectId('foo'), + binary: new BsonBinaryData(1, new Uint8Array([1, 2, 3])), + timestamp: new BsonTimestamp(1, 2), + min: MinKey.instance(), + max: MaxKey.instance(), + regex: new RegexValue('a', 'b'), + int32: new Int32Value(1) }) ); expect(objValue.field(field('bson.objectId'))!).to.deep.equal( - wrap(bsonObjectId('foo')) + wrap(new BsonObjectId('foo')) ); expect(objValue.field(field('bson.binary'))!).to.deep.equal( - wrap(bsonBinaryData(1, new Uint8Array([1, 2, 3]))) + wrap(new BsonBinaryData(1, new Uint8Array([1, 2, 3]))) ); expect(objValue.field(field('bson.timestamp'))!).to.deep.equal( - wrap(bsonTimestamp(1, 2)) + wrap(new BsonTimestamp(1, 2)) + ); + expect(objValue.field(field('bson.min'))!).to.deep.equal( + wrap(MinKey.instance()) + ); + expect(objValue.field(field('bson.max'))!).to.deep.equal( + wrap(MaxKey.instance()) ); - expect(objValue.field(field('bson.min'))!).to.deep.equal(wrap(minKey())); - expect(objValue.field(field('bson.max'))!).to.deep.equal(wrap(maxKey())); expect(objValue.field(field('bson.regex'))!).to.deep.equal( - wrap(regex('a', 'b')) + wrap(new RegexValue('a', 'b')) + ); + expect(objValue.field(field('bson.int32'))!).to.deep.equal( + wrap(new Int32Value(1)) ); - expect(objValue.field(field('bson.int32'))!).to.deep.equal(wrap(int32(1))); }); it('can overwrite existing fields', () => { @@ -234,56 +240,56 @@ describe('ObjectValue', () => { it('can handle bson types in ObjectValue', () => { const objValue = ObjectValue.empty(); 
// Add new fields - objValue.set(field('objectId'), wrap(bsonObjectId('foo-value'))); + objValue.set(field('objectId'), wrap(new BsonObjectId('foo-value'))); objValue.set( field('binary'), - wrap(bsonBinaryData(1, new Uint8Array([1, 2, 3]))) + wrap(new BsonBinaryData(1, new Uint8Array([1, 2, 3]))) ); - objValue.set(field('timestamp'), wrap(bsonTimestamp(1, 2))); - objValue.set(field('regex'), wrap(regex('a', 'b'))); - objValue.set(field('int32'), wrap(int32(1))); - objValue.set(field('min'), wrap(minKey())); - objValue.set(field('max'), wrap(maxKey())); + objValue.set(field('timestamp'), wrap(new BsonTimestamp(1, 2))); + objValue.set(field('regex'), wrap(new RegexValue('a', 'b'))); + objValue.set(field('int32'), wrap(new Int32Value(1))); + objValue.set(field('min'), wrap(MinKey.instance())); + objValue.set(field('max'), wrap(MaxKey.instance())); assertObjectEquals(objValue, { - objectId: bsonObjectId('foo-value'), - binary: bsonBinaryData(1, new Uint8Array([1, 2, 3])), - timestamp: bsonTimestamp(1, 2), - regex: regex('a', 'b'), - int32: int32(1), - min: minKey(), - max: maxKey() + objectId: new BsonObjectId('foo-value'), + binary: new BsonBinaryData(1, new Uint8Array([1, 2, 3])), + timestamp: new BsonTimestamp(1, 2), + regex: new RegexValue('a', 'b'), + int32: new Int32Value(1), + min: MinKey.instance(), + max: MaxKey.instance() }); // Overwrite existing fields - objValue.set(field('objectId'), wrap(bsonObjectId('new-foo-value'))); + objValue.set(field('objectId'), wrap(new BsonObjectId('new-foo-value'))); // Create nested objects objValue.set( field('foo.binary'), - wrap(bsonBinaryData(2, new Uint8Array([1, 2, 3]))) + wrap(new BsonBinaryData(2, new Uint8Array([1, 2, 3]))) ); - objValue.set(field('foo.timestamp'), wrap(bsonTimestamp(1, 2))); + objValue.set(field('foo.timestamp'), wrap(new BsonTimestamp(1, 2))); // Delete fields objValue.delete(field('binary')); // overwrite nested objects - objValue.set(field('foo.timestamp'), wrap(bsonTimestamp(2, 1))); + 
objValue.set(field('foo.timestamp'), wrap(new BsonTimestamp(2, 1))); // Overwrite primitive values to create objects objValue.set(field('min'), wrap(null)); assertObjectEquals(objValue, { - objectId: bsonObjectId('new-foo-value'), - timestamp: bsonTimestamp(1, 2), - regex: regex('a', 'b'), - int32: int32(1), + objectId: new BsonObjectId('new-foo-value'), + timestamp: new BsonTimestamp(1, 2), + regex: new RegexValue('a', 'b'), + int32: new Int32Value(1), min: null, - max: maxKey(), + max: MaxKey.instance(), foo: { - binary: bsonBinaryData(2, new Uint8Array([1, 2, 3])), - timestamp: bsonTimestamp(2, 1) + binary: new BsonBinaryData(2, new Uint8Array([1, 2, 3])), + timestamp: new BsonTimestamp(2, 1) } }); }); @@ -294,13 +300,13 @@ describe('ObjectValue', () => { map: { a: 1, b: true, c: 'string', nested: { d: 'e' } }, emptymap: {}, bar: { - objectId: bsonObjectId('foo'), - binary: bsonBinaryData(1, new Uint8Array([1, 2, 3])), - timestamp: bsonTimestamp(1, 2), - min: minKey(), - max: maxKey(), - regex: regex('a', 'b'), - int32: int32(1) + objectId: new BsonObjectId('foo'), + binary: new BsonBinaryData(1, new Uint8Array([1, 2, 3])), + timestamp: new BsonTimestamp(1, 2), + min: MinKey.instance(), + max: MaxKey.instance(), + regex: new RegexValue('a', 'b'), + int32: new Int32Value(1) } }); const expectedMask = mask( diff --git a/packages/firestore/test/unit/model/values.test.ts b/packages/firestore/test/unit/model/values.test.ts index 0d93d335ded..048ae762a98 100644 --- a/packages/firestore/test/unit/model/values.test.ts +++ b/packages/firestore/test/unit/model/values.test.ts @@ -17,25 +17,19 @@ import { expect } from 'chai'; -import { GeoPoint, Timestamp } from '../../../src'; -import { DatabaseId } from '../../../src/core/database_info'; -import { BsonBinaryData } from '../../../src/lite-api/bson_binary_data'; -import { BsonObjectId } from '../../../src/lite-api/bson_object_Id'; -import { BsonTimestamp } from '../../../src/lite-api/bson_timestamp_value'; import { - 
vector, - regex, - bsonTimestamp, - int32, - bsonBinaryData, - bsonObjectId, - minKey, - maxKey -} from '../../../src/lite-api/field_value_impl'; -import { Int32Value } from '../../../src/lite-api/int32_value'; -import { MaxKey } from '../../../src/lite-api/max_key'; -import { MinKey } from '../../../src/lite-api/min_key'; -import { RegexValue } from '../../../src/lite-api/regex_value'; + GeoPoint, + Timestamp, + BsonBinaryData, + BsonTimestamp, + BsonObjectId, + RegexValue, + Int32Value, + MaxKey, + MinKey +} from '../../../src'; +import { DatabaseId } from '../../../src/core/database_info'; +import { vector } from '../../../src/lite-api/field_value_impl'; import { serverTimestamp } from '../../../src/model/server_timestamps'; import { canonicalId, @@ -79,7 +73,7 @@ describe('Values', () => { [wrap(true), wrap(true)], [wrap(false), wrap(false)], [wrap(null), wrap(null)], - [wrap(minKey()), wrap(minKey()), wrap(MinKey.instance())], + [wrap(MinKey.instance()), wrap(MinKey.instance())], [wrap(0 / 0), wrap(Number.NaN), wrap(NaN)], // -0.0 and 0.0 order the same but are not considered equal. 
[wrap(-0.0)], @@ -117,20 +111,20 @@ describe('Values', () => { [wrap({ foo: 1 })], [wrap(vector([]))], [wrap(vector([1, 2.3, -4.0]))], - [wrap(regex('^foo', 'i')), wrap(new RegexValue('^foo', 'i'))], - [wrap(bsonTimestamp(57, 4)), wrap(new BsonTimestamp(57, 4))], + [wrap(new RegexValue('^foo', 'i')), wrap(new RegexValue('^foo', 'i'))], + [wrap(new BsonTimestamp(57, 4)), wrap(new BsonTimestamp(57, 4))], [ - wrap(bsonBinaryData(128, Uint8Array.from([7, 8, 9]))), wrap(new BsonBinaryData(128, Uint8Array.from([7, 8, 9]))), - wrap(bsonBinaryData(128, Buffer.from([7, 8, 9]))), + wrap(new BsonBinaryData(128, Uint8Array.from([7, 8, 9]))), + wrap(new BsonBinaryData(128, Buffer.from([7, 8, 9]))), wrap(new BsonBinaryData(128, Buffer.from([7, 8, 9]))) ], [ - wrap(bsonObjectId('123456789012')), + wrap(new BsonObjectId('123456789012')), wrap(new BsonObjectId('123456789012')) ], - [wrap(int32(255)), wrap(new Int32Value(255))], - [wrap(maxKey()), wrap(maxKey()), wrap(MaxKey.instance())] + [wrap(new Int32Value(255)), wrap(new Int32Value(255))], + [wrap(MaxKey.instance()), wrap(MaxKey.instance())] ]; expectEqualitySets(values, (v1, v2) => valueEquals(v1, v2)); }); @@ -170,7 +164,7 @@ describe('Values', () => { [wrap(null)], // MinKey is after null - [wrap(minKey())], + [wrap(MinKey.instance())], // booleans [wrap(false)], @@ -183,23 +177,23 @@ describe('Values', () => { [wrap(Number.MIN_SAFE_INTEGER - 1)], [wrap(Number.MIN_SAFE_INTEGER)], // 64-bit and 32-bit integers order together numerically. - [{ integerValue: -2147483648 }, wrap(int32(-2147483648))], + [{ integerValue: -2147483648 }, wrap(new Int32Value(-2147483648))], [wrap(-1.1)], // Integers, Int32Values and Doubles order the same. - [{ integerValue: -1 }, { doubleValue: -1 }, wrap(int32(-1))], + [{ integerValue: -1 }, { doubleValue: -1 }, wrap(new Int32Value(-1))], [wrap(-Number.MIN_VALUE)], // zeros all compare the same. 
[ { integerValue: 0 }, { doubleValue: 0 }, { doubleValue: -0 }, - wrap(int32(0)) + wrap(new Int32Value(0)) ], [wrap(Number.MIN_VALUE)], - [{ integerValue: 1 }, { doubleValue: 1.0 }, wrap(int32(1))], + [{ integerValue: 1 }, { doubleValue: 1.0 }, wrap(new Int32Value(1))], [wrap(1.1)], - [wrap(int32(2))], - [wrap(int32(2147483647))], + [wrap(new Int32Value(2))], + [wrap(new Int32Value(2147483647))], [wrap(Number.MAX_SAFE_INTEGER)], [wrap(Number.MAX_SAFE_INTEGER + 1)], [wrap(Infinity)], @@ -215,9 +209,9 @@ describe('Values', () => { ], // request timestamp - [wrap(bsonTimestamp(123, 4))], - [wrap(bsonTimestamp(123, 5))], - [wrap(bsonTimestamp(124, 0))], + [wrap(new BsonTimestamp(123, 4))], + [wrap(new BsonTimestamp(123, 5))], + [wrap(new BsonTimestamp(124, 0))], // server timestamps come after all concrete timestamps. [serverTimestamp(Timestamp.fromDate(date1), null)], @@ -243,11 +237,11 @@ describe('Values', () => { [wrap(blob(255))], [ - wrap(bsonBinaryData(5, Buffer.from([1, 2, 3]))), - wrap(bsonBinaryData(5, new Uint8Array([1, 2, 3]))) + wrap(new BsonBinaryData(5, Buffer.from([1, 2, 3]))), + wrap(new BsonBinaryData(5, new Uint8Array([1, 2, 3]))) ], - [wrap(bsonBinaryData(7, Buffer.from([1])))], - [wrap(bsonBinaryData(7, new Uint8Array([2])))], + [wrap(new BsonBinaryData(7, Buffer.from([1])))], + [wrap(new BsonBinaryData(7, new Uint8Array([2])))], // reference values [refValue(dbId('p1', 'd1'), key('c1/doc1'))], @@ -258,11 +252,11 @@ describe('Values', () => { [refValue(dbId('p2', 'd1'), key('c1/doc1'))], // ObjectId - [wrap(bsonObjectId('foo')), wrap(bsonObjectId('foo'))], + [wrap(new BsonObjectId('foo')), wrap(new BsonObjectId('foo'))], // TODO(Mila/BSON): uncomment after string sort bug is fixed - // [wrap(bsonObjectId('Ḟoo'))], // with latin capital letter f with dot above - // [wrap(bsonObjectId('foo\u0301'))], // with combining acute accent - [wrap(bsonObjectId('xyz'))], + // [wrap(new BsonObjectId('Ḟoo'))], // with latin capital letter f with dot above + // 
[wrap(new BsonObjectId('foo\u0301'))], // with combining acute accent + [wrap(new BsonObjectId('xyz'))], // geo points [wrap(new GeoPoint(-90, -180))], @@ -279,10 +273,10 @@ describe('Values', () => { [wrap(new GeoPoint(90, 180))], // regular expressions - [wrap(regex('a', 'bar1'))], - [wrap(regex('foo', 'bar1'))], - [wrap(regex('foo', 'bar2'))], - [wrap(regex('go', 'bar1'))], + [wrap(new RegexValue('a', 'bar1'))], + [wrap(new RegexValue('foo', 'bar1'))], + [wrap(new RegexValue('foo', 'bar2'))], + [wrap(new RegexValue('go', 'bar1'))], // arrays [wrap([])], @@ -305,7 +299,7 @@ describe('Values', () => { [wrap({ foo: '0' })], // MaxKey - [wrap(maxKey())] + [wrap(MaxKey.instance())] ]; expectCorrectComparisonGroups( @@ -412,28 +406,34 @@ describe('Values', () => { }, { expectedByteSize: 27, - elements: [wrap(regex('a', 'b')), wrap(regex('c', 'd'))] + elements: [ + wrap(new RegexValue('a', 'b')), + wrap(new RegexValue('c', 'd')) + ] }, { expectedByteSize: 13, - elements: [wrap(bsonObjectId('foo')), wrap(bsonObjectId('bar'))] + elements: [wrap(new BsonObjectId('foo')), wrap(new BsonObjectId('bar'))] }, { expectedByteSize: 53, - elements: [wrap(bsonTimestamp(1, 2)), wrap(bsonTimestamp(3, 4))] + elements: [wrap(new BsonTimestamp(1, 2)), wrap(new BsonTimestamp(3, 4))] }, { expectedByteSize: 8, - elements: [wrap(int32(1)), wrap(int32(2147483647))] + elements: [wrap(new Int32Value(1)), wrap(new Int32Value(2147483647))] }, { expectedByteSize: 16, elements: [ - wrap(bsonBinaryData(1, new Uint8Array([127, 128]))), - wrap(bsonBinaryData(128, new Uint8Array([1, 2]))) + wrap(new BsonBinaryData(1, new Uint8Array([127, 128]))), + wrap(new BsonBinaryData(128, new Uint8Array([1, 2]))) ] }, - { expectedByteSize: 11, elements: [wrap(minKey()), wrap(maxKey())] } + { + expectedByteSize: 11, + elements: [wrap(MinKey.instance()), wrap(MaxKey.instance())] + } ]; for (const group of equalityGroups) { @@ -464,11 +464,11 @@ describe('Values', () => { [wrap({ a: 'a', b: 'b' }), wrap({ a: 'a', b: 
'b', c: 'c' })], [wrap({ a: 'a', b: 'b' }), wrap({ a: 'a', b: 'b', c: 'c' })], [wrap(vector([2, 3])), wrap(vector([1, 2, 3]))], - [wrap(regex('a', 'b')), wrap(regex('cc', 'dd'))], - [wrap(bsonObjectId('foo')), wrap(bsonObjectId('foobar'))], + [wrap(new RegexValue('a', 'b')), wrap(new RegexValue('cc', 'dd'))], + [wrap(new BsonObjectId('foo')), wrap(new BsonObjectId('foobar'))], [ - wrap(bsonBinaryData(128, new Uint8Array([127, 128]))), - wrap(bsonBinaryData(1, new Uint8Array([1, 2, 3]))) + wrap(new BsonBinaryData(128, new Uint8Array([127, 128]))), + wrap(new BsonBinaryData(1, new Uint8Array([1, 2, 3]))) ] ]; @@ -489,7 +489,7 @@ describe('Values', () => { [valuesGetLowerBound({ nullValue: 'NULL_VALUE' }), wrap(null)], // lower bound of MinKey is MinKey - [valuesGetLowerBound(MIN_KEY_VALUE), wrap(minKey())], + [valuesGetLowerBound(MIN_KEY_VALUE), wrap(MinKey.instance())], // booleans [valuesGetLowerBound({ booleanValue: true }), wrap(false)], @@ -512,11 +512,11 @@ describe('Values', () => { // bson timestamps [ - valuesGetLowerBound(wrap(bsonTimestamp(4294967295, 4294967295))), + valuesGetLowerBound(wrap(new BsonTimestamp(4294967295, 4294967295))), MIN_BSON_TIMESTAMP_VALUE, - wrap(bsonTimestamp(0, 0)) + wrap(new BsonTimestamp(0, 0)) ], - [wrap(bsonTimestamp(1, 1))], + [wrap(new BsonTimestamp(1, 1))], // strings [valuesGetLowerBound({ stringValue: 'Z' }), wrap('')], @@ -529,11 +529,11 @@ describe('Values', () => { // bson binary data [ valuesGetLowerBound( - wrap(bsonBinaryData(128, new Uint8Array([128, 128]))) + wrap(new BsonBinaryData(128, new Uint8Array([128, 128]))) ), MIN_BSON_BINARY_VALUE ], - [wrap(bsonBinaryData(0, new Uint8Array([0])))], + [wrap(new BsonBinaryData(0, new Uint8Array([0])))], // resource names [ @@ -544,11 +544,11 @@ describe('Values', () => { // bson object ids [ - valuesGetLowerBound(wrap(bsonObjectId('ZZZ'))), - wrap(bsonObjectId('')), + valuesGetLowerBound(wrap(new BsonObjectId('ZZZ'))), + wrap(new BsonObjectId('')), MIN_BSON_OBJECT_ID_VALUE 
], - [wrap(bsonObjectId('a'))], + [wrap(new BsonObjectId('a'))], // geo points [ @@ -559,11 +559,11 @@ describe('Values', () => { // regular expressions [ - valuesGetLowerBound(wrap(regex('ZZZ', 'i'))), - wrap(regex('', '')), + valuesGetLowerBound(wrap(new RegexValue('ZZZ', 'i'))), + wrap(new RegexValue('', '')), MIN_REGEX_VALUE ], - [wrap(regex('a', 'i'))], + [wrap(new RegexValue('a', 'i'))], // arrays [valuesGetLowerBound({ arrayValue: {} }), wrap([])], @@ -590,7 +590,7 @@ describe('Values', () => { [valuesGetLowerBound({ mapValue: {} }), wrap({})], // MaxKey - [wrap(maxKey())] + [wrap(MaxKey.instance())] ]; expectCorrectComparisonGroups( @@ -607,7 +607,10 @@ describe('Values', () => { [wrap(null)], // upper value of null is MinKey - [valuesGetUpperBound({ nullValue: 'NULL_VALUE' }), wrap(minKey())], + [ + valuesGetUpperBound({ nullValue: 'NULL_VALUE' }), + wrap(MinKey.instance()) + ], // upper value of MinKey is boolean `false` [valuesGetUpperBound(MIN_KEY_VALUE), wrap(false)], @@ -617,7 +620,7 @@ describe('Values', () => { [valuesGetUpperBound({ booleanValue: false })], // numbers - [wrap(int32(2147483647))], //largest int32 value + [wrap(new Int32Value(2147483647))], //largest int32 value [wrap(Number.MAX_SAFE_INTEGER)], [wrap(Number.POSITIVE_INFINITY)], [valuesGetUpperBound({ doubleValue: NaN })], @@ -627,7 +630,7 @@ describe('Values', () => { [valuesGetUpperBound({ timestampValue: {} })], // bson timestamps - [wrap(bsonTimestamp(4294967295, 4294967295))], // largest bson timestamp value + [wrap(new BsonTimestamp(4294967295, 4294967295))], // largest bson timestamp value [valuesGetUpperBound(MIN_BSON_TIMESTAMP_VALUE)], // strings @@ -639,7 +642,7 @@ describe('Values', () => { [valuesGetUpperBound({ bytesValue: '' })], // bson binary data - [wrap(bsonBinaryData(128, new Uint8Array([255, 255, 255])))], + [wrap(new BsonBinaryData(128, new Uint8Array([255, 255, 255])))], [valuesGetUpperBound(MIN_BSON_BINARY_VALUE)], // resource names @@ -647,7 +650,7 @@ 
describe('Values', () => { [valuesGetUpperBound({ referenceValue: '' })], // bson object ids - [wrap(bsonObjectId('foo'))], + [wrap(new BsonObjectId('foo'))], [valuesGetUpperBound(MIN_BSON_OBJECT_ID_VALUE)], // geo points @@ -655,7 +658,7 @@ describe('Values', () => { [valuesGetUpperBound({ geoPointValue: {} })], // regular expressions - [wrap(regex('a', 'i'))], + [wrap(new RegexValue('a', 'i'))], [valuesGetUpperBound(MIN_REGEX_VALUE)], // arrays @@ -670,7 +673,7 @@ describe('Values', () => { [wrap({ 'a': 'b' })], // MaxKey - [wrap(maxKey())] + [wrap(MaxKey.instance())] ]; expectCorrectComparisonGroups( @@ -712,19 +715,21 @@ describe('Values', () => { expect( canonicalId(wrap({ 'a': ['b', { 'c': new GeoPoint(30, 60) }] })) ).to.equal('{a:[b,{c:geo(30,60)}]}'); - expect(canonicalId(wrap(regex('a', 'b')))).to.equal( + expect(canonicalId(wrap(new RegexValue('a', 'b')))).to.equal( '{__regex__:{options:b,pattern:a}}' ); - expect(canonicalId(wrap(bsonObjectId('foo')))).to.equal('{__oid__:foo}'); - expect(canonicalId(wrap(bsonTimestamp(1, 2)))).to.equal( + expect(canonicalId(wrap(new BsonObjectId('foo')))).to.equal( + '{__oid__:foo}' + ); + expect(canonicalId(wrap(new BsonTimestamp(1, 2)))).to.equal( '{__request_timestamp__:{increment:2,seconds:1}}' ); - expect(canonicalId(wrap(int32(1)))).to.equal('{__int__:1}'); + expect(canonicalId(wrap(new Int32Value(1)))).to.equal('{__int__:1}'); expect( - canonicalId(wrap(bsonBinaryData(1, new Uint8Array([1, 2, 3])))) + canonicalId(wrap(new BsonBinaryData(1, new Uint8Array([1, 2, 3])))) ).to.equal('{__binary__:AQECAw==}'); - expect(canonicalId(wrap(minKey()))).to.equal('{__min__:null}'); - expect(canonicalId(wrap(maxKey()))).to.equal('{__max__:null}'); + expect(canonicalId(wrap(MinKey.instance()))).to.equal('{__min__:null}'); + expect(canonicalId(wrap(MaxKey.instance()))).to.equal('{__max__:null}'); }); it('canonical IDs ignore sort order', () => { diff --git a/packages/firestore/test/unit/remote/serializer.helper.ts 
b/packages/firestore/test/unit/remote/serializer.helper.ts index 9c116549928..24d7b039d0c 100644 --- a/packages/firestore/test/unit/remote/serializer.helper.ts +++ b/packages/firestore/test/unit/remote/serializer.helper.ts @@ -20,11 +20,18 @@ import { expect } from 'chai'; import { arrayRemove, arrayUnion, + BsonBinaryData, + BsonObjectId, + BsonTimestamp, Bytes, DocumentReference, GeoPoint, increment, + Int32Value, + MaxKey, + MinKey, refEqual, + RegexValue, serverTimestamp, Timestamp } from '../../../src'; @@ -52,16 +59,7 @@ import { } from '../../../src/core/query'; import { SnapshotVersion } from '../../../src/core/snapshot_version'; import { Target, targetEquals, TargetImpl } from '../../../src/core/target'; -import { - bsonBinaryData, - bsonObjectId, - bsonTimestamp, - int32, - maxKey, - minKey, - regex, - vector -} from '../../../src/lite-api/field_value_impl'; +import { vector } from '../../../src/lite-api/field_value_impl'; import { parseQueryValue } from '../../../src/lite-api/user_data_reader'; import { TargetData, TargetPurpose } from '../../../src/local/target_data'; import { FieldMask } from '../../../src/model/field_mask'; @@ -577,12 +575,12 @@ export function serializerTest( it('converts BSON types in mapValue', () => { const examples = [ - bsonObjectId('foo'), - bsonTimestamp(1, 2), - minKey(), - maxKey(), - regex('a', 'b'), - int32(1) + new BsonObjectId('foo'), + new BsonTimestamp(1, 2), + MinKey.instance(), + MaxKey.instance(), + new RegexValue('a', 'b'), + new Int32Value(1) ]; for (const example of examples) { @@ -598,7 +596,7 @@ export function serializerTest( } // BsonBinaryData will be serialized differently Proto3Json VS. 
regular Protobuf format - const bsonBinary = bsonBinaryData(1, new Uint8Array([1, 2, 3])); + const bsonBinary = new BsonBinaryData(1, new Uint8Array([1, 2, 3])); const expectedJson: api.Value = { mapValue: { fields: { From 4600a34360e0edff345068ad7a44403d9cd1b5fd Mon Sep 17 00:00:00 2001 From: Ehsan Date: Fri, 2 May 2025 15:07:37 -0700 Subject: [PATCH 6/9] Create seven-actors-kneel.md --- .changeset/seven-actors-kneel.md | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 .changeset/seven-actors-kneel.md diff --git a/.changeset/seven-actors-kneel.md b/.changeset/seven-actors-kneel.md new file mode 100644 index 00000000000..6bc97d0e3ed --- /dev/null +++ b/.changeset/seven-actors-kneel.md @@ -0,0 +1,5 @@ +--- +"@firebase/firestore": minor +--- + +feat: Adds support for MinKey, MaxKey, RegexValue, Int32Value, BsonObjectId, BsonTimestamp, and BsonBinaryData. From cbba951db7ced310ff052d88241fa682ad2e971a Mon Sep 17 00:00:00 2001 From: milaGGL <107142260+milaGGL@users.noreply.github.com> Date: Mon, 5 May 2025 12:40:20 -0400 Subject: [PATCH 7/9] resolve TODOs --- packages/firestore/src/model/values.ts | 5 +- .../test/integration/api/database.test.ts | 103 +++++++++--------- .../test/integration/util/helpers.ts | 3 +- .../firestore/test/unit/model/values.test.ts | 12 +- 4 files changed, 60 insertions(+), 63 deletions(-) diff --git a/packages/firestore/src/model/values.ts b/packages/firestore/src/model/values.ts index 848325c3521..fca9c34b9ea 100644 --- a/packages/firestore/src/model/values.ts +++ b/packages/firestore/src/model/values.ts @@ -644,7 +644,7 @@ function compareRegex(left: Value, right: Value): number { ]?.stringValue ?? ''; // First order by patterns, and then options. - const patternDiff = primitiveComparator(leftPattern, rightPattern); + const patternDiff = compareUtf8Strings(leftPattern, rightPattern); return patternDiff !== 0 ? 
patternDiff : primitiveComparator(leftOptions, rightOptions); @@ -656,8 +656,7 @@ function compareBsonObjectIds(left: Value, right: Value): number { const rightOid = right.mapValue!.fields?.[RESERVED_BSON_OBJECT_ID_KEY]?.stringValue ?? ''; - // TODO(Mila/BSON): use compareUtf8Strings once the bug fix is merged. - return primitiveComparator(leftOid, rightOid); + return compareUtf8Strings(leftOid, rightOid); } /** diff --git a/packages/firestore/test/integration/api/database.test.ts b/packages/firestore/test/integration/api/database.test.ts index 8d9167cee9c..ce5a3d34eae 100644 --- a/packages/firestore/test/integration/api/database.test.ts +++ b/packages/firestore/test/integration/api/database.test.ts @@ -2818,9 +2818,6 @@ apiDescribe('Database', persistence => { settings, testDocs, async coll => { - // Populate the cache with all docs first - await getDocs(coll); - let orderedQuery = query( coll, where('key', '>', new BsonObjectId('507f191e810c19729de860ea')), @@ -2833,6 +2830,7 @@ apiDescribe('Database', persistence => { testDocs['b'] ]); await assertSDKQueryResultsConsistentWithBackend( + coll, orderedQuery, testDocs, toIds(snapshot) @@ -2853,6 +2851,7 @@ apiDescribe('Database', persistence => { testDocs['a'] ]); await assertSDKQueryResultsConsistentWithBackend( + coll, orderedQuery, testDocs, toIds(snapshot) @@ -2873,9 +2872,6 @@ apiDescribe('Database', persistence => { settings, testDocs, async coll => { - // Populate the cache with all docs first - await getDocs(coll); - let orderedQuery = query( coll, where('key', '>=', new Int32Value(1)), @@ -2888,6 +2884,7 @@ apiDescribe('Database', persistence => { testDocs['b'] ]); await assertSDKQueryResultsConsistentWithBackend( + coll, orderedQuery, testDocs, toIds(snapshot) @@ -2905,6 +2902,7 @@ apiDescribe('Database', persistence => { testDocs['a'] ]); await assertSDKQueryResultsConsistentWithBackend( + coll, orderedQuery, testDocs, toIds(snapshot) @@ -2925,9 +2923,6 @@ apiDescribe('Database', persistence => { 
settings, testDocs, async coll => { - // Populate the cache with all docs first - await getDocs(coll); - let orderedQuery = query( coll, where('key', '>', new BsonTimestamp(1, 1)), @@ -2940,6 +2935,7 @@ apiDescribe('Database', persistence => { testDocs['b'] ]); await assertSDKQueryResultsConsistentWithBackend( + coll, orderedQuery, testDocs, toIds(snapshot) @@ -2957,6 +2953,7 @@ apiDescribe('Database', persistence => { testDocs['b'] ]); await assertSDKQueryResultsConsistentWithBackend( + coll, orderedQuery, testDocs, toIds(snapshot) @@ -2977,9 +2974,6 @@ apiDescribe('Database', persistence => { settings, testDocs, async coll => { - // Populate the cache with all docs first - await getDocs(coll); - let orderedQuery = query( coll, where('key', '>', new BsonBinaryData(1, new Uint8Array([1, 2, 3]))), @@ -2992,6 +2986,7 @@ apiDescribe('Database', persistence => { testDocs['b'] ]); await assertSDKQueryResultsConsistentWithBackend( + coll, orderedQuery, testDocs, toIds(snapshot) @@ -3014,6 +3009,7 @@ apiDescribe('Database', persistence => { testDocs['a'] ]); await assertSDKQueryResultsConsistentWithBackend( + coll, orderedQuery, testDocs, toIds(snapshot) @@ -3034,9 +3030,6 @@ apiDescribe('Database', persistence => { settings, testDocs, async coll => { - // Populate the cache with all docs first - await getDocs(coll); - const orderedQuery = query( coll, or( @@ -3052,6 +3045,7 @@ apiDescribe('Database', persistence => { testDocs['a'] ]); await assertSDKQueryResultsConsistentWithBackend( + coll, orderedQuery, testDocs, toIds(snapshot) @@ -3074,9 +3068,6 @@ apiDescribe('Database', persistence => { settings, testDocs, async coll => { - // Populate the cache with all docs first - await getDocs(coll); - let filteredQuery = query( coll, where('key', '==', MinKey.instance()) @@ -3087,23 +3078,24 @@ apiDescribe('Database', persistence => { testDocs['b'] ]); await assertSDKQueryResultsConsistentWithBackend( + coll, filteredQuery, testDocs, toIds(snapshot) ); - // TODO(Mila/BSON): 
uncomment after the null inclusion bug - // filteredQuery = query(coll, where('key', '!=', MinKey.instance())); - // snapshot = await getDocs(filteredQuery); - // expect(toDataArray(snapshot)).to.deep.equal([ - // testDocs['d'], - // testDocs['e'] - // ]); - // await assertSDKQueryResultsConsistentWithBackend( - // filteredQuery, - // testDocs, - // toIds(snapshot) - // ); + filteredQuery = query(coll, where('key', '!=', MinKey.instance())); + snapshot = await getDocs(filteredQuery); + expect(toDataArray(snapshot)).to.deep.equal([ + testDocs['d'], + testDocs['e'] + ]); + await assertSDKQueryResultsConsistentWithBackend( + coll, + filteredQuery, + testDocs, + toIds(snapshot) + ); filteredQuery = query(coll, where('key', '>=', MinKey.instance())); snapshot = await getDocs(filteredQuery); @@ -3112,6 +3104,7 @@ apiDescribe('Database', persistence => { testDocs['b'] ]); await assertSDKQueryResultsConsistentWithBackend( + coll, filteredQuery, testDocs, toIds(snapshot) @@ -3124,6 +3117,7 @@ apiDescribe('Database', persistence => { testDocs['b'] ]); await assertSDKQueryResultsConsistentWithBackend( + coll, filteredQuery, testDocs, toIds(snapshot) @@ -3133,6 +3127,7 @@ apiDescribe('Database', persistence => { snapshot = await getDocs(filteredQuery); expect(toDataArray(snapshot)).to.deep.equal([]); await assertSDKQueryResultsConsistentWithBackend( + coll, filteredQuery, testDocs, toIds(snapshot) @@ -3142,6 +3137,7 @@ apiDescribe('Database', persistence => { snapshot = await getDocs(filteredQuery); expect(toDataArray(snapshot)).to.deep.equal([]); await assertSDKQueryResultsConsistentWithBackend( + coll, filteredQuery, testDocs, toIds(snapshot) @@ -3151,6 +3147,7 @@ apiDescribe('Database', persistence => { snapshot = await getDocs(filteredQuery); expect(toDataArray(snapshot)).to.deep.equal([]); await assertSDKQueryResultsConsistentWithBackend( + coll, filteredQuery, testDocs, toIds(snapshot) @@ -3173,9 +3170,6 @@ apiDescribe('Database', persistence => { settings, testDocs, 
async coll => { - // Populate the cache with all docs first - await getDocs(coll); - let filteredQuery = query( coll, where('key', '==', MaxKey.instance()) @@ -3186,23 +3180,24 @@ apiDescribe('Database', persistence => { testDocs['d'] ]); await assertSDKQueryResultsConsistentWithBackend( + coll, filteredQuery, testDocs, toIds(snapshot) ); - // TODO(Mila/BSON): uncomment after the null inclusion bug - // filteredQuery = query(coll, where('key', '!=', MaxKey.instance())); - // snapshot = await getDocs(filteredQuery); - // expect(toDataArray(snapshot)).to.deep.equal([ - // testDocs['a'], - // testDocs['b'] - // ]); - // await assertSDKQueryResultsConsistentWithBackend( - // filteredQuery, - // testDocs, - // toIds(snapshot) - // ); + filteredQuery = query(coll, where('key', '!=', MaxKey.instance())); + snapshot = await getDocs(filteredQuery); + expect(toDataArray(snapshot)).to.deep.equal([ + testDocs['a'], + testDocs['b'] + ]); + await assertSDKQueryResultsConsistentWithBackend( + coll, + filteredQuery, + testDocs, + toIds(snapshot) + ); filteredQuery = query(coll, where('key', '>=', MaxKey.instance())); snapshot = await getDocs(filteredQuery); @@ -3211,6 +3206,7 @@ apiDescribe('Database', persistence => { testDocs['d'] ]); await assertSDKQueryResultsConsistentWithBackend( + coll, filteredQuery, testDocs, toIds(snapshot) @@ -3223,6 +3219,7 @@ apiDescribe('Database', persistence => { testDocs['d'] ]); await assertSDKQueryResultsConsistentWithBackend( + coll, filteredQuery, testDocs, toIds(snapshot) @@ -3232,6 +3229,7 @@ apiDescribe('Database', persistence => { snapshot = await getDocs(filteredQuery); expect(toDataArray(snapshot)).to.deep.equal([]); await assertSDKQueryResultsConsistentWithBackend( + coll, filteredQuery, testDocs, toIds(snapshot) @@ -3241,6 +3239,7 @@ apiDescribe('Database', persistence => { snapshot = await getDocs(filteredQuery); expect(toDataArray(snapshot)).to.deep.equal([]); await assertSDKQueryResultsConsistentWithBackend( + coll, filteredQuery, 
testDocs, toIds(snapshot) @@ -3250,6 +3249,7 @@ apiDescribe('Database', persistence => { snapshot = await getDocs(filteredQuery); expect(toDataArray(snapshot)).to.deep.equal([]); await assertSDKQueryResultsConsistentWithBackend( + coll, filteredQuery, testDocs, toIds(snapshot) @@ -3273,9 +3273,6 @@ apiDescribe('Database', persistence => { settings, testDocs, async coll => { - // Populate the cache with all docs first - await getDocs(coll); - let filteredQuery = query(coll, where('key', '==', null)); let snapshot = await getDocs(filteredQuery); expect(toDataArray(snapshot)).to.deep.equal([ @@ -3283,6 +3280,7 @@ apiDescribe('Database', persistence => { testDocs['c'] ]); await assertSDKQueryResultsConsistentWithBackend( + coll, filteredQuery, testDocs, toIds(snapshot) @@ -3296,6 +3294,7 @@ apiDescribe('Database', persistence => { testDocs['e'] ]); await assertSDKQueryResultsConsistentWithBackend( + coll, filteredQuery, testDocs, toIds(snapshot) @@ -3424,11 +3423,9 @@ apiDescribe('Database', persistence => { await setDoc(doc(coll, 'm'), { key: docRef }); testDocs['m'] = { key: docRef }; - // Populate the cache with all docs first - await getDocs(coll); - const orderedQuery = query(coll, orderBy('key', 'desc')); await assertSDKQueryResultsConsistentWithBackend( + coll, orderedQuery, testDocs, [ @@ -3487,11 +3484,9 @@ apiDescribe('Database', persistence => { settings, testDocs, async coll => { - // Populate the cache with all docs first - await getDocs(coll); - const orderedQuery = query(coll, orderBy('key')); await assertSDKQueryResultsConsistentWithBackend( + coll, orderedQuery, testDocs, [ diff --git a/packages/firestore/test/integration/util/helpers.ts b/packages/firestore/test/integration/util/helpers.ts index cdedf2a3e02..7cfe7d3a7a4 100644 --- a/packages/firestore/test/integration/util/helpers.ts +++ b/packages/firestore/test/integration/util/helpers.ts @@ -633,6 +633,7 @@ export async function checkOnlineAndOfflineResultsMatch( * @returns {Promise} A Promise that 
resolves when the assertions are complete. */ export async function assertSDKQueryResultsConsistentWithBackend( + collection: CollectionReference, query: Query, allData: { [key: string]: DocumentData }, expectedDocIds: string[] @@ -640,7 +641,7 @@ export async function assertSDKQueryResultsConsistentWithBackend( // Check the cache round trip first to make sure cache is properly populated, otherwise the // snapshot listener below will return partial results from previous // "assertSDKQueryResultsConsistentWithBackend" calls if it is called multiple times in one test - await checkOnlineAndOfflineResultsMatch(query, ...expectedDocIds); + await checkOnlineAndOfflineResultsMatch(collection, query, ...expectedDocIds); const eventAccumulator = new EventsAccumulator(); const unsubscribe = onSnapshot( diff --git a/packages/firestore/test/unit/model/values.test.ts b/packages/firestore/test/unit/model/values.test.ts index 048ae762a98..dce8f1e123c 100644 --- a/packages/firestore/test/unit/model/values.test.ts +++ b/packages/firestore/test/unit/model/values.test.ts @@ -252,11 +252,13 @@ describe('Values', () => { [refValue(dbId('p2', 'd1'), key('c1/doc1'))], // ObjectId - [wrap(new BsonObjectId('foo')), wrap(new BsonObjectId('foo'))], - // TODO(Mila/BSON): uncomment after string sort bug is fixed - // [wrap(new BsonObjectId('Ḟoo'))], // with latin capital letter f with dot above - // [wrap(new BsonObjectId('foo\u0301'))], // with combining acute accent - [wrap(new BsonObjectId('xyz'))], + [wrap(new BsonObjectId('507f191e810c19729de860ea'))], + [wrap(new BsonObjectId('507f191e810c19729de860eb'))], + // latin small letter e + combining acute accent + latin small letter b + [wrap(new BsonObjectId('e\u0301b'))], + [wrap(new BsonObjectId('æ'))], + // latin small letter e with acute accent + latin small letter a + [wrap(new BsonObjectId('\u00e9a'))], // geo points [wrap(new GeoPoint(-90, -180))], From b6404540c2fe630b02be3b1be57c20dd5d0376e4 Mon Sep 17 00:00:00 2001 From: Mila 
<107142260+milaGGL@users.noreply.github.com> Date: Fri, 27 Jun 2025 18:31:51 -0400 Subject: [PATCH 8/9] Add decimal128 to new types (#344) * add decimal128 * add new tests * format * resolve comments * update comments * refactor compareNumbers * copy paste Quadruple code * resolve comments 1 * make Quadruple value in decimal128 private --- common/api-review/firestore-lite.api.md | 8 + common/api-review/firestore.api.md | 8 + packages/firestore/lite/index.ts | 2 + packages/firestore/src/api.ts | 2 + .../src/index/firestore_index_value_writer.ts | 98 +- .../src/lite-api/decimal128_value.ts | 47 + .../src/lite-api/user_data_reader.ts | 18 +- .../src/lite-api/user_data_writer.ts | 18 +- .../src/model/transform_operation.ts | 4 +- packages/firestore/src/model/values.ts | 272 +++-- packages/firestore/src/util/quadruple.ts | 275 +++++ .../firestore/src/util/quadruple_builder.ts | 1067 +++++++++++++++++ .../test/integration/api/database.test.ts | 336 +++++- .../test/integration/api/type.test.ts | 74 ++ .../firestore/test/lite/integration.test.ts | 4 + .../firestore_index_value_writer.test.ts | 106 +- .../test/unit/local/index_manager.test.ts | 214 +++- .../unit/local/local_store_indexeddb.test.ts | 274 ++++- .../test/unit/model/document.test.ts | 7 +- .../test/unit/model/object_value.test.ts | 24 +- .../firestore/test/unit/model/values.test.ts | 83 +- .../test/unit/remote/serializer.helper.ts | 4 +- packages/firestore/tsconfig.json | 3 +- 23 files changed, 2686 insertions(+), 262 deletions(-) create mode 100644 packages/firestore/src/lite-api/decimal128_value.ts create mode 100644 packages/firestore/src/util/quadruple.ts create mode 100644 packages/firestore/src/util/quadruple_builder.ts diff --git a/common/api-review/firestore-lite.api.md b/common/api-review/firestore-lite.api.md index f0203c034b3..7d2098e6f11 100644 --- a/common/api-review/firestore-lite.api.md +++ b/common/api-review/firestore-lite.api.md @@ -136,6 +136,14 @@ export function 
connectFirestoreEmulator(firestore: Firestore, host: string, por // @public export function count(): AggregateField; +// @public +export class Decimal128Value { + constructor(value: string); + isEqual(other: Decimal128Value): boolean; + // (undocumented) + readonly stringValue: string; + } + // @public export function deleteDoc(reference: DocumentReference): Promise; diff --git a/common/api-review/firestore.api.md b/common/api-review/firestore.api.md index 90137f78b00..2c7b3879c73 100644 --- a/common/api-review/firestore.api.md +++ b/common/api-review/firestore.api.md @@ -142,6 +142,14 @@ export function connectFirestoreEmulator(firestore: Firestore, host: string, por // @public export function count(): AggregateField; +// @public +export class Decimal128Value { + constructor(value: string); + isEqual(other: Decimal128Value): boolean; + // (undocumented) + readonly stringValue: string; + } + // @public export function deleteAllPersistentCacheIndexes(indexManager: PersistentCacheIndexManager): void; diff --git a/packages/firestore/lite/index.ts b/packages/firestore/lite/index.ts index 7eee71a9893..2a0799d4d8e 100644 --- a/packages/firestore/lite/index.ts +++ b/packages/firestore/lite/index.ts @@ -143,6 +143,8 @@ export { VectorValue } from '../src/lite-api/vector_value'; export { Int32Value } from '../src/lite-api/int32_value'; +export { Decimal128Value } from '../src/lite-api/decimal128_value'; + export { RegexValue } from '../src/lite-api/regex_value'; export { BsonBinaryData } from '../src/lite-api/bson_binary_data'; diff --git a/packages/firestore/src/api.ts b/packages/firestore/src/api.ts index b9d14923bcd..442ef30007f 100644 --- a/packages/firestore/src/api.ts +++ b/packages/firestore/src/api.ts @@ -180,6 +180,8 @@ export { VectorValue } from './lite-api/vector_value'; export { Int32Value } from './lite-api/int32_value'; +export { Decimal128Value } from './lite-api/decimal128_value'; + export { RegexValue } from './lite-api/regex_value'; export { 
BsonBinaryData } from './lite-api/bson_binary_data'; diff --git a/packages/firestore/src/index/firestore_index_value_writer.ts b/packages/firestore/src/index/firestore_index_value_writer.ts index 5a5f04c9988..f306b21c13a 100644 --- a/packages/firestore/src/index/firestore_index_value_writer.ts +++ b/packages/firestore/src/index/firestore_index_value_writer.ts @@ -31,9 +31,16 @@ import { MapRepresentation, RESERVED_REGEX_PATTERN_KEY, RESERVED_REGEX_OPTIONS_KEY, - RESERVED_INT32_KEY + RESERVED_INT32_KEY, + RESERVED_DECIMAL128_KEY } from '../model/values'; -import { ArrayValue, MapValue, Value } from '../protos/firestore_proto_api'; +import { + ArrayValue, + MapValue, + Value, + Timestamp, + LatLng +} from '../protos/firestore_proto_api'; import { fail } from '../util/assert'; import { isNegativeZero } from '../util/types'; @@ -106,26 +113,10 @@ export class FirestoreIndexValueWriter { this.writeValueTypeLabel(encoder, INDEX_TYPE_NUMBER); encoder.writeNumber(normalizeNumber(indexValue.integerValue)); } else if ('doubleValue' in indexValue) { - const n = normalizeNumber(indexValue.doubleValue); - if (isNaN(n)) { - this.writeValueTypeLabel(encoder, INDEX_TYPE_NAN); - } else { - this.writeValueTypeLabel(encoder, INDEX_TYPE_NUMBER); - if (isNegativeZero(n)) { - // -0.0, 0 and 0.0 are all considered the same - encoder.writeNumber(0.0); - } else { - encoder.writeNumber(n); - } - } + const doubleValue = normalizeNumber(indexValue.doubleValue); + this.writeIndexDouble(doubleValue, encoder); } else if ('timestampValue' in indexValue) { - let timestamp = indexValue.timestampValue!; - this.writeValueTypeLabel(encoder, INDEX_TYPE_TIMESTAMP); - if (typeof timestamp === 'string') { - timestamp = normalizeTimestamp(timestamp); - } - encoder.writeString(`${timestamp.seconds || ''}`); - encoder.writeNumber(timestamp.nanos || 0); + this.writeIndexTimestamp(indexValue.timestampValue!, encoder); } else if ('stringValue' in indexValue) { this.writeIndexString(indexValue.stringValue!, 
encoder); this.writeTruncationMarker(encoder); @@ -136,10 +127,7 @@ export class FirestoreIndexValueWriter { } else if ('referenceValue' in indexValue) { this.writeIndexEntityRef(indexValue.referenceValue!, encoder); } else if ('geoPointValue' in indexValue) { - const geoPoint = indexValue.geoPointValue!; - this.writeValueTypeLabel(encoder, INDEX_TYPE_GEOPOINT); - encoder.writeNumber(geoPoint.latitude || 0); - encoder.writeNumber(geoPoint.longitude || 0); + this.writeIndexGeoPoint(indexValue.geoPointValue!, encoder); } else if ('mapValue' in indexValue) { const type = detectMapRepresentation(indexValue); if (type === MapRepresentation.INTERNAL_MAX) { @@ -159,12 +147,14 @@ export class FirestoreIndexValueWriter { } else if (type === MapRepresentation.BSON_OBJECT_ID) { this.writeIndexBsonObjectId(indexValue.mapValue!, encoder); } else if (type === MapRepresentation.INT32) { - this.writeValueTypeLabel(encoder, INDEX_TYPE_NUMBER); - encoder.writeNumber( - normalizeNumber( - indexValue.mapValue!.fields![RESERVED_INT32_KEY]!.integerValue! - ) + this.writeIndexInt32(indexValue.mapValue!, encoder); + } else if (type === MapRepresentation.DECIMAL128) { + // Double and Decimal128 sort the same + // Decimal128 is written as double with precision lost + const parsedValue = parseFloat( + indexValue.mapValue!.fields![RESERVED_DECIMAL128_KEY]!.stringValue! 
); + this.writeIndexDouble(parsedValue, encoder); } else { this.writeIndexMap(indexValue.mapValue!, encoder); this.writeTruncationMarker(encoder); @@ -192,6 +182,54 @@ export class FirestoreIndexValueWriter { encoder.writeString(stringIndexValue); } + private writeIndexDouble( + double: number, + encoder: DirectionalIndexByteEncoder + ): void { + if (isNaN(double)) { + this.writeValueTypeLabel(encoder, INDEX_TYPE_NAN); + } else { + this.writeValueTypeLabel(encoder, INDEX_TYPE_NUMBER); + if (isNegativeZero(double)) { + // -0.0, 0 and 0.0 are all considered the same + encoder.writeNumber(0.0); + } else { + encoder.writeNumber(double); + } + } + } + + private writeIndexInt32( + mapValue: MapValue, + encoder: DirectionalIndexByteEncoder + ): void { + this.writeValueTypeLabel(encoder, INDEX_TYPE_NUMBER); + encoder.writeNumber( + normalizeNumber(mapValue.fields![RESERVED_INT32_KEY]!.integerValue!) + ); + } + + private writeIndexTimestamp( + timestamp: Timestamp, + encoder: DirectionalIndexByteEncoder + ): void { + this.writeValueTypeLabel(encoder, INDEX_TYPE_TIMESTAMP); + if (typeof timestamp === 'string') { + timestamp = normalizeTimestamp(timestamp); + } + encoder.writeString(`${timestamp.seconds || ''}`); + encoder.writeNumber(timestamp.nanos || 0); + } + + private writeIndexGeoPoint( + geoPoint: LatLng, + encoder: DirectionalIndexByteEncoder + ): void { + this.writeValueTypeLabel(encoder, INDEX_TYPE_GEOPOINT); + encoder.writeNumber(geoPoint.latitude || 0); + encoder.writeNumber(geoPoint.longitude || 0); + } + private writeIndexMap( mapIndexValue: MapValue, encoder: DirectionalIndexByteEncoder diff --git a/packages/firestore/src/lite-api/decimal128_value.ts b/packages/firestore/src/lite-api/decimal128_value.ts new file mode 100644 index 00000000000..ef420238273 --- /dev/null +++ b/packages/firestore/src/lite-api/decimal128_value.ts @@ -0,0 +1,47 @@ +/** + * @license + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + 
* you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { Quadruple } from '../util/quadruple'; + +/** + * Represents a 128-bit decimal type in Firestore documents. + * + * @class Decimal128Value + */ +export class Decimal128Value { + readonly stringValue: string; + private value: Quadruple; + + constructor(value: string) { + this.stringValue = value; + this.value = Quadruple.fromString(value); + } + + /** + * Returns true if this `Decimal128Value` is equal to the provided one. + * + * @param other - The `Decimal128Value` to compare against. + * @return 'true' if this `Decimal128Value` is equal to the provided one. + */ + isEqual(other: Decimal128Value): boolean { + // Firestore considers +0 and -0 to be equal. 
+ if (this.value.isZero() && other.value.isZero()) { + return true; + } + return this.value.compareTo(other.value) === 0; + } +} diff --git a/packages/firestore/src/lite-api/user_data_reader.ts b/packages/firestore/src/lite-api/user_data_reader.ts index 63b0b99db90..b0fd7772bc1 100644 --- a/packages/firestore/src/lite-api/user_data_reader.ts +++ b/packages/firestore/src/lite-api/user_data_reader.ts @@ -55,7 +55,8 @@ import { RESERVED_BSON_TIMESTAMP_INCREMENT_KEY, RESERVED_BSON_BINARY_KEY, RESERVED_MIN_KEY, - RESERVED_MAX_KEY + RESERVED_MAX_KEY, + RESERVED_DECIMAL128_KEY } from '../model/values'; import { newSerializer } from '../platform/serializer'; import { @@ -80,6 +81,7 @@ import { BsonObjectId } from './bson_object_Id'; import { BsonTimestamp } from './bson_timestamp'; import { Bytes } from './bytes'; import { Firestore } from './database'; +import { Decimal128Value } from './decimal128_value'; import { FieldPath } from './field_path'; import { FieldValue } from './field_value'; import { GeoPoint } from './geo_point'; @@ -936,6 +938,8 @@ function parseScalarValue( return parseBsonObjectId(value); } else if (value instanceof Int32Value) { return parseInt32Value(value); + } else if (value instanceof Decimal128Value) { + return parseDecimal128Value(value); } else if (value instanceof BsonTimestamp) { return parseBsonTimestamp(value); } else if (value instanceof BsonBinaryData) { @@ -1045,6 +1049,17 @@ export function parseInt32Value(value: Int32Value): ProtoValue { return { mapValue }; } +export function parseDecimal128Value(value: Decimal128Value): ProtoValue { + const mapValue: ProtoMapValue = { + fields: { + [RESERVED_DECIMAL128_KEY]: { + stringValue: value.stringValue + } + } + }; + return { mapValue }; +} + export function parseBsonTimestamp(value: BsonTimestamp): ProtoValue { const mapValue: ProtoMapValue = { fields: { @@ -1105,6 +1120,7 @@ function looksLikeJsonObject(input: unknown): boolean { !(input instanceof MinKey) && !(input instanceof MaxKey) && 
!(input instanceof Int32Value) && + !(input instanceof Decimal128Value) && !(input instanceof RegexValue) && !(input instanceof BsonObjectId) && !(input instanceof BsonTimestamp) && diff --git a/packages/firestore/src/lite-api/user_data_writer.ts b/packages/firestore/src/lite-api/user_data_writer.ts index ba9ade1c207..92ba958608b 100644 --- a/packages/firestore/src/lite-api/user_data_writer.ts +++ b/packages/firestore/src/lite-api/user_data_writer.ts @@ -41,7 +41,10 @@ import { RESERVED_BSON_TIMESTAMP_KEY, RESERVED_BSON_TIMESTAMP_SECONDS_KEY, typeOrder, - VECTOR_MAP_VECTORS_KEY + VECTOR_MAP_VECTORS_KEY, + RESERVED_DECIMAL128_KEY, + isInt32Value, + isDecimal128Value } from '../model/values'; import { ApiClientObjectMap, @@ -61,6 +64,7 @@ import { forEach } from '../util/obj'; import { BsonBinaryData } from './bson_binary_data'; import { BsonObjectId } from './bson_object_Id'; import { BsonTimestamp } from './bson_timestamp'; +import { Decimal128Value } from './decimal128_value'; import { GeoPoint } from './geo_point'; import { Int32Value } from './int32_value'; import { MaxKey } from './max_key'; @@ -89,7 +93,11 @@ export abstract class AbstractUserDataWriter { return value.booleanValue!; case TypeOrder.NumberValue: if ('mapValue' in value) { - return this.convertToInt32Value(value.mapValue!); + if (isInt32Value(value)) { + return this.convertToInt32Value(value.mapValue!); + } else if (isDecimal128Value(value)) { + return this.convertToDecimal128Value(value.mapValue!); + } } return normalizeNumber(value.integerValue || value.doubleValue); case TypeOrder.TimestampValue: @@ -215,6 +223,12 @@ export abstract class AbstractUserDataWriter { return new Int32Value(value); } + private convertToDecimal128Value(mapValue: ProtoMapValue): Decimal128Value { + const value = + mapValue!.fields?.[RESERVED_DECIMAL128_KEY]?.stringValue ?? 
''; + return new Decimal128Value(value); + } + private convertGeoPoint(value: ProtoLatLng): GeoPoint { return new GeoPoint( normalizeNumber(value.latitude), diff --git a/packages/firestore/src/model/transform_operation.ts b/packages/firestore/src/model/transform_operation.ts index 07f6df94366..01ce05d3f82 100644 --- a/packages/firestore/src/model/transform_operation.ts +++ b/packages/firestore/src/model/transform_operation.ts @@ -23,7 +23,7 @@ import { arrayEquals } from '../util/misc'; import { normalizeNumber } from './normalize'; import { serverTimestamp } from './server_timestamps'; -import { isArray, isInteger, isNumber, valueEquals } from './values'; +import { isArray, isIntegerValue, isNumber, valueEquals } from './values'; /** Used to represent a field transform on a mutation. */ export class TransformOperation { @@ -205,7 +205,7 @@ export function applyNumericIncrementTransformOperationToLocalView( previousValue )!; const sum = asNumber(baseValue) + asNumber(transform.operand); - if (isInteger(baseValue) && isInteger(transform.operand)) { + if (isIntegerValue(baseValue) && isIntegerValue(transform.operand)) { return toInteger(sum); } else { return toDouble(transform.serializer, sum); diff --git a/packages/firestore/src/model/values.ts b/packages/firestore/src/model/values.ts index fca9c34b9ea..01902ff8b5d 100644 --- a/packages/firestore/src/model/values.ts +++ b/packages/firestore/src/model/values.ts @@ -30,6 +30,7 @@ import { primitiveComparator } from '../util/misc'; import { forEach, objectSize } from '../util/obj'; +import { Quadruple } from '../util/quadruple'; import { isNegativeZero } from '../util/types'; import { DocumentKey } from './document_key'; @@ -59,6 +60,8 @@ export const RESERVED_BSON_OBJECT_ID_KEY = '__oid__'; export const RESERVED_INT32_KEY = '__int__'; +export const RESERVED_DECIMAL128_KEY = '__decimal128__'; + export const RESERVED_BSON_TIMESTAMP_KEY = '__request_timestamp__'; export const RESERVED_BSON_TIMESTAMP_SECONDS_KEY = 
'seconds'; export const RESERVED_BSON_TIMESTAMP_INCREMENT_KEY = 'increment'; @@ -168,6 +171,7 @@ export enum MapRepresentation { REGEX = 'regexValue', BSON_OBJECT_ID = 'bsonObjectIdValue', INT32 = 'int32Value', + DECIMAL128 = 'decimal128Value', BSON_TIMESTAMP = 'bsonTimestampValue', BSON_BINARY = 'bsonBinaryValue', MIN_KEY = 'minKeyValue', @@ -178,6 +182,27 @@ export enum MapRepresentation { REGULAR_MAP = 'regularMapValue' } +const TYPE_BASED_REPRESENTATIONS: Record = { + [RESERVED_VECTOR_KEY]: MapRepresentation.VECTOR, + [RESERVED_MAX_KEY]: MapRepresentation.INTERNAL_MAX, + [RESERVED_SERVER_TIMESTAMP_KEY]: MapRepresentation.SERVER_TIMESTAMP +}; + +const BSON_REPRESENTATIONS: Record = { + [RESERVED_REGEX_KEY]: MapRepresentation.REGEX, + [RESERVED_BSON_OBJECT_ID_KEY]: MapRepresentation.BSON_OBJECT_ID, + [RESERVED_INT32_KEY]: MapRepresentation.INT32, + [RESERVED_DECIMAL128_KEY]: MapRepresentation.DECIMAL128, + [RESERVED_BSON_TIMESTAMP_KEY]: MapRepresentation.BSON_TIMESTAMP, + [RESERVED_BSON_BINARY_KEY]: MapRepresentation.BSON_BINARY, + [RESERVED_MIN_KEY]: MapRepresentation.MIN_KEY, + [RESERVED_MAX_KEY]: MapRepresentation.MAX_KEY +}; + +const BSON_TYPE_REPRESENTATIONS = new Set( + Object.values(BSON_REPRESENTATIONS) +); + /** Extracts the backend's type order for the provided value. 
*/ export function typeOrder(value: Value): TypeOrder { if ('nullValue' in value) { @@ -212,6 +237,7 @@ export function typeOrder(value: Value): TypeOrder { case MapRepresentation.BSON_OBJECT_ID: return TypeOrder.BsonObjectIdValue; case MapRepresentation.INT32: + case MapRepresentation.DECIMAL128: return TypeOrder.NumberValue; case MapRepresentation.BSON_TIMESTAMP: return TypeOrder.BsonTimestampValue; @@ -321,10 +347,11 @@ function blobEquals(left: Value, right: Value): boolean { } export function numberEquals(left: Value, right: Value): boolean { - if ( + if (isDecimal128Value(left) && isDecimal128Value(right)) { + return compareQuadruples(left, right) === 0; + } else if ( ('integerValue' in left && 'integerValue' in right) || - (detectMapRepresentation(left) === MapRepresentation.INT32 && - detectMapRepresentation(right) === MapRepresentation.INT32) + (isInt32Value(left) && isInt32Value(right)) ) { return extractNumber(left) === extractNumber(right); } else if ('doubleValue' in left && 'doubleValue' in right) { @@ -431,7 +458,7 @@ export function valueCompare(left: Value, right: Value): number { export function extractNumber(value: Value): number { let numberValue; - if (detectMapRepresentation(value) === MapRepresentation.INT32) { + if (isInt32Value(value)) { numberValue = value.mapValue!.fields![RESERVED_INT32_KEY].integerValue!; } else { numberValue = value.integerValue || value.doubleValue; @@ -439,23 +466,53 @@ export function extractNumber(value: Value): number { return normalizeNumber(numberValue); } +function getDecimal128StringValue(value: Value): string { + return value.mapValue!.fields![RESERVED_DECIMAL128_KEY].stringValue!; +} + function compareNumbers(left: Value, right: Value): number { + // If either number is Decimal128, we cast both to wider (128-bit) representation, and compare those. 
+ if (isDecimal128Value(left) || isDecimal128Value(right)) { + return compareQuadruples(left, right); + } + const leftNumber = extractNumber(left); const rightNumber = extractNumber(right); - if (leftNumber < rightNumber) { - return -1; - } else if (leftNumber > rightNumber) { + // one or both numbers are NaN. + if (isNaN(leftNumber)) { + return isNaN(rightNumber) ? 0 : -1; + } else if (isNaN(rightNumber)) { return 1; - } else if (leftNumber === rightNumber) { + } + + return primitiveComparator(leftNumber, rightNumber); +} + +function compareQuadruples(left: Value, right: Value): number { + const leftQuadruple = convertNumberToQuadruple(left); + const rightQuadruple = convertNumberToQuadruple(right); + + // Firestore considers +0 and -0 to be equal. + if (leftQuadruple.isZero() && rightQuadruple.isZero()) { return 0; + } + + // NaN sorts equal to itself and before any other number. + if (leftQuadruple.isNaN()) { + return rightQuadruple.isNaN() ? 0 : -1; + } else if (rightQuadruple.isNaN()) { + return 1; + } + + return leftQuadruple.compareTo(rightQuadruple); +} + +function convertNumberToQuadruple(value: Value): Quadruple { + if (isDecimal128Value(value)) { + return Quadruple.fromString(getDecimal128StringValue(value)); } else { - // one or both are NaN. - if (isNaN(leftNumber)) { - return isNaN(rightNumber) ? 0 : -1; - } else { - return 1; - } + return Quadruple.fromNumber(extractNumber(value)); } } @@ -758,7 +815,9 @@ export function estimateByteSize(value: Value): number { case TypeOrder.BooleanValue: return 4; case TypeOrder.NumberValue: - // TODO(Mila/BSON): return 16 if the value is 128 decimal value + if (isDecimal128Value(value)) { + return 16; + } return 8; case TypeOrder.TimestampValue: // Timestamps are made up of two distinct numbers (seconds + nanoseconds) @@ -819,22 +878,48 @@ export function refValue(databaseId: DatabaseId, key: DocumentKey): Value { } /** Returns true if `value` is an IntegerValue . 
*/ -export function isInteger( +export function isIntegerValue( value?: Value | null ): value is { integerValue: string | number } { return !!value && 'integerValue' in value; } /** Returns true if `value` is a DoubleValue. */ -export function isDouble( +export function isDoubleValue( value?: Value | null ): value is { doubleValue: string | number } { return !!value && 'doubleValue' in value; } +export function isDecimal128Value(value: Value): boolean { + if (!value.mapValue?.fields) { + return false; + } + + const fields = value.mapValue.fields; + return ( + objectSize(fields) === 1 && + fields[RESERVED_DECIMAL128_KEY] && + !!fields[RESERVED_DECIMAL128_KEY].stringValue + ); +} + +export function isInt32Value(value: Value): boolean { + if (!value.mapValue?.fields) { + return false; + } + + const fields = value.mapValue.fields; + return ( + objectSize(fields) === 1 && + fields[RESERVED_INT32_KEY] && + !!fields[RESERVED_INT32_KEY].integerValue + ); +} + /** Returns true if `value` is either an IntegerValue or a DoubleValue. */ export function isNumber(value?: Value | null): boolean { - return isInteger(value) || isDouble(value); + return isIntegerValue(value) || isDoubleValue(value); } /** Returns true if `value` is an ArrayValue. */ @@ -859,10 +944,16 @@ export function isNullValue( } /** Returns true if `value` is NaN. */ -export function isNanValue( - value?: Value | null -): value is { doubleValue: 'NaN' | number } { - return !!value && 'doubleValue' in value && isNaN(Number(value.doubleValue)); +export function isNanValue(value: Value): boolean { + if (isDoubleValue(value) && isNaN(Number(value.doubleValue))) { + return true; + } + + if (isDecimal128Value(value) && getDecimal128StringValue(value) === 'NaN') { + return true; + } + + return false; } /** Returns true if `value` is a MapValue. 
*/ @@ -872,65 +963,32 @@ export function isMapValue( return !!value && 'mapValue' in value; } +export function isBsonType(value: Value): boolean { + return BSON_TYPE_REPRESENTATIONS.has(detectMapRepresentation(value)); +} + export function detectMapRepresentation(value: Value): MapRepresentation { - if (!value || !value.mapValue || !value.mapValue.fields) { - return MapRepresentation.REGULAR_MAP; // Not a special map type + if (!value.mapValue?.fields) { + return MapRepresentation.REGULAR_MAP; } const fields = value.mapValue.fields; // Check for type-based mappings - const type = fields[TYPE_KEY]?.stringValue; - if (type) { - const typeMap: Record = { - [RESERVED_VECTOR_KEY]: MapRepresentation.VECTOR, - [RESERVED_MAX_KEY]: MapRepresentation.INTERNAL_MAX, - [RESERVED_SERVER_TIMESTAMP_KEY]: MapRepresentation.SERVER_TIMESTAMP - }; - if (typeMap[type]) { - return typeMap[type]; - } - } - - if (objectSize(fields) !== 1) { - // All BSON types have 1 key in the map. To improve performance, we can - // return early if the number of keys in the map is not 1. - return MapRepresentation.REGULAR_MAP; + const typeString = fields[TYPE_KEY]?.stringValue; + if (typeString && TYPE_BASED_REPRESENTATIONS[typeString]) { + return TYPE_BASED_REPRESENTATIONS[typeString]; } - // Check for BSON-related mappings - const bsonMap: Record = { - [RESERVED_REGEX_KEY]: MapRepresentation.REGEX, - [RESERVED_BSON_OBJECT_ID_KEY]: MapRepresentation.BSON_OBJECT_ID, - [RESERVED_INT32_KEY]: MapRepresentation.INT32, - [RESERVED_BSON_TIMESTAMP_KEY]: MapRepresentation.BSON_TIMESTAMP, - [RESERVED_BSON_BINARY_KEY]: MapRepresentation.BSON_BINARY, - [RESERVED_MIN_KEY]: MapRepresentation.MIN_KEY, - [RESERVED_MAX_KEY]: MapRepresentation.MAX_KEY - }; - - for (const key in bsonMap) { - if (fields[key]) { - return bsonMap[key]; - } + // For BSON-related mappings, they typically have a single, unique key. 
+ if (objectSize(fields) === 1) { + const keys = Object.keys(fields); + return BSON_REPRESENTATIONS[keys[0]]; } return MapRepresentation.REGULAR_MAP; } -export function isBsonType(value: Value): boolean { - const bsonTypes = new Set([ - MapRepresentation.REGEX, - MapRepresentation.BSON_OBJECT_ID, - MapRepresentation.INT32, - MapRepresentation.BSON_TIMESTAMP, - MapRepresentation.BSON_BINARY, - MapRepresentation.MIN_KEY, - MapRepresentation.MAX_KEY - ]); - return bsonTypes.has(detectMapRepresentation(value)); -} - /** Creates a deep copy of `source`. */ export function deepClone(source: Value): Value { if (source.geoPointValue) { @@ -980,25 +1038,28 @@ export function valuesGetLowerBound(value: Value): Value { return { arrayValue: {} }; } else if ('mapValue' in value) { const type = detectMapRepresentation(value); - if (type === MapRepresentation.VECTOR) { - return MIN_VECTOR_VALUE; - } else if (type === MapRepresentation.BSON_OBJECT_ID) { - return MIN_BSON_OBJECT_ID_VALUE; - } else if (type === MapRepresentation.BSON_TIMESTAMP) { - return MIN_BSON_TIMESTAMP_VALUE; - } else if (type === MapRepresentation.BSON_BINARY) { - return MIN_BSON_BINARY_VALUE; - } else if (type === MapRepresentation.REGEX) { - return MIN_REGEX_VALUE; - } else if (type === MapRepresentation.INT32) { - // int32Value is treated the same as integerValue and doubleValue - return { doubleValue: NaN }; - } else if (type === MapRepresentation.MIN_KEY) { - return MIN_KEY_VALUE; - } else if (type === MapRepresentation.MAX_KEY) { - return MAX_KEY_VALUE; + switch (type) { + case MapRepresentation.VECTOR: + return MIN_VECTOR_VALUE; + case MapRepresentation.BSON_OBJECT_ID: + return MIN_BSON_OBJECT_ID_VALUE; + case MapRepresentation.BSON_TIMESTAMP: + return MIN_BSON_TIMESTAMP_VALUE; + case MapRepresentation.BSON_BINARY: + return MIN_BSON_BINARY_VALUE; + case MapRepresentation.REGEX: + return MIN_REGEX_VALUE; + case MapRepresentation.INT32: + case MapRepresentation.DECIMAL128: + // Int32Value and 
Decimal128Value are treated the same as integerValue and doubleValue + return { doubleValue: NaN }; + case MapRepresentation.MIN_KEY: + return MIN_KEY_VALUE; + case MapRepresentation.MAX_KEY: + return MAX_KEY_VALUE; + default: + return { mapValue: {} }; } - return { mapValue: {} }; } else { return fail(0x8c66, 'Invalid value type', { value }); } @@ -1026,25 +1087,28 @@ export function valuesGetUpperBound(value: Value): Value { return MIN_VECTOR_VALUE; } else if ('mapValue' in value) { const type = detectMapRepresentation(value); - if (type === MapRepresentation.VECTOR) { - return { mapValue: {} }; - } else if (type === MapRepresentation.BSON_OBJECT_ID) { - return { geoPointValue: { latitude: -90, longitude: -180 } }; - } else if (type === MapRepresentation.BSON_TIMESTAMP) { - return { stringValue: '' }; - } else if (type === MapRepresentation.BSON_BINARY) { - return refValue(DatabaseId.empty(), DocumentKey.empty()); - } else if (type === MapRepresentation.REGEX) { - return { arrayValue: {} }; - } else if (type === MapRepresentation.INT32) { - // int32Value is treated the same as integerValue and doubleValue - return { timestampValue: { seconds: Number.MIN_SAFE_INTEGER } }; - } else if (type === MapRepresentation.MIN_KEY) { - return { booleanValue: false }; - } else if (type === MapRepresentation.MAX_KEY) { - return INTERNAL_MAX_VALUE; + switch (type) { + case MapRepresentation.VECTOR: + return { mapValue: {} }; + case MapRepresentation.BSON_OBJECT_ID: + return { geoPointValue: { latitude: -90, longitude: -180 } }; + case MapRepresentation.BSON_TIMESTAMP: + return { stringValue: '' }; + case MapRepresentation.BSON_BINARY: + return refValue(DatabaseId.empty(), DocumentKey.empty()); + case MapRepresentation.REGEX: + return { arrayValue: {} }; + case MapRepresentation.INT32: + case MapRepresentation.DECIMAL128: + // Int32Value and Decimal128Value are treated the same as integerValue and doubleValue + return { timestampValue: { seconds: Number.MIN_SAFE_INTEGER } }; + 
case MapRepresentation.MIN_KEY: + return { booleanValue: false }; + case MapRepresentation.MAX_KEY: + return INTERNAL_MAX_VALUE; + default: + return MAX_KEY_VALUE; } - return MAX_KEY_VALUE; } else { return fail(0xf207, 'Invalid value type', { value }); } diff --git a/packages/firestore/src/util/quadruple.ts b/packages/firestore/src/util/quadruple.ts new file mode 100644 index 00000000000..5c3501106a1 --- /dev/null +++ b/packages/firestore/src/util/quadruple.ts @@ -0,0 +1,275 @@ +/** + * @license + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import { QuadrupleBuilder } from './quadruple_builder'; + +/** + * @private + * @internal + */ +export class Quadruple { + constructor( + negative: boolean, + biasedExponent: number, + mantHi: bigint, + mantLo: bigint + ) { + this.negative = negative; + this.biasedExponent = biasedExponent; + this.mantHi = mantHi; + this.mantLo = mantLo; + } + // The fields containing the value of the instance + negative: boolean; + biasedExponent: number; + mantHi: bigint; + mantLo: bigint; + static #exponentOfInfinity = Number(QuadrupleBuilder.EXPONENT_OF_INFINITY); + static positiveZero: Quadruple = new Quadruple(false, 0, 0n, 0n); + static negativeZero: Quadruple = new Quadruple(true, 0, 0n, 0n); + static NaN: Quadruple = new Quadruple( + false, + Quadruple.#exponentOfInfinity, + 1n << 63n, + 0n + ); + static negativeInfinity: Quadruple = new Quadruple( + true, + Quadruple.#exponentOfInfinity, + 0n, + 0n + ); + static positiveInfinity: Quadruple = new Quadruple( + false, + Quadruple.#exponentOfInfinity, + 0n, + 0n + ); + static #minLong: Quadruple = new Quadruple(true, Quadruple.#bias(63), 0n, 0n); + static #positiveOne: Quadruple = new Quadruple( + false, + Quadruple.#bias(0), + 0n, + 0n + ); + static #negativeOne: Quadruple = new Quadruple( + true, + Quadruple.#bias(0), + 0n, + 0n + ); + /** Return the (unbiased) exponent of this {@link Quadruple}. */ + exponent(): number { + return this.biasedExponent - QuadrupleBuilder.EXPONENT_BIAS; + } + /** Return true if this {@link Quadruple} is -0 or +0 */ + isZero(): boolean { + return ( + this.biasedExponent === 0 && this.mantHi === 0n && this.mantLo === 0n + ); + } + /** Return true if this {@link Quadruple} is -infinity or +infinity */ + isInfinite(): boolean { + return ( + this.biasedExponent === Quadruple.#exponentOfInfinity && + this.mantHi === 0n && + this.mantLo === 0n + ); + } + /** Return true if this {@link Quadruple} is a NaN. 
*/ + isNaN(): boolean { + return ( + this.biasedExponent === Quadruple.#exponentOfInfinity && + !(this.mantHi === 0n && this.mantLo === 0n) + ); + } + /** Compare two quadruples, with -0 < 0, and all NaNs equal and larger than all numbers. */ + compareTo(other: Quadruple): number { + if (this.isNaN()) { + return other.isNaN() ? 0 : 1; + } + if (other.isNaN()) { + return -1; + } + let lessThan; + let greaterThan; + if (this.negative) { + if (!other.negative) { + return -1; + } + lessThan = 1; + greaterThan = -1; + } else { + if (other.negative) { + return 1; + } + lessThan = -1; + greaterThan = 1; + } + if (this.biasedExponent < other.biasedExponent) { + return lessThan; + } + if (this.biasedExponent > other.biasedExponent) { + return greaterThan; + } + if (this.mantHi < other.mantHi) { + return lessThan; + } + if (this.mantHi > other.mantHi) { + return greaterThan; + } + if (this.mantLo < other.mantLo) { + return lessThan; + } + if (this.mantLo > other.mantLo) { + return greaterThan; + } + return 0; + } + debug(): string { + return ( + (this.negative ? '+' : '-') + + Quadruple.#hex(this.mantHi) + + Quadruple.#hex(this.mantLo) + + '*2^' + + this.exponent() + ); + } + static #hex(n: bigint): string { + return n.toString(16).padStart(16, '0'); + } + static fromNumber(value: number): Quadruple { + if (isNaN(value)) { + return Quadruple.NaN; + } + if (!isFinite(value)) { + return value < 0 + ? Quadruple.negativeInfinity + : Quadruple.positiveInfinity; + } + if (value === 0) { + // -0 === 0 and Math.sign(-0) = -0, so can't be used to distinguish 0 and -0. + // But 1/-0=-infinity, and 1/0=infinity, and Math.sign does "work" on infinity. + return Math.sign(1 / value) > 0 + ? 
Quadruple.positiveZero + : Quadruple.negativeZero; + } + const array = new DataView(new ArrayBuffer(8)); + array.setFloat64(0, value); + const bits = array.getBigUint64(0); + let mantHi = BigInt.asUintN(64, bits << 12n); + let exponent = Number(bits >> 52n) & 0x7ff; + if (exponent === 0) { + // subnormal - mantHi cannot be zero as that means value===+/-0 + const leadingZeros = QuadrupleBuilder.clz64(mantHi); + mantHi = leadingZeros < 63 ? mantHi << BigInt(leadingZeros + 1) : 0n; + exponent = -leadingZeros; + } + return new Quadruple( + value < 0, + Quadruple.#bias(exponent - 1023), + mantHi, + 0n + ); + } + /** + * Converts a decimal number to a {@link Quadruple}. The supported format (no whitespace allowed) + * is: + * + *
    + *
  • NaN for Quadruple.NaN + *
  • Infinity or +Infinity for Quadruple.positiveInfinity + *
  • -Infinity for Quadruple.negativeInfinity + *
  • regular expression: [+-]?[0-9]*(.[0-9]*)?([eE][+-]?[0-9]+)? - the exponent cannot be more + * than 9 digits, and the whole string cannot be empty + *
+ */ + static fromString(s: string): Quadruple { + if (s === 'NaN') { + return Quadruple.NaN; + } + if (s === '-Infinity') { + return Quadruple.negativeInfinity; + } + if (s === 'Infinity' || s === '+Infinity') { + return Quadruple.positiveInfinity; + } + const digits: number[] = new Array(s.length).fill(0); + let i = 0; + let j = 0; + let exponent = 0; + let negative = false; + if (s[i] === '-') { + negative = true; + i++; + } else if (s[i] === '+') { + i++; + } + while (Quadruple.#isDigit(s, i)) { + digits[j++] = Quadruple.#digit(s, i++); + } + if (s[i] === '.') { + const decimal = ++i; + while (Quadruple.#isDigit(s, i)) { + digits[j++] = Quadruple.#digit(s, i++); + } + exponent = decimal - i; + } + if (s[i] === 'e' || s[i] === 'E') { + let exponentValue = 0; + i++; + let exponentSign = 1; + if (s[i] === '-') { + exponentSign = -1; + i++; + } else if (s[i] === '+') { + i++; + } + const firstExponent = i; + while (Quadruple.#isDigit(s, i)) { + exponentValue = exponentValue * 10 + Quadruple.#digit(s, i++); + if (i - firstExponent > 9) { + throw new Error('Exponent too large ' + s); + } + } + if (i === firstExponent) { + throw new Error('Invalid number ' + s); + } + exponent += exponentValue * exponentSign; + } + if (j === 0 || i !== s.length) { + throw new Error('Invalid number ' + s); + } + const parsed = QuadrupleBuilder.parseDecimal(digits.slice(0, j), exponent); + return new Quadruple( + negative, + parsed.exponent, + parsed.mantHi, + parsed.mantLo + ); + } + static #isDigit(s: string, i: number): boolean { + const cp = s.codePointAt(i); + return cp !== undefined && cp >= 48 && cp <= 57; + } + static #digit(s: string, i: number): number { + return s.codePointAt(i)! 
- 48; + } + static #bias(exponent: number): number { + return exponent + QuadrupleBuilder.EXPONENT_BIAS; + } +} diff --git a/packages/firestore/src/util/quadruple_builder.ts b/packages/firestore/src/util/quadruple_builder.ts new file mode 100644 index 00000000000..5d5d69bbf84 --- /dev/null +++ b/packages/firestore/src/util/quadruple_builder.ts @@ -0,0 +1,1067 @@ +/** + * Copyright 2021 M.Vokhmentsev + * + * @license + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +/* eslint-disable */ + +/** + * @private + * @internal + */ +export class QuadrupleBuilder { + static parseDecimal(digits: number[], exp10: number): QuadrupleBuilder { + let q = new QuadrupleBuilder(); + q.parse(digits, exp10); + return q; + } + // The fields containing the value of the instance + exponent: number = 0; + mantHi: bigint = 0n; + mantLo: bigint = 0n; + // 2^192 = 6.277e57, so the 58-th digit after point may affect the result + static MAX_MANTISSA_LENGTH = 59; + // Max value of the decimal exponent, corresponds to EXPONENT_OF_MAX_VALUE + static MAX_EXP10 = 646456993; + // Min value of the decimal exponent, corresponds to EXPONENT_OF_MIN_NORMAL + static MIN_EXP10 = -646457032; + // (2^63) / 10 =~ 9.223372e17 + static TWO_POW_63_DIV_10 = 922337203685477580.0; + // Just for convenience: 0x8000_0000_0000_0000L + static HIGH_BIT = 0x8000_0000_0000_0000n; + // Just for convenience: 0x8000_0000L, 2^31 + static POW_2_31 = 2147483648.0; + // Just for convenience: 0x0000_0000_FFFF_FFFFL + static LOWER_32_BITS = 0x0000_0000_ffff_ffffn; + // Just for convenience: 0xFFFF_FFFF_0000_0000L; + static HIGHER_32_BITS = 0xffff_ffff_0000_0000n; + // Approximate value of log2(10) + static LOG2_10 = Math.log(10) / Math.log(2); + // Approximate value of log2(e) + static LOG2_E = 1 / Math.log(2.0); + // The value of the exponent (biased) corresponding to {@code 1.0 == 2^0}; equals to 2_147_483_647 + // ({@code 0x7FFF_FFFF}). 
+ static EXPONENT_BIAS = 0x7fff_ffff; + // The value of the exponent (biased), corresponding to {@code Infinity}, {@code _Infinty}, and + // {@code NaN} + static EXPONENT_OF_INFINITY = 0xffff_ffffn; + // An array of positive powers of two, each value consists of 4 longs: decimal exponent and 3 x 64 + // bits of mantissa, divided by ten Used to find an arbitrary power of 2 (by powerOfTwo(long exp)) + static POS_POWERS_OF_2: bigint[][] = [ + // 0: 2^0 = 1 = 0.1e1 + [ + 1n, + 0x1999_9999_9999_9999n, + 0x9999_9999_9999_9999n, + 0x9999_9999_9999_999an + ], // 1: 2^(2^0) = 2^1 = 2 = 0.2e1 + [ + 1n, + 0x3333_3333_3333_3333n, + 0x3333_3333_3333_3333n, + 0x3333_3333_3333_3334n + ], // *** + // 2: 2^(2^1) = 2^2 = 4 = 0.4e1 + [ + 1n, + 0x6666_6666_6666_6666n, + 0x6666_6666_6666_6666n, + 0x6666_6666_6666_6667n + ], // *** + // 3: 2^(2^2) = 2^4 = 16 = 0.16e2 + [ + 2n, + 0x28f5_c28f_5c28_f5c2n, + 0x8f5c_28f5_c28f_5c28n, + 0xf5c2_8f5c_28f5_c290n + ], // *** + // 4: 2^(2^3) = 2^8 = 256 = 0.256e3 + [ + 3n, + 0x4189_374b_c6a7_ef9dn, + 0xb22d_0e56_0418_9374n, + 0xbc6a_7ef9_db22_d0e6n + ], // *** + // 5: 2^(2^4) = 2^16 = 65536 = 0.65536e5 + [ + 5n, + 0xa7c5_ac47_1b47_8423n, + 0x0fcf_80dc_3372_1d53n, + 0xcddd_6e04_c059_2104n + ], // 6: 2^(2^5) = 2^32 = 4294967296 = 0.4294967296e10 + [ + 10n, + 0x6df3_7f67_5ef6_eadfn, + 0x5ab9_a207_2d44_268dn, + 0x97df_837e_6748_956en + ], // 7: 2^(2^6) = 2^64 = 18446744073709551616 = 0.18446744073709551616e20 + [ + 20n, + 0x2f39_4219_2484_46ban, + 0xa23d_2ec7_29af_3d61n, + 0x0607_aa01_67dd_94cbn + ], // 8: 2^(2^7) = 2^128 = 340282366920938463463374607431768211456 = + // 0.340282366920938463463374607431768211456e39 + [ + 39n, + 0x571c_bec5_54b6_0dbbn, + 0xd5f6_4baf_0506_840dn, + 0x451d_b70d_5904_029bn + ], // 9: 2^(2^8) = 2^256 = + // 1.1579208923731619542357098500868790785326998466564056403945758401E+77 = + // 0.11579208923731619542357098500868790785326998466564056403945758401e78 + [ + 78n, + 0x1da4_8ce4_68e7_c702n, + 0x6520_247d_3556_476dn, + 
0x1469_caf6_db22_4cfan + ], // *** + // 10: 2^(2^9) = 2^512 = + // 1.3407807929942597099574024998205846127479365820592393377723561444E+154 = + // 0.13407807929942597099574024998205846127479365820592393377723561444e155 + [ + 155n, + 0x2252_f0e5_b397_69dcn, + 0x9ae2_eea3_0ca3_ade0n, + 0xeeaa_3c08_dfe8_4e30n + ], // 11: 2^(2^10) = 2^1024 = + // 1.7976931348623159077293051907890247336179769789423065727343008116E+308 = + // 0.17976931348623159077293051907890247336179769789423065727343008116e309 + [ + 309n, + 0x2e05_5c9a_3f6b_a793n, + 0x1658_3a81_6eb6_0a59n, + 0x22c4_b082_6cf1_ebf7n + ], // 12: 2^(2^11) = 2^2048 = + // 3.2317006071311007300714876688669951960444102669715484032130345428E+616 = + // 0.32317006071311007300714876688669951960444102669715484032130345428e617 + [ + 617n, + 0x52bb_45e9_cf23_f17fn, + 0x7688_c076_06e5_0364n, + 0xb344_79aa_9d44_9a57n + ], // 13: 2^(2^12) = 2^4096 = + // 1.0443888814131525066917527107166243825799642490473837803842334833E+1233 = + // 0.10443888814131525066917527107166243825799642490473837803842334833e1234 + [ + 1234n, + 0x1abc_81c8_ff5f_846cn, + 0x8f5e_3c98_53e3_8c97n, + 0x4506_0097_f3bf_9296n + ], // 14: 2^(2^13) = 2^8192 = + // 1.0907481356194159294629842447337828624482641619962326924318327862E+2466 = + // 0.10907481356194159294629842447337828624482641619962326924318327862e2467 + [ + 2467n, + 0x1bec_53b5_10da_a7b4n, + 0x4836_9ed7_7dbb_0eb1n, + 0x3b05_587b_2187_b41en + ], // 15: 2^(2^14) = 2^16384 = + // 1.1897314953572317650857593266280071307634446870965102374726748212E+4932 = + // 0.11897314953572317650857593266280071307634446870965102374726748212e4933 + [ + 4933n, + 0x1e75_063a_5ba9_1326n, + 0x8abf_b8e4_6001_6ae3n, + 0x2800_8702_d29e_8a3cn + ], // 16: 2^(2^15) = 2^32768 = + // 1.4154610310449547890015530277449516013481307114723881672343857483E+9864 = + // 0.14154610310449547890015530277449516013481307114723881672343857483e9865 + [ + 9865n, + 0x243c_5d8b_b5c5_fa55n, + 0x40c6_d248_c588_1915n, + 0x4c0f_d99f_d5be_fc22n + ], // 17: 
2^(2^16) = 2^65536 = + // 2.0035299304068464649790723515602557504478254755697514192650169737E+19728 = + // 0.20035299304068464649790723515602557504478254755697514192650169737e19729 + [ + 19729n, + 0x334a_5570_c3f4_ef3cn, + 0xa13c_36c4_3f97_9c90n, + 0xda7a_c473_555f_b7a8n + ], // 18: 2^(2^17) = 2^131072 = + // 4.0141321820360630391660606060388767343771510270414189955825538065E+39456 = + // 0.40141321820360630391660606060388767343771510270414189955825538065e39457 + [ + 39457n, + 0x66c3_0444_5dd9_8f3bn, + 0xa8c2_93a2_0e47_a41bn, + 0x4c5b_03dc_1260_4964n + ], // 19: 2^(2^18) = 2^262144 = + // 1.6113257174857604736195721184520050106440238745496695174763712505E+78913 = + // 0.16113257174857604736195721184520050106440238745496695174763712505e78914 + [ + 78914n, + 0x293f_fbf5_fb02_8cc4n, + 0x89d3_e5ff_4423_8406n, + 0x369a_339e_1bfe_8c9bn + ], // 20: 2^(2^19) = 2^524288 = + // 2.5963705678310007761265964957268828277447343763484560463573654868E+157826 = + // 0.25963705678310007761265964957268828277447343763484560463573654868e157827 + [ + 157827n, + 0x4277_92fb_b68e_5d20n, + 0x7b29_7cd9_fc15_4b62n, + 0xf091_4211_4aa9_a20cn + ], // 21: 2^(2^20) = 2^1048576 = + // 6.7411401254990734022690651047042454376201859485326882846944915676E+315652 = + // 0.67411401254990734022690651047042454376201859485326882846944915676e315653 + [ + 315653n, + 0xac92_bc65_ad5c_08fcn, + 0x00be_eb11_5a56_6c19n, + 0x4ba8_82d8_a462_2437n + ], // 22: 2^(2^21) = 2^2097152 = + // 4.5442970191613663099961595907970650433180103994591456270882095573E+631305 = + // 0.45442970191613663099961595907970650433180103994591456270882095573e631306 + [ + 631306n, + 0x7455_8144_0f92_e80en, + 0x4da8_22cf_7f89_6f41n, + 0x509d_5986_7816_4ecdn + ], // 23: 2^(2^22) = 2^4194304 = + // 2.0650635398358879243991194945816501695274360493029670347841664177E+1262611 = + // 0.20650635398358879243991194945816501695274360493029670347841664177e1262612 + [ + 1262612n, + 0x34dd_99b4_c695_23a5n, + 0x64bc_2e8f_0d8b_1044n, + 0xb03b_1c96_da5d_d349n 
+ ], // 24: 2^(2^23) = 2^8388608 = + // 4.2644874235595278724327289260856157547554200794957122157246170406E+2525222 = + // 0.42644874235595278724327289260856157547554200794957122157246170406e2525223 + [ + 2525223n, + 0x6d2b_bea9_d6d2_5a08n, + 0xa0a4_606a_88e9_6b70n, + 0x1820_63bb_c2fe_8520n + ], // 25: 2^(2^24) = 2^16777216 = + // 1.8185852985697380078927713277749906189248596809789408311078112486E+5050445 = + // 0.18185852985697380078927713277749906189248596809789408311078112486e5050446 + [ + 5050446n, + 0x2e8e_47d6_3bfd_d6e3n, + 0x2b55_fa89_76ea_a3e9n, + 0x1a6b_9d30_8641_2a73n + ], // 26: 2^(2^25) = 2^33554432 = + // 3.3072524881739831340558051919726975471129152081195558970611353362E+10100890 = + // 0.33072524881739831340558051919726975471129152081195558970611353362e10100891 + [ + 10100891n, + 0x54aa_68ef_a1d7_19dfn, + 0xd850_5806_612c_5c8fn, + 0xad06_8837_fee8_b43an + ], // 27: 2^(2^26) = 2^67108864 = + // 1.0937919020533002449982468634925923461910249420785622990340704603E+20201781 = + // 0.10937919020533002449982468634925923461910249420785622990340704603e20201782 + [ + 20201782n, + 0x1c00_464c_cb7b_ae77n, + 0x9e38_7778_4c77_982cn, + 0xd94a_f3b6_1717_404fn + ], // 28: 2^(2^27) = 2^134217728 = + // 1.1963807249973763567102377630870670302911237824129274789063323723E+40403562 = + // 0.11963807249973763567102377630870670302911237824129274789063323723e40403563 + [ + 40403563n, + 0x1ea0_99c8_be2b_6cd0n, + 0x8bfb_6d53_9fa5_0466n, + 0x6d3b_c37e_69a8_4218n + ], // 29: 2^(2^28) = 2^268435456 = + // 1.4313268391452478724777126233530788980596273340675193575004129517E+80807124 = + // 0.14313268391452478724777126233530788980596273340675193575004129517e80807125 + [ + 80807125n, + 0x24a4_57f4_66ce_8d18n, + 0xf2c8_f3b8_1bc6_bb59n, + 0xa78c_7576_92e0_2d49n + ], // 30: 2^(2^29) = 2^536870912 = + // 2.0486965204575262773910959587280218683219330308711312100181276813E+161614248 = + // 0.20486965204575262773910959587280218683219330308711312100181276813e161614249 + [ + 161614249n, + 
0x3472_5667_7aba_6b53n, + 0x3fbf_90d3_0611_a67cn, + 0x1e03_9d87_e0bd_b32bn + ], // 31: 2^(2^30) = 2^1073741824 = + // 4.1971574329347753848087162337676781412761959309467052555732924370E+323228496 = + // 0.41971574329347753848087162337676781412761959309467052555732924370e323228497 + [ + 323228497n, + 0x6b72_7daf_0fd3_432an, + 0x71f7_1121_f9e4_200fn, + 0x8fcd_9942_d486_c10cn + ], // 32: 2^(2^31) = 2^2147483648 = + // 1.7616130516839633532074931497918402856671115581881347960233679023E+646456993 = + // 0.17616130516839633532074931497918402856671115581881347960233679023e646456994 + [ + 646456994n, + 0x2d18_e844_84d9_1f78n, + 0x4079_bfe7_829d_ec6fn, + 0x2155_1643_e365_abc6n + ] + ]; + // An array of negative powers of two, each value consists of 4 longs: decimal exponent and 3 x 64 + // bits of mantissa, divided by ten. Used to find an arbitrary power of 2 (by powerOfTwo(long exp)) + static NEG_POWERS_OF_2: bigint[][] = [ + // v18 + // 0: 2^0 = 1 = 0.1e1 + [ + 1n, + 0x1999_9999_9999_9999n, + 0x9999_9999_9999_9999n, + 0x9999_9999_9999_999an + ], // 1: 2^-(2^0) = 2^-1 = 0.5 = 0.5e0 + [ + 0n, + 0x8000_0000_0000_0000n, + 0x0000_0000_0000_0000n, + 0x0000_0000_0000_0000n + ], // 2: 2^-(2^1) = 2^-2 = 0.25 = 0.25e0 + // {0, 0x4000_0000_0000_0000L, 0x0000_0000_0000_0000L, 0x0000_0000_0000_0000L}, + [ + 0n, + 0x4000_0000_0000_0000n, + 0x0000_0000_0000_0000n, + 0x0000_0000_0000_0001n + ], // *** + // 3: 2^-(2^2) = 2^-4 = 0.0625 = 0.625e-1 + [ + -1n, + 0xa000_0000_0000_0000n, + 0x0000_0000_0000_0000n, + 0x0000_0000_0000_0000n + ], // 4: 2^-(2^3) = 2^-8 = 0.00390625 = 0.390625e-2 + [ + -2n, + 0x6400_0000_0000_0000n, + 0x0000_0000_0000_0000n, + 0x0000_0000_0000_0000n + ], // 5: 2^-(2^4) = 2^-16 = 0.0000152587890625 = 0.152587890625e-4 + [ + -4n, + 0x2710_0000_0000_0000n, + 0x0000_0000_0000_0000n, + 0x0000_0000_0000_0001n + ], // *** + // 6: 2^-(2^5) = 2^-32 = 2.3283064365386962890625E-10 = 0.23283064365386962890625e-9 + [ + -9n, + 0x3b9a_ca00_0000_0000n, + 0x0000_0000_0000_0000n, + 
0x0000_0000_0000_0001n + ], // *** + // 7: 2^-(2^6) = 2^-64 = 5.42101086242752217003726400434970855712890625E-20 = + // 0.542101086242752217003726400434970855712890625e-19 + [ + -19n, + 0x8ac7_2304_89e8_0000n, + 0x0000_0000_0000_0000n, + 0x0000_0000_0000_0000n + ], // 8: 2^-(2^7) = 2^-128 = + // 2.9387358770557187699218413430556141945466638919302188037718792657E-39 = + // 0.29387358770557187699218413430556141945466638919302188037718792657e-38 + [ + -38n, + 0x4b3b_4ca8_5a86_c47an, + 0x098a_2240_0000_0000n, + 0x0000_0000_0000_0001n + ], // *** + // 9: 2^-(2^8) = 2^-256 = + // 8.6361685550944446253863518628003995711160003644362813850237034700E-78 = + // 0.86361685550944446253863518628003995711160003644362813850237034700e-77 + [ + -77n, + 0xdd15_fe86_affa_d912n, + 0x49ef_0eb7_13f3_9eben, + 0xaa98_7b6e_6fd2_a002n + ], // 10: 2^-(2^9) = 2^-512 = + // 7.4583407312002067432909653154629338373764715346004068942715183331E-155 = + // 0.74583407312002067432909653154629338373764715346004068942715183331e-154 + [ + -154n, + 0xbeee_fb58_4aff_8603n, + 0xaafb_550f_facf_d8fan, + 0x5ca4_7e4f_88d4_5371n + ], // 11: 2^-(2^10) = 2^-1024 = + // 5.5626846462680034577255817933310101605480399511558295763833185421E-309 = + // 0.55626846462680034577255817933310101605480399511558295763833185421e-308 + [ + -308n, + 0x8e67_9c2f_5e44_ff8fn, + 0x570f_09ea_a7ea_7648n, + 0x5961_db50_c6d2_b888n + ], // *** + // 12: 2^-(2^11) = 2^-2048 = + // 3.0943460473825782754801833699711978538925563038849690459540984582E-617 = + // 0.30943460473825782754801833699711978538925563038849690459540984582e-616 + [ + -616n, + 0x4f37_1b33_99fc_2ab0n, + 0x8170_041c_9feb_05aan, + 0xc7c3_4344_7c75_bcf6n + ], // 13: 2^-(2^12) = 2^-4096 = + // 9.5749774609521853579467310122804202420597417413514981491308464986E-1234 = + // 0.95749774609521853579467310122804202420597417413514981491308464986e-1233 + [ + -1233n, + 0xf51e_9281_7901_3fd3n, + 0xde4b_d12c_de4d_985cn, + 0x4a57_3ca6_f94b_ff14n + ], // 14: 2^-(2^13) = 2^-8192 = + // 
9.1680193377742358281070619602424158297818248567928361864131947526E-2467 = + // 0.91680193377742358281070619602424158297818248567928361864131947526e-2466 + [ + -2466n, + 0xeab3_8812_7bcc_aff7n, + 0x1667_6391_42b9_fbaen, + 0x775e_c999_5e10_39fbn + ], // 15: 2^-(2^14) = 2^-16384 = + // 8.4052578577802337656566945433043815064951983621161781002720680748E-4933 = + // 0.84052578577802337656566945433043815064951983621161781002720680748e-4932 + [ + -4932n, + 0xd72c_b2a9_5c7e_f6ccn, + 0xe81b_f1e8_25ba_7515n, + 0xc2fe_b521_d6cb_5dcdn + ], // 16: 2^-(2^15) = 2^-32768 = + // 7.0648359655776364427774021878587184537374439102725065590941425796E-9865 = + // 0.70648359655776364427774021878587184537374439102725065590941425796e-9864 + [ + -9864n, + 0xb4dc_1be6_6045_02dcn, + 0xd491_079b_8eef_6535n, + 0x578d_3965_d24d_e84dn + ], // *** + // 17: 2^-(2^16) = 2^-65536 = + // 4.9911907220519294656590574792132451973746770423207674161425040336E-19729 = + // 0.49911907220519294656590574792132451973746770423207674161425040336e-19728 + [ + -19728n, + 0x7fc6_447b_ee60_ea43n, + 0x2548_da5c_8b12_5b27n, + 0x5f42_d114_2f41_d349n + ], // *** + // 18: 2^-(2^17) = 2^-131072 = + // 2.4911984823897261018394507280431349807329035271689521242878455599E-39457 = + // 0.24911984823897261018394507280431349807329035271689521242878455599e-39456 + [ + -39456n, + 0x3fc6_5180_f88a_f8fbn, + 0x6a69_15f3_8334_9413n, + 0x063c_3708_b6ce_b291n + ], // *** + // 19: 2^-(2^18) = 2^-262144 = + // 6.2060698786608744707483205572846793091942192651991171731773832448E-78914 = + // 0.62060698786608744707483205572846793091942192651991171731773832448e-78913 + [ + -78913n, + 0x9ee0_197c_8dcd_55bfn, + 0x2b2b_9b94_2c38_f4a2n, + 0x0f8b_a634_e9c7_06aen + ], // 20: 2^-(2^19) = 2^-524288 = + // 3.8515303338821801176537443725392116267291403078581314096728076497E-157827 = + // 0.38515303338821801176537443725392116267291403078581314096728076497e-157826 + [ + -157826n, + 0x6299_63a2_5b8b_2d79n, + 0xd00b_9d22_86f7_0876n, + 0xe970_0470_0c36_44fcn 
+ ], // *** + // 21: 2^-(2^20) = 2^-1048576 = + // 1.4834285912814577854404052243709225888043963245995136935174170977E-315653 = + // 0.14834285912814577854404052243709225888043963245995136935174170977e-315652 + [ + -315652n, + 0x25f9_cc30_8cee_f4f3n, + 0x40f1_9543_911a_4546n, + 0xa2cd_3894_52cf_c366n + ], // 22: 2^-(2^21) = 2^-2097152 = + // 2.2005603854312903332428997579002102976620485709683755186430397089E-631306 = + // 0.22005603854312903332428997579002102976620485709683755186430397089e-631305 + [ + -631305n, + 0x3855_97b0_d47e_76b8n, + 0x1b9f_67e1_03bf_2329n, + 0xc311_9848_5959_85f7n + ], // 23: 2^-(2^22) = 2^-4194304 = + // 4.8424660099295090687215589310713586524081268589231053824420510106E-1262612 = + // 0.48424660099295090687215589310713586524081268589231053824420510106e-1262611 + [ + -1262611n, + 0x7bf7_95d2_76c1_2f66n, + 0x66a6_1d62_a446_659an, + 0xa1a4_d73b_ebf0_93d5n + ], // *** + // 24: 2^-(2^23) = 2^-8388608 = + // 2.3449477057322620222546775527242476219043877555386221929831430440E-2525223 = + // 0.23449477057322620222546775527242476219043877555386221929831430440e-2525222 + [ + -2525222n, + 0x3c07_d96a_b1ed_7799n, + 0xcb73_55c2_2cc0_5ac0n, + 0x4ffc_0ab7_3b1f_6a49n + ], // *** + // 25: 2^-(2^24) = 2^-16777216 = + // 5.4987797426189993226257377747879918011694025935111951649826798628E-5050446 = + // 0.54987797426189993226257377747879918011694025935111951649826798628e-5050445 + [ + -5050445n, + 0x8cc4_cd8c_3ede_fb9an, + 0x6c8f_f86a_90a9_7e0cn, + 0x166c_fddb_f98b_71bfn + ], // *** + // 26: 2^-(2^25) = 2^-33554432 = + // 3.0236578657837068435515418409027857523343464783010706819696074665E-10100891 = + // 0.30236578657837068435515418409027857523343464783010706819696074665e-10100890 + [ + -10100890n, + 0x4d67_d81c_c88e_1228n, + 0x1d7c_fb06_666b_79b3n, + 0x7b91_6728_aaa4_e70dn + ], // *** + // 27: 2^-(2^26) = 2^-67108864 = + // 9.1425068893156809483320844568740945600482370635012633596231964471E-20201782 = + // 
0.91425068893156809483320844568740945600482370635012633596231964471e-20201781 + [ + -20201781n, + 0xea0c_5549_4e7a_552dn, + 0xb88c_b948_4bb8_6c61n, + 0x8d44_893c_610b_b7dfn + ], // *** + // 28: 2^-(2^27) = 2^-134217728 = + // 8.3585432221184688810803924874542310018191301711943564624682743545E-40403563 = + // 0.83585432221184688810803924874542310018191301711943564624682743545e-40403562 + [ + -40403562n, + 0xd5fa_8c82_1ec0_c24an, + 0xa80e_46e7_64e0_f8b0n, + 0xa727_6bfa_432f_ac7en + ], // 29: 2^-(2^28) = 2^-268435456 = + // 6.9865244796022595809958912202005005328020601847785697028605460277E-80807125 = + // 0.69865244796022595809958912202005005328020601847785697028605460277e-80807124 + [ + -80807124n, + 0xb2da_e307_426f_6791n, + 0xc970_b82f_58b1_2918n, + 0x0472_592f_7f39_190en + ], // 30: 2^-(2^29) = 2^-536870912 = + // 4.8811524304081624052042871019605298977947353140996212667810837790E-161614249 = + // 0.48811524304081624052042871019605298977947353140996212667810837790e-161614248 + // {-161614248, 0x7cf5_1edd_8a15_f1c9L, 0x656d_ab34_98f8_e697L, 0x12da_a2a8_0e53_c809L}, + [ + -161614248n, + 0x7cf5_1edd_8a15_f1c9n, + 0x656d_ab34_98f8_e697n, + 0x12da_a2a8_0e53_c807n + ], // 31: 2^-(2^30) = 2^-1073741824 = + // 2.3825649048879510732161697817326745204151961255592397879550237608E-323228497 = + // 0.23825649048879510732161697817326745204151961255592397879550237608e-323228496 + [ + -323228496n, + 0x3cfe_609a_b588_3c50n, + 0xbec8_b5d2_2b19_8871n, + 0xe184_7770_3b46_22b4n + ], // 32: 2^-(2^31) = 2^-2147483648 = + // 5.6766155260037313438164181629489689531186932477276639365773003794E-646456994 = + // 0.56766155260037313438164181629489689531186932477276639365773003794e-646456993 + [ + -646456993n, + 0x9152_447b_9d7c_da9an, + 0x3b4d_3f61_10d7_7aadn, + 0xfa81_bad1_c394_adb4n + ] + ]; + // Buffers used internally + // The order of words in the arrays is big-endian: the highest part is in buff[0] (in buff[1] for + // buffers of 10 words) + + buffer4x64B: bigint[] = new 
Array(4).fill(0n); + buffer6x32A: bigint[] = new Array(6).fill(0n); + buffer6x32B: bigint[] = new Array(6).fill(0n); + buffer6x32C: bigint[] = new Array(6).fill(0n); + buffer12x32: bigint[] = new Array(12).fill(0n); + parse(digits: number[], exp10: number): void { + exp10 += digits.length - 1; // digits is viewed as x.yyy below. + this.exponent = 0; + this.mantHi = 0n; + this.mantLo = 0n; + // Finds numeric value of the decimal mantissa + let mantissa: bigint[] = this.buffer6x32C; + let exp10Corr: number = this.parseMantissa(digits, mantissa); + if (exp10Corr == 0 && this.isEmpty(mantissa)) { + // Mantissa == 0 + return; + } + // takes account of the point position in the mant string and possible carry as a result of + // round-up (like 9.99e1 -> 1.0e2) + exp10 += exp10Corr; + if (exp10 < QuadrupleBuilder.MIN_EXP10) { + return; + } + if (exp10 > QuadrupleBuilder.MAX_EXP10) { + this.exponent = Number(QuadrupleBuilder.EXPONENT_OF_INFINITY); + return; + } + let exp2: number = this.findBinaryExponent(exp10, mantissa); + // Finds binary mantissa and possible exponent correction. Fills the fields. 
+ this.findBinaryMantissa(exp10, exp2, mantissa); + } + parseMantissa(digits: number[], mantissa: bigint[]): number { + for (let i = 0; i < 6; i++) { + mantissa[i] = 0n; + } + // Skip leading zeroes + let firstDigit: number = 0; + while (firstDigit < digits.length && digits[firstDigit] == 0) { + firstDigit += 1; + } + if (firstDigit == digits.length) { + return 0; // All zeroes + } + let expCorr: number = -firstDigit; + // Limit the string length to avoid unnecessary fuss + if (digits.length - firstDigit > QuadrupleBuilder.MAX_MANTISSA_LENGTH) { + let carry: boolean = digits[QuadrupleBuilder.MAX_MANTISSA_LENGTH] >= 5; // The highest digit to be truncated + let truncated: number[] = new Array( + QuadrupleBuilder.MAX_MANTISSA_LENGTH + ).fill(0); + for (let i = 0; i < QuadrupleBuilder.MAX_MANTISSA_LENGTH; i++) { + truncated[i] = digits[i + firstDigit]; + } + if (carry) { + // Round-up: add carry + expCorr += this.addCarry(truncated); // May add an extra digit in front of it (99..99 -> 100) + } + digits = truncated; + firstDigit = 0; + } + for (let i = digits.length - 1; i >= firstDigit; i--) { + // digits, starting from the last + mantissa[0] |= BigInt(digits[i]) << 32n; + this.divBuffBy10(mantissa); + } + return expCorr; + } + // Divides the unpacked value stored in the given buffer by 10 + // @param buffer contains the unpacked value to divide (32 least significant bits are used) + divBuffBy10(buffer: bigint[]): void { + let maxIdx: number = buffer.length; + // big/endian + for (let i = 0; i < maxIdx; i++) { + let r: bigint = buffer[i] % 10n; + buffer[i] = buffer[i] / 10n; + if (i + 1 < maxIdx) { + buffer[i + 1] += r << 32n; + } + } + } + // Checks if the buffer is empty (contains nothing but zeros) + // @param buffer the buffer to check + // @return {@code true} if the buffer is empty, {@code false} otherwise + isEmpty(buffer: bigint[]): boolean { + for (let i = 0; i < buffer.length; i++) { + if (buffer[i] != 0n) { + return false; + } + } + return true; + } + // 
Adds one to a decimal number represented as a sequence of decimal digits. propagates carry as + // needed, so that {@code addCarryTo("6789") = "6790", addCarryTo("9999") = "10000"} etc. + // @return 1 if an additional higher "1" was added in front of the number as a result of + // rounding-up, 0 otherwise + addCarry(digits: number[]): number { + for (let i = digits.length - 1; i >= 0; i--) { + // starting with the lowest digit + let c: number = digits[i]; + if (c == 9) { + digits[i] = 0; + } else { + digits[i] = digits[i] + 1; + return 0; + } + } + digits[0] = 1; + return 1; + } + // Finds binary exponent, using decimal exponent and mantissa.
+ // exp2 = exp10 * log2(10) + log2(mant)
+ // @param exp10 decimal exponent + // @param mantissa array of longs containing decimal mantissa (divided by 10) + // @return found value of binary exponent + findBinaryExponent(exp10: number, mantissa: bigint[]): number { + let mant10: bigint = (mantissa[0] << 31n) | (mantissa[1] >> 1n); // Higher 63 bits of the mantissa, in range + // 0x0CC..CCC -- 0x7FF..FFF (2^63/10 -- 2^63-1) + // decimal value of the mantissa in range 1.0..9.9999... + let mant10d: number = Number(mant10) / QuadrupleBuilder.TWO_POW_63_DIV_10; + return Math.floor( + Number(exp10) * QuadrupleBuilder.LOG2_10 + this.log2(mant10d) + ); // Binary exponent + } + // Calculates log2 of the given x + // @param x argument that can't be 0 + // @return the value of log2(x) + log2(x: number): number { + // x can't be 0 + return QuadrupleBuilder.LOG2_E * Math.log(x); + } + findBinaryMantissa(exp10: number, exp2: number, mantissa: bigint[]): void { + // pow(2, -exp2): division by 2^exp2 is multiplication by 2^(-exp2) actually + let powerOf2: bigint[] = this.buffer4x64B; + this.powerOfTwo(-exp2, powerOf2); + let product: bigint[] = this.buffer12x32; // use it for the product (M * 10^E / 2^e) + this.multUnpacked6x32byPacked(mantissa, powerOf2, product); // product in buff_12x32 + this.multBuffBy10(product); // "Quasidecimals" are numbers divided by 10 + // The powerOf2[0] is stored as an unsigned value + if (BigInt(powerOf2[0]) != BigInt(-exp10)) { + // For some combinations of exp2 and exp10, additional multiplication needed + // (see mant2_from_M_E_e.xls) + this.multBuffBy10(product); + } + // compensate possible inaccuracy of logarithms used to compute exp2 + exp2 += this.normalizeMant(product); + exp2 += QuadrupleBuilder.EXPONENT_BIAS; // add bias + // For subnormal values, exp2 <= 0. We just return 0 for them, as they are + // far from any range we are interested in. 
+ if (exp2 <= 0) { + return; + } + exp2 += this.roundUp(product); // round up, may require exponent correction + if (BigInt(exp2) >= QuadrupleBuilder.EXPONENT_OF_INFINITY) { + this.exponent = Number(QuadrupleBuilder.EXPONENT_OF_INFINITY); + } else { + this.exponent = Number(exp2); + this.mantHi = ((product[0] << 32n) + product[1]) & 0xffffffffffffffffn; + this.mantLo = ((product[2] << 32n) + product[3]) & 0xffffffffffffffffn; + } + } + // Calculates the required power and returns the result in the quasidecimal format (an array of + // longs, where result[0] is the decimal exponent of the resulting value, and result[1] -- + // result[3] contain 192 bits of the mantissa divided by ten (so that 8 looks like + //
{@code {1, 0xCCCC_.._CCCCL, 0xCCCC_.._CCCCL, 0xCCCC_.._CCCDL}}
+ // uses arrays buffer4x64B, buffer6x32A, buffer6x32B, buffer12x32, + // @param exp the power to raise 2 to + // @param power (result) the value of {@code2^exp} + powerOfTwo(exp: number, power: bigint[]): void { + if (exp == 0) { + this.array_copy(QuadrupleBuilder.POS_POWERS_OF_2[0], power); + return; + } + // positive powers of 2 (2^0, 2^1, 2^2, 2^4, 2^8 ... 2^(2^31) ) + let powers: bigint[][] = QuadrupleBuilder.POS_POWERS_OF_2; + if (exp < 0) { + exp = -exp; + powers = QuadrupleBuilder.NEG_POWERS_OF_2; // positive powers of 2 (2^0, 2^-1, 2^-2, 2^-4, 2^-8 ... 2^30) + } + // 2^31 = 0x8000_0000L; a single bit that will be shifted right at every iteration + let currPowOf2: number = QuadrupleBuilder.POW_2_31; + let idx: number = 32; // Index in the table of powers + let first_power: boolean = true; + // if exp = b31 * 2^31 + b30 * 2^30 + .. + b0 * 2^0, where b0..b31 are the values of the bits in + // exp, then 2^exp = 2^b31 * 2^b30 ... * 2^b0. Find the product, using a table of powers of 2. + while (exp > 0) { + if (exp >= currPowOf2) { + // the current bit in the exponent is 1 + if (first_power) { + // 4 longs, power[0] -- decimal (?) exponent, power[1..3] -- 192 bits of mantissa + this.array_copy(powers[idx], power); + first_power = false; + } else { + // Multiply by the corresponding power of 2 + this.multPacked3x64_AndAdjustExponent(power, powers[idx], power); + } + exp -= currPowOf2; + } + idx -= 1; + currPowOf2 = currPowOf2 * 0.5; // Note: this is exact + } + } + // Copies from into to. + array_copy(source: bigint[], dest: bigint[]): void { + for (let i = 0; i < dest.length; i++) { + dest[i] = source[i]; + } + } + // Multiplies two quasidecimal numbers contained in buffers of 3 x 64 bits with exponents, puts + // the product to buffer4x64B
+ // and returns it. Both each of the buffers and the product contain 4 longs - exponent and 3 x 64 + // bits of mantissa. If the higher word of mantissa of the product is less than + // 0x1999_9999_9999_9999L (i.e. mantissa is less than 0.1) multiplies mantissa by 10 and adjusts + // the exponent respectively. + multPacked3x64_AndAdjustExponent( + factor1: bigint[], + factor2: bigint[], + result: bigint[] + ): void { + this.multPacked3x64_simply(factor1, factor2, this.buffer12x32); + let expCorr: number = this.correctPossibleUnderflow(this.buffer12x32); + this.pack_6x32_to_3x64(this.buffer12x32, result); + // result[0] is a signed int64 value stored in an uint64 + result[0] = factor1[0] + factor2[0] + BigInt(expCorr); // product.exp = f1.exp + f2.exp + } + // Multiplies mantissas of two packed quasidecimal values (each is an array of 4 longs, exponent + + // 3 x 64 bits of mantissa) Returns the product as unpacked buffer of 12 x 32 (12 x 32 bits of + // product) + // uses arrays buffer6x32A, buffer6x32B + // @param factor1 an array of longs containing factor 1 as packed quasidecimal + // @param factor2 an array of longs containing factor 2 as packed quasidecimal + // @param result an array of 12 longs filled with the product of mantissas + multPacked3x64_simply( + factor1: bigint[], + factor2: bigint[], + result: bigint[] + ): void { + for (let i = 0; i < result.length; i++) { + result[i] = 0n; + } + // TODO2 19.01.16 21:23:06 for the next version -- rebuild the table of powers to make the + // numbers unpacked, to avoid packing/unpacking + this.unpack_3x64_to_6x32(factor1, this.buffer6x32A); + this.unpack_3x64_to_6x32(factor2, this.buffer6x32B); + for (let i = 6 - 1; i >= 0; i--) { + // compute partial 32-bit products + for (let j = 6 - 1; j >= 0; j--) { + let part: bigint = this.buffer6x32A[i] * this.buffer6x32B[j]; + result[j + i + 1] = + (result[j + i + 1] + (part & QuadrupleBuilder.LOWER_32_BITS)) & + 0xffffffffffffffffn; + result[j + i] = (result[j + i] + 
(part >> 32n)) & 0xffffffffffffffffn; + } + } + // Carry higher bits of the product to the lower bits of the next word + for (let i = 12 - 1; i >= 1; i--) { + result[i - 1] = + (result[i - 1] + (result[i] >> 32n)) & 0xffffffffffffffffn; + result[i] &= QuadrupleBuilder.LOWER_32_BITS; + } + } + // Corrects possible underflow of the decimal mantissa, passed in in the {@code mantissa}, by + // multiplying it by a power of ten. The corresponding value to adjust the decimal exponent is + // returned as the result + // @param mantissa a buffer containing the mantissa to be corrected + // @return a corrective (addition) that is needed to adjust the decimal exponent of the number + correctPossibleUnderflow(mantissa: bigint[]): number { + let expCorr: number = 0; + while (this.isLessThanOne(mantissa)) { + // Underflow + this.multBuffBy10(mantissa); + expCorr -= 1; + } + return expCorr; + } + // Checks if the unpacked quasidecimal value held in the given buffer is less than one (in this + // format, one is represented as { 0x1999_9999L, 0x9999_9999L, 0x9999_9999L,...} + // @param buffer a buffer containing the value to check + // @return {@code true}, if the value is less than one + isLessThanOne(buffer: bigint[]): boolean { + if (buffer[0] < 0x1999_9999n) { + return true; + } + if (buffer[0] > 0x1999_9999n) { + return false; + } + // A note regarding the coverage: + // Multiplying a 128-bit number by another 192-bit number, + // as well as multiplying of two 192-bit numbers, + // can never produce 320 (or 384 bits, respectively) of 0x1999_9999L, 0x9999_9999L, + for (let i = 1; i < buffer.length; i++) { + // so this loop can't be covered entirely + if (buffer[i] < 0x9999_9999n) { + return true; + } + if (buffer[i] > 0x9999_9999n) { + return false; + } + } + // and it can never reach this point in real life. + return false; // Still Java requires the return statement here. + } + // Multiplies unpacked 192-bit value by a packed 192-bit factor
+ // uses static arrays buffer6x32B + // @param factor1 a buffer containing unpacked quasidecimal mantissa (6 x 32 bits) + // @param factor2 an array of 4 longs containing packed quasidecimal power of two + // @param product a buffer of at least 12 longs to hold the product + multUnpacked6x32byPacked( + factor1: bigint[], + factor2: bigint[], + product: bigint[] + ): void { + for (let i = 0; i < product.length; i++) { + product[i] = 0n; + } + let unpacked2: bigint[] = this.buffer6x32B; + this.unpack_3x64_to_6x32(factor2, unpacked2); // It's the powerOf2, with exponent in 0'th word + let maxFactIdx: number = factor1.length; + for (let i = maxFactIdx - 1; i >= 0; i--) { + // compute partial 32-bit products + for (let j = maxFactIdx - 1; j >= 0; j--) { + let part: bigint = factor1[i] * unpacked2[j]; + product[j + i + 1] = + (product[j + i + 1] + (part & QuadrupleBuilder.LOWER_32_BITS)) & + 0xffffffffffffffffn; + product[j + i] = (product[j + i] + (part >> 32n)) & 0xffffffffffffffffn; + } + } + // Carry higher bits of the product to the lower bits of the next word + for (let i = 12 - 1; i >= 1; i--) { + product[i - 1] = + (product[i - 1] + (product[i] >> 32n)) & 0xffffffffffffffffn; + product[i] &= QuadrupleBuilder.LOWER_32_BITS; + } + } + // Multiplies the unpacked value stored in the given buffer by 10 + // @param buffer contains the unpacked value to multiply (32 least significant bits are used) + multBuffBy10(buffer: bigint[]): void { + let maxIdx: number = buffer.length - 1; + buffer[0] &= QuadrupleBuilder.LOWER_32_BITS; + buffer[maxIdx] *= 10n; + for (let i = maxIdx - 1; i >= 0; i--) { + buffer[i] = + (buffer[i] * 10n + (buffer[i + 1] >> 32n)) & 0xffffffffffffffffn; + buffer[i + 1] &= QuadrupleBuilder.LOWER_32_BITS; + } + } + // Makes sure that the (unpacked) mantissa is normalized, + // i.e. 
buff[0] contains 1 in bit 32 (the implied integer part) and higher 32 of mantissa in bits 31..0, + // and buff[1]..buff[4] contain other 96 bits of mantissa in their lower halves: + //
0x0000_0001_XXXX_XXXXL, 0x0000_0000_XXXX_XXXXL...
+ // If necessary, divides the mantissa by appropriate power of 2 to make it normal. + // @param mantissa a buffer containing unpacked mantissa + // @return if the mantissa was not normal initially, a correction that should be added to the result's exponent, or 0 otherwise + normalizeMant(mantissa: bigint[]): number { + let expCorr: number = 31 - QuadrupleBuilder.clz64(mantissa[0]); + if (expCorr != 0) { + this.divBuffByPower2(mantissa, expCorr); + } + return expCorr; + } + // Rounds up the contents of the unpacked buffer to 128 bits by adding unity one bit lower than + // the lowest of these 128 bits. If carry propagates up to bit 33 of buff[0], shifts the buffer + // rightwards to keep it normalized. + // @param mantissa the buffer to get rounded + // @return 1 if the buffer was shifted, 0 otherwise + roundUp(mantissa: bigint[]): number { + // due to the limited precision of the power of 2, a number with exactly half LSB in its + // mantissa + // (i.e that would have 0x8000_0000_0000_0000L in bits 128..191 if it were computed precisely), + // after multiplication by this power of 2, may get erroneous bits 185..191 (counting from the + // MSB), + // taking a value from + // 0xXXXX_XXXX_XXXX_XXXXL 0xXXXX_XXXX_XXXX_XXXXL 0x7FFF_FFFF_FFFF_FFD8L. + // to + // 0xXXXX_XXXX_XXXX_XXXXL 0xXXXX_XXXX_XXXX_XXXXL 0x8000_0000_0000_0014L, or something alike. + // To round it up, we first add + // 0x0000_0000_0000_0000L 0x0000_0000_0000_0000L 0x0000_0000_0000_0028L, to turn it into + // 0xXXXX_XXXX_XXXX_XXXXL 0xXXXX_XXXX_XXXX_XXXXL 0x8000_0000_0000_00XXL, + // and then add + // 0x0000_0000_0000_0000L 0x0000_0000_0000_0000L 0x8000_0000_0000_0000L, to provide carry to + // higher bits. 
+ this.addToBuff(mantissa, 5, 100n); // to compensate possible inaccuracy + this.addToBuff(mantissa, 4, 0x8000_0000n); // round-up, if bits 128..159 >= 0x8000_0000L + if ((mantissa[0] & (QuadrupleBuilder.HIGHER_32_BITS << 1n)) != 0n) { + // carry's got propagated beyond the highest bit + this.divBuffByPower2(mantissa, 1); + return 1; + } + return 0; + } + // converts 192 most significant bits of the mantissa of a number from an unpacked quasidecimal + // form (where 32 least significant bits only used) to a packed quasidecimal form (where buff[0] + // contains the exponent and buff[1]..buff[3] contain 3 x 64 = 192 bits of mantissa) + // @param unpackedMant a buffer of at least 6 longs containing an unpacked value + // @param result a buffer of at least 4 long to hold the packed value + // @return packedQD192 with words 1..3 filled with the packed mantissa. packedQD192[0] is not + // affected. + pack_6x32_to_3x64(unpackedMant: bigint[], result: bigint[]): void { + result[1] = (unpackedMant[0] << 32n) + unpackedMant[1]; + result[2] = (unpackedMant[2] << 32n) + unpackedMant[3]; + result[3] = (unpackedMant[4] << 32n) + unpackedMant[5]; + } + // Unpacks the mantissa of a 192-bit quasidecimal (4 longs: exp10, mantHi, mantMid, mantLo) to a + // buffer of 6 longs, where the least significant 32 bits of each long contains respective 32 bits + // of the mantissa + // @param qd192 array of 4 longs containing the number to unpack + // @param buff_6x32 buffer of 6 long to hold the unpacked mantissa + unpack_3x64_to_6x32(qd192: bigint[], buff_6x32: bigint[]): void { + buff_6x32[0] = qd192[1] >> 32n; + buff_6x32[1] = qd192[1] & QuadrupleBuilder.LOWER_32_BITS; + buff_6x32[2] = qd192[2] >> 32n; + buff_6x32[3] = qd192[2] & QuadrupleBuilder.LOWER_32_BITS; + buff_6x32[4] = qd192[3] >> 32n; + buff_6x32[5] = qd192[3] & QuadrupleBuilder.LOWER_32_BITS; + } + // Divides the contents of the buffer by 2^exp2
+ // (shifts the buffer rightwards by exp2 if the exp2 is positive, and leftwards if it's negative), + // keeping it unpacked (only lower 32 bits of each element are used, except the buff[0] whose + // higher half is intended to contain integer part) + // @param buffer the buffer to divide + // @param exp2 the exponent of the power of two to divide by, expected to be + divBuffByPower2(buffer: bigint[], exp2: number): void { + let maxIdx: number = buffer.length - 1; + let backShift: bigint = BigInt(32 - Math.abs(exp2)); + if (exp2 > 0) { + // Shift to the right + let exp2Shift: bigint = BigInt(exp2); + for (let i = maxIdx + 1 - 1; i >= 1; i--) { + buffer[i] = + (buffer[i] >> exp2Shift) | + ((buffer[i - 1] << backShift) & QuadrupleBuilder.LOWER_32_BITS); + } + buffer[0] = buffer[0] >> exp2Shift; // Preserve the high half of buff[0] + } else if (exp2 < 0) { + // Shift to the left + let exp2Shift: bigint = BigInt(-exp2); + buffer[0] = + ((buffer[0] << exp2Shift) | (buffer[1] >> backShift)) & + 0xffffffffffffffffn; // Preserve the high half of buff[0] + for (let i = 1; i < maxIdx; i++) { + buffer[i] = + (((buffer[i] << exp2Shift) & QuadrupleBuilder.LOWER_32_BITS) | + (buffer[i + 1] >> backShift)) & + 0xffffffffffffffffn; + } + buffer[maxIdx] = + (buffer[maxIdx] << exp2Shift) & QuadrupleBuilder.LOWER_32_BITS; + } + } + // Adds the summand to the idx'th word of the unpacked value stored in the buffer + // and propagates carry as necessary + // @param buff the buffer to add the summand to + // @param idx the index of the element to which the summand is to be added + // @param summand the summand to add to the idx'th element of the buffer + addToBuff(buff: bigint[], idx: number, summand: bigint): void { + let maxIdx: number = idx; + buff[maxIdx] = (buff[maxIdx] + summand) & 0xffffffffffffffffn; // Big-endian, the lowest word + for (let i = maxIdx + 1 - 1; i >= 1; i--) { + // from the lowest word upwards, except the highest + if ((buff[i] & QuadrupleBuilder.HIGHER_32_BITS) 
!= 0n) { + buff[i] &= QuadrupleBuilder.LOWER_32_BITS; + buff[i - 1] += 1n; + } else { + break; + } + } + } + static clz64(x: bigint): number { + let high = Number(x >> 32n); + return high == 0 + ? 32 + Math.clz32(Number(BigInt.asUintN(32, x))) + : Math.clz32(high); + } +} diff --git a/packages/firestore/test/integration/api/database.test.ts b/packages/firestore/test/integration/api/database.test.ts index ce5a3d34eae..76475f23b4e 100644 --- a/packages/firestore/test/integration/api/database.test.ts +++ b/packages/firestore/test/integration/api/database.test.ts @@ -77,7 +77,8 @@ import { MaxKey, MinKey, RegexValue, - BsonTimestamp + BsonTimestamp, + Decimal128Value } from '../util/firebase_export'; import { apiDescribe, @@ -2724,6 +2725,7 @@ apiDescribe('Database', persistence => { binary: new BsonBinaryData(1, new Uint8Array([1, 2, 3])), objectId: new BsonObjectId('507f191e810c19729de860ea'), int32: new Int32Value(1), + decimal128: new Decimal128Value('1.2e3'), min: MinKey.instance(), max: MaxKey.instance(), regex: new RegexValue('^foo', 'i') @@ -2746,6 +2748,9 @@ apiDescribe('Database', persistence => { .isEqual(new BsonObjectId('507f191e810c19729de860ea')) ).to.be.true; expect(snapshot.get('int32').isEqual(new Int32Value(2))).to.be.true; + expect( + snapshot.get('decimal128').isEqual(new Decimal128Value('1.2e3')) + ).to.be.true; expect(snapshot.get('min') === MinKey.instance()).to.be.true; expect(snapshot.get('max') === MaxKey.instance()).to.be.true; expect( @@ -2777,6 +2782,7 @@ apiDescribe('Database', persistence => { binary: new BsonBinaryData(1, new Uint8Array([1, 2, 3])), objectId: new BsonObjectId('507f191e810c19729de860ea'), int32: new Int32Value(1), + decimal128: new Decimal128Value('1.2e3'), regex: new RegexValue('^foo', 'i'), timestamp: new BsonTimestamp(1, 2), min: MinKey.instance(), @@ -2795,6 +2801,9 @@ apiDescribe('Database', persistence => { .isEqual(new BsonObjectId('507f191e810c19729de860ea')) ).to.be.true; expect(snapshot.get('int32').isEqual(new 
Int32Value(1))).to.be.true; + expect( + snapshot.get('decimal128').isEqual(new Decimal128Value('1.2e3')) + ).to.be.true; expect(snapshot.get('regex').isEqual(new RegexValue('^foo', 'i'))).to .be.true; expect(snapshot.get('timestamp').isEqual(new BsonTimestamp(1, 2))).to @@ -2879,6 +2888,7 @@ apiDescribe('Database', persistence => { ); let snapshot = await getDocs(orderedQuery); + expect(toDataArray(snapshot)).to.deep.equal([ testDocs['c'], testDocs['b'] @@ -2911,6 +2921,126 @@ apiDescribe('Database', persistence => { ); }); + it('can filter and order Decimal128 values', async () => { + const testDocs = { + a: { key: new Decimal128Value('-1.2e3') }, + b: { key: new Decimal128Value('0') }, + c: { key: new Decimal128Value('1.2e3') }, + d: { key: new Decimal128Value('NaN') }, + e: { key: new Decimal128Value('-Infinity') }, + f: { key: new Decimal128Value('Infinity') } + }; + return withTestProjectIdAndCollectionSettings( + persistence, + NIGHTLY_PROJECT_ID, + settings, + testDocs, + async coll => { + // Populate the cache with all docs first + await getDocs(coll); + + let orderedQuery = query( + coll, + where('key', '>', new Decimal128Value('-1.2e3')), + orderBy('key', 'desc') + ); + + let snapshot = await getDocs(orderedQuery); + expect(toDataArray(snapshot)).to.deep.equal([ + testDocs['f'], + testDocs['c'], + testDocs['b'] + ]); + + await assertSDKQueryResultsConsistentWithBackend( + orderedQuery, + testDocs, + toIds(snapshot) + ); + + orderedQuery = query( + coll, + where('key', '!=', new Decimal128Value('0.0')), + orderBy('key', 'desc') + ); + + snapshot = await getDocs(orderedQuery); + expect(toDataArray(snapshot)).to.deep.equal([ + testDocs['f'], + testDocs['c'], + testDocs['a'], + testDocs['e'], + testDocs['d'] + ]); + await assertSDKQueryResultsConsistentWithBackend( + orderedQuery, + testDocs, + toIds(snapshot) + ); + + orderedQuery = query( + coll, + where('key', '>', new Decimal128Value('-1.2e-3')), + orderBy('key', 'desc') + ); + + snapshot = await 
getDocs(orderedQuery); + expect(toDataArray(snapshot)).to.deep.equal([ + testDocs['f'], + testDocs['c'], + testDocs['b'] + ]); + await assertSDKQueryResultsConsistentWithBackend( + orderedQuery, + testDocs, + toIds(snapshot) + ); + + orderedQuery = query( + coll, + where('key', '!=', new Decimal128Value('NaN')) + ); + snapshot = await getDocs(orderedQuery); + expect(toDataArray(snapshot)).to.deep.equal([ + testDocs['e'], + testDocs['a'], + testDocs['b'], + testDocs['c'], + testDocs['f'] + ]); + await assertSDKQueryResultsConsistentWithBackend( + orderedQuery, + testDocs, + toIds(snapshot) + ); + + orderedQuery = query( + coll, + where('key', 'not-in', [ + new Decimal128Value('1.2e3'), + new Decimal128Value('Infinity'), + new Decimal128Value('NaN') + ]), + orderBy('key', 'desc') + ); + // Note: server is sending NaN incorrectly, but the SDK NotInFilter + // `matches` function gracefully handles it and removes the incorrect + // doc "d". + snapshot = await getDocs(orderedQuery); + expect(toDataArray(snapshot)).to.deep.equal([ + testDocs['b'], + testDocs['a'], + testDocs['e'] + ]); + await assertSDKQueryResultsConsistentWithBackend( + orderedQuery, + testDocs, + toIds(snapshot) + ); + } + ); + }); + it('can filter and order Timestamp values', async () => { const testDocs = { a: { key: new BsonTimestamp(1, 1) }, @@ -3303,6 +3433,154 @@ apiDescribe('Database', persistence => { ); }); + it('can filter and order numerical values ', async () => { + const testDocs = { + a: { key: new Decimal128Value('-1.2e3') }, // -1200 + b: { key: new Int32Value(0) }, + c: { key: new Decimal128Value('1') }, + d: { key: new Int32Value(1) }, + e: { key: 1 }, + f: { key: 1.0 }, + g: { key: new Decimal128Value('1.2e-3') }, // 0.0012 + h: { key: new Int32Value(2) }, + i: { key: new Decimal128Value('NaN') }, + j: { key: new Decimal128Value('-Infinity') }, + k: { key: NaN }, + l: { key: Infinity } + }; + + return withTestProjectIdAndCollectionSettings( + persistence, + NIGHTLY_PROJECT_ID, + 
settings, + testDocs, + async coll => { + // Pre-populate the cache with all docs + await getDocs(coll); + + let orderedQuery = query(coll, orderBy('key', 'desc')); + let snapshot = await getDocs(orderedQuery); + expect(toIds(snapshot)).to.deep.equal([ + 'l', // Infinity + 'h', // 2 + 'f', // 1.0 + 'e', // 1 + 'd', // 1 + 'c', // 1 + 'g', // 0.0012 + 'b', // 0 + 'a', // -1200 + 'j', // -Infinity + 'k', // NaN + 'i' // NaN + ]); + await assertSDKQueryResultsConsistentWithBackend( + orderedQuery, + testDocs, + toIds(snapshot) + ); + + orderedQuery = query( + coll, + orderBy('key', 'desc'), + where('key', '!=', new Decimal128Value('1.0')) + ); + snapshot = await getDocs(orderedQuery); + expect(toIds(snapshot)).to.deep.equal([ + 'l', + 'h', + 'g', + 'b', + 'a', + 'j', + 'k', + 'i' + ]); + await assertSDKQueryResultsConsistentWithBackend( + orderedQuery, + testDocs, + toIds(snapshot) + ); + + orderedQuery = query( + coll, + orderBy('key', 'desc'), + where('key', '==', 1) + ); + snapshot = await getDocs(orderedQuery); + expect(toIds(snapshot)).to.deep.equal(['f', 'e', 'd', 'c']); + await assertSDKQueryResultsConsistentWithBackend( + orderedQuery, + testDocs, + toIds(snapshot) + ); + } + ); + }); + + it('decimal128 values with no 2s complement representation', async () => { + const testDocs = { + a: { key: new Decimal128Value('-1.1e-3') }, // -0.0011 + b: { key: new Decimal128Value('1.1') }, + c: { key: 1.1 }, + d: { key: 1.0 }, + e: { key: new Decimal128Value('1.1e-3') } // 0.0011 + }; + + return withTestProjectIdAndCollectionSettings( + persistence, + NIGHTLY_PROJECT_ID, + settings, + testDocs, + async coll => { + // Pre-populate the cache with all docs + await getDocs(coll); + + let orderedQuery = query( + coll, + where('key', '==', new Decimal128Value('1.1')) + ); + let snapshot = await getDocs(orderedQuery); + expect(toIds(snapshot)).to.deep.equal(['b']); + await assertSDKQueryResultsConsistentWithBackend( + orderedQuery, + testDocs, + toIds(snapshot) + ); + + 
orderedQuery = query( + coll, + where('key', '!=', new Decimal128Value('1.1')) + ); + snapshot = await getDocs(orderedQuery); + expect(toIds(snapshot)).to.deep.equal(['a', 'e', 'd', 'c']); + await assertSDKQueryResultsConsistentWithBackend( + orderedQuery, + testDocs, + toIds(snapshot) + ); + + orderedQuery = query(coll, where('key', '==', 1.1)); + snapshot = await getDocs(orderedQuery); + expect(toIds(snapshot)).to.deep.equal(['c']); + await assertSDKQueryResultsConsistentWithBackend( + orderedQuery, + testDocs, + toIds(snapshot) + ); + + orderedQuery = query(coll, where('key', '!=', 1.1)); + snapshot = await getDocs(orderedQuery); + expect(toIds(snapshot)).to.deep.equal(['a', 'e', 'd', 'b']); + await assertSDKQueryResultsConsistentWithBackend( + orderedQuery, + testDocs, + toIds(snapshot) + ); + } + ); + }); + it('can listen to documents with bson types', async () => { const testDocs = { a: { key: MaxKey.instance() }, @@ -3310,7 +3588,8 @@ apiDescribe('Database', persistence => { c: { key: new BsonTimestamp(1, 2) }, d: { key: new BsonObjectId('507f191e810c19729de860ea') }, e: { key: new BsonBinaryData(1, new Uint8Array([1, 2, 3])) }, - f: { key: new RegexValue('^foo', 'i') } + f: { key: new RegexValue('^foo', 'i') }, + g: { key: new Decimal128Value('1.2e3') } }; return withTestProjectIdAndCollectionSettings( persistence, @@ -3326,6 +3605,7 @@ apiDescribe('Database', persistence => { let listenSnapshot = await storeEvent.awaitEvent(); expect(toDataArray(listenSnapshot)).to.deep.equal([ testDocs['b'], + testDocs['g'], testDocs['c'], testDocs['e'], testDocs['d'], @@ -3334,11 +3614,12 @@ apiDescribe('Database', persistence => { ]); const newData = { key: new Int32Value(2) }; - await setDoc(doc(coll, 'g'), newData); + await setDoc(doc(coll, 'h'), newData); listenSnapshot = await storeEvent.awaitEvent(); expect(toDataArray(listenSnapshot)).to.deep.equal([ testDocs['b'], newData, + testDocs['g'], testDocs['c'], testDocs['e'], testDocs['d'], @@ -3351,7 +3632,8 @@ 
apiDescribe('Database', persistence => { ); }); - // TODO(Mila/BSON): Skip the runTransaction tests against nightly when running on browsers. remove when it is supported by prod + // TODO(Mila/BSON): Skip the runTransaction tests against nightly when running on browsers. + // Run this test when BSON type is supported by prod // eslint-disable-next-line no-restricted-properties it.skip('can run transactions on documents with bson types', async () => { const testDocs = { @@ -3398,18 +3680,19 @@ apiDescribe('Database', persistence => { e: { key: new Int32Value(1) }, f: { key: 2.0 }, g: { key: 3 }, - h: { key: new Timestamp(100, 123456000) }, - i: { key: new BsonTimestamp(1, 2) }, - j: { key: 'string' }, - k: { key: Bytes.fromUint8Array(new Uint8Array([0, 1, 255])) }, - l: { key: new BsonBinaryData(1, new Uint8Array([1, 2, 3])) }, - n: { key: new BsonObjectId('507f191e810c19729de860ea') }, - o: { key: new GeoPoint(0, 0) }, - p: { key: new RegexValue('^foo', 'i') }, - q: { key: [1, 2] }, - r: { key: vector([1, 2]) }, - s: { key: { a: 1 } }, - t: { key: MaxKey.instance() } + h: { key: new Decimal128Value('1.2e3') }, + i: { key: new Timestamp(100, 123456000) }, + j: { key: new BsonTimestamp(1, 2) }, + k: { key: 'string' }, + l: { key: Bytes.fromUint8Array(new Uint8Array([0, 1, 255])) }, + m: { key: new BsonBinaryData(1, new Uint8Array([1, 2, 3])) }, + o: { key: new BsonObjectId('507f191e810c19729de860ea') }, + p: { key: new GeoPoint(0, 0) }, + q: { key: new RegexValue('^foo', 'i') }, + r: { key: [1, 2] }, + s: { key: vector([1, 2]) }, + t: { key: { a: 1 } }, + u: { key: MaxKey.instance() } }; return withTestProjectIdAndCollectionSettings( @@ -3420,8 +3703,8 @@ apiDescribe('Database', persistence => { async coll => { // TODO(Mila/BSON): remove after prod supports bson, and use `ref` helper function instead const docRef = doc(coll, 'doc'); - await setDoc(doc(coll, 'm'), { key: docRef }); - testDocs['m'] = { key: docRef }; + await setDoc(doc(coll, 'n'), { key: docRef }); + 
testDocs['n'] = { key: docRef }; const orderedQuery = query(coll, orderBy('key', 'desc')); await assertSDKQueryResultsConsistentWithBackend( @@ -3429,6 +3712,7 @@ apiDescribe('Database', persistence => { orderedQuery, testDocs, [ + 'u', 't', 's', 'r', @@ -3462,9 +3746,12 @@ apiDescribe('Database', persistence => { c: { key: new Int32Value(1) }, d: { key: new Int32Value(-1) }, e: { key: new Int32Value(0) }, - f: { key: new BsonTimestamp(1, 1) }, - g: { key: new BsonTimestamp(2, 1) }, - h: { key: new BsonTimestamp(1, 2) }, + f: { key: new Decimal128Value('-1.2e3') }, + g: { key: new Decimal128Value('0.0') }, + h: { key: new Decimal128Value('1.2e3') }, + t: { key: new BsonTimestamp(1, 1) }, + u: { key: new BsonTimestamp(2, 1) }, + v: { key: new BsonTimestamp(1, 2) }, i: { key: new BsonBinaryData(1, new Uint8Array([1, 2, 3])) }, j: { key: new BsonBinaryData(1, new Uint8Array([1, 1, 4])) }, k: { key: new BsonBinaryData(2, new Uint8Array([1, 0, 0])) }, @@ -3492,12 +3779,15 @@ apiDescribe('Database', persistence => { [ 'r', 's', + 'f', 'd', 'e', + 'g', 'c', - 'f', 'h', - 'g', + 't', + 'v', + 'u', 'j', 'i', 'k', diff --git a/packages/firestore/test/integration/api/type.test.ts b/packages/firestore/test/integration/api/type.test.ts index 2f7cb7f9295..f47e99b0185 100644 --- a/packages/firestore/test/integration/api/type.test.ts +++ b/packages/firestore/test/integration/api/type.test.ts @@ -25,6 +25,7 @@ import { BsonTimestamp, Bytes, collection, + Decimal128Value, doc, DocumentData, DocumentReference, @@ -305,6 +306,41 @@ apiDescribe('Firestore', persistence => { ); }); + it('can read and write decimal128 fields', () => { + return withTestDbsSettings( + persistence, + NIGHTLY_PROJECT_ID, + settings, + 1, + async dbs => { + await expectRoundtripWithoutTransaction(dbs[0], { + decimalSciPositive: new Decimal128Value('1.2e3'), + decimalSciNegative: new Decimal128Value('-2.5e-2'), + decimalSciPositiveCapE: new Decimal128Value('1.2345E+5'), + decimalSciNegativeCapE: new 
Decimal128Value('-9.876E-3'), + decimalIntPositive: new Decimal128Value('12345'), + decimalIntNegative: new Decimal128Value('-67890'), + decimalFloatPositive: new Decimal128Value('123.456'), + decimalFloatNegative: new Decimal128Value('-789.012'), + decimalZeroFloat: new Decimal128Value('0.0'), + decimalZeroInt: new Decimal128Value('0'), + decimalPrecisePositive: new Decimal128Value( + '0.1234567890123456789012345678901234' + ), + decimalLargePositive: new Decimal128Value( + '1234567890123456789012345678901234' + ), + decimalPreciseNegative: new Decimal128Value( + '-0.1234567890123456789012345678901234' + ), + decimalLargeNegative: new Decimal128Value( + '-1234567890123456789012345678901234' + ) + }); + } + ); + }); + it('can read and write bsonTimestamp fields', () => { return withTestDbsSettings( persistence, @@ -359,6 +395,8 @@ apiDescribe('Firestore', persistence => { new BsonBinaryData(1, new Uint8Array([1, 2, 3])), new BsonObjectId('507f191e810c19729de860ea'), new Int32Value(1), + new Decimal128Value('1.2e3'), + new BsonTimestamp(1, 2), MinKey.instance(), MaxKey.instance(), new RegexValue('^foo', 'i') @@ -380,6 +418,8 @@ apiDescribe('Firestore', persistence => { binary: new BsonBinaryData(1, new Uint8Array([1, 2, 3])), objectId: new BsonObjectId('507f191e810c19729de860ea'), int32: new Int32Value(1), + decimal128: new Decimal128Value('1.2e3'), + bsonTimestamp: new BsonTimestamp(1, 2), min: MinKey.instance(), max: MaxKey.instance(), regex: new RegexValue('^foo', 'i') @@ -419,6 +459,39 @@ apiDescribe('Firestore', persistence => { ); }); + it('invalid decimal128 gets rejected', async () => { + return withTestProjectIdAndCollectionSettings( + persistence, + NIGHTLY_PROJECT_ID, + settings, + {}, + async coll => { + const docRef = doc(coll, 'test-doc'); + let errorMessage; + try { + await setDoc(docRef, { key: new Decimal128Value('') }); + } catch (err) { + errorMessage = (err as FirestoreError)?.message; + } + expect(errorMessage).to.contains('Invalid number'); + + 
try { + await setDoc(docRef, { key: new Decimal128Value('1 23. 4') }); + } catch (err) { + errorMessage = (err as FirestoreError)?.message; + } + expect(errorMessage).to.contains('Invalid number 1 23. 4'); + + try { + await setDoc(docRef, { key: new Decimal128Value('abc') }); + } catch (err) { + errorMessage = (err as FirestoreError)?.message; + } + expect(errorMessage).to.contains('Invalid number abc'); + } + ); + }); + it('invalid BSON timestamp gets rejected', async () => { return withTestProjectIdAndCollectionSettings( persistence, @@ -525,6 +598,7 @@ apiDescribe('Firestore', persistence => { booleanValue: { key: true }, nanValue: { key: NaN }, int32Value: { key: new Int32Value(1) }, + decimal128Value: { key: new Decimal128Value('1.2e3') }, doubleValue: { key: 2.0 }, integerValue: { key: 3 }, timestampValue: { key: new Timestamp(100, 123456000) }, diff --git a/packages/firestore/test/lite/integration.test.ts b/packages/firestore/test/lite/integration.test.ts index 25f372bcb95..df44d5d965b 100644 --- a/packages/firestore/test/lite/integration.test.ts +++ b/packages/firestore/test/lite/integration.test.ts @@ -38,6 +38,7 @@ import { initializeFirestore, terminate } from '../../src/lite-api/database'; +import { Decimal128Value } from '../../src/lite-api/decimal128_value'; import { FieldPath } from '../../src/lite-api/field_path'; import { FieldValue } from '../../src/lite-api/field_value'; import { @@ -2976,6 +2977,7 @@ describe.skip('BSON types', () => { const ref = await addDoc(coll, { objectId: new BsonObjectId('507f191e810c19729de860ea'), int32: new Int32Value(1), + decimal128: new Decimal128Value('1.2e3'), min: MinKey.instance(), max: MaxKey.instance(), regex: new RegexValue('^foo', 'i') @@ -2998,6 +3000,8 @@ describe.skip('BSON types', () => { .isEqual(new BsonObjectId('507f191e810c19729de860ea')) ).to.be.true; expect(snap1.get('int32').isEqual(new Int32Value(2))).to.be.true; + expect(snap1.get('decimal128').isEqual(new Decimal128Value('1.2e3'))).to + 
.be.true; expect(snap1.get('min') === MinKey.instance()).to.be.true; expect(snap1.get('max') === MaxKey.instance()).to.be.true; expect( diff --git a/packages/firestore/test/unit/index/firestore_index_value_writer.test.ts b/packages/firestore/test/unit/index/firestore_index_value_writer.test.ts index 907881c262c..1a1d795d719 100644 --- a/packages/firestore/test/unit/index/firestore_index_value_writer.test.ts +++ b/packages/firestore/test/unit/index/firestore_index_value_writer.test.ts @@ -21,6 +21,7 @@ import { IndexByteEncoder } from '../../../src/index/index_byte_encoder'; import { BsonBinaryData } from '../../../src/lite-api/bson_binary_data'; import { BsonObjectId } from '../../../src/lite-api/bson_object_Id'; import { BsonTimestamp } from '../../../src/lite-api/bson_timestamp'; +import { Decimal128Value } from '../../../src/lite-api/decimal128_value'; import { Int32Value } from '../../../src/lite-api/int32_value'; import { RegexValue } from '../../../src/lite-api/regex_value'; import { Timestamp } from '../../../src/lite-api/timestamp'; @@ -31,7 +32,8 @@ import { parseMinKey, parseBsonObjectId, parseRegexValue, - parseBsonTimestamp + parseBsonTimestamp, + parseDecimal128Value } from '../../../src/lite-api/user_data_reader'; import { IndexKind } from '../../../src/model/field_index'; import type { Value } from '../../../src/protos/firestore_proto_api'; @@ -554,6 +556,108 @@ describe('Firestore Index Value Writer', () => { ).to.equal(1); }); + it('can compare BSON Decimal128', () => { + const value1 = { + mapValue: { + fields: { + '__decimal128__': { stringValue: '-1.2e3' } + } + } + }; + const value2 = { + mapValue: { + fields: { + '__decimal128__': { stringValue: '1.2e3' } + } + } + }; + const value3 = parseDecimal128Value(new Decimal128Value('-1.2e3')); + const value4 = parseDecimal128Value(new Decimal128Value('1.2e3')); + + expect( + compareIndexEncodedValues(value1, value2, IndexKind.ASCENDING) + ).to.equal(-1); + expect( + compareIndexEncodedValues(value2, 
value1, IndexKind.ASCENDING) + ).to.equal(1); + expect( + compareIndexEncodedValues(value1, value1, IndexKind.ASCENDING) + ).to.equal(0); + + expect( + compareIndexEncodedValues(value3, value2, IndexKind.ASCENDING) + ).to.equal(-1); + expect( + compareIndexEncodedValues(value2, value3, IndexKind.ASCENDING) + ).to.equal(1); + expect( + compareIndexEncodedValues(value3, value1, IndexKind.ASCENDING) + ).to.equal(0); + + expect( + compareIndexEncodedValues(value4, value1, IndexKind.ASCENDING) + ).to.equal(1); + expect( + compareIndexEncodedValues(value4, value2, IndexKind.ASCENDING) + ).to.equal(0); + expect( + compareIndexEncodedValues(value4, value3, IndexKind.ASCENDING) + ).to.equal(1); + }); + + it('can compare BSON Decimal128 special cases', () => { + const value1 = { + mapValue: { + fields: { + '__decimal128__': { stringValue: 'NaN' } + } + } + }; + const value2 = { + mapValue: { + fields: { + '__decimal128__': { stringValue: '-Infinity' } + } + } + }; + const value3 = parseDecimal128Value(new Decimal128Value('NaN')); + const value4 = parseDecimal128Value(new Decimal128Value('Infinity')); + + // order should be: NaNs are equal, and less than -Infinity + expect( + compareIndexEncodedValues(value1, value2, IndexKind.ASCENDING) + ).to.equal(-1); + expect( + compareIndexEncodedValues(value2, value1, IndexKind.ASCENDING) + ).to.equal(1); + expect( + compareIndexEncodedValues(value1, value3, IndexKind.ASCENDING) + ).to.equal(0); + expect( + compareIndexEncodedValues(value1, value4, IndexKind.ASCENDING) + ).to.equal(-1); + + expect( + compareIndexEncodedValues(value2, value2, IndexKind.ASCENDING) + ).to.equal(0); + expect( + compareIndexEncodedValues(value2, value3, IndexKind.ASCENDING) + ).to.equal(1); + expect( + compareIndexEncodedValues(value2, value4, IndexKind.ASCENDING) + ).to.equal(-1); + + expect( + compareIndexEncodedValues(value3, value4, IndexKind.ASCENDING) + ).to.equal(-1); + expect( + compareIndexEncodedValues(value4, value3, IndexKind.ASCENDING) + 
).to.equal(1); + expect( + compareIndexEncodedValues(value4, value4, IndexKind.ASCENDING) + ).to.equal(0); + }); + it('can compare BSON MinKey', () => { const value1 = { mapValue: { diff --git a/packages/firestore/test/unit/local/index_manager.test.ts b/packages/firestore/test/unit/local/index_manager.test.ts index b6af448b2db..6ca4bd7cdeb 100644 --- a/packages/firestore/test/unit/local/index_manager.test.ts +++ b/packages/firestore/test/unit/local/index_manager.test.ts @@ -22,6 +22,7 @@ import { BsonObjectId, BsonTimestamp, Bytes, + Decimal128Value, GeoPoint, Int32Value, MaxKey, @@ -2153,6 +2154,140 @@ describe('IndexedDbIndexManager', async () => { await verifyResults(q); }); + it('can index Decimal128 fields', async () => { + await indexManager.addFieldIndex( + fieldIndex('coll', { fields: [['key', IndexKind.ASCENDING]] }) + ); + await addDoc('coll/doc1', { + key: new Decimal128Value('-1.2e3') + }); + await addDoc('coll/doc2', { + key: new Decimal128Value('0.0') + }); + await addDoc('coll/doc3', { + key: new Decimal128Value('1.2e3') + }); + const fieldIndexes = await indexManager.getFieldIndexes('coll'); + expect(fieldIndexes).to.have.length(1); + + let q = queryWithAddedOrderBy(query('coll'), orderBy('key')); + await verifyResults(q, 'coll/doc1', 'coll/doc2', 'coll/doc3'); + + q = queryWithAddedFilter( + query('coll'), + filter('key', '==', new Decimal128Value('-1200')) + ); + await verifyResults(q, 'coll/doc1'); + + q = queryWithAddedFilter( + query('coll'), + filter('key', '!=', new Decimal128Value('0')) + ); + await verifyResults(q, 'coll/doc1', 'coll/doc3'); + + q = queryWithAddedFilter( + query('coll'), + filter('key', '>=', new Decimal128Value('-0')) + ); + await verifyResults(q, 'coll/doc2', 'coll/doc3'); + + q = queryWithAddedFilter( + query('coll'), + filter('key', '<=', new Decimal128Value('-0.0')) + ); + await verifyResults(q, 'coll/doc1', 'coll/doc2'); + + q = queryWithAddedFilter( + query('coll'), + filter('key', '>', new Decimal128Value('1.2e-3')) 
+ ); + await verifyResults(q, 'coll/doc3'); + + q = queryWithAddedFilter( + query('coll'), + filter('key', '<', new Decimal128Value('-1.2e-3')) + ); + await verifyResults(q, 'coll/doc1'); + + q = queryWithAddedFilter( + query('coll'), + filter('key', '>', new Decimal128Value('1.2e3')) + ); + await verifyResults(q); + + q = queryWithAddedFilter( + query('coll'), + filter('key', '<', new Decimal128Value('-1.2e3')) + ); + await verifyResults(q); + }); + + it('indexes Decimal128 fields with precision loss', async () => { + await indexManager.addFieldIndex( + fieldIndex('coll', { fields: [['key', IndexKind.ASCENDING]] }) + ); + await addDoc('coll/doc1', { + key: new Decimal128Value('-0.1234567890123456789') // will be rounded to -0.12345678901234568 + }); + await addDoc('coll/doc2', { + key: new Decimal128Value('0') + }); + await addDoc('coll/doc3', { + key: new Decimal128Value('0.1234567890123456789') // will be rounded to 0.12345678901234568 + }); + const fieldIndexes = await indexManager.getFieldIndexes('coll'); + expect(fieldIndexes).to.have.length(1); + + let q = queryWithAddedOrderBy(query('coll'), orderBy('key')); + await verifyResults(q, 'coll/doc1', 'coll/doc2', 'coll/doc3'); + + q = queryWithAddedFilter( + query('coll'), + filter('key', '==', new Decimal128Value('0.1234567890123456789')) + ); + await verifyResults(q, 'coll/doc3'); + + // Mismatch behaviour caused by rounding error. Firestore fetches the doc3 from indexDB as + // doc3 rounds to the same number, even though the actual number in doc3 is different + q = queryWithAddedFilter( + query('coll'), + filter('key', '==', new Decimal128Value('0.12345678901234568')) + ); + await verifyResults(q, 'coll/doc3'); + + // Operations that doesn't go up to 17 decimal digits of precision wouldn't be affected by + // this rounding errors. 
+ q = queryWithAddedFilter( + query('coll'), + filter('key', '!=', new Decimal128Value('0.0')) + ); + await verifyResults(q, 'coll/doc1', 'coll/doc3'); + + q = queryWithAddedFilter( + query('coll'), + filter('key', '>=', new Decimal128Value('1.23e-1')) + ); + await verifyResults(q, 'coll/doc3'); + + q = queryWithAddedFilter( + query('coll'), + filter('key', '<=', new Decimal128Value('-1.23e-1')) + ); + await verifyResults(q, 'coll/doc1'); + + q = queryWithAddedFilter( + query('coll'), + filter('key', '>', new Decimal128Value('1.2e3')) + ); + await verifyResults(q); + + q = queryWithAddedFilter( + query('coll'), + filter('key', '<', new Decimal128Value('-1.2e3')) + ); + await verifyResults(q); + }); + it('can index regex fields', async () => { await indexManager.addFieldIndex( fieldIndex('coll', { fields: [['key', IndexKind.ASCENDING]] }) @@ -2372,37 +2507,44 @@ describe('IndexedDbIndexManager', async () => { key: new Int32Value(2) }); await addDoc('coll/doc3', { - key: new Int32Value(1) + key: new Int32Value(-1) }); await addDoc('coll/doc4', { - key: new BsonTimestamp(1, 2) + key: new Decimal128Value('1.2e3') }); await addDoc('coll/doc5', { - key: new BsonTimestamp(1, 1) + key: new Decimal128Value('-0.0') }); await addDoc('coll/doc6', { - key: new BsonBinaryData(1, new Uint8Array([1, 2, 4])) + key: new BsonTimestamp(1, 2) }); await addDoc('coll/doc7', { - key: new BsonBinaryData(1, new Uint8Array([1, 2, 3])) + key: new BsonTimestamp(1, 1) }); + await addDoc('coll/doc8', { - key: new BsonObjectId('507f191e810c19729de860eb') + key: new BsonBinaryData(1, new Uint8Array([1, 2, 4])) }); await addDoc('coll/doc9', { + key: new BsonBinaryData(1, new Uint8Array([1, 2, 3])) + }); + await addDoc('coll/doc10', { + key: new BsonObjectId('507f191e810c19729de860eb') + }); + await addDoc('coll/doc11', { key: new BsonObjectId('507f191e810c19729de860ea') }); - await addDoc('coll/doc10', { + await addDoc('coll/doc12', { key: new RegexValue('a', 'm') }); - await addDoc('coll/doc11', { 
+ await addDoc('coll/doc13', { key: new RegexValue('a', 'i') }); - await addDoc('coll/doc12', { + await addDoc('coll/doc14', { key: MaxKey.instance() }); @@ -2412,18 +2554,20 @@ describe('IndexedDbIndexManager', async () => { const q = queryWithAddedOrderBy(query('coll'), orderBy('key', 'desc')); await verifyResults( q, - 'coll/doc12', - 'coll/doc10', - 'coll/doc11', - 'coll/doc8', - 'coll/doc9', - 'coll/doc6', - 'coll/doc7', - 'coll/doc4', - 'coll/doc5', - 'coll/doc2', - 'coll/doc3', - 'coll/doc1' + 'coll/doc14', // maxKey + 'coll/doc12', // regex m + 'coll/doc13', // regex i + 'coll/doc10', // objectId eb + 'coll/doc11', // objectId ea + 'coll/doc8', // binary [1,2,4] + 'coll/doc9', // binary [1,2,3] + 'coll/doc6', // timestamp 1,2 + 'coll/doc7', // timestamp 1,1 + 'coll/doc4', // Number decimal128 1200 + 'coll/doc2', // Number int32 2 + 'coll/doc5', // Number decimal128 -0.0 + 'coll/doc3', // Number int32 -1 + 'coll/doc1' // minKey ); }); }); @@ -2454,42 +2598,45 @@ describe('IndexedDbIndexManager', async () => { key: 3 }); await addDoc('coll/doc8', { - key: new Timestamp(100, 123456000) + key: new Decimal128Value('1.2e3') }); await addDoc('coll/doc9', { - key: new BsonTimestamp(1, 2) + key: new Timestamp(100, 123456000) }); await addDoc('coll/doc10', { - key: 'string' + key: new BsonTimestamp(1, 2) }); await addDoc('coll/doc11', { - key: Bytes.fromUint8Array(new Uint8Array([0, 1, 255])) as Bytes + key: 'string' }); await addDoc('coll/doc12', { - key: new BsonBinaryData(1, new Uint8Array([1, 2, 3])) + key: Bytes.fromUint8Array(new Uint8Array([0, 1, 255])) as Bytes }); await addDoc('coll/doc13', { - key: ref('coll/doc') + key: new BsonBinaryData(1, new Uint8Array([1, 2, 3])) }); await addDoc('coll/doc14', { - key: new BsonObjectId('507f191e810c19729de860ea') + key: ref('coll/doc') }); await addDoc('coll/doc15', { - key: new GeoPoint(0, 1) + key: new BsonObjectId('507f191e810c19729de860ea') }); await addDoc('coll/doc16', { - key: new RegexValue('^foo', 'i') + key: 
new GeoPoint(0, 1) }); await addDoc('coll/doc17', { - key: [1, 2] + key: new RegexValue('^foo', 'i') }); await addDoc('coll/doc18', { - key: vector([1, 2]) + key: [1, 2] }); await addDoc('coll/doc19', { - key: { a: 1 } + key: vector([1, 2]) }); await addDoc('coll/doc20', { + key: { a: 1 } + }); + await addDoc('coll/doc21', { key: MaxKey.instance() }); @@ -2499,6 +2646,7 @@ describe('IndexedDbIndexManager', async () => { const q = queryWithAddedOrderBy(query('coll'), orderBy('key', 'desc')); await verifyResults( q, + 'coll/doc21', 'coll/doc20', 'coll/doc19', 'coll/doc18', diff --git a/packages/firestore/test/unit/local/local_store_indexeddb.test.ts b/packages/firestore/test/unit/local/local_store_indexeddb.test.ts index 5f68684d193..176322e5da3 100644 --- a/packages/firestore/test/unit/local/local_store_indexeddb.test.ts +++ b/packages/firestore/test/unit/local/local_store_indexeddb.test.ts @@ -28,7 +28,8 @@ import { Int32Value, RegexValue, MaxKey, - MinKey + MinKey, + Decimal128Value } from '../../../src'; import { User } from '../../../src/auth/user'; import { BundleConverterImpl } from '../../../src/core/bundle_impl'; @@ -1325,6 +1326,188 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { test.assertQueryReturned(query_, 'coll/a'); }); + it('Indexes BSON Decimal128 fields', async () => { + const index = fieldIndex('coll', { + id: 1, + fields: [['key', IndexKind.ASCENDING]] + }); + await test.configureFieldsIndexes(index); + await test.writeMutations( + setMutation('coll/a', { key: new Decimal128Value('-1.2e3') }), + setMutation('coll/b', { key: new Decimal128Value('0') }), + setMutation('coll/c', { key: new Decimal128Value('1.2e3') }) + ); + await test.backfillIndexes(); + + let query_ = query('coll', orderBy('key', 'asc')); + await test.executeQuery(query_); + test.assertOverlaysRead(3, 0, { + [key('coll/a').toString()]: MutationType.Set, + [key('coll/b').toString()]: MutationType.Set, + [key('coll/c').toString()]: MutationType.Set + }); + 
test.assertQueryReturned(query_, 'coll/a', 'coll/b', 'coll/c'); + + query_ = query('coll', filter('key', '==', new Decimal128Value('-1200'))); + await test.executeQuery(query_); + test.assertOverlaysRead(1, 0, { + [key('coll/a').toString()]: MutationType.Set + }); + test.assertQueryReturned(query_, 'coll/a'); + + query_ = query('coll', filter('key', '!=', new Decimal128Value('0.0'))); + await test.executeQuery(query_); + test.assertOverlaysRead(2, 0, { + [key('coll/a').toString()]: MutationType.Set, + [key('coll/c').toString()]: MutationType.Set + }); + test.assertQueryReturned(query_, 'coll/a', 'coll/c'); + + query_ = query('coll', filter('key', '>=', new Decimal128Value('-0'))); + await test.executeQuery(query_); + test.assertOverlaysRead(2, 0, { + [key('coll/b').toString()]: MutationType.Set, + [key('coll/c').toString()]: MutationType.Set + }); + test.assertQueryReturned(query_, 'coll/b', 'coll/c'); + + // This will fail if the negative 0s are not converted to positive 0 in `writeIndexValueAux` + // function + query_ = query('coll', filter('key', '<=', new Decimal128Value('-0.0'))); + await test.executeQuery(query_); + test.assertOverlaysRead(2, 0, { + [key('coll/a').toString()]: MutationType.Set, + [key('coll/b').toString()]: MutationType.Set + }); + test.assertQueryReturned(query_, 'coll/a', 'coll/b'); + + query_ = query('coll', filter('key', '>', new Decimal128Value('1.2e3'))); + await test.executeQuery(query_); + test.assertOverlaysRead(0, 0); + test.assertQueryReturned(query_); + + query_ = query('coll', filter('key', '<', new Decimal128Value('-1.2e3'))); + await test.executeQuery(query_); + test.assertOverlaysRead(0, 0); + test.assertQueryReturned(query_); + + query_ = query( + 'coll', + filter('key', 'in', [ + new Decimal128Value('-1.2e3'), + new Decimal128Value('0') + ]) + ); + await test.executeQuery(query_); + test.assertOverlaysRead(2, 0, { + [key('coll/a').toString()]: MutationType.Set, + [key('coll/b').toString()]: MutationType.Set + }); + 
test.assertQueryReturned(query_, 'coll/a', 'coll/b'); + + query_ = query( + 'coll', + filter('key', 'not-in', [ + new Decimal128Value('-1200'), + new Decimal128Value('0.0') + ]) + ); + await test.executeQuery(query_); + test.assertOverlaysRead(1, 0, { + [key('coll/c').toString()]: MutationType.Set + }); + test.assertQueryReturned(query_, 'coll/c'); + }); + + it('Indexes BSON Decimal128 fields with precision loss', async () => { + const index = fieldIndex('coll', { + id: 1, + fields: [['key', IndexKind.ASCENDING]] + }); + await test.configureFieldsIndexes(index); + await test.writeMutations( + setMutation('coll/a', { + key: new Decimal128Value('-0.1234567890123456789') + }), // will be rounded to -0.12345678901234568 + setMutation('coll/b', { key: new Decimal128Value('0') }), + setMutation('coll/c', { + key: new Decimal128Value('0.1234567890123456789') + }) // will be rounded to 0.12345678901234568 + ); + await test.backfillIndexes(); + + let query_ = query('coll', orderBy('key', 'asc')); + await test.executeQuery(query_); + test.assertOverlaysRead(3, 0, { + [key('coll/a').toString()]: MutationType.Set, + [key('coll/b').toString()]: MutationType.Set, + [key('coll/c').toString()]: MutationType.Set + }); + test.assertQueryReturned(query_, 'coll/a', 'coll/b', 'coll/c'); + + query_ = query( + 'coll', + filter('key', '==', new Decimal128Value('0.1234567890123456789')) + ); + await test.executeQuery(query_); + test.assertOverlaysRead(1, 0, { + [key('coll/c').toString()]: MutationType.Set + }); + test.assertQueryReturned(query_, 'coll/c'); + + // Mismatch behavior caused by rounding error. Firestore fetches the doc3 from IndexedDb as + // doc3 rounds to the same number, but, it is not presented on the final query result. 
+ query_ = query( + 'coll', + filter('key', '==', new Decimal128Value('0.12345678901234568')) + ); + await test.executeQuery(query_); + test.assertOverlaysRead(1, 0, { + [key('coll/c').toString()]: MutationType.Set + }); + test.assertQueryReturned(query_); + + // Operations that doesn't go up to 17 decimal digits of precision wouldn't be affected by + // this rounding errors. + query_ = query('coll', filter('key', '!=', new Decimal128Value('0.0'))); + await test.executeQuery(query_); + test.assertOverlaysRead(2, 0, { + [key('coll/a').toString()]: MutationType.Set, + [key('coll/c').toString()]: MutationType.Set + }); + test.assertQueryReturned(query_, 'coll/a', 'coll/c'); + + query_ = query( + 'coll', + filter('key', '>=', new Decimal128Value('1.23e-1')) + ); + await test.executeQuery(query_); + test.assertOverlaysRead(1, 0, { + [key('coll/c').toString()]: MutationType.Set + }); + test.assertQueryReturned(query_, 'coll/c'); + + query_ = query( + 'coll', + filter('key', '<=', new Decimal128Value('-1.23e-1')) + ); + await test.executeQuery(query_); + test.assertOverlaysRead(1, 0, { + [key('coll/a').toString()]: MutationType.Set + }); + test.assertQueryReturned(query_, 'coll/a'); + + query_ = query('coll', filter('key', '>', new Decimal128Value('1.2e3'))); + await test.executeQuery(query_); + test.assertOverlaysRead(0, 0); + test.assertQueryReturned(query_); + + query_ = query('coll', filter('key', '<', new Decimal128Value('-1.2e3'))); + await test.executeQuery(query_); + test.assertOverlaysRead(0, 0); + test.assertQueryReturned(query_); + }); + it('Indexes BSON Regex fields', async () => { const index = fieldIndex('coll', { id: 1, @@ -1576,30 +1759,32 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { await test.writeMutations( setMutation('coll/a', { key: MinKey.instance() }), setMutation('coll/b', { key: new Int32Value(2) }), - setMutation('coll/c', { key: new Int32Value(1) }), - setMutation('coll/d', { key: new BsonTimestamp(1000, 1001) }), - 
setMutation('coll/e', { key: new BsonTimestamp(1000, 1000) }), - setMutation('coll/f', { + setMutation('coll/c', { key: new Int32Value(-1) }), + setMutation('coll/d', { key: new Decimal128Value('1.2e3') }), + setMutation('coll/e', { key: new Decimal128Value('-0') }), + setMutation('coll/f', { key: new BsonTimestamp(1000, 1001) }), + setMutation('coll/g', { key: new BsonTimestamp(1000, 1000) }), + setMutation('coll/h', { key: new BsonBinaryData(1, new Uint8Array([1, 2, 4])) }), - setMutation('coll/g', { + setMutation('coll/i', { key: new BsonBinaryData(1, new Uint8Array([1, 2, 3])) }), - setMutation('coll/h', { + setMutation('coll/j', { key: new BsonObjectId('507f191e810c19729de860eb') }), - setMutation('coll/i', { + setMutation('coll/k', { key: new BsonObjectId('507f191e810c19729de860ea') }), - setMutation('coll/j', { key: new RegexValue('^bar', 'm') }), - setMutation('coll/k', { key: new RegexValue('^bar', 'i') }), - setMutation('coll/l', { key: MaxKey.instance() }) + setMutation('coll/l', { key: new RegexValue('^bar', 'm') }), + setMutation('coll/m', { key: new RegexValue('^bar', 'i') }), + setMutation('coll/n', { key: MaxKey.instance() }) ); await test.backfillIndexes(); const query_ = query('coll', orderBy('key', 'desc')); await test.executeQuery(query_); - test.assertOverlaysRead(12, 0, { + test.assertOverlaysRead(14, 0, { [key('coll/a').toString()]: MutationType.Set, [key('coll/b').toString()]: MutationType.Set, [key('coll/c').toString()]: MutationType.Set, @@ -1611,22 +1796,26 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { [key('coll/i').toString()]: MutationType.Set, [key('coll/j').toString()]: MutationType.Set, [key('coll/k').toString()]: MutationType.Set, - [key('coll/l').toString()]: MutationType.Set + [key('coll/l').toString()]: MutationType.Set, + [key('coll/m').toString()]: MutationType.Set, + [key('coll/n').toString()]: MutationType.Set }); test.assertQueryReturned( query_, - 'coll/l', - 'coll/j', - 'coll/k', - 'coll/h', - 
'coll/i', - 'coll/f', - 'coll/g', - 'coll/d', - 'coll/e', - 'coll/b', - 'coll/c', - 'coll/a' + 'coll/n', // maxKey + 'coll/l', // regex m + 'coll/m', // regex i + 'coll/j', // objectId eb + 'coll/k', // objectId ea + 'coll/h', // binary [1,2,4] + 'coll/i', // binary [1,2,3] + 'coll/f', // timestamp 1000,1001 + 'coll/g', // timestamp 1000,1000 + 'coll/d', // Number decimal128 1200 + 'coll/b', // Number int32 2 + 'coll/e', // Number decimal128 -0.0 + 'coll/c', // Number int32 -1 + 'coll/a' // minKey ); }); @@ -1645,29 +1834,30 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { setMutation('coll/e', { key: new Int32Value(1) }), setMutation('coll/f', { key: 2.0 }), setMutation('coll/g', { key: 3 }), - setMutation('coll/h', { key: new Timestamp(100, 123456000) }), - setMutation('coll/i', { key: new BsonTimestamp(1, 2) }), - setMutation('coll/j', { key: 'string' }), - setMutation('coll/k', { key: blob(1, 2, 3) }), - setMutation('coll/l', { + setMutation('coll/h', { key: new Decimal128Value('1.2e3') }), + setMutation('coll/i', { key: new Timestamp(100, 123456000) }), + setMutation('coll/j', { key: new BsonTimestamp(1, 2) }), + setMutation('coll/k', { key: 'string' }), + setMutation('coll/l', { key: blob(1, 2, 3) }), + setMutation('coll/m', { key: new BsonBinaryData(1, new Uint8Array([1, 2, 3])) }), - setMutation('coll/m', { key: ref('foo/bar') }), - setMutation('coll/n', { + setMutation('coll/n', { key: ref('foo/bar') }), + setMutation('coll/o', { key: new BsonObjectId('507f191e810c19729de860ea') }), - setMutation('coll/o', { key: new GeoPoint(1, 2) }), - setMutation('coll/p', { key: new RegexValue('^bar', 'm') }), - setMutation('coll/q', { key: [2, 'foo'] }), - setMutation('coll/r', { key: vector([1, 2, 3]) }), - setMutation('coll/s', { key: { bar: 1, foo: 2 } }), - setMutation('coll/t', { key: MaxKey.instance() }) + setMutation('coll/p', { key: new GeoPoint(1, 2) }), + setMutation('coll/q', { key: new RegexValue('^bar', 'm') }), + 
setMutation('coll/r', { key: [2, 'foo'] }), + setMutation('coll/s', { key: vector([1, 2, 3]) }), + setMutation('coll/t', { key: { bar: 1, foo: 2 } }), + setMutation('coll/u', { key: MaxKey.instance() }) ); await test.backfillIndexes(); const query_ = query('coll', orderBy('key', 'asc')); await test.executeQuery(query_); - test.assertOverlaysRead(20, 0, { + test.assertOverlaysRead(21, 0, { [key('coll/a').toString()]: MutationType.Set, [key('coll/b').toString()]: MutationType.Set, [key('coll/c').toString()]: MutationType.Set, @@ -1687,7 +1877,8 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { [key('coll/q').toString()]: MutationType.Set, [key('coll/r').toString()]: MutationType.Set, [key('coll/s').toString()]: MutationType.Set, - [key('coll/t').toString()]: MutationType.Set + [key('coll/t').toString()]: MutationType.Set, + [key('coll/u').toString()]: MutationType.Set }); test.assertQueryReturned( query_, @@ -1710,7 +1901,8 @@ describe('LocalStore w/ IndexedDB Persistence (Non generic)', () => { 'coll/q', 'coll/r', 'coll/s', - 'coll/t' + 'coll/t', + 'coll/u' ); }); }); diff --git a/packages/firestore/test/unit/model/document.test.ts b/packages/firestore/test/unit/model/document.test.ts index f67e9d971a0..472176633fc 100644 --- a/packages/firestore/test/unit/model/document.test.ts +++ b/packages/firestore/test/unit/model/document.test.ts @@ -21,6 +21,7 @@ import { BsonBinaryData, BsonObjectId, BsonTimestamp, + Decimal128Value, Int32Value, MaxKey, MinKey, @@ -61,7 +62,8 @@ describe('Document', () => { min: MinKey.instance(), max: MaxKey.instance(), regex: new RegexValue('a', 'b'), - int32: new Int32Value(1) + int32: new Int32Value(1), + decimal128: new Decimal128Value('1.2e3') }; const document = doc('rooms/Eros', 1, data); @@ -74,7 +76,8 @@ describe('Document', () => { min: MinKey.instance(), max: MaxKey.instance(), regex: new RegexValue('a', 'b'), - int32: new Int32Value(1) + int32: new Int32Value(1), + decimal128: new Decimal128Value('1.2e3') 
}) ); expect(value).not.to.equal(data); diff --git a/packages/firestore/test/unit/model/object_value.test.ts b/packages/firestore/test/unit/model/object_value.test.ts index 40b18893e68..b2df554490f 100644 --- a/packages/firestore/test/unit/model/object_value.test.ts +++ b/packages/firestore/test/unit/model/object_value.test.ts @@ -24,7 +24,8 @@ import { RegexValue, Int32Value, MaxKey, - MinKey + MinKey, + Decimal128Value } from '../../../src'; import { vector } from '../../../src/lite-api/field_value_impl'; import { extractFieldMask, ObjectValue } from '../../../src/model/object_value'; @@ -44,7 +45,8 @@ describe('ObjectValue', () => { min: MinKey.instance(), max: MaxKey.instance(), regex: new RegexValue('a', 'b'), - int32: new Int32Value(1) + int32: new Int32Value(1), + decimal128: new Decimal128Value('1.2e3') } }); @@ -84,6 +86,9 @@ describe('ObjectValue', () => { expect(typeOrder(objValue.field(field('bson.int32'))!)).to.equal( TypeOrder.NumberValue ); + expect(typeOrder(objValue.field(field('bson.decimal128'))!)).to.equal( + TypeOrder.NumberValue + ); expect(objValue.field(field('foo.a.b'))).to.be.null; expect(objValue.field(field('bar'))).to.be.null; @@ -108,7 +113,8 @@ describe('ObjectValue', () => { min: MinKey.instance(), max: MaxKey.instance(), regex: new RegexValue('a', 'b'), - int32: new Int32Value(1) + int32: new Int32Value(1), + decimal128: new Decimal128Value('1.2e3') }) ); expect(objValue.field(field('bson.objectId'))!).to.deep.equal( @@ -132,6 +138,9 @@ describe('ObjectValue', () => { expect(objValue.field(field('bson.int32'))!).to.deep.equal( wrap(new Int32Value(1)) ); + expect(objValue.field(field('bson.decimal128'))!).to.deep.equal( + wrap(new Decimal128Value('1.2e3')) + ); }); it('can overwrite existing fields', () => { @@ -248,6 +257,7 @@ describe('ObjectValue', () => { objValue.set(field('timestamp'), wrap(new BsonTimestamp(1, 2))); objValue.set(field('regex'), wrap(new RegexValue('a', 'b'))); objValue.set(field('int32'), wrap(new 
Int32Value(1))); + objValue.set(field('decimal128'), wrap(new Decimal128Value('1.2e3'))); objValue.set(field('min'), wrap(MinKey.instance())); objValue.set(field('max'), wrap(MaxKey.instance())); @@ -257,6 +267,7 @@ describe('ObjectValue', () => { timestamp: new BsonTimestamp(1, 2), regex: new RegexValue('a', 'b'), int32: new Int32Value(1), + decimal128: new Decimal128Value('1.2e3'), min: MinKey.instance(), max: MaxKey.instance() }); @@ -285,6 +296,7 @@ describe('ObjectValue', () => { timestamp: new BsonTimestamp(1, 2), regex: new RegexValue('a', 'b'), int32: new Int32Value(1), + decimal128: new Decimal128Value('1.2e3'), min: null, max: MaxKey.instance(), foo: { @@ -306,7 +318,8 @@ describe('ObjectValue', () => { min: MinKey.instance(), max: MaxKey.instance(), regex: new RegexValue('a', 'b'), - int32: new Int32Value(1) + int32: new Int32Value(1), + decimal128: new Decimal128Value('1.2e3') } }); const expectedMask = mask( @@ -322,7 +335,8 @@ describe('ObjectValue', () => { 'bar.min', 'bar.max', 'bar.regex', - 'bar.int32' + 'bar.int32', + 'bar.decimal128' ); const actualMask = extractFieldMask(objValue.value.mapValue); expect(actualMask.isEqual(expectedMask)).to.be.true; diff --git a/packages/firestore/test/unit/model/values.test.ts b/packages/firestore/test/unit/model/values.test.ts index dce8f1e123c..e4d45cb8e95 100644 --- a/packages/firestore/test/unit/model/values.test.ts +++ b/packages/firestore/test/unit/model/values.test.ts @@ -26,7 +26,8 @@ import { RegexValue, Int32Value, MaxKey, - MinKey + MinKey, + Decimal128Value } from '../../../src'; import { DatabaseId } from '../../../src/core/database_info'; import { vector } from '../../../src/lite-api/field_value_impl'; @@ -49,7 +50,8 @@ import { MIN_BSON_BINARY_VALUE, MIN_KEY_VALUE, MIN_REGEX_VALUE, - MIN_BSON_OBJECT_ID_VALUE + MIN_BSON_OBJECT_ID_VALUE, + RESERVED_DECIMAL128_KEY } from '../../../src/model/values'; import * as api from '../../../src/protos/firestore_proto_api'; import { primitiveComparator } from 
'../../../src/util/misc'; @@ -124,6 +126,7 @@ describe('Values', () => { wrap(new BsonObjectId('123456789012')) ], [wrap(new Int32Value(255)), wrap(new Int32Value(255))], + [wrap(new Decimal128Value('1.2e3')), wrap(new Decimal128Value('1.2e3'))], [wrap(MaxKey.instance()), wrap(MaxKey.instance())] ]; expectEqualitySets(values, (v1, v2) => valueEquals(v1, v2)); @@ -171,32 +174,61 @@ describe('Values', () => { [wrap(true)], // numbers - [wrap(NaN)], - [wrap(-Infinity)], + [wrap(NaN), wrap(new Decimal128Value('NaN'))], + [wrap(-Infinity), wrap(new Decimal128Value('-Infinity'))], [wrap(-Number.MAX_VALUE)], - [wrap(Number.MIN_SAFE_INTEGER - 1)], + [ + wrap(Number.MIN_SAFE_INTEGER - 1), + wrap(new Decimal128Value('-9007199254740992')) + ], [wrap(Number.MIN_SAFE_INTEGER)], - // 64-bit and 32-bit integers order together numerically. - [{ integerValue: -2147483648 }, wrap(new Int32Value(-2147483648))], - [wrap(-1.1)], + // 64-bit,32-bit integers and 128 decimal numbers order together numerically. + [ + { integerValue: -2147483648 }, + wrap(new Int32Value(-2147483648)), + wrap(new Decimal128Value('-2147483648')), + wrap(new Decimal128Value('-2.147483648e9')) + ], + [wrap(-1.5), wrap(new Decimal128Value('-1.5'))], // Integers, Int32Values and Doubles order the same. - [{ integerValue: -1 }, { doubleValue: -1 }, wrap(new Int32Value(-1))], + [ + { integerValue: -1 }, + { doubleValue: -1 }, + wrap(new Int32Value(-1)), + wrap(new Decimal128Value('-1')), + wrap(new Decimal128Value('-1.0')) + ], [wrap(-Number.MIN_VALUE)], // zeros all compare the same. 
[ { integerValue: 0 }, { doubleValue: 0 }, { doubleValue: -0 }, - wrap(new Int32Value(0)) + wrap(new Int32Value(0)), + wrap(new Decimal128Value('0')), + wrap(new Decimal128Value('0.0')), + wrap(new Decimal128Value('-0')), + wrap(new Decimal128Value('-0.0')), + wrap(new Decimal128Value('+0')), + wrap(new Decimal128Value('+0.0')) ], [wrap(Number.MIN_VALUE)], - [{ integerValue: 1 }, { doubleValue: 1.0 }, wrap(new Int32Value(1))], - [wrap(1.1)], - [wrap(new Int32Value(2))], - [wrap(new Int32Value(2147483647))], + [ + { integerValue: 1 }, + { doubleValue: 1.0 }, + wrap(new Int32Value(1)), + wrap(new Decimal128Value('1')), + wrap(new Decimal128Value('1.0')) + ], + [wrap(1.5), wrap(new Decimal128Value('1.5'))], + [wrap(new Int32Value(2)), wrap(new Decimal128Value('2'))], + [ + wrap(new Int32Value(2147483647)), + wrap(new Decimal128Value('2.147483647e9')) + ], [wrap(Number.MAX_SAFE_INTEGER)], [wrap(Number.MAX_SAFE_INTEGER + 1)], - [wrap(Infinity)], + [wrap(Infinity), wrap(new Decimal128Value('Infinity'))], // timestamps [wrap(date1)], @@ -425,6 +457,13 @@ describe('Values', () => { expectedByteSize: 8, elements: [wrap(new Int32Value(1)), wrap(new Int32Value(2147483647))] }, + { + expectedByteSize: 16, + elements: [ + wrap(new Decimal128Value('1.2e3')), + wrap(new Decimal128Value('1234567890.1234567890123456')) + ] + }, { expectedByteSize: 16, elements: [ @@ -503,9 +542,15 @@ describe('Values', () => { valuesGetLowerBound({ mapValue: { fields: { [RESERVED_INT32_KEY]: { integerValue: 0 } } } }), + valuesGetLowerBound({ + mapValue: { + fields: { [RESERVED_DECIMAL128_KEY]: { stringValue: '0' } } + } + }), wrap(NaN) ], [wrap(Number.NEGATIVE_INFINITY)], + [wrap(0), wrap(new Int32Value(0)), wrap(new Decimal128Value('0.0'))], [wrap(Number.MIN_VALUE)], // dates @@ -622,7 +667,10 @@ describe('Values', () => { [valuesGetUpperBound({ booleanValue: false })], // numbers - [wrap(new Int32Value(2147483647))], //largest int32 value + [ + wrap(new Int32Value(2147483647)), + wrap(new 
Decimal128Value('2147483647')) + ], //largest int32 value [wrap(Number.MAX_SAFE_INTEGER)], [wrap(Number.POSITIVE_INFINITY)], [valuesGetUpperBound({ doubleValue: NaN })], @@ -727,6 +775,9 @@ describe('Values', () => { '{__request_timestamp__:{increment:2,seconds:1}}' ); expect(canonicalId(wrap(new Int32Value(1)))).to.equal('{__int__:1}'); + expect(canonicalId(wrap(new Decimal128Value('-1.2e3')))).to.equal( + '{__decimal128__:-1.2e3}' + ); expect( canonicalId(wrap(new BsonBinaryData(1, new Uint8Array([1, 2, 3])))) ).to.equal('{__binary__:AQECAw==}'); diff --git a/packages/firestore/test/unit/remote/serializer.helper.ts b/packages/firestore/test/unit/remote/serializer.helper.ts index 24d7b039d0c..7aa0321e418 100644 --- a/packages/firestore/test/unit/remote/serializer.helper.ts +++ b/packages/firestore/test/unit/remote/serializer.helper.ts @@ -24,6 +24,7 @@ import { BsonObjectId, BsonTimestamp, Bytes, + Decimal128Value, DocumentReference, GeoPoint, increment, @@ -580,7 +581,8 @@ export function serializerTest( MinKey.instance(), MaxKey.instance(), new RegexValue('a', 'b'), - new Int32Value(1) + new Int32Value(1), + new Decimal128Value('1.2e3') ]; for (const example of examples) { diff --git a/packages/firestore/tsconfig.json b/packages/firestore/tsconfig.json index d53852fa79a..f25cf0b153d 100644 --- a/packages/firestore/tsconfig.json +++ b/packages/firestore/tsconfig.json @@ -1,7 +1,8 @@ { "extends": "../../config/tsconfig.base.json", "compilerOptions": { - "outDir": "dist" + "outDir": "dist", + "target": "es2020" }, "exclude": ["scripts/**/*", "dist/**/*"] } From 60fd0763fd16e22c183bf8cc8492935f4788a507 Mon Sep 17 00:00:00 2001 From: Ehsan Nasiri Date: Fri, 27 Jun 2025 16:12:52 -0700 Subject: [PATCH 9/9] Fix merge conflicts. 
--- .../test/integration/api/database.test.ts | 15 ++++++++++++++- .../firestore/test/integration/api/type.test.ts | 3 ++- 2 files changed, 16 insertions(+), 2 deletions(-) diff --git a/packages/firestore/test/integration/api/database.test.ts b/packages/firestore/test/integration/api/database.test.ts index c0502489e0f..c709c520a23 100644 --- a/packages/firestore/test/integration/api/database.test.ts +++ b/packages/firestore/test/integration/api/database.test.ts @@ -3137,7 +3137,8 @@ apiDescribe('Database', persistence => { const NIGHTLY_PROJECT_ID = 'firestore-sdk-nightly'; const settings = { ...DEFAULT_SETTINGS, - host: 'test-firestore.sandbox.googleapis.com' + host: 'test-firestore.sandbox.googleapis.com', + databaseId: '(default)' }; it('can write and read BSON types', async () => { @@ -3379,6 +3380,7 @@ apiDescribe('Database', persistence => { ]); await assertSDKQueryResultsConsistentWithBackend( + coll, orderedQuery, testDocs, toIds(snapshot) @@ -3399,6 +3401,7 @@ apiDescribe('Database', persistence => { testDocs['d'] ]); await assertSDKQueryResultsConsistentWithBackend( + coll, orderedQuery, testDocs, toIds(snapshot) @@ -3417,6 +3420,7 @@ apiDescribe('Database', persistence => { testDocs['b'] ]); await assertSDKQueryResultsConsistentWithBackend( + coll, orderedQuery, testDocs, toIds(snapshot) @@ -3435,6 +3439,7 @@ apiDescribe('Database', persistence => { testDocs['f'] ]); await assertSDKQueryResultsConsistentWithBackend( + coll, orderedQuery, testDocs, toIds(snapshot) @@ -3459,6 +3464,7 @@ apiDescribe('Database', persistence => { testDocs['e'] ]); await assertSDKQueryResultsConsistentWithBackend( + coll, orderedQuery, testDocs, toIds(snapshot) @@ -3901,6 +3907,7 @@ apiDescribe('Database', persistence => { 'i' // NaN ]); await assertSDKQueryResultsConsistentWithBackend( + coll, orderedQuery, testDocs, toIds(snapshot) @@ -3923,6 +3930,7 @@ apiDescribe('Database', persistence => { 'i' ]); await assertSDKQueryResultsConsistentWithBackend( + coll, orderedQuery, 
testDocs, toIds(snapshot) @@ -3936,6 +3944,7 @@ apiDescribe('Database', persistence => { snapshot = await getDocs(orderedQuery); expect(toIds(snapshot)).to.deep.equal(['f', 'e', 'd', 'c']); await assertSDKQueryResultsConsistentWithBackend( + coll, orderedQuery, testDocs, toIds(snapshot) @@ -3969,6 +3978,7 @@ apiDescribe('Database', persistence => { let snapshot = await getDocs(orderedQuery); expect(toIds(snapshot)).to.deep.equal(['b']); await assertSDKQueryResultsConsistentWithBackend( + coll, orderedQuery, testDocs, toIds(snapshot) @@ -3981,6 +3991,7 @@ apiDescribe('Database', persistence => { snapshot = await getDocs(orderedQuery); expect(toIds(snapshot)).to.deep.equal(['a', 'e', 'd', 'c']); await assertSDKQueryResultsConsistentWithBackend( + coll, orderedQuery, testDocs, toIds(snapshot) @@ -3990,6 +4001,7 @@ apiDescribe('Database', persistence => { snapshot = await getDocs(orderedQuery); expect(toIds(snapshot)).to.deep.equal(['c']); await assertSDKQueryResultsConsistentWithBackend( + coll, orderedQuery, testDocs, toIds(snapshot) @@ -3999,6 +4011,7 @@ apiDescribe('Database', persistence => { snapshot = await getDocs(orderedQuery); expect(toIds(snapshot)).to.deep.equal(['a', 'e', 'd', 'b']); await assertSDKQueryResultsConsistentWithBackend( + coll, orderedQuery, testDocs, toIds(snapshot) diff --git a/packages/firestore/test/integration/api/type.test.ts b/packages/firestore/test/integration/api/type.test.ts index f47e99b0185..97df0672af9 100644 --- a/packages/firestore/test/integration/api/type.test.ts +++ b/packages/firestore/test/integration/api/type.test.ts @@ -247,7 +247,8 @@ apiDescribe('Firestore', persistence => { const NIGHTLY_PROJECT_ID = 'firestore-sdk-nightly'; const settings = { ...DEFAULT_SETTINGS, - host: 'test-firestore.sandbox.googleapis.com' + host: 'test-firestore.sandbox.googleapis.com', + databaseId: '(default)' }; it('can read and write minKey fields', () => {