From ed5b6324aeede5d673cd6b76620ee8c05ca8b882 Mon Sep 17 00:00:00 2001 From: Razvan Cristian Lung Date: Sun, 24 Jan 2021 23:01:37 +0200 Subject: [PATCH] [cloud_firestore][wip] migrate to present --- .../firestore/auth/credentials_provider.dart | 1 + .../firebase_auth_credentials_provider.dart | 13 +- .../lib/src/firebase/firestore/blob.dart | 1 + .../src/firebase/firestore/core/bound.dart | 53 +- .../firestore/core/component_provider.dart | 93 ++ .../firestore/core/event_manager.dart | 59 +- .../filter/array_contains_any_filter.dart | 20 +- .../core/filter/array_contains_filter.dart | 7 +- .../firestore/core/filter/field_filter.dart | 61 +- .../firestore/core/filter/filter.dart | 7 +- .../core/filter/filter_operator.dart | 25 +- .../firestore/core/filter/in_filter.dart | 10 +- .../core/filter/key_field_filter.dart | 12 +- .../core/filter/key_field_in_filter.dart | 35 +- .../core/filter/key_field_not_in_filter.dart | 17 + .../firestore/core/filter/not_in_filter.dart | 21 + .../firestore/core/firestore_client.dart | 263 ++-- .../firebase/firestore/core/index_range.dart | 12 +- ...ent_sequence.dart => listen_sequence.dart} | 0 .../core/memory_component_provider.dart | 104 ++ .../src/firebase/firestore/core/order_by.dart | 24 +- .../src/firebase/firestore/core/query.dart | 272 ++-- .../firebase/firestore/core/query_stream.dart | 52 +- .../core/sqlite_component_provider.dart | 38 + .../firebase/firestore/core/sync_engine.dart | 419 +++--- .../src/firebase/firestore/core/target.dart | 181 +++ .../firestore/core/target_id_generator.dart | 34 +- .../firebase/firestore/core/transaction.dart | 38 +- .../firestore/core/transaction_runner.dart | 70 + .../firebase/firestore/core/user_data.dart | 104 +- .../lib/src/firebase/firestore/core/view.dart | 109 +- .../firebase/firestore/document_change.dart | 6 +- .../firestore/document_reference.dart | 85 +- .../firebase/firestore/document_snapshot.dart | 77 +- .../lib/src/firebase/firestore/firestore.dart | 85 +- .../firebase/firestore/firestore_error.dart | 7 +- .../firestore/local/default_query_engine.dart | 138 ++ .../local/garbage_collection_scheduler.dart | 10 + .../firestore/local/indexed_query_engine.dart | 82 +- .../firestore/local/local_documents_view.dart | 104 +- .../firestore/local/local_serializer.dart | 139 +- .../firebase/firestore/local/local_store.dart | 444 +++---- .../firestore/local/local_view_changes.dart | 15 +- .../memory_eager_reference_delegate.dart | 17 +- .../memory/memory_lru_reference_delegate.dart | 45 +- .../local/memory/memory_mutation_queue.dart | 81 +- .../local/memory/memory_persistence.dart | 59 +- .../memory/memory_remote_document_cache.dart | 99 +- ...ry_cache.dart => memory_target_cache.dart} | 59 +- .../local/persistance/stats_collector.dart | 25 - .../index_manager.dart | 0 .../local/{ => persistence}/lru_delegate.dart | 6 +- .../lru_garbage_collector.dart | 79 +- .../mutation_queue.dart | 16 +- .../persistence.dart | 10 +- .../reference_delegate.dart | 4 +- .../remote_document_cache.dart | 20 +- .../target_cache.dart} | 44 +- .../target_data.dart} | 65 +- .../firestore/local/query_engine.dart | 18 +- .../firestore/local/query_result.dart | 14 + .../firestore/local/simple_query_engine.dart | 36 - .../local/sqlite/sqlite_collection_index.dart | 9 +- .../sqlite/sqlite_lru_reference_delegate.dart | 84 +- .../local/sqlite/sqlite_mutation_queue.dart | 93 +- .../local/sqlite/sqlite_persistence.dart | 105 +- .../sqlite/sqlite_remote_document_cache.dart | 102 +- .../firestore/local/sqlite/sqlite_schema.dart | 179 ++- 
...ry_cache.dart => sqlite_target_cache.dart} | 104 +- .../firebase/firestore/model/database_id.dart | 13 + .../firebase/firestore/model/document.dart | 97 +- .../firestore/model/document_collections.dart | 12 +- .../firestore/model/document_key.dart | 24 +- .../mutation/array_transform_operation.dart | 62 +- .../model/mutation/delete_mutation.dart | 20 +- .../firestore/model/mutation/field_mask.dart | 2 +- .../model/mutation/field_transform.dart | 2 +- .../firestore/model/mutation/mutation.dart | 132 +- .../model/mutation/mutation_batch.dart | 26 +- .../model/mutation/mutation_result.dart | 10 +- ...numeric_increment_transform_operation.dart | 75 +- .../model/mutation/patch_mutation.dart | 62 +- .../mutation/server_timestamp_operation.dart | 16 +- .../model/mutation/set_mutation.dart | 51 +- .../model/mutation/transform_mutation.dart | 224 ---- .../model/mutation/transform_operation.dart | 13 +- .../model/mutation/verify_mutation.dart | 48 + .../firestore/model/object_value.dart | 210 +++ .../firestore/model/server_timestamps.dart | 73 ++ .../firestore/model/value/array_value.dart | 51 - .../firestore/model/value/blob_value.dart | 39 - .../firestore/model/value/bool_value.dart | 45 - .../firestore/model/value/double_value.dart | 70 - .../firestore/model/value/field_value.dart | 93 -- .../model/value/geo_point_value.dart | 39 - .../firestore/model/value/integer_value.dart | 48 - .../firestore/model/value/null_value.dart | 37 - .../firestore/model/value/number_value.dart | 17 - .../firestore/model/value/object_value.dart | 169 --- .../model/value/reference_value.dart | 44 - .../model/value/server_timestamp_value.dart | 72 -- .../firestore/model/value/string_value.dart | 43 - .../model/value/timestamp_value.dart | 44 - .../src/firebase/firestore/model/values.dart | 406 ++++++ .../lib/src/firebase/firestore/query.dart | 4 +- .../firestore/remote/abstract_stream.dart | 327 +++++ .../firebase/firestore/remote/datastore.dart | 219 ++++ .../remote/datastore/base_stream.dart | 282 ----- .../datastore/channel_options_provider.dart | 48 - .../firestore/remote/datastore/datastore.dart | 202 --- .../remote/datastore/firestore_client.dart | 202 --- .../remote/datastore/transaction_client.dart | 42 - .../remote/datastore/watch_stream.dart | 92 -- .../remote/datastore/write_stream.dart | 143 --- ...irebase_client_grpc_metadata_provider.dart | 56 + .../remote/firestore_call_credentials.dart | 3 +- .../firestore/remote/firestore_channel.dart | 194 +++ .../incoming_stream_observer.dart | 4 +- .../remote/online_state_tracker.dart | 20 +- .../firestore/remote/remote_serializer.dart | 749 ++++------- .../firestore/remote/remote_store.dart | 366 +++--- .../src/firebase/firestore/remote/stream.dart | 113 ++ .../firestore/remote/target_change.dart | 9 +- .../remote/watch_change_aggregator.dart | 101 +- .../firestore/remote/watch_stream.dart | 102 ++ .../firestore/remote/write_stream.dart | 157 +++ .../firestore/server_timestamp_behavior.dart | 1 - .../src/firebase/firestore/transaction.dart | 8 +- .../firebase/firestore/util/async_task.dart | 405 ++++++ .../firestore/util/exponential_backoff.dart | 68 +- .../firebase/firestore/util/timer_task.dart | 179 --- .../lib/src/firebase/firestore/util/util.dart | 34 +- .../src/firebase/firestore/write_batch.dart | 8 +- .../src/proto/firebase/firestore/index.dart | 1 - .../firebase/firestore/proto/bundle.pb.dart | 495 ++++++++ .../firestore/proto/bundle.pbenum.dart | 26 + .../firestore/proto/bundle.pbjson.dart | 71 ++ .../proto/firebase/firestore/proto/index.dart | 3 + 
.../firestore/proto/maybe_document.pb.dart | 106 +- .../proto/maybe_document.pbenum.dart | 4 +- .../proto/maybe_document.pbjson.dart | 4 +- .../firebase/firestore/proto/mutation.pb.dart | 82 +- .../firestore/proto/mutation.pbenum.dart | 4 +- .../firestore/proto/mutation.pbjson.dart | 4 +- .../firebase/firestore/proto/target.pb.dart | 116 +- .../firestore/proto/target.pbenum.dart | 4 +- .../firestore/proto/target.pbjson.dart | 5 +- .../lib/src/proto/firebase/index.dart | 1 - .../src/proto/google/api/annotations.pb.dart | 6 +- .../proto/google/api/annotations.pbenum.dart | 4 +- .../proto/google/api/annotations.pbjson.dart | 4 +- .../lib/src/proto/google/api/http.pb.dart | 136 +- .../lib/src/proto/google/api/http.pbenum.dart | 4 +- .../lib/src/proto/google/api/http.pbjson.dart | 4 +- .../lib/src/proto/google/firestore/index.dart | 1 - .../proto/google/firestore/v1/common.pb.dart | 136 +- .../google/firestore/v1/common.pbenum.dart | 4 +- .../google/firestore/v1/common.pbjson.dart | 4 +- .../google/firestore/v1/document.pb.dart | 178 ++- .../google/firestore/v1/document.pbenum.dart | 4 +- .../google/firestore/v1/document.pbjson.dart | 4 +- .../google/firestore/v1/firestore.pb.dart | 1122 +++++++++++++---- .../google/firestore/v1/firestore.pbenum.dart | 16 +- .../google/firestore/v1/firestore.pbgrpc.dart | 66 +- .../google/firestore/v1/firestore.pbjson.dart | 4 +- .../proto/google/firestore/v1/query.pb.dart | 348 ++++- .../google/firestore/v1/query.pbenum.dart | 48 +- .../google/firestore/v1/query.pbjson.dart | 8 +- .../proto/google/firestore/v1/write.pb.dart | 356 +++++- .../google/firestore/v1/write.pbenum.dart | 10 +- .../google/firestore/v1/write.pbjson.dart | 6 +- .../lib/src/proto/google/index.dart | 1 - .../lib/src/proto/google/protobuf/any.pb.dart | 34 +- .../src/proto/google/protobuf/any.pbenum.dart | 4 +- .../src/proto/google/protobuf/any.pbjson.dart | 4 +- .../src/proto/google/protobuf/empty.pb.dart | 16 +- .../proto/google/protobuf/empty.pbenum.dart | 4 +- .../proto/google/protobuf/empty.pbjson.dart | 4 +- .../src/proto/google/protobuf/struct.pb.dart | 106 +- .../proto/google/protobuf/struct.pbenum.dart | 8 +- .../proto/google/protobuf/struct.pbjson.dart | 4 +- .../proto/google/protobuf/timestamp.pb.dart | 34 +- .../google/protobuf/timestamp.pbenum.dart | 4 +- .../google/protobuf/timestamp.pbjson.dart | 4 +- .../proto/google/protobuf/wrappers.pb.dart | 220 +++- .../google/protobuf/wrappers.pbenum.dart | 4 +- .../google/protobuf/wrappers.pbjson.dart | 4 +- .../lib/src/proto/google/rpc/status.pb.dart | 44 +- .../src/proto/google/rpc/status.pbenum.dart | 4 +- .../src/proto/google/rpc/status.pbjson.dart | 4 +- .../lib/src/proto/google/type/latlng.pb.dart | 34 +- .../src/proto/google/type/latlng.pbenum.dart | 4 +- .../src/proto/google/type/latlng.pbjson.dart | 4 +- .../cloud_firestore_vm/pubspec.lock | 174 ++- .../cloud_firestore_vm/pubspec.yaml | 36 +- .../firebase/firestore/proto/bundle.proto | 120 ++ .../firebase/firestore/proto/target.proto | 4 + .../res/protos/generate_proto.sh | 2 +- .../protos/google/firestore/v1/query.proto | 86 +- .../protos/google/firestore/v1/write.proto | 15 +- .../firebase/firestore/firestore_test.dart | 6 +- .../firestore/remote/stream_test.dart | 18 +- .../core/target_id_generator_test.dart | 2 +- .../local/accumulating_stats_collector.dart | 2 +- .../local/cases/index_manager_test_case.dart | 4 +- .../local/cases/local_store_test_case.dart | 18 +- .../lru_garbage_collector_test_case.dart | 56 +- .../local/cases/mutation_queue_test_case.dart | 4 +- 
.../local/cases/query_cache_test_case.dart | 43 +- .../remote_document_cache_test_case.dart | 4 +- .../local/local_serializer_test.dart | 8 +- .../local/persistence_test_helpers.dart | 2 +- .../firestore/remote/mock_datastore.dart | 26 +- .../remote/remote_serializer_test.dart | 16 +- .../firestore/remote/remote_store_test.dart | 6 +- .../firestore/spec/spec_test_case.dart | 12 +- .../unit/firebase/firestore/test_util.dart | 2 +- .../util/test_target_metadata_provider.dart | 12 +- .../test/util/integration_test_util.dart | 8 +- .../test/util/test_util.dart | 22 +- 220 files changed, 9536 insertions(+), 6367 deletions(-) create mode 100644 cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/component_provider.dart create mode 100644 cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/filter/key_field_not_in_filter.dart create mode 100644 cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/filter/not_in_filter.dart rename cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/{listent_sequence.dart => listen_sequence.dart} (100%) create mode 100644 cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/memory_component_provider.dart create mode 100644 cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/sqlite_component_provider.dart create mode 100644 cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/target.dart create mode 100644 cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/transaction_runner.dart create mode 100644 cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/default_query_engine.dart create mode 100644 cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/garbage_collection_scheduler.dart rename cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/memory/{memory_query_cache.dart => memory_target_cache.dart} (59%) delete mode 100644 cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/persistance/stats_collector.dart rename cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/{persistance => persistence}/index_manager.dart (100%) rename cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/{ => persistence}/lru_delegate.dart (91%) rename cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/{ => persistence}/lru_garbage_collector.dart (77%) rename cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/{persistance => persistence}/mutation_queue.dart (91%) rename cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/{persistance => persistence}/persistence.dart (96%) rename cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/{persistance => persistence}/reference_delegate.dart (96%) rename cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/{persistance => persistence}/remote_document_cache.dart (74%) rename cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/{persistance/query_cache.dart => persistence/target_cache.dart} (69%) rename cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/{query_data.dart => persistence/target_data.dart} (54%) create mode 100644 cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/query_result.dart delete mode 100644 cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/simple_query_engine.dart rename cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/sqlite/{sqlite_query_cache.dart => 
sqlite_target_cache.dart} (78%) delete mode 100644 cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/transform_mutation.dart create mode 100644 cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/verify_mutation.dart create mode 100644 cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/object_value.dart create mode 100644 cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/server_timestamps.dart delete mode 100644 cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/array_value.dart delete mode 100644 cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/blob_value.dart delete mode 100644 cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/bool_value.dart delete mode 100644 cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/double_value.dart delete mode 100644 cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/field_value.dart delete mode 100644 cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/geo_point_value.dart delete mode 100644 cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/integer_value.dart delete mode 100644 cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/null_value.dart delete mode 100644 cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/number_value.dart delete mode 100644 cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/object_value.dart delete mode 100644 cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/reference_value.dart delete mode 100644 cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/server_timestamp_value.dart delete mode 100644 cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/string_value.dart delete mode 100644 cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/timestamp_value.dart create mode 100644 cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/values.dart create mode 100644 cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/abstract_stream.dart create mode 100644 cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/datastore.dart delete mode 100644 cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/datastore/base_stream.dart delete mode 100644 cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/datastore/channel_options_provider.dart delete mode 100644 cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/datastore/datastore.dart delete mode 100644 cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/datastore/firestore_client.dart delete mode 100644 cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/datastore/transaction_client.dart delete mode 100644 cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/datastore/watch_stream.dart delete mode 100644 cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/datastore/write_stream.dart create mode 100644 cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/firebase_client_grpc_metadata_provider.dart create mode 100644 cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/firestore_channel.dart rename cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/{util => remote}/incoming_stream_observer.dart 
(86%) create mode 100644 cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/stream.dart create mode 100644 cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/watch_stream.dart create mode 100644 cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/write_stream.dart create mode 100644 cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/util/async_task.dart delete mode 100644 cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/util/timer_task.dart create mode 100644 cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/proto/bundle.pb.dart create mode 100644 cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/proto/bundle.pbenum.dart create mode 100644 cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/proto/bundle.pbjson.dart create mode 100644 cloud_firestore/cloud_firestore_vm/res/protos/firebase/firestore/proto/bundle.proto diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/auth/credentials_provider.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/auth/credentials_provider.dart index 6dafea57..8e337202 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/auth/credentials_provider.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/auth/credentials_provider.dart @@ -11,6 +11,7 @@ abstract class CredentialsProvider { /// Requests token for the current user. Use [invalidateToken] to /// force-refresh the token. Returns future that will be completed with the /// current token. + // todo: make this a method Future get token; /// Marks the last retrieved token as invalid, making the next [token] request diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/auth/firebase_auth_credentials_provider.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/auth/firebase_auth_credentials_provider.dart index e5787273..4c2da411 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/auth/firebase_auth_credentials_provider.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/auth/firebase_auth_credentials_provider.dart @@ -7,7 +7,6 @@ import 'dart:async'; import 'package:_firebase_internal_vm/_firebase_internal_vm.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/auth/credentials_provider.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/auth/user.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/firestore_error.dart'; import 'package:firebase_core_vm/firebase_core_vm.dart'; import 'package:rxdart/rxdart.dart'; @@ -15,9 +14,8 @@ import 'package:rxdart/rxdart.dart'; /// get an auth token. class FirebaseAuthCredentialsProvider extends CredentialsProvider { FirebaseAuthCredentialsProvider(this.authProvider) - : _onUserChange = BehaviorSubject.seeded(authProvider.uid != null - ? User(authProvider.uid) - : User.unauthenticated); + : _onUserChange = + BehaviorSubject.seeded(authProvider.uid != null ? User(authProvider.uid) : User.unauthenticated); /// Stream that will receive credential changes (sign-in / sign-out, token /// changes). @@ -47,15 +45,14 @@ class FirebaseAuthCredentialsProvider extends CredentialsProvider { // the request is outstanding. 
final int savedCounter = _tokenCounter; - final GetTokenResult result = - await authProvider.getAccessToken(forceRefresh: doForceRefresh); + final GetTokenResult result = await authProvider.getAccessToken(forceRefresh: doForceRefresh); // Cancel the request since the token changed while the request was // outstanding so the response is potentially for a previous user (which // user, we can't be sure). if (savedCounter != _tokenCounter) { - throw FirestoreError('getToken aborted due to token change', - FirestoreErrorCode.aborted); + Log.d('FirebaseAuthCredentialsProvider', 'getToken aborted due to token change'); + return token; } return result?.token; diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/blob.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/blob.dart index 884b82d7..7acbd18f 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/blob.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/blob.dart @@ -8,6 +8,7 @@ import 'dart:typed_data'; import 'package:cloud_firestore_vm/src/firebase/firestore/util/util.dart'; import 'package:collection/collection.dart'; +/// Immutable class representing an array of bytes in Cloud Firestore. class Blob implements Comparable { Blob(Uint8List bytes) : bytes = Uint8List.fromList(bytes); diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/bound.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/bound.dart index 5dda1fd8..c78f8f46 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/bound.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/bound.dart @@ -6,21 +6,21 @@ import 'package:_firebase_internal_vm/_firebase_internal_vm.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/document.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/document_key.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/field_path.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/model/value/field_value.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/values.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/util/assert.dart'; +import 'package:cloud_firestore_vm/src/proto/google/firestore/v1/index.dart' show Value; import 'package:collection/collection.dart'; import 'order_by.dart'; /// Represents a bound of a query. /// -/// The bound is specified with the given components representing a position and -/// whether it's just before or just after the position (relative to whatever -/// the query order is). The position represents a logical index position for a -/// query. It's a prefix of values for the (potentially implicit) order by -/// clauses of a query. Bound provides a function to determine whether a -/// document comes before or after a bound. This is influenced by whether the -/// position is just before or just after the provided values. +/// The bound is specified with the given components representing a position and whether it's just before or just after +/// the position (relative to whatever the query order is). +/// +/// The position represents a logical index position for a query. It's a prefix of values for the (potentially implicit) +/// order by clauses of a query. Bound provides a function to determine whether a document comes before or after a +/// bound. This is influenced by whether the position is just before or just after the provided values. 
class Bound { const Bound({this.position, this.before}); @@ -28,7 +28,7 @@ class Bound { final bool before; /// The index position of this bound - final List position; + final List position; String canonicalString() { // TODO(long1eu): Make this collision robust. @@ -38,31 +38,33 @@ class Bound { } else { builder.write('a:'); } - position.forEach(builder.write); + bool first = true; + for (Value indexComponent in position) { + if (!first) { + builder.write(','); + } + first = false; + builder.write(canonicalId(indexComponent)); + } return builder.toString(); } /// Returns true if a document sorts before a bound using the provided sort /// order. bool sortsBeforeDocument(List orderBy, Document document) { - hardAssert(position.length <= orderBy.length, - 'Bound has more components than query\'s orderBy'); + hardAssert(position.length <= orderBy.length, 'Bound has more components than query\'s orderBy'); int comparison = 0; for (int i = 0; i < position.length; i++) { final OrderBy orderByComponent = orderBy[i]; - final FieldValue component = position[i]; + final Value component = position[i]; if (orderByComponent.field == FieldPath.keyPath) { - final Object refValue = component.value; - hardAssert(refValue is DocumentKey, - 'Bound has a non-key value where the key path is being used $component'); - - final DocumentKey documentKey = refValue; - comparison = documentKey.compareTo(document.key); + hardAssert( + isReferenceValue(component), 'Bound has a non-key value where the key path is being used $component'); + comparison = DocumentKey.fromName(component.referenceValue).compareTo(document.key); } else { - final FieldValue docValue = document.getField(orderByComponent.field); - hardAssert(docValue != null, - 'Field should exist since document matched the orderBy already.'); - comparison = component.compareTo(docValue); + final Value docValue = document.getField(orderByComponent.field); + hardAssert(docValue != null, 'Field should exist since document matched the orderBy already.'); + comparison = compare(component, docValue); } if (orderByComponent.direction == OrderByDirection.descending) { @@ -83,11 +85,10 @@ class Bound { other is Bound && runtimeType == other.runtimeType && before == other.before && - const ListEquality().equals(position, other.position); + const ListEquality().equals(position, other.position); @override - int get hashCode => - before.hashCode ^ const ListEquality().hash(position); + int get hashCode => before.hashCode ^ const ListEquality().hash(position); @override String toString() { diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/component_provider.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/component_provider.dart new file mode 100644 index 00000000..3adb3309 --- /dev/null +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/component_provider.dart @@ -0,0 +1,93 @@ +// File created by +// Lung Razvan +// on 16/01/2021 + +import 'package:cloud_firestore_vm/src/firebase/firestore/auth/user.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/core/database_info.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/core/event_manager.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/core/sync_engine.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/firestore_settings.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/garbage_collection_scheduler.dart'; +import 
'package:cloud_firestore_vm/src/firebase/firestore/local/local_store.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/persistence.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/remote/datastore.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/remote/remote_store.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/util/async_task.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/util/database.dart'; +import 'package:rxdart/rxdart.dart'; + +// ignore_for_file: close_sinks + +/// Initializes and wires up all core components for Firestore. +/// +/// Implementations provide custom components by overriding the `createX()` methods. +abstract class ComponentProvider { + Persistence _persistence; + LocalStore _localStore; + SyncEngine _syncEngine; + RemoteStore _remoteStore; + EventManager _eventManager; + BehaviorSubject _onNetworkConnected; + GarbageCollectionScheduler _gargabeCollectionScheduler; + + Persistence get persistence => _persistence; + + LocalStore get localStore => _localStore; + + SyncEngine get syncEngine => _syncEngine; + + RemoteStore get remoteStore => _remoteStore; + + EventManager get eventManager => _eventManager; + + BehaviorSubject get onNetworkConnected => _onNetworkConnected; + + GarbageCollectionScheduler get gargabeCollectionScheduler => _gargabeCollectionScheduler; + + Future initialize(ComponentProviderConfiguration configuration) async { + _persistence = await createPersistence(configuration); + await persistence.start(); + _localStore = createLocalStore(configuration); + _onNetworkConnected = configuration.onNetworkConnected; + _remoteStore = createRemoteStore(configuration); + _syncEngine = createSyncEngine(configuration); + _eventManager = createEventManager(configuration); + await localStore.start(); + await remoteStore.start(); + _gargabeCollectionScheduler = createGarbageCollectionScheduler(configuration); + } + + GarbageCollectionScheduler createGarbageCollectionScheduler(ComponentProviderConfiguration configuration); + + EventManager createEventManager(ComponentProviderConfiguration configuration); + + LocalStore createLocalStore(ComponentProviderConfiguration configuration); + + Future createPersistence(ComponentProviderConfiguration configuration); + + RemoteStore createRemoteStore(ComponentProviderConfiguration configuration); + + SyncEngine createSyncEngine(ComponentProviderConfiguration configuration); +} + +class ComponentProviderConfiguration { + ComponentProviderConfiguration({ + this.asyncQueue, + this.databaseInfo, + this.datastore, + this.initialUser, + this.maxConcurrentLimboResolutions, + this.settings, + this.onNetworkConnected, + this.openDatabase, + }); + + final AsyncQueue asyncQueue; + final DatabaseInfo databaseInfo; + final Datastore datastore; + final User initialUser; + final int maxConcurrentLimboResolutions; + final FirestoreSettings settings; + final BehaviorSubject onNetworkConnected; + final OpenDatabase openDatabase; +} diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/event_manager.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/event_manager.dart index 9b56ef3e..2297165b 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/event_manager.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/event_manager.dart @@ -9,22 +9,32 @@ import 'package:cloud_firestore_vm/src/firebase/firestore/core/query.dart'; import 
'package:cloud_firestore_vm/src/firebase/firestore/core/query_stream.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/core/sync_engine.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/core/view_snapshot.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/util/assert.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/util/util.dart'; import 'package:grpc/grpc.dart'; +import 'package:rxdart/rxdart.dart'; /// EventManager is responsible for mapping queries to query event listeners. /// It handles 'fan-out.' (Identical queries will re-use the same watch on the /// backend.) class EventManager implements SyncEngineCallback { - EventManager(this._syncEngine) : _queries = {} { + EventManager(this._syncEngine) + : _queries = {}, + _controller = BehaviorSubject.seeded(null) { _syncEngine.syncEngineListener = this; } final SyncEngine _syncEngine; final Map _queries; + // We use a BehaviorSubject because it emits the last value received to new listeners + final BehaviorSubject _controller; + OnlineState _onlineState = OnlineState.unknown; + /// Global snapshots stream + Stream get snapshotsInSync => _controller; + /// Adds a query listener that will be called with new snapshots for the /// query. The [EventManager] is responsible for multiplexing many listeners /// to a single listen in the [SyncEngine] and will perform a listen if it's @@ -44,10 +54,15 @@ class EventManager implements SyncEngineCallback { queryInfo.listeners.add(queryListener); - queryListener.onOnlineStateChanged(_onlineState); + // Run global snapshot listeners if a consistent snapshot has been emitted. + bool raisedEvent = queryListener.onOnlineStateChanged(_onlineState); + hardAssert(!raisedEvent, "onOnlineStateChanged() shouldn't raise an event for brand-new listeners."); if (queryInfo.viewSnapshot != null) { - await queryListener.onViewSnapshot(queryInfo.viewSnapshot); + raisedEvent = queryListener.onViewSnapshot(queryInfo.viewSnapshot); + if (raisedEvent) { + _controller.add(null); + } } if (firstListen) { @@ -56,15 +71,13 @@ class EventManager implements SyncEngineCallback { return queryInfo.targetId; } - /// Removes a previously added listener and returns true if the listener was - /// found. - Future removeQueryListener(QueryStream listener) async { + /// Removes a previously added listener. It's a no-op if the listener is not found.
+ Future removeQueryListener(QueryStream listener) async { final Query query = listener.query; final _QueryListenersInfo queryInfo = _queries[query]; bool lastListen = false; - bool found = false; if (queryInfo != null) { - found = queryInfo.listeners.remove(listener); + queryInfo.listeners.remove(listener); lastListen = queryInfo.listeners.isEmpty; } @@ -72,22 +85,26 @@ class EventManager implements SyncEngineCallback { _queries.remove(query); await _syncEngine.stopListening(query); } - - return found; } @override Future onViewSnapshots(List snapshotList) async { + bool raisedEvent = false; for (ViewSnapshot viewSnapshot in snapshotList) { final Query query = viewSnapshot.query; final _QueryListenersInfo info = _queries[query]; if (info != null) { for (QueryStream listener in info.listeners.toList()) { - await listener.onViewSnapshot(viewSnapshot); + if (listener.onViewSnapshot(viewSnapshot)) { + raisedEvent = true; + } } info.viewSnapshot = viewSnapshot; } } + if (raisedEvent) { + _controller.add(null); + } } @override @@ -103,12 +120,18 @@ class EventManager implements SyncEngineCallback { @override void handleOnlineStateChange(OnlineState onlineState) { + bool raisedEvent = false; _onlineState = onlineState; for (_QueryListenersInfo info in _queries.values) { for (QueryStream listener in info.listeners.toList()) { - listener.onOnlineStateChanged(onlineState); + if (listener.onOnlineStateChanged(onlineState)) { + raisedEvent = true; + } } } + if (raisedEvent) { + _controller.add(null); + } } } @@ -126,7 +149,8 @@ class ListenOptions { this.includeDocumentMetadataChanges = false, this.includeQueryMetadataChanges = false, this.waitForSyncWhenOnline = false, - }) : assert(includeDocumentMetadataChanges != null), + }) + : assert(includeDocumentMetadataChanges != null), assert(includeQueryMetadataChanges != null), assert(waitForSyncWhenOnline != null); @@ -151,12 +175,9 @@ class ListenOptions { bool waitForSyncWhenOnline, }) { return ListenOptions( - includeDocumentMetadataChanges: - includeDocumentMetadataChanges ?? this.includeDocumentMetadataChanges, - includeQueryMetadataChanges: - includeQueryMetadataChanges ?? this.includeQueryMetadataChanges, - waitForSyncWhenOnline: - waitForSyncWhenOnline ?? this.waitForSyncWhenOnline, + includeDocumentMetadataChanges: includeDocumentMetadataChanges ?? this.includeDocumentMetadataChanges, + includeQueryMetadataChanges: includeQueryMetadataChanges ?? this.includeQueryMetadataChanges, + waitForSyncWhenOnline: waitForSyncWhenOnline ?? this.waitForSyncWhenOnline, ); } } diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/filter/array_contains_any_filter.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/filter/array_contains_any_filter.dart index 123b1482..5f96194e 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/filter/array_contains_any_filter.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/filter/array_contains_any_filter.dart @@ -6,21 +6,21 @@ part of filter; /// A Filter that implements the array-contains-any operator. 
class ArrayContainsAnyFilter extends FieldFilter { - ArrayContainsAnyFilter(FieldPath field, FieldValue value) - : super._(field, FilterOperator.arrayContainsAny, value); + ArrayContainsAnyFilter(FieldPath field, Value value) : super._(field, FilterOperator.arrayContainsAny, value) { + hardAssert(isArray(value), 'ArrayContainsAnyFilter expects an ArrayValue'); + } @override bool matches(Document doc) { - final ArrayValue arrayValue = value; - final FieldValue other = doc.getField(field); - if (other is ArrayValue) { - for (FieldValue val in other.internalValue) { - if (arrayValue.internalValue.contains(val)) { - return true; - } + final Value other = doc.getField(field); + if (!isArray(other)) { + return false; + } + for (Value val in other.arrayValue.values) { + if (contains(value.arrayValue, val)) { + return true; } } - return false; } } diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/filter/array_contains_filter.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/filter/array_contains_filter.dart index d7ccb812..5e97116c 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/filter/array_contains_filter.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/filter/array_contains_filter.dart @@ -6,12 +6,11 @@ part of filter; /// A Filter that implements the array-contains operator. class ArrayContainsFilter extends FieldFilter { - ArrayContainsFilter(FieldPath field, FieldValue value) - : super._(field, FilterOperator.arrayContains, value); + ArrayContainsFilter(FieldPath field, Value value) : super._(field, FilterOperator.arrayContains, value); @override bool matches(Document doc) { - final FieldValue other = doc.getField(field); - return other is ArrayValue && other.internalValue.contains(value); + final Value other = doc.getField(field); + return isArray(other) && contains(other.arrayValue, value); } } diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/filter/field_filter.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/filter/field_filter.dart index a1bdd516..f237254f 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/filter/field_filter.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/filter/field_filter.dart @@ -8,43 +8,27 @@ part of filter; class FieldFilter extends Filter { /// Note that if the relation operator is EQUAL and the value is null or NaN, this will return /// the appropriate NullFilter or NaNFilter class instead of a FieldFilter. 
- factory FieldFilter( - FieldPath path, FilterOperator operator, FieldValue value) { + factory FieldFilter(FieldPath path, FilterOperator operator, Value value) { if (path.isKeyField) { if (operator == FilterOperator.IN) { - hardAssert(value is ArrayValue, - 'Comparing on key with IN, but an array value was not a RefValue'); return KeyFieldInFilter(path, value); + } else if (operator == FilterOperator.notIn) { + return KeyFieldNotInFilter(path, value); } else { - hardAssert(value is ReferenceValue, - 'Comparing on key, but filter value not a ReferenceValue'); hardAssert( - operator != FilterOperator.arrayContains && - operator != FilterOperator.arrayContainsAny, - '$operator queries don\'t make sense on document keys'); + operator != FilterOperator.arrayContains && operator != FilterOperator.arrayContainsAny, + "$operator queries don't make sense on document keys", + ); return KeyFieldFilter(path, operator, value); } - } else if (value == NullValue.nullValue()) { - if (operator != FilterOperator.equal) { - throw ArgumentError( - 'Invalid Query. Null supports only equality comparisons (via whereEqualTo()).'); - } - return FieldFilter._(path, operator, value); - } else if (value == DoubleValue.nan) { - if (operator != FilterOperator.equal) { - throw ArgumentError( - 'Invalid Query. NaN supports only equality comparisons (via whereEqualTo()).'); - } - return FieldFilter._(path, operator, value); } else if (operator == FilterOperator.arrayContains) { return ArrayContainsFilter(path, value); } else if (operator == FilterOperator.IN) { - hardAssert(value is ArrayValue, 'IN filter has invalid value: $value'); return InFilter(path, value); } else if (operator == FilterOperator.arrayContainsAny) { - hardAssert(value is ArrayValue, - 'ARRAY_CONTAINS_ANY filter has invalid value: $value'); return ArrayContainsAnyFilter(path, value); + } else if (operator == FilterOperator.notIn) { + return NotInFilter(path, value); } else { return FieldFilter._(path, operator, value); } @@ -55,17 +39,19 @@ class FieldFilter extends Filter { const FieldFilter._(this.field, this.operator, this.value) : super._(); final FilterOperator operator; - final FieldValue value; + final Value value; @override final FieldPath field; @override bool matches(Document doc) { - final FieldValue other = doc.getField(field); + final Value other = doc.getField(field); + // Types do not have to match in NOT_EQUAL filters. + if (operator == FilterOperator.notEqual) { + return other != null && _matchesComparison(compare(other, value)); + } // Only compare types with matching backend order (such as double and int). - return other != null && - value.typeOrder == other.typeOrder && - _matchesComparison(other.compareTo(value)); + return other != null && typeOrder(other) == typeOrder(value) && _matchesComparison(compare(other, value)); } bool _matchesComparison(int comp) { @@ -76,6 +62,8 @@ class FieldFilter extends Filter { return comp <= 0; case FilterOperator.equal: return comp == 0; + case FilterOperator.notEqual: + return comp != 0; case FilterOperator.graterThan: return comp > 0; case FilterOperator.graterThanOrEqual: @@ -85,14 +73,17 @@ class FieldFilter extends Filter { } } - bool get isInequality => - FilterOperator.inequalityOperators.contains(operator); + bool get isInequality { + return FilterOperator.inequalityOperators.contains(operator); + } - // TODO(long1eu): Technically, this won't be unique if two values have the - // same description, such as the int 3 and the string '3'. So we should add - // the types in here somehow, too. 
@override - String get canonicalId => '${field.canonicalString} $operator $value'; + String get canonicalId { + // TODO(long1eu): Technically, this won't be unique if two values have the + // same description, such as the int 3 and the string '3'. So we should add + // the types in here somehow, too. + return '${field.canonicalString} $operator ${values.canonicalId(value)}'; + } @override String toString() => canonicalId; diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/filter/filter.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/filter/filter.dart index 385bd331..2c1ed943 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/filter/filter.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/filter/filter.dart @@ -5,9 +5,12 @@ library filter; import 'package:cloud_firestore_vm/src/firebase/firestore/model/document.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/document_key.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/field_path.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/model/value/field_value.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/values.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/values.dart' as values; import 'package:cloud_firestore_vm/src/firebase/firestore/util/assert.dart'; +import 'package:cloud_firestore_vm/src/proto/google/firestore/v1/index.dart' hide Document; part 'array_contains_any_filter.dart'; part 'array_contains_filter.dart'; @@ -16,6 +19,8 @@ part 'filter_operator.dart'; part 'in_filter.dart'; part 'key_field_filter.dart'; part 'key_field_in_filter.dart'; +part 'key_field_not_in_filter.dart'; +part 'not_in_filter.dart'; /// Interface used for all query filters. 
abstract class Filter { diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/filter/filter_operator.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/filter/filter_operator.dart index f0b17475..c6dbdcf8 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/filter/filter_operator.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/filter/filter_operator.dart @@ -4,6 +4,7 @@ part of filter; +// ignore_for_file: constant_identifier_names class FilterOperator { const FilterOperator._(this._value); @@ -12,31 +13,25 @@ class FilterOperator { static const FilterOperator lessThan = FilterOperator._('<'); static const FilterOperator lessThanOrEqual = FilterOperator._('<='); static const FilterOperator equal = FilterOperator._('=='); + static const FilterOperator notEqual = FilterOperator._('!='); static const FilterOperator graterThan = FilterOperator._('>'); static const FilterOperator graterThanOrEqual = FilterOperator._('>='); - static const FilterOperator arrayContains = - FilterOperator._('array_contains'); - static const FilterOperator arrayContainsAny = - FilterOperator._('array_contains_any'); - - // ignore: constant_identifier_names + static const FilterOperator arrayContains = FilterOperator._('array_contains'); + static const FilterOperator arrayContainsAny = FilterOperator._('array_contains_any'); static const FilterOperator IN = FilterOperator._('in'); + static const FilterOperator notIn = FilterOperator._('not_in'); - static const List arrayOperators = [ - arrayContains, - arrayContainsAny - ]; + static const List arrayOperators = [arrayContains, arrayContainsAny]; - static const List disjunctiveOperators = [ - arrayContainsAny, - IN - ]; + static const List disjunctiveOperators = [arrayContainsAny, IN]; static const List inequalityOperators = [ lessThan, lessThanOrEqual, graterThan, - graterThanOrEqual + graterThanOrEqual, + notEqual, + notIn ]; @override diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/filter/in_filter.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/filter/in_filter.dart index 4a1de9b2..1706cc93 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/filter/in_filter.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/filter/in_filter.dart @@ -6,13 +6,13 @@ part of filter; /// A Filter that implements the IN operator. 
class InFilter extends FieldFilter { - InFilter(FieldPath field, FieldValue value) - : super._(field, FilterOperator.IN, value); + InFilter(FieldPath field, Value value) : super._(field, FilterOperator.IN, value) { + hardAssert(isArray(value), 'InFilter expects an ArrayValue'); + } @override bool matches(Document doc) { - final ArrayValue arrayValue = value; - final FieldValue other = doc.getField(field); - return other != null && arrayValue.internalValue.contains(other); + final Value other = doc.getField(field); + return other != null && contains(value.arrayValue, other); } } diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/filter/key_field_filter.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/filter/key_field_filter.dart index 44b706c7..dd8609bc 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/filter/key_field_filter.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/filter/key_field_filter.dart @@ -6,13 +6,17 @@ part of filter; /// Filter that matches on key fields (i.e. '__name__'). class KeyFieldFilter extends FieldFilter { - KeyFieldFilter(FieldPath field, FilterOperator operator, FieldValue value) - : super._(field, operator, value); + KeyFieldFilter(FieldPath field, FilterOperator operator, Value value) + : _key = DocumentKey.fromName(value.referenceValue), + super._(field, operator, value) { + hardAssert(isReferenceValue(value), 'KeyFieldFilter expects a ReferenceValue'); + } + + final DocumentKey _key; @override bool matches(Document doc) { - final ReferenceValue referenceValue = value; - final int comparator = doc.key.compareTo(referenceValue.value); + final int comparator = doc.key.compareTo(_key); return _matchesComparison(comparator); } } diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/filter/key_field_in_filter.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/filter/key_field_in_filter.dart index 2af605b0..950268f5 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/filter/key_field_in_filter.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/filter/key_field_in_filter.dart @@ -6,24 +6,27 @@ part of filter; /// A Filter that implements the array-contains-any operator. 
class KeyFieldInFilter extends FieldFilter { - KeyFieldInFilter(FieldPath field, FieldValue value) - : super._(field, FilterOperator.IN, value) { - final ArrayValue arrayValue = value; - for (FieldValue refValue in arrayValue.internalValue) { - hardAssert(refValue is ReferenceValue, - 'Comparing on key with IN, but an array value was not a ReferenceValue'); - } - } + KeyFieldInFilter(FieldPath field, Value value) + : _keys = extractDocumentKeysFromArrayValue(FilterOperator.IN, value), + super._(field, FilterOperator.IN, value); + + final List _keys; @override - bool matches(Document doc) { - final ArrayValue arrayValue = value; - for (FieldValue refValue in arrayValue.internalValue) { - if (doc.key == refValue.value) { - return true; - } - } + bool matches(Document doc) => _keys.contains(doc.key); - return false; + static List extractDocumentKeysFromArrayValue(FilterOperator operator, Value value) { + hardAssert(operator == FilterOperator.IN || operator == FilterOperator.notIn, + 'extractDocumentKeysFromArrayValue requires IN or NOT_IN operators'); + hardAssert(isArray(value), 'KeyFieldInFilter/KeyFieldNotInFilter expects an ArrayValue'); + final List keys = []; + for (Value element in value.arrayValue.values) { + hardAssert( + isReferenceValue(element), + 'Comparing on key with $operator, but an array value was not a ReferenceValue', + ); + keys.add(DocumentKey.fromName(element.referenceValue)); + } + return keys; } } diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/filter/key_field_not_in_filter.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/filter/key_field_not_in_filter.dart new file mode 100644 index 00000000..a57c42ea --- /dev/null +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/filter/key_field_not_in_filter.dart @@ -0,0 +1,17 @@ +// File created by +// Lung Razvan +// on 16/03/2020 + +part of filter; + +/// A Filter that implements the NOT_IN operator on key fields. +class KeyFieldNotInFilter extends FieldFilter { + KeyFieldNotInFilter(FieldPath field, Value value) + : _keys = KeyFieldInFilter.extractDocumentKeysFromArrayValue(FilterOperator.notIn, value), + super._(field, FilterOperator.notIn, value); + + final List _keys; + + @override + bool matches(Document doc) => !_keys.contains(doc.key); +} diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/filter/not_in_filter.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/filter/not_in_filter.dart new file mode 100644 index 00000000..06df3ed5 --- /dev/null +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/filter/not_in_filter.dart @@ -0,0 +1,21 @@ +// File created by +// Lung Razvan +// on 16/03/2020 + +part of filter; + +/// A Filter that implements the NOT_IN operator.
+class NotInFilter extends FieldFilter { + NotInFilter(FieldPath field, Value value) : super._(field, FilterOperator.notIn, value) { + hardAssert(isArray(value), 'NotInFilter expects an ArrayValue'); + } + + @override + bool matches(Document doc) { + if (contains(value.arrayValue, NULL_VALUE)) { + return false; + } + final Value other = doc.getField(field); + return other != null && !contains(value.arrayValue, other); + } +} diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/firestore_client.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/firestore_client.dart index 51e20940..443ef41a 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/firestore_client.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/firestore_client.dart @@ -4,61 +4,58 @@ import 'dart:async'; -import 'package:_firebase_database_collection_vm/_firebase_database_collection_vm.dart'; import 'package:_firebase_internal_vm/_firebase_internal_vm.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/auth/credentials_provider.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/auth/user.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/core/component_provider.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/core/database_info.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/core/event_manager.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/core/online_state.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/core/memory_component_provider.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/core/query.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/core/query_stream.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/core/sqlite_component_provider.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/core/sync_engine.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/core/transaction.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/core/view.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/core/view_snapshot.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/firestore_error.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/firestore_settings.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/local_serializer.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/garbage_collection_scheduler.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/local/local_store.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/lru_garbage_collector.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/memory/memory_persistence.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistance/persistence.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/sqlite/sqlite_persistence.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/persistence.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/query_result.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/document.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/document_key.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/maybe_document.dart'; import 
'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/mutation.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/mutation_batch_result.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/no_document.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/remote/datastore/datastore.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/remote/remote_event.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/remote/remote_serializer.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/remote/datastore.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/remote/firebase_client_grpc_metadata_provider.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/remote/remote_store.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/util/assert.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/util/async_task.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/util/database.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/util/timer_task.dart'; -import 'package:grpc/grpc.dart'; import 'package:rxdart/rxdart.dart'; /// [FirestoreClient] is a top-level class that constructs and owns all of the pieces of the client SDK architecture. -class FirestoreClient implements RemoteStoreCallback { +class FirestoreClient { FirestoreClient._(this.databaseInfo, this.credentialsProvider); static const String logTag = 'FirestoreClient'; + static const int _kMaxConcurrentLimboResolutions = 100; final DatabaseInfo databaseInfo; final CredentialsProvider credentialsProvider; + AsyncQueue asyncQueue; StreamSubscription onCredentialChangeSubscription; Persistence persistence; LocalStore localStore; RemoteStore remoteStore; SyncEngine syncEngine; EventManager eventManager; - bool _isShutdown = false; - LruGarbageCollectorScheduler _lruScheduler; + GarbageCollectionScheduler _gcScheduler; static Future initialize( DatabaseInfo databaseInfo, @@ -66,16 +63,27 @@ class FirestoreClient implements RemoteStoreCallback { CredentialsProvider credentialsProvider, OpenDatabase openDatabase, BehaviorSubject onNetworkConnected, - TaskScheduler scheduler, + AsyncQueue scheduler, + GrpcMetadataProvider metadataProvider, ) async { - final FirestoreClient client = - FirestoreClient._(databaseInfo, credentialsProvider); + final FirestoreClient client = FirestoreClient._(databaseInfo, credentialsProvider); final Completer firstUser = Completer(); bool initialized = false; - client.onCredentialChangeSubscription = - credentialsProvider.onChange.listen((User user) { + await scheduler.enqueue(() async { + final User user = await firstUser.future; + await client._initialize( + user, + settings, + openDatabase, + onNetworkConnected, + scheduler, + metadataProvider, + ); + }); + + client.onCredentialChangeSubscription = credentialsProvider.onChange.listen((User user) { if (initialized == false) { initialized = true; hardAssert(!firstUser.isCompleted, 'Already fulfilled first user task'); @@ -86,64 +94,64 @@ class FirestoreClient implements RemoteStoreCallback { } }); - final User user = await firstUser.future; - await client._initialize( - user, - // TODO(long1eu): Make sure you remove the openDatabase != null once we - // provide a default way to instantiate a db instance - settings.persistenceEnabled && openDatabase != null, - settings.cacheSizeBytes, - openDatabase, - onNetworkConnected, - scheduler, - ); return client; } Future disableNetwork() { - 
_verifyNotShutdown(); - return remoteStore.disableNetwork(); + _verifyNotTerminated(); + return asyncQueue.enqueue(() => remoteStore.disableNetwork()); } Future enableNetwork() { - _verifyNotShutdown(); - return remoteStore.enableNetwork(); + _verifyNotTerminated(); + return asyncQueue.enqueue(() => remoteStore.enableNetwork()); } /// Shuts down this client, cancels all writes / listeners, and releases all resources. - Future shutdown() async { - if (_isShutdown) { + Future terminate() async { + if (isTerminated) { return; } await onCredentialChangeSubscription.cancel(); - await remoteStore.shutdown(); - await persistence.shutdown(); - _lruScheduler?.stop(); - _isShutdown = true; + + await asyncQueue.enqueueAndInitiateShutdown(() async { + await remoteStore.shutdown(); + await persistence.shutdown(); + _gcScheduler?.stop(); + }); + } + + /// Returns true if this client has been terminated. + bool get isTerminated { + // Technically, the asyncQueue is still running, but only accepting tasks related to shutdown + // or supposed to be run after shutdown. It is effectively shut down to the eyes of users. + return asyncQueue.isShuttingDown; } /// Starts listening to a query. */ Future listen(Query query, ListenOptions options) async { - _verifyNotShutdown(); + _verifyNotTerminated(); - final QueryStream queryListener = - QueryStream(query, options, stopListening); - await eventManager.addQueryListener(queryListener); + final QueryStream queryListener = QueryStream(query, options, stopListening); + asyncQueue.enqueueAndForget(() => eventManager.addQueryListener(queryListener)); return queryListener; } /// Stops listening to a query previously listened to. - Future stopListening(QueryStream listener) { - _verifyNotShutdown(); - - return eventManager.removeQueryListener(listener); + void stopListening(QueryStream listener) { + // Checks for terminate but does not raise error, allowing it to be a no-op if client is already + // terminated. + if (isTerminated) { + return; + } + asyncQueue.enqueueAndForget(() => eventManager.removeQueryListener(listener)); } Future getDocumentFromLocalCache(DocumentKey docKey) async { - _verifyNotShutdown(); + _verifyNotTerminated(); - final MaybeDocument maybeDoc = await localStore.readDocument(docKey); + final MaybeDocument maybeDoc = await asyncQueue.enqueue(() => localStore.readDocument(docKey)); if (maybeDoc is Document) { return maybeDoc; @@ -159,125 +167,90 @@ class FirestoreClient implements RemoteStoreCallback { } Future getDocumentsFromLocalCache(Query query) async { - _verifyNotShutdown(); - - final ImmutableSortedMap docs = - await localStore.executeQuery(query); - - final View view = View(query, ImmutableSortedSet()); - final ViewDocumentChanges viewDocChanges = view.computeDocChanges(docs); - return view.applyChanges(viewDocChanges).snapshot; + _verifyNotTerminated(); + return asyncQueue.enqueue(() async { + final QueryResult queryResult = await localStore.executeQuery(query, /* usePreviousResults= */ true); + final View view = View(query, queryResult.remoteKeys); + final ViewDocumentChanges viewDocChanges = view.computeDocChanges(queryResult.documents); + return view.applyChanges(viewDocChanges).snapshot; + }); } /// Writes mutations. The returned Future will be notified when it's written to the backend. 
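The calls above all follow one pattern: check _verifyNotTerminated(), then hand the real work to the AsyncQueue, either awaiting the result (enqueue) or fire-and-forget (enqueueAndForget), so every client operation is serialized on a single worker queue. A minimal stand-in for that serialization pattern (not the SDK's AsyncQueue; SerialWorkQueue and its methods are illustrative only):

```dart
import 'dart:async';

/// Minimal stand-in for the serial work queue pattern used above.
/// Tasks run strictly one after another, in submission order.
class SerialWorkQueue {
  Future<void> _tail = Future<void>.value();

  /// Enqueues [task] and completes with its result once it has run.
  Future<T> enqueue<T>(Future<T> Function() task) {
    final Completer<T> completer = Completer<T>();
    _tail = _tail.then((_) async {
      try {
        completer.complete(await task());
      } catch (error, stackTrace) {
        completer.completeError(error, stackTrace);
      }
    });
    return completer.future;
  }

  /// Enqueues [task] without exposing its result to the caller.
  void enqueueAndForget(Future<void> Function() task) {
    enqueue(task);
  }
}

Future<void> main() async {
  final SerialWorkQueue queue = SerialWorkQueue();
  queue.enqueueAndForget(() async {
    print('listen registered');
  });
  final int result = await queue.enqueue(() async => 42);
  print('read from cache: $result'); // Runs only after the first task.
}
```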
Future write(final List mutations) async { - _verifyNotShutdown(); + _verifyNotTerminated(); final Completer source = Completer(); - await syncEngine.writeMutations(mutations, source); + asyncQueue.enqueueAndForget(() => syncEngine.writeMutations(mutations, source)); await source.future; } - /// Tries to execute the transaction in updateFunction up to retries times. - Future transaction( - Future Function(Transaction) updateFunction, int retries) { - _verifyNotShutdown(); + /// Tries to execute the transaction in transaction. + Future transaction(Future Function(Transaction) updateFunction) { + _verifyNotTerminated(); + return asyncQueue.enqueue(() => syncEngine.transaction(asyncQueue, updateFunction)); + } - return syncEngine.transaction(updateFunction, retries); + /// Returns a task resolves when all the pending writes at the time when this method is called + /// received server acknowledgement. An acknowledgement can be either acceptance or rejections. + Future waitForPendingWrites() { + _verifyNotTerminated(); + + final Completer source = Completer(); + asyncQueue.enqueueAndForget(() => syncEngine.registerPendingWritesTask(source)); + return source.future; } Future _initialize( User user, - bool usePersistence, - int cacheSizeBytes, + FirestoreSettings settings, OpenDatabase openDatabase, BehaviorSubject onNetworkConnected, - TaskScheduler scheduler, + AsyncQueue asyncQueue, + GrpcMetadataProvider metadataProvider, ) async { // Note: The initialization work must all be synchronous (we can't dispatch more work) since external write/listen // operations could get queued to run before that subsequent work completes. Log.d(logTag, 'Initializing. user=${user.uid}'); + final Datastore datastore = Datastore( + databaseInfo: databaseInfo, + workerQueue: asyncQueue, + credentialsProvider: credentialsProvider, + metadataProvider: metadataProvider, + ); - LruGarbageCollector gc; - if (usePersistence) { - final LocalSerializer serializer = - LocalSerializer(RemoteSerializer(databaseInfo.databaseId)); - final LruGarbageCollectorParams params = - LruGarbageCollectorParams.withCacheSizeBytes(cacheSizeBytes); - - final SQLitePersistence persistence = await SQLitePersistence.create( - databaseInfo.persistenceKey, - databaseInfo.databaseId, - serializer, - openDatabase, - params); - - final SQLiteLruReferenceDelegate lruDelegate = - persistence.referenceDelegate; - gc = lruDelegate.garbageCollector; - this.persistence = persistence; - } else { - persistence = MemoryPersistence.createEagerGcMemoryPersistence(); - } - - await persistence.start(); - localStore = LocalStore(persistence, user); - if (gc != null) { - _lruScheduler = gc.newScheduler(scheduler, localStore) // - ..start(); - } - - final Datastore datastore = - Datastore(scheduler, databaseInfo, credentialsProvider); - remoteStore = - RemoteStore(this, localStore, datastore, onNetworkConnected, scheduler); + final ComponentProviderConfiguration configuration = ComponentProviderConfiguration( + asyncQueue: asyncQueue, + databaseInfo: databaseInfo, + datastore: datastore, + initialUser: user, + maxConcurrentLimboResolutions: _kMaxConcurrentLimboResolutions, + settings: settings, + onNetworkConnected: onNetworkConnected, + openDatabase: openDatabase, + ); - syncEngine = SyncEngine(localStore, remoteStore, user); - eventManager = EventManager(syncEngine); + final ComponentProvider provider = + settings.persistenceEnabled ? 
SQLiteComponentProvider() : MemoryComponentProvider(); + + await provider.initialize(configuration); + persistence = provider.persistence; + _gcScheduler = provider.gargabeCollectionScheduler; + localStore = provider.localStore; + remoteStore = provider.remoteStore; + syncEngine = provider.syncEngine; + eventManager = provider.eventManager; + _gcScheduler?.start(); + } - // NOTE: RemoteStore depends on LocalStore (for persisting stream tokens, - // refilling mutation queue, etc.) so must be started after LocalStore. - await localStore.start(); - await remoteStore.start(); + Stream get snapshotsInSync { + _verifyNotTerminated(); + return eventManager.snapshotsInSync; } - void _verifyNotShutdown() { - if (_isShutdown) { + void _verifyNotTerminated() { + if (isTerminated) { throw ArgumentError('The client has already been shutdown'); } } - - @override - Future handleRemoteEvent(RemoteEvent remoteEvent) async { - await syncEngine.handleRemoteEvent(remoteEvent); - } - - @override - Future handleRejectedListen(int targetId, GrpcError error) async { - await syncEngine.handleRejectedListen(targetId, error); - } - - @override - Future handleSuccessfulWrite( - MutationBatchResult mutationBatchResult) async { - await syncEngine.handleSuccessfulWrite(mutationBatchResult); - } - - @override - Future handleRejectedWrite(int batchId, GrpcError error) async { - await syncEngine.handleRejectedWrite(batchId, error); - } - - @override - Future handleOnlineStateChange(OnlineState onlineState) async { - await syncEngine.handleOnlineStateChange(onlineState); - } - - @override - ImmutableSortedSet Function(int targetId) - get getRemoteKeysForTarget { - return (int targetId) { - return syncEngine.getRemoteKeysForTarget(targetId); - }; - } } diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/index_range.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/index_range.dart index 2d39aa61..90c319c1 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/index_range.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/index_range.dart @@ -2,7 +2,7 @@ // Lung Razvan // on 20/09/2018 import 'package:cloud_firestore_vm/src/firebase/firestore/model/field_path.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/model/value/field_value.dart'; +import 'package:cloud_firestore_vm/src/proto/google/firestore/v1/index.dart'; /// A range of index field values over which a cursor should iterate. If [start] /// and [end] are both null, any field value will be considered within range. @@ -13,16 +13,12 @@ class IndexRange { final FieldPath fieldPath; /// the inclusive start position of the index lookup. - final FieldValue start; + final Value start; /// the inclusive end position of the index lookup. - final FieldValue end; + final Value end; - IndexRange copyWith({ - FieldPath fieldPath, - FieldValue start, - FieldValue end, - }) { + IndexRange copyWith({FieldPath fieldPath, Value start, Value end}) { return IndexRange( fieldPath: fieldPath ?? this.fieldPath, start: start ?? 
this.start, diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/listent_sequence.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/listen_sequence.dart similarity index 100% rename from cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/listent_sequence.dart rename to cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/listen_sequence.dart diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/memory_component_provider.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/memory_component_provider.dart new file mode 100644 index 00000000..64bd4e66 --- /dev/null +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/memory_component_provider.dart @@ -0,0 +1,104 @@ +// File created by +// Lung Razvan +// on 16/01/2021 + +import 'package:_firebase_database_collection_vm/_firebase_database_collection_vm.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/core/component_provider.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/core/event_manager.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/core/online_state.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/core/sync_engine.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/default_query_engine.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/garbage_collection_scheduler.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/local_store.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/memory/memory_persistence.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/persistence.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/document_key.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/mutation_batch_result.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/remote/remote_event.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/remote/remote_store.dart'; +import 'package:grpc/grpc.dart'; + +/// Provides all components needed for Firestore with in-memory persistence. Uses EagerGC garbage +/// collection. +class MemoryComponentProvider extends ComponentProvider { + @override + GarbageCollectionScheduler createGarbageCollectionScheduler(ComponentProviderConfiguration configuration) { + return null; + } + + @override + EventManager createEventManager(ComponentProviderConfiguration configuration) { + return EventManager(syncEngine); + } + + @override + LocalStore createLocalStore(ComponentProviderConfiguration configuration) { + return LocalStore(persistence, DefaultQueryEngine(), configuration.initialUser); + } + + @override + Future createPersistence(ComponentProviderConfiguration configuration) async { + return MemoryPersistence.createEagerGcMemoryPersistence(); + } + + @override + RemoteStore createRemoteStore(ComponentProviderConfiguration configuration) { + return RemoteStore( + _RemoteStoreCallbackImpl(this), + localStore, + configuration.datastore, + configuration.asyncQueue, + configuration.onNetworkConnected, + ); + } + + @override + SyncEngine createSyncEngine(ComponentProviderConfiguration configuration) { + return SyncEngine( + localStore, + remoteStore, + configuration.initialUser, + configuration.maxConcurrentLimboResolutions, + ); + } +} + +/// A callback interface used by RemoteStore. All calls are forwarded to SyncEngine. 
+/// +/// This interface exists to allow RemoteStore to access functionality provided by SyncEngine +/// even though SyncEngine is created after RemoteStore. +class _RemoteStoreCallbackImpl implements RemoteStoreCallback { + _RemoteStoreCallbackImpl(this._provider); + + final ComponentProvider _provider; + + @override + ImmutableSortedSet getRemoteKeysForTarget(int targetId) { + return _provider.syncEngine.getRemoteKeysForTarget(targetId); + } + + @override + Future handleOnlineStateChange(OnlineState onlineState) { + return _provider.syncEngine.handleOnlineStateChange(onlineState); + } + + @override + Future handleRejectedListen(int targetId, GrpcError error) { + return _provider.syncEngine.handleRejectedListen(targetId, error); + } + + @override + Future handleRejectedWrite(int batchId, GrpcError error) { + return _provider.syncEngine.handleRejectedWrite(batchId, error); + } + + @override + Future handleRemoteEvent(RemoteEvent remoteEvent) { + return _provider.syncEngine.handleRemoteEvent(remoteEvent); + } + + @override + Future handleSuccessfulWrite(MutationBatchResult successfulWrite) { + return _provider.syncEngine.handleSuccessfulWrite(successfulWrite); + } +} diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/order_by.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/order_by.dart index 07aa5255..77946dbd 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/order_by.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/order_by.dart @@ -4,8 +4,9 @@ import 'package:cloud_firestore_vm/src/firebase/firestore/model/document.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/field_path.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/model/value/field_value.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/values.dart' as values; import 'package:cloud_firestore_vm/src/firebase/firestore/util/assert.dart'; +import 'package:cloud_firestore_vm/src/proto/google/firestore/v1/index.dart' show Value; /// The direction of the ordering class OrderByDirection { @@ -19,9 +20,7 @@ class OrderByDirection { @override bool operator ==(Object other) => identical(this, other) || - other is OrderByDirection && - runtimeType == other.runtimeType && - _comparisonModifier == other._comparisonModifier; + other is OrderByDirection && runtimeType == other.runtimeType && _comparisonModifier == other._comparisonModifier; @override int get hashCode => _comparisonModifier.hashCode; @@ -42,27 +41,22 @@ class OrderBy { if (field == FieldPath.keyPath) { return direction._comparisonModifier * d1.key.compareTo(d2.key); } else { - final FieldValue v1 = d1.getField(field); - final FieldValue v2 = d2.getField(field); - hardAssert(v1 != null && v2 != null, - 'Trying to compare documents on fields that don\'t exist.'); - return direction._comparisonModifier * v1.compareTo(v2); + final Value v1 = d1.getField(field); + final Value v2 = d2.getField(field); + hardAssert(v1 != null && v2 != null, 'Trying to compare documents on fields that don\'t exist.'); + return direction._comparisonModifier * values.compare(v1, v2); } } @override String toString() { - return (direction == OrderByDirection.ascending ? '' : '-') + - field.canonicalString; + return (direction == OrderByDirection.ascending ? 
'' : '-') + field.canonicalString; } @override bool operator ==(Object other) => identical(this, other) || - other is OrderBy && - runtimeType == other.runtimeType && - direction == other.direction && - field == other.field; + other is OrderBy && runtimeType == other.runtimeType && direction == other.direction && field == other.field; @override int get hashCode => direction.hashCode ^ field.hashCode; diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/query.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/query.dart index 03ef4a99..d6419c51 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/query.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/query.dart @@ -5,41 +5,45 @@ import 'package:cloud_firestore_vm/src/firebase/firestore/core/bound.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/core/filter/filter.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/core/order_by.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/core/target.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/document.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/document_key.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/field_path.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/resource_path.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/util/assert.dart'; -import 'package:collection/collection.dart'; -/// Represents the internal structure of a Firestore Query +enum QueryLimitType { limitToFirst, limitToLast } + +/// Encapsulates all the query attributes we support in the SDK. It can be run against the +/// LocalStore, as well as be converted to a {@code Target} to query the RemoteStore results. class Query { /// Initializes a Query with all of its components directly. - const Query( + Query( this.path, { this.collectionGroup, this.filters = const [], this.explicitSortOrder = const [], - int limit = noLimit, + int limit = Target.kNoLimit, + QueryLimitType limitType = QueryLimitType.limitToFirst, Bound startAt, Bound endAt, }) : _limit = limit, + _limitType = limitType, _startAt = startAt, _endAt = endAt; - static const int noLimit = -1; - - static final OrderBy keyOrderingAsc = - OrderBy.getInstance(OrderByDirection.ascending, FieldPath.keyPath); + static final OrderBy keyOrderingAsc = OrderBy.getInstance(OrderByDirection.ascending, FieldPath.keyPath); - static final OrderBy keyOrderingDesc = - OrderBy.getInstance(OrderByDirection.descending, FieldPath.keyPath); + static final OrderBy keyOrderingDesc = OrderBy.getInstance(OrderByDirection.descending, FieldPath.keyPath); /// Returns the list of ordering constraints that were explicitly requested on the query by the user. /// /// Note that the actual query performed might add additional sort orders to match the behavior of the backend. final List explicitSortOrder; + // The corresponding Target of this Query instance. + Target _memoizedTarget; + /// The filters on the documents returned by the query. final List filters; @@ -49,6 +53,7 @@ class Query { final String collectionGroup; final int _limit; + final QueryLimitType _limitType; /// An optional bound to start the query at. final Bound _startAt; @@ -58,30 +63,47 @@ class Query { /// Returns true if this Query is for a specific document. 
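The OrderBy comparator above keeps its shape, a +1/-1 direction modifier multiplied into a shared value comparison, and only swaps FieldValue.compareTo for values.compare on raw protobuf Values. A tiny standalone illustration of that modifier pattern, using plain ints in place of Firestore values (Direction and compareWithDirection are illustrative names, not SDK API):

```dart
/// Simplified stand-in for the ascending/descending comparator pattern:
/// the direction contributes only a sign, the value comparison is shared.
class Direction {
  const Direction._(this._comparisonModifier);

  static const Direction ascending = Direction._(1);
  static const Direction descending = Direction._(-1);

  final int _comparisonModifier;
}

int compareWithDirection(Direction direction, int v1, int v2) {
  return direction._comparisonModifier * v1.compareTo(v2);
}

void main() {
  final List<int> values = [3, 1, 2];
  values.sort((int a, int b) => compareWithDirection(Direction.descending, a, b));
  print(values); // [3, 2, 1]
}
```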
bool get isDocumentQuery { - return DocumentKey.isDocumentKey(path) && - collectionGroup == null && - filters.isEmpty; + return DocumentKey.isDocumentKey(path) && collectionGroup == null && filters.isEmpty; } /// Returns true if this is a collection group query. bool get isCollectionGroupQuery => collectionGroup != null; - /// The maximum number of results to return. If there is no limit on the - /// query, then this will cause an assertion failure. - int getLimit() { - hardAssert(hasLimit, 'Called getLimit when no limit was set'); + /// Returns true if this query does not specify any query constraints that could remove results. + bool get matchesAllDocuments { + return filters.isEmpty && + _limit == Target.kNoLimit && + startAt == null && + endAt == null && + (explicitSortOrder.isEmpty || (explicitSortOrder.length == 1 && firstOrderByField.isKeyField)); + } + + /// The maximum number of results to return. If there is no limit on the query, then this will + /// cause an assertion failure. + int getLimitToFirst() { + hardAssert(hasLimitToFirst, 'Called getLimitToFirst when no limit was set'); return _limit; } - bool get hasLimit => _limit != noLimit; + bool get hasLimitToFirst { + return _limitType == QueryLimitType.limitToFirst && _limit != Target.kNoLimit; + } - /// Returns a new [Query] with the given limit on how many results can be - /// returned. - /// - /// [limit] represents the maximum number of results to return. If - /// `limit == noLimit`, then no limit is applied. Otherwise, if `limit <= 0`, - /// behavior is unspecified. - Query limit(int limit) => copyWith(limit: limit); + /// The maximum number of last-matching results to return. If there is no limit on the query, then + /// this will cause an assertion failure. + int getLimitToLast() { + hardAssert(hasLimitToLast, 'Called getLimitToLast when no limit was set'); + return _limit; + } + + bool get hasLimitToLast { + return _limitType == QueryLimitType.limitToLast && _limit != Target.kNoLimit; + } + + QueryLimitType getLimitType() { + hardAssert(hasLimitToLast || hasLimitToFirst, 'Called getLimitType when no limit was set'); + return _limitType; + } /// An optional bound to start the query at. Bound getStartAt() => _startAt; @@ -137,16 +159,11 @@ class Query { } final FieldPath queryInequalityField = inequalityField; - hardAssert( - queryInequalityField == null || - newInequalityField == null || - queryInequalityField == newInequalityField, + hardAssert(queryInequalityField == null || newInequalityField == null || queryInequalityField == newInequalityField, 'Query must only have one inequality field'); hardAssert( - explicitSortOrder.isEmpty || - newInequalityField == null || - explicitSortOrder[0].field == newInequalityField, + explicitSortOrder.isEmpty || newInequalityField == null || explicitSortOrder[0].field == newInequalityField, 'First orderBy must match inequality field'); return copyWith(filters: [...filters, filter]); @@ -164,13 +181,20 @@ class Query { throw fail('First orderBy must match inequality field'); } } - final List updatedSortOrder = [ - ...explicitSortOrder, - order - ]; + final List updatedSortOrder = [...explicitSortOrder, order]; return copyWith(explicitSortOrder: updatedSortOrder); } + /// Returns a new Query with the given [limit] on how many results can be returned. + Query limitToFirst(int limit) { + return copyWith(limit: limit, limitType: QueryLimitType.limitToFirst); + } + + /// Returns a new Query with the given limit on how many last-matching results can be returned. 
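limitToFirst and limitToLast above differ only in the recorded QueryLimitType; as the toTarget() change further down shows, a limit-to-last query is sent to the backend and local store with flipped sort directions and swapped cursors, and the first N results of that flipped ordering are presumably re-reversed before being surfaced (that step is outside this hunk). A conceptual illustration of the equivalence on plain lists, not SDK code:

```dart
/// Conceptual illustration: limitToLast(n) over an ascending ordering is
/// equivalent to flipping the ordering, applying limitToFirst(n), and then
/// reversing the returned results.
List<int> limitToLast(List<int> docs, int n) {
  final List<int> flipped = [...docs]..sort((int a, int b) => b.compareTo(a)); // descending
  final List<int> firstN = flipped.take(n).toList(); // "limitToFirst" on the flipped order
  return firstN.reversed.toList(); // restore the original (ascending) order
}

void main() {
  final List<int> docs = [10, 20, 30, 40, 50]; // already sorted ascending
  print(limitToLast(docs, 2)); // [40, 50], the last two matching results
}
```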
+ Query limitToLast(int limit) { + return copyWith(limit: limit, limitType: QueryLimitType.limitToLast); + } + /// Creates a new Query starting at the provided bound. /// The [bound] to end this query at. Query startAt(Bound bound) => copyWith(startAt: bound); @@ -195,6 +219,10 @@ class Query { ); } + List getOrderBy() { + return orderByConstraints; + } + /// Returns the full list of ordering constraints on the query. /// /// This might include additional sort orders added implicitly to match the @@ -208,10 +236,7 @@ class Query { if (inequalityField.isKeyField) { return [keyOrderingAsc]; } else { - return [ - OrderBy.getInstance(OrderByDirection.ascending, inequalityField), - keyOrderingAsc - ]; + return [OrderBy.getInstance(OrderByDirection.ascending, inequalityField), keyOrderingAsc]; } } else { final List res = []; @@ -228,9 +253,7 @@ class Query { final OrderByDirection lastDirection = explicitSortOrder.isNotEmpty ? explicitSortOrder[explicitSortOrder.length - 1].direction : OrderByDirection.ascending; - res.add(lastDirection == OrderByDirection.ascending - ? keyOrderingAsc - : keyOrderingDesc); + res.add(lastDirection == OrderByDirection.ascending ? keyOrderingAsc : keyOrderingDesc); } return res; } @@ -241,8 +264,7 @@ class Query { if (collectionGroup != null) { // NOTE: this.path is currently always empty since we don't expose // Collection Group queries rooted at a document path yet. - return doc.key.hasCollectionId(collectionGroup) && - path.isPrefixOf(docPath); + return doc.key.hasCollectionId(collectionGroup) && path.isPrefixOf(docPath); } else if (DocumentKey.isDocumentKey(path)) { return path == docPath; } else { @@ -263,8 +285,7 @@ class Query { bool _matchesOrderBy(Document doc) { for (OrderBy order in explicitSortOrder) { // order by key always matches - if (order.field != FieldPath.keyPath && - doc.getField(order.field) == null) { + if (order.field != FieldPath.keyPath && doc.getField(order.field) == null) { return false; } } @@ -273,8 +294,7 @@ class Query { /// Makes sure a document is within the bounds, if provided. bool _matchesBounds(Document doc) { - if (_startAt != null && - !_startAt.sortsBeforeDocument(orderByConstraints, doc)) { + if (_startAt != null && !_startAt.sortsBeforeDocument(orderByConstraints, doc)) { return false; } if (_endAt != null && _endAt.sortsBeforeDocument(orderByConstraints, doc)) { @@ -285,64 +305,60 @@ class Query { /// Returns true if the document matches the constraints of this query. bool matches(Document doc) { - return _matchesPathAndCollectionGroup(doc) && - _matchesOrderBy(doc) && - _matchesFilters(doc) && - _matchesBounds(doc); + return _matchesPathAndCollectionGroup(doc) && _matchesOrderBy(doc) && _matchesFilters(doc) && _matchesBounds(doc); } /// Returns a comparator that will sort documents according to this Query's /// sort order. - Comparator get comparator => - QueryComparator(orderByConstraints).comparator; - - /// Returns a canonical string representing this query. This should match the - /// iOS and Android canonical ids for a query exactly. - String get canonicalId { - // TODO(long1eu): Cache the return value. - final StringBuffer builder = StringBuffer(path.canonicalString); - - if (collectionGroup != null) { - builder // - ..write('|cg:') - ..write(collectionGroup); - } - - // Add filters. - builder.write('|f:'); - for (Filter filter in filters) { - builder.write(filter.canonicalId); - } - - // Add order by. 
- builder.write('|ob:'); - for (OrderBy orderBy in orderByConstraints) { - builder - ..write(orderBy.field.canonicalString) - ..write( - orderBy.direction == OrderByDirection.ascending ? 'asc' : 'desc'); - } - - // Add limit. - if (hasLimit) { - builder // - ..write('|l:') - ..write(limit); - } + Comparator get comparator => QueryComparator(orderByConstraints).comparator; + + /// Returns a [Target] instance this query will be mapped to in backend and local store. + Target toTarget() { + if (_memoizedTarget == null) { + if (_limitType == QueryLimitType.limitToFirst) { + _memoizedTarget = Target( + path: path, + collectionGroup: collectionGroup, + filters: filters, + orderBy: getOrderBy(), + limit: _limit, + startAt: getStartAt(), + endAt: getEndAt(), + ); + } else { + // Flip the orderBy directions since we want the last results + final List newOrderBy = []; + for (OrderBy orderBy in orderByConstraints) { + final OrderByDirection dir = orderBy.direction == OrderByDirection.descending + ? OrderByDirection.ascending + : OrderByDirection.descending; + newOrderBy.add(OrderBy.getInstance(dir, orderBy.field)); + } - if (_startAt != null) { - builder // - ..write('|lb:') - ..write(_startAt.canonicalString()); + // We need to swap the cursors to match the now-flipped query ordering. + final Bound newStartAt = _endAt != null ? Bound(position: _endAt.position, before: !_endAt.before) : null; + final Bound newEndAt = _startAt != null ? Bound(position: _startAt.position, before: !_startAt.before) : null; + + _memoizedTarget = Target( + path: path, + collectionGroup: collectionGroup, + filters: filters, + orderBy: newOrderBy, + limit: _limit, + startAt: newStartAt, + endAt: newEndAt, + ); + } } - if (_endAt != null) { - builder // - ..write('|ub:') - ..write(_endAt.canonicalString()); - } + return _memoizedTarget; + } - return builder.toString(); + /// Returns a canonical string representing this query. This should match the + /// iOS and Android canonical ids for a query exactly. + // TODO(long1eu): This is now only used in tests and SpecTestCase. Maybe we can delete it? + String get canonicalId { + return '${toTarget().canonicalId}|lt:$_limitType'; } Query copyWith({ @@ -350,6 +366,7 @@ class Query { String collectionGroup, List explicitSortOrder, int limit, + QueryLimitType limitType, Bound startAt, Bound endAt, }) { @@ -359,6 +376,7 @@ class Query { filters: filters ?? this.filters, explicitSortOrder: explicitSortOrder ?? this.explicitSortOrder, limit: limit ?? _limit, + limitType: limitType ?? _limitType, startAt: startAt ?? _startAt, endAt: endAt ?? 
_endAt, ); @@ -369,58 +387,22 @@ class Query { return identical(this, other) || other is Query && runtimeType == other.runtimeType && - _limit == other._limit && - const ListEquality() - .equals(orderByConstraints, other.orderByConstraints) && - const ListEquality().equals(filters, other.filters) && - path == other.path && - collectionGroup == other.collectionGroup && - _startAt == other._startAt && - _endAt == other._endAt; + _limitType == other._limitType && + toTarget() == other.toTarget(); } @override - int get hashCode => - const ListEquality().hash(filters) ^ - path.hashCode ^ - collectionGroup.hashCode ^ - _limit.hashCode ^ - _startAt.hashCode ^ - _endAt.hashCode ^ - const ListEquality().hash(orderByConstraints); + int get hashCode => _limitType.hashCode ^ toTarget().hashCode; @override String toString() { - final StringBuffer builder = StringBuffer() // - ..write('Query(') - ..write(path.canonicalString); - if (collectionGroup != null) { - builder // - ..write(' collectionGroup=') - ..write(collectionGroup); - } - if (filters.isNotEmpty) { - builder.write(' where '); - for (int i = 0; i < filters.length; i++) { - if (i > 0) { - builder.write(' and '); - } - builder.write(filters[i]); - } - } - - if (explicitSortOrder.isNotEmpty) { - builder.write(' order by '); - for (int i = 0; i < explicitSortOrder.length; i++) { - if (i > 0) { - builder.write(', '); - } - builder.write(explicitSortOrder[i]); - } - } - - builder.write(')'); - return builder.toString(); + return (StringBuffer() + ..write('Query(target=') + ..write(toTarget()) + ..write(';limitType=') + ..write(_limitType) + ..write(')')) + .toString(); } } diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/query_stream.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/query_stream.dart index 63a735bb..f36b4696 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/query_stream.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/query_stream.dart @@ -56,54 +56,57 @@ class QueryStream extends Stream { ListenOptions get options => _options; - Future onViewSnapshot(ViewSnapshot newSnapshot) async { - hardAssert(newSnapshot.changes.isNotEmpty || newSnapshot.didSyncStateChange, - 'We got a new snapshot with no changes?'); + /// Applies the new ViewSnapshot to this listener, raising a user-facing event if applicable + /// (depending on what changed, whether the user has opted into metadata-only changes, etc.). + /// Returns true if a user-facing event was indeed raised. 
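With the operator== and hashCode rewrite above, a Query no longer compares its individual components; it compares its limit type plus the Target it maps to, so two queries are equal exactly when they produce the same canonical target and the same limit behavior. A minimal sketch of that delegate-equality-to-a-canonical-form pattern, written in the pre-null-safety Dart style this patch targets; MiniQuery and CanonicalTarget are simplified stand-ins, not the SDK classes, and unlike the real Query this sketch does not memoize the target:

```dart
/// Simplified stand-in types: equality and hashCode delegate to the
/// canonical target a query maps to, plus its limit type.
class CanonicalTarget {
  const CanonicalTarget(this.path, this.limit);

  final String path;
  final int limit;

  @override
  bool operator ==(Object other) =>
      other is CanonicalTarget && path == other.path && limit == other.limit;

  @override
  int get hashCode => path.hashCode ^ limit.hashCode;
}

enum LimitType { limitToFirst, limitToLast }

class MiniQuery {
  const MiniQuery(this.path, this.limit, this.limitType);

  final String path;
  final int limit;
  final LimitType limitType;

  // The real Query memoizes this; recomputing keeps the sketch short.
  CanonicalTarget toTarget() => CanonicalTarget(path, limit);

  @override
  bool operator ==(Object other) =>
      other is MiniQuery && limitType == other.limitType && toTarget() == other.toTarget();

  @override
  int get hashCode => limitType.hashCode ^ toTarget().hashCode;
}

void main() {
  const MiniQuery a = MiniQuery('rooms', 10, LimitType.limitToFirst);
  const MiniQuery b = MiniQuery('rooms', 10, LimitType.limitToFirst);
  const MiniQuery c = MiniQuery('rooms', 20, LimitType.limitToFirst);
  print(a == b); // true:  same canonical target, same limit type
  print(a == c); // false: different target
}
```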
+ bool onViewSnapshot(ViewSnapshot newSnapshot) { + hardAssert( + newSnapshot.changes.isNotEmpty || newSnapshot.didSyncStateChange, 'We got a new snapshot with no changes?'); + bool raisedEvent = false; if (!options.includeDocumentMetadataChanges) { // Remove the metadata only changes newSnapshot = newSnapshot.copyWith( excludesMetadataChanges: true, changes: newSnapshot.changes - .where((DocumentViewChange change) => - change.type != DocumentViewChangeType.metadata) + .where((DocumentViewChange change) => change.type != DocumentViewChangeType.metadata) .toList(), ); } if (!_raisedInitialEvent) { - final bool shouldRaiseInitialEvent = - _shouldRaiseInitialEvent(newSnapshot, _onlineState); - - if (shouldRaiseInitialEvent) { + if (_shouldRaiseInitialEvent(newSnapshot, _onlineState)) { _raiseInitialEvent(newSnapshot); + raisedEvent = true; } } else if (_shouldRaiseEvent(newSnapshot)) { _sink.add(newSnapshot); + raisedEvent = true; } _snapshot = newSnapshot; + return raisedEvent; } void onError(FirestoreError error) { _sink.addError(error); } - void onOnlineStateChanged(OnlineState onlineState) { + bool onOnlineStateChanged(OnlineState onlineState) { _onlineState = onlineState; - if (_snapshot != null && - !_raisedInitialEvent && - _shouldRaiseInitialEvent(_snapshot, onlineState)) { + bool raisedEvent = false; + if (_snapshot != null && !_raisedInitialEvent && _shouldRaiseInitialEvent(_snapshot, onlineState)) { _raiseInitialEvent(_snapshot); + raisedEvent = true; } + return raisedEvent; } bool _shouldRaiseInitialEvent( ViewSnapshot snapshot, OnlineState onlineState, ) { - hardAssert(!_raisedInitialEvent, - 'Determining whether to raise first event but already had first event.'); + hardAssert(!_raisedInitialEvent, 'Determining whether to raise first event but already had first event.'); // Always raise the first event when we're synced if (!snapshot.isFromCache) { @@ -116,8 +119,7 @@ class QueryStream extends Stream { // Don't raise the event if we're online, aren't synced yet (checked above) // and are waiting for a sync. 
if (options.waitForSyncWhenOnline && maybeOnline) { - hardAssert(snapshot.isFromCache, - 'Waiting for sync, but snapshot is not from cache'); + hardAssert(snapshot.isFromCache, 'Waiting for sync, but snapshot is not from cache'); return false; } @@ -133,8 +135,7 @@ class QueryStream extends Stream { return true; } - final bool hasPendingWritesChanged = _snapshot != null && - _snapshot.hasPendingWrites != snapshot.hasPendingWrites; + final bool hasPendingWritesChanged = _snapshot != null && _snapshot.hasPendingWrites != snapshot.hasPendingWrites; if (snapshot.didSyncStateChange || hasPendingWritesChanged) { return options.includeQueryMetadataChanges; } @@ -145,8 +146,7 @@ class QueryStream extends Stream { } void _raiseInitialEvent(ViewSnapshot snapshot) { - hardAssert( - !_raisedInitialEvent, 'Trying to raise initial event for second time'); + hardAssert(!_raisedInitialEvent, 'Trying to raise initial event for second time'); _raisedInitialEvent = true; _sink.add(ViewSnapshot.fromInitialDocuments( @@ -159,12 +159,8 @@ class QueryStream extends Stream { } @override - StreamSubscription listen( - void Function(ViewSnapshot event) onData, - {Function onError, - void Function() onDone, - bool cancelOnError}) { - return _sink.stream.listen(onData, - onError: onError, onDone: onDone, cancelOnError: cancelOnError); + StreamSubscription listen(void Function(ViewSnapshot event) onData, + {Function onError, void Function() onDone, bool cancelOnError}) { + return _sink.stream.listen(onData, onError: onError, onDone: onDone, cancelOnError: cancelOnError); } } diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/sqlite_component_provider.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/sqlite_component_provider.dart new file mode 100644 index 00000000..6b916865 --- /dev/null +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/sqlite_component_provider.dart @@ -0,0 +1,38 @@ +// File created by +// Lung Razvan +// on 24/01/2021 + +import 'package:cloud_firestore_vm/src/firebase/firestore/core/component_provider.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/core/database_info.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/core/memory_component_provider.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/garbage_collection_scheduler.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/local_serializer.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/lru_garbage_collector.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/persistence.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/sqlite/sqlite_persistence.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/remote/remote_serializer.dart'; + +class SQLiteComponentProvider extends MemoryComponentProvider { + @override + GarbageCollectionScheduler createGarbageCollectionScheduler(ComponentProviderConfiguration configuration) { + final SQLiteLruReferenceDelegate lruDelegate = (persistence as SQLitePersistence).referenceDelegate; + final LruGarbageCollector gc = lruDelegate.garbageCollector; + return gc.newScheduler(configuration.asyncQueue, localStore); + } + + @override + Future createPersistence(ComponentProviderConfiguration configuration) async { + final DatabaseInfo databaseInfo = configuration.databaseInfo; + final LocalSerializer serializer = LocalSerializer(RemoteSerializer(databaseInfo.databaseId)); + 
final LruGarbageCollectorParams params = + LruGarbageCollectorParams.withCacheSizeBytes(configuration.settings.cacheSizeBytes); + + return SQLitePersistence.create( + databaseInfo.persistenceKey, + databaseInfo.databaseId, + serializer, + configuration.openDatabase, + params, + ); + } +} diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/sync_engine.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/sync_engine.dart index 5c49185b..580f484a 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/sync_engine.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/sync_engine.dart @@ -3,6 +3,7 @@ // on 17/09/2018 import 'dart:async'; +import 'dart:collection'; import 'package:_firebase_database_collection_vm/_firebase_database_collection_vm.dart'; import 'package:_firebase_internal_vm/_firebase_internal_vm.dart'; @@ -10,11 +11,11 @@ import 'package:cloud_firestore_vm/src/firebase/firestore/auth/user.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/core/event_manager.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/core/firestore_client.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/core/limbo_document_change.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/core/listent_sequence.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/core/listen_sequence.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/core/online_state.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/core/query.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/core/target_id_generator.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/core/transaction.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/core/transaction_runner.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/core/view.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/core/view_change.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/core/view_snapshot.dart'; @@ -22,22 +23,22 @@ import 'package:cloud_firestore_vm/src/firebase/firestore/firestore_error.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/local/local_store.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/local/local_view_changes.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/local/local_write_result.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/query_data.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/target_data.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/local/query_purpose.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/query_result.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/local/reference_set.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/model/document.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/document_key.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/maybe_document.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/mutation.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/mutation_batch.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/mutation_batch_result.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/no_document.dart'; import 
'package:cloud_firestore_vm/src/firebase/firestore/model/snapshot_version.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/remote/datastore/datastore.dart' - show Datastore; import 'package:cloud_firestore_vm/src/firebase/firestore/remote/remote_event.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/remote/remote_store.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/remote/target_change.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/util/assert.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/util/async_task.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/util/util.dart'; import 'package:grpc/grpc.dart'; import 'package:meta/meta.dart'; @@ -45,20 +46,26 @@ import 'package:meta/meta.dart'; /// [SyncEngine] is the central controller in the client SDK architecture. It is the glue code between the /// [EventManager], [LocalStore], and [RemoteStore]. Some of [SyncEngine]'s responsibilities include: /// -/// Coordinating client requests and remote events between the [EventManager] and the local and remote data stores. -/// Managing a [View] object for each query, providing the unified view between the local and remote data stores. -/// Notifying the [RemoteStore] when the [LocalStore] has new mutations in its queue that need sending to the backend. +/// * Coordinating client requests and remote events between the [EventManager] and the local and remote data stores. +/// * Managing a [View] object for each query, providing the unified view between the local and remote data stores. +/// * Notifying the [RemoteStore] when the [LocalStore] has new mutations in its queue that need sending to the backend. /// /// The [SyncEngine]’s methods should only ever be called by methods running on our own worker dispatch queue. class SyncEngine implements RemoteStoreCallback { - SyncEngine(this._localStore, this._remoteStore, this._currentUser) - : _queryViewsByQuery = {}, - _queryViewsByTarget = {}, - _limboTargetsByKey = {}, - _limboResolutionsByTarget = {}, + SyncEngine( + this._localStore, + this._remoteStore, + this._currentUser, + this._maxConcurrentLimboResolutions, + ) : _queryViewsByQuery = {}, + _queriesByTarget = >{}, + _enqueuedLimboResolutions = Queue(), + _activeLimboTargetsByKey = {}, + _activeLimboResolutionsByTarget = {}, _limboDocumentRefs = ReferenceSet(), _mutationUserCallbacks = >>{}, - _targetIdGenerator = TargetIdGenerator.forSyncEngine(); + _targetIdGenerator = TargetIdGenerator.forSyncEngine(), + _pendingWritesCallbacks = >>{}; static const String _tag = 'SyncEngine'; @@ -68,19 +75,23 @@ class SyncEngine implements RemoteStoreCallback { /// The remote store for sending writes, watches, etc. to the backend. final RemoteStore _remoteStore; - /// [QueryViews] for all active queries, indexed by query. + /// [QueryView]s for all active queries, indexed by query. final Map _queryViewsByQuery; - /// [QueryViews] for all active queries, indexed by target ID. - final Map _queryViewsByTarget; + /// [Query]s mapped to active targets, indexed by target id. + final Map> _queriesByTarget; + final int _maxConcurrentLimboResolutions; - /// When a document is in limbo, we create a special listen to resolve it. This maps the [DocumentKey] of each limbo - /// document to the target id of the listen resolving it. - final Map _limboTargetsByKey; + /// The keys of documents that are in limbo for which we haven't yet started a limbo resolution + /// query. 
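The new _enqueuedLimboResolutions queue, together with _activeLimboTargetsByKey and _maxConcurrentLimboResolutions, bounds how many limbo-resolution listens run at once: keys beyond the cap wait in the queue and are started as active resolutions finish (the _pumpEnqueuedLimboResolutions calls later in this file). A self-contained sketch of that throttling pattern; LimboThrottle and its members are simplified stand-ins, not the SyncEngine fields:

```dart
import 'dart:collection';

/// Simplified stand-in for bounded limbo-resolution tracking: at most
/// [maxConcurrent] keys are "active"; the rest wait in a FIFO queue and are
/// pumped in as active resolutions complete.
class LimboThrottle {
  LimboThrottle(this.maxConcurrent);

  final int maxConcurrent;
  final Queue<String> _enqueued = Queue<String>();
  final Set<String> _active = <String>{};

  void track(String key) {
    _enqueued.add(key);
    _pump();
  }

  void resolved(String key) {
    _active.remove(key);
    _pump();
  }

  void _pump() {
    while (_enqueued.isNotEmpty && _active.length < maxConcurrent) {
      final String key = _enqueued.removeFirst();
      _active.add(key);
      print('start limbo resolution for $key');
    }
  }
}

void main() {
  final LimboThrottle throttle = LimboThrottle(2);
  for (final String key in ['a', 'b', 'c']) {
    throttle.track(key); // starts a and b; c stays queued
  }
  throttle.resolved('a'); // frees a slot, starts c
}
```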
+ final Queue _enqueuedLimboResolutions; - /// Basically the inverse of [_limboTargetsByKey], a map of target id to a [_LimboResolution] (which includes the - /// DocumentKey as well as whether we've received a document for the target). - final Map _limboResolutionsByTarget; + /// Keeps track of the target ID for each document that is in limbo with an active target. + final Map _activeLimboTargetsByKey; + + /// Keeps track of the information about an active limbo resolution for each active target ID that + /// was started for the purpose of limbo resolution. + final Map _activeLimboResolutionsByTarget; /// Used to track any documents that are currently in limbo. final ReferenceSet _limboDocumentRefs; @@ -88,6 +99,9 @@ class SyncEngine implements RemoteStoreCallback { /// Stores user completion blocks, indexed by user and batch id. final Map>> _mutationUserCallbacks; + /// Stores user callbacks waiting for all pending writes to be acknowledged. + final Map>> _pendingWritesCallbacks; + /// Used for creating the target ids for the listens used to resolve limbo documents. final TargetIdGenerator _targetIdGenerator; @@ -100,8 +114,7 @@ class SyncEngine implements RemoteStoreCallback { } void _assertCallback(String method) { - hardAssert(_syncEngineListener != null, - 'Trying to call $method before setting callback'); + hardAssert(_syncEngineListener != null, 'Trying to call $method before setting callback'); } /// Initiates a new listen. @@ -114,36 +127,47 @@ class SyncEngine implements RemoteStoreCallback { /// Returns the target ID assigned to the query. Future listen(Query query) async { _assertCallback('listen'); - hardAssert(!_queryViewsByQuery.containsKey(query), - 'We already listen to query: $query'); + hardAssert(!_queryViewsByQuery.containsKey(query), 'We already listen to query: $query'); - final QueryData queryData = await _localStore.allocateQuery(query); - final ViewSnapshot viewSnapshot = - await initializeViewAndComputeSnapshot(queryData); + final TargetData targetData = await _localStore.allocateTarget(query.toTarget()); + final ViewSnapshot viewSnapshot = await initializeViewAndComputeSnapshot(query, targetData.targetId); await _syncEngineListener.onViewSnapshots([viewSnapshot]); - await _remoteStore.listen(queryData); - return queryData.targetId; + await _remoteStore.listen(targetData); + return targetData.targetId; } - Future initializeViewAndComputeSnapshot( - QueryData queryData) async { - final Query query = queryData.query; - final ImmutableSortedMap docs = - await _localStore.executeQuery(query); - final ImmutableSortedSet remoteKeys = - await _localStore.getRemoteDocumentKeys(queryData.targetId); + Future initializeViewAndComputeSnapshot(Query query, int targetId) async { + final QueryResult queryResult = await _localStore.executeQuery(query, /* usePreviousResults= */ true); - final View view = View(query, remoteKeys); - final ViewDocumentChanges viewDocChanges = view.computeDocChanges(docs); + ViewSnapshotSyncState currentTargetSyncState = ViewSnapshotSyncState.none; + TargetChange synthesizedCurrentChange; + + // If there are already queries mapped to the target id, create a synthesized target change to + // apply the sync state from those queries to the new query. 
+ if (_queriesByTarget[targetId] != null) { + final Query mirrorQuery = _queriesByTarget[targetId].first; + currentTargetSyncState = _queryViewsByQuery[mirrorQuery].view.syncState; + final bool current = currentTargetSyncState == ViewSnapshotSyncState.synced; + synthesizedCurrentChange = TargetChange.createSynthesizedTargetChangeForCurrentChange(current); + } - final ViewChange viewChange = view.applyChanges(viewDocChanges); - hardAssert(view.limboDocuments.isEmpty, - 'View returned limbo docs before target ack from the server'); + // TODO(wuandy): Investigate if we can extract the logic of view change computation and + // update tracked limbo in one place, and have both emitNewSnapsAndNotifyLocalStore + // and here to call that. + final View view = View(query, queryResult.remoteKeys); + final ViewDocumentChanges viewDocChanges = view.computeDocChanges(queryResult.documents); + final ViewChange viewChange = view.applyChanges(viewDocChanges, synthesizedCurrentChange); + await _updateTrackedLimboDocuments(viewChange.limboChanges, targetId); - final QueryView queryView = QueryView(query, queryData.targetId, view); + final QueryView queryView = QueryView(query, targetId, view); _queryViewsByQuery[query] = queryView; - _queryViewsByTarget[queryData.targetId] = queryView; + + if (!_queriesByTarget.containsKey(targetId)) { + _queriesByTarget[targetId] = []; + } + _queriesByTarget[targetId].add(query); + return viewChange.snapshot; } @@ -152,12 +176,19 @@ class SyncEngine implements RemoteStoreCallback { _assertCallback('stopListening'); final QueryView queryView = _queryViewsByQuery[query]; - hardAssert( - queryView != null, 'Trying to stop listening to a query not found'); + hardAssert(queryView != null, 'Trying to stop listening to a query not found'); - await _localStore.releaseQuery(query); - await _remoteStore.stopListening(queryView.targetId); - await _removeAndCleanupQuery(queryView); + _queryViewsByQuery.remove(query); + + final int targetId = queryView.targetId; + final List targetQueries = _queriesByTarget[targetId] // + ..remove(query); + + if (targetQueries.isEmpty) { + await _localStore.releaseTarget(targetId); + await _remoteStore.stopListening(targetId); + await _removeAndCleanupTarget(targetId, GrpcError.ok()); + } } /// Initiates the write of local mutation batch which involves adding the writes to the mutation queue, notifying the @@ -165,15 +196,13 @@ class SyncEngine implements RemoteStoreCallback { /// /// The provided Future will be resolved once the write has been acked/rejected by the backend (or failed locally for /// any other reason). - Future writeMutations( - List mutations, Completer userTask) async { + Future writeMutations(List mutations, Completer userTask) async { _assertCallback('writeMutations'); final LocalWriteResult result = await _localStore.writeLocally(mutations); _addUserCallback(result.batchId, userTask); - await _emitNewSnapsAndNotifyLocalStore( - result.changes, /*remoteEvent:*/ null); + await _emitNewSnapsAndNotifyLocalStore(result.changes, /*remoteEvent:*/ null); await _remoteStore.fillWritePipeline(); } @@ -186,33 +215,22 @@ class SyncEngine implements RemoteStoreCallback { userTasks[batchId] = userTask; } - /// Takes an [updateFunction] in which a set of reads and writes can be performed atomically. In the [updateFunction], - /// the client can read and write values using the supplied transaction object. After the [updateFunction], all changes will be committed. 
+ /// Takes an [updateFunction] in which a set of reads and writes can be performed atomically. /// - /// If some other client has changed any of the data referenced, then the [updateFunction] will be called again. If - /// the [updateFunction] still fails after the given number of retries, then the transaction will be rejected. + /// In the [updateFunction], the client can read and write values using the supplied transaction + /// object. After the [updateFunction], all changes will be committed. If a retryable error occurs + /// (ex: some other client has changed any of the data referenced), then the [updateFunction] will + /// be called again after a backoff. If the [updateFunction] still fails after all retries, then the + /// transaction will be rejected. /// - /// The transaction object passed to the [updateFunction] contains methods for accessing documents and collections. - /// Unlike other datastore access, data accessed with the transaction will not reflect local changes that have not - /// been committed. For this reason, it is required that all reads are performed before any writes. Transactions must - /// be performed while online. + /// The transaction object passed to the [updateFunction] contains methods for accessing documents + /// and collections. Unlike other datastore access, data accessed with the transaction will not + /// reflect local changes that have not been committed. For this reason, it is required that all + /// reads are performed before any writes. Transactions must be performed while online. /// - /// The Future returned is resolved when the transaction is fully committed. - Future transaction( - Future Function(Transaction) updateFunction, int retries) async { - hardAssert(retries >= 0, 'Got negative number of retries for transaction.'); - try { - final Transaction transaction = _remoteStore.createTransaction(); - final TResult result = await updateFunction(transaction); - await transaction.commit(); - return result; - } catch (e) { - if (retries > 0 && _isRetryableTransactionError(e)) { - return transaction(updateFunction, retries - 1); - } - - return Future.error(e); - } + /// The Future returned is completed when the transaction is fully committed. + Future transaction(AsyncQueue asyncQueue, TransactionUpdateFunction updateFunction) async { + return TransactionRunner(asyncQueue, _remoteStore, updateFunction).run(); } /// Called by [FirestoreClient] to notify us of a new remote event. @@ -224,8 +242,7 @@ class SyncEngine implements RemoteStoreCallback { for (MapEntry entry in event.targetChanges.entries) { final int targetId = entry.key; final TargetChange targetChange = entry.value; - final _LimboResolution limboResolution = - _limboResolutionsByTarget[targetId]; + final _LimboResolution limboResolution = _activeLimboResolutionsByTarget[targetId]; if (limboResolution != null) { // Since this is a limbo resolution lookup, it's for a single document and it could be added, modified, or // removed, but not a combination. 
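The rewritten transaction() above no longer loops on a caller-supplied retry count; it hands the update function to a TransactionRunner, which, per the doc comment, retries retryable failures after a backoff until its attempts are exhausted. A sketch of that general retry-with-backoff shape under simplified assumptions (a fixed attempt count and a caller-supplied isRetryable predicate); this is not the TransactionRunner implementation:

```dart
import 'dart:async';

/// Simplified retry-with-backoff runner: re-invokes [update] on retryable
/// failures, waiting a little longer before each new attempt.
Future<T> runWithRetries<T>(
  Future<T> Function() update,
  bool Function(Object error) isRetryable, {
  int attempts = 5,
}) async {
  Duration delay = const Duration(milliseconds: 100);
  for (int attempt = 1; ; attempt++) {
    try {
      return await update();
    } catch (error) {
      if (attempt >= attempts || !isRetryable(error)) {
        rethrow;
      }
      await Future<void>.delayed(delay);
      delay *= 2; // exponential backoff between attempts
    }
  }
}

Future<void> main() async {
  int calls = 0;
  final int value = await runWithRetries<int>(
    () async {
      calls++;
      if (calls < 3) {
        throw StateError('contention'); // pretend another client changed the data
      }
      return calls;
    },
    (Object e) => e is StateError,
  );
  print('committed after $value attempts');
}
```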
@@ -238,11 +255,9 @@ class SyncEngine implements RemoteStoreCallback { if (targetChange.addedDocuments.isNotEmpty) { limboResolution.receivedDocument = true; } else if (targetChange.modifiedDocuments.isNotEmpty) { - hardAssert(limboResolution.receivedDocument, - 'Received change for limbo target document without add.'); + hardAssert(limboResolution.receivedDocument, 'Received change for limbo target document without add.'); } else if (targetChange.removedDocuments.isNotEmpty) { - hardAssert(limboResolution.receivedDocument, - 'Received remove for limbo target document without add.'); + hardAssert(limboResolution.receivedDocument, 'Received remove for limbo target document without add.'); limboResolution.receivedDocument = false; } else { // This was probably just a CURRENT targetChange or similar. @@ -250,8 +265,7 @@ class SyncEngine implements RemoteStoreCallback { } } - final ImmutableSortedMap changes = - await _localStore.applyRemoteEvent(event); + final ImmutableSortedMap changes = await _localStore.applyRemoteEvent(event); await _emitNewSnapsAndNotifyLocalStore(changes, event); } @@ -259,12 +273,12 @@ class SyncEngine implements RemoteStoreCallback { /// of the change. @override Future handleOnlineStateChange(OnlineState onlineState) async { + _assertCallback('handleOnlineStateChange'); final List newViewSnapshots = []; for (MapEntry entry in _queryViewsByQuery.entries) { final View view = entry.value.view; final ViewChange viewChange = view.applyOnlineStateChange(onlineState); - hardAssert(viewChange.limboChanges.isEmpty, - 'OnlineState should not affect limbo documents.'); + hardAssert(viewChange.limboChanges.isEmpty, 'OnlineState should not affect limbo documents.'); if (viewChange.snapshot != null) { newViewSnapshots.add(viewChange.snapshot); } @@ -273,22 +287,23 @@ class SyncEngine implements RemoteStoreCallback { _syncEngineListener.handleOnlineStateChange(onlineState); } - // TODO(long1eu): implement getRemoteKeysForTarget @override - ImmutableSortedSet Function(int targetId) - get getRemoteKeysForTarget { - return (int targetId) { - final _LimboResolution limboResolution = - _limboResolutionsByTarget[targetId]; - if (limboResolution != null && limboResolution.receivedDocument) { - return DocumentKey.emptyKeySet.insert(limboResolution.key); - } else { - final QueryView queryView = _queryViewsByTarget[targetId]; - return queryView != null - ? queryView.view.syncedDocuments - : DocumentKey.emptyKeySet; + ImmutableSortedSet getRemoteKeysForTarget(int targetId) { + final _LimboResolution limboResolution = _activeLimboResolutionsByTarget[targetId]; + if (limboResolution != null && limboResolution.receivedDocument) { + return DocumentKey.emptyKeySet.insert(limboResolution.key); + } else { + ImmutableSortedSet remoteKeys = DocumentKey.emptyKeySet; + if (_queriesByTarget.containsKey(targetId)) { + for (Query query in _queriesByTarget[targetId]) { + if (_queryViewsByQuery.containsKey(query)) { + remoteKeys = remoteKeys.unionWith(_queryViewsByQuery[query].view.syncedDocuments); + } + } } - }; + + return remoteKeys; + } } /// Called by FirestoreClient to notify us of a rejected listen. @@ -296,22 +311,20 @@ class SyncEngine implements RemoteStoreCallback { Future handleRejectedListen(int targetId, GrpcError error) async { _assertCallback('handleRejectedListen'); - final _LimboResolution limboResolution = - _limboResolutionsByTarget[targetId]; - final DocumentKey limboKey = - limboResolution != null ? 
limboResolution.key : null; + final _LimboResolution limboResolution = _activeLimboResolutionsByTarget[targetId]; + final DocumentKey limboKey = limboResolution != null ? limboResolution.key : null; if (limboKey != null) { // Since this query failed, we won't want to manually unlisten to it. So go ahead and remove it from bookkeeping. - _limboTargetsByKey.remove(limboKey); - _limboResolutionsByTarget.remove(targetId); + _activeLimboTargetsByKey.remove(limboKey); + _activeLimboResolutionsByTarget.remove(targetId); + await _pumpEnqueuedLimboResolutions(); // TODO(long1eu): Retry on transient errors? // It's a limbo doc. Create a synthetic event saying it was deleted. This is kind of a hack. Ideally, we would // have a method in the local store to purge a document. However, it would be tricky to keep all of the local // store's invariants with another method. - final Map documentUpdates = - { + final Map documentUpdates = { limboKey: NoDocument( limboKey, SnapshotVersion.none, @@ -325,19 +338,13 @@ class SyncEngine implements RemoteStoreCallback { ); await handleRemoteEvent(event); } else { - final QueryView queryView = _queryViewsByTarget[targetId]; - hardAssert(queryView != null, 'Unknown target: $targetId'); - final Query query = queryView.query; - await _localStore.releaseQuery(query); - await _removeAndCleanupQuery(queryView); - _logErrorIfInteresting(error, 'Listen for $query failed'); - _syncEngineListener.onError(query, error); + await _localStore.releaseTarget(targetId); + await _removeAndCleanupTarget(targetId, error); } } @override - Future handleSuccessfulWrite( - MutationBatchResult mutationBatchResult) async { + Future handleSuccessfulWrite(MutationBatchResult mutationBatchResult) async { _assertCallback('handleSuccessfulWrite'); // The local store may or may not be able to apply the write result and raise events immediately (depending on @@ -345,6 +352,8 @@ class SyncEngine implements RemoteStoreCallback { // listen events. _notifyUser(mutationBatchResult.batch.batchId, /*status:*/ null); + _resolvePendingWriteTasks(mutationBatchResult.batch.batchId); + final ImmutableSortedMap changes = await _localStore.acknowledgeBatch(mutationBatchResult); @@ -355,8 +364,7 @@ class SyncEngine implements RemoteStoreCallback { Future handleRejectedWrite(int batchId, GrpcError status) async { _assertCallback('handleRejectedWrite'); - final ImmutableSortedMap changes = - await _localStore.rejectBatch(batchId); + final ImmutableSortedMap changes = await _localStore.rejectBatch(batchId); if (changes.isNotEmpty) { _logErrorIfInteresting(status, 'Write failed at ${changes.minKey.path}'); @@ -367,13 +375,59 @@ class SyncEngine implements RemoteStoreCallback { // listen events. _notifyUser(batchId, status); + _resolvePendingWriteTasks(batchId); + await _emitNewSnapsAndNotifyLocalStore(changes, /*remoteEvent:*/ null); } + /// Takes a snapshot of current mutation queue, and register a user task which will resolve when + /// all those mutations are either accepted or rejected by the server. + Future registerPendingWritesTask(Completer userTask) async { + if (!_remoteStore.canUseNetwork()) { + Log.d(_tag, + 'The network is disabled. The task returned by [awaitPendingWrites] will not complete until the network is enabled.'); + } + + final int largestPendingBatchId = await _localStore.getHighestUnacknowledgedBatchId(); + + if (largestPendingBatchId == MutationBatch.unknown) { + // Complete the task right away if there is no pending writes at the moment. 
+ userTask.complete(null); + return; + } + + if (!_pendingWritesCallbacks.containsKey(largestPendingBatchId)) { + _pendingWritesCallbacks[largestPendingBatchId] = >[]; + } + + _pendingWritesCallbacks[largestPendingBatchId].add(userTask); + } + + /// Resolves tasks waiting for this batch id to get acknowledged by server, if there are any. + void _resolvePendingWriteTasks(int batchId) { + if (_pendingWritesCallbacks.containsKey(batchId)) { + for (Completer task in _pendingWritesCallbacks[batchId]) { + task.complete(null); + } + + _pendingWritesCallbacks.remove(batchId); + } + } + + void _failOutstandingPendingWritesAwaitingTasks() { + for (MapEntry>> entry in _pendingWritesCallbacks.entries) { + for (Completer task in entry.value) { + task.completeError(FirestoreError( + "'waitForPendingWrites' task is cancelled due to User change.", FirestoreErrorCode.cancelled)); + } + } + + _pendingWritesCallbacks.clear(); + } + /// Resolves the task corresponding to this write result. void _notifyUser(int batchId, GrpcError status) { - final Map> userTasks = - _mutationUserCallbacks[_currentUser]; + final Map> userTasks = _mutationUserCallbacks[_currentUser]; // NOTE: Mutations restored from persistence won't have task completion // sources, so it's okay for this (or the task below) to be null. @@ -391,13 +445,18 @@ class SyncEngine implements RemoteStoreCallback { } } - Future _removeAndCleanupQuery(QueryView view) async { - _queryViewsByQuery.remove(view.query); - _queryViewsByTarget.remove(view.targetId); + Future _removeAndCleanupTarget(int targetId, GrpcError status) async { + for (Query query in _queriesByTarget[targetId]) { + _queryViewsByQuery.remove(query); + if (status.code != StatusCode.ok) { + _syncEngineListener.onError(query, status); + _logErrorIfInteresting(status, 'Listen for $query failed'); + } + } + _queriesByTarget.remove(targetId); - final ImmutableSortedSet limboKeys = - _limboDocumentRefs.referencesForId(view.targetId); - _limboDocumentRefs.removeReferencesForId(view.targetId); + final ImmutableSortedSet limboKeys = _limboDocumentRefs.referencesForId(targetId); + _limboDocumentRefs.removeReferencesForId(targetId); for (DocumentKey key in limboKeys) { if (!_limboDocumentRefs.containsKey(key)) { // We removed the last reference for this key. @@ -409,23 +468,22 @@ class SyncEngine implements RemoteStoreCallback { Future _removeLimboTarget(DocumentKey key) async { // It's possible that the target already got removed because the query failed. In that case, the key won't exist in // limboTargetsByKey. Only do the cleanup if we still have the target. - final int targetId = _limboTargetsByKey[key]; + final int targetId = _activeLimboTargetsByKey[key]; if (targetId != null) { await _remoteStore.stopListening(targetId); - _limboTargetsByKey.remove(key); - _limboResolutionsByTarget.remove(targetId); + _activeLimboTargetsByKey.remove(key); + _activeLimboResolutionsByTarget.remove(targetId); + await _pumpEnqueuedLimboResolutions(); } } /// Computes a new snapshot from the changes and calls the registered callback with the new snapshot. 
Future _emitNewSnapsAndNotifyLocalStore( ImmutableSortedMap changes, - RemoteEvent remoteEvent, [ - String caller, - ]) async { + RemoteEvent remoteEvent, + ) async { final List newSnapshots = []; - final List documentChangesInAllViews = - []; + final List documentChangesInAllViews = []; for (MapEntry entry in _queryViewsByQuery.entries) { final QueryView queryView = entry.value; @@ -434,17 +492,13 @@ class SyncEngine implements RemoteStoreCallback { if (viewDocChanges.needsRefill) { // The query has a limit and some docs were removed/updated, so we need to re-run the query against the local // store to make sure we didn't lose any good docs that had been past the limit. - final ImmutableSortedMap docs = - await _localStore.executeQuery(queryView.query); - viewDocChanges = view.computeDocChanges(docs, viewDocChanges); + final QueryResult queryResult = + await _localStore.executeQuery(queryView.query, /* usePreviousResults= */ false); + viewDocChanges = view.computeDocChanges(queryResult.documents, viewDocChanges); } - final TargetChange targetChange = remoteEvent == null - ? null - : remoteEvent.targetChanges[queryView.targetId]; - final ViewChange viewChange = - queryView.view.applyChanges(viewDocChanges, targetChange); - await _updateTrackedLimboDocuments( - viewChange.limboChanges, queryView.targetId); + final TargetChange targetChange = remoteEvent == null ? null : remoteEvent.targetChanges[queryView.targetId]; + final ViewChange viewChange = queryView.view.applyChanges(viewDocChanges, targetChange); + await _updateTrackedLimboDocuments(viewChange.limboChanges, queryView.targetId); if (viewChange.snapshot != null) { newSnapshots.add(viewChange.snapshot); @@ -461,8 +515,7 @@ class SyncEngine implements RemoteStoreCallback { } /// Updates the limbo document state for the given targetId. - Future _updateTrackedLimboDocuments( - List limboChanges, int targetId) async { + Future _updateTrackedLimboDocuments(List limboChanges, int targetId) async { for (LimboDocumentChange limboChange in limboChanges) { switch (limboChange.type) { case LimboDocumentChangeType.added: @@ -486,26 +539,46 @@ class SyncEngine implements RemoteStoreCallback { Future _trackLimboChange(LimboDocumentChange change) async { final DocumentKey key = change.key; - if (!_limboTargetsByKey.containsKey(key)) { + if (!_activeLimboTargetsByKey.containsKey(key)) { Log.d(_tag, 'New document in limbo: $key'); + _enqueuedLimboResolutions.add(key); + await _pumpEnqueuedLimboResolutions(); + } + } + + /// Starts listens for documents in limbo that are enqueued for resolution, subject to a maximum + /// number of concurrent resolutions. + /// + ///
Without bounding the number of concurrent resolutions, the server can fail with "resource + /// exhausted" errors which can lead to pathological client behavior as seen in + /// https://github.com/firebase/firebase-js-sdk/issues/2683. + Future _pumpEnqueuedLimboResolutions() async { + while (_enqueuedLimboResolutions.isNotEmpty && _activeLimboTargetsByKey.length < _maxConcurrentLimboResolutions) { + final DocumentKey key = _enqueuedLimboResolutions.removeFirst(); final int limboTargetId = _targetIdGenerator.nextId; - final Query query = Query(key.path); - final QueryData queryData = QueryData( - query, - limboTargetId, - ListenSequence.invalid, - QueryPurpose.limboResolution, + _activeLimboResolutionsByTarget[limboTargetId] = _LimboResolution(key); + _activeLimboTargetsByKey[key] = limboTargetId; + await _remoteStore.listen( + TargetData( + Query(key.path).toTarget(), + limboTargetId, + ListenSequence.invalid, + QueryPurpose.limboResolution, + ), ); - _limboResolutionsByTarget[limboTargetId] = _LimboResolution(key); - await _remoteStore.listen(queryData); - _limboTargetsByKey[key] = limboTargetId; } } @visibleForTesting - Map getCurrentLimboDocuments() { + Map getActiveLimboDocumentResolutions() { // Make a defensive copy as the Map continues to be modified. - return Map.from(_limboTargetsByKey); + return Map.from(_activeLimboTargetsByKey); + } + + @visibleForTesting + Queue getEnqueuedLimboDocumentResolutions() { + // Make a defensive copy as the Queue continues to be modified. + return Queue.from(_enqueuedLimboResolutions); } Future handleCredentialChange(User user) async { @@ -513,9 +586,10 @@ class SyncEngine implements RemoteStoreCallback { _currentUser = user; if (userChanged) { + // Fails tasks waiting for pending writes requested by previous user. + _failOutstandingPendingWritesAwaitingTasks(); // Notify local store and emit any resulting events from swapping out the mutation queue. - final ImmutableSortedMap changes = - await _localStore.handleUserChange(user); + final ImmutableSortedMap changes = await _localStore.handleUserChange(user); await _emitNewSnapsAndNotifyLocalStore(changes, /*remoteEvent:*/ null); } @@ -535,8 +609,7 @@ class SyncEngine implements RemoteStoreCallback { final int code = error.code; final String description = error.message ?? ''; - if (code == StatusCode.failedPrecondition && - description.contains('requires an index')) { + if (code == StatusCode.failedPrecondition && description.contains('requires an index')) { return true; } else if (code == StatusCode.permissionDenied) { return true; @@ -544,18 +617,6 @@ class SyncEngine implements RemoteStoreCallback { return false; } - - bool _isRetryableTransactionError(dynamic e) { - if (e is FirestoreError) { - // In transactions, the backend will fail outdated reads with FAILED_PRECONDITION and - // non-matching document versions with ABORTED. These errors should be retried. - final FirestoreErrorCode code = e.code; - return code == FirestoreErrorCode.aborted || - code == FirestoreErrorCode.failedPrecondition || - !Datastore.isPermanentError(e.code); - } - return false; - } } /// Tracks a limbo resolution. 
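The pumping logic in _pumpEnqueuedLimboResolutions above is, in isolation, a small bounded-concurrency queue: keys wait in FIFO order and a new listen is only started while fewer than the maximum number of resolutions are active; finishing one frees a slot and pumps the queue again. The sketch below shows that pattern on its own, with placeholder names (BoundedResolver, startListen) rather than the SDK's types.

import 'dart:collection';

/// Generic bounded-concurrency queue in the spirit of
/// _pumpEnqueuedLimboResolutions: at most [maxConcurrent] keys are active at
/// once; the rest wait their turn in FIFO order.
class BoundedResolver {
  BoundedResolver(this.maxConcurrent, this.startListen);

  final int maxConcurrent;
  final void Function(String key) startListen;

  final Queue<String> _enqueued = Queue<String>();
  final Set<String> _active = <String>{};

  /// Enqueues [key] and starts it right away if a slot is free.
  void enqueue(String key) {
    _enqueued.add(key);
    _pump();
  }

  /// Marks [key] as resolved (or failed), freeing a slot for the next key.
  void complete(String key) {
    _active.remove(key);
    _pump();
  }

  void _pump() {
    while (_enqueued.isNotEmpty && _active.length < maxConcurrent) {
      final String key = _enqueued.removeFirst();
      _active.add(key);
      startListen(key);
    }
  }
}

void main() {
  final BoundedResolver resolver =
      BoundedResolver(2, (String key) => print('listening for $key'));
  resolver..enqueue('a')..enqueue('b')..enqueue('c'); // 'c' waits for a slot.
  resolver.complete('a'); // Frees a slot, so 'c' starts.
}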
diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/target.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/target.dart new file mode 100644 index 00000000..fd0a9570 --- /dev/null +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/target.dart @@ -0,0 +1,181 @@ +// File created by +// Lung Razvan +// on 16/01/2021 + +import 'package:cloud_firestore_vm/src/firebase/firestore/core/bound.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/core/filter/filter.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/core/order_by.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/document_key.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/resource_path.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/util/assert.dart'; +import 'package:collection/collection.dart'; +import 'package:meta/meta.dart'; + + + +/// A Target represents the [WatchTarget] representation of a [Query], which is used by the [LocalStore] +/// and the [RemoteStore] to keep track of and to execute backend queries. While multiple Queries can +/// map to the same [Target], each [Target] maps to a single [WatchTarget] in RemoteStore and a single +/// [TargetData] entry in persistence. +class Target { + /// Initializes a Target with a path and additional query constraints. Path must currently be empty + /// if this is a collection group query. + /// + /// NOTE: you should always construct Target from [Query.toTarget] instead of using this + /// constructor, because Query provides an implicit [orderBy] property. + Target({ + @required this.path, + @required this.collectionGroup, + @required this.filters, + @required this.orderBy, + @required int limit, + @required this.startAt, + @required this.endAt, + }) : _limit = limit; + + static const int kNoLimit = -1; + + final List orderBy; + + /// The filters on the documents returned by the query. + final List filters; + + /// The base path of the query. + final ResourcePath path; + + /// An optional collection group within which to query. + final String collectionGroup; + + final int _limit; + + /// An optional bound to start the query at. + final Bound startAt; + + /// An optional bound to end the query at. + final Bound endAt; + + String _memoizedCannonicalId; + + /// Returns true if this Query is for a specific document. + bool get isDocumentQuery { + return DocumentKey.isDocumentKey(path) && collectionGroup == null && filters.isEmpty; + } + + /// The maximum number of results to return. + /// + /// If there is no limit on the query, then this will cause an assertion failure. + int get limit { + hardAssert(hasLimit, 'Called getter limit when no limit was set'); + return _limit; + } + + bool get hasLimit => _limit != kNoLimit; + + /// Returns a canonical string representing this target. + String get canonicalId { + if (_memoizedCannonicalId != null) { + return _memoizedCannonicalId; + } + + final StringBuffer buffer = StringBuffer() // + ..write(path.canonicalString); + + if (collectionGroup != null) { + buffer // + ..write('|cg:') + ..write(collectionGroup); + } + + // Add filters. + buffer.write('|f:'); + for (Filter filter in filters) { + buffer.write(filter.canonicalId); + } + + // Add order by. + buffer.write('|ob:'); + for (OrderBy orderBy in orderBy) { + buffer + ..write(orderBy.field.canonicalString) // + ..write(orderBy.direction == OrderByDirection.ascending ? 'asc' : 'desc'); + } + + // Add limit. 
+ if (hasLimit) { + buffer // + ..write('|l:') + ..write(limit); + } + + if (startAt != null) { + buffer // + ..write('|lb:') + ..write(startAt.canonicalString()); + } + + if (endAt != null) { + buffer // + ..write('|ub:') + ..write(endAt.canonicalString()); + } + + return _memoizedCannonicalId = buffer.toString(); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is Target && + runtimeType == other.runtimeType && + collectionGroup == other.collectionGroup && + _limit == other._limit && + const ListEquality().equals(orderBy, other.orderBy) && + const ListEquality().equals(filters, other.filters) && + path == other.path && + startAt == other.startAt && + endAt == other.endAt; + + @override + int get hashCode => + collectionGroup.hashCode ^ + _limit.hashCode ^ + const ListEquality().hash(orderBy) ^ + const ListEquality().hash(filters) ^ + path.hashCode ^ + startAt.hashCode ^ + endAt.hashCode; + + @override + String toString() { + final StringBuffer buffer = StringBuffer() // + ..write('Query(') + ..write(path.canonicalString); + if (collectionGroup != null) { + buffer // + ..write(' collectionGroup=') + ..write(collectionGroup); + } + if (filters.isNotEmpty) { + buffer.write(' where '); + for (int i = 0; i < filters.length; i++) { + if (i > 0) { + buffer.write(' and '); + } + buffer.write(filters[i]); + } + } + + if (orderBy.isNotEmpty) { + buffer.write(' order by '); + for (int i = 0; i < orderBy.length; i++) { + if (i > 0) { + buffer.write(', '); + } + buffer.write(orderBy[i]); + } + } + + buffer.write(')'); + return buffer.toString(); + } +} diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/target_id_generator.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/target_id_generator.dart index 2e992b35..ff4ea996 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/target_id_generator.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/target_id_generator.dart @@ -2,35 +2,30 @@ // Lung Razvan // on 20/09/2018 -/// Generates monotonically increasing target IDs for sending targets to the -/// watch stream. +/// Generates monotonically increasing target IDs for sending targets to the watch stream. /// -/// The client constructs two generators, one for the query cache -/// [TargetIdGenerator.forQueryCache], and one for limbo documents -/// [TargetIdGenerator.forSyncEngine]. These two generators produce -/// non-overlapping IDs (by using even and odd IDs respectively). +/// The client constructs two generators, one for the query cache (via [forTargetCache]), and one +/// for limbo documents (via [forSyncEngine]). These two generators produce non-overlapping IDs (by +/// using even and odd IDs respectively). /// -/// By separating the target ID space, the query cache can generate target IDs -/// that persist across client restarts, while sync engine can independently -/// generate in-memory target IDs that are transient and can be reused after a restart. - -// TODO(mrschmidt): Explore removing this class in favor of generating these IDs -// directly in SyncEngine and LocalStore. +/// By separating the target ID space, the query cache can generate target IDs that persist across +/// client restarts, while sync engine can independently generate in-memory target IDs that are +/// transient and can be reused after a restart. +// TODO(mrschmidt): Explore removing this class in favor of generating these IDs directly in +// [SyncEngine] and [LocalStore]. 
class TargetIdGenerator { /// Instantiates a new TargetIdGenerator, using the seed as the first target ID to return. TargetIdGenerator(int generatorId, int seed) : assert((generatorId & _reservedBits) == generatorId, 'Generator ID $generatorId contains more than $_reservedBits reserved bits.'), - assert((seed & _reservedBits) == generatorId, - 'Cannot supply target ID from different generator ID'), + assert((seed & _reservedBits) == generatorId, 'Cannot supply target ID from different generator ID'), _nextId = seed; /// Creates and returns the [TargetIdGenerator] for the local store. - factory TargetIdGenerator.forQueryCache(int after) { - final TargetIdGenerator generator = TargetIdGenerator(_queryCacheId, after); - // Make sure that the next call to `nextId()` returns the first value after 'after'. - generator.nextId; - return generator; + factory TargetIdGenerator.forTargetCache(int after) { + return TargetIdGenerator(_queryCacheId, after) + // Make sure that the next call to `nextId()` returns the first value after 'after'. + ..nextId; } /// Creates and returns the [TargetIdGenerator] for the sync engine. @@ -41,7 +36,6 @@ class TargetIdGenerator { static const int _queryCacheId = 0; static const int _syncEngineId = 1; - static const int _reservedBits = 1; int _nextId; diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/transaction.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/transaction.dart index c2ce60dc..a64ce297 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/transaction.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/transaction.dart @@ -12,20 +12,21 @@ import 'package:cloud_firestore_vm/src/firebase/firestore/model/maybe_document.d import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/delete_mutation.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/mutation.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/precondition.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/verify_mutation.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/no_document.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/snapshot_version.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/remote/datastore/datastore.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/remote/datastore.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/util/assert.dart'; /// Internal transaction object responsible for accumulating the mutations to /// perform and the base versions for any documents read. 
class Transaction { - Transaction(this._transactionClient) + Transaction(this._datastore) : readVersions = {}, mutations = [], committed = false; - final TransactionClient _transactionClient; + final Datastore _datastore; final Map readVersions; final List mutations; @@ -46,12 +47,11 @@ class Transaction { _ensureCommitNotCalled(); if (mutations.isNotEmpty) { - return Future>.error(FirestoreError( - 'Transactions lookups are invalid after writes.', - FirestoreErrorCode.invalidArgument)); + return Future>.error( + FirestoreError('Transactions lookups are invalid after writes.', FirestoreErrorCode.invalidArgument)); } - final List result = await _transactionClient.lookup(keys); + final List result = await _datastore.lookup(keys); result.forEach(_recordVersion); return result; } @@ -59,7 +59,7 @@ class Transaction { /// Stores a set mutation for the given key and value, to be committed when /// [commit] is called. void set(DocumentKey key, UserDataParsedSetData data) { - _write(data.toMutationList(key, _precondition(key))); + _write([data.toMutation(key, _precondition(key))]); writtenDocs.add(key); } @@ -67,7 +67,7 @@ class Transaction { /// when [commit] is called. void update(DocumentKey key, UserDataParsedUpdateData data) { try { - _write(data.toMutationList(key, _preconditionForUpdate(key))); + _write([data.toMutation(key, _preconditionForUpdate(key))]); } on FirestoreError catch (e) { _lastWriteError = e; } @@ -91,14 +91,13 @@ class Transaction { for (Mutation mutation in mutations) { unwritten.remove(mutation.key); } - if (unwritten.isNotEmpty) { - return Future.error(FirestoreError( - 'Every document read in a transaction must also be written.', - FirestoreErrorCode.invalidArgument)); + // For each document that was read but not written to, we want to perform a `verify` operation. + for (DocumentKey key in unwritten) { + mutations.add(VerifyMutation(key, _precondition(key))); } committed = true; - return _transactionClient.commit(mutations); + return _datastore.commit(mutations); } void _recordVersion(MaybeDocument doc) { @@ -117,9 +116,7 @@ class Transaction { final SnapshotVersion existingVersion = readVersions[doc.key]; if (existingVersion != doc.version) { // This transaction will fail no matter what. - throw FirestoreError( - 'Document version changed between two reads.', - FirestoreErrorCode.aborted); + throw FirestoreError('Document version changed between two reads.', FirestoreErrorCode.aborted); } } else { readVersions[doc.key] = docVersion; @@ -156,9 +153,7 @@ class Transaction { // // Note: this can change once we can send separate verify writes in the // transaction. - throw FirestoreError( - "Can't update a document that doesn't exist.", - FirestoreErrorCode.invalidArgument); + throw FirestoreError("Can't update a document that doesn't exist.", FirestoreErrorCode.invalidArgument); } // Document exists, base precondition on document update time. 
return Precondition(updateTime: version); @@ -177,7 +172,6 @@ class Transaction { } void _ensureCommitNotCalled() { - hardAssert(!committed, - 'A transaction object cannot be used after its update callback has been invoked.'); + hardAssert(!committed, 'A transaction object cannot be used after its update callback has been invoked.'); } } diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/transaction_runner.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/transaction_runner.dart new file mode 100644 index 00000000..c7bb6c67 --- /dev/null +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/transaction_runner.dart @@ -0,0 +1,70 @@ +// File created by +// Lung Razvan +// on 24/01/2021 + +import 'dart:async'; + +import 'package:cloud_firestore_vm/src/firebase/firestore/core/transaction.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/firestore_error.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/remote/datastore.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/remote/remote_store.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/util/async_task.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/util/exponential_backoff.dart'; + +typedef TransactionUpdateFunction = Future Function(Transaction transaction); + +/// TransactionRunner encapsulates the logic needed to run and retry transactions with backoff. +class TransactionRunner { + TransactionRunner(AsyncQueue asyncQueue, this._remoteStore, this._updateFunction) + : _retriesLeft = _kRetryCount, + _backoff = ExponentialBackoff(asyncQueue, TimerId.retryTransaction); + + static const int _kRetryCount = 5; + + final Completer _completer = Completer(); + final TransactionUpdateFunction _updateFunction; + final ExponentialBackoff _backoff; + final RemoteStore _remoteStore; + + int _retriesLeft; + + /// Runs the transaction and returns a Task containing the result. + Future run() { + _runWithBackoff(); + return _completer.future; + } + + void _runWithBackoff() { + _backoff.backoffAndRun(() async { + try { + final Transaction transaction = _remoteStore.createTransaction(); + final TResult result = await _updateFunction(transaction); + await transaction.commit(); + _completer.complete(result); + } catch (e) { + _handleTransactionError(e); + } + }); + } + + void _handleTransactionError(Object error) { + if (_retriesLeft > 0 && _isRetryableTransactionError(error)) { + _retriesLeft -= 1; + _runWithBackoff(); + } else { + _completer.completeError(error); + } + } + + static bool _isRetryableTransactionError(Object e) { + if (e is FirestoreError) { + // In transactions, the backend will fail outdated reads with FAILED_PRECONDITION and + // non-matching document versions with ABORTED. These errors should be retried. 
+ final FirestoreErrorCode code = e.code; + return code == FirestoreErrorCode.aborted || + code == FirestoreErrorCode.failedPrecondition || + !Datastore.isPermanentError(code); + } + return false; + } +} diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/user_data.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/user_data.dart index 28dd9e5c..64773cf7 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/user_data.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/user_data.dart @@ -10,9 +10,8 @@ import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/mutatio import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/patch_mutation.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/precondition.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/set_mutation.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/transform_mutation.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/transform_operation.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/model/value/field_value.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/object_value.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/util/assert.dart'; import 'package:meta/meta.dart'; @@ -36,20 +35,26 @@ class UserDataSource { /// UserDataParseContext.isWrite() will return false. static const UserDataSource argument = UserDataSource._(3); + /// Indicates that the source is an Argument that may directly contain nested arrays (e.g. the + /// operand of a `whereIn` query). + static const UserDataSource arrayArgument = UserDataSource._(4); + String get name => _stringValues[_value]; static const List values = [ set, mergeSet, update, - argument + argument, + arrayArgument, ]; static const List _stringValues = [ 'set', 'mergeSet', 'update', - 'argument' + 'argument', + 'arrayArgument', ]; } @@ -64,10 +69,9 @@ class UserDataParseContext { /// data). The [arrayElement] specifies whether or not this context corresponds to an element of an array. // TODO(long1eu): We don't support array paths right now, so path can be null to indicate the context represents any // location within an array (in which case certain features will not work and errors will be somewhat compromised). - const UserDataParseContext._(this._accumulator, this.path, - {@required this.arrayElement}); + const UserDataParseContext._(this._accumulator, this.path, {@required this.arrayElement}); - static final Pattern _reservedFieldRegex = RegExp('^__.*__\$'); + static const String _kReservedFiledDesignator = '__'; final UserDataParseAccumulator _accumulator; @@ -90,6 +94,7 @@ class UserDataParseContext { case UserDataSource.update: return true; case UserDataSource.argument: + case UserDataSource.arrayArgument: return false; default: throw fail('Unexpected case for UserDataSource: ' @@ -98,25 +103,20 @@ class UserDataParseContext { } UserDataParseContext childContextForSegment(String fieldName) { - final FieldPath childPath = - path == null ? null : path.appendSegment(fieldName); - final UserDataParseContext context = - UserDataParseContext._(_accumulator, childPath, arrayElement: false); + final FieldPath childPath = path == null ? 
null : path.appendSegment(fieldName); + final UserDataParseContext context = UserDataParseContext._(_accumulator, childPath, arrayElement: false); return context.._validatePathSegment(fieldName); } UserDataParseContext childContextForField(FieldPath fieldPath) { - final FieldPath childPath = - path == null ? null : path.appendField(fieldPath); - final UserDataParseContext context = - UserDataParseContext._(_accumulator, childPath, arrayElement: false); + final FieldPath childPath = path == null ? null : path.appendField(fieldPath); + final UserDataParseContext context = UserDataParseContext._(_accumulator, childPath, arrayElement: false); return context.._validatePath(); } UserDataParseContext childContextForArrayIndex(int arrayIndex) { // TODO(long1eu): We don't support array paths right now; so make path null. - return UserDataParseContext._(_accumulator, /*path:*/ null, - arrayElement: true); + return UserDataParseContext._(_accumulator, /*path:*/ null, arrayElement: true); } /// Adds the given [fieldPath] to the accumulated FieldMask. @@ -125,15 +125,13 @@ class UserDataParseContext { } /// Adds a transformation for the given field path. - void addToFieldTransforms( - FieldPath fieldPath, TransformOperation transformOperation) { + void addToFieldTransforms(FieldPath fieldPath, TransformOperation transformOperation) { _accumulator.addToFieldTransforms(fieldPath, transformOperation); } /// Creates an error including the given reason and the current field path. Error createError(String reason) { - final String fieldDescription = - (path == null || path.isEmpty) ? '' : ' (found in field $path)'; + final String fieldDescription = (path == null || path.isEmpty) ? '' : ' (found in field $path)'; return ArgumentError('Invalid data. $reason$fieldDescription'); } @@ -148,52 +146,46 @@ class UserDataParseContext { } void _validatePathSegment(String segment) { - if (isWrite && _reservedFieldRegex.allMatches(segment).isNotEmpty) { - throw createError('Document fields cannot begin and end with __'); + if (segment.isEmpty) { + throw createError('Document fields must not be empty'); + } + + if (isWrite && segment.startsWith(_kReservedFiledDesignator) && segment.endsWith(_kReservedFiledDesignator)) { + throw createError('Document fields cannot begin and end with \"__\"'); } } } /// The result of parsing document data (e.g. for a setData call). class UserDataParsedSetData { - UserDataParsedSetData(this._data, this._fieldMask, this._fieldTransforms); + UserDataParsedSetData(this.data, this.fieldMask, this.fieldTransforms); - final ObjectValue _data; - final FieldMask _fieldMask; - final List _fieldTransforms; + final ObjectValue data; + final FieldMask fieldMask; + final List fieldTransforms; - List toMutationList(DocumentKey key, Precondition precondition) { - final List mutations = []; - if (_fieldMask != null) { - mutations.add(PatchMutation(key, _data, _fieldMask, precondition)); + Mutation toMutation(DocumentKey key, Precondition precondition) { + if (fieldMask != null) { + return PatchMutation(key, data, fieldMask, precondition, fieldTransforms); } else { - mutations.add(SetMutation(key, _data, precondition)); + return SetMutation(key, data, precondition, fieldTransforms); } - if (_fieldTransforms.isNotEmpty) { - mutations.add(TransformMutation(key, _fieldTransforms)); - } - return mutations; } } /// The result of parsing 'update' data (i.e. for an updateData call). 
class UserDataParsedUpdateData { - UserDataParsedUpdateData(this._data, this._fieldMask, this.fieldTransforms); + UserDataParsedUpdateData(this.data, this.fieldMask, this.fieldTransforms); - final ObjectValue _data; + final ObjectValue data; - final FieldMask _fieldMask; + // The fieldMask does not include document transforms. + final FieldMask fieldMask; final List fieldTransforms; - List toMutationList(DocumentKey key, Precondition precondition) { - final List mutations = [ - PatchMutation(key, _data, _fieldMask, precondition) - ]; - if (fieldTransforms.isNotEmpty) { - mutations.add(TransformMutation(key, fieldTransforms)); - } - return mutations; + Mutation toMutation(DocumentKey key, Precondition precondition) { + return PatchMutation(key, data, fieldMask, precondition, fieldTransforms); } } @@ -220,8 +212,7 @@ class UserDataParseAccumulator { /// Returns a new [UserDataParseContext] representing the root of a user document. UserDataParseContext get rootContext { - return UserDataParseContext._(this, FieldPath.emptyPath, - arrayElement: false); + return UserDataParseContext._(this, FieldPath.emptyPath, arrayElement: false); } /// Returns true if the given [fieldPath] was encountered in the current document. @@ -247,8 +238,7 @@ class UserDataParseAccumulator { } /// Adds a transformation for the given field path. - void addToFieldTransforms( - FieldPath fieldPath, TransformOperation transformOperation) { + void addToFieldTransforms(FieldPath fieldPath, TransformOperation transformOperation) { fieldTransforms.add(FieldTransform(fieldPath, transformOperation)); } @@ -262,11 +252,9 @@ class UserDataParseAccumulator { /// /// (Optional) The field mask in the result will be the [userFieldMask] and only transforms that are covered by the /// mask will be included. - UserDataParsedSetData toMergeData(ObjectValue data, - [FieldMask userFieldMask]) { + UserDataParsedSetData toMergeData(ObjectValue data, [FieldMask userFieldMask]) { if (userFieldMask == null) { - return UserDataParsedSetData( - data, FieldMask(_fieldMask), fieldTransforms.toList(growable: false)); + return UserDataParsedSetData(data, FieldMask(_fieldMask), fieldTransforms.toList(growable: false)); } final List coveredFieldTransforms = []; @@ -277,8 +265,7 @@ class UserDataParseAccumulator { } } - return UserDataParsedSetData( - data, userFieldMask, coveredFieldTransforms.toList(growable: false)); + return UserDataParsedSetData(data, userFieldMask, coveredFieldTransforms.toList(growable: false)); } /// Wraps the given [data] along with any accumulated transforms into a [UserDataParsedSetData] that represents a @@ -298,7 +285,6 @@ class UserDataParseAccumulator { /// /// Returns [UserDataParsedSetData] that wraps the contents of this [UserDataParseAccumulator]. UserDataParsedUpdateData toUpdateData(ObjectValue data) { - return UserDataParsedUpdateData( - data, FieldMask(_fieldMask), fieldTransforms.toList(growable: false)); + return UserDataParsedUpdateData(data, FieldMask(_fieldMask), fieldTransforms.toList(growable: false)); } } diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/view.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/view.dart index 4f1cdb2d..be332c0b 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/view.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/core/view.dart @@ -42,8 +42,7 @@ class View { /// The set of documents that the server has told us belongs to the target associated with this view. 
ImmutableSortedSet _syncedDocuments; - ImmutableSortedSet get syncedDocuments => - ImmutableSortedSet(_syncedDocuments.toList()); + ImmutableSortedSet get syncedDocuments => ImmutableSortedSet(_syncedDocuments.toList()); /// Documents in the view but not in the remote target ImmutableSortedSet _limboDocuments; @@ -53,18 +52,18 @@ class View { /// Documents that have local changes ImmutableSortedSet mutatedKeys; + ViewSnapshotSyncState get syncState => _syncState; + /// Iterates over a set of doc changes, applies the query limit, and computes what the new results should be, what the /// changes were, and whether we may need to go back to the local cache for more results. Does not make any changes to /// the view. /// /// If this is being called with a refill, then start with [previousChanges] of docs and changes instead of the /// current view. Returns a new set of docs, changes, and refill flag. - ViewDocumentChanges computeDocChanges( - ImmutableSortedMap docChanges, + ViewDocumentChanges computeDocChanges(ImmutableSortedMap docChanges, [ViewDocumentChanges previousChanges]) { - final DocumentViewChangeSet changeSet = previousChanges != null - ? previousChanges.changeSet - : DocumentViewChangeSet(); + final DocumentViewChangeSet changeSet = + previousChanges != null ? previousChanges.changeSet : DocumentViewChangeSet(); final DocumentSet oldDocumentSet = previousChanges != null // ? previousChanges.documentSet : documentSet; @@ -82,10 +81,12 @@ class View { // // Note that this should never get used in a refill (when previousChanges is set), because there will only be adds // -- no deletes or updates. - final Document lastDocInLimit = - (query.hasLimit && oldDocumentSet.length == query.getLimit()) - ? oldDocumentSet.last - : null; + final Document lastDocInLimit = query.hasLimitToFirst && oldDocumentSet.length == query.getLimitToFirst() // + ? oldDocumentSet.last + : null; + final Document firstDocInLimit = query.hasLimitToLast && oldDocumentSet.length == query.getLimitToLast() // + ? oldDocumentSet.first + : null; for (MapEntry entry in docChanges) { final DocumentKey key = entry.key; @@ -98,21 +99,17 @@ class View { } if (newDoc != null) { - hardAssert(key == newDoc.key, - 'Mismatching key in doc change $key != ${newDoc.key}'); + hardAssert(key == newDoc.key, 'Mismatching key in doc change $key != ${newDoc.key}'); if (!query.matches(newDoc)) { newDoc = null; } } - final bool oldDocHadPendingMutations = - oldDoc != null && mutatedKeys.contains(oldDoc.key); + final bool oldDocHadPendingMutations = oldDoc != null && mutatedKeys.contains(oldDoc.key); // We only consider committed mutations for documents that were mutated during the lifetime of the view. final bool newDocHasPendingMutations = newDoc != null && - (newDoc.hasLocalMutations || - (mutatedKeys.contains(newDoc.key) && - newDoc.hasCommittedMutations)); + (newDoc.hasLocalMutations || (mutatedKeys.contains(newDoc.key) && newDoc.hasCommittedMutations)); bool changeApplied = false; @@ -121,31 +118,27 @@ class View { final bool docsEqual = oldDoc.data == newDoc.data; if (!docsEqual) { if (!shouldWaitForSyncedDocument(oldDoc, newDoc)) { - changeSet.addChange( - DocumentViewChange(DocumentViewChangeType.modified, newDoc)); + changeSet.addChange(DocumentViewChange(DocumentViewChangeType.modified, newDoc)); changeApplied = true; - if (lastDocInLimit != null && - query.comparator(newDoc, lastDocInLimit) > 0) { - // This doc moved from inside the limit to after the limit. 
That means there may be some doc in the local - // cache that's actually less than this one. + if ((lastDocInLimit != null && query.comparator(newDoc, lastDocInLimit) > 0) || + (firstDocInLimit != null && query.comparator(newDoc, firstDocInLimit) < 0)) { + // This doc moved from inside the limit to outside the limit. That means there may be + // some doc in the local cache that should be included instead. needsRefill = true; } } } else if (oldDocHadPendingMutations != newDocHasPendingMutations) { - changeSet.addChange( - DocumentViewChange(DocumentViewChangeType.metadata, newDoc)); + changeSet.addChange(DocumentViewChange(DocumentViewChangeType.metadata, newDoc)); changeApplied = true; } } else if (oldDoc == null && newDoc != null) { - changeSet.addChange( - DocumentViewChange(DocumentViewChangeType.added, newDoc)); + changeSet.addChange(DocumentViewChange(DocumentViewChangeType.added, newDoc)); changeApplied = true; } else if (oldDoc != null && newDoc == null) { - changeSet.addChange( - DocumentViewChange(DocumentViewChangeType.removed, oldDoc)); + changeSet.addChange(DocumentViewChange(DocumentViewChangeType.removed, oldDoc)); changeApplied = true; - if (lastDocInLimit != null) { + if (lastDocInLimit != null || firstDocInLimit != null) { // A doc was removed from a full limit query. We'll need to requery from the local cache to see if we know // about some other doc that should be in the results. needsRefill = true; @@ -167,21 +160,21 @@ class View { } } - if (query.hasLimit) { - for (int i = newDocumentSet.length - query.getLimit(); i > 0; --i) { - final Document oldDoc = newDocumentSet.last; + // Drop documents out to meet limitToFirst/limitToLast requirement. + if (query.hasLimitToFirst || query.hasLimitToLast) { + final int limit = query.hasLimitToFirst ? query.getLimitToFirst() : query.getLimitToLast(); + for (int i = newDocumentSet.length - limit; i > 0; --i) { + final Document oldDoc = query.hasLimitToFirst ? newDocumentSet.last : newDocumentSet.first; newDocumentSet = newDocumentSet.remove(oldDoc.key); newMutatedKeys = newMutatedKeys.remove(oldDoc.key); - changeSet.addChange( - DocumentViewChange(DocumentViewChangeType.removed, oldDoc)); + changeSet.addChange(DocumentViewChange(DocumentViewChangeType.removed, oldDoc)); } } - hardAssert(!needsRefill || previousChanges == null, - 'View was refilled using docs that themselves needed refilling.'); + hardAssert( + !needsRefill || previousChanges == null, 'View was refilled using docs that themselves needed refilling.'); - return ViewDocumentChanges._( - newDocumentSet, changeSet, newMutatedKeys, needsRefill); + return ViewDocumentChanges._(newDocumentSet, changeSet, newMutatedKeys, needsRefill); } bool shouldWaitForSyncedDocument(Document oldDoc, Document newDoc) { @@ -190,39 +183,32 @@ class View { // the event, we only raise two user visible events (one with [hasPendingWrites] and the final state of the // document) instead of three (one with [hasPendingWrites], the modified document with [hasPendingWrites] and the // final state of the document). - return oldDoc.hasLocalMutations && - newDoc.hasCommittedMutations && - !newDoc.hasLocalMutations; + return oldDoc.hasLocalMutations && newDoc.hasCommittedMutations && !newDoc.hasLocalMutations; } /// Updates the view with the given [ViewDocumentChanges] and updates limbo docs and sync state from the given /// (optional) target change. Returns a new [ViewChange] with the given docs, changes, and sync state. 
- ViewChange applyChanges(ViewDocumentChanges docChanges, - [TargetChange targetChange]) { - hardAssert( - !docChanges.needsRefill, 'Cannot apply changes that need a refill'); + ViewChange applyChanges(ViewDocumentChanges docChanges, [TargetChange targetChange]) { + hardAssert(!docChanges.needsRefill, 'Cannot apply changes that need a refill'); final DocumentSet oldDocumentSet = documentSet; documentSet = docChanges.documentSet; mutatedKeys = docChanges.mutatedKeys; // Sort changes based on type and query comparator. - final List viewChanges = - docChanges.changeSet.getChanges() - ..sort((DocumentViewChange a, DocumentViewChange b) { - final int typeComp = - View._changeTypeOrder(a).compareTo(View._changeTypeOrder(b)); - a.type.compareTo(b.type); - if (typeComp != 0) { - return typeComp; - } - return query.comparator(a.document, b.document); - }); + final List viewChanges = docChanges.changeSet.getChanges() + ..sort((DocumentViewChange a, DocumentViewChange b) { + final int typeComp = View._changeTypeOrder(a).compareTo(View._changeTypeOrder(b)); + a.type.compareTo(b.type); + if (typeComp != 0) { + return typeComp; + } + return query.comparator(a.document, b.document); + }); _applyTargetChange(targetChange); - final List limboDocumentChanges = - _updateLimboDocuments(); + final List limboDocumentChanges = _updateLimboDocuments(); final bool synced = _limboDocuments.isEmpty && _current; final ViewSnapshotSyncState newSyncState = synced // @@ -274,8 +260,7 @@ class View { _syncedDocuments = _syncedDocuments.insert(documentKey); } for (DocumentKey documentKey in targetChange.modifiedDocuments) { - hardAssert(_syncedDocuments.contains(documentKey), - 'Modified document $documentKey not found in view.'); + hardAssert(_syncedDocuments.contains(documentKey), 'Modified document $documentKey not found in view.'); } for (DocumentKey documentKey in targetChange.removedDocuments) { _syncedDocuments = _syncedDocuments.remove(documentKey); diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/document_change.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/document_change.dart index 18eb6051..81910b0c 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/document_change.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/document_change.dart @@ -46,20 +46,20 @@ class DocumentChange { final QueryDocumentSnapshot document; /// The index of the changed document in the result set immediately prior to this [DocumentChange] - /// (i.e. supposing that all prior [DocumentChange] objects have been applied). Returns -1 for + /// (i.e. assuming that all prior [DocumentChange] objects have been applied). Returns -1 for /// 'added' events. /// /// Returns the index in the old snapshot, after processing all previous changes. final int oldIndex; /// The index of the changed document in the result set immediately after this [DocumentChange] - /// (i.e. supposing that all prior [DocumentChange] objects and the current [DocumentChange] + /// (i.e. assuming that all prior [DocumentChange] objects and the current [DocumentChange] /// object have been applied). Returns -1 for 'removed' events. /// /// The index in the new snapshot, after processing all previous changes. final int newIndex; - /// Creates the list of DocumentChanges from a ViewSnapshot. + /// Creates the list of document changes from a ViewSnapshot. 
static List changesFromSnapshot(Firestore firestore, MetadataChanges metadataChanges, ViewSnapshot snapshot) { final List documentChanges = []; diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/document_reference.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/document_reference.dart index 09fbdf53..46f53e17 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/document_reference.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/document_reference.dart @@ -6,8 +6,7 @@ import 'dart:async'; import 'package:cloud_firestore_vm/src/firebase/firestore/collection_reference.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/core/event_manager.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/core/query.dart' - as core; +import 'package:cloud_firestore_vm/src/firebase/firestore/core/query.dart' as core; import 'package:cloud_firestore_vm/src/firebase/firestore/core/query_stream.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/core/user_data.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/core/view_snapshot.dart'; @@ -18,6 +17,7 @@ import 'package:cloud_firestore_vm/src/firebase/firestore/metadata_change.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/document.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/document_key.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/delete_mutation.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/mutation.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/precondition.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/resource_path.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/set_options.dart'; @@ -26,21 +26,19 @@ import 'package:cloud_firestore_vm/src/firebase/firestore/util/assert.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/util/util.dart'; import 'package:rxdart/rxdart.dart'; -/// A [DocumentReference] refers to a document location in a Firestore database and can be used to write, read, or +/// A [DocumentReference] refers to a document location in a Cloud Firestore database and can be used to write, read, or /// listen to the location. There may or may not exist a document at the referenced location. A [DocumentReference] can /// also be used to create a [CollectionReference] to a subcollection. /// -/// **Subclassing Note**: Firestore classes are not meant to be subclassed except for use in test mocks. Subclassing -/// is not supported in production code and new SDK releases may break code that does so. +/// **Subclassing Note**: Cloud Firestore classes are not meant to be subclassed except for use in test mocks. +/// Subclassing is not supported in production code and new SDK releases may break code that does so. class DocumentReference { // TODO(long1eu): We should checkNotNull(firestore), but tests are currently cheating and setting it to null. DocumentReference(this.key, this.firestore) : assert(key != null); - factory DocumentReference.forPath( - ResourcePath path, Firestore firestore) { + factory DocumentReference.forPath(ResourcePath path, Firestore firestore) { if (path.length.remainder(2) != 0) { - throw ArgumentError( - 'Invalid document reference. Document references must have an even number of segments, but ' + throw ArgumentError('Invalid document reference. 
Document references must have an even number of segments, but ' '${path.canonicalString} has ${path.length}'); } @@ -49,7 +47,7 @@ class DocumentReference { final DocumentKey key; - /// Gets the Firestore instance associated with this document reference. + /// Gets the Cloud Firestore instance associated with this document reference. final Firestore firestore; String get id => key.path.last; @@ -74,9 +72,7 @@ class DocumentReference { /// Returns the [CollectionReference] instance. CollectionReference collection(String collectionPath) { checkNotNull(collectionPath, 'Provided collection path must not be null.'); - return CollectionReference( - key.path.appendField(ResourcePath.fromString(collectionPath)), - firestore); + return CollectionReference(key.path.appendField(ResourcePath.fromString(collectionPath)), firestore); } /// Writes to the document referred to by this DocumentReference. If the document does not yet exist, it will be @@ -91,27 +87,10 @@ class DocumentReference { checkNotNull(data, 'Provided data must not be null.'); checkNotNull(options, 'Provided options must not be null.'); final UserDataParsedSetData parsed = options.merge - ? firestore.dataConverter.parseMergeData(data, options.fieldMask) - : firestore.dataConverter.parseSetData(data); + ? firestore.userDataReader.parseMergeData(data, options.fieldMask) + : firestore.userDataReader.parseSetData(data); - await voidErrorTransformer(() => - firestore.client.write(parsed.toMutationList(key, Precondition.none))); - } - - /// Updates fields in the document referred to by this DocumentReference. If no document exists yet, the update will - /// fail. - /// - /// [data] is a List of field/value pairs to be updated. - /// - /// The first item should be the field to update followed by the value. Repeat this pattern for any additional - /// field/value pairs. - /// - /// Returns a Future that will be resolved when the write finishes. - Future updateFromList(List data) async { - final UserDataParsedUpdateData parsedData = firestore.dataConverter - .parseUpdateDataFromList(collectUpdateArguments(1, data)); - await voidErrorTransformer(() => firestore.client - .write(parsedData.toMutationList(key, Precondition(exists: true)))); + await voidErrorTransformer(() => firestore.client.write([parsed.toMutation(key, Precondition.none)])); } /// Updates fields in the document referred to by this [DocumentReference]. If no document exists yet, the update will @@ -122,18 +101,17 @@ class DocumentReference { /// /// Returns a Future that will be resolved when the write finishes. Future update(Map data) async { - final UserDataParsedUpdateData parsedData = - firestore.dataConverter.parseUpdateData(data); - await voidErrorTransformer(() => firestore.client - .write(parsedData.toMutationList(key, Precondition(exists: true)))); + final UserDataParsedUpdateData parsedData = firestore.userDataReader.parseUpdateData(data); + await voidErrorTransformer(() { + return firestore.client.write([parsedData.toMutation(key, Precondition(exists: true))]); + }); } /// Deletes the document referred to by this [DocumentReference]. /// /// Returns a Future that will be resolved when the delete completes. Future delete() { - return voidErrorTransformer(() => firestore.client - .write([DeleteMutation(key, Precondition.none)])); + return voidErrorTransformer(() => firestore.client.write([DeleteMutation(key, Precondition.none)])); } /// Reads the document referenced by this [DocumentReference]. 
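The even-segment check in DocumentReference.forPath above follows from the path model: segments alternate collection/document, so a path that names a document always has a non-zero, even number of segments. A tiny standalone illustration of that rule (isDocumentPath is a made-up helper, not part of the SDK):

/// Returns true when [path] has the collection/document/... shape of a
/// document path, i.e. a non-zero, even number of segments.
bool isDocumentPath(String path) {
  final List<String> segments =
      path.split('/').where((String s) => s.isNotEmpty).toList();
  return segments.isNotEmpty && segments.length.isEven;
}

void main() {
  print(isDocumentPath('users/alice'));           // true: collection/document
  print(isDocumentPath('users/alice/posts'));     // false: ends on a collection
  print(isDocumentPath('users/alice/posts/p1'));  // true
}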
@@ -149,8 +127,7 @@ class DocumentReference { source ??= Source.defaultSource; if (source == Source.cache) { - final Document doc = - await firestore.client.getDocumentFromLocalCache(key); + final Document doc = await firestore.client.getDocumentFromLocalCache(key); final bool hasPendingWrites = doc != null && doc.hasLocalMutations; return DocumentSnapshot( @@ -166,20 +143,15 @@ class DocumentReference { } Future _getViaSnapshotListener(Source source) { - return _getSnapshotsInternal(const ListenOptions.all()) - .map((DocumentSnapshot snapshot) { + return _getSnapshotsInternal(const ListenOptions.all()).map((DocumentSnapshot snapshot) { if (!snapshot.exists && snapshot.metadata.isFromCache) { // TODO(long1eu): Reconsider how to raise missing documents when offline. // If we're online and the document doesn't exist then we set the result of the Future with a document with // document.exists set to false. If we're offline however, we set the Error on the Task. Two options: // 1. Cache the negative response from the server so we can deliver that even when you're offline. // 2. Actually set the Error of the Task if the document doesn't exist when you are offline. - throw FirestoreError( - 'Failed to get document because the client is offline.', - FirestoreErrorCode.unavailable); - } else if (snapshot.exists && - snapshot.metadata.isFromCache && - source == Source.server) { + throw FirestoreError('Failed to get document because the client is offline.', FirestoreErrorCode.unavailable); + } else if (snapshot.exists && snapshot.metadata.isFromCache && source == Source.server) { throw FirestoreError( 'Failed to get document from server. (However, this document does exist in the local cache. Run again ' 'without setting source to Source.SERVER to retrieve the cached document.)', @@ -196,25 +168,21 @@ class DocumentReference { } Stream getSnapshots([MetadataChanges changes]) { - final ListenOptions options = - _internalOptions(changes ?? MetadataChanges.exclude); + final ListenOptions options = _internalOptions(changes ?? MetadataChanges.exclude); return _getSnapshotsInternal(options); } Stream _getSnapshotsInternal(ListenOptions options) { final core.Query query = core.Query(key.path); - return Stream.fromFuture( - firestore.client.listen(query, options)) + return Stream.fromFuture(firestore.client.listen(query, options)) .flatMap((QueryStream it) => it) .map((ViewSnapshot snapshot) { - hardAssert(snapshot.documents.length <= 1, - 'Too many documents returned on a document query'); + hardAssert(snapshot.documents.length <= 1, 'Too many documents returned on a document query'); final Document document = snapshot.documents.getDocument(key); if (document != null) { - final bool hasPendingWrites = - snapshot.mutatedKeys.contains(document.key); + final bool hasPendingWrites = snapshot.mutatedKeys.contains(document.key); return DocumentSnapshot.fromDocument( firestore, document, @@ -237,8 +205,7 @@ class DocumentReference { /// Converts the API [MetadataChanges] object to the internal options object. 
static ListenOptions _internalOptions(MetadataChanges metadataChanges) { return ListenOptions( - includeDocumentMetadataChanges: - metadataChanges == MetadataChanges.include, + includeDocumentMetadataChanges: metadataChanges == MetadataChanges.include, includeQueryMetadataChanges: metadataChanges == MetadataChanges.include, ); } diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/document_snapshot.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/document_snapshot.dart index e020b29c..dc04c1dd 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/document_snapshot.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/document_snapshot.dart @@ -11,9 +11,7 @@ import 'package:cloud_firestore_vm/src/firebase/firestore/geo_point.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/database_id.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/document.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/document_key.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/model/field_path.dart' - as model; -import 'package:cloud_firestore_vm/src/firebase/firestore/model/value/field_value.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/field_path.dart' as model; import 'package:cloud_firestore_vm/src/firebase/firestore/server_timestamp_behavior.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/snapshot_metadata.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/util/assert.dart'; @@ -45,8 +43,7 @@ class DocumentSnapshot { @required bool isFromCache, @required bool hasPendingWrites, }) { - return DocumentSnapshot(firestore, doc.key, doc, - isFromCache: isFromCache, hasPendingWrites: hasPendingWrites); + return DocumentSnapshot(firestore, doc.key, doc, isFromCache: isFromCache, hasPendingWrites: hasPendingWrites); } factory DocumentSnapshot.fromNoDocument( @@ -55,8 +52,7 @@ class DocumentSnapshot { @required bool isFromCache, @required bool hasPendingWrites, }) { - return DocumentSnapshot(firestore, key, null, - isFromCache: isFromCache, hasPendingWrites: hasPendingWrites); + return DocumentSnapshot(firestore, key, null, isFromCache: isFromCache, hasPendingWrites: hasPendingWrites); } final Firestore _firestore; @@ -89,15 +85,9 @@ class DocumentSnapshot { /// /// Returns the fields of the document as a Map or null if the document doesn't exist. Map getData(ServerTimestampBehavior serverTimestampBehavior) { - checkNotNull(serverTimestampBehavior, - 'Provided serverTimestampBehavior value must not be null.'); - if (document == null) { - return null; - } else { - final _FieldValueOptions fieldValueOptions = - _FieldValueOptions(serverTimestampBehavior: serverTimestampBehavior); - return _convertObject(document.data, fieldValueOptions); - } + checkNotNull(serverTimestampBehavior, 'Provided serverTimestampBehavior value must not be null.'); + final UserDataWriter userDataWriter = UserDataWriter(_firestore, serverTimestampBehavior); + return document == null ? null : userDataWriter.convertObject(document.data.fields); } /// Returns whether or not the field exists in the document. Returns false if the document does not exist. @@ -116,8 +106,7 @@ class DocumentSnapshot { /// Returns true if the field exists. 
bool containsPath(FieldPath fieldPath) { checkNotNull(fieldPath, 'Provided field path must not be null.'); - return (document != null) && - (document.getField(fieldPath.internalPath) != null); + return (document != null) && (document.getField(fieldPath.internalPath) != null); } Object operator [](String field) => get(field); @@ -130,8 +119,7 @@ class DocumentSnapshot { /// /// Returns the value at the given field or null. Object get(String field, [ServerTimestampBehavior serverTimestampBehavior]) { - return getField(FieldPath.fromDotSeparatedPath(field), - serverTimestampBehavior ?? ServerTimestampBehavior.none); + return getField(FieldPath.fromDotSeparatedPath(field), serverTimestampBehavior ?? ServerTimestampBehavior.none); } /// Returns the value at the field or null if the field or document doesn't exist. @@ -141,16 +129,12 @@ class DocumentSnapshot { /// value. /// /// Returns the value at the given field or null. - Object getField(FieldPath fieldPath, - [ServerTimestampBehavior serverTimestampBehavior]) { + Object getField(FieldPath fieldPath, [ServerTimestampBehavior serverTimestampBehavior]) { serverTimestampBehavior ??= ServerTimestampBehavior.none; checkNotNull(fieldPath, 'Provided field path must not be null.'); - checkNotNull(serverTimestampBehavior, - 'Provided serverTimestampBehavior value must not be null.'); + checkNotNull(serverTimestampBehavior, 'Provided serverTimestampBehavior value must not be null.'); - final _FieldValueOptions fieldValueOptions = - _FieldValueOptions(serverTimestampBehavior: serverTimestampBehavior); - return _getInternal(fieldPath.internalPath, fieldValueOptions); + return _getInternal(fieldPath.internalPath, serverTimestampBehavior); } /// Returns the value of the field as a bool. If the value is not a bool this will throw a state error. @@ -198,12 +182,10 @@ class DocumentSnapshot { /// /// Throws [StateError] if the value is not a Date. /// Returns the value of the field - DateTime getDate(String field, - [ServerTimestampBehavior serverTimestampBehavior]) { + DateTime getDate(String field, [ServerTimestampBehavior serverTimestampBehavior]) { serverTimestampBehavior ??= ServerTimestampBehavior.none; checkNotNull(field, 'Provided field path must not be null.'); - checkNotNull(serverTimestampBehavior, - 'Provided serverTimestampBehavior value must not be null.'); + checkNotNull(serverTimestampBehavior, 'Provided serverTimestampBehavior value must not be null.'); final Object maybeDate = _getInternal( FieldPath.fromDotSeparatedPath(field).internalPath, _FieldValueOptions( @@ -222,12 +204,10 @@ class DocumentSnapshot { /// /// Throws [StateError] if the value is not a timestamp field. 
/// Returns the value of the field - Timestamp getTimestamp(String field, - [ServerTimestampBehavior serverTimestampBehavior]) { + Timestamp getTimestamp(String field, [ServerTimestampBehavior serverTimestampBehavior]) { serverTimestampBehavior ??= ServerTimestampBehavior.none; checkNotNull(field, 'Provided field path must not be null.'); - checkNotNull(serverTimestampBehavior, - 'Provided serverTimestampBehavior value must not be null.'); + checkNotNull(serverTimestampBehavior, 'Provided serverTimestampBehavior value must not be null.'); final Object maybeTimestamp = _getInternal( FieldPath.fromDotSeparatedPath(field).internalPath, _FieldValueOptions(serverTimestampBehavior: serverTimestampBehavior), @@ -279,8 +259,7 @@ class DocumentSnapshot { final T result = value; return result; } on CastError catch (_) { - throw StateError( - 'Field \'$field\' is not a $T, but it is ${value.runtimeType}'); + throw StateError('Field \'$field\' is not a $T, but it is ${value.runtimeType}'); } } @@ -300,8 +279,7 @@ class DocumentSnapshot { } } - Object _convertServerTimestamp( - ServerTimestampValue value, _FieldValueOptions options) { + Object _convertServerTimestamp(ServerTimestampValue value, _FieldValueOptions options) { switch (options.serverTimestampBehavior) { case ServerTimestampBehavior.previous: return value.previousValue; @@ -333,8 +311,7 @@ class DocumentSnapshot { return DocumentReference(key, _firestore); } - Map _convertObject( - ObjectValue objectValue, _FieldValueOptions options) { + Map _convertObject(ObjectValue objectValue, _FieldValueOptions options) { final Map result = {}; for (MapEntry entry in objectValue.internalValue) { result[entry.key] = _convertValue(entry.value, options); @@ -342,8 +319,7 @@ class DocumentSnapshot { return result; } - List _convertArray( - ArrayValue arrayValue, _FieldValueOptions options) { + List _convertArray(ArrayValue arrayValue, _FieldValueOptions options) { final List result = List(arrayValue.internalValue.length); int i = 0; for (FieldValue v in arrayValue.internalValue) { @@ -370,9 +346,7 @@ class DocumentSnapshot { runtimeType == other.runtimeType && _firestore == other._firestore && _key == other._key && - (document == null - ? other.document == null - : document == other.document) && + (document == null ? other.document == null : document == other.document) && metadata == other.metadata; @override @@ -392,14 +366,3 @@ class DocumentSnapshot { .toString(); } } - -/// Holds settings that define field value deserialization options. 
-class _FieldValueOptions { - _FieldValueOptions({ - this.serverTimestampBehavior, - this.timestampsInSnapshotsEnabled = true, - }); - - final ServerTimestampBehavior serverTimestampBehavior; - final bool timestampsInSnapshotsEnabled; -} diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/firestore.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/firestore.dart index a3696000..83fcd713 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/firestore.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/firestore.dart @@ -11,22 +11,22 @@ import 'package:cloud_firestore_vm/src/firebase/firestore/auth/firebase_auth_cre import 'package:cloud_firestore_vm/src/firebase/firestore/collection_reference.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/core/database_info.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/core/firestore_client.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/core/query.dart' - as core; -import 'package:cloud_firestore_vm/src/firebase/firestore/core/transaction.dart' - as core; +import 'package:cloud_firestore_vm/src/firebase/firestore/core/query.dart' as core; +import 'package:cloud_firestore_vm/src/firebase/firestore/core/transaction.dart' as core; import 'package:cloud_firestore_vm/src/firebase/firestore/document_reference.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/field_value.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/firestore_error.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/firestore_multi_db_component.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/firestore_settings.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/sqlite/sqlite_persistence.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/database_id.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/resource_path.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/query.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/transaction.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/user_data_converter.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/util/assert.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/util/async_task.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/util/database.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/util/timer_task.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/write_batch.dart'; import 'package:firebase_core_vm/firebase_core_vm.dart'; import 'package:meta/meta.dart'; @@ -38,15 +38,15 @@ import 'package:meta/meta.dart'; class Firestore { @visibleForTesting Firestore(this.databaseId, this.firebaseApp, this.client, this._scheduler) - : dataConverter = UserDataConverter(databaseId); + : userDataReader = UserDataConverter(databaseId); static const String _tag = 'FirebaseFirestore'; final DatabaseId databaseId; final FirebaseApp firebaseApp; - final UserDataConverter dataConverter; + final UserDataConverter userDataReader; final FirestoreClient client; - final TaskScheduler _scheduler; + final AsyncQueue _scheduler; static Firestore get instance { final FirebaseApp app = FirebaseApp.instance; @@ -54,8 +54,7 @@ class Firestore { throw StateError('You must call [FirebaseApp.initializeApp] first.'); } - final Firestore firestore = - 
FirestoreMultiDbComponent.instances[DatabaseId.defaultDatabaseId]; + final Firestore firestore = FirestoreMultiDbComponent.instances[DatabaseId.defaultDatabaseId]; if (firestore == null) { throw StateError('You must call [Firestore.getInstance] first.'); } @@ -69,7 +68,7 @@ class Firestore { } @visibleForTesting - TaskScheduler get scheduler => _scheduler; + AsyncQueue get scheduler => _scheduler; static Future getInstance( FirebaseApp app, { @@ -80,8 +79,7 @@ class Firestore { checkNotNull(app, 'Provided FirebaseApp must not be null.'); Firestore.setLoggingEnabled(); - final FirestoreMultiDbComponent component = - FirestoreMultiDbComponent(app, app.authProvider, settings); + final FirestoreMultiDbComponent component = FirestoreMultiDbComponent(app, app.authProvider, settings); checkNotNull(component, 'Firestore component is not present.'); final Firestore firestore = await component.get(database, openDatabase); @@ -105,12 +103,10 @@ class Firestore { if (authProvider != null) { provider = FirebaseAuthCredentialsProvider(authProvider); } else if (app.authProvider != app) { - Log.d( - _tag, 'Using ${app.authProvider.runtimeType} as the auth provider.'); + Log.d(_tag, 'Using ${app.authProvider.runtimeType} as the auth provider.'); provider = FirebaseAuthCredentialsProvider(app.authProvider); } else { - Log.d(_tag, - 'Firebase Auth not available, falling back to unauthenticated usage.'); + Log.d(_tag, 'Firebase Auth not available, falling back to unauthenticated usage.'); provider = EmptyCredentialsProvider(); } @@ -129,7 +125,7 @@ class Firestore { sslEnabled: settings.sslEnabled, ); - final TaskScheduler scheduler = TaskScheduler(app.name); + final AsyncQueue scheduler = AsyncQueue(app.name); final FirestoreClient firestoreClient = await FirestoreClient.initialize( databaseInfo, settings, @@ -169,8 +165,7 @@ class Firestore { DocumentReference document(String documentPath) { checkNotNull(documentPath, 'Provided document path must not be null.'); _ensureClientConfigured(); - return DocumentReference.forPath( - ResourcePath.fromString(documentPath), this); + return DocumentReference.forPath(ResourcePath.fromString(documentPath), this); } /// Creates and returns a new [Query] that includes all documents in the @@ -182,8 +177,7 @@ class Firestore { Query collectionGroup(String collectionId) { checkNotNull(collectionId, 'Provided collection ID must not be null.'); if (collectionId.contains('/')) { - throw ArgumentError( - 'Invalid collectionId \'$collectionId\'. Collection IDs must not contain \'/\'.'); + throw ArgumentError('Invalid collectionId \'$collectionId\'. Collection IDs must not contain \'/\'.'); } _ensureClientConfigured(); @@ -244,6 +238,12 @@ class Firestore { return batch.commit(); } + Future _shutdownInternal() { + // The client must be initialized to ensure that all subsequent API usage throws an exception. + _ensureClientConfigured(); + return client.terminate(); + } + /// Shuts down this [Firestore] instance. /// /// To restart after shutdown, simply create a new instance of Firestore with @@ -258,10 +258,7 @@ class Firestore { /// method is useful only when you want to force this instance to release all /// of its resources. Future shutdown() async { - // The client must be initialized to ensure that all subsequent API usage - // throws an exception. - _ensureClientConfigured(); - return client.shutdown(); + return _shutdownInternal(); } /// Re-enables network usage for this instance after a prior call to [disableNetwork]. 
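For orientation, a rough sketch of how the migrated public API touched by the hunks above might be used. This is not part of the patch: the `app` handle, the 'users/alice' path, and the field names are assumptions made for illustration, and the calls follow the signatures shown in this diff (Firestore.getInstance, document, update, get, getData).

// Illustrative usage sketch only; package imports (cloud_firestore_vm,
// firebase_core_vm) are omitted and the target document is assumed to exist.
Future<void> example(FirebaseApp app) async {
  final Firestore firestore = await Firestore.getInstance(app);
  final DocumentReference doc = firestore.document('users/alice');

  // Writes are latency compensated: the local view reflects them immediately.
  await doc.update(<String, dynamic>{'score': 2});

  // Reads can target the local cache explicitly instead of the backend.
  final DocumentSnapshot snapshot = await doc.get(Source.cache);
  print(snapshot.getData(ServerTimestampBehavior.none));
}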
@@ -291,12 +288,44 @@ class Firestore { } } + /// Clears the persistent storage, including pending writes and cached documents. + /// + /// Must be called while the Firestore instance is not started (after the app is + /// shut down or when the app is first initialized). On startup, this method must be called before + /// other methods (other than setFirestoreSettings()). If the Firestore + /// instance is still running, the returned Future will fail with an error code of + /// [FirestoreErrorCode.failedPrecondition]. + /// + ///
Note: clearPersistence() is primarily intended to help write reliable tests + /// that use Cloud Firestore. It uses an efficient mechanism for dropping existing data but does + /// not attempt to securely overwrite or otherwise make cached data unrecoverable. For applications + /// that are sensitive to the disclosure of cached data in between user sessions, we strongly + /// recommend not enabling persistence at all. + /// + /// @return A Task that is resolved when the persistent storage is cleared. Otherwise, + /// the Task is rejected with an error. + Future clearPersistence() { + final Completer completer = Completer(); + _scheduler.enqueueAndForgetEvenAfterShutdown(() async { + try { + if (client != null && !client.isTerminated) { + throw FirestoreError('Persistence cannot be cleared while the firestore instance is running.', + FirestoreErrorCode.failedPrecondition); + } + SQLitePersistence.clearPersistence(databaseId, persistenceKey); + completer.complete(); + } on FirestoreError catch (e, s) { + completer.completeError(e, s); + } + }); + return completer.future; + } + /// Helper to validate a [DocumentReference]. Used by [WriteBatch] and [Transaction]. void validateReference(DocumentReference docRef) { checkNotNull(docRef, 'Provided DocumentReference must not be null.'); if (docRef.firestore != this) { - throw ArgumentError( - 'Provided document reference is from a different Cloud Firestore instance.'); + throw ArgumentError('Provided document reference is from a different Cloud Firestore instance.'); } } } diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/firestore_error.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/firestore_error.dart index 220a15a4..aa40607d 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/firestore_error.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/firestore_error.dart @@ -29,8 +29,8 @@ class FirestoreErrorCode { static const FirestoreErrorCode dataLoss = FirestoreErrorCode._(15); static const FirestoreErrorCode unauthenticated = FirestoreErrorCode._(16); - static FirestoreErrorCode fromValue(GrpcError error) { - return values[error.code]; + static FirestoreErrorCode fromValue(GrpcError code) { + return values[code.code]; } static const List values = [ @@ -82,8 +82,7 @@ class FirestoreError extends FirebaseError { String message, this.code, [ this.cause, - StackTrace stackTrance, - ]) : super(message, stackTrance) { + ]) : super(message) { Preconditions.checkNotNull(message); Preconditions.checkNotNull(code); } diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/default_query_engine.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/default_query_engine.dart new file mode 100644 index 00000000..42d514d8 --- /dev/null +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/default_query_engine.dart @@ -0,0 +1,138 @@ +// File created by +// Lung Razvan +// on 17/01/2021 + +import 'package:_firebase_database_collection_vm/_firebase_database_collection_vm.dart'; +import 'package:_firebase_internal_vm/_firebase_internal_vm.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/core/query.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/local_documents_view.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/query_engine.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/document.dart'; +import 
'package:cloud_firestore_vm/src/firebase/firestore/model/document_key.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/maybe_document.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/snapshot_version.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/util/assert.dart'; + +/// A query engine that takes advantage of the target document mapping in the TargetCache. Query +/// execution is optimized by only reading the documents that previously matched a query plus any +/// documents that were edited after the query was last listened to. +/// +/// There are some cases where this optimization is not guaranteed to produce the same results as +/// full collection scans. In these cases, query processing falls back to full scans. These cases +/// are: +/// +/// * Limit queries where a document that matched the query previously no longer matches the query. +/// * Limit queries where a document edit may cause the document to sort below another document +/// that is in the local cache. +/// * Queries that have never been CURRENT or free of limbo documents. +class DefaultQueryEngine implements QueryEngine { + static const String _kLogTag = 'DefaultQueryEngine'; + + LocalDocumentsView _localDocumentsView; + + @override + set localDocumentsView(LocalDocumentsView localDocuments) { + _localDocumentsView = localDocuments; + } + + @override + Future> getDocumentsMatchingQuery( + Query query, + SnapshotVersion lastLimboFreeSnapshotVersion, + ImmutableSortedSet remoteKeys, + ) async { + hardAssert(_localDocumentsView != null, 'setLocalDocumentsView() not called'); + + // Queries that match all documents don't benefit from using key-based lookups. It is more + // efficient to scan all documents in a collection, rather than to perform individual lookups. + if (query.matchesAllDocuments) { + return _executeFullCollectionScan(query); + } + + // Queries that have never seen a snapshot without limbo free documents should also be run as a + // full collection scan. + if (lastLimboFreeSnapshotVersion == SnapshotVersion.none) { + return _executeFullCollectionScan(query); + } + + final ImmutableSortedMap documents = await _localDocumentsView.getDocuments(remoteKeys); + final ImmutableSortedSet previousResults = _applyQuery(query, documents); + + if ((query.hasLimitToFirst || query.hasLimitToLast) && + _needsRefill(query.getLimitType(), previousResults, remoteKeys, lastLimboFreeSnapshotVersion)) { + return _executeFullCollectionScan(query); + } + + Log.d(_kLogTag, 'Re-using previous result from $lastLimboFreeSnapshotVersion to execute query: $query'); + + // Retrieve all results for documents that were updated since the last limbo-document free + // remote snapshot. + ImmutableSortedMap updatedResults = + await _localDocumentsView.getDocumentsMatchingQuery(query, lastLimboFreeSnapshotVersion); + + // We merge `previousResults` into `updateResults`, since `updateResults` is already a + // ImmutableSortedMap. If a document is contained in both lists, then its contents are the same. + for (Document result in previousResults) { + updatedResults = updatedResults.insert(result.key, result); + } + + return updatedResults; + } + + /// Applies the query filter and sorting to the provided documents. + ImmutableSortedSet _applyQuery(Query query, ImmutableSortedMap documents) { + // Sort the documents and re-apply the query filter since previously matching documents do not + // necessarily still match the query. 
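+    // For example (hypothetical data): a document that previously satisfied an
+    // equality filter such as status == 'open' may have been edited locally so
+    // that it no longer matches; the query.matches() check below drops it.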
+ ImmutableSortedSet queryResults = ImmutableSortedSet([], query.comparator); + for (MapEntry entry in documents) { + final MaybeDocument maybeDoc = entry.value; + if (maybeDoc is Document && query.matches(maybeDoc)) { + final Document doc = maybeDoc; + queryResults = queryResults.insert(doc); + } + } + return queryResults; + } + + /// Determines if a limit query needs to be refilled from cache, making it ineligible for + /// index-free execution. + /// + /// The [limitType] represents the type of limit query for refill calculation, while [sortedPreviousResults] + /// are the documents that matched the query when it was last synchronized, sorted by the query's comparator. + bool _needsRefill( + QueryLimitType limitType, + ImmutableSortedSet sortedPreviousResults, + ImmutableSortedSet remoteKeys, + SnapshotVersion limboFreeSnapshotVersion, + ) { + // The query needs to be refilled if a previously matching document no longer matches. + if (remoteKeys.length != sortedPreviousResults.length) { + return true; + } + + // Limit queries are not eligible for index-free query execution if there is a potential that an + // older document from cache now sorts before a document that was previously part of the limit. + // This, however, can only happen if the document at the edge of the limit goes out of limit. If + // a document that is not the limit boundary sorts differently, the boundary of the limit itself + // did not change and documents from cache will continue to be "rejected" by this boundary. + // Therefore, we can ignore any modifications that don't affect the last document. + final Document documentAtLimitEdge = limitType == QueryLimitType.limitToFirst // + ? sortedPreviousResults.maxEntry + : sortedPreviousResults.minEntry; + if (documentAtLimitEdge == null) { + // We don't need to refill the query if there were already no documents. + return false; + } + return documentAtLimitEdge.hasPendingWrites || documentAtLimitEdge.version.compareTo(limboFreeSnapshotVersion) > 0; + } + + @override + void handleDocumentChange(MaybeDocument oldDocument, MaybeDocument newDocument) { + // No indexes to update. + } + + Future> _executeFullCollectionScan(Query query) { + Log.d(_kLogTag, 'Using full collection scan to execute query: $query'); + return _localDocumentsView.getDocumentsMatchingQuery(query, SnapshotVersion.none); + } +} diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/garbage_collection_scheduler.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/garbage_collection_scheduler.dart new file mode 100644 index 00000000..1b7ae494 --- /dev/null +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/garbage_collection_scheduler.dart @@ -0,0 +1,10 @@ +// File created by +// Lung Razvan +// on 16/01/2021 + +/// Helper interface to control the Garbage Collector. 
+abstract class GarbageCollectionScheduler { + void start(); + + void stop(); +} diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/indexed_query_engine.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/indexed_query_engine.dart index 5dd69e76..c0b851aa 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/indexed_query_engine.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/indexed_query_engine.dart @@ -17,18 +17,20 @@ import 'package:cloud_firestore_vm/src/firebase/firestore/model/document_collect import 'package:cloud_firestore_vm/src/firebase/firestore/model/document_key.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/field_path.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/maybe_document.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/model/value/field_value.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/snapshot_version.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/values.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/util/assert.dart'; +import 'package:cloud_firestore_vm/src/proto/google/firestore/v1/index.dart' hide Document; const double _kHighSelectivity = 1.0; const double _kLowSelectivity = 0.5; -/// [ArrayValue] and [ObjectValue] are currently considered low cardinality +/// [Value_ValueType.arrayValue] and [Value_ValueType.mapValue] are currently considered low cardinality /// because we don't index them uniquely. -const List _kLowCardinalityTypes = [ - BoolValue, - ArrayValue, - ObjectValue +const List _kLowCardinalityTypes = [ + Value_ValueType.booleanValue, + Value_ValueType.arrayValue, + Value_ValueType.mapValue ]; /// An indexed implementation of [QueryEngine] which performs fairly efficient @@ -53,7 +55,7 @@ const List _kLowCardinalityTypes = [ /// * HIGH_SELECTIVITY: [BlobValue], [DoubleValue], [GeoPointValue], /// [NumberValue], [ReferenceValue], [StringValue], [TimestampValue], /// [NullValue] -/// * LOW_SELECTIVITY: [ArrayValue], [ObjectValue], [BoolValue] +/// * LOW_SELECTIVITY: [ArrayValue], [MapValue], [BoolValue] /// /// Note that we consider [NullValue] a high selectivity filter as we only /// support equals comparisons against 'null' and expect most data to be @@ -67,34 +69,34 @@ const List _kLowCardinalityTypes = [ /// A full collection scan is therefore only needed when no [filters] or /// [orderBy] constraints are specified. class IndexedQueryEngine implements QueryEngine { - const IndexedQueryEngine(this.localDocuments, this.collectionIndex); + IndexedQueryEngine(this.collectionIndex); - final LocalDocumentsView localDocuments; final SQLiteCollectionIndex collectionIndex; + LocalDocumentsView _localDocuments; + + @override + set localDocumentsView(LocalDocumentsView localDocuments) { + _localDocuments = localDocuments; + } @override Future> getDocumentsMatchingQuery( Query query, + SnapshotVersion lastLimboFreeSnapshotVersion, + ImmutableSortedSet remoteKeys, ) { + hardAssert(_localDocuments != null, 'localDocumentsView has not been set'); + return query.isDocumentQuery - ? localDocuments.getDocumentsMatchingQuery(query) + ? 
_localDocuments.getDocumentsMatchingQuery(query, SnapshotVersion.none) : _performCollectionQuery(query); } - @override - void handleDocumentChange( - MaybeDocument oldDocument, MaybeDocument newDocument) { - // TODO(long1eu): Determine changed fields and make appropriate - // addEntry() / removeEntry() on SQLiteCollectionIndex. - throw StateError('Not yet implemented.'); - } - /// Executes the query using both indexes and post-filtering. Future> _performCollectionQuery( Query query, ) async { - hardAssert(!query.isDocumentQuery, - 'matchesCollectionQuery called with document query.'); + hardAssert(!query.isDocumentQuery, 'matchesCollectionQuery called with document query.'); final IndexRange indexRange = _extractBestIndexRange(query); ImmutableSortedMap filteredResults; @@ -102,12 +104,11 @@ class IndexedQueryEngine implements QueryEngine { if (indexRange != null) { filteredResults = await _performQueryUsingIndex(query, indexRange); } else { - hardAssert(query.filters.isEmpty, - 'If there are any filters, we should be able to use an index.'); + hardAssert(query.filters.isEmpty, 'If there are any filters, we should be able to use an index.'); // TODO(long1eu): Call overlay.getCollectionDocuments(query.path) and // filter the results (there may still be startAt/endAt bounds that // apply). - filteredResults = await localDocuments.getDocumentsMatchingQuery(query); + filteredResults = await _localDocuments.getDocumentsMatchingQuery(query, SnapshotVersion.none); } return filteredResults; @@ -116,16 +117,12 @@ class IndexedQueryEngine implements QueryEngine { /// Applies 'filter' to the index cursor, looks up the relevant documents from /// the local documents view and returns /// all matches. - Future> _performQueryUsingIndex( - Query query, IndexRange indexRange) async { - ImmutableSortedMap results = - DocumentCollections.emptyDocumentMap(); - final IndexCursor cursor = - collectionIndex.getCursor(query.path, indexRange); + Future> _performQueryUsingIndex(Query query, IndexRange indexRange) async { + ImmutableSortedMap results = DocumentCollections.emptyDocumentMap(); + final IndexCursor cursor = collectionIndex.getCursor(query.path, indexRange); try { while (cursor.next) { - final Document document = - await localDocuments.getDocument(cursor.documentKey); + final Document document = await _localDocuments.getDocument(cursor.documentKey); if (query.matches(document)) { results = results.insert(cursor.documentKey, document); } @@ -146,17 +143,14 @@ class IndexedQueryEngine implements QueryEngine { hardAssert(filter is FieldFilter, 'Filter type expected to be FieldFilter'); final FieldFilter fieldFilter = filter; - if (fieldFilter.value == null || fieldFilter.value == DoubleValue.nan) { + final Value filterValue = fieldFilter.value; + if (isNullValue(filterValue) || isNanValue(filterValue)) { return _kHighSelectivity; } else { final double operatorSelectivity = - fieldFilter.operator == FilterOperator.equal - ? _kHighSelectivity - : _kLowSelectivity; + fieldFilter.operator == FilterOperator.equal ? _kHighSelectivity : _kLowSelectivity; final double typeSelectivity = - _kLowCardinalityTypes.contains(fieldFilter.value.runtimeType) - ? _kLowSelectivity - : _kHighSelectivity; + _kLowCardinalityTypes.contains(fieldFilter.value.whichValueType()) ? 
_kLowSelectivity : _kHighSelectivity; return typeSelectivity * operatorSelectivity; } @@ -174,8 +168,7 @@ class IndexedQueryEngine implements QueryEngine { if (query.filters.isNotEmpty) { Filter selectedFilter; for (Filter currentFilter in query.filters) { - final double estimatedSelectivity = - _estimateFilterSelectivity(currentFilter); + final double estimatedSelectivity = _estimateFilterSelectivity(currentFilter); if (estimatedSelectivity > currentSelectivity) { selectedFilter = currentFilter; currentSelectivity = estimatedSelectivity; @@ -202,7 +195,7 @@ class IndexedQueryEngine implements QueryEngine { static IndexRange _convertFilterToIndexRange(Filter filter) { if (filter is FieldFilter) { final FieldFilter relationFilter = filter; - final FieldValue filterValue = relationFilter.value; + final Value filterValue = relationFilter.value; switch (relationFilter.operator) { case FilterOperator.equal: return IndexRange( @@ -229,4 +222,11 @@ class IndexedQueryEngine implements QueryEngine { } return IndexRange(fieldPath: filter.field); } + + @override + void handleDocumentChange(MaybeDocument oldDocument, MaybeDocument newDocument) { + // TODO(long1eu): Determine changed fields and make appropriate + // addEntry() / removeEntry() on SQLiteCollectionIndex. + throw StateError('Not yet implemented.'); + } } diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/local_documents_view.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/local_documents_view.dart index 6e433837..27233eca 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/local_documents_view.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/local_documents_view.dart @@ -5,9 +5,9 @@ import 'dart:async'; import 'package:_firebase_database_collection_vm/_firebase_database_collection_vm.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/core/query.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistance/index_manager.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistance/mutation_queue.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistance/remote_document_cache.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/index_manager.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/mutation_queue.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/remote_document_cache.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/document.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/document_collections.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/document_key.dart'; @@ -41,14 +41,12 @@ class LocalDocumentsView { /// Returns the the local view of the document identified by [key]. If we /// don't have any cached state it returns null Future getDocument(DocumentKey key) async { - final List batches = - await mutationQueue.getAllMutationBatchesAffectingDocumentKey(key); + final List batches = await mutationQueue.getAllMutationBatchesAffectingDocumentKey(key); return _getDocument(key, batches); } // Internal version of [getDocument] that allows reusing batches. 
- Future _getDocument( - DocumentKey key, List inBatches) async { + Future _getDocument(DocumentKey key, List inBatches) async { MaybeDocument document = await remoteDocumentCache.get(key); for (MutationBatch batch in inBatches) { @@ -65,9 +63,7 @@ class LocalDocumentsView { return MapEntry( key, batches.fold( - value, - (MaybeDocument localView, MutationBatch batch) => - batch.applyToLocalView(key, localView))); + value, (MaybeDocument localView, MutationBatch batch) => batch.applyToLocalView(key, localView))); }); } @@ -75,24 +71,19 @@ class LocalDocumentsView { /// /// If we don't have cached state for a document in [keys], a [NoDocument] will be stored for that key in the /// resulting set. - Future> getDocuments( - Iterable keys) async { - final Map docs = - await remoteDocumentCache.getAll(keys); + Future> getDocuments(Iterable keys) async { + final Map docs = await remoteDocumentCache.getAll(keys); return getLocalViewOfDocuments(docs); } /// Similar to [getDocuments], but creates the local view from the given [baseDocs] without retrieving documents from /// the local store. - Future> - getLocalViewOfDocuments(Map baseDocs) async { - ImmutableSortedMap results = - DocumentCollections.emptyMaybeDocumentMap(); - - final List batches = await mutationQueue - .getAllMutationBatchesAffectingDocumentKeys(baseDocs.keys); - final Map docs = - _applyLocalMutationsToDocuments(baseDocs, batches); + Future> getLocalViewOfDocuments( + Map baseDocs) async { + ImmutableSortedMap results = DocumentCollections.emptyMaybeDocumentMap(); + + final List batches = await mutationQueue.getAllMutationBatchesAffectingDocumentKeys(baseDocs.keys); + final Map docs = _applyLocalMutationsToDocuments(baseDocs, batches); for (MapEntry entry in docs.entries) { // TODO(long1eu): Don't conflate missing / deleted. final MaybeDocument maybeDoc = entry.value ?? @@ -107,29 +98,32 @@ class LocalDocumentsView { return results; } - /// Performs a query against the local view of all documents. - // TODO(long1eu): The Querying implementation here should move 100% to - // [SimpleQueryEngine]. Instead, we should just provide a - // getCollectionDocuments method here that return all the documents in a - // given collection so that SimpleQueryEngine] can do that and then filter in + // TODO(long1eu): The Querying implementation here should move 100% to the query engines. + // Instead, we should just provide a getCollectionDocuments() method here that return all the + // documents in a given collection so that query engine can do that and then filter in // memory. + + /// Performs a query against the local view of all documents. + /// + /// If [sinceReadTime] is not set to [SnapshotVersion.min], return only documents that have been + /// read since this snapshot version (exclusive). Future> getDocumentsMatchingQuery( - Query query) async { + Query query, + SnapshotVersion sinceReadTime, + ) async { final ResourcePath path = query.path; if (query.isDocumentQuery) { return _getDocumentsMatchingDocumentQuery(path); } else if (query.isCollectionGroupQuery) { - return _getDocumentsMatchingCollectionGroupQuery(query); + return _getDocumentsMatchingCollectionGroupQuery(query, sinceReadTime); } else { - return _getDocumentsMatchingCollectionQuery(query); + return _getDocumentsMatchingCollectionQuery(query, sinceReadTime); } } /// Performs a simple document lookup for the given path. 
- Future> - _getDocumentsMatchingDocumentQuery(ResourcePath path) async { - ImmutableSortedMap result = - DocumentCollections.emptyDocumentMap(); + Future> _getDocumentsMatchingDocumentQuery(ResourcePath path) async { + ImmutableSortedMap result = DocumentCollections.emptyDocumentMap(); // Just do a simple document lookup. final MaybeDocument doc = await getDocument(DocumentKey.fromPath(path)); if (doc is Document) { @@ -138,23 +132,21 @@ class LocalDocumentsView { return result; } - Future> - _getDocumentsMatchingCollectionGroupQuery(Query query) async { - hardAssert(query.path.isEmpty, - 'Currently we only support collection group queries at the root.'); + Future> _getDocumentsMatchingCollectionGroupQuery( + Query query, + SnapshotVersion sinceReadTime, + ) async { + hardAssert(query.path.isEmpty, 'Currently we only support collection group queries at the root.'); final String collectionId = query.collectionGroup; - ImmutableSortedMap results = - DocumentCollections.emptyDocumentMap(); - final List parents = - await indexManager.getCollectionParents(collectionId); + ImmutableSortedMap results = DocumentCollections.emptyDocumentMap(); + final List parents = await indexManager.getCollectionParents(collectionId); // Perform a collection query against each parent that contains the // collectionId and aggregate the results. for (ResourcePath parent in parents) { - final Query collectionQuery = - query.asCollectionQueryAtPath(parent.appendSegment(collectionId)); + final Query collectionQuery = query.asCollectionQueryAtPath(parent.appendSegment(collectionId)); final ImmutableSortedMap collectionResults = - await _getDocumentsMatchingCollectionQuery(collectionQuery); + await _getDocumentsMatchingCollectionQuery(collectionQuery, sinceReadTime); for (MapEntry docEntry in collectionResults) { results = results.insert(docEntry.key, docEntry.value); } @@ -163,13 +155,14 @@ class LocalDocumentsView { } /// Queries the remote documents and overlays mutations. 
- Future> - _getDocumentsMatchingCollectionQuery(Query query) async { + Future> _getDocumentsMatchingCollectionQuery( + Query query, + SnapshotVersion sinceReadTime, + ) async { ImmutableSortedMap results = - await remoteDocumentCache.getAllDocumentsMatchingQuery(query); + await remoteDocumentCache.getAllDocumentsMatchingQuery(query, sinceReadTime); - final List matchingBatches = - await mutationQueue.getAllMutationBatchesAffectingQuery(query); + final List matchingBatches = await mutationQueue.getAllMutationBatchesAffectingQuery(query); results = await _addMissingBaseDocuments(matchingBatches, results); for (MutationBatch batch in matchingBatches) { for (Mutation mutation in batch.mutations) { @@ -180,8 +173,7 @@ class LocalDocumentsView { final DocumentKey key = mutation.key; final MaybeDocument baseDoc = results[key]; - final MaybeDocument mutatedDoc = - mutation.applyToLocalView(baseDoc, baseDoc, batch.localWriteTime); + final MaybeDocument mutatedDoc = mutation.applyToLocalView(baseDoc, baseDoc, batch.localWriteTime); if (mutatedDoc is Document) { results = results.insert(key, mutatedDoc); } else { @@ -214,16 +206,14 @@ class LocalDocumentsView { final Set missingDocKeys = {}; for (MutationBatch batch in matchingBatches) { for (Mutation mutation in batch.mutations) { - if (mutation is PatchMutation && - !existingDocs.containsKey(mutation.key)) { + if (mutation is PatchMutation && !existingDocs.containsKey(mutation.key)) { missingDocKeys.add(mutation.key); } } } ImmutableSortedMap mergedDocs = existingDocs; - final Map missingDocs = - await remoteDocumentCache.getAll(missingDocKeys); + final Map missingDocs = await remoteDocumentCache.getAll(missingDocKeys); for (MapEntry entry in missingDocs.entries) { if (entry.value != null && (entry.value is Document)) { mergedDocs = mergedDocs.insert(entry.key, entry.value); diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/local_serializer.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/local_serializer.dart index 408c5863..8a5d096f 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/local_serializer.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/local_serializer.dart @@ -4,18 +4,18 @@ import 'dart:typed_data'; -import 'package:cloud_firestore_vm/src/firebase/firestore/core/query.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/query_data.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/core/target.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/local/query_purpose.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/target_data.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/document.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/document_key.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/maybe_document.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/mutation.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/mutation_batch.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/no_document.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/object_value.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/snapshot_version.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/unknown_document.dart'; -import 
'package:cloud_firestore_vm/src/firebase/firestore/model/value/field_value.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/remote/remote_serializer.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/util/assert.dart'; import 'package:cloud_firestore_vm/src/firebase/timestamp.dart'; @@ -37,7 +37,7 @@ class LocalSerializer { ..hasCommittedMutations = document.hasCommittedMutations; } else if (document is Document) { builder - ..document = document.proto ?? _encodeDocument(document) + ..document = _encodeDocument(document) ..hasCommittedMutations = document.hasCommittedMutations; } else if (document is UnknownDocument) { builder @@ -66,13 +66,9 @@ class LocalSerializer { /// Encodes a Document for local storage. This differs from the v1 RPC serializer for Documents in that it preserves /// the updateTime, which is considered an output only value by the server. proto.Document _encodeDocument(Document document) { - final proto.Document builder = proto.Document.create() - ..name = rpcSerializer.encodeKey(document.key); - - final ObjectValue value = document.data; - for (MapEntry entry in value.internalValue) { - builder.fields[entry.key] = rpcSerializer.encodeValue(entry.value); - } + final proto.Document builder = proto.Document.create() // + ..name = rpcSerializer.encodeKey(document.key) + ..fields.addAll(document.data.fields); final Timestamp updateTime = document.version.timestamp; builder.updateTime = rpcSerializer.encodeTimestamp(updateTime); @@ -80,20 +76,15 @@ class LocalSerializer { } /// Decodes a Document proto to the equivalent model. - Document _decodeDocument( - proto.Document document, bool hasCommittedMutations) { + Document _decodeDocument(proto.Document document, bool hasCommittedMutations) { final DocumentKey key = rpcSerializer.decodeKey(document.name); - final SnapshotVersion version = - rpcSerializer.decodeVersion(document.updateTime); - return Document.fromProto( + final SnapshotVersion version = rpcSerializer.decodeVersion(document.updateTime); + return Document( key, version, - hasCommittedMutations - ? DocumentState.committedMutations - : DocumentState.synced, - document, - rpcSerializer.decodeValue, + ObjectValue.fromMap(document.fields), + hasCommittedMutations ? DocumentState.committedMutations : DocumentState.synced, ); } @@ -106,12 +97,10 @@ class LocalSerializer { } /// Decodes a NoDocument proto to the equivalent model. - NoDocument _decodeNoDocument( - proto.NoDocument proto, bool hasCommittedMutations) { + NoDocument _decodeNoDocument(proto.NoDocument proto, bool hasCommittedMutations) { final DocumentKey key = rpcSerializer.decodeKey(proto.name); final SnapshotVersion version = rpcSerializer.decodeVersion(proto.readTime); - return NoDocument(key, version, - hasCommittedMutations: hasCommittedMutations); + return NoDocument(key, version, hasCommittedMutations: hasCommittedMutations); } /// Encodes a [UnknownDocument] value to the equivalent proto. @@ -147,73 +136,97 @@ class LocalSerializer { /// Decodes a [WriteBatch] proto into a MutationBatch model. 
*/ MutationBatch decodeMutationBatch(proto.WriteBatch batch) { final int batchId = batch.batchId; - final Timestamp localWriteTime = - rpcSerializer.decodeTimestamp(batch.localWriteTime); + final Timestamp localWriteTime = rpcSerializer.decodeTimestamp(batch.localWriteTime); final int baseMutationsCount = batch.baseWrites.length; final List baseMutations = List(baseMutationsCount); for (int i = 0; i < baseMutationsCount; i++) { baseMutations[i] = rpcSerializer.decodeMutation(batch.baseWrites[i]); } - final int mutationsCount = batch.writes.length; - final List mutations = List(mutationsCount); - for (int i = 0; i < mutationsCount; i++) { - mutations[i] = rpcSerializer.decodeMutation(batch.writes[i]); + final List mutations = []; + // Squash old transform mutations into existing patch or set mutations. The replacement of + // representing `transforms` with `update_transforms` on the SDK means that old `transform` + // mutations stored in IndexedDB need to be updated to `update_transforms`. + // TODO(b/174608374): Remove this code once we perform a schema migration. + for (int i = batch.writes.length - 1; i >= 0; --i) { + final proto.Write mutation = batch.writes[i]; + if (mutation.hasTransform()) { + hardAssert( + i >= 1 && !batch.writes[i - 1].hasTransform() && batch.writes[i - 1].hasUpdate(), + 'TransformMutation should be preceded by a patch or set mutation', + ); + final proto.Write newMutationBuilder = batch.writes[i - 1].toBuilder(); + newMutationBuilder.updateTransforms.addAll(mutation.transform.fieldTransforms); + + mutations.add(rpcSerializer.decodeMutation(newMutationBuilder.freeze())); + --i; + } else { + mutations.add(rpcSerializer.decodeMutation(mutation)); + } } + return MutationBatch( batchId: batchId, localWriteTime: localWriteTime, baseMutations: baseMutations, - mutations: mutations, + // Reverse the mutations to preserve the original ordering since the above for-loop iterates in + // reverse order. We use reverse() instead of prepending the elements into the mutations array + // since prepending to a List is O(n). 
+ mutations: mutations.reversed.toList(), ); } - proto.Target encodeQueryData(QueryData queryData) { + proto.Target encodeTargetData(TargetData targetData) { hardAssert( - queryData.purpose == QueryPurpose.listen, - 'Only queries with purpose ${QueryPurpose.listen} ' - 'may be stored, got ${queryData.purpose}'); + targetData.purpose == QueryPurpose.listen, + 'Only queries with purpose ${QueryPurpose.listen} may be stored, got ${targetData.purpose}', + ); final proto.Target result = proto.Target.create() - ..targetId = queryData.targetId - ..lastListenSequenceNumber = Int64(queryData.sequenceNumber) - ..snapshotVersion = rpcSerializer.encodeVersion(queryData.snapshotVersion) - ..resumeToken = queryData.resumeToken; - - final Query query = queryData.query; - if (query.isDocumentQuery) { - result.documents = rpcSerializer.encodeDocumentsTarget(query); + ..targetId = targetData.targetId + ..lastListenSequenceNumber = Int64(targetData.sequenceNumber) + ..lastLimboFreeSnapshotVersion = rpcSerializer.encodeVersion(targetData.lastLimboFreeSnapshotVersion) + ..snapshotVersion = rpcSerializer.encodeVersion(targetData.snapshotVersion) + ..resumeToken = targetData.resumeToken; + + final Target target = targetData.target; + if (target.isDocumentQuery) { + result.documents = rpcSerializer.encodeDocumentsTarget(target); } else { - result.query = rpcSerializer.encodeQueryTarget(query); + result.query = rpcSerializer.encodeQueryTarget(target); } return result..freeze(); } - QueryData decodeQueryData(proto.Target target) { - final int targetId = target.targetId; - - final SnapshotVersion version = - rpcSerializer.decodeVersion(target.snapshotVersion); - final Uint8List resumeToken = Uint8List.fromList(target.resumeToken); - final int sequenceNumber = target.lastListenSequenceNumber.toInt(); - - Query query; - - if (target.hasDocuments()) { - query = rpcSerializer.decodeDocumentsTarget(target.documents); - } else if (target.hasQuery()) { - query = rpcSerializer.decodeQueryTarget(target.query); - } else { - throw fail('Unknown targetType $target}'); + TargetData decodeTargetData(proto.Target targetProto) { + final int targetId = targetProto.targetId; + + final SnapshotVersion version = rpcSerializer.decodeVersion(targetProto.snapshotVersion); + final SnapshotVersion lastLimboFreeSnapshotVersion = + rpcSerializer.decodeVersion(targetProto.lastLimboFreeSnapshotVersion); + final Uint8List resumeToken = Uint8List.fromList(targetProto.resumeToken); + final int sequenceNumber = targetProto.lastListenSequenceNumber.toInt(); + + Target target; + switch (targetProto.whichTargetType()) { + case proto.Target_TargetType.documents: + target = rpcSerializer.decodeDocumentsTarget(targetProto.documents); + break; + case proto.Target_TargetType.query: + target = rpcSerializer.decodeQueryTarget(targetProto.query); + break; + default: + throw fail('Unknown targetType $targetProto}'); } - return QueryData( - query, + return TargetData( + target, targetId, sequenceNumber, QueryPurpose.listen, version, + lastLimboFreeSnapshotVersion, resumeToken, ); } diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/local_store.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/local_store.dart index 61ca0e19..a0426374 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/local_store.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/local_store.dart @@ -9,19 +9,21 @@ import 
'package:_firebase_database_collection_vm/_firebase_database_collection_v import 'package:_firebase_internal_vm/_firebase_internal_vm.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/auth/user.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/core/query.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/core/target.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/core/target_id_generator.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/local/local_documents_view.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/local/local_view_changes.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/local/local_write_result.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistance/mutation_queue.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistance/persistence.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistance/query_cache.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistance/remote_document_cache.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/query_data.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/lru_garbage_collector.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/mutation_queue.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/persistence.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/remote_document_cache.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/target_cache.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/target_data.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/local/query_engine.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/local/query_purpose.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/query_result.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/local/reference_set.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/simple_query_engine.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/document.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/document_key.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/maybe_document.dart'; @@ -30,15 +32,15 @@ import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/mutatio import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/mutation_batch_result.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/patch_mutation.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/precondition.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/no_document.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/object_value.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/snapshot_version.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/model/value/field_value.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/remote/remote_event.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/remote/remote_store.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/remote/target_change.dart'; import 
'package:cloud_firestore_vm/src/firebase/firestore/util/assert.dart'; import 'package:cloud_firestore_vm/src/firebase/timestamp.dart'; - -import 'lru_garbage_collector.dart'; +import 'package:meta/meta.dart'; /// Local storage in the Firestore client. Coordinates persistence components like the mutation queue and remote /// document cache to present a latency compensated view of stored data. @@ -72,50 +74,49 @@ import 'lru_garbage_collector.dart'; /// The [LocalStore] must be able to efficiently execute queries against its local cache of the documents, to provide /// the initial set of results before any remote changes have been received. class LocalStore { - factory LocalStore(Persistence persistence, User initialUser) { - hardAssert(persistence.started, - 'LocalStore was passed an unstarted persistence implementation'); - - final QueryCache queryCache = persistence.queryCache; - final TargetIdGenerator targetIdGenerator = - TargetIdGenerator.forQueryCache(queryCache.highestTargetId); - final MutationQueue mutationQueue = - persistence.getMutationQueue(initialUser); + factory LocalStore(Persistence persistence, QueryEngine queryEngine, User initialUser) { + hardAssert(persistence.started, 'LocalStore was passed an unstarted persistence implementation'); + + final TargetCache targetCache = persistence.targetCache; + final TargetIdGenerator targetIdGenerator = TargetIdGenerator.forTargetCache(targetCache.highestTargetId); + final MutationQueue mutationQueue = persistence.getMutationQueue(initialUser); final RemoteDocumentCache remoteDocuments = persistence.remoteDocumentCache; final LocalDocumentsView localDocuments = LocalDocumentsView( remoteDocumentCache: remoteDocuments, mutationQueue: mutationQueue, indexManager: persistence.indexManager, ); - // TODO(long1eu): Use IndexedQueryEngine as appropriate. - final SimpleQueryEngine queryEngine = SimpleQueryEngine(localDocuments); + + queryEngine.localDocumentsView = localDocuments; final ReferenceSet localViewReferences = ReferenceSet(); persistence.referenceDelegate.inMemoryPins = localViewReferences; return LocalStore._( persistence, - queryCache, + targetCache, targetIdGenerator, mutationQueue, remoteDocuments, localDocuments, queryEngine, localViewReferences, - {}, + {}, + {}, ); } LocalStore._( this._persistence, - this._queryCache, + this._targetCache, this._targetIdGenerator, this._mutationQueue, this._remoteDocuments, this._localDocuments, this._queryEngine, this._localViewReferences, - this._targetIds, + this._queryDataByTarget, + this._targetIdByTarget, ); /// The maximum time to leave a resume token buffered without writing it out. @@ -123,8 +124,7 @@ class LocalStore { /// This value is arbitrary: it's long enough to avoid several writes (possibly indefinitely if updates come more /// frequently than this) but short enough that restarting after crashing will still have a pretty recent resume /// token. - static final int _resultTokenMaxAgeSeconds = - const Duration(minutes: 5).inSeconds; + static final int _resultTokenMaxAgeSeconds = const Duration(minutes: 5).inSeconds; /// Manages our in-memory or durable persistence. final Persistence _persistence; @@ -146,10 +146,13 @@ class LocalStore { final ReferenceSet _localViewReferences; /// Maps a query to the data about that query. - final QueryCache _queryCache; + final TargetCache _targetCache; /// Maps a targetId to data about its query. - final Map _targetIds; + final Map _queryDataByTarget; + + /// Maps a target to its targetID. 
+ final Map _targetIdByTarget; /// Used to generate targetIds for queries tracked locally. final TargetIdGenerator _targetIdGenerator; @@ -159,22 +162,18 @@ class LocalStore { } Future startMutationQueue() { - return _persistence.runTransaction( - 'Start MutationQueue', _mutationQueue.start); + return _persistence.runTransaction('Start MutationQueue', _mutationQueue.start); } // PORTING NOTE: no shutdown for [LocalStore] or persistence components on Android. - Future> handleUserChange( - User user) async { + Future> handleUserChange(User user) async { // Swap out the mutation queue, grabbing the pending mutation batches before and after. - final List oldBatches = - await _mutationQueue.getAllMutationBatches(); + final List oldBatches = await _mutationQueue.getAllMutationBatches(); _mutationQueue = _persistence.getMutationQueue(user); await startMutationQueue(); - final List newBatches = - await _mutationQueue.getAllMutationBatches(); + final List newBatches = await _mutationQueue.getAllMutationBatches(); // Recreate our LocalDocumentsView using the new MutationQueue. _localDocuments = LocalDocumentsView( @@ -182,15 +181,11 @@ class LocalStore { mutationQueue: _mutationQueue, indexManager: _persistence.indexManager, ); - // TODO(long1eu): Use IndexedQueryEngine as appropriate. - _queryEngine = SimpleQueryEngine(_localDocuments); + _queryEngine.localDocumentsView = _localDocuments; // Union the old/new changed keys. ImmutableSortedSet changedKeys = DocumentKey.emptyKeySet; - for (List batches in >[ - oldBatches, - newBatches - ]) { + for (List batches in >[oldBatches, newBatches]) { for (MutationBatch batch in batches) { for (Mutation mutation in batch.mutations) { changedKeys = changedKeys.insert(mutation.key); @@ -223,18 +218,15 @@ class LocalStore { // backend sends us an update that already includes our transform. 
final List baseMutations = []; for (Mutation mutation in mutations) { - final ObjectValue baseValue = - mutation.extractBaseValue(existingDocuments[mutation.key]); + final ObjectValue baseValue = mutation.extractTransformBaseValue(existingDocuments[mutation.key]); if (baseValue != null) { // NOTE: The base state should only be applied if there's some existing // document to override, so use a Precondition of exists=true - baseMutations.add(PatchMutation(mutation.key, baseValue, - baseValue.fieldMask, Precondition(exists: true))); + baseMutations.add(PatchMutation(mutation.key, baseValue, baseValue.fieldMask, Precondition(exists: true))); } } - final MutationBatch batch = await _mutationQueue.addMutationBatch( - localWriteTime, baseMutations, mutations); + final MutationBatch batch = await _mutationQueue.addMutationBatch(localWriteTime, baseMutations, mutations); final ImmutableSortedMap changedDocuments = batch.applyToLocalDocumentSet(existingDocuments); return LocalWriteResult(batch.batchId, changedDocuments); @@ -251,10 +243,8 @@ class LocalStore { /// (there may be mutations in the queue that affect the documents but /// haven't been acknowledged yet); and /// * give the changed documents back the sync engine - Future> acknowledgeBatch( - MutationBatchResult batchResult) { - return _persistence.runTransactionAndReturn< - ImmutableSortedMap>('Acknowledge batch', + Future> acknowledgeBatch(MutationBatchResult batchResult) { + return _persistence.runTransactionAndReturn>('Acknowledge batch', () async { final MutationBatch batch = batchResult.batch; await _mutationQueue.acknowledgeBatch(batch, batchResult.streamToken); @@ -265,14 +255,11 @@ class LocalStore { } /// Removes mutations from the [MutationQueue] for the specified batch. LocalDocuments will be recalculated. - Future> rejectBatch( - int batchId) { + Future> rejectBatch(int batchId) { // TODO(long1eu): Call queryEngine.handleDocumentChange() appropriately. - return _persistence.runTransactionAndReturn< - ImmutableSortedMap>('Reject batch', + return _persistence.runTransactionAndReturn>('Reject batch', () async { - final MutationBatch toReject = - await _mutationQueue.lookupMutationBatch(batchId); + final MutationBatch toReject = await _mutationQueue.lookupMutationBatch(batchId); hardAssert(toReject != null, 'Attempt to reject nonexistent batch!'); await _mutationQueue.removeMutationBatch(toReject); @@ -281,6 +268,10 @@ class LocalStore { }); } + /// Returns the largest (latest) batch id in mutation queue that is pending server response. + /// Returns [MutationBatch.unknown] if the queue is empty. + Future getHighestUnacknowledgedBatchId() => _mutationQueue.getHighestUnacknowledgedBatchId(); + /// Returns the last recorded stream token for the current user. Uint8List get lastStreamToken => _mutationQueue.lastStreamToken; @@ -289,146 +280,133 @@ class LocalStore { /// /// Use [WriteStream.emptyStreamToken] to clear the current value. Future setLastStreamToken(Uint8List streamToken) async { - await _persistence.runTransaction('Set stream token', - () => _mutationQueue.setLastStreamToken(streamToken)); + await _persistence.runTransaction('Set stream token', () => _mutationQueue.setLastStreamToken(streamToken)); } /// Returns the last consistent snapshot processed (used by the [RemoteStore] to determine whether to buffer incoming /// snapshots from the backend). 
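A minimal construction sketch for the new three-argument LocalStore factory introduced in this file. It assumes DefaultQueryEngine (added in local/default_query_engine.dart in this change) takes no constructor arguments and that the auth package exposes an unauthenticated initial user; the helper name below is illustrative and not part of this patch:

Future<LocalStore> buildLocalStore(Persistence persistence) async {
  // The factory wires the query engine to a LocalDocumentsView internally.
  final LocalStore localStore =
      LocalStore(persistence, DefaultQueryEngine(), User.unauthenticated);
  // Start the mutation queue before serving local reads and writes.
  await localStore.startMutationQueue();
  return localStore;
}
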
SnapshotVersion getLastRemoteSnapshotVersion() { - return _queryCache.lastRemoteSnapshotVersion; + return _targetCache.lastRemoteSnapshotVersion; } /// Updates the 'ground-state' (remote) documents. We assume that the remote event reflects any write batches that /// have been acknowledged or rejected (i.e. we do not re-apply local mutations to updates from this event). /// /// [LocalDocuments] are re-calculated if there are remaining mutations in the queue. - Future> applyRemoteEvent( - RemoteEvent remoteEvent) { - // TODO(long1eu): Call queryEngine.handleDocumentChange() appropriately. - return _persistence.runTransactionAndReturn< - ImmutableSortedMap>( - 'Apply remote event', () async { - final int sequenceNumber = - _persistence.referenceDelegate.currentSequenceNumber; - final Set authoritativeUpdates = {}; - - final Map targetChanges = remoteEvent.targetChanges; - for (MapEntry entry in targetChanges.entries) { - final int targetId = entry.key; - final TargetChange change = entry.value; - - // Do not ref/unref unassigned targetIds - it may lead to leaks. - QueryData queryData = _targetIds[targetId]; - if (queryData == null) { - continue; - } + Future> applyRemoteEvent(RemoteEvent remoteEvent) { + final SnapshotVersion remoteVersion = remoteEvent.snapshotVersion; + + // TODO: Call queryEngine.handleDocumentChange() appropriately. + return _persistence.runTransactionAndReturn>( + 'Apply remote event', + () async { + final Map targetChanges = remoteEvent.targetChanges; + final int sequenceNumber = _persistence.referenceDelegate.currentSequenceNumber; + + for (MapEntry entry in targetChanges.entries) { + final int boxedTargetId = entry.key; + final int targetId = boxedTargetId; + final TargetChange change = entry.value; + + final TargetData oldTargetData = _queryDataByTarget[targetId]; + if (oldTargetData == null) { + // We don't update the remote keys if the query is not active. This ensures that + // we persist the updated query data along with the updated assignment. + continue; + } - // When a global snapshot contains updates (either add or modify) we can completely trust these updates as - // authoritative and blindly apply them to our cache (as a defensive measure to promote self-healing in the - // unfortunate case that our cache is ever somehow corrupted / out-of-sync). - // - // If the document is only updated while removing it from a target then watch isn't obligated to send the - // absolute latest version: it can send the first version that caused the document not to match. - change.addedDocuments.forEach(authoritativeUpdates.add); - change.modifiedDocuments.forEach(authoritativeUpdates.add); - - await _queryCache.removeMatchingKeys(change.removedDocuments, targetId); - await _queryCache.addMatchingKeys(change.addedDocuments, targetId); - - // Update the resume token if the change includes one. Don't clear any preexisting value. 
- final Uint8List resumeToken = change.resumeToken; - if (resumeToken.isNotEmpty) { - final QueryData oldQueryData = queryData; - queryData = queryData.copyWith( - snapshotVersion: remoteEvent.snapshotVersion, - resumeToken: resumeToken, - sequenceNumber: sequenceNumber, - ); - _targetIds[targetId] = queryData; - - if (_shouldPersistQueryData(oldQueryData, queryData, change)) { - await _queryCache.updateQueryData(queryData); + await _targetCache.removeMatchingKeys(change.removedDocuments, targetId); + await _targetCache.addMatchingKeys(change.addedDocuments, targetId); + + final Uint8List resumeToken = change.resumeToken; + // Update the resume token if the change includes one. + if (resumeToken.isNotEmpty) { + final TargetData newTargetData = oldTargetData.copyWith( + snapshotVersion: remoteEvent.snapshotVersion, + resumeToken: resumeToken, + sequenceNumber: sequenceNumber, + ); + _queryDataByTarget[targetId] = newTargetData; + + // Update the query data if there are target changes (or if sufficient time has + // passed since the last update). + if (_shouldPersistTargetData(oldTargetData, newTargetData, change)) { + await _targetCache.updateTargetData(newTargetData); + } } } - } - final Map changedDocs = - {}; - final Map documentUpdates = - remoteEvent.documentUpdates; - final Set limboDocuments = - remoteEvent.resolvedLimboDocuments; - // Each loop iteration only affects its "own" doc, so it's safe to get all the remote documents in advance in a - // single call. - final Map existingDocs = - await _remoteDocuments.getAll(documentUpdates.keys); - for (MapEntry entry - in documentUpdates.entries) { - final DocumentKey key = entry.key; - final MaybeDocument doc = entry.value; - final MaybeDocument existingDoc = existingDocs[key]; - - // If a document update isn't authoritative, make sure we don't apply an old document version to the remote - // cache. We make an exception for SnapshotVersion.MIN which can happen for manufactured events (e.g. in the - // case of a limbo document resolution failing). - if (existingDoc == null || - doc.version == SnapshotVersion.none || - (authoritativeUpdates.contains(doc.key) && - !existingDoc.hasPendingWrites) || - doc.version.compareTo(existingDoc.version) >= 0) { - await _remoteDocuments.add(doc); - changedDocs[key] = doc; - } else { - Log.d( + final Map changedDocs = {}; + final Map documentUpdates = remoteEvent.documentUpdates; + final Set limboDocuments = remoteEvent.resolvedLimboDocuments; + // Each loop iteration only affects its "own" doc, so it's safe to get all the remote documents in advance in a + // single call. + final Map existingDocs = await _remoteDocuments.getAll(documentUpdates.keys); + for (MapEntry entry in documentUpdates.entries) { + final DocumentKey key = entry.key; + final MaybeDocument doc = entry.value; + final MaybeDocument existingDoc = existingDocs[key]; + + // Note: The order of the steps below is important, since we want to ensure that + // rejected limbo resolutions (which fabricate NoDocuments with SnapshotVersion.NONE) + // never add documents to cache. + if (doc is NoDocument && doc.version == SnapshotVersion.none) { + // NoDocuments with SnapshotVersion.NONE are used in manufactured events. We remove + // these documents from cache since we lost access. 
+ await _remoteDocuments.remove(doc.key); + changedDocs[key] = doc; + } else if (existingDoc == null || + doc.version.compareTo(existingDoc.version) > 0 || + (doc.version.compareTo(existingDoc.version) == 0 && existingDoc.hasPendingWrites)) { + hardAssert(SnapshotVersion.none != remoteEvent.snapshotVersion, + 'Cannot add a document when the remote version is zero'); + await _remoteDocuments.add(doc, remoteEvent.snapshotVersion); + changedDocs[key] = doc; + } else { + Log.d( 'LocalStore', - 'Ignoring outdated watch update for $key. Current version: ${existingDoc.version} Watch version: ' - '${doc.version}'); - } + 'Ignoring outdated watch update for $key. Current version: ${existingDoc.version} Watch version: ${doc.version}', + ); + } - if (limboDocuments.contains(key)) { - await _persistence.referenceDelegate.updateLimboDocument(key); + if (limboDocuments.contains(key)) { + await _persistence.referenceDelegate.updateLimboDocument(key); + } } - } - // HACK: The only reason we allow snapshot version none is so that we can synthesize remote events when we get - // permission denied errors while trying to resolve the state of a locally cached document that is in limbo. - final SnapshotVersion lastRemoteVersion = - _queryCache.lastRemoteSnapshotVersion; - final SnapshotVersion remoteVersion = remoteEvent.snapshotVersion; - if (remoteVersion != SnapshotVersion.none) { - hardAssert(remoteVersion.compareTo(lastRemoteVersion) >= 0, - 'Watch stream reverted to previous snapshot?? ($remoteVersion < $lastRemoteVersion)'); - await _queryCache.setLastRemoteSnapshotVersion(remoteVersion); - } + // HACK: The only reason we allow snapshot version none is so that we can synthesize remote events when we get + // permission denied errors while trying to resolve the state of a locally cached document that is in limbo. + final SnapshotVersion lastRemoteVersion = _targetCache.lastRemoteSnapshotVersion; + if (remoteVersion != SnapshotVersion.none) { + hardAssert(remoteVersion.compareTo(lastRemoteVersion) >= 0, + 'Watch stream reverted to previous snapshot?? ($remoteVersion < $lastRemoteVersion)'); + await _targetCache.setLastRemoteSnapshotVersion(remoteVersion); + } - return _localDocuments.getLocalViewOfDocuments(changedDocs); - }); + return _localDocuments.getLocalViewOfDocuments(changedDocs); + }, + ); } - /// Returns true if the [newQueryData] should be persisted during an update of an active target. [QueryData] should + /// Returns true if the [newTargetData] should be persisted during an update of an active target. [TargetData] should /// always be persisted when a target is being released and should not call this function. /// - /// While the target is active, [QueryData] updates can be omitted when nothing about the target has changed except + /// While the target is active, [TargetData] updates can be omitted when nothing about the target has changed except /// metadata like the resume token or snapshot version. Occasionally it's worth the extra write to prevent these /// values from getting too stale after a crash, but this doesn't have to be too frequent. 
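The persistence rule in the doc comment above can be restated as a small standalone sketch; the function and parameter names are illustrative stand-ins for the TargetData/TargetChange values used by the real method below:

bool shouldPersistResumeTokenSketch(
    bool oldTokenEmpty, int secondsSinceLastPersistedToken, int changedDocumentCount) {
  const int maxAgeSeconds = 5 * 60; // mirrors _resultTokenMaxAgeSeconds
  if (oldTokenEmpty) {
    // Persist the first resume token we ever receive for the target.
    return true;
  }
  if (secondsSinceLastPersistedToken >= maxAgeSeconds) {
    // Don't let a buffered token get arbitrarily stale across crashes.
    return true;
  }
  // A resume-token-only change is otherwise not worth an extra write.
  return changedDocumentCount > 0;
}
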
- static bool _shouldPersistQueryData( - QueryData oldQueryData, QueryData newQueryData, TargetChange change) { - // Avoid clearing any existing value - if (newQueryData.resumeToken.isEmpty) { - return false; - } + static bool _shouldPersistTargetData(TargetData oldTargetData, TargetData newTargetData, TargetChange change) { + hardAssert(newTargetData.resumeToken.isNotEmpty, 'Attempted to persist query data with empty resume token'); - // Any resume token is interesting if there isn't one already. - if (oldQueryData.resumeToken.isEmpty) { + // Always persist query data if we don't already have a resume token. + if (oldTargetData.resumeToken.isEmpty) { return true; } // Don't allow resume token changes to be buffered indefinitely. This allows us to be reasonably up-to-date after a // crash and avoids needing to loop over all active queries on shutdown. Especially in the browser we may not get // time to do anything interesting while the current tab is closing. - final int newSeconds = newQueryData.snapshotVersion.timestamp.seconds; - final int oldSeconds = oldQueryData.snapshotVersion.timestamp.seconds; + final int newSeconds = newTargetData.snapshotVersion.timestamp.seconds; + final int oldSeconds = oldTargetData.snapshotVersion.timestamp.seconds; final int timeDelta = newSeconds - oldSeconds; if (timeDelta >= _resultTokenMaxAgeSeconds) { return true; @@ -437,24 +415,33 @@ class LocalStore { // Otherwise if the only thing that has changed about a target is its resume token it's not worth persisting. Note // that the [RemoteStore] keeps an in-memory view of the currently active targets which includes the current resume // token, so stream failure or user changes will still use an up-to-date resume token regardless of what we do here. - final int changes = change.addedDocuments.length + - change.modifiedDocuments.length + - change.removedDocuments.length; + final int changes = change.addedDocuments.length + change.modifiedDocuments.length + change.removedDocuments.length; return changes > 0; } /// Notify the local store of the changed views to locally pin / unpin documents. - Future notifyLocalViewChanges( - List viewChanges) async { + Future notifyLocalViewChanges(List viewChanges) async { await _persistence.runTransaction('notifyLocalViewChanges', () async { for (LocalViewChanges viewChange in viewChanges) { - _localViewReferences.addReferences( - viewChange.added, viewChange.targetId); + final int targetId = viewChange.targetId; + + _localViewReferences.addReferences(viewChange.added, targetId); final ImmutableSortedSet removed = viewChange.removed; for (DocumentKey key in removed) { await _persistence.referenceDelegate.removeReference(key); } - _localViewReferences.removeReferences(removed, viewChange.targetId); + _localViewReferences.removeReferences(removed, targetId); + + if (!viewChange.fromCache) { + final TargetData targetData = _queryDataByTarget[targetId]; + hardAssert(targetData != null, "Can't set limbo-free snapshot version for unknown target: $targetId"); + + // Advance the last limbo free snapshot version + final SnapshotVersion lastLimboFreeSnapshotVersion = targetData.snapshotVersion; + final TargetData updatedTargetData = + targetData.copyWith(lastLimboFreeSnapshotVersion: lastLimboFreeSnapshotVersion); + _queryDataByTarget[targetId] = updatedTargetData; + } } }); } @@ -475,94 +462,112 @@ class LocalStore { /// Assigns the given query an internal id so that its results can be pinned so they don't get GC'd. 
A query must be /// allocated in the local store before the store can be used to manage its view. - Future allocateQuery(Query query) async { + Future allocateTarget(Target target) async { int targetId; - QueryData cached = await _queryCache.getQueryData(query); + TargetData cached = await _targetCache.getTargetData(target); if (cached != null) { // This query has been listened to previously, so reuse the previous targetId. // TODO(long1eu): freshen last accessed date? targetId = cached.targetId; } else { - await _persistence.runTransaction('Allocate query', () async { + await _persistence.runTransaction('Allocate target', () async { targetId = _targetIdGenerator.nextId; - cached = QueryData( - query, + cached = TargetData( + target, targetId, _persistence.referenceDelegate.currentSequenceNumber, QueryPurpose.listen, ); - await _queryCache.addQueryData(cached); + await _targetCache.addTargetData(cached); }); } - // Sanity check to ensure that even when resuming a query it's not currently active. - hardAssert(_targetIds[targetId] == null, - 'Tried to allocate an already allocated query: $query'); - _targetIds[targetId] = cached; + if (_queryDataByTarget[targetId] == null) { + _queryDataByTarget[targetId] = cached; + _targetIdByTarget[target] = targetId; + } return cached; } - /// Unpin all the documents associated with the given query. - Future releaseQuery(Query query) { - return _persistence.runTransaction('Release query', () async { - QueryData queryData = await _queryCache.getQueryData(query); - hardAssert( - queryData != null, 'Tried to release nonexistent query: $query'); - - final int targetId = queryData.targetId; - final QueryData cachedQueryData = _targetIds[targetId]; - if (cachedQueryData.snapshotVersion.compareTo(queryData.snapshotVersion) > - 0) { - // If we've been avoiding persisting the [resumeToken] (see [shouldPersistQueryData] for conditions and - // rationale) we need to persist the token now because there will no longer be an in-memory version to fall back - // on. - queryData = cachedQueryData; - await _queryCache.updateQueryData(queryData); - } + /// Returns the [TargetData] as seen by the [LocalStore], including updates that may have not yet been + /// persisted to the [TargetCache]. + @visibleForTesting + Future getTargetData(Target target) async { + final int targetId = _targetIdByTarget[target]; + if (targetId != null) { + return _queryDataByTarget[targetId]; + } + return _targetCache.getTargetData(target); + } + + /// Unpin all the documents associated with the given target. + /// + /// Releasing a non-existing target is an error. + Future releaseTarget(int targetId) { + return _persistence.runTransaction('Release target', () async { + final TargetData targetData = _queryDataByTarget[targetId]; + hardAssert(targetData != null, 'Tried to release nonexistent target: $targetId'); // References for documents sent via Watch are automatically removed when we delete a query's target data from the // reference delegate. Since this does not remove references for locally mutated documents, we have to remove the // target associations for these documents manually. 
- final ImmutableSortedSet removedReferences = - _localViewReferences.removeReferencesForId(queryData.targetId); + final ImmutableSortedSet removedReferences = _localViewReferences.removeReferencesForId(targetId); for (DocumentKey key in removedReferences) { await _persistence.referenceDelegate.removeReference(key); } - await _persistence.referenceDelegate.removeTarget(queryData); - _targetIds.remove(queryData.targetId); + + // Note: This also updates the query cache + await _persistence.referenceDelegate.removeTarget(targetData); + _queryDataByTarget.remove(targetId); + _targetIdByTarget.remove(targetData.target); }); } - /// Runs the given query against all the documents in the local store and returns the results. - Future> executeQuery(Query query) { - return _queryEngine.getDocumentsMatchingQuery(query); + /// Runs the specified query against the local store and returns the results, potentially taking + /// advantage of query data from previous executions (such as the set of remote keys). + /// + /// Set [usePreviousResults] to true in order to use results from previous executions can be used to optimize + /// this query execution. + Future executeQuery(Query query, bool usePreviousResults) async { + final TargetData targetData = await getTargetData(query.toTarget()); + SnapshotVersion lastLimboFreeSnapshotVersion = SnapshotVersion.none; + ImmutableSortedSet remoteKeys = DocumentKey.emptyKeySet; + + if (targetData != null) { + lastLimboFreeSnapshotVersion = targetData.lastLimboFreeSnapshotVersion; + remoteKeys = await _targetCache.getMatchingKeysForTargetId(targetData.targetId); + } + + final ImmutableSortedMap documents = await _queryEngine.getDocumentsMatchingQuery( + query, + usePreviousResults ? lastLimboFreeSnapshotVersion : SnapshotVersion.none, + usePreviousResults ? remoteKeys : DocumentKey.emptyKeySet, + ); + return QueryResult(documents, remoteKeys); } /// Returns the keys of the documents that are associated with the given target id in the remote table. 
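A hedged caller-side sketch for the reworked executeQuery above; the QueryResult accessor name (documents) and the generic arguments on ImmutableSortedMap are assumed from local/query_result.dart rather than spelled out in this hunk:

Future<void> printLocalQueryResults(LocalStore localStore, Query query) async {
  // Passing true lets the query engine reuse the target's remote keys and last
  // limbo-free snapshot version when the target was previously listened to.
  final QueryResult result = await localStore.executeQuery(query, true);
  for (MapEntry<DocumentKey, Document> entry in result.documents) {
    print(entry.key);
  }
}
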
Future> getRemoteDocumentKeys(int targetId) { - return _queryCache.getMatchingKeysForTargetId(targetId); + return _targetCache.getMatchingKeysForTargetId(targetId); } - Future _applyWriteToRemoteDocuments( - MutationBatchResult batchResult) async { + Future _applyWriteToRemoteDocuments(MutationBatchResult batchResult) async { final MutationBatch batch = batchResult.batch; final Set docKeys = batch.keys; for (DocumentKey docKey in docKeys) { final MaybeDocument remoteDoc = await _remoteDocuments.get(docKey); MaybeDocument doc = remoteDoc; final SnapshotVersion ackVersion = batchResult.docVersions[docKey]; - hardAssert(ackVersion != null, - 'docVersions should contain every doc in the write.'); + hardAssert(ackVersion != null, 'docVersions should contain every doc in the write.'); if (doc == null || doc.version.compareTo(ackVersion) < 0) { doc = batch.applyToRemoteDocument(docKey, doc, batchResult); if (doc == null) { - hardAssert(remoteDoc == null, - 'Mutation batch $batch applied to document $remoteDoc resulted in null.'); + hardAssert(remoteDoc == null, 'Mutation batch $batch applied to document $remoteDoc resulted in null.'); } else { - await _remoteDocuments.add(doc); + await _remoteDocuments.add(doc, batchResult.commitVersion); } } } @@ -570,9 +575,10 @@ class LocalStore { await _mutationQueue.removeMutationBatch(batch); } - Future collectGarbage( - LruGarbageCollector garbageCollector) { - return _persistence.runTransactionAndReturn('Collect garbage', - () => garbageCollector.collect(_targetIds.keys.toSet())); + Future collectGarbage(LruGarbageCollector garbageCollector) { + return _persistence.runTransactionAndReturn( + 'Collect garbage', + () => garbageCollector.collect(_queryDataByTarget.keys.toSet()), + ); } } diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/local_view_changes.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/local_view_changes.dart index b409d7a7..dfe01c1a 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/local_view_changes.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/local_view_changes.dart @@ -13,15 +13,13 @@ import 'package:cloud_firestore_vm/src/firebase/firestore/model/document_key.dar /// These changes are sent to the [LocalStore] by the [View] (via the [SyncEngine]) and are used to /// pin / unpin documents as appropriate. 
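On the producer side described by the class comment above, the sync engine can forward a view snapshot's key changes as in this sketch; the wrapper function is illustrative, while the factory and notifyLocalViewChanges signatures come from this patch:

Future<void> forwardViewChanges(
    LocalStore localStore, int targetId, ViewSnapshot snapshot) {
  // fromCache now travels with the change set, so LocalStore only advances the
  // target's last limbo-free snapshot version for up-to-date snapshots.
  final LocalViewChanges changes =
      LocalViewChanges.fromViewSnapshot(targetId, snapshot);
  return localStore.notifyLocalViewChanges(<LocalViewChanges>[changes]);
}
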
class LocalViewChanges { - const LocalViewChanges(this.targetId, this.added, this.removed); + const LocalViewChanges(this.targetId, this.fromCache, this.added, this.removed); - factory LocalViewChanges.fromViewSnapshot( - int targetId, ViewSnapshot snapshot) { - ImmutableSortedSet addedKeys = ImmutableSortedSet( - [], DocumentKey.comparator); + factory LocalViewChanges.fromViewSnapshot(int targetId, ViewSnapshot snapshot) { + ImmutableSortedSet addedKeys = + ImmutableSortedSet([], DocumentKey.comparator); ImmutableSortedSet removedKeys = - ImmutableSortedSet( - [], DocumentKey.comparator); + ImmutableSortedSet([], DocumentKey.comparator); for (DocumentViewChange docChange in snapshot.changes) { if (docChange.type == DocumentViewChangeType.added) { @@ -33,10 +31,11 @@ class LocalViewChanges { } } - return LocalViewChanges(targetId, addedKeys, removedKeys); + return LocalViewChanges(targetId, snapshot.isFromCache, addedKeys, removedKeys); } final int targetId; + final bool fromCache; final ImmutableSortedSet added; final ImmutableSortedSet removed; } diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/memory/memory_eager_reference_delegate.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/memory/memory_eager_reference_delegate.dart index 36d7f9d5..0b8d40f2 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/memory/memory_eager_reference_delegate.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/memory/memory_eager_reference_delegate.dart @@ -34,12 +34,12 @@ class MemoryEagerReferenceDelegate implements ReferenceDelegate { } @override - Future removeTarget(QueryData queryData) async { - final MemoryQueryCache queryCache = persistence.queryCache; - (await queryCache.getMatchingKeysForTargetId(queryData.targetId)) - .forEach(orphanedDocuments.add); - - await queryCache.removeQueryData(queryData); + Future removeTarget(TargetData queryData) async { + final MemoryTargetCache targetCache = persistence.targetCache; + await targetCache // + .getMatchingKeysForTargetId(queryData.targetId) + .then(orphanedDocuments.addAll); + await targetCache.removeTargetData(queryData); } @override @@ -50,8 +50,7 @@ class MemoryEagerReferenceDelegate implements ReferenceDelegate { /// In eager garbage collection, collection is run on transaction commit. @override Future onTransactionCommitted() async { - final MemoryRemoteDocumentCache remoteDocuments = - persistence.remoteDocumentCache; + final MemoryRemoteDocumentCache remoteDocuments = persistence.remoteDocumentCache; for (DocumentKey key in orphanedDocuments) { final bool isReferenced = await _isReferenced(key); @@ -83,7 +82,7 @@ class MemoryEagerReferenceDelegate implements ReferenceDelegate { /// Returns true if the given document is referenced by anything. 
Future _isReferenced(DocumentKey key) async { - final bool containsKey = await persistence.queryCache.containsKey(key); + final bool containsKey = await persistence.targetCache.containsKey(key); if (containsKey) { return true; } diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/memory/memory_lru_reference_delegate.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/memory/memory_lru_reference_delegate.dart index 51daf9fc..ba74c7a7 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/memory/memory_lru_reference_delegate.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/memory/memory_lru_reference_delegate.dart @@ -6,11 +6,9 @@ part of memory_persistence; /// Provides LRU garbage collection functionality for [MemoryPersistence]. class MemoryLruReferenceDelegate implements ReferenceDelegate, LruDelegate { - MemoryLruReferenceDelegate( - this.persistence, LruGarbageCollectorParams params, this.serializer) + MemoryLruReferenceDelegate(this.persistence, LruGarbageCollectorParams params, this.serializer) : orphanedSequenceNumbers = {}, - listenSequence = - ListenSequence(persistence.queryCache.highestListenSequenceNumber), + listenSequence = ListenSequence(persistence.targetCache.highestListenSequenceNumber), _currentSequenceNumber = ListenSequence.invalid { garbageCollector = LruGarbageCollector(this, params); } @@ -30,15 +28,14 @@ class MemoryLruReferenceDelegate implements ReferenceDelegate, LruDelegate { @override void onTransactionStarted() { - hardAssert(_currentSequenceNumber == ListenSequence.invalid, - 'Starting a transaction without committing the previous one'); + hardAssert( + _currentSequenceNumber == ListenSequence.invalid, 'Starting a transaction without committing the previous one'); _currentSequenceNumber = listenSequence.next; } @override Future onTransactionCommitted() async { - hardAssert(_currentSequenceNumber != ListenSequence.invalid, - 'Committing a transaction without having started one'); + hardAssert(_currentSequenceNumber != ListenSequence.invalid, 'Committing a transaction without having started one'); _currentSequenceNumber = ListenSequence.invalid; } @@ -50,22 +47,20 @@ class MemoryLruReferenceDelegate implements ReferenceDelegate, LruDelegate { } @override - Future forEachTarget(Consumer consumer) async { - await persistence.queryCache.forEachTarget(consumer); + Future forEachTarget(Consumer consumer) async { + await persistence.targetCache.forEachTarget(consumer); } @override Future getSequenceNumberCount() async { - final int targetCount = persistence.queryCache.targetCount; + final int targetCount = persistence.targetCache.targetCount; int orphanedCount = 0; - await forEachOrphanedDocumentSequenceNumber( - (int sequenceNumber) => orphanedCount++); + await forEachOrphanedDocumentSequenceNumber((int sequenceNumber) => orphanedCount++); return targetCount + orphanedCount; } @override - Future forEachOrphanedDocumentSequenceNumber( - Consumer consumer) async { + Future forEachOrphanedDocumentSequenceNumber(Consumer consumer) async { for (MapEntry entry in orphanedSequenceNumbers.entries) { // Pass in the exact sequence number as the upper bound so we know it won't be pinned by being too recent. 
final bool isPinned = await _isPinned(entry.key, entry.value); @@ -77,15 +72,15 @@ class MemoryLruReferenceDelegate implements ReferenceDelegate, LruDelegate { @override Future removeTargets(int upperBound, Set activeTargetIds) async { - return persistence.queryCache.removeQueries(upperBound, activeTargetIds); + return persistence.targetCache.removeQueries(upperBound, activeTargetIds); } @override Future removeOrphanedDocuments(int upperBound) async { int count = 0; final MemoryRemoteDocumentCache cache = persistence.remoteDocumentCache; - for (MapEntry entry in cache.documents) { - final DocumentKey key = entry.key; + for (MaybeDocument doc in cache.documents) { + final DocumentKey key = doc.key; if (!(await _isPinned(key, upperBound))) { await cache.remove(key); orphanedSequenceNumbers.remove(key); @@ -101,13 +96,9 @@ class MemoryLruReferenceDelegate implements ReferenceDelegate, LruDelegate { } @override - Future removeTarget(QueryData queryData) async { - final QueryData updated = queryData.copyWith( - snapshotVersion: queryData.snapshotVersion, - resumeToken: queryData.resumeToken, - sequenceNumber: currentSequenceNumber, - ); - await persistence.queryCache.updateQueryData(updated); + Future removeTarget(TargetData targetData) async { + final TargetData updated = targetData.copyWith(sequenceNumber: currentSequenceNumber); + await persistence.targetCache.updateTargetData(updated); } @override @@ -145,7 +136,7 @@ class MemoryLruReferenceDelegate implements ReferenceDelegate, LruDelegate { return true; } - if (await persistence.queryCache.containsKey(key)) { + if (await persistence.targetCache.containsKey(key)) { return true; } @@ -158,7 +149,7 @@ class MemoryLruReferenceDelegate implements ReferenceDelegate, LruDelegate { // Note that this method is only used for testing because this delegate is only used for testing. The algorithm here // (loop through everything, serialize it and count bytes) is inefficient and inexact, but won't run in production. 
int count = 0; - count += persistence.queryCache.getByteSize(serializer); + count += persistence.targetCache.getByteSize(serializer); count += persistence.remoteDocumentCache.getByteSize(serializer); for (MemoryMutationQueue queue in persistence.getMutationQueues()) { count += queue.getByteSize(serializer); diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/memory/memory_mutation_queue.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/memory/memory_mutation_queue.dart index 5de08285..48135ddf 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/memory/memory_mutation_queue.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/memory/memory_mutation_queue.dart @@ -5,10 +5,9 @@ part of memory_persistence; class MemoryMutationQueue implements MutationQueue { - MemoryMutationQueue(this.persistence, this._statsCollector) + MemoryMutationQueue(this.persistence) : _queue = [], - _batchesByDocumentKey = ImmutableSortedSet( - [], DocumentReference.byKey), + _batchesByDocumentKey = ImmutableSortedSet([], DocumentReference.byKey), _nextBatchId = 1, lastStreamToken = Uint8List(0); @@ -39,7 +38,6 @@ class MemoryMutationQueue implements MutationQueue { Uint8List lastStreamToken; final MemoryPersistence persistence; - final StatsCollector _statsCollector; // MutationQueue implementation @@ -62,17 +60,14 @@ class MemoryMutationQueue implements MutationQueue { } @override - Future acknowledgeBatch( - MutationBatch batch, Uint8List streamToken) async { + Future acknowledgeBatch(MutationBatch batch, Uint8List streamToken) async { final int batchId = batch.batchId; final int batchIndex = _indexOfExistingBatchId(batchId, 'acknowledged'); - hardAssert(batchIndex == 0, - 'Can only acknowledge the first batch in the mutation queue'); + hardAssert(batchIndex == 0, 'Can only acknowledge the first batch in the mutation queue'); // Verify that the batch in the queue is the one to be acknowledged. final MutationBatch check = _queue[batchIndex]; - hardAssert(batchId == check.batchId, - 'Queue ordering failure: expected batch $batchId, got batch ${check.batchId}'); + hardAssert(batchId == check.batchId, 'Queue ordering failure: expected batch $batchId, got batch ${check.batchId}'); lastStreamToken = checkNotNull(streamToken); } @@ -96,8 +91,7 @@ class MemoryMutationQueue implements MutationQueue { final int size = _queue.length; if (size > 0) { final MutationBatch prior = _queue[size - 1]; - hardAssert(prior.batchId < batchId, - 'Mutation batchIds must be monotonically increasing order'); + hardAssert(prior.batchId < batchId, 'Mutation batchIds must be monotonically increasing order'); } final MutationBatch batch = MutationBatch( @@ -110,20 +104,16 @@ class MemoryMutationQueue implements MutationQueue { // Track references by document key and index collection parents. 
for (Mutation mutation in mutations) { - _batchesByDocumentKey = _batchesByDocumentKey - .insert(DocumentReference(mutation.key, batchId)); + _batchesByDocumentKey = _batchesByDocumentKey.insert(DocumentReference(mutation.key, batchId)); - await persistence.indexManager - .addToCollectionParentIndex(mutation.key.path.popLast()); + await persistence.indexManager.addToCollectionParentIndex(mutation.key.path.popLast()); } - _statsCollector.recordRowsWritten(MutationQueue.statsTag, 1); return batch; } @override Future lookupMutationBatch(int batchId) async { - _statsCollector.recordRowsRead(MutationQueue.statsTag, 1); final int index = _indexOfBatchId(batchId); if (index < 0 || index >= _queue.length) { return null; @@ -144,19 +134,22 @@ class MemoryMutationQueue implements MutationQueue { return _queue.length > index ? _queue[index] : null; } + @override + Future getHighestUnacknowledgedBatchId() async { + return _queue.isEmpty ? MutationBatch.unknown : _nextBatchId - 1; + } + @override Future> getAllMutationBatches() async { return List.unmodifiable(_queue); } @override - Future> getAllMutationBatchesAffectingDocumentKey( - DocumentKey documentKey) async { + Future> getAllMutationBatchesAffectingDocumentKey(DocumentKey documentKey) async { final DocumentReference start = DocumentReference(documentKey, 0); final List result = []; - final Iterator iterator = - _batchesByDocumentKey.iteratorFrom(start); + final Iterator iterator = _batchesByDocumentKey.iteratorFrom(start); while (iterator.moveNext()) { final DocumentReference reference = iterator.current; if (documentKey != reference.key) { @@ -164,25 +157,20 @@ class MemoryMutationQueue implements MutationQueue { } final MutationBatch batch = await lookupMutationBatch(reference.id); - hardAssert( - batch != null, 'Batches in the index must exist in the main table'); + hardAssert(batch != null, 'Batches in the index must exist in the main table'); result.add(batch); } - _statsCollector.recordRowsRead(MutationQueue.statsTag, result.length); return result; } @override - Future> getAllMutationBatchesAffectingDocumentKeys( - Iterable documentKeys) async { - ImmutableSortedSet uniqueBatchIDs = - ImmutableSortedSet([], standardComparator()); + Future> getAllMutationBatchesAffectingDocumentKeys(Iterable documentKeys) async { + ImmutableSortedSet uniqueBatchIDs = ImmutableSortedSet([], standardComparator()); for (DocumentKey key in documentKeys) { final DocumentReference start = DocumentReference(key, 0); - final Iterator batchesIterator = - _batchesByDocumentKey.iteratorFrom(start); + final Iterator batchesIterator = _batchesByDocumentKey.iteratorFrom(start); while (batchesIterator.moveNext()) { final DocumentReference reference = batchesIterator.current; if (key != reference.key) { @@ -196,10 +184,8 @@ class MemoryMutationQueue implements MutationQueue { } @override - Future> getAllMutationBatchesAffectingQuery( - Query query) async { - hardAssert(!query.isCollectionGroupQuery, - 'CollectionGroup queries should be handled in LocalDocumentsView'); + Future> getAllMutationBatchesAffectingQuery(Query query) async { + hardAssert(!query.isCollectionGroupQuery, 'CollectionGroup queries should be handled in LocalDocumentsView'); // Use the query path as a prefix for testing if a document matches the query. 
final ResourcePath prefix = query.path; @@ -212,15 +198,12 @@ class MemoryMutationQueue implements MutationQueue { if (!DocumentKey.isDocumentKey(startPath)) { startPath = startPath.appendSegment(''); } - final DocumentReference start = - DocumentReference(DocumentKey.fromPath(startPath), 0); + final DocumentReference start = DocumentReference(DocumentKey.fromPath(startPath), 0); // Find unique [batchId]s referenced by all documents potentially matching the query. - ImmutableSortedSet uniqueBatchIDs = - ImmutableSortedSet([], standardComparator()); + ImmutableSortedSet uniqueBatchIDs = ImmutableSortedSet([], standardComparator()); - final Iterator iterator = - _batchesByDocumentKey.iteratorFrom(start); + final Iterator iterator = _batchesByDocumentKey.iteratorFrom(start); while (iterator.moveNext()) { final DocumentReference reference = iterator.current; final ResourcePath rowKeyPath = reference.key.path; @@ -239,8 +222,7 @@ class MemoryMutationQueue implements MutationQueue { return _lookupMutationBatches(uniqueBatchIDs); } - Future> _lookupMutationBatches( - ImmutableSortedSet batchIds) async { + Future> _lookupMutationBatches(ImmutableSortedSet batchIds) async { // Construct an array of matching batches, sorted by batchId to ensure that multiple mutations affecting the same // document key are applied in order. final List result = []; @@ -258,8 +240,7 @@ class MemoryMutationQueue implements MutationQueue { Future removeMutationBatch(MutationBatch batch) async { // Find the position of the first batch for removal. This need not be the first entry in the queue. final int batchIndex = _indexOfExistingBatchId(batch.batchId, 'removed'); - hardAssert(batchIndex == 0, - 'Can only remove the first entry of the mutation queue'); + hardAssert(batchIndex == 0, 'Can only remove the first entry of the mutation queue'); _queue.removeAt(0); @@ -279,8 +260,8 @@ class MemoryMutationQueue implements MutationQueue { @override Future performConsistencyCheck() async { if (_queue.isEmpty) { - hardAssert(_batchesByDocumentKey.isEmpty, - 'Document leak -- detected dangling mutation references when queue is empty.'); + hardAssert( + _batchesByDocumentKey.isEmpty, 'Document leak -- detected dangling mutation references when queue is empty.'); } } @@ -288,8 +269,7 @@ class MemoryMutationQueue implements MutationQueue { // Create a reference with a zero ID as the start position to find any document reference with this key. final DocumentReference reference = DocumentReference(key, 0); - final Iterator iterator = - _batchesByDocumentKey.iteratorFrom(reference); + final Iterator iterator = _batchesByDocumentKey.iteratorFrom(reference); if (!iterator.moveNext()) { return false; } @@ -329,8 +309,7 @@ class MemoryMutationQueue implements MutationQueue { /// batches). 
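For the getHighestUnacknowledgedBatchId override added to MemoryMutationQueue earlier in this hunk, a small hedged caller sketch (the helper name and log tag are illustrative):

Future<void> logPendingWrites(MutationQueue mutationQueue) async {
  final int batchId = await mutationQueue.getHighestUnacknowledgedBatchId();
  if (batchId == MutationBatch.unknown) {
    Log.d('MutationQueueSketch', 'No mutation batches are awaiting acknowledgement');
  } else {
    Log.d('MutationQueueSketch', 'Highest unacknowledged batch id: $batchId');
  }
}
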
int _indexOfExistingBatchId(int batchId, String action) { final int index = _indexOfBatchId(batchId); - hardAssert(index >= 0 && index < _queue.length, - 'Batches must exist to be $action'); + hardAssert(index >= 0 && index < _queue.length, 'Batches must exist to be $action'); return index; } diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/memory/memory_persistence.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/memory/memory_persistence.dart index 46cb977d..b65b1391 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/memory/memory_persistence.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/memory/memory_persistence.dart @@ -10,20 +10,20 @@ import 'dart:typed_data'; import 'package:_firebase_database_collection_vm/_firebase_database_collection_vm.dart'; import 'package:_firebase_internal_vm/_firebase_internal_vm.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/auth/user.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/core/listent_sequence.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/core/listen_sequence.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/core/query.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/core/target.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/local/document_reference.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/local/local_serializer.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/lru_delegate.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/lru_garbage_collector.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistance/index_manager.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistance/mutation_queue.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistance/persistence.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistance/query_cache.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistance/reference_delegate.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistance/remote_document_cache.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistance/stats_collector.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/query_data.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/index_manager.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/lru_delegate.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/lru_garbage_collector.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/mutation_queue.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/persistence.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/reference_delegate.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/remote_document_cache.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/target_cache.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/target_data.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/local/reference_set.dart'; import 
'package:cloud_firestore_vm/src/firebase/firestore/model/document.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/document_collections.dart'; @@ -42,39 +42,32 @@ part 'memory_eager_reference_delegate.dart'; part 'memory_index_manager.dart'; part 'memory_lru_reference_delegate.dart'; part 'memory_mutation_queue.dart'; -part 'memory_query_cache.dart'; part 'memory_remote_document_cache.dart'; +part 'memory_target_cache.dart'; class MemoryPersistence extends Persistence { /// Use factory constructors to instantiate - MemoryPersistence._(StatsCollector statsCollector) - : _statsCollector = statsCollector ?? StatsCollector.noOp, - mutationQueues = {}, + MemoryPersistence._() + : mutationQueues = {}, _semaphore = GlobalSemaphore(), indexManager = MemoryIndexManager() { - queryCache = MemoryQueryCache(this); - remoteDocumentCache = MemoryRemoteDocumentCache(this, _statsCollector); + targetCache = MemoryTargetCache(this); + remoteDocumentCache = MemoryRemoteDocumentCache(this); } - factory MemoryPersistence.createEagerGcMemoryPersistence([ - StatsCollector statsCollector = StatsCollector.noOp, - ]) { - final MemoryPersistence persistence = MemoryPersistence._(statsCollector); + factory MemoryPersistence.createEagerGcMemoryPersistence() { + final MemoryPersistence persistence = MemoryPersistence._(); persistence.referenceDelegate = MemoryEagerReferenceDelegate(persistence); return persistence; } - factory MemoryPersistence.createLruGcMemoryPersistence( - LruGarbageCollectorParams params, LocalSerializer serializer, - [StatsCollector statsCollector = StatsCollector.noOp]) { - final MemoryPersistence persistence = MemoryPersistence._(statsCollector); - persistence.referenceDelegate = - MemoryLruReferenceDelegate(persistence, params, serializer); + factory MemoryPersistence.createLruGcMemoryPersistence(LruGarbageCollectorParams params, LocalSerializer serializer) { + final MemoryPersistence persistence = MemoryPersistence._(); + persistence.referenceDelegate = MemoryLruReferenceDelegate(persistence, params, serializer); return persistence; } final Semaphore _semaphore; - final StatsCollector _statsCollector; @override final MemoryIndexManager indexManager; @@ -90,7 +83,7 @@ class MemoryPersistence extends Persistence { MemoryRemoteDocumentCache remoteDocumentCache; @override - MemoryQueryCache queryCache; + MemoryTargetCache targetCache; @override ReferenceDelegate referenceDelegate; @@ -120,7 +113,7 @@ class MemoryPersistence extends Persistence { MutationQueue getMutationQueue(User user) { MemoryMutationQueue queue = mutationQueues[user]; if (queue == null) { - queue = MemoryMutationQueue(this, _statsCollector); + queue = MemoryMutationQueue(this); mutationQueues[user] = queue; } return queue; @@ -129,14 +122,12 @@ class MemoryPersistence extends Persistence { Iterable getMutationQueues() => mutationQueues.values; @override - Future runTransaction( - String action, Transaction operation) async { + Future runTransaction(String action, Transaction operation) async { return runTransactionAndReturn(action, operation); } @override - Future runTransactionAndReturn( - String action, Transaction operation) async { + Future runTransactionAndReturn(String action, Transaction operation) async { await _semaphore.acquire(); Log.d('$runtimeType', 'Starting transaction: $action'); diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/memory/memory_remote_document_cache.dart 
b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/memory/memory_remote_document_cache.dart index 5ec6c605..312bce93 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/memory/memory_remote_document_cache.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/memory/memory_remote_document_cache.dart @@ -6,73 +6,81 @@ part of memory_persistence; /// In-memory cache of remote documents. class MemoryRemoteDocumentCache implements RemoteDocumentCache { - MemoryRemoteDocumentCache(MemoryPersistence persistence, this._statsCollector) - : documents = DocumentCollections.emptyMaybeDocumentMap(), + MemoryRemoteDocumentCache(MemoryPersistence persistence) + : _documents = + ImmutableSortedMap>.emptyMap(DocumentKey.comparator), _persistence = persistence; /// Underlying cache of documents. - ImmutableSortedMap documents; + ImmutableSortedMap> _documents; final MemoryPersistence _persistence; - final StatsCollector _statsCollector; @override - Future add(MaybeDocument document) async { - documents = documents.insert(document.key, document); - await _persistence.indexManager - .addToCollectionParentIndex(document.key.path.popLast()); + Future add(MaybeDocument document, SnapshotVersion readTime) async { + hardAssert( + readTime != SnapshotVersion.none, + 'Cannot add document to the RemoteDocumentCache with a read time of zero', + ); + _documents = _documents.insert(document.key, MapEntry(document, readTime)); + + await _persistence.indexManager.addToCollectionParentIndex(document.key.path.popLast()); } @override Future remove(DocumentKey key) async { - _statsCollector.recordRowsDeleted(RemoteDocumentCache.statsTag, 1); - documents = documents.remove(key); + _documents = _documents.remove(key); } @override Future get(DocumentKey key) async { - _statsCollector.recordRowsRead(RemoteDocumentCache.statsTag, 1); - return documents[key]; + final MapEntry entry = _documents[key]; + return entry != null ? entry.key : null; } @override - Future> getAll( - Iterable documentKeys) async { - final List> entries = - await Future.wait(documentKeys.map((DocumentKey key) async => - MapEntry(key, await get(key)))); - - _statsCollector.recordRowsRead( - RemoteDocumentCache.statsTag, entries.length); - return Map.fromEntries(entries); + Future> getAll(Iterable documentKeys) async { + final Map result = {}; + + for (DocumentKey key in documentKeys) { + // Make sure each key has a corresponding entry, which is null in case the document is not + // found. + result[key] = await get(key); + } + + return result; } @override - Future> - getAllDocumentsMatchingQuery(Query query) async { - hardAssert(!query.isCollectionGroupQuery, - 'CollectionGroup queries should be handled in LocalDocumentsView'); - ImmutableSortedMap result = - DocumentCollections.emptyDocumentMap(); + Future> getAllDocumentsMatchingQuery( + Query query, + SnapshotVersion sinceReadTime, + ) async { + hardAssert(!query.isCollectionGroupQuery, 'CollectionGroup queries should be handled in LocalDocumentsView'); + ImmutableSortedMap result = DocumentCollections.emptyDocumentMap(); // Documents are ordered by key, so we can use a prefix scan to narrow down the documents we need to match the query // against. 
final ResourcePath queryPath = query.path; - final DocumentKey prefix = - DocumentKey.fromPath(queryPath.appendSegment('')); - final Iterator> iterator = - documents.iteratorFrom(prefix); - int rowsRead = 0; + final DocumentKey prefix = DocumentKey.fromPath(queryPath.appendSegment('')); + final Iterator>> iterator = + _documents.iteratorFrom(prefix); + while (iterator.moveNext()) { - final MapEntry entry = iterator.current; - rowsRead++; + final MapEntry> entry = iterator.current; + final DocumentKey key = entry.key; if (!queryPath.isPrefixOf(key.path)) { break; } - final MaybeDocument maybeDoc = entry.value; - if (!(maybeDoc is Document)) { + final MaybeDocument maybeDoc = entry.value.key; + if (maybeDoc is! Document) { + continue; + } + + final SnapshotVersion readTime = entry.value.value; + if (readTime.compareTo(sinceReadTime) <= 0) { continue; } @@ -82,28 +90,17 @@ class MemoryRemoteDocumentCache implements RemoteDocumentCache { } } - _statsCollector.recordRowsRead(RemoteDocumentCache.statsTag, rowsRead); return result; } - /// Returns an estimate of the number of bytes used to store the given document key in memory. This is only an - /// estimate and includes the size of the segments of the path, but not any object overhead or path separators. - static int _getKeySize(DocumentKey key) { - final ResourcePath path = key.path; - int count = 0; - for (int i = 0; i < path.length; i++) { - // Strings in dart are utf-16, each character is two bytes in memory - count += path.segments[i].length * 2; - } - return count; + Iterable get documents { + return _documents.map((MapEntry> item) => item.value.key); } int getByteSize(LocalSerializer serializer) { int count = 0; - for (MapEntry entry in documents) { - count += _getKeySize(entry.key); - count += - serializer.encodeMaybeDocument(entry.value).writeToBuffer().length; + for (MaybeDocument doc in documents) { + count += serializer.encodeMaybeDocument(doc).writeToBuffer().lengthInBytes; } return count; } diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/memory/memory_query_cache.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/memory/memory_target_cache.dart similarity index 59% rename from cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/memory/memory_query_cache.dart rename to cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/memory/memory_target_cache.dart index 3975e136..d01e247e 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/memory/memory_query_cache.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/memory/memory_target_cache.dart @@ -4,13 +4,13 @@ part of memory_persistence; -/// An implementation of the [QueryCache] protocol that merely keeps queries in memory, suitable for online only clients +/// An implementation of the [TargetCache] protocol that merely keeps targets in memory, suitable for online only clients /// with persistence disabled. -class MemoryQueryCache implements QueryCache { - MemoryQueryCache(this.persistence); +class MemoryTargetCache implements TargetCache { + MemoryTargetCache(this.persistence); - /// Maps a query to the data about that query. - final Map queries = {}; + /// Maps a target to the data about that target. + final Map targets = {}; /// A ordered bidirectional mapping between documents and the remote target ids. 
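To show how the in-memory target cache introduced above is driven, a hedged round-trip sketch: the helper name is illustrative, the positional TargetData arguments follow the LocalStore.allocateTarget usage in this patch, and the transaction wrapper is assumed so the reference delegate's sequence number is valid:

Future<TargetData> pinTargetSketch(MemoryPersistence persistence, Target target) {
  return persistence.runTransactionAndReturn('Pin target (sketch)', () async {
    final MemoryTargetCache cache = persistence.targetCache;
    final TargetData data = TargetData(
      target,
      cache.highestTargetId + 1, // illustrative id; production code uses TargetIdGenerator
      persistence.referenceDelegate.currentSequenceNumber,
      QueryPurpose.listen,
    );
    await cache.addTargetData(data);
    // Look-ups use the Target itself as the key.
    return cache.getTargetData(target);
  });
}
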
final ReferenceSet references = ReferenceSet(); @@ -28,53 +28,52 @@ class MemoryQueryCache implements QueryCache { final MemoryPersistence persistence; @override - int get targetCount => queries.length; + int get targetCount => targets.length; @override - Future forEachTarget(Consumer consumer) async { - queries.values.forEach(consumer); + Future forEachTarget(Consumer consumer) async { + targets.values.forEach(consumer); } @override int get highestListenSequenceNumber => highestSequenceNumber; @override - Future setLastRemoteSnapshotVersion( - SnapshotVersion snapshotVersion) async { + Future setLastRemoteSnapshotVersion(SnapshotVersion snapshotVersion) async { lastRemoteSnapshotVersion = snapshotVersion; } // Query tracking @override - Future addQueryData(QueryData queryData) async { - queries[queryData.query] = queryData; - final int targetId = queryData.targetId; + Future addTargetData(TargetData targetData) async { + targets[targetData.target] = targetData; + final int targetId = targetData.targetId; if (targetId > highestTargetId) { highestTargetId = targetId; } - if (queryData.sequenceNumber > highestSequenceNumber) { - highestSequenceNumber = queryData.sequenceNumber; + if (targetData.sequenceNumber > highestSequenceNumber) { + highestSequenceNumber = targetData.sequenceNumber; } } @override - Future updateQueryData(QueryData queryData) async { + Future updateTargetData(TargetData targetData) async { // Memory persistence doesn't need to do anything different between add and remove. - return addQueryData(queryData); + return addTargetData(targetData); } @override - Future removeQueryData(QueryData queryData) async { - queries.remove(queryData.query); - references.removeReferencesForId(queryData.targetId); + Future removeTargetData(TargetData targetData) async { + targets.remove(targetData.target); + references.removeReferencesForId(targetData.targetId); } /// Drops any targets with sequence number less than or equal to the upper bound, excepting those present in /// [activeTargetIds]. Document associations for the removed targets are also removed. 
int removeQueries(int upperBound, Set activeTargetIds) { int removed = 0; - queries.removeWhere((Query query, QueryData queryData) { + targets.removeWhere((Target target, TargetData queryData) { final int targetId = queryData.targetId; final int sequenceNumber = queryData.sequenceNumber; @@ -90,13 +89,12 @@ class MemoryQueryCache implements QueryCache { } @override - Future getQueryData(Query query) async => queries[query]; + Future getTargetData(Target target) async => targets[target]; // Reference tracking @override - Future addMatchingKeys( - ImmutableSortedSet keys, int targetId) async { + Future addMatchingKeys(ImmutableSortedSet keys, int targetId) async { references.addReferences(keys, targetId); final ReferenceDelegate referenceDelegate = persistence.referenceDelegate; @@ -106,8 +104,7 @@ class MemoryQueryCache implements QueryCache { } @override - Future removeMatchingKeys( - ImmutableSortedSet keys, int targetId) async { + Future removeMatchingKeys(ImmutableSortedSet keys, int targetId) async { references.removeReferences(keys, targetId); final ReferenceDelegate referenceDelegate = persistence.referenceDelegate; @@ -121,19 +118,17 @@ class MemoryQueryCache implements QueryCache { } @override - Future> getMatchingKeysForTargetId( - int targetId) async { + Future> getMatchingKeysForTargetId(int targetId) async { return references.referencesForId(targetId); } @override - Future containsKey(DocumentKey key) async => - references.containsKey(key); + Future containsKey(DocumentKey key) async => references.containsKey(key); int getByteSize(LocalSerializer serializer) { int count = 0; - for (QueryData value in queries.values) { - count += serializer.encodeQueryData(value).writeToBuffer().length; + for (TargetData value in targets.values) { + count += serializer.encodeTargetData(value).writeToBuffer().lengthInBytes; } return count; } diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/persistance/stats_collector.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/persistance/stats_collector.dart deleted file mode 100644 index cbe3f0ab..00000000 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/persistance/stats_collector.dart +++ /dev/null @@ -1,25 +0,0 @@ -// File created by -// Lung Razvan -// on 22/03/2020 - -/// Collects the operation count from the persistence layer. Implementing -/// subclasses can expose this information to measure the efficiency of -/// persistence operations. -/// -/// The only consumer of operation counts is currently the [LocalStoreTestCase] -/// (via [AccumulatingStatsCollector]). If you are not interested in the stats, -/// you can use [noOp] for the default empty stats collector. -class StatsCollector { - const StatsCollector(); - - static const StatsCollector noOp = StatsCollector(); - - /// Records the number of rows read for the given tag. - void recordRowsRead(String tag, int count) {} - - /// Records the number of rows deleted for the given tag. - void recordRowsDeleted(String tag, int count) {} - - /// Records the number of rows written for the given tag. 
- void recordRowsWritten(String tag, int count) {} -} diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/persistance/index_manager.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/persistence/index_manager.dart similarity index 100% rename from cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/persistance/index_manager.dart rename to cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/persistence/index_manager.dart diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/lru_delegate.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/persistence/lru_delegate.dart similarity index 91% rename from cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/lru_delegate.dart rename to cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/persistence/lru_delegate.dart index 62028c10..bf8cd5a1 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/lru_delegate.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/persistence/lru_delegate.dart @@ -4,15 +4,15 @@ import 'dart:async'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/lru_garbage_collector.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/query_data.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/lru_garbage_collector.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/target_data.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/util/types.dart'; /// Persistence layers intending to use LRU Garbage collection should implement this interface. This interface defines /// the operations that the LRU garbage collector needs from the persistence layer. abstract class LruDelegate { /// Enumerates all the targets in the QueryCache. 
- Future forEachTarget(Consumer consumer); + Future forEachTarget(Consumer consumer); Future getSequenceNumberCount(); diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/lru_garbage_collector.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/persistence/lru_garbage_collector.dart similarity index 77% rename from cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/lru_garbage_collector.dart rename to cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/persistence/lru_garbage_collector.dart index b07c95b3..c4f6fecc 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/lru_garbage_collector.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/persistence/lru_garbage_collector.dart @@ -6,14 +6,14 @@ import 'dart:async'; import 'package:_firebase_internal_vm/_firebase_internal_vm.dart'; import 'package:cloud_firestore_vm/cloud_firestore_vm.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/core/listent_sequence.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/lru_delegate.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/query_data.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/util/timer_task.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/core/listen_sequence.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/garbage_collection_scheduler.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/local_store.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/lru_delegate.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/target_data.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/util/async_task.dart'; import 'package:collection/collection.dart'; -import 'local_store.dart'; - /// Implements the steps for LRU garbage collection. class LruGarbageCollector { LruGarbageCollector(this.delegate, this._params); @@ -22,8 +22,7 @@ class LruGarbageCollector { final LruGarbageCollectorParams _params; /// A helper method to create a new scheduler. 
- LruGarbageCollectorScheduler newScheduler( - TaskScheduler scheduler, LocalStore localStore) { + LruGarbageCollectorScheduler newScheduler(AsyncQueue scheduler, LocalStore localStore) { return LruGarbageCollectorScheduler(this, localStore, scheduler); } @@ -38,10 +37,8 @@ class LruGarbageCollector { if (count == 0) { return ListenSequence.invalid; } - final _RollingSequenceNumberBuffer buffer = - _RollingSequenceNumberBuffer(count); - await delegate.forEachTarget( - (QueryData queryData) => buffer.addElement(queryData.sequenceNumber)); + final _RollingSequenceNumberBuffer buffer = _RollingSequenceNumberBuffer(count); + await delegate.forEachTarget((TargetData targetData) => buffer.addElement(targetData.sequenceNumber)); await delegate.forEachOrphanedDocumentSequenceNumber(buffer.addElement); return buffer.maxValue; } @@ -58,8 +55,7 @@ class LruGarbageCollector { } Future collect(Set activeTargetIds) async { - if (_params.minBytesThreshold == - LruGarbageCollectorParams._collectionDisabled) { + if (_params.minBytesThreshold == LruGarbageCollectorParams._collectionDisabled) { Log.d('LruGarbageCollector', 'Garbage collection skipped; disabled'); return LruGarbageCollectorResults.didNotRun; } @@ -74,11 +70,9 @@ class LruGarbageCollector { } } - Future _runGarbageCollection( - Set liveTargetIds) async { + Future _runGarbageCollection(Set liveTargetIds) async { final DateTime startTs = DateTime.now(); - int sequenceNumbers = - await calculateQueryCount(_params.percentileToCollect); + int sequenceNumbers = await calculateQueryCount(_params.percentileToCollect); // Cap at the configured max if (sequenceNumbers > _params.maximumSequenceNumbersToCollect) { Log.d( @@ -92,8 +86,7 @@ class LruGarbageCollector { final int upperBound = await getNthSequenceNumber(sequenceNumbers); final DateTime foundUpperBoundTs = DateTime.now(); - final int numTargetsRemoved = - await removeTargets(upperBound, liveTargetIds); + final int numTargetsRemoved = await removeTargets(upperBound, liveTargetIds); final DateTime removedTargetsTs = DateTime.now(); final int numDocumentsRemoved = await removeOrphanedDocuments(upperBound); @@ -101,15 +94,11 @@ class LruGarbageCollector { if (Log.isDebugEnabled) { final StringBuffer desc = StringBuffer('LRU Garbage Collection:\n') - ..writeln( - '\tCounted targets in ${countedTargetsTs.difference(startTs)}') - ..writeln( - '\tDetermined least recently used $sequenceNumbers sequence numbers in ' + ..writeln('\tCounted targets in ${countedTargetsTs.difference(startTs)}') + ..writeln('\tDetermined least recently used $sequenceNumbers sequence numbers in ' '${foundUpperBoundTs.difference(countedTargetsTs)}') - ..writeln( - '\tRemoved $numTargetsRemoved targets in ${removedTargetsTs.difference(foundUpperBoundTs)}') - ..writeln( - '\tRemoved $numDocumentsRemoved documents in ${removedDocumentsTs.difference(removedTargetsTs)}') + ..writeln('\tRemoved $numTargetsRemoved targets in ${removedTargetsTs.difference(foundUpperBoundTs)}') + ..writeln('\tRemoved $numDocumentsRemoved documents in ${removedDocumentsTs.difference(removedTargetsTs)}') ..writeln('Total Duration: ${removedDocumentsTs.difference(startTs)}'); Log.d('LruGarbageCollector', desc); @@ -128,8 +117,7 @@ class LruGarbageCollector { /// Used to calculate the nth sequence number. Keeps a rolling buffer of the lowest n values passed to [addElement], and /// finally reports the largest of them in [maxValue]. 
class _RollingSequenceNumberBuffer { - _RollingSequenceNumberBuffer(this.maxElements) - : queue = PriorityQueue(comparator); + _RollingSequenceNumberBuffer(this.maxElements) : queue = PriorityQueue(comparator); final PriorityQueue queue; final int maxElements; @@ -161,13 +149,9 @@ class LruGarbageCollectorParams { }); LruGarbageCollectorParams.disabled() - : this( - minBytesThreshold: _collectionDisabled, - percentileToCollect: 0, - maximumSequenceNumbersToCollect: 0); + : this(minBytesThreshold: _collectionDisabled, percentileToCollect: 0, maximumSequenceNumbersToCollect: 0); - LruGarbageCollectorParams.withCacheSizeBytes(int cacheSizeBytes) - : this(minBytesThreshold: cacheSizeBytes); + LruGarbageCollectorParams.withCacheSizeBytes(int cacheSizeBytes) : this(minBytesThreshold: cacheSizeBytes); final int minBytesThreshold; final int percentileToCollect; @@ -194,11 +178,7 @@ class LruGarbageCollectorResults { }); static const LruGarbageCollectorResults didNotRun = - LruGarbageCollectorResults( - hasRun: false, - sequenceNumbersCollected: 0, - targetsRemoved: 0, - documentsRemoved: 0); + LruGarbageCollectorResults(hasRun: false, sequenceNumbersCollected: 0, targetsRemoved: 0, documentsRemoved: 0); final bool hasRun; final int sequenceNumbersCollected; @@ -218,7 +198,7 @@ class LruGarbageCollectorResults { /// This class is responsible for the scheduling of LRU garbage collection. It handles checking whether or not GC is /// enabled, as well as which delay to use before the next run. -class LruGarbageCollectorScheduler { +class LruGarbageCollectorScheduler implements GarbageCollectionScheduler { LruGarbageCollectorScheduler( this._garbageCollector, this._localStore, @@ -227,10 +207,10 @@ class LruGarbageCollectorScheduler { final LruGarbageCollector _garbageCollector; final LocalStore _localStore; - final TaskScheduler _scheduler; + final AsyncQueue _scheduler; bool _hasRun = false; - TimerTask _gcTask; + DelayedTask _gcTask; /// How long we wait to try running LRU GC after SDK initialization. static const Duration _initialGcDelay = Duration(minutes: 1); @@ -238,14 +218,15 @@ class LruGarbageCollectorScheduler { /// Minimum amount of time between GC checks, after the first one. static const Duration _regularGcDelay = Duration(minutes: 5); + @override void start() { - if (_garbageCollector._params.minBytesThreshold != - LruGarbageCollectorParams._collectionDisabled) { + if (_garbageCollector._params.minBytesThreshold != LruGarbageCollectorParams._collectionDisabled) { _scheduleGC(); } } - void stop() { + @override + Future stop() async { if (_gcTask != null) { _gcTask.cancel(); } @@ -254,8 +235,8 @@ class LruGarbageCollectorScheduler { void _scheduleGC() { final Duration delay = _hasRun ? 
_regularGcDelay : _initialGcDelay; - _gcTask = _scheduler.add( - TaskId.garbageCollection, + _gcTask = _scheduler.enqueueAfterDelay( + TimerId.garbageCollection, delay, () async { await _localStore.collectGarbage(_garbageCollector); diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/persistance/mutation_queue.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/persistence/mutation_queue.dart similarity index 91% rename from cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/persistance/mutation_queue.dart rename to cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/persistence/mutation_queue.dart index 2a19d7ac..6022d49c 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/persistance/mutation_queue.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/persistence/mutation_queue.dart @@ -14,8 +14,6 @@ import 'package:cloud_firestore_vm/src/firebase/timestamp.dart'; /// A queue of mutations to apply to the remote store. abstract class MutationQueue { - static const String statsTag = 'mutations'; - /// Starts the mutation queue, performing any initial reads that might be /// required to establish invariants, etc. Future start(); @@ -39,8 +37,8 @@ abstract class MutationQueue { /// mutation is applied locally. These mutations are used to locally overwrite /// values that are persisted in the remote document cache. [mutations] The /// user-provided mutations in this mutation batch. - Future addMutationBatch(Timestamp localWriteTime, - List baseMutations, List mutations); + Future addMutationBatch( + Timestamp localWriteTime, List baseMutations, List mutations); /// Loads the mutation batch with the given [batchId]. Future lookupMutationBatch(int batchId); @@ -54,6 +52,10 @@ abstract class MutationQueue { /// Returns the next [Mutation] or null if there wasn't one. Future getNextMutationBatchAfterBatchId(int batchId); + /// Returns the largest (latest) batch id in mutation queue for the current user that is pending + /// server response, [MutationBatch.unknown] if the queue is empty. + Future getHighestUnacknowledgedBatchId(); + /// Returns all mutation batches in the mutation queue. /// // TODO(long1eu): PERF: Current consumer only needs mutated keys; if we can @@ -70,8 +72,7 @@ abstract class MutationQueue { /// convenient. /// /// Batches are guaranteed to be sorted by batch ID. - Future> getAllMutationBatchesAffectingDocumentKey( - DocumentKey documentKey); + Future> getAllMutationBatchesAffectingDocumentKey(DocumentKey documentKey); /// Finds all mutation batches that could possibly affect the given set of /// document keys. Not all mutations in a batch will necessarily affect each @@ -83,8 +84,7 @@ abstract class MutationQueue { /// it's convenient. /// /// Batches are guaranteed to be sorted by batch ID. - Future> getAllMutationBatchesAffectingDocumentKeys( - Iterable documentKeys); + Future> getAllMutationBatchesAffectingDocumentKeys(Iterable documentKeys); /// Finds all mutation batches that could affect the results for the given /// query. 
Not all mutations in a batch will necessarily affect the query, so diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/persistance/persistence.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/persistence/persistence.dart similarity index 96% rename from cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/persistance/persistence.dart rename to cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/persistence/persistence.dart index 91ed1097..64e2b1ba 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/persistance/persistence.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/persistence/persistence.dart @@ -6,10 +6,10 @@ import 'dart:async'; import 'package:cloud_firestore_vm/src/firebase/firestore/auth/user.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/local/local_store.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistance/mutation_queue.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistance/query_cache.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistance/reference_delegate.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistance/remote_document_cache.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/mutation_queue.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/target_cache.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/reference_delegate.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/remote_document_cache.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/util/types.dart'; import 'index_manager.dart'; @@ -76,7 +76,7 @@ abstract class Persistence { MutationQueue getMutationQueue(User user); /// Creates a [QueryCache] representing the persisted cache of queries. - QueryCache get queryCache; + TargetCache get targetCache; /// Creates a [RemoteDocumentCache] representing the persisted cache of remote /// documents. diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/persistance/reference_delegate.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/persistence/reference_delegate.dart similarity index 96% rename from cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/persistance/reference_delegate.dart rename to cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/persistence/reference_delegate.dart index ed64b10a..d65066e7 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/persistance/reference_delegate.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/persistence/reference_delegate.dart @@ -4,7 +4,7 @@ import 'dart:async'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/query_data.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/target_data.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/local/reference_set.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/document_key.dart'; @@ -35,7 +35,7 @@ abstract class ReferenceDelegate { /// Notify the delegate that a target was removed. The delegate may, but is /// not obligated to, actually delete the target and associated data. 
- Future removeTarget(QueryData queryData); + Future removeTarget(TargetData queryData); /// Notify the delegate that a limbo document was updated. Future updateLimboDocument(DocumentKey key); diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/persistance/remote_document_cache.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/persistence/remote_document_cache.dart similarity index 74% rename from cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/persistance/remote_document_cache.dart rename to cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/persistence/remote_document_cache.dart index 13459313..a70c9809 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/persistance/remote_document_cache.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/persistence/remote_document_cache.dart @@ -10,6 +10,7 @@ import 'package:cloud_firestore_vm/src/firebase/firestore/model/document.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/document_key.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/maybe_document.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/no_document.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/snapshot_version.dart'; /// Represents cached documents received from the remote backend. /// @@ -18,13 +19,12 @@ import 'package:cloud_firestore_vm/src/firebase/firestore/model/no_document.dart /// (an actual document with data) as well as [NoDocument] instances (indicating /// that the document is known to not exist). abstract class RemoteDocumentCache { - static const String statsTag = 'remote_documents'; - /// Adds or replaces an entry in the cache. /// /// The cache key is extracted from [MaybeDocument.key]. If there is already a - /// cache entry for the key, it will be replaced. - Future add(MaybeDocument maybeDocument); + /// cache entry for the key, it will be replaced. [readTime] is the time at which + /// the document was read or committed. + Future add(MaybeDocument maybeDocument, SnapshotVersion readTime); /// Removes the cached entry for the given key (no-op if no entry exists). Future remove(DocumentKey documentKey); @@ -40,8 +40,7 @@ abstract class RemoteDocumentCache { /// Returns the cached [Document] or [NoDocument] entries indexed by key. If /// an entry is not cached, the corresponding key will be mapped to a null /// value. - Future> getAll( - Iterable documentKeys); + Future> getAll(Iterable documentKeys); /// Executes a query against the cached [Document] entries /// @@ -49,6 +48,11 @@ abstract class RemoteDocumentCache { /// should be re-filtered by the consumer before presenting them to the user. /// /// Cached [NoDocument] entries have no bearing on query results. - Future> - getAllDocumentsMatchingQuery(Query query); + /// + /// If [sinceReadTime] not set to [SnapshotVersion.min], return only documents that have been + /// read since this snapshot version (exclusive). 
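A toy in-memory stand-in for the contract spelled out above: `add` records each document together with the read time of the snapshot it was read or committed at, and `getAll` reports keys that have no cache entry as explicit null values, so callers can tell "not cached" apart from a cached tombstone. All types and names here are hypothetical simplifications.

class FakeCachedDoc {
  const FakeCachedDoc(this.exists, this.readTime);

  final bool exists;  // false models a NoDocument tombstone
  final int readTime; // stand-in for the SnapshotVersion passed to add()
}

class FakeRemoteDocumentCache {
  final Map<String, FakeCachedDoc> _docs = <String, FakeCachedDoc>{};

  void add(String key, bool exists, int readTime) {
    _docs[key] = FakeCachedDoc(exists, readTime); // replaces any older entry
  }

  Map<String, FakeCachedDoc> getAll(Iterable<String> keys) {
    final Map<String, FakeCachedDoc> result = <String, FakeCachedDoc>{};
    for (final String key in keys) {
      // Keys without a cache entry are mapped to null, per the contract above.
      result[key] = _docs.containsKey(key) ? _docs[key] : null;
    }
    return result;
  }
}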
+ Future> getAllDocumentsMatchingQuery( + Query query, + SnapshotVersion sinceReadTime, + ); } diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/persistance/query_cache.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/persistence/target_cache.dart similarity index 69% rename from cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/persistance/query_cache.dart rename to cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/persistence/target_cache.dart index 6b34455f..068e9b36 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/persistance/query_cache.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/persistence/target_cache.dart @@ -5,21 +5,20 @@ import 'dart:async'; import 'package:_firebase_database_collection_vm/_firebase_database_collection_vm.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/core/query.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistance/reference_delegate.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/query_data.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/core/target.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/target_data.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/document_key.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/snapshot_version.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/remote/target_change.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/util/types.dart'; -/// Represents cached queries received from the remote backend. This contains -/// both a mapping between queries and the documents that matched them according -/// to the server, but also metadata about the queries. +/// Represents cached targets received from the remote backend. This contains +/// both a mapping between targets and the documents that matched them according +/// to the server, but also metadata about the targets. /// -/// The cache is keyed by [Query] and entries in the cache are [QueryData] +/// The cache is keyed by [Target] and entries in the cache are [TargetData] /// instances. -abstract class QueryCache { +abstract class TargetCache { /// Returns the highest target id of any query in the cache. Typically called /// during startup to seed a target id generator and avoid collisions with /// existing queries. If there are no queries in the cache, returns zero. @@ -33,7 +32,7 @@ abstract class QueryCache { int get targetCount; /// Call the consumer for each target in the cache. - Future forEachTarget(Consumer consumer); + Future forEachTarget(Consumer consumer); /// A global snapshot version representing the last consistent snapshot we /// received from the backend. This is monotonically increasing and any @@ -53,39 +52,36 @@ abstract class QueryCache { /// Adds an entry in the cache. This entry should not already exist. /// - /// The cache key is extracted from [QueryData.query]. - Future addQueryData(QueryData queryData); + /// The cache key is extracted from [TargetData.target]. + Future addTargetData(TargetData queryData); /// Replaces an entry in the cache. An entry with the same key should already /// exist. /// - /// The cache key is extracted from [QueryData.query]. - Future updateQueryData(QueryData queryData); + /// The cache key is extracted from [TargetData.target]. 
+ Future updateTargetData(TargetData queryData); /// Removes the cached entry for the given query data. This entry should /// already exist in the cache. This method exists in the interface for /// testing purposes. Production code should instead call /// [ReferenceDelegate.removeTarget]. - Future removeQueryData(QueryData queryData); + Future removeTargetData(TargetData queryData); - /// Looks up a [QueryData] entry in the cache. + /// Looks up a [TargetData] entry in the cache. /// - /// The [query] corresponding to the entry to look up. Returns the cached - /// [QueryData] entry, or null if the cache has no entry for the query. - Future getQueryData(Query query); + /// The [target] corresponding to the entry to look up. Returns the cached + /// [TargetData] entry, or null if the cache has no entry for the query. + Future getTargetData(Target target); /// Adds the given document [keys] to cached query results of the given /// [targetId]. - Future addMatchingKeys( - ImmutableSortedSet keys, int targetId); + Future addMatchingKeys(ImmutableSortedSet keys, int targetId); /// Removes the given document [keys] from the cached query results of the /// given [targetId]. - Future removeMatchingKeys( - ImmutableSortedSet keys, int targetId); + Future removeMatchingKeys(ImmutableSortedSet keys, int targetId); - Future> getMatchingKeysForTargetId( - int targetId); + Future> getMatchingKeysForTargetId(int targetId); /// Returns true if the document is part of any target Future containsKey(DocumentKey key); diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/query_data.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/persistence/target_data.dart similarity index 54% rename from cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/query_data.dart rename to cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/persistence/target_data.dart index c2e078fa..00359ee1 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/query_data.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/persistence/target_data.dart @@ -5,89 +5,96 @@ import 'dart:typed_data'; import 'package:_firebase_internal_vm/_firebase_internal_vm.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/core/query.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/core/sync_engine.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/core/target.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/local/query_purpose.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/snapshot_version.dart'; import 'package:collection/collection.dart'; -import 'package:meta/meta.dart'; /// An immutable set of metadata that the store will need to keep track of for -/// each query. -class QueryData { - /// Creates a new QueryData with the given values. +/// each target. +class TargetData { + /// Creates a new TargetData with the given values. /// - /// [LocalStore] assigns a [targetId] that corresponds to the [query] for user - /// queries or the [SyncEngine] for limbo queries. The [resumeToken] is an - /// opaque, server-assigned token that allows watching a query to be resumed - /// after disconnecting without retransmitting all the data that matches the - /// query. The resume token essentially identifies a point in time from which - /// the server should resume sending results. 
- QueryData( - this.query, + /// The [target] being listened to and the [targetId] to which the target corresponds, assigned + /// by the [LocalStore] for user queries or the [SyncEngine] for limbo queries. + /// [lastLimboFreeSnapshotVersion] represents the maximum snapshot version at which the associated target + /// view contained no limbo documents. + /// [resumeToken] is an opaque, server-assigned token that allows watching a target to be resumed + /// after disconnecting without retransmitting all the data that matches the target. The resume + /// token essentially identifies a point in time from which the server should resume sending + TargetData( + this.target, this.targetId, this.sequenceNumber, this.purpose, [ SnapshotVersion snapshotVersion, + SnapshotVersion lastLimboFreeSnapshotVersion, Uint8List resumeToken, - ]) : assert(query != null), + ]) : assert(target != null), snapshotVersion = snapshotVersion ?? SnapshotVersion.none, + lastLimboFreeSnapshotVersion = lastLimboFreeSnapshotVersion ?? SnapshotVersion.none, resumeToken = resumeToken ?? Uint8List(0); - final Query query; + final Target target; final int targetId; final int sequenceNumber; final QueryPurpose purpose; final SnapshotVersion snapshotVersion; + final SnapshotVersion lastLimboFreeSnapshotVersion; final Uint8List resumeToken; /// Creates a new query data instance with an updated snapshot version and /// resume token. - QueryData copyWith({ - @required SnapshotVersion snapshotVersion, - @required Uint8List resumeToken, - @required int sequenceNumber, + TargetData copyWith({ + SnapshotVersion snapshotVersion, + Uint8List resumeToken, + int sequenceNumber, + SnapshotVersion lastLimboFreeSnapshotVersion, }) { - assert(sequenceNumber != null); - return QueryData( - query, + return TargetData( + target, targetId, - sequenceNumber, + sequenceNumber ?? this.sequenceNumber, purpose, - snapshotVersion, - resumeToken, + snapshotVersion ?? this.snapshotVersion, + lastLimboFreeSnapshotVersion ?? this.lastLimboFreeSnapshotVersion, + resumeToken ?? 
this.resumeToken, ); } @override bool operator ==(Object other) => identical(this, other) || - other is QueryData && + other is TargetData && runtimeType == other.runtimeType && - query == other.query && + target == other.target && targetId == other.targetId && sequenceNumber == other.sequenceNumber && purpose == other.purpose && snapshotVersion == other.snapshotVersion && + lastLimboFreeSnapshotVersion == other.lastLimboFreeSnapshotVersion && const DeepCollectionEquality().equals(resumeToken, other.resumeToken); @override int get hashCode => - query.hashCode ^ + target.hashCode ^ targetId.hashCode ^ sequenceNumber.hashCode ^ purpose.hashCode ^ snapshotVersion.hashCode ^ + lastLimboFreeSnapshotVersion.hashCode ^ const DeepCollectionEquality().hash(resumeToken); @override String toString() { return (ToStringHelper(runtimeType) - ..add('query', query) + ..add('target', target) ..add('targetId', targetId) ..add('sequenceNumber', sequenceNumber) ..add('purpose', purpose) ..add('snapshotVersion', snapshotVersion) + ..add('lastLimboFreeSnapshotVersion', lastLimboFreeSnapshotVersion) ..add('resumeToken', resumeToken)) .toString(); } diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/query_engine.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/query_engine.dart index a2bc871b..0161ea02 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/query_engine.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/query_engine.dart @@ -2,24 +2,30 @@ // Lung Razvan // on 20/09/2018 -import 'dart:async'; - import 'package:_firebase_database_collection_vm/_firebase_database_collection_vm.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/core/query.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/local_documents_view.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/document.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/document_key.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/maybe_document.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/snapshot_version.dart'; -/// Represents a query engine capable of performing queries over the local document cache. +/// Represents a query engine capable of performing queries over the local document cache. You must +/// set [localDocumentsView] before using. abstract class QueryEngine { + /// Sets the document view to query against. + set localDocumentsView(LocalDocumentsView localDocuments); + /// Returns all local documents matching the specified query. Future> getDocumentsMatchingQuery( - Query query); + Query query, + SnapshotVersion lastLimboFreeSnapshotVersion, + ImmutableSortedSet remoteKeys, + ); /// Notifies the query engine of a document change in case it would like to /// update indexes and the like. // TODO(long1eu): We can change this to just accept the changed fields // (w/ old and new values) if it's convenient for the caller to compute. 
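The extra parameters added to `getDocumentsMatchingQuery` exist so an engine can avoid a full collection scan when it has a previously server-confirmed result: it only needs to re-check the keys the backend last matched plus whatever the cache has seen change since the last limbo-free snapshot version. A heavily simplified sketch of that idea with hypothetical `String`/`int` stand-ins (the real `DefaultQueryEngine` added by this patch is considerably more involved):

class DocSnapshot {
  const DocSnapshot(this.key, this.readTime, this.matches);

  final String key;
  final int readTime;
  final bool matches; // whether the document still satisfies the query filters
}

List<DocSnapshot> documentsMatchingQuery(
    List<DocSnapshot> allDocs,
    Set<String> remoteKeys,
    int lastLimboFreeSnapshotVersion) {
  if (lastLimboFreeSnapshotVersion == 0 /* i.e. SnapshotVersion.none */) {
    // No previous server-confirmed result: fall back to a full scan.
    return allDocs.where((DocSnapshot d) => d.matches).toList();
  }
  return allDocs
      .where((DocSnapshot d) =>
          remoteKeys.contains(d.key) ||
          d.readTime > lastLimboFreeSnapshotVersion)
      .where((DocSnapshot d) => d.matches)
      .toList();
}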
- void handleDocumentChange( - MaybeDocument oldDocument, MaybeDocument newDocument); + void handleDocumentChange(MaybeDocument oldDocument, MaybeDocument newDocument); } diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/query_result.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/query_result.dart new file mode 100644 index 00000000..b2117a34 --- /dev/null +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/query_result.dart @@ -0,0 +1,14 @@ +// File created by +// Lung Razvan +// on 17/01/2021 + +import 'package:_firebase_database_collection_vm/_firebase_database_collection_vm.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/document.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/document_key.dart'; + +class QueryResult { + const QueryResult(this.documents, this.remoteKeys); + + final ImmutableSortedMap documents; + final ImmutableSortedSet remoteKeys; +} diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/simple_query_engine.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/simple_query_engine.dart deleted file mode 100644 index 7491b823..00000000 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/simple_query_engine.dart +++ /dev/null @@ -1,36 +0,0 @@ -// File created by -// Lung Razvan -// on 21/09/2018 - -import 'dart:async'; - -import 'package:_firebase_database_collection_vm/_firebase_database_collection_vm.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/core/query.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/local_documents_view.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/query_engine.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/model/document.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/model/document_key.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/model/maybe_document.dart'; - -/// A naive implementation of QueryEngine that just loads all the documents in -/// the queried collection and then filters them in memory. -class SimpleQueryEngine implements QueryEngine { - SimpleQueryEngine(this.localDocumentsView); - - final LocalDocumentsView localDocumentsView; - - @override - Future> getDocumentsMatchingQuery( - Query query) { - // TODO(long1eu): Once LocalDocumentsView provides a - // getCollectionDocuments() method, we should call that here and then - // filter the results. - return localDocumentsView.getDocumentsMatchingQuery(query); - } - - @override - void handleDocumentChange( - MaybeDocument oldDocument, MaybeDocument newDocument) { - // No indexes to update. - } -} diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/sqlite/sqlite_collection_index.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/sqlite/sqlite_collection_index.dart index c2fea856..ef0a25d8 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/sqlite/sqlite_collection_index.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/sqlite/sqlite_collection_index.dart @@ -9,21 +9,18 @@ part of sqlite_persistence; /// /// NOTE: There is no in-memory implementation at this time. class SQLiteCollectionIndex { - SQLiteCollectionIndex(this.db, User user) - : uid = user.isAuthenticated ? user.uid : ''; + SQLiteCollectionIndex(this.db, User user) : uid = user.isAuthenticated ? 
user.uid : ''; final SQLitePersistence db; final String uid; /// Adds the specified entry to the index. - void addEntry( - FieldPath fieldPath, FieldValue fieldValue, DocumentKey documentKey) { + void addEntry(FieldPath fieldPath, proto.Value fieldValue, DocumentKey documentKey) { throw StateError('Not yet implemented.'); } /// Adds the specified entry to the index. - void removeEntry( - FieldPath fieldPath, FieldValue fieldValue, DocumentKey documentKey) { + void removeEntry(FieldPath fieldPath, proto.Value fieldValue, DocumentKey documentKey) { throw StateError('Not yet implemented.'); } diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/sqlite/sqlite_lru_reference_delegate.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/sqlite/sqlite_lru_reference_delegate.dart index a9bd9f97..f834ffb9 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/sqlite/sqlite_lru_reference_delegate.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/sqlite/sqlite_lru_reference_delegate.dart @@ -10,6 +10,12 @@ class SQLiteLruReferenceDelegate implements ReferenceDelegate, LruDelegate { garbageCollector = LruGarbageCollector(this, params); } + /// The batch size for orphaned document GC in [removeOrphanedDocuments]. + /// + /// This addresses https://github.com/firebase/firebase-android-sdk/issues/706, where a customer + /// reported that LRU GC hit a CursorWindow size limit during orphaned document removal. + static const int _kRemoveOrphanedDocumentsBatchSize = 100; + final SQLitePersistence persistence; ListenSequence listenSequence; int _currentSequenceNumber; @@ -26,15 +32,19 @@ class SQLiteLruReferenceDelegate implements ReferenceDelegate, LruDelegate { @override void onTransactionStarted() { - hardAssert(_currentSequenceNumber == ListenSequence.invalid, - 'Starting a transaction without committing the previous one'); + hardAssert( + _currentSequenceNumber == ListenSequence.invalid, + 'Starting a transaction without committing the previous one', + ); _currentSequenceNumber = listenSequence.next; } @override Future onTransactionCommitted() async { - hardAssert(_currentSequenceNumber != ListenSequence.invalid, - 'Committing a transaction without having started one'); + hardAssert( + _currentSequenceNumber != ListenSequence.invalid, + 'Committing a transaction without having started one', + ); _currentSequenceNumber = ListenSequence.invalid; } @@ -47,9 +57,9 @@ class SQLiteLruReferenceDelegate implements ReferenceDelegate, LruDelegate { @override Future getSequenceNumberCount() async { - final int targetCount = persistence.queryCache.targetCount; + final int targetCount = persistence.targetCache.targetCount; final Map data = (await persistence.query( - // @formatter:off + // @formatter:off ''' SELECT COUNT(*) as count FROM (SELECT sequence_number @@ -64,15 +74,14 @@ class SQLiteLruReferenceDelegate implements ReferenceDelegate, LruDelegate { } @override - Future forEachTarget(Consumer consumer) async { - await persistence.queryCache.forEachTarget(consumer); + Future forEachTarget(Consumer consumer) async { + await persistence.targetCache.forEachTarget(consumer); } @override - Future forEachOrphanedDocumentSequenceNumber( - Consumer consumer) async { + Future forEachOrphanedDocumentSequenceNumber(Consumer consumer) async { final List> result = await persistence.query( - // @formatter:off + // @formatter:off ''' SELECT sequence_number FROM target_documents @@ -100,7 +109,7 @@ class 
SQLiteLruReferenceDelegate implements ReferenceDelegate, LruDelegate { @override Future removeTargets(int upperBound, Set activeTargetIds) { - return persistence.queryCache.removeQueries(upperBound, activeTargetIds); + return persistence.targetCache.removeQueries(upperBound, activeTargetIds); } @override @@ -111,7 +120,7 @@ class SQLiteLruReferenceDelegate implements ReferenceDelegate, LruDelegate { /// Returns true if any mutation queue contains the given document. Future _mutationQueuesContainKey(DocumentKey key) async { return (await persistence.query( - // @formatter:off + // @formatter:off ''' SELECT 1 FROM document_mutations @@ -134,7 +143,7 @@ class SQLiteLruReferenceDelegate implements ReferenceDelegate, LruDelegate { Future _removeSentinel(DocumentKey key) async { await persistence.execute( - // @formatter:off + // @formatter:off ''' DELETE FROM target_documents @@ -149,7 +158,7 @@ class SQLiteLruReferenceDelegate implements ReferenceDelegate, LruDelegate { Future removeOrphanedDocuments(int upperBound) async { int count = 0; final List> result = await persistence.query( - // @formatter:off + // @formatter:off ''' SELECT path FROM target_documents @@ -171,18 +180,45 @@ class SQLiteLruReferenceDelegate implements ReferenceDelegate, LruDelegate { } } + bool resultsRemaining = true; + while (resultsRemaining) { + int rowsProcessed = 0; + final List> rows = await persistence.query( + // @formatter:off + ''' + SELECT path + FROM target_documents + GROUP BY path + HAVING count(*) = 1 + AND target_id = 0 + AND sequence_number <= ? + LIMIT ?; + ''', + // @formatter:on + [upperBound, _kRemoveOrphanedDocumentsBatchSize], + ); + + for (Map row in rows) { + final ResourcePath path = EncodedPath.decodeResourcePath(row['path']); + final DocumentKey key = DocumentKey.fromPath(path); + if (!await _isPinned(key)) { + count++; + await persistence.remoteDocumentCache.remove(key); + await _removeSentinel(key); + } + rowsProcessed++; + } + + resultsRemaining = rowsProcessed == _kRemoveOrphanedDocumentsBatchSize; + } + return count; } @override - Future removeTarget(QueryData queryData) async { - final QueryData updated = queryData.copyWith( - snapshotVersion: queryData.snapshotVersion, - resumeToken: queryData.resumeToken, - sequenceNumber: currentSequenceNumber, - ); - - await persistence.queryCache.updateQueryData(updated); + Future removeTarget(TargetData targetData) async { + final TargetData updated = targetData.copyWith(sequenceNumber: currentSequenceNumber); + await persistence.targetCache.updateTargetData(updated); } @override @@ -193,7 +229,7 @@ class SQLiteLruReferenceDelegate implements ReferenceDelegate, LruDelegate { Future _writeSentinel(DocumentKey key) async { final String path = EncodedPath.encode(key.path); await persistence.execute( - // @formatter:off + // @formatter:off ''' INSERT OR REPLACE INTO target_documents (target_id, path, sequence_number) diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/sqlite/sqlite_mutation_queue.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/sqlite/sqlite_mutation_queue.dart index ffedd79a..fbbedd0b 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/sqlite/sqlite_mutation_queue.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/sqlite/sqlite_mutation_queue.dart @@ -7,13 +7,12 @@ part of sqlite_persistence; /// A mutation queue for a specific user, backed by SQLite. 
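The orphaned-document sweep above was reworked to delete in fixed-size pages (`_kRemoveOrphanedDocumentsBatchSize`) and to keep looping while a page comes back full, so no single query result grows unbounded. The shape of that loop, with storage access abstracted behind two hypothetical callbacks:

import 'dart:async';

/// Deletes rows page by page until a page comes back short, and returns the
/// total number of rows removed.
Future<int> removeInBatches(
  Future<List<String>> Function(int limit) fetchEligibleBatch,
  Future<void> Function(String path) removeRow, {
  int batchSize = 100,
}) async {
  int removed = 0;
  bool resultsRemaining = true;
  while (resultsRemaining) {
    final List<String> rows = await fetchEligibleBatch(batchSize);
    for (final String path in rows) {
      await removeRow(path);
      removed++;
    }
    // A short page means everything that qualified has been drained.
    resultsRemaining = rows.length == batchSize;
  }
  return removed;
}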
class SQLiteMutationQueue implements MutationQueue { /// Creates a mutation queue for the given user, in the SQLite database wrapped by the persistence interface. - SQLiteMutationQueue(this.db, this.serializer, this._statsCollector, User user) + SQLiteMutationQueue(this.db, this.serializer, User user) : uid = user.isAuthenticated ? user.uid : '', _lastStreamToken = Uint8List(0); final SQLitePersistence db; final LocalSerializer serializer; - final StatsCollector _statsCollector; /// The normalized uid (e.g. null => '') used in the uid column. final String uid; @@ -37,7 +36,7 @@ class SQLiteMutationQueue implements MutationQueue { Future start() async { await _loadNextBatchIdAcrossAllUsers(); final List> result = await db.query( - // @formatter:off + // @formatter:off ''' SELECT last_stream_token FROM mutation_queues @@ -79,7 +78,7 @@ class SQLiteMutationQueue implements MutationQueue { final List uids = []; final List> uidsRows = await db.query( - // @formatter:off + // @formatter:off ''' SELECT uid FROM mutation_queues; @@ -94,7 +93,7 @@ class SQLiteMutationQueue implements MutationQueue { _nextBatchId = 0; for (String uid in uids) { final List> result = await db.query( - // @formatter:off + // @formatter:off ''' SELECT MAX(batch_id) FROM mutations @@ -105,8 +104,7 @@ class SQLiteMutationQueue implements MutationQueue { for (Map row in result) { final int batchId = row['MAX(batch_id)']; - _nextBatchId = - batchId == null ? _nextBatchId : max(_nextBatchId, batchId); + _nextBatchId = batchId == null ? _nextBatchId : max(_nextBatchId, batchId); } } @@ -116,7 +114,7 @@ class SQLiteMutationQueue implements MutationQueue { @override Future isEmpty() async { return (await db.query( - // @formatter:off + // @formatter:off ''' SELECT batch_id FROM mutations @@ -128,8 +126,7 @@ class SQLiteMutationQueue implements MutationQueue { } @override - Future acknowledgeBatch( - MutationBatch batch, Uint8List streamToken) async { + Future acknowledgeBatch(MutationBatch batch, Uint8List streamToken) async { _lastStreamToken = checkNotNull(streamToken); await _writeMutationQueueMetadata(); } @@ -145,7 +142,7 @@ class SQLiteMutationQueue implements MutationQueue { Future _writeMutationQueueMetadata() async { await db.execute( - // @formatter:off + // @formatter:off ''' INSERT OR REPLACE INTO mutation_queues (uid, last_acknowledged_batch_id, last_stream_token) @@ -173,7 +170,7 @@ class SQLiteMutationQueue implements MutationQueue { final GeneratedMessage proto = serializer.encodeMutationBatch(batch); await db.execute( - // @formatter:off + // @formatter:off ''' INSERT INTO mutations (uid, batch_id, mutations) VALUES (?, ?, ?); @@ -187,7 +184,7 @@ class SQLiteMutationQueue implements MutationQueue { final Set inserted = {}; const String statement = - // @formatter:off + // @formatter:off ''' INSERT INTO document_mutations (uid, path, batch_id) VALUES (?, ?, ?); @@ -205,15 +202,13 @@ class SQLiteMutationQueue implements MutationQueue { await db.indexManager.addToCollectionParentIndex(key.path.popLast()); } - _statsCollector.recordRowsWritten(MutationQueue.statsTag, mutations.length); return batch; } @override Future lookupMutationBatch(int batchId) async { - _statsCollector.recordRowsRead(MutationQueue.statsTag, 1); final List> result = await db.query( - // @formatter:off + // @formatter:off ''' SELECT mutations FROM mutations @@ -232,11 +227,10 @@ class SQLiteMutationQueue implements MutationQueue { @override Future getNextMutationBatchAfterBatchId(int batchId) async { - 
_statsCollector.recordRowsRead(MutationQueue.statsTag, 1); final int _nextBatchId = batchId + 1; final List> result = await db.query( - // @formatter:off + // @formatter:off ''' SELECT mutations FROM mutations @@ -255,11 +249,26 @@ class SQLiteMutationQueue implements MutationQueue { } } + @override + Future getHighestUnacknowledgedBatchId() async { + final List> rows = await db.query( + // @formatter:off + ''' + SELECT IFNULL(MAX(batch_id), ?) as batch_id + FROM mutations + WHERE uid = ? + ''', + // @formatter:on + [MutationBatch.unknown, uid], + ); + return rows.first['batch_id']; + } + @override Future> getAllMutationBatches() async { final List result = []; final List> rows = await db.query( - // @formatter:off + // @formatter:off ''' SELECT mutations FROM mutations @@ -272,19 +281,16 @@ class SQLiteMutationQueue implements MutationQueue { for (Map row in rows) { result.add(decodeMutationBatch(row['mutations'])); } - - _statsCollector.recordRowsRead(MutationQueue.statsTag, rows.length); return result; } @override - Future> getAllMutationBatchesAffectingDocumentKey( - DocumentKey documentKey) async { + Future> getAllMutationBatchesAffectingDocumentKey(DocumentKey documentKey) async { final String path = EncodedPath.encode(documentKey.path); final List result = []; final List> rows = await db.query( - // @formatter:off + // @formatter:off ''' SELECT m.mutations FROM document_mutations dm, @@ -301,14 +307,11 @@ class SQLiteMutationQueue implements MutationQueue { for (Map row in rows) { result.add(decodeMutationBatch(row['mutations'])); } - - _statsCollector.recordRowsRead(MutationQueue.statsTag, rows.length); return result; } @override - Future> getAllMutationBatchesAffectingDocumentKeys( - Iterable documentKeys) async { + Future> getAllMutationBatchesAffectingDocumentKeys(Iterable documentKeys) async { final List args = []; for (DocumentKey key in documentKeys) { args.add(EncodedPath.encode(key.path)); @@ -321,12 +324,10 @@ class SQLiteMutationQueue implements MutationQueue { args, ') AND dm.uid = m.uid AND dm.batch_id = m.batch_id ORDER BY dm.batch_id'); - int rowsProcessed = 0; final List result = []; final Set uniqueBatchIds = {}; while (longQuery.hasMoreSubqueries) { - final List> rows = - await longQuery.performNextSubquery(); + final List> rows = await longQuery.performNextSubquery(); for (Map row in rows) { final int batchId = row['batch_id']; if (!uniqueBatchIds.contains(batchId)) { @@ -334,25 +335,19 @@ class SQLiteMutationQueue implements MutationQueue { result.add(decodeMutationBatch(row['mutations'])); } } - rowsProcessed += rows.length; } - _statsCollector.recordRowsRead(MutationQueue.statsTag, rowsProcessed); - // If more than one query was issued, batches might be in an unsorted order (batches are ordered within one query's // results, but not across queries). It's likely to be rare, so don't impose performance penalty on the normal case. 
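`getHighestUnacknowledgedBatchId` above leans on `IFNULL(MAX(batch_id), ?)` so an empty queue yields the sentinel instead of SQL NULL. A standalone sketch of the same query, written against `package:sqlite3` purely for illustration (an assumption; the real code goes through this project's own `Database` wrapper and uses `MutationBatch.unknown` as the sentinel):

import 'package:sqlite3/sqlite3.dart';

const int kUnknownBatchId = -1; // stand-in for MutationBatch.unknown

int highestUnacknowledgedBatchId(Database db, String uid) {
  final ResultSet rows = db.select(
    'SELECT IFNULL(MAX(batch_id), ?) AS batch_id '
    'FROM mutations WHERE uid = ?',
    <Object>[kUnknownBatchId, uid],
  );
  return rows.first['batch_id'] as int;
}

void main() {
  final Database db = sqlite3.openInMemory();
  db.execute(
      'CREATE TABLE mutations (uid TEXT, batch_id INTEGER, mutations BLOB)');
  print(highestUnacknowledgedBatchId(db, 'user1')); // -1: queue is empty
  db.execute('INSERT INTO mutations (uid, batch_id) VALUES (?, ?)',
      <Object>['user1', 7]);
  print(highestUnacknowledgedBatchId(db, 'user1')); // 7
  db.dispose();
}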
if (longQuery.subqueriesPerformed > 1) { - result.sort((MutationBatch lhs, MutationBatch rhs) => - lhs.batchId.compareTo(rhs.batchId)); + result.sort((MutationBatch lhs, MutationBatch rhs) => lhs.batchId.compareTo(rhs.batchId)); } return result; } @override - Future> getAllMutationBatchesAffectingQuery( - Query query) async { - hardAssert(!query.isCollectionGroupQuery, - 'CollectionGroup queries should be handled in LocalDocumentsView'); + Future> getAllMutationBatchesAffectingQuery(Query query) async { + hardAssert(!query.isCollectionGroupQuery, 'CollectionGroup queries should be handled in LocalDocumentsView'); // Use the query path as a prefix for testing if a document matches the query. final ResourcePath prefix = query.path; final int immediateChildrenPathLength = prefix.length + 1; @@ -374,7 +369,7 @@ class SQLiteMutationQueue implements MutationQueue { final List result = []; final List> rows = await db.query( - // @formatter:off + // @formatter:off ''' SELECT dm.batch_id, dm.path, m.mutations FROM document_mutations dm, @@ -409,14 +404,13 @@ class SQLiteMutationQueue implements MutationQueue { result.add(decodeMutationBatch(row['mutations'])); } - _statsCollector.recordRowsRead(MutationQueue.statsTag, rows.length); return result; } @override Future removeMutationBatch(MutationBatch batch) async { const String mutationDeleter = - // @formatter:off + // @formatter:off ''' DELETE FROM mutations @@ -426,7 +420,7 @@ class SQLiteMutationQueue implements MutationQueue { // @formatter:on const String indexDeleter = - // @formatter:off + // @formatter:off ''' DELETE FROM document_mutations @@ -437,11 +431,9 @@ class SQLiteMutationQueue implements MutationQueue { // @formatter:on final int batchId = batch.batchId; - final int deleted = - await db.delete(mutationDeleter, [uid, batchId]); + final int deleted = await db.delete(mutationDeleter, [uid, batchId]); - hardAssert( - deleted != 0, 'Mutation batch ($uid, ${batch.batchId}) did not exist'); + hardAssert(deleted != 0, 'Mutation batch ($uid, ${batch.batchId}) did not exist'); for (Mutation mutation in batch.mutations) { final DocumentKey key = mutation.key; @@ -449,9 +441,6 @@ class SQLiteMutationQueue implements MutationQueue { await db.execute(indexDeleter, [uid, path, batchId]); await db.referenceDelegate.removeMutationReference(key); } - - _statsCollector.recordRowsDeleted( - MutationQueue.statsTag, batch.mutations.length); } @override @@ -461,7 +450,7 @@ class SQLiteMutationQueue implements MutationQueue { // Verify that there are no entries in the document_mutations index if the queue is empty. 
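The query-affecting lookup above only accepts rows whose decoded path is exactly one segment longer than the query path, since only immediate children of a collection can match a non-collection-group query. The check in isolation, with paths modelled as plain segment lists:

/// True if [documentPath] is a direct child of the collection at [queryPath].
bool isImmediateChildOf(List<String> queryPath, List<String> documentPath) {
  if (documentPath.length != queryPath.length + 1) {
    return false; // a document in a subcollection, or the collection itself
  }
  for (int i = 0; i < queryPath.length; i++) {
    if (queryPath[i] != documentPath[i]) {
      return false;
    }
  }
  return true;
}

// isImmediateChildOf(['rooms'], ['rooms', 'eros'])               -> true
// isImmediateChildOf(['rooms'], ['rooms', 'eros', 'msgs', '1'])  -> false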
final List danglingMutationReferences = []; final List> rows = await db.query( - // @formatter:off + // @formatter:off ''' SELECT path FROM document_mutations diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/sqlite/sqlite_persistence.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/sqlite/sqlite_persistence.dart index 179305d5..8e7910bc 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/sqlite/sqlite_persistence.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/sqlite/sqlite_persistence.dart @@ -12,21 +12,21 @@ import 'package:_firebase_database_collection_vm/_firebase_database_collection_v import 'package:_firebase_internal_vm/_firebase_internal_vm.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/auth/user.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/core/index_range.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/core/listent_sequence.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/core/listen_sequence.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/core/query.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/core/target.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/local/encoded_path.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/local/index_cursor.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/local/local_serializer.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/lru_delegate.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/lru_garbage_collector.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistance/index_manager.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistance/mutation_queue.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistance/persistence.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistance/query_cache.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistance/reference_delegate.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistance/remote_document_cache.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistance/stats_collector.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/query_data.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/index_manager.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/lru_delegate.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/lru_garbage_collector.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/mutation_queue.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/persistence.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/reference_delegate.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/remote_document_cache.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/target_cache.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/target_data.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/local/reference_set.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/database_id.dart'; 
import 'package:cloud_firestore_vm/src/firebase/firestore/model/document.dart'; @@ -37,33 +37,36 @@ import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/mutatio import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/mutation_batch.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/resource_path.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/snapshot_version.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/model/value/field_value.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/util/assert.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/util/database.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/util/types.dart'; import 'package:cloud_firestore_vm/src/firebase/timestamp.dart'; -import 'package:cloud_firestore_vm/src/proto/index.dart' as proto; +import 'package:cloud_firestore_vm/src/proto/google/firestore/v1/index.dart' as proto hide Target; +import 'package:cloud_firestore_vm/src/proto/index.dart' as proto hide Value; import 'package:meta/meta.dart'; import 'package:protobuf/protobuf.dart'; import 'package:semaphore/semaphore.dart'; part 'sqlite_collection_index.dart'; + part 'sqlite_index_manager.dart'; + part 'sqlite_lru_reference_delegate.dart'; + part 'sqlite_mutation_queue.dart'; -part 'sqlite_query_cache.dart'; + part 'sqlite_remote_document_cache.dart'; + part 'sqlite_schema.dart'; +part 'sqlite_target_cache.dart'; + /// A SQLite-backed instance of Persistence. /// /// In addition to implementations of the methods in the Persistence interface, also contains helper /// routines that make dealing with SQLite much more pleasant. class SQLitePersistence extends Persistence { - SQLitePersistence._(this.serializer, this.openDatabase, this.databaseName, - StatsCollector statsCollector) - : _statsCollector = statsCollector ?? 
StatsCollector.noOp, - _semaphore = GlobalSemaphore() { + SQLitePersistence._(this.serializer, this.openDatabase, this.databaseName) : _semaphore = GlobalSemaphore() { indexManager = SqliteIndexManager(this); } @@ -72,7 +75,6 @@ class SQLitePersistence extends Persistence { final OpenDatabase openDatabase; final String databaseName; final LocalSerializer serializer; - final StatsCollector _statsCollector; final Semaphore _semaphore; Database _db; @@ -81,7 +83,7 @@ class SQLitePersistence extends Persistence { bool started = false; @override - SQLiteQueryCache queryCache; + SQLiteTargetCache targetCache; @override SqliteIndexManager indexManager; @@ -115,26 +117,22 @@ class SQLitePersistence extends Persistence { } static Future create( - String persistenceKey, - DatabaseId databaseId, - LocalSerializer serializer, - OpenDatabase openDatabase, - LruGarbageCollectorParams params, - [StatsCollector statsCollector = StatsCollector.noOp]) async { + String persistenceKey, + DatabaseId databaseId, + LocalSerializer serializer, + OpenDatabase openDatabase, + LruGarbageCollectorParams params, + ) async { final String databaseName = sDatabaseName(persistenceKey, databaseId); - final SQLitePersistence persistence = SQLitePersistence._( - serializer, openDatabase, databaseName, statsCollector); + final SQLitePersistence persistence = SQLitePersistence._(serializer, openDatabase, databaseName); - final SQLiteQueryCache queryCache = - SQLiteQueryCache(persistence, serializer); - final SQLiteRemoteDocumentCache remoteDocumentCache = - SQLiteRemoteDocumentCache(persistence, serializer, statsCollector); - final SQLiteLruReferenceDelegate referenceDelegate = - SQLiteLruReferenceDelegate(persistence, params); + final SQLiteTargetCache targetCache = SQLiteTargetCache(persistence, serializer); + final SQLiteRemoteDocumentCache remoteDocumentCache = SQLiteRemoteDocumentCache(persistence, serializer); + final SQLiteLruReferenceDelegate referenceDelegate = SQLiteLruReferenceDelegate(persistence, params); return persistence - ..queryCache = queryCache + ..targetCache = targetCache ..remoteDocumentCache = remoteDocumentCache ..referenceDelegate = referenceDelegate; } @@ -155,19 +153,21 @@ class SQLitePersistence extends Persistence { @override Future start() async { await _semaphore.acquire(); - Log.d(tag, 'Starting SQLite persistance'); + Log.d(tag, 'Starting SQLite persistence'); hardAssert(!started, 'SQLitePersistence double-started!'); - _db = await _openDb(databaseName, openDatabase); - await queryCache.start(); + + _db = await _openDb(databaseName, serializer, openDatabase); + + await targetCache.start(); started = true; - referenceDelegate.start(queryCache.highestListenSequenceNumber); + referenceDelegate.start(targetCache.highestListenSequenceNumber); _semaphore.release(); } @override Future shutdown() async { await _semaphore.acquire(); - Log.d(tag, 'Shutingdown SQLite persistance'); + Log.d(tag, 'Shutdown SQLite persistence'); hardAssert(started, 'SQLitePersistence shutdown without start!'); started = false; @@ -181,18 +181,16 @@ class SQLitePersistence extends Persistence { @override MutationQueue getMutationQueue(User user) { - return SQLiteMutationQueue(this, serializer, _statsCollector, user); + return SQLiteMutationQueue(this, serializer, user); } @override - Future runTransaction( - String action, Transaction operation) async { + Future runTransaction(String action, Transaction operation) async { return runTransactionAndReturn(action, operation); } @override - Future runTransactionAndReturn( - 
String action, Transaction operation) async { + Future runTransactionAndReturn(String action, Transaction operation) async { await _semaphore.acquire(); Log.d(tag, 'Starting transaction: $action'); @@ -216,8 +214,7 @@ class SQLitePersistence extends Persistence { return _db.execute(statement, args); } - Future>> query(String statement, - [List args]) { + Future>> query(String statement, [List args]) { return _db.query(statement, args); } @@ -236,8 +233,7 @@ class SQLitePersistence extends Persistence { /// /// This attempts to obtain exclusive access to the database and attempts to do so as early as possible. /// ^^^ todo: this breaks flutter hot reload - static Future _openDb( - String databaseName, OpenDatabase openDatabase) async { + static Future _openDb(String databaseName, LocalSerializer serializer, OpenDatabase openDatabase) async { bool configured = false; /// Ensures that onConfigure has been called. This should be called first from all methods. @@ -255,11 +251,11 @@ class SQLitePersistence extends Persistence { onConfigure: ensureConfigured, onCreate: (Database db, int version) async { await ensureConfigured(db); - await SQLiteSchema(db).runMigrations(0); + await SQLiteSchema(db, serializer).runMigrations(0); }, onUpgrade: (Database db, int fromVersion, int toVersion) async { await ensureConfigured(db); - await SQLiteSchema(db).runMigrations(fromVersion); + await SQLiteSchema(db, serializer).runMigrations(fromVersion); }, onDowngrade: (Database db, int fromVersion, int toVersion) async { await ensureConfigured(db); @@ -326,8 +322,7 @@ class LongQuery { /// subqueries take the form: /// /// [_head][_argsHead][an auto-generated comma-separated list of '?' placeholders][_tail] - LongQuery(this._db, this._head, List argsHead, - List argsIter, this._tail) + LongQuery(this._db, this._head, List argsHead, List argsIter, this._tail) : _argsIter = argsIter, _argsHead = argsHead ?? 
[], _subqueriesPerformed = 0; @@ -365,9 +360,7 @@ class LongQuery { final List subqueryArgs = List.from(_argsHead); final StringBuffer placeholdersBuilder = StringBuffer(); - for (int i = 0; - j < _argsIter.length && i < _limit - _argsHead.length; - i++) { + for (int i = 0; j < _argsIter.length && i < _limit - _argsHead.length; i++) { if (i > 0) { placeholdersBuilder.write(', '); } diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/sqlite/sqlite_remote_document_cache.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/sqlite/sqlite_remote_document_cache.dart index 3314e5a4..86a6fcd8 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/sqlite/sqlite_remote_document_cache.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/sqlite/sqlite_remote_document_cache.dart @@ -5,40 +5,47 @@ part of sqlite_persistence; class SQLiteRemoteDocumentCache implements RemoteDocumentCache { - SQLiteRemoteDocumentCache(this.db, this.serializer, this._statsCollector); + SQLiteRemoteDocumentCache(this.db, this.serializer); final SQLitePersistence db; final LocalSerializer serializer; - final StatsCollector _statsCollector; @override - Future add(MaybeDocument maybeDocument) async { + Future add(MaybeDocument maybeDocument, SnapshotVersion readTime) async { + hardAssert( + readTime != SnapshotVersion.none, + 'Cannot add document to the RemoteDocumentCache with a read time of zero', + ); + final String path = _pathForKey(maybeDocument.key); - final GeneratedMessage message = - serializer.encodeMaybeDocument(maybeDocument); + final Timestamp timestamp = readTime.timestamp; + final GeneratedMessage message = serializer.encodeMaybeDocument(maybeDocument); - _statsCollector.recordRowsWritten(RemoteDocumentCache.statsTag, 1); await db.execute( - // @formatter:off + // @formatter:off ''' INSERT - OR REPLACE INTO remote_documents (path, contents) - VALUES (?, ?); + OR REPLACE INTO remote_documents (path, read_time_seconds, read_time_nanos, contents) + VALUES (?, ?, ?, ?); ''', - // @formatter:on - [path, message.writeToBuffer()]); - - await db.indexManager - .addToCollectionParentIndex(maybeDocument.key.path.popLast()); + // @formatter:on + [ + path, + timestamp.seconds, + timestamp.nanoseconds, + message.writeToBuffer(), + ], + ); + + await db.indexManager.addToCollectionParentIndex(maybeDocument.key.path.popLast()); } @override Future remove(DocumentKey documentKey) async { final String path = _pathForKey(documentKey); - _statsCollector.recordRowsDeleted(RemoteDocumentCache.statsTag, 1); await db.execute( - // @formatter:off + // @formatter:off ''' DELETE FROM remote_documents @@ -52,9 +59,8 @@ class SQLiteRemoteDocumentCache implements RemoteDocumentCache { Future get(DocumentKey documentKey) async { final String path = _pathForKey(documentKey); - _statsCollector.recordRowsRead(RemoteDocumentCache.statsTag, 1); final List> result = await db.query( - // @formatter:off + // @formatter:off ''' SELECT contents FROM remote_documents @@ -72,47 +78,38 @@ class SQLiteRemoteDocumentCache implements RemoteDocumentCache { } @override - Future> getAll( - Iterable documentKeys) async { + Future> getAll(Iterable documentKeys) async { final List args = []; for (DocumentKey key in documentKeys) { args.add(EncodedPath.encode(key.path)); } - final Map results = - {}; + final Map results = {}; for (DocumentKey key in documentKeys) { // Make sure each key has a corresponding entry, which is null in case the document is not found. 
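The add() override above now persists each document's read time as separate read_time_seconds and read_time_nanos columns, and the matching-query path later filters on them with read_time_seconds > ? OR (read_time_seconds = ? AND read_time_nanos > ?). A small, hypothetical sketch of the (seconds, nanos) comparison that predicate encodes:

// Hypothetical stand-in for a read time split into whole seconds and a
// nanosecond remainder, exactly as it is persisted in the two
// remote_documents columns added by this patch.
class ReadTime {
  const ReadTime(this.seconds, this.nanos);

  final int seconds;
  final int nanos;

  // True when this read time is strictly later than [other]; this is the
  // Dart equivalent of `seconds > ? OR (seconds = ? AND nanos > ?)`.
  bool isAfter(ReadTime other) =>
      seconds > other.seconds ||
      (seconds == other.seconds && nanos > other.nanos);
}

void main() {
  const ReadTime sinceReadTime = ReadTime(100, 500);
  final List<ReadTime> documentReadTimes = <ReadTime>[
    const ReadTime(99, 999999999),
    const ReadTime(100, 500),
    const ReadTime(100, 501),
    const ReadTime(101, 0),
  ];

  // Only documents written after the query's last read time survive, which
  // is what makes the index-free query path possible.
  final List<ReadTime> updated = documentReadTimes
      .where((ReadTime t) => t.isAfter(sinceReadTime))
      .toList();
  print('${updated.length} documents updated since read time'); // 2
}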
results[key] = null; } - final LongQuery longQuery = LongQuery( - db, - 'SELECT contents FROM remote_documents WHERE path IN (', - null, - args, - ') ORDER BY path'); + final LongQuery longQuery = + LongQuery(db, 'SELECT contents FROM remote_documents WHERE path IN (', null, args, ') ORDER BY path'); - int rowsProcessed = 0; while (longQuery.hasMoreSubqueries) { - final List> rows = - await longQuery.performNextSubquery(); + final List> rows = await longQuery.performNextSubquery(); for (Map row in rows) { final MaybeDocument decoded = decodeMaybeDocument(row['contents']); results[decoded.key] = decoded; } - rowsProcessed += rows.length; } - _statsCollector.recordRowsRead(RemoteDocumentCache.statsTag, rowsProcessed); return results; } @override - Future> - getAllDocumentsMatchingQuery(Query query) async { - hardAssert(!query.isCollectionGroupQuery, - 'CollectionGroup queries should be handled in LocalDocumentsView'); + Future> getAllDocumentsMatchingQuery( + Query query, + SnapshotVersion sinceReadTime, + ) async { + hardAssert(!query.isCollectionGroupQuery, 'CollectionGroup queries should be handled in LocalDocumentsView'); // Use the query path as a prefix for testing if a document matches the query. final ResourcePath prefix = query.path; @@ -120,19 +117,37 @@ class SQLiteRemoteDocumentCache implements RemoteDocumentCache { final String prefixPath = EncodedPath.encode(prefix); final String prefixSuccessorPath = EncodedPath.prefixSuccessor(prefixPath); + final Timestamp readTime = sinceReadTime.timestamp; final Map results = {}; - final List> result = await db.query( + List> result; + if (sinceReadTime == SnapshotVersion.none) { + result = await db.query( // @formatter:off - ''' + ''' SELECT path, contents FROM remote_documents WHERE path >= ? AND path < ?; ''', // @formatter:on - [prefixPath, prefixSuccessorPath]); + [prefixPath, prefixSuccessorPath], + ); + } else { + result = await db.query( + // @formatter:off + ''' + SELECT path, contents + FROM remote_documents + WHERE path >= ? + AND path < ? + AND (read_time_seconds > ? OR (read_time_seconds = ? 
AND read_time_nanos > ?)); + ''', + // @formatter:on + [prefixPath, prefixSuccessorPath, readTime.seconds, readTime.seconds, readTime.nanoseconds], + ); + } for (Map row in result) { // TODO(long1eu): Actually implement a single-collection query @@ -161,9 +176,7 @@ class SQLiteRemoteDocumentCache implements RemoteDocumentCache { results[doc.key] = doc; } - _statsCollector.recordRowsRead(RemoteDocumentCache.statsTag, result.length); - return ImmutableSortedMap.fromMap( - results, DocumentKey.comparator); + return ImmutableSortedMap.fromMap(results, DocumentKey.comparator); } String _pathForKey(DocumentKey key) { @@ -172,8 +185,7 @@ class SQLiteRemoteDocumentCache implements RemoteDocumentCache { MaybeDocument decodeMaybeDocument(Uint8List bytes) { try { - return serializer - .decodeMaybeDocument(proto.MaybeDocument.fromBuffer(bytes)); + return serializer.decodeMaybeDocument(proto.MaybeDocument.fromBuffer(bytes)); } on InvalidProtocolBufferException catch (e) { throw fail('MaybeDocument failed to parse: $e'); } diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/sqlite/sqlite_schema.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/sqlite/sqlite_schema.dart index e5378c18..342f9bae 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/sqlite/sqlite_schema.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/sqlite/sqlite_schema.dart @@ -13,25 +13,25 @@ part of sqlite_persistence; /// changes can be made to the schema by adding a new migration method, bumping the [version], and adding a call to the /// migration method from [runMigrations]. class SQLiteSchema { - const SQLiteSchema(this.db); + const SQLiteSchema(this.db, this.serializer); /// The version of the schema. Increase this by one for each migration added to [runMigrations] below. - static const int version = 8; + static const int version = 11; // Remove this constant and increment version to enable indexing support static const int indexingSupportVersion = version + 1; final Database db; + final LocalSerializer serializer; /// Runs the migration methods defined in this class, starting at the given version. - Future runMigrations( - [int fromVersion = 0, int toVersion = version]) async { + Future runMigrations([int fromVersion = 0, int toVersion = version]) async { // New migrations should be added at the end of the series of `if` statements and should follow the pattern. Make // sure to increment `VERSION` and to read the comment below about requirements for new migrations. if (fromVersion < 1 && toVersion >= 1) { await _createV1MutationQueue(); - await _createV1QueryCache(); + await _createV1TargetCache(); await _createV1RemoteDocumentCache(); } @@ -40,8 +40,8 @@ class SQLiteSchema { if (fromVersion < 3 && toVersion >= 3) { // Brand new clients don't need to drop and recreate--only clients that have potentially corrupt data. if (fromVersion != 0) { - await _dropV1QueryCache(); - await _createV1QueryCache(); + await _dropV1TargetCache(); + await _createV1TargetCache(); } } @@ -66,6 +66,31 @@ class SQLiteSchema { await _createV8CollectionParentsIndex(); } + if (fromVersion < 9 && toVersion >= 9) { + if (!await _hasReadTime()) { + await _addReadTime(); + } else { + // Index-free queries rely on the fact that documents updated after a query's last limbo + // free snapshot version are persisted with their read-time. 
If a customer upgrades to + // schema version 9, downgrades and then upgrades again, some queries may have a last limbo + // free snapshot version despite the fact that not all updated document have an associated + // read time. + await _dropLastLimboFreeSnapshotVersion(); + } + } + + if (fromVersion == 9 && toVersion >= 10) { + // Firestore v21.10 contained a regression that led us to disable an assert that is required + // to ensure data integrity. While the schema did not change between version 9 and 10, we use + // the schema bump to version 10 to clear any affected data. + await _dropLastLimboFreeSnapshotVersion(); + } + + if (fromVersion < 11 && toVersion >= 11) { + // Schema version 11 changed the format of canonical IDs in the target cache. + await _rewriteCanonicalIds(); + } + // Adding a new migration? READ THIS FIRST! // // Be aware that the SDK version may be downgraded then re-upgraded. This means that running your new migration must @@ -77,8 +102,7 @@ class SQLiteSchema { // from later versions, so migrations that update values cannot assume that existing values have been properly // maintained. Calculate them again, if applicable. - if (fromVersion < indexingSupportVersion && - toVersion >= indexingSupportVersion) { + if (fromVersion < indexingSupportVersion && toVersion >= indexingSupportVersion) { if (Persistence.indexingSupportEnabled) { await _createLocalDocumentsCollectionIndex(); } @@ -89,8 +113,7 @@ class SQLiteSchema { /// exist. Use this method to create a set of tables at once. /// /// If some but not all of the tables exist, an exception will be thrown. - Future _ifTablesDontExist( - List tables, Future Function() fn) async { + Future _ifTablesDontExist(List tables, Future Function() fn) async { bool tablesFound = false; final String allTables = '[${tables.join(', ')}]'; for (int i = 0; i < tables.length; i++) { @@ -99,8 +122,7 @@ class SQLiteSchema { if (i == 0) { tablesFound = tableFound; } else if (tableFound != tablesFound) { - final StringBuffer msg = StringBuffer( - 'Expected all of $allTables to either exist or not, but '); + final StringBuffer msg = StringBuffer('Expected all of $allTables to either exist or not, but '); if (tablesFound) { msg.write('${tables[0]} exists and $table does not'); } else { @@ -112,18 +134,15 @@ class SQLiteSchema { if (!tablesFound) { return fn(); } else { - Log.d('SQLiteSchema', - 'Skipping migration because all of $allTables already exist'); + Log.d('SQLiteSchema', 'Skipping migration because all of $allTables already exist'); } } Future _createV1MutationQueue() async { - return _ifTablesDontExist( - ['mutation_queues', 'mutations', 'document_mutations'], - () async { + return _ifTablesDontExist(['mutation_queues', 'mutations', 'document_mutations'], () async { // A table naming all the mutation queues in the system. await db.query( - // @formatter:off + // @formatter:off ''' CREATE TABLE mutation_queues ( uid TEXT PRIMARY KEY, @@ -136,7 +155,7 @@ class SQLiteSchema { // All the mutation batches in the system, partitioned by user. await db.query( - // @formatter:off + // @formatter:off ''' CREATE TABLE mutations ( uid TEXT, @@ -151,7 +170,7 @@ class SQLiteSchema { // A manually maintained index of all the mutation batches that affect a given document key. The rows in this // table are references based on the contents of mutations.mutations. 
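The new migration steps above follow runMigrations' version-gated pattern: a step runs only when fromVersion is below its number and toVersion is at or above it, so an upgrade replays exactly the missing steps and nothing else. A hypothetical, dependency-free sketch of that gating (not the SDK's schema code):

// Hypothetical illustration of version-gated migrations: each step runs at
// most once per upgrade path and must stay safe to re-run after a
// downgrade/upgrade cycle, as the comments in runMigrations require.
typedef Migration = Future<void> Function();

Future<void> runSchemaMigrations(
  List<MapEntry<int, Migration>> steps, // (version, step), sorted ascending
  int fromVersion,
  int toVersion,
) async {
  for (final MapEntry<int, Migration> step in steps) {
    if (fromVersion < step.key && toVersion >= step.key) {
      await step.value();
    }
  }
}

Future<void> main() async {
  await runSchemaMigrations(
    <MapEntry<int, Migration>>[
      MapEntry<int, Migration>(9, () async => print('add read_time columns')),
      MapEntry<int, Migration>(10, () async => print('clear last limbo-free snapshot versions')),
      MapEntry<int, Migration>(11, () async => print('rewrite canonical ids')),
    ],
    8, // fromVersion: the client last ran schema version 8
    11, // toVersion: the current schema version
  );
}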
await db.query( - // @formatter:off + // @formatter:off ''' CREATE TABLE document_mutations ( uid TEXT, @@ -167,16 +186,15 @@ class SQLiteSchema { /// Note: as of this migration, [last_acknowledged_batch_id] is no longer used by the code. Future _removeAcknowledgedMutations() async { - final List> data = await db - .query('SELECT uid, last_acknowledged_batch_id FROM mutation_queues'); + final List> data = + await db.query('SELECT uid, last_acknowledged_batch_id FROM mutation_queues'); for (Map row in data) { final String uid = row['uid']; final int lastAcknowledgedBatchId = row['last_acknowledged_batch_id']; final List> rows = await db.query( - 'SELECT batch_id FROM mutations WHERE uid = ? AND batch_id <= ?', - [uid, lastAcknowledgedBatchId]); + 'SELECT batch_id FROM mutations WHERE uid = ? AND batch_id <= ?', [uid, lastAcknowledgedBatchId]); for (Map row in rows) { await _removeMutationBatch(uid, row['batch_id']); @@ -185,23 +203,19 @@ class SQLiteSchema { } Future _removeMutationBatch(String uid, int batchId) async { - final int deleted = await db.delete( - 'DELETE FROM mutations WHERE uid = ? AND batch_id = ?', - [uid, batchId]); + final int deleted = + await db.delete('DELETE FROM mutations WHERE uid = ? AND batch_id = ?', [uid, batchId]); hardAssert(deleted != 0, 'Mutation batch ($uid, $batchId) did not exist'); // Delete all index entries for this batch - return db.delete( - 'DELETE FROM document_mutations WHERE uid = ? AND batch_id = ?', - [uid, batchId]); + return db.delete('DELETE FROM document_mutations WHERE uid = ? AND batch_id = ?', [uid, batchId]); } - Future _createV1QueryCache() async { - return _ifTablesDontExist( - ['targets', 'target_globals', 'target_documents'], () async { + Future _createV1TargetCache() async { + return _ifTablesDontExist(['targets', 'target_globals', 'target_documents'], () async { // A cache of targets and associated metadata await db.query( - // @formatter:off + // @formatter:off ''' CREATE TABLE targets ( target_id INTEGER PRIMARY KEY, @@ -217,7 +231,7 @@ class SQLiteSchema { ); await db.query( - // @formatter:off + // @formatter:off ''' CREATE INDEX query_targets ON targets (canonical_id, target_id); @@ -227,7 +241,7 @@ class SQLiteSchema { // Global state tracked across all queries, tracked separately await db.query( - // @formatter:off + // @formatter:off ''' CREATE TABLE target_globals ( highest_target_id INTEGER, @@ -241,7 +255,7 @@ class SQLiteSchema { // A Mapping table between targets, document paths await db.query( - // @formatter:off + // @formatter:off ''' CREATE TABLE target_documents ( target_id INTEGER, @@ -254,7 +268,7 @@ class SQLiteSchema { // The document_targets reverse mapping table is just an index on target_documents. await db.query( - // @formatter:off + // @formatter:off ''' CREATE INDEX document_targets ON target_documents (path, target_id); @@ -264,7 +278,7 @@ class SQLiteSchema { }); } - Future _dropV1QueryCache() async { + Future _dropV1TargetCache() async { // This might be overkill, but if any future migration drops these, it's possible we could try dropping tables that // don't exist. if (await _tableExists('targets')) { @@ -282,7 +296,7 @@ class SQLiteSchema { return _ifTablesDontExist(['remote_documents'], () async { // A cache of documents obtained from the server. 
await db.query( - // @formatter:off + // @formatter:off ''' CREATE TABLE remote_documents ( path TEXT PRIMARY KEY, @@ -302,7 +316,7 @@ class SQLiteSchema { return _ifTablesDontExist(['collection_index'], () async { // A per-user, per-collection index for cached documents indexed by a single field's name and value. await db.query( - // @formatter:off + // @formatter:off ''' CREATE TABLE collection_index ( uid TEXT, @@ -325,7 +339,7 @@ class SQLiteSchema { if (!targetGlobalExists) { await db.execute( - // @formatter:off + // @formatter:off ''' INSERT INTO target_globals (highest_target_id, highest_listen_sequence_number, last_remote_snapshot_version_seconds, @@ -339,13 +353,12 @@ class SQLiteSchema { Future _addTargetCount() async { if (!(await _tableContainsColumn('target_globals', 'target_count'))) { - await db.execute( - 'ALTER TABLE target_globals ADD COLUMN target_count INTEGER'); + await db.execute('ALTER TABLE target_globals ADD COLUMN target_count INTEGER'); } // Even if the column already existed, rerun the data migration to make sure it's correct. final int count = await _rowNumber('targets'); return db.execute( - // @formatter:off + // @formatter:off ''' UPDATE target_globals SET target_count=? @@ -356,8 +369,41 @@ class SQLiteSchema { Future _addSequenceNumber() async { if (!(await _tableContainsColumn('target_documents', 'sequence_number'))) { - return db.execute( - 'ALTER TABLE target_documents ADD COLUMN sequence_number INTEGER'); + return db.execute('ALTER TABLE target_documents ADD COLUMN sequence_number INTEGER'); + } + } + + Future _hasReadTime() async { + final bool hasReadTimeSeconds = await _tableContainsColumn('remote_documents', 'read_time_seconds'); + final bool hasReadTimeNanos = await _tableContainsColumn('remote_documents', 'read_time_nanos'); + + hardAssert( + hasReadTimeSeconds == hasReadTimeNanos, + 'Table contained just one of read_time_seconds or read_time_nanos', + ); + + return hasReadTimeSeconds && hasReadTimeNanos; + } + + Future _addReadTime() async { + await db.execute('ALTER TABLE remote_documents ADD COLUMN read_time_seconds INTEGER'); + await db.execute('ALTER TABLE remote_documents ADD COLUMN read_time_nanos INTEGER'); + } + + Future _dropLastLimboFreeSnapshotVersion() async { + final List> rows = await db.query('SELECT target_id, target_proto FROM targets'); + + for (Map row in rows) { + final int targetId = row['target_id']; + final Uint8List targetProtoBytes = Uint8List.fromList(row['target_proto']); + + final proto.Target targetProto = proto.Target.fromBuffer(targetProtoBytes) // + ..clearLastLimboFreeSnapshotVersion(); + + await db.execute( + 'UPDATE targets SET target_proto = ? 
WHERE target_id = ?', + [targetProto.writeToBuffer(), targetId], + ); } } @@ -367,7 +413,7 @@ class SQLiteSchema { Future _ensureSequenceNumbers() async { // Get the current highest sequence number final List> sequenceNumberQuery = await db.query( - // @formatter:off + // @formatter:off ''' SELECT highest_listen_sequence_number FROM target_globals @@ -375,12 +421,11 @@ class SQLiteSchema { ''' // @formatter:on ); - final int sequenceNumber = - sequenceNumberQuery.first['highest_listen_sequence_number']; + final int sequenceNumber = sequenceNumberQuery.first['highest_listen_sequence_number']; assert(sequenceNumber != null, 'Missing highest sequence number'); final List> untaggedDocumentsQuery = await db.query( - // @formatter:off + // @formatter:off ''' SELECT RD.path FROM remote_documents AS RD @@ -419,7 +464,7 @@ class SQLiteSchema { // parent path will be an empty path in the case of root-level // collections. await db.execute( - // @formatter:off + // @formatter:off ''' CREATE TABLE collection_parents( collection_id TEXT, @@ -455,7 +500,7 @@ class SQLiteSchema { // Index existing remote documents. final List> remoteDocumentsQuery = await db.query( - // @formatter:off + // @formatter:off ''' SELECT path FROM remote_documents; @@ -470,7 +515,7 @@ class SQLiteSchema { // Index existing mutations. final List> documentMutationsQuery = await db.query( - // @formatter:off + // @formatter:off ''' SELECT path FROM document_mutations; @@ -484,6 +529,20 @@ class SQLiteSchema { } } + Future _rewriteCanonicalIds() async { + final List> rows = await db.query('SELECT target_id, target_proto FROM targets'); + for (final Map row in rows) { + final int targetId = row['target_id']; + final Uint8List targetProtoBytes = Uint8List.fromList(row['target_proto']); + + final proto.Target targetProto = proto.Target.fromBuffer(targetProtoBytes); + final TargetData targetData = serializer.decodeTargetData(targetProto); + final String updatedCanonicalId = targetData.target.canonicalId; + await db + .execute('UPDATE targets SET canonical_id = ? 
WHERE target_id = ?', [updatedCanonicalId, targetId]); + } + } + Future _tableContainsColumn(String table, String column) async { final List columns = await getTableColumns(table); return columns.contains(column); @@ -491,19 +550,17 @@ class SQLiteSchema { @visibleForTesting Future> getTableColumns(String table) async { - final List> data = - await db.query('PRAGMA table_info($table);'); + final List> data = await db.query('PRAGMA table_info($table);'); return data.map((Map row) => row['name']).toList(); } Future _tableExists(String table) async { - final List> data = await db.query( - 'SELECT 1=1 FROM sqlite_master WHERE tbl_name = ?', [table]); + final List> data = + await db.query('SELECT 1=1 FROM sqlite_master WHERE tbl_name = ?', [table]); return data.isNotEmpty; } Future _rowNumber(String tableName) async { - return (await db.query('SELECT Count(*) as count FROM $tableName;')) - .first['count']; + return (await db.query('SELECT Count(*) as count FROM $tableName;')).first['count']; } } diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/sqlite/sqlite_query_cache.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/sqlite/sqlite_target_cache.dart similarity index 78% rename from cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/sqlite/sqlite_query_cache.dart rename to cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/sqlite/sqlite_target_cache.dart index faec6695..7978c39c 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/sqlite/sqlite_query_cache.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/local/sqlite/sqlite_target_cache.dart @@ -5,8 +5,8 @@ part of sqlite_persistence; /// Cached Queries backed by SQLite. -class SQLiteQueryCache implements QueryCache { - SQLiteQueryCache(this._db, this._localSerializer); +class SQLiteTargetCache implements TargetCache { + SQLiteTargetCache(this._db, this._localSerializer); final SQLitePersistence _db; final LocalSerializer _localSerializer; @@ -25,7 +25,7 @@ class SQLiteQueryCache implements QueryCache { Future start() async { // Store exactly one row in the table. If the row exists at all, it's the global metadata. 
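As the comment above notes, target_globals holds exactly one row; the cache keeps that state in memory and rewrites the whole row whenever the highest target id or listen sequence number moves forward (see _updateMetadata and _writeMetadata below). A hypothetical sketch of that bookkeeping:

// Hypothetical sketch of the single-row metadata pattern: the global values
// are cached in memory and persisted as one UPDATE only when something
// actually moved forward.
class TargetGlobals {
  int highestTargetId = 0;
  int highestListenSequenceNumber = 0;

  // Returns true when [targetId] or [sequenceNumber] pushed the cached
  // metadata forward, meaning the single row needs to be rewritten.
  bool update(int targetId, int sequenceNumber) {
    bool wasUpdated = false;
    if (targetId > highestTargetId) {
      highestTargetId = targetId;
      wasUpdated = true;
    }
    if (sequenceNumber > highestListenSequenceNumber) {
      highestListenSequenceNumber = sequenceNumber;
      wasUpdated = true;
    }
    return wasUpdated;
  }
}

void main() {
  final TargetGlobals globals = TargetGlobals();
  print(globals.update(4, 12)); // true  -> write the metadata row
  print(globals.update(3, 10)); // false -> nothing to persist
}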
final List> result = await _db.query( - // @formatter:off + // @formatter:off ''' SELECT highest_target_id, highest_listen_sequence_number, @@ -54,9 +54,9 @@ class SQLiteQueryCache implements QueryCache { int get highestListenSequenceNumber => _lastListenSequenceNumber; @override - Future forEachTarget(Consumer consumer) async { + Future forEachTarget(Consumer consumer) async { final List> result = await _db.query( - // @formatter:off + // @formatter:off ''' SELECT target_proto FROM targets; @@ -66,28 +66,26 @@ class SQLiteQueryCache implements QueryCache { for (Map row in result) { final Uint8List targetProto = row['target_proto']; - final QueryData queryData = _decodeQueryData(targetProto); - consumer(queryData); + final TargetData targetData = _decodeTargetData(targetProto); + consumer(targetData); } } @override - Future setLastRemoteSnapshotVersion( - SnapshotVersion snapshotVersion) async { + Future setLastRemoteSnapshotVersion(SnapshotVersion snapshotVersion) async { lastRemoteSnapshotVersion = snapshotVersion; await _writeMetadata(); } - Future _saveQueryData(QueryData queryData) async { - final int targetId = queryData.targetId; - final String canonicalId = queryData.query.canonicalId; - final Timestamp version = queryData.snapshotVersion.timestamp; + Future _saveTargetData(TargetData targetData) async { + final int targetId = targetData.targetId; + final String canonicalId = targetData.target.canonicalId; + final Timestamp version = targetData.snapshotVersion.timestamp; - final proto.Target targetProto = - _localSerializer.encodeQueryData(queryData); + final proto.Target targetProto = _localSerializer.encodeTargetData(targetData); await _db.execute( - // @formatter:off + // @formatter:off ''' INSERT OR REPLACE INTO targets (target_id, @@ -105,22 +103,22 @@ class SQLiteQueryCache implements QueryCache { canonicalId, version.seconds, version.nanoseconds, - queryData.resumeToken, - queryData.sequenceNumber, + targetData.resumeToken, + targetData.sequenceNumber, targetProto.writeToBuffer() ]); } - bool _updateMetadata(QueryData queryData) { + bool _updateMetadata(TargetData targetData) { bool wasUpdated = false; - if (queryData.targetId > highestTargetId) { - highestTargetId = queryData.targetId; + if (targetData.targetId > highestTargetId) { + highestTargetId = targetData.targetId; wasUpdated = true; } - if (queryData.sequenceNumber > _lastListenSequenceNumber) { - _lastListenSequenceNumber = queryData.sequenceNumber; + if (targetData.sequenceNumber > _lastListenSequenceNumber) { + _lastListenSequenceNumber = targetData.sequenceNumber; wasUpdated = true; } @@ -128,26 +126,26 @@ class SQLiteQueryCache implements QueryCache { } @override - Future addQueryData(QueryData queryData) async { - await _saveQueryData(queryData); + Future addTargetData(TargetData targetData) async { + await _saveTargetData(targetData); // PORTING NOTE: The query_targets index is maintained by SQLite. 
- _updateMetadata(queryData); + _updateMetadata(targetData); targetCount++; await _writeMetadata(); } @override - Future updateQueryData(QueryData queryData) async { - await _saveQueryData(queryData); + Future updateTargetData(TargetData targetData) async { + await _saveTargetData(targetData); - if (_updateMetadata(queryData)) { + if (_updateMetadata(targetData)) { await _writeMetadata(); } } Future _writeMetadata() async { await _db.execute( - // @formatter:off + // @formatter:off ''' UPDATE target_globals SET highest_target_id = ?, @@ -170,7 +168,7 @@ class SQLiteQueryCache implements QueryCache { Future _removeTarget(int targetId) async { await _removeMatchingKeysForTargetId(targetId); await _db.execute( - // @formatter:off + // @formatter:off ''' DELETE FROM targets @@ -182,8 +180,8 @@ class SQLiteQueryCache implements QueryCache { } @override - Future removeQueryData(QueryData queryData) async { - final int targetId = queryData.targetId; + Future removeTargetData(TargetData targetData) async { + final int targetId = targetData.targetId; await _removeTarget(targetId); await _writeMetadata(); } @@ -198,7 +196,7 @@ class SQLiteQueryCache implements QueryCache { // result set. final List> result = await _db.query( - // @formatter:off + // @formatter:off ''' SELECT target_id FROM targets @@ -220,13 +218,13 @@ class SQLiteQueryCache implements QueryCache { } @override - Future getQueryData(Query query) async { + Future getTargetData(Target target) async { // Querying the targets table by canonical_id may yield more than one result because canonical_id values are not // required to be unique per target. This query depends on the query_targets index to be efficient. - final String canonicalId = query.canonicalId; + final String canonicalId = target.canonicalId; final List> result = await _db.query( - // @formatter:off + // @formatter:off ''' SELECT target_proto FROM targets @@ -235,13 +233,13 @@ class SQLiteQueryCache implements QueryCache { // @formatter:on [canonicalId]); - QueryData data; + TargetData data; for (Map row in result) { // TODO(long1eu): break out early if found. - final QueryData found = _decodeQueryData(row['target_proto']); + final TargetData found = _decodeTargetData(row['target_proto']); // After finding a potential match, check that the query is actually equal to the requested query. - if (query == found.query) { + if (target == found.target) { data = found; } } @@ -249,15 +247,14 @@ class SQLiteQueryCache implements QueryCache { return data; } - QueryData _decodeQueryData(List bytes) { - return _localSerializer.decodeQueryData(proto.Target.fromBuffer(bytes)); + TargetData _decodeTargetData(List bytes) { + return _localSerializer.decodeTargetData(proto.Target.fromBuffer(bytes)); } // Matching key tracking @override - Future addMatchingKeys( - ImmutableSortedSet keys, int targetId) async { + Future addMatchingKeys(ImmutableSortedSet keys, int targetId) async { // PORTING NOTE: The reverse index (document_targets) is maintained by SQLite. // // When updates come in we treat those as added keys, which means these inserts won't necessarily be unique between @@ -265,7 +262,7 @@ class SQLiteQueryCache implements QueryCache { // to add duplicate entries. This works because there's no additional information in the row. If we want to track // additional data this will probably need to become INSERT OR REPLACE instead. 
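One thing worth keeping in mind about getTargetData above: canonical ids are not required to be unique, so rows fetched by canonical_id are only candidates, and each decoded target must still be compared for equality before it is returned. A hypothetical sketch of that two-step lookup (plain classes stand in for Target and the SQL query):

// Hypothetical sketch: the SQL query already filtered on canonical_id, so
// equality on the decoded target is the tie-breaker for colliding ids.
class FakeTarget {
  const FakeTarget(this.canonicalId, this.path);

  final String canonicalId;
  final String path;

  @override
  bool operator ==(Object other) =>
      other is FakeTarget && other.canonicalId == canonicalId && other.path == path;

  @override
  int get hashCode => canonicalId.hashCode ^ path.hashCode;
}

FakeTarget lookup(List<FakeTarget> candidateRows, FakeTarget wanted) {
  FakeTarget found;
  for (final FakeTarget candidate in candidateRows) {
    if (candidate == wanted) {
      found = candidate;
    }
  }
  return found;
}

void main() {
  const FakeTarget a = FakeTarget('c1', 'rooms/a');
  const FakeTarget b = FakeTarget('c1', 'rooms/b'); // same canonical id, different target
  print(lookup(const <FakeTarget>[a, b], b).path); // rooms/b
}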
const String statement = - // @formatter:off + // @formatter:off ''' INSERT OR IGNORE INTO target_documents (target_id, path) @@ -283,11 +280,10 @@ class SQLiteQueryCache implements QueryCache { } @override - Future removeMatchingKeys( - ImmutableSortedSet keys, int targetId) async { + Future removeMatchingKeys(ImmutableSortedSet keys, int targetId) async { // PORTING NOTE: The reverse index (document_targets) is maintained by SQLite. const String statement = - // @formatter:off + // @formatter:off ''' DELETE FROM target_documents @@ -306,7 +302,7 @@ class SQLiteQueryCache implements QueryCache { Future _removeMatchingKeysForTargetId(int targetId) async { await _db.execute( - // @formatter:off + // @formatter:off ''' DELETE FROM target_documents @@ -317,12 +313,11 @@ class SQLiteQueryCache implements QueryCache { } @override - Future> getMatchingKeysForTargetId( - int targetId) async { + Future> getMatchingKeysForTargetId(int targetId) async { ImmutableSortedSet keys = DocumentKey.emptyKeySet; final List> result = await _db.query( - // @formatter:off + // @formatter:off ''' SELECT path FROM target_documents @@ -333,8 +328,7 @@ class SQLiteQueryCache implements QueryCache { for (Map row in result) { final String path = row['path']; - final DocumentKey key = - DocumentKey.fromPath(EncodedPath.decodeResourcePath(path)); + final DocumentKey key = DocumentKey.fromPath(EncodedPath.decodeResourcePath(path)); keys = keys.insert(key); } @@ -346,7 +340,7 @@ class SQLiteQueryCache implements QueryCache { final String path = EncodedPath.encode(key.path); final List> result = await _db.query( - // @formatter:off + // @formatter:off ''' SELECT target_id FROM target_documents diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/database_id.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/database_id.dart index 963328f3..a9c519bf 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/database_id.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/database_id.dart @@ -2,6 +2,9 @@ // Lung Razvan // on 17/09/2018 +import 'package:cloud_firestore_vm/src/firebase/firestore/model/resource_path.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/util/assert.dart'; + class DatabaseId implements Comparable { const DatabaseId._(this.projectId, this.databaseId); @@ -13,6 +16,16 @@ class DatabaseId implements Comparable { return DatabaseId._(projectId, databaseId); } + /// Returns a DatabaseId from a fully qualified resource name. 
+ factory DatabaseId.fromName(String name) { + final ResourcePath resourceName = ResourcePath.fromString(name); + hardAssert( + resourceName.length >= 3 && resourceName[0] == 'projects' && resourceName[2] == 'databases', + 'Tried to parse an invalid resource name: $resourceName', + ); + return DatabaseId._(resourceName[1], resourceName[3]); + } + static const String defaultDatabaseId = '(default)'; final String projectId; diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/document.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/document.dart index 9d10547f..193a664a 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/document.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/document.dart @@ -6,13 +6,9 @@ import 'package:_firebase_internal_vm/_firebase_internal_vm.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/document_key.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/field_path.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/maybe_document.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/object_value.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/snapshot_version.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/model/value/field_value.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/util/assert.dart'; import 'package:cloud_firestore_vm/src/proto/google/firestore/v1/document.pb.dart'; -import 'package:cloud_firestore_vm/src/proto/index.dart' as pb; - -typedef Converter = FieldValue Function(Value value); /// Describes the hasPendingWrites state of a document. enum DocumentState { @@ -30,87 +26,16 @@ class Document extends MaybeDocument implements Comparable { Document( DocumentKey key, SnapshotVersion version, - this.documentState, this._objectValue, - ) : proto = null, - converter = null, - super(key, version); - - Document.fromProto( - DocumentKey key, - SnapshotVersion version, this.documentState, - this.proto, - this.converter, - ) : _objectValue = null, - super(key, version); + ) : super(key, version); final DocumentState documentState; - final pb.Document proto; - final Converter converter; - - /// A cache for FieldValues that have already been deserialized in [getField] - Map _fieldValueCache; - ObjectValue _objectValue; - - ObjectValue get data { - if (_objectValue == null) { - hardAssert(proto != null && converter != null, - 'Expected proto and converter to be non-null'); - - ObjectValue result = ObjectValue.empty; - for (MapEntry entry in proto.fields.entries) { - final FieldPath path = FieldPath.fromSingleSegment(entry.key); - final FieldValue value = converter(entry.value); - result = result.set(path, value); - } - _objectValue = result; - - // Once objectValue is computed, values inside the fieldValueCache are no - // longer accessed. - _fieldValueCache = null; - } - - return _objectValue; - } + final ObjectValue _objectValue; - FieldValue getField(FieldPath path) { - if (_objectValue != null) { - return _objectValue.get(path); - } else { - hardAssert(proto != null && converter != null, - 'Expected proto and converter to be non-null'); - - // TODO(b-136090445): Remove the cache when `getField` is no longer called - // during Query ordering. 
- _fieldValueCache ??= {}; - FieldValue fieldValue = _fieldValueCache[path]; - if (fieldValue == null) { - // Instead of deserializing the full Document proto, we only deserialize - // the value at the requested field path. This speeds up Query execution - // as query filters can discard documents based on a single field. - Value protoValue = proto.fields[path.getFirstSegment()]; - for (int i = 1; protoValue != null && i < path.length; ++i) { - if (protoValue.whichValueType() != Value_ValueType.mapValue) { - return null; - } - protoValue = protoValue.mapValue.fields[path.getSegment(i)]; - } - - if (protoValue != null) { - fieldValue = converter(protoValue); - _fieldValueCache[path] = fieldValue; - } - } - - return fieldValue; - } - } + ObjectValue get data => _objectValue; - Object getFieldValue(FieldPath path) { - final FieldValue value = getField(path); - return value?.value; - } + Value getField(FieldPath path) => _objectValue[path]; bool get hasLocalMutations { return documentState == DocumentState.localMutations; @@ -123,8 +48,7 @@ class Document extends MaybeDocument implements Comparable { @override bool get hasPendingWrites => hasLocalMutations || hasCommittedMutations; - static int keyComparator(Document left, Document right) => - left.key.compareTo(right.key); + static int keyComparator(Document left, Document right) => left.key.compareTo(right.key); @override int compareTo(Document other) => key.compareTo(other.key); @@ -137,13 +61,14 @@ class Document extends MaybeDocument implements Comparable { version == other.version && key == other.key && documentState == other.documentState && - data == other.data; + _objectValue == other._objectValue; @override int get hashCode { - // Note: We deliberately decided to omit `getData()` since its computation - // is expensive. 
- return key.hashCode ^ version.hashCode ^ documentState.hashCode; + return key.hashCode ^ // + version.hashCode ^ + documentState.hashCode ^ + _objectValue.hashCode; } @override diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/document_collections.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/document_collections.dart index 556d9d36..fab1c1fa 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/document_collections.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/document_collections.dart @@ -12,20 +12,16 @@ import 'package:cloud_firestore_vm/src/firebase/firestore/model/snapshot_version class DocumentCollections { /// Returns an empty, immutable document map static ImmutableSortedMap emptyDocumentMap() { - return ImmutableSortedMap.emptyMap( - DocumentKey.comparator); + return ImmutableSortedMap.emptyMap(DocumentKey.comparator); } /// Returns an empty, immutable 'maybe' document map - static ImmutableSortedMap - emptyMaybeDocumentMap() { - return ImmutableSortedMap.emptyMap( - DocumentKey.comparator); + static ImmutableSortedMap emptyMaybeDocumentMap() { + return ImmutableSortedMap.emptyMap(DocumentKey.comparator); } /// Returns an empty, immutable versions map static ImmutableSortedMap emptyVersionMap() { - return ImmutableSortedMap.emptyMap( - DocumentKey.comparator); + return ImmutableSortedMap.emptyMap(DocumentKey.comparator); } } diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/document_key.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/document_key.dart index f40f1f04..7d65bf3d 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/document_key.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/document_key.dart @@ -15,6 +15,19 @@ class DocumentKey implements Comparable { /// Returns a document key for the empty path. factory DocumentKey.empty() => DocumentKey._(ResourcePath.empty); + /// Returns a DocumentKey from a fully qualified resource name. + factory DocumentKey.fromName(String name) { + final ResourcePath resourceName = ResourcePath.fromString(name); + hardAssert( + resourceName.length >= 4 && + resourceName[0] == 'projects' && + resourceName[2] == 'databases' && + resourceName[4] == 'documents', + 'Tried to parse an invalid key: $resourceName', + ); + return DocumentKey.fromPath(resourceName.popFirst(5)); + } + /// Creates and returns a new document key with the given path. factory DocumentKey.fromPath(ResourcePath path) => DocumentKey._(path); @@ -38,11 +51,9 @@ class DocumentKey implements Comparable { return path.length >= 2 && path.segments[path.length - 2] == collectionId; } - static int comparator(DocumentKey a, DocumentKey b) => - a.path.compareTo(b.path); + static int comparator(DocumentKey a, DocumentKey b) => a.path.compareTo(b.path); - static final ImmutableSortedSet emptyKeySet = - ImmutableSortedSet(); + static final ImmutableSortedSet emptyKeySet = ImmutableSortedSet(); /// Returns true iff the given path is a path to a document. 
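The new DatabaseId.fromName and DocumentKey.fromName factories parse fully qualified resource names of the form projects/<project>/databases/<database>/documents/<path>, and a document path additionally needs an even number of segments, which the isDocumentKey check just below encodes. A hypothetical, dependency-free sketch of that parsing (the real factories go through ResourcePath rather than a raw split):

// Hypothetical resource-name parser mirroring the new fromName factories.
class ParsedName {
  const ParsedName(this.projectId, this.databaseId, this.documentPath);

  final String projectId;
  final String databaseId;
  final List<String> documentPath;
}

ParsedName parseResourceName(String name) {
  final List<String> segments = name.split('/');
  if (segments.length < 5 ||
      segments[0] != 'projects' ||
      segments[2] != 'databases' ||
      segments[4] != 'documents') {
    throw ArgumentError('Tried to parse an invalid resource name: $name');
  }
  final List<String> path = segments.sublist(5);
  if (path.length.remainder(2) != 0) {
    // A document key needs an even number of segments (collection/document pairs).
    throw ArgumentError('Not a document path: ${path.join('/')}');
  }
  return ParsedName(segments[1], segments[3], path);
}

void main() {
  final ParsedName parsed = parseResourceName(
      'projects/my-project/databases/(default)/documents/rooms/eros/messages/1');
  print(parsed.projectId); // my-project
  print(parsed.documentPath); // [rooms, eros, messages, 1]
}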
static bool isDocumentKey(ResourcePath path) => path.length.remainder(2) == 0; @@ -58,10 +69,7 @@ class DocumentKey implements Comparable { @override bool operator ==(Object other) => - identical(this, other) || - other is DocumentKey && - runtimeType == other.runtimeType && - path == other.path; + identical(this, other) || other is DocumentKey && runtimeType == other.runtimeType && path == other.path; @override int get hashCode => path.hashCode; diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/array_transform_operation.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/array_transform_operation.dart index 874527d2..d70dfe98 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/array_transform_operation.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/array_transform_operation.dart @@ -4,24 +4,23 @@ import 'package:_firebase_internal_vm/_firebase_internal_vm.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/transform_operation.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/model/value/field_value.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/values.dart'; import 'package:cloud_firestore_vm/src/firebase/timestamp.dart'; +import 'package:cloud_firestore_vm/src/proto/google/firestore/v1/index.dart'; import 'package:collection/collection.dart'; abstract class ArrayTransformOperation implements TransformOperation { const ArrayTransformOperation(this.elements); - final List elements; + final List elements; @override - FieldValue applyToLocalView( - FieldValue previousValue, Timestamp localWriteTime) { + Value applyToLocalView(Value previousValue, Timestamp localWriteTime) { return apply(previousValue); } @override - FieldValue applyToRemoteDocument( - FieldValue previousValue, FieldValue transformResult) { + Value applyToRemoteDocument(Value previousValue, Value transformResult) { // The server just sends null as the transform result for array operations, // so we have to calculate a result the same as we do for local // applications. @@ -29,22 +28,22 @@ abstract class ArrayTransformOperation implements TransformOperation { } @override - FieldValue computeBaseValue(FieldValue currentValue) { + Value computeBaseValue(Value currentValue) { // Array transforms are idempotent and don't require a base value. return null; } /// Applies this ArrayTransformOperation against the specified previousValue. - ArrayValue apply(FieldValue previousValue); + Value apply(Value previousValue); - /// Inspects the provided value, returning an [List] copy of the internal array if it's an - /// ArrayValue and an empty [List] if it's null or any other type of FSTFieldValue. - static List coercedFieldValuesArray(FieldValue value) { - if (value is ArrayValue) { - return value.internalValue.toList(); + /// Inspects the provided value, returning an [ArrayValue] containing the existing array + /// elements or an empty builder if `value` is not an array. + static ArrayValue coercedFieldValuesArray(Value value) { + if (isArray(value)) { + return value.arrayValue.toBuilder(); } else { // coerce to empty array. - return []; + return ArrayValue(); } } @@ -66,30 +65,37 @@ abstract class ArrayTransformOperation implements TransformOperation { /// An array union transform operation. 
class ArrayTransformOperationUnion extends ArrayTransformOperation { - ArrayTransformOperationUnion(List elements) : super(elements); + ArrayTransformOperationUnion(List elements) : super(elements); @override - ArrayValue apply(FieldValue previousValue) { - final List result = - ArrayTransformOperation.coercedFieldValuesArray(previousValue); - for (FieldValue element in elements) { - if (!result.contains(element)) { - result.add(element); + Value apply(Value previousValue) { + final ArrayValue result = ArrayTransformOperation.coercedFieldValuesArray(previousValue); + for (Value element in elements) { + if (!contains(result, element)) { + result.values.add(element); } } - return ArrayValue.fromList(result); + + return Value(arrayValue: result); } } /// An array remove transform operation. class ArrayTransformOperationRemove extends ArrayTransformOperation { - ArrayTransformOperationRemove(List elements) : super(elements); + ArrayTransformOperationRemove(List elements) : super(elements); @override - ArrayValue apply(FieldValue previousValue) { - final List result = - ArrayTransformOperation.coercedFieldValuesArray(previousValue) - ..removeWhere(elements.contains); - return ArrayValue.fromList(result); + Value apply(Value previousValue) { + final ArrayValue result = ArrayTransformOperation.coercedFieldValuesArray(previousValue); + for (Value removeElement in elements) { + for (int i = 0; i < result.values.length;) { + if (equals(result.values[i], removeElement)) { + result.values.remove(removeElement); + } else { + ++i; + } + } + } + return Value(arrayValue: result); } } diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/delete_mutation.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/delete_mutation.dart index 76c4ebcf..b8d199a5 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/delete_mutation.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/delete_mutation.dart @@ -10,22 +10,18 @@ import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/mutatio import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/precondition.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/no_document.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/snapshot_version.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/model/value/field_value.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/util/assert.dart'; import 'package:cloud_firestore_vm/src/firebase/timestamp.dart'; /// Represents a Delete operation class DeleteMutation extends Mutation { - const DeleteMutation(DocumentKey key, Precondition precondition) - : super(key, precondition); + const DeleteMutation(DocumentKey key, Precondition precondition) : super(key, precondition); @override - MaybeDocument applyToRemoteDocument( - MaybeDocument maybeDoc, MutationResult mutationResult) { + MaybeDocument applyToRemoteDocument(MaybeDocument maybeDoc, MutationResult mutationResult) { verifyKeyMatches(maybeDoc); - hardAssert(mutationResult.transformResults == null, - 'Transform results received by DeleteMutation.'); + hardAssert(mutationResult.transformResults == null, 'Transform results received by DeleteMutation.'); // Unlike applyToLocalView, if we're applying a mutation to a remote document the server has // accepted the mutation so the precondition must have held. 
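To summarise the array transform semantics implemented above: union appends only elements that are not already present, while remove drops every occurrence of each removed element. A plain-list sketch of the same behaviour (the real operations compare proto Values through helpers such as contains and equals):

// Plain-list illustration of the array transform semantics: union skips
// elements already present, remove deletes every equal occurrence.
List<T> arrayUnion<T>(List<T> previous, List<T> elements) {
  final List<T> result = List<T>.from(previous);
  for (final T element in elements) {
    if (!result.contains(element)) {
      result.add(element);
    }
  }
  return result;
}

List<T> arrayRemove<T>(List<T> previous, List<T> elements) {
  final List<T> result = List<T>.from(previous);
  result.removeWhere(elements.contains);
  return result;
}

void main() {
  print(arrayUnion(<int>[1, 2, 2, 3], <int>[2, 4])); // [1, 2, 2, 3, 4]
  print(arrayRemove(<int>[1, 2, 2, 3], <int>[2]));   // [1, 3]
}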
@@ -37,8 +33,7 @@ class DeleteMutation extends Mutation { } @override - MaybeDocument applyToLocalView( - MaybeDocument maybeDoc, MaybeDocument baseDoc, Timestamp localWriteTime) { + MaybeDocument applyToLocalView(MaybeDocument maybeDoc, MaybeDocument baseDoc, Timestamp localWriteTime) { verifyKeyMatches(maybeDoc); if (!precondition.isValidFor(maybeDoc)) { @@ -52,13 +47,10 @@ class DeleteMutation extends Mutation { ); } - @override - ObjectValue extractBaseValue(MaybeDocument maybeDoc) => null; - @override bool operator ==(Object other) => identical(this, other) || - other is DeleteMutation && + other is DeleteMutation && // runtimeType == other.runtimeType && hasSameKeyAndPrecondition(other); @@ -67,7 +59,7 @@ class DeleteMutation extends Mutation { @override String toString() { - return (ToStringHelper(runtimeType) + return (ToStringHelper(runtimeType) // ..add('key', key) ..add('precondition', precondition)) .toString(); diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/field_mask.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/field_mask.dart index 74d43f95..834ff870 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/field_mask.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/field_mask.dart @@ -4,7 +4,7 @@ import 'package:_firebase_internal_vm/_firebase_internal_vm.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/field_path.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/model/value/field_value.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/object_value.dart'; import 'package:collection/collection.dart'; /// Provides a set of fields that can be used to partially patch a document. 
The [FieldMask] is used in conjunction diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/field_transform.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/field_transform.dart index c0ce892c..0395850e 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/field_transform.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/field_transform.dart @@ -26,7 +26,7 @@ class FieldTransform { @override String toString() { - return (ToStringHelper(runtimeType) + return (ToStringHelper(runtimeType) // ..add('fieldPath', fieldPath) ..add('operation', operation)) .toString(); diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/mutation.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/mutation.dart index a755c45f..b8e5acbd 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/mutation.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/mutation.dart @@ -4,23 +4,26 @@ import 'package:cloud_firestore_vm/src/firebase/firestore/model/document.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/document_key.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/field_path.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/maybe_document.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/field_transform.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/mutation_result.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/precondition.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/transform_operation.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/object_value.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/snapshot_version.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/model/value/field_value.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/util/assert.dart'; import 'package:cloud_firestore_vm/src/firebase/timestamp.dart'; +import 'package:cloud_firestore_vm/src/proto/google/firestore/v1/index.dart' show Value; /// Represents a [Mutation] of a document. Different subclasses of Mutation will /// perform different kinds of changes to a base document. For example, a /// [SetMutation] replaces the value of a document and a [DeleteMutation] /// deletes a document. /// -/// In addition to the value of the document mutations also operate on the -/// version. For local mutations (mutations that haven't been committed yet), we -/// preserve the existing version for Set, Patch, and Transform mutations. For -/// local deletes, we reset the version to 0. +/// In addition to the value of the document mutations also operate on the version. For local +/// mutations (mutations that haven't been committed yet), we preserve the existing version for Set +/// and Patch mutations. For local deletes, we reset the version to 0. /// /// Here's the expected transition table. 
/// @@ -33,31 +36,27 @@ import 'package:cloud_firestore_vm/src/firebase/timestamp.dart'; /// |PatchMutation |Document(v3) |Document(v3)| /// |PatchMutation |NoDocument(v3) |NoDocument(v3)| /// |PatchMutation |null |null| -/// |TransformMutation |Document(v3) |Document(v3)| -/// |TransformMutation |NoDocument(v3) |NoDocument(v3)| -/// |TransformMutation |null |null| /// |DeleteMutation |Document(v3) |NoDocument(v0)| /// /// For acknowledged mutations, we use the [updateTime] of the [WriteResponse] as the resulting -/// version for Set, Patch, and Transform mutations. As deletes have no explicit update time, we use +/// version for Set and Patch mutations. As deletes have no explicit update time, we use /// the [commitTime] of the [WriteResponse] for acknowledged deletes. /// /// If a mutation is acknowledged by the backend but fails the precondition check locally, we return /// an [UnknownDocument] and rely on Watch to send us the updated version. /// -/// Note that [TransformMutations] don't create [Documents] (in the case of being applied to a -/// [NoDocument]), even though they would on the backend. This is because the client always combines -/// the [TransformMutation] with a [SetMutation] or [PatchMutation] and we only want to apply the -/// transform if the prior mutation resulted in a [Document] (always true for a [SetMutation], but -/// not necessarily for an [PatchMutation]). +/// Field transforms are used only with Patch and Set Mutations. We use the [updateTransforms] +/// field to store transforms, rather than the [transforms] message. abstract class Mutation { - const Mutation(this.key, this.precondition); + const Mutation(this.key, this.precondition, [this.fieldTransforms = const []]); final DocumentKey key; /// The precondition for the mutation. final Precondition precondition; + final List fieldTransforms; + /// Applies this mutation to the given [MaybeDocument] for the purposes of computing a new remote /// document. If the input document doesn't match the expected state (e.g. it is null or /// outdated), an [UnknownDocument] can be returned. @@ -69,8 +68,7 @@ abstract class Mutation { /// /// Returns the mutated document. The returned document may be an [UnknownDocument], if the /// mutation could not be applied to the locally cached base document. - MaybeDocument applyToRemoteDocument( - MaybeDocument maybeDoc, MutationResult mutationResult); + MaybeDocument applyToRemoteDocument(MaybeDocument maybeDoc, MutationResult mutationResult); /// Applies this mutation to [maybeDoc] for the purposes of computing the new local view of a /// document. Both the input and returned documents can be null. @@ -86,24 +84,7 @@ abstract class Mutation { /// /// Returns the mutated document. The returned document may be null, but only if [maybeDoc] was /// null and the mutation would not create a new document. - MaybeDocument applyToLocalView( - MaybeDocument maybeDoc, MaybeDocument baseDoc, Timestamp localWriteTime); - - /// If applicable, returns the base value to persist with this mutation. If a - /// base value is provided, the mutation is always applied to this base value, - /// even if document has already been updated. - /// - /// The base value is a sparse object that consists of only the document - /// fields for which this mutation contains a non-idempotent transformation - /// (e.g. a numeric increment). 
The provided value guarantees consistent - /// behavior for non-idempotent transforms and allow us to return the same - /// latency-compensated value even if the backend has already applied the - /// mutation. The base value is null for idempotent mutations, as they can be - /// re-played even if the backend has already applied them. - /// - /// Returns a base value to store along with the mutation, or null for - /// idempotent mutations. - ObjectValue extractBaseValue(MaybeDocument maybeDoc); + MaybeDocument applyToLocalView(MaybeDocument maybeDoc, MaybeDocument baseDoc, Timestamp localWriteTime); /// Helper for derived classes to implement .equals. bool hasSameKeyAndPrecondition(Mutation other) { @@ -117,8 +98,7 @@ abstract class Mutation { void verifyKeyMatches(MaybeDocument maybeDoc) { if (maybeDoc != null) { - hardAssert(maybeDoc.key == key, - 'Can only apply a mutation to a document with the same key'); + hardAssert(maybeDoc.key == key, 'Can only apply a mutation to a document with the same key'); } } @@ -132,4 +112,82 @@ abstract class Mutation { return SnapshotVersion.none; } } + + /// Creates a list of "transform results" (a transform result is a field value representing the + /// result of applying a transform) for use after a mutation containing transforms has been + /// acknowledged by the server. + /// + /// The [baseDoc] is the document prior to applying this mutation batch. + List serverTransformResults(MaybeDocument baseDoc, List serverTransformResults) { + final List transformResults = []; + hardAssert( + fieldTransforms.length == serverTransformResults.length, + 'server transform count (${serverTransformResults.length}) should match field transform count (${fieldTransforms.length})', + ); + + for (int i = 0; i < serverTransformResults.length; i++) { + final FieldTransform fieldTransform = fieldTransforms[i]; + final TransformOperation transform = fieldTransform.operation; + + Value previousValue; + if (baseDoc is Document) { + previousValue = baseDoc.getField(fieldTransform.fieldPath); + } + + transformResults.add(transform.applyToRemoteDocument(previousValue, serverTransformResults[i])); + } + return transformResults; + } + + /// Creates a list of "transform results" (a transform result is a field value representing the + /// result of applying a transform) for use when applying a transform locally. + /// + /// The [localWriteTime] is the local time of the mutation (used to generate ServerTimestampValues), + /// [maybeDoc] is the current state of the document after applying all previous mutations and [baseDoc] + /// is the document prior to applying this mutation batch. 
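For orientation, a minimal sketch (not part of this patch) of the difference the two helpers above capture, shown with a single increment transform; `serverValue` stands for whatever Value the backend returned in the corresponding MutationResult, and a `Timestamp.now()` factory is assumed:

    final TransformOperation op =
        NumericIncrementTransformOperation(Value(integerValue: Int64(1)));
    // Locally, the client computes the result itself from the current field value:
    final Value local = op.applyToLocalView(Value(integerValue: Int64(41)), Timestamp.now());
    // local is Value(integerValue: Int64(42))
    // Once the backend acknowledges the write, its computed value is adopted as-is:
    final Value acked = op.applyToRemoteDocument(Value(integerValue: Int64(41)), serverValue);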
+ List localTransformResults(Timestamp localWriteTime, MaybeDocument maybeDoc, MaybeDocument baseDoc) { + final List transformResults = []; + for (FieldTransform fieldTransform in fieldTransforms) { + final TransformOperation transform = fieldTransform.operation; + + Value previousValue; + if (maybeDoc is Document) { + previousValue = maybeDoc.getField(fieldTransform.fieldPath); + } + + transformResults.add(transform.applyToLocalView(previousValue, localWriteTime)); + } + return transformResults; + } + + ObjectValue transformObject(ObjectValue objectValue, List transformResults) { + hardAssert(transformResults.length == fieldTransforms.length, 'Transform results length mismatch.'); + + final ObjectValueBuilder builder = objectValue.toBuilder(); + for (int i = 0; i < fieldTransforms.length; i++) { + final FieldTransform fieldTransform = fieldTransforms[i]; + final FieldPath fieldPath = fieldTransform.fieldPath; + builder[fieldPath] = transformResults[i]; + } + return builder.build(); + } + + ObjectValue extractTransformBaseValue(MaybeDocument maybeDoc) { + ObjectValueBuilder baseObject; + + for (FieldTransform transform in fieldTransforms) { + Value existingValue; + if (maybeDoc is Document) { + existingValue = maybeDoc.getField(transform.fieldPath); + } + + final Value coercedValue = transform.operation.computeBaseValue(existingValue); + if (coercedValue != null) { + baseObject ??= ObjectValue.newBuilder(); + baseObject[transform.fieldPath] = coercedValue; + } + } + + return baseObject != null ? baseObject.build() : null; + } } diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/mutation_batch.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/mutation_batch.dart index 4b979580..75416b8a 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/mutation_batch.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/mutation_batch.dart @@ -56,11 +56,11 @@ class MutationBatch { /// [documentKey] is the key of the document to apply mutations to, [maybeDoc] is the document to /// apply mutations to and [batchResult] is the result of applying the [MutationBatch] to the /// backend. - MaybeDocument applyToRemoteDocument(DocumentKey documentKey, - MaybeDocument maybeDoc, MutationBatchResult batchResult) { + MaybeDocument applyToRemoteDocument( + DocumentKey documentKey, MaybeDocument maybeDoc, MutationBatchResult batchResult) { if (maybeDoc != null) { hardAssert(maybeDoc.key == documentKey, - 'applyToRemoteDocument: key $documentKey doesn\'t match maybeDoc key ${maybeDoc.key}'); + "applyToRemoteDocument: key $documentKey doesn't match maybeDoc key ${maybeDoc.key}"); } final int size = mutations.length; @@ -79,19 +79,17 @@ class MutationBatch { } /// Computes the local view of a document given all the mutations in this batch. - MaybeDocument applyToLocalView( - DocumentKey documentKey, MaybeDocument maybeDoc) { + MaybeDocument applyToLocalView(DocumentKey documentKey, MaybeDocument maybeDoc) { if (maybeDoc != null) { hardAssert(maybeDoc.key == documentKey, - 'applyToRemoteDocument: key $documentKey doesn\'t match maybeDoc key ${maybeDoc.key}'); + "applyToRemoteDocument: key $documentKey doesn't match maybeDoc key ${maybeDoc.key}"); } // First, apply the base state. This allows us to apply non-idempotent transform against a // consistent set of values. 
for (int i = 0; i < baseMutations.length; i++) { final Mutation mutation = baseMutations[i]; if (mutation.key == documentKey) { - maybeDoc = - mutation.applyToLocalView(maybeDoc, maybeDoc, localWriteTime); + maybeDoc = mutation.applyToLocalView(maybeDoc, maybeDoc, localWriteTime); } } @@ -115,14 +113,11 @@ class MutationBatch { // mutations first (as done in [applyToLocalView]), we can reduce the // complexity to O(n). - ImmutableSortedMap mutatedDocuments = - maybeDocumentMap; + ImmutableSortedMap mutatedDocuments = maybeDocumentMap; for (DocumentKey key in keys) { - final MaybeDocument mutatedDocument = - applyToLocalView(key, mutatedDocuments[key]); + final MaybeDocument mutatedDocument = applyToLocalView(key, mutatedDocuments[key]); if (mutatedDocument != null) { - mutatedDocuments = - mutatedDocuments.insert(mutatedDocument.key, mutatedDocument); + mutatedDocuments = mutatedDocuments.insert(mutatedDocument.key, mutatedDocument); } } return mutatedDocuments; @@ -144,8 +139,7 @@ class MutationBatch { runtimeType == other.runtimeType && batchId == other.batchId && localWriteTime == other.localWriteTime && - const DeepCollectionEquality() - .equals(baseMutations, other.baseMutations) && + const DeepCollectionEquality().equals(baseMutations, other.baseMutations) && const DeepCollectionEquality().equals(mutations, other.mutations); @override diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/mutation_result.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/mutation_result.dart index 0cd47616..f306f609 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/mutation_result.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/mutation_result.dart @@ -3,7 +3,7 @@ // on 17/09/2018 import 'package:cloud_firestore_vm/src/firebase/firestore/model/snapshot_version.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/model/value/field_value.dart'; +import 'package:cloud_firestore_vm/src/proto/google/firestore/v1/index.dart'; /// The result of applying a mutation to the server. This is a model of the [WriteResult] proto /// message. @@ -23,9 +23,9 @@ class MutationResult { /// though the [commitTime] advances. final SnapshotVersion version; - /// The resulting fields returned from the backend after a [TransformMutation] has been committed. - /// Contains one [FieldValue] for each [FieldTransform] that was in the mutation. + /// The resulting fields returned from the backend after a mutation containing field transforms has been committed. + /// Contains one [Value] for each [FieldTransform] that was in the mutation. /// - /// Will be null if the mutation was not a [TransformMutation]. + /// Will be null if the mutation did not contain any field transforms.
+ final List transformResults; } diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/numeric_increment_transform_operation.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/numeric_increment_transform_operation.dart index 8b0257a1..df2ba045 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/numeric_increment_transform_operation.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/numeric_increment_transform_operation.dart @@ -3,56 +3,53 @@ // on 13/03/2020 import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/transform_operation.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/model/value/field_value.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/values.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/util/assert.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/util/util.dart'; import 'package:cloud_firestore_vm/src/firebase/timestamp.dart'; +import 'package:cloud_firestore_vm/src/proto/google/firestore/v1/index.dart'; +import 'package:fixnum/fixnum.dart'; /// Implements the backend semantics for locally computed NUMERIC_ADD /// (increment) transforms. Converts all field values to ints or doubles and /// resolves overflows to [IntegerValue.max]/[IntegerValue.min]. class NumericIncrementTransformOperation implements TransformOperation { - NumericIncrementTransformOperation(this.operand); + NumericIncrementTransformOperation(this.operand) { + hardAssert(isNumber(operand), 'NumericIncrementTransformOperation expects a NumberValue operand'); + } - final NumberValue operand; + final Value operand; @override - FieldValue applyToLocalView( - FieldValue previousValue, Timestamp localWriteTime) { - final NumberValue baseValue = computeBaseValue(previousValue); + Value applyToLocalView(Value previousValue, Timestamp localWriteTime) { + final Value baseValue = computeBaseValue(previousValue); // Return an integer value only if the previous value and the operand is an // integer. 
- if (baseValue is IntegerValue && operand is IntegerValue) { - final int sum = _safeIncrement(baseValue.value, _operandAsInt()); - return IntegerValue.valueOf(sum); - } else if (baseValue is IntegerValue) { - final double sum = baseValue.value + _operandAsDouble(); - return DoubleValue.valueOf(sum); - } else if (baseValue is DoubleValue) { - final double sum = baseValue.value + _operandAsDouble(); - return DoubleValue.valueOf(sum); + if (isInteger(baseValue) && isInteger(operand)) { + final int sum = _safeIncrement(baseValue.integerValue.toInt(), _operandAsInt()); + return Value(integerValue: Int64(sum)); + } else if (isInteger(baseValue)) { + final double sum = baseValue.integerValue.toDouble() + _operandAsDouble(); + return Value(doubleValue: sum); } else { - hardAssert(baseValue is DoubleValue, - 'Expected NumberValue to be of type DoubleValue, but was $previousValue'); + hardAssert(isDouble(baseValue), 'Expected NumberValue to be of type DoubleValue, but was $previousValue'); - final double sum = baseValue.value + _operandAsDouble(); - return DoubleValue.valueOf(sum); + final double sum = baseValue.doubleValue + _operandAsDouble(); + return Value(doubleValue: sum); } } @override - FieldValue applyToRemoteDocument( - FieldValue previousValue, FieldValue transformResult) { + Value applyToRemoteDocument(Value previousValue, Value transformResult) { return transformResult; } /// Inspects the provided value, returning the provided value if it is already /// a [NumberValue], otherwise returning a coerced [IntegerValue] of 0. @override - NumberValue computeBaseValue(FieldValue previousValue) { - return previousValue is NumberValue - ? previousValue - : IntegerValue.valueOf(0); + Value computeBaseValue(Value previousValue) { + return isNumber(previousValue) ? previousValue : Value(integerValue: Int64(0)); } int _safeIncrement(int x, int y) { @@ -64,32 +61,26 @@ class NumericIncrementTransformOperation implements TransformOperation { return r; } - if (r >= 0) { - return IntegerValue.min; - } else { - return IntegerValue.max; - } + return r >= 0 ? 
kMinInt : kMaxInt; } double _operandAsDouble() { - if (operand is DoubleValue) { - return operand.value; - } else if (operand is IntegerValue) { - return operand.value.toDouble(); + if (isDouble(operand)) { + return operand.doubleValue; + } else if (isInteger(operand)) { + return operand.integerValue.toDouble(); } else { - throw fail( - "Expected 'operand' to be of Number type, but was ${operand.runtimeType}"); + throw fail("Expected 'operand' to be of Number type, but was ${operand.runtimeType}"); } } int _operandAsInt() { - if (operand is DoubleValue) { - return operand.value.toInt(); - } else if (operand is IntegerValue) { - return operand.value; + if (isDouble(operand)) { + return operand.doubleValue.toInt(); + } else if (isInteger(operand)) { + return operand.integerValue.toInt(); } else { - throw fail( - "Expected 'operand' to be of Number type, but was ${operand.runtimeType}"); + throw fail("Expected 'operand' to be of Number type, but was ${operand.runtimeType}"); } } } diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/patch_mutation.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/patch_mutation.dart index c42cc574..10c643b1 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/patch_mutation.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/patch_mutation.dart @@ -8,14 +8,16 @@ import 'package:cloud_firestore_vm/src/firebase/firestore/model/document_key.dar import 'package:cloud_firestore_vm/src/firebase/firestore/model/field_path.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/maybe_document.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/field_mask.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/field_transform.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/mutation.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/mutation_result.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/precondition.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/object_value.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/snapshot_version.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/unknown_document.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/model/value/field_value.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/util/assert.dart'; import 'package:cloud_firestore_vm/src/firebase/timestamp.dart'; +import 'package:cloud_firestore_vm/src/proto/google/firestore/v1/index.dart' show Value; +import 'package:collection/collection.dart'; /// A mutation that modifies fields of the document at the given key with the given values. The /// values are applied through a field mask: @@ -24,9 +26,9 @@ import 'package:cloud_firestore_vm/src/firebase/timestamp.dart'; /// * When a field is in the mask but not in the values, the corresponding field is deleted. /// * When a field is not in the mask but is in the values, the values map is ignored. 
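A small, hypothetical sketch of the mask rules listed above (constructor shapes follow the ones used elsewhere in this patch):

    final ObjectValue values = ObjectValue.fromMap({
      'a': Value(stringValue: 'new'),
      'ignored': Value(stringValue: 'x'),
    });
    final FieldMask mask = FieldMask({
      FieldPath.fromSingleSegment('a'), // in mask and in values -> 'a' is set to 'new'
      FieldPath.fromSingleSegment('b'), // in mask but not in values -> 'b' is deleted
    });
    // 'ignored' is in values but not in the mask, so the document keeps its current value.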
class PatchMutation extends Mutation { - const PatchMutation( - DocumentKey key, this.value, this.mask, Precondition precondition) - : super(key, precondition); + const PatchMutation(DocumentKey key, this.value, this.mask, Precondition precondition, + [List fieldTransforms = const []]) + : super(key, precondition, fieldTransforms); /// Returns the fields and associated values to use when patching the document. final ObjectValue value; @@ -36,13 +38,9 @@ class PatchMutation extends Mutation { final FieldMask mask; @override - MaybeDocument applyToRemoteDocument( - MaybeDocument maybeDoc, MutationResult mutationResult) { + MaybeDocument applyToRemoteDocument(MaybeDocument maybeDoc, MutationResult mutationResult) { verifyKeyMatches(maybeDoc); - hardAssert(mutationResult.transformResults == null, - 'Transform results received by PatchMutation.'); - if (!precondition.isValidFor(maybeDoc)) { // Since the mutation was not rejected, we know that the precondition matched on the backend. // We therefore must not have the expected version of the document in our cache and return an @@ -50,54 +48,58 @@ class PatchMutation extends Mutation { return UnknownDocument(key, mutationResult.version); } + final List transformResults = mutationResult.transformResults != null + ? serverTransformResults(maybeDoc, mutationResult.transformResults) + : []; + final SnapshotVersion version = mutationResult.version; - final ObjectValue newData = patchDocument(maybeDoc); - return Document(key, version, DocumentState.committedMutations, newData); + final ObjectValue newData = patchDocument(maybeDoc, transformResults); + return Document(key, version, newData, DocumentState.committedMutations); } @override - MaybeDocument applyToLocalView( - MaybeDocument maybeDoc, MaybeDocument baseDoc, Timestamp localWriteTime) { + MaybeDocument applyToLocalView(MaybeDocument maybeDoc, MaybeDocument baseDoc, Timestamp localWriteTime) { verifyKeyMatches(maybeDoc); if (!precondition.isValidFor(maybeDoc)) { return maybeDoc; } + final List transformResults = localTransformResults(localWriteTime, maybeDoc, baseDoc); final SnapshotVersion version = Mutation.getPostMutationVersion(maybeDoc); - final ObjectValue newData = patchDocument(maybeDoc); - return Document(key, version, DocumentState.localMutations, newData); + final ObjectValue newData = patchDocument(maybeDoc, transformResults); + return Document(key, version, newData, DocumentState.localMutations); } /// Patches the data of document if available or creates a new document. Note that this does not /// check whether or not the precondition of this patch holds. 
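Continuing the sketch above with a hypothetical `key` (not part of this patch): the precondition is enforced by applyToLocalView rather than by patchDocument, so a patch whose precondition fails leaves the cached document untouched:

    final PatchMutation patch = PatchMutation(key, values, mask, Precondition(exists: true));
    // With no cached document the exists precondition fails, so nothing is patched:
    assert(patch.applyToLocalView(null, null, Timestamp.now()) == null);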
- ObjectValue patchDocument(MaybeDocument maybeDoc) { + ObjectValue patchDocument(MaybeDocument maybeDoc, List transformResults) { ObjectValue data; if (maybeDoc is Document) { data = maybeDoc.data; } else { - data = ObjectValue.empty; + data = ObjectValue.empty(); } - return patchObject(data); + data = patchObject(data); + data = transformObject(data, transformResults); + return data; } ObjectValue patchObject(ObjectValue obj) { + final ObjectValueBuilder builder = obj.toBuilder(); for (FieldPath path in mask.mask) { if (path.isNotEmpty) { - final FieldValue newValue = value.get(path); + final Value newValue = value.get(path); if (newValue == null) { - obj = obj.delete(path); + builder.delete(path); } else { - obj = obj.set(path, newValue); + builder[path] = newValue; } } } - return obj; + return builder.build(); } - @override - ObjectValue extractBaseValue(MaybeDocument maybeDoc) => null; - @override bool operator ==(Object other) => identical(this, other) || @@ -105,11 +107,15 @@ class PatchMutation extends Mutation { runtimeType == other.runtimeType && hasSameKeyAndPrecondition(other) && value == other.value && - mask == other.mask; + mask == other.mask && + const ListEquality().equals(fieldTransforms, other.fieldTransforms); @override int get hashCode => - value.hashCode ^ mask.hashCode ^ keyAndPreconditionHashCode(); + keyAndPreconditionHashCode() ^ + value.hashCode ^ + mask.hashCode ^ + const ListEquality().hash(fieldTransforms); @override String toString() { diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/server_timestamp_operation.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/server_timestamp_operation.dart index 46694e16..9535ee05 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/server_timestamp_operation.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/server_timestamp_operation.dart @@ -3,8 +3,9 @@ // on 17/09/2018 import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/transform_operation.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/model/value/field_value.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/server_timestamps.dart'; import 'package:cloud_firestore_vm/src/firebase/timestamp.dart'; +import 'package:cloud_firestore_vm/src/proto/google/firestore/v1/index.dart'; /// Transforms a value into a server-generated timestamp. class ServerTimestampOperation implements TransformOperation { @@ -12,23 +13,20 @@ class ServerTimestampOperation implements TransformOperation { const ServerTimestampOperation._(); - static const ServerTimestampOperation sharedInstance = - ServerTimestampOperation._(); + static const ServerTimestampOperation sharedInstance = ServerTimestampOperation._(); @override - FieldValue applyToLocalView( - FieldValue previousValue, Timestamp localWriteTime) { - return ServerTimestampValue(localWriteTime, previousValue); + Value applyToLocalView(Value previousValue, Timestamp localWriteTime) { + return ServerTimestamps.valueOf(localWriteTime, previousValue); } @override - FieldValue applyToRemoteDocument( - FieldValue previousValue, FieldValue transformResult) { + Value applyToRemoteDocument(Value previousValue, Value transformResult) { return transformResult; } @override - FieldValue computeBaseValue(FieldValue currentValue) { + Value computeBaseValue(Value currentValue) { // Server timestamps are idempotent and don't require a base value. 
return null; } diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/set_mutation.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/set_mutation.dart index b0a0bd41..17c26186 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/set_mutation.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/set_mutation.dart @@ -6,53 +6,59 @@ import 'package:_firebase_internal_vm/_firebase_internal_vm.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/document.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/document_key.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/maybe_document.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/field_transform.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/mutation.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/mutation_result.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/precondition.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/object_value.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/snapshot_version.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/model/value/field_value.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/util/assert.dart'; import 'package:cloud_firestore_vm/src/firebase/timestamp.dart'; +import 'package:cloud_firestore_vm/src/proto/google/firestore/v1/index.dart' show Value; +import 'package:collection/collection.dart'; /// A mutation that creates or replaces the document at the given key with the object value /// contents. class SetMutation extends Mutation { - SetMutation(DocumentKey key, this.value, Precondition precondition) - : super(key, precondition); + SetMutation( + DocumentKey key, + this.value, + Precondition precondition, [ + List fieldTransforms = const [], + ]) : super(key, precondition, fieldTransforms); /// The object value to use when setting the document. final ObjectValue value; @override - MaybeDocument applyToRemoteDocument( - MaybeDocument maybeDoc, MutationResult mutationResult) { + MaybeDocument applyToRemoteDocument(MaybeDocument maybeDoc, MutationResult mutationResult) { verifyKeyMatches(maybeDoc); - hardAssert(mutationResult.transformResults == null, - 'Transform results received by SetMutation.'); - // Unlike applyToLocalView, if we're applying a mutation to a remote document the server has // accepted the mutation so the precondition must have held. 
final SnapshotVersion version = mutationResult.version; - return Document(key, version, DocumentState.committedMutations, value); + + ObjectValue newData = value; + if (mutationResult.transformResults != null) { + final List transformResults = serverTransformResults(maybeDoc, mutationResult.transformResults); + newData = transformObject(newData, transformResults); + } + + return Document(key, version, newData, DocumentState.committedMutations); } @override - MaybeDocument applyToLocalView( - MaybeDocument maybeDoc, MaybeDocument baseDoc, Timestamp localWriteTime) { + MaybeDocument applyToLocalView(MaybeDocument maybeDoc, MaybeDocument baseDoc, Timestamp localWriteTime) { verifyKeyMatches(maybeDoc); if (!precondition.isValidFor(maybeDoc)) { return maybeDoc; } - final SnapshotVersion version = Mutation.getPostMutationVersion(maybeDoc); - return Document(key, version, DocumentState.localMutations, value); - } + final List transformResults = localTransformResults(localWriteTime, maybeDoc, baseDoc); + final ObjectValue newData = transformObject(value, transformResults); - @override - ObjectValue extractBaseValue(MaybeDocument maybeDoc) { - return null; + final SnapshotVersion version = Mutation.getPostMutationVersion(maybeDoc); + return Document(key, version, newData, DocumentState.localMutations); } @override @@ -61,14 +67,17 @@ class SetMutation extends Mutation { other is SetMutation && runtimeType == other.runtimeType && hasSameKeyAndPrecondition(other) && - value == other.value; + value == other.value && + const ListEquality().equals(fieldTransforms, other.fieldTransforms); @override - int get hashCode => value.hashCode ^ keyAndPreconditionHashCode(); + int get hashCode => + keyAndPreconditionHashCode() ^ // + value.hashCode ^ + const ListEquality().hash(fieldTransforms); @override String toString() { - return (ToStringHelper(runtimeType) + return (ToStringHelper(runtimeType) // ..add('key', key) ..add('precondition', precondition) ..add('value', value)) .toString(); } } diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/transform_mutation.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/transform_mutation.dart deleted file mode 100644 index ca617455..00000000 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/transform_mutation.dart +++ /dev/null @@ -1,224 +0,0 @@ -// File created by -// Lung Razvan -// on 17/09/2018 - -import 'package:_firebase_internal_vm/_firebase_internal_vm.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/model/document.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/model/document_key.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/model/field_path.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/model/maybe_document.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/field_transform.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/mutation.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/mutation_result.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/patch_mutation.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/precondition.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/transform_operation.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/model/no_document.dart'; -import
'package:cloud_firestore_vm/src/firebase/firestore/model/unknown_document.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/model/value/field_value.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/util/assert.dart'; -import 'package:cloud_firestore_vm/src/firebase/timestamp.dart'; -import 'package:collection/collection.dart'; - -/// A mutation that modifies specific fields of the document with transform -/// operations. Currently the only supported transform is a server timestamp, -/// but IP Address, increment(n), etc. could be supported in the future. -/// -/// It is somewhat similar to a [PatchMutation] in that it patches specific -/// fields and has no effect when applied to null or a [NoDocument] (see comment -/// on [Mutation.applyToRemoteDocument] and [Mutation.applyToLocalView] for -/// rationale). -class TransformMutation extends Mutation { - // NOTE: We set a precondition of exists: true as a safety-check, since we - // always combine TransformMutations with a SetMutation or PatchMutation which ( - // if successful) should end up with an existing document. - TransformMutation(DocumentKey key, this.fieldTransforms) - : super(key, Precondition(exists: true)); - - final List fieldTransforms; - - @override - MaybeDocument applyToRemoteDocument( - MaybeDocument maybeDoc, MutationResult mutationResult) { - verifyKeyMatches(maybeDoc); - - hardAssert(mutationResult.transformResults != null, - 'Transform results missing for TransformMutation.'); - - if (!precondition.isValidFor(maybeDoc)) { - // Since the mutation was not rejected, we know that the precondition - // matched on the backend. We therefore must not have the expected version - // of the document in our cache and return an [UnknownDocument] with the - // known [updateTime]. - return UnknownDocument(key, mutationResult.version); - } - - final Document doc = _requireDocument(maybeDoc); - final List transformResults = - _serverTransformResults(doc, mutationResult.transformResults); - final ObjectValue newData = _transformObject(doc.data, transformResults); - return Document( - key, mutationResult.version, DocumentState.committedMutations, newData); - } - - @override - MaybeDocument applyToLocalView( - MaybeDocument maybeDoc, MaybeDocument baseDoc, Timestamp localWriteTime) { - verifyKeyMatches(maybeDoc); - - if (!precondition.isValidFor(maybeDoc)) { - return maybeDoc; - } - - final Document doc = _requireDocument(maybeDoc); - final List transformResults = - _localTransformResults(localWriteTime, maybeDoc, baseDoc); - final ObjectValue newData = _transformObject(doc.data, transformResults); - return Document(key, doc.version, DocumentState.localMutations, newData); - } - - @override - ObjectValue extractBaseValue(MaybeDocument maybeDoc) { - ObjectValue baseObject; - - for (FieldTransform transform in fieldTransforms) { - FieldValue existingValue; - if (maybeDoc is Document) { - existingValue = maybeDoc.getField(transform.fieldPath); - } - - final FieldValue coercedValue = - transform.operation.computeBaseValue(existingValue); - if (coercedValue != null) { - if (baseObject == null) { - baseObject = ObjectValue.empty.set(transform.fieldPath, coercedValue); - } else { - baseObject = baseObject.set(transform.fieldPath, coercedValue); - } - } - } - - return baseObject; - } - - /// Asserts that the given [MaybeDocument] is actually a [Document] and verifies that it matches - /// the key for this mutation. 
Since we only support transformations with precondition exists this - /// method is guaranteed to be safe. - Document _requireDocument(MaybeDocument maybeDoc) { - hardAssert(maybeDoc is Document, 'Unknown MaybeDocument type $maybeDoc'); - final Document doc = maybeDoc; - hardAssert( - doc.key == key, 'Can only transform a document with the same key'); - return doc; - } - - /// Creates a list of 'transform results' (a transform result is a field value representing the - /// result of applying a transform) for use after a [TransformMutation] has been acknowledged by - /// the server. - /// - /// [baseDoc] the document prior to applying this mutation batch. - /// [serverTransformResults] the transform results received by the server. - /// - /// Returns the transform results list. - List _serverTransformResults( - MaybeDocument baseDoc, List serverTransformResults) { - final List transformResults = - List(fieldTransforms.length); - hardAssert( - fieldTransforms.length == serverTransformResults.length, - 'server transform count (${serverTransformResults.length}) should match field transform ' - 'count (${fieldTransforms.length})'); - - for (int i = 0; i < serverTransformResults.length; i++) { - final FieldTransform fieldTransform = fieldTransforms[i]; - final TransformOperation transform = fieldTransform.operation; - - FieldValue previousValue; - if (baseDoc is Document) { - previousValue = baseDoc.getField(fieldTransform.fieldPath); - } - - transformResults[i] = transform.applyToRemoteDocument( - previousValue, serverTransformResults[i]); - } - return transformResults; - } - - /// Creates a list of 'transform results' (a transform result is a field value - /// representing the result of applying a transform) for use when applying a - /// [TransformMutation] locally. - /// - /// [localWriteTime] the local time of the transform mutation (used to generate - /// [ServerTimestampValue]s). - /// [maybeDoc] The current state of the document after applying all previous - /// mutations. - /// [baseDoc] is the document prior to applying this mutation batch. - /// - /// Returns the transform results list. - List _localTransformResults( - Timestamp localWriteTime, - MaybeDocument maybeDoc, - MaybeDocument baseDoc, - ) { - final List transformResults = - List(fieldTransforms.length); - int i = 0; - for (FieldTransform fieldTransform in fieldTransforms) { - final TransformOperation transform = fieldTransform.operation; - - FieldValue previousValue; - if (maybeDoc is Document) { - previousValue = maybeDoc.getField(fieldTransform.fieldPath); - } - - if (previousValue == null && baseDoc is Document) { - // If the current document does not contain a value for the mutated - // field, use the value that existed before applying this mutation - // batch. This solves an edge case where a PatchMutation clears the - // values in a nested map before the TransformMutation is applied. 
- previousValue = baseDoc.getField(fieldTransform.fieldPath); - } - - transformResults[i] = - transform.applyToLocalView(previousValue, localWriteTime); - i++; - } - return transformResults; - } - - ObjectValue _transformObject( - ObjectValue objectValue, List transformResults) { - hardAssert(transformResults.length == fieldTransforms.length, - 'Transform results length mismatch.'); - - for (int i = 0; i < fieldTransforms.length; i++) { - final FieldTransform fieldTransform = fieldTransforms[i]; - final FieldPath fieldPath = fieldTransform.fieldPath; - objectValue = objectValue.set(fieldPath, transformResults[i]); - } - return objectValue; - } - - @override - bool operator ==(Object other) => - identical(this, other) || - other is TransformMutation && - runtimeType == other.runtimeType && - hasSameKeyAndPrecondition(other) && - const ListEquality() - .equals(fieldTransforms, other.fieldTransforms); - - @override - int get hashCode => - const ListEquality().hash(fieldTransforms) ^ - keyAndPreconditionHashCode(); - - @override - String toString() { - return (ToStringHelper(runtimeType) - ..add('key', key) - ..add('precondition', precondition) - ..add('fieldTransforms', fieldTransforms)) - .toString(); - } -} diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/transform_operation.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/transform_operation.dart index cf7dfe84..8d468bc7 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/transform_operation.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/transform_operation.dart @@ -2,22 +2,19 @@ // Lung Razvan // on 17/09/2018 -import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/transform_mutation.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/model/value/field_value.dart'; import 'package:cloud_firestore_vm/src/firebase/timestamp.dart'; +import 'package:cloud_firestore_vm/src/proto/google/firestore/v1/index.dart'; -/// A transform within a [TransformMutation]. +/// Used to represent a field transform on a mutation. abstract class TransformOperation { /// Computes the local transform result against the provided [previousValue], /// optionally using the provided [localWriteTime]. - FieldValue applyToLocalView( - FieldValue previousValue, Timestamp localWriteTime); + Value applyToLocalView(Value previousValue, Timestamp localWriteTime); /// Computes a final transform result after the transform has been /// acknowledged by the server, potentially using the server-provided /// [transformResult]. - FieldValue applyToRemoteDocument( - FieldValue previousValue, FieldValue transformResult); + Value applyToRemoteDocument(Value previousValue, Value transformResult); /// If applicable, returns the base value to persist for this transform. If a /// base value is provided, the transform operation is always applied to this @@ -31,5 +28,5 @@ abstract class TransformOperation { /// /// Returns a base value to store along with the mutation, or null for /// idempotent transforms. 
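As a quick illustration of the contract above (a sketch, not part of this patch): an increment is non-idempotent and therefore coerces a base value, while a server timestamp is idempotent and yields none:

    final TransformOperation inc =
        NumericIncrementTransformOperation(Value(integerValue: Int64(5)));
    inc.computeBaseValue(Value(stringValue: 'oops')); // -> Value(integerValue: Int64(0))
    ServerTimestampOperation.sharedInstance.computeBaseValue(null); // -> null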
- FieldValue computeBaseValue(FieldValue previousValue); + Value computeBaseValue(Value previousValue); } diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/verify_mutation.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/verify_mutation.dart new file mode 100644 index 00000000..4c65767a --- /dev/null +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/mutation/verify_mutation.dart @@ -0,0 +1,48 @@ +// File created by +// Lung Razvan +// on 16/01/2021 + +import 'package:_firebase_internal_vm/_firebase_internal_vm.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/document_key.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/maybe_document.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/mutation.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/mutation_result.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/precondition.dart'; +import 'package:cloud_firestore_vm/src/firebase/timestamp.dart'; + +/// A mutation that verifies the existence of the document at the given key with the provided +/// precondition. +/// +/// The `verify` operation is only used in Transactions, and this class serves primarily to +/// facilitate serialization into protos. +class VerifyMutation extends Mutation { + const VerifyMutation(DocumentKey key, Precondition precondition) : super(key, precondition); + + @override + MaybeDocument applyToLocalView(MaybeDocument maybeDoc, MaybeDocument baseDoc, Timestamp localWriteTime) { + throw StateError('VerifyMutation should only be used in Transactions.'); + } + + @override + MaybeDocument applyToRemoteDocument(MaybeDocument maybeDoc, MutationResult mutationResult) { + throw StateError('VerifyMutation should only be used in Transactions.'); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is VerifyMutation && // + runtimeType == other.runtimeType && + hasSameKeyAndPrecondition(other); + + @override + int get hashCode => keyAndPreconditionHashCode(); + + @override + String toString() { + return (ToStringHelper(runtimeType) // + ..add('key', key) + ..add('precondition', precondition)) + .toString(); + } +} diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/object_value.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/object_value.dart new file mode 100644 index 00000000..60018837 --- /dev/null +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/object_value.dart @@ -0,0 +1,210 @@ +// File created by +// Lung Razvan +// on 17/09/2018 + +import 'package:cloud_firestore_vm/src/firebase/firestore/model/field_path.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/field_mask.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/server_timestamps.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/values.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/util/assert.dart'; +import 'package:cloud_firestore_vm/src/proto/google/firestore/v1/index.dart'; + +/// A structured object value stored in Firestore. 
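For readers new to the proto-backed model, a hedged sketch of how such an object is built and read, using only constructors and helpers introduced in this patch:

    final ObjectValue user = ObjectValue.fromMap({
      'name': Value(stringValue: 'Ada'),
      'address': Value(mapValue: MapValue(fields: {'city': Value(stringValue: 'London')})),
    });
    final Value city =
        user.get(FieldPath.fromSingleSegment('address').appendSegment('city')); // 'London'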
+class ObjectValue { + ObjectValue(this.proto) { + hardAssert(proto.whichValueType() == Value_ValueType.mapValue, 'ObjectValues should be backed by a MapValue'); + hardAssert(!ServerTimestamps.isServerTimestamp(proto), 'ServerTimestamps should not be used as an ObjectValue'); + } + + ObjectValue.fromMap(Map value) : this(Value(mapValue: MapValue(fields: value))); + + factory ObjectValue.empty() => _emptyInstance; + static final ObjectValue _emptyInstance = ObjectValue(Value(mapValue: MapValue())); + + /// Returns a new ObjectValueBuilder instance that is based on an empty object. + static ObjectValueBuilder newBuilder() { + return _emptyInstance.toBuilder(); + } + + /// Returns the Protobuf that backs this ObjectValue. + final Value proto; + + Map get fields { + return {...proto.mapValue.fields}; + } + + /// Recursively extracts the FieldPaths that are set in this ObjectValue. + FieldMask get fieldMask { + return _extractFieldMask(proto.mapValue); + } + + FieldMask _extractFieldMask(MapValue value) { + final Set fields = {}; + for (MapEntry entry in value.fields.entries) { + final FieldPath currentPath = FieldPath.fromSingleSegment(entry.key); + if (isMapValue(entry.value)) { + final FieldMask nestedMask = _extractFieldMask(entry.value.mapValue); + final Set nestedFields = nestedMask.mask; + if (nestedFields.isEmpty) { + // Preserve the empty map by adding it to the FieldMask. + fields.add(currentPath); + } else { + // For nested and non-empty ObjectValues, add the FieldPath of the leaf nodes. + for (FieldPath nestedPath in nestedFields) { + fields.add(currentPath.appendField(nestedPath)); + } + } + } else { + fields.add(currentPath); + } + } + return FieldMask(fields); + } + + /// Returns the value at the given [fieldPath] or null. + Value operator [](FieldPath fieldPath) { + if (fieldPath.isEmpty) { + return proto; + } else { + Value value = proto; + for (int i = 0; i < fieldPath.length - 1; ++i) { + value = value.mapValue.fields[fieldPath.getSegment(i)]; + if (!isMapValue(value)) { + return null; + } + } + return value.mapValue.fields[fieldPath.getLastSegment()]; + } + } + + Value get(FieldPath fieldPath) { + if (fieldPath.isEmpty) { + return proto; + } else { + Value value = proto; + for (int i = 0; i < fieldPath.length - 1; ++i) { + value = value.mapValue.fields[fieldPath.getSegment(i)]; + if (!isMapValue(value)) { + return null; + } + } + return value.mapValue.fields[fieldPath.getLastSegment()]; + } + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is ObjectValue && // + runtimeType == other.runtimeType && + equals(proto, other.proto); + + @override + int get hashCode => proto.hashCode; + + /// Creates a ObjectValueBuilder instance that is based on the current value. + ObjectValueBuilder toBuilder() => ObjectValueBuilder(this); +} + +/// An [ObjectValueBuilder] provides APIs to set and delete fields from an ObjectValue. All +/// operations mutate the existing instance. +class ObjectValueBuilder { + ObjectValueBuilder(this._baseObject) : _overlayMap = {}; + + /// The existing data to mutate. + final ObjectValue _baseObject; + + /// A nested map that contains the accumulated changes in this ObjectValueBuilder. Values can either be + /// [Value] protos, [Map] values (to represent additional nesting) or [null] (to + /// represent field deletes). + final Map _overlayMap; + + /// Sets the field to the provided value. 
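A short usage sketch of this builder (not part of the patch): sets and deletes accumulate in the overlay and are merged onto the base object when build() is called:

    final ObjectValueBuilder builder = ObjectValue.newBuilder();
    builder[FieldPath.fromSingleSegment('a')] = Value(stringValue: 'x');
    builder.delete(FieldPath.fromSingleSegment('b')); // no-op here, 'b' was never set
    final ObjectValue result = builder.build(); // contains only {'a': 'x'}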
+  operator []=(FieldPath path, Value value) { + hardAssert(path.isNotEmpty, 'Cannot set field for empty path on ObjectValue'); + _setOverlay(path, value); + } + + /// Removes the field at the specified path. If there is no field at the specified path nothing + /// is changed. + void delete(FieldPath path) { + hardAssert(path.isNotEmpty, 'Cannot delete field for empty path on ObjectValue'); + _setOverlay(path, null); + } + + /// Adds [value] to the overlay map at [path] creating nested map entries if needed. + void _setOverlay(FieldPath path, Value value) { + Map currentLevel = _overlayMap; + + for (int i = 0; i < path.length - 1; ++i) { + final String currentSegment = path.getSegment(i); + final Object currentValue = currentLevel[currentSegment]; + + if (currentValue is Map) { + // Re-use a previously created map + currentLevel = currentValue; + } else if (currentValue is Value && currentValue.whichValueType() == Value_ValueType.mapValue) { + // Convert the existing Protobuf MapValue into a Dart map + final Map nextLevel = {...currentValue.mapValue.fields}; + currentLevel[currentSegment] = nextLevel; + currentLevel = nextLevel; + } else { + // Create an empty hash map to represent the current nesting level + final Map nextLevel = {}; + currentLevel[currentSegment] = nextLevel; + currentLevel = nextLevel; + } + } + + currentLevel[path.getLastSegment()] = value; + } + + /// Returns an [ObjectValue] with all mutations applied. + ObjectValue build() { + final MapValue mergedResult = _applyOverlay(FieldPath.emptyPath, _overlayMap); + if (mergedResult != null) { + return ObjectValue(Value(mapValue: mergedResult)); + } else { + return _baseObject; + } + } + + /// Applies any overlays from [currentOverlays] that exist at [currentPath] and returns the + /// merged data at [currentPath] (or null if there were no changes). + /// + /// The [currentPath] is the path at the current nesting level. Can be set to [FieldPath.emptyPath] + /// to represent the root. The [currentOverlays] are the overlays at the current nesting level in the + /// same format as [_overlayMap]. + MapValue _applyOverlay(FieldPath currentPath, Map currentOverlays) { + bool modified = false; + + final Value existingValue = _baseObject.get(currentPath); + final MapValue resultAtPath = isMapValue(existingValue) + // If there is already data at the current path, base our modifications on top + // of the existing data. + ? existingValue.mapValue.toBuilder() + : MapValue(); + + for (MapEntry entry in currentOverlays.entries) { + final String pathSegment = entry.key; + final Object value = entry.value; + + if (value is Map) { + final MapValue nested = _applyOverlay(currentPath.appendSegment(pathSegment), value); + if (nested != null) { + resultAtPath.fields[pathSegment] = Value(mapValue: nested); + modified = true; + } + } else if (value is Value) { + resultAtPath.fields[pathSegment] = value; + modified = true; + } else if (resultAtPath.fields.containsKey(pathSegment)) { + hardAssert(value == null, 'Expected entry to be a Map, a Value or null'); + resultAtPath.fields.remove(pathSegment); + modified = true; + } + } + + return modified ? resultAtPath.freeze() : null; + } +} diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/server_timestamps.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/server_timestamps.dart new file mode 100644 index 00000000..dff5ee9e --- /dev/null +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/server_timestamps.dart @@ -0,0 +1,73 @@ +// File created by +// Lung Razvan +// on 16/01/2021 + +import 'package:cloud_firestore_vm/src/firebase/timestamp.dart'; +import 'package:cloud_firestore_vm/src/proto/google/firestore/v1/index.dart'; +import 'package:cloud_firestore_vm/src/proto/google/protobuf/timestamp.pb.dart' as p; +import 'package:fixnum/fixnum.dart'; + +/// Methods for manipulating locally-applied Server Timestamps. +/// +/// Server Timestamps are backed by [MapValue]s that contain an internal field `__type__` with a +/// value of `server_timestamp`. The previous value and local write time are stored in its +/// `__previous_value__` and `__local_write_time__` fields respectively. +/// +/// Notes: +/// * [ServerTimestamp] Values are created as the result of applying a transform. They can only exist +/// in the local view of a document. Therefore they do not need to be parsed or serialized. +/// * When evaluated locally (e.g. via [DocumentSnapshot] data), they evaluate to null. +/// * They sort after all [Timestamp] Values. With respect to other [ServerTimestamp] Values, they sort +/// by their localWriteTime. +class ServerTimestamps { + ServerTimestamps._(); + + static const String _kServerTimestampSentinel = 'server_timestamp'; + static const String _kTypeKey = '__type__'; + static const String _kPreviousValueKey = '__previous_value__'; + static const String _kLocalWriteTimeKey = '__local_write_time__'; + + static bool isServerTimestamp(Value value) { + final Value type = value == null ? null : value.mapValue.fields[_kTypeKey]; + return type != null && _kServerTimestampSentinel == type.stringValue; + } + + static Value valueOf(Timestamp localWriteTime, Value previousValue) { + final Value encodedType = Value(stringValue: _kServerTimestampSentinel); + final Value encodeWriteTime = Value( + timestampValue: p.Timestamp( + seconds: Int64(localWriteTime.seconds), + nanos: localWriteTime.nanoseconds, + ), + ); + + final MapValue mapRepresentation = + MapValue(fields: {_kTypeKey: encodedType, _kLocalWriteTimeKey: encodeWriteTime}); + + if (previousValue != null) { + mapRepresentation.fields[_kPreviousValueKey] = previousValue; + } + + return Value(mapValue: mapRepresentation); + } + + /// Returns the value of the field before this [ServerTimestamp] was set. + /// + /// Preserving the previous values allows the user to display the last resolved value until the + /// backend responds with the timestamp [DocumentSnapshot.ServerTimestampBehavior].
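To make the sentinel encoding above concrete, a sketch (not part of this patch; a `Timestamp.now()` factory is assumed):

    final Value pending =
        ServerTimestamps.valueOf(Timestamp.now(), Value(integerValue: Int64(7)));
    ServerTimestamps.isServerTimestamp(pending); // true
    ServerTimestamps.getPreviousValue(pending);  // -> Value(integerValue: Int64(7))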
+ static Value getPreviousValue(Value serverTimestampValue) { + final Value previousValue = serverTimestampValue.mapValue.fields[_kPreviousValueKey]; + if (isServerTimestamp(previousValue)) { + return getPreviousValue(previousValue); + } + return previousValue; + } + + static p.Timestamp getLocalWriteTime(Value serverTimestampValue) { + final Value value = serverTimestampValue.mapValue.fields[_kLocalWriteTimeKey]; + if (value == null) { + throw ArgumentError('Value should never be null in this case.'); + } + return value.timestampValue; + } +} diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/array_value.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/array_value.dart deleted file mode 100644 index cc511b23..00000000 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/array_value.dart +++ /dev/null @@ -1,51 +0,0 @@ -// File created by -// Lung Razvan -// on 17/09/2018 - -part of field_value; -/// A wrapper for Array values in Firestore -class ArrayValue extends FieldValue { - const ArrayValue(this._value); - - factory ArrayValue.fromList(List list) => ArrayValue(list); - - final List _value; - - @override - int get typeOrder => FieldValue.typeOrderArray; - - @override - List get value { - return _value.map((FieldValue it) => it.value).toList(growable: false); - } - - List get internalValue => _value; - - @override - int compareTo(FieldValue other) { - if (other is ArrayValue) { - final int minLength = min(_value.length, other._value.length); - - for (int i = 0; i < minLength; i++) { - final int cmp = _value[i].compareTo(other._value[i]); - if (cmp != 0) { - return cmp; - } - } - - return _value.length.compareTo(other._value.length); - } else { - return defaultCompareTo(other); - } - } - - @override - bool operator ==(Object other) => - identical(this, other) || - other is ArrayValue && - runtimeType == other.runtimeType && - const DeepCollectionEquality().equals(_value, other._value); - - @override - int get hashCode => const DeepCollectionEquality().hash(_value); -} diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/blob_value.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/blob_value.dart deleted file mode 100644 index 17aa419e..00000000 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/blob_value.dart +++ /dev/null @@ -1,39 +0,0 @@ -// File created by -// Lung Razvan -// on 17/09/2018 - -part of field_value; - -/// A wrapper for blob values in Firestore. 
-class BlobValue extends FieldValue { - const BlobValue(this._value); - - factory BlobValue.valueOf(Blob blob) => BlobValue(blob); - - final Blob _value; - - @override - int get typeOrder => FieldValue.typeOrderBlob; - - @override - Blob get value => _value; - - @override - int compareTo(FieldValue other) { - if (other is BlobValue) { - return _value.compareTo(other._value); - } else { - return defaultCompareTo(other); - } - } - - @override - bool operator ==(Object other) => - identical(this, other) || - other is BlobValue && - runtimeType == other.runtimeType && - _value == other._value; - - @override - int get hashCode => _value.hashCode; -} diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/bool_value.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/bool_value.dart deleted file mode 100644 index c2a621a5..00000000 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/bool_value.dart +++ /dev/null @@ -1,45 +0,0 @@ -// File created by -// Lung Razvan -// on 17/09/2018 - -part of field_value; - -/// A wrapper for boolean value in Firestore. -class BoolValue extends FieldValue { - const BoolValue._(this._value); - - // ignore: avoid_positional_boolean_parameters - factory BoolValue.valueOf(bool value) => value ? trueValue : falseValue; - - static const BoolValue trueValue = BoolValue._(true); - static const BoolValue falseValue = BoolValue._(false); - - final bool _value; - - @override - int get typeOrder => FieldValue.typeOrderBool; - - // Since we create shared instances for true / false, we can use reference equality. - @override - bool get value => _value; - - @override - int compareTo(FieldValue other) { - if (other is BoolValue) { - return _value == other._value ? 0 : _value ? 1 : -1; - } else { - return defaultCompareTo(other); - } - } - - @override - bool operator ==(Object other) => - identical(this, other) || - super == other && - other is BoolValue && - runtimeType == other.runtimeType && - _value == other._value; - - @override - int get hashCode => _value ? 1 : 0; -} diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/double_value.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/double_value.dart deleted file mode 100644 index 7f35c008..00000000 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/double_value.dart +++ /dev/null @@ -1,70 +0,0 @@ -// File created by -// Lung Razvan -// on 17/09/2018 - -part of field_value; - -/// A wrapper for float/double values in Firestore. -class DoubleValue extends NumberValue { - const DoubleValue(this._value); - - factory DoubleValue.valueOf(double value) => DoubleValue(value); - - /// A constant holding the smallest positive normal value of type [double], 2-1022. - /// It is equal to the hexadecimal floating-point literal ```0x1.0p-1022```. 
- static const double minNormal = 2.2250738585072014E-308; - - static const DoubleValue nan = DoubleValue(double.nan); - - final double _value; - - @override - double get value => _value; - - /// -0.0 is not equal with 0.0 - @override - bool operator ==(Object other) { - if (identical(this, other)) { - return true; - } - if (other is DoubleValue && runtimeType == other.runtimeType) { - if ((identical(_value, -0.0) && identical(other._value, 0.0)) || - (identical(_value, 0.0) && identical(other._value, -0.0))) { - return false; - } - - if (_value.isNaN && other._value.isNaN) { - return true; - } - - return _value == other._value; - } else { - return false; - } - } - - @override - int get hashCode => _value.hashCode; - - /// Comparing NaN's should return 0, if [this] is NaN it should return -1 and if [other] is NaN it - /// should return 1 - @override - int compareTo(FieldValue other) { - if (other is! NumberValue) { - return defaultCompareTo(other); - } - - final NumberValue otherValue = other; - if (value.isNaN && otherValue.value.isNaN) { - return 0; - } else if (value.isNaN) { - return -1; - } else if (otherValue.value is double && otherValue.value.isNaN) { - return 1; - } else if (otherValue.value == value) { - return 0; - } - - return value.compareTo(other.value); - } -} diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/field_value.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/field_value.dart deleted file mode 100644 index e9d00954..00000000 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/field_value.dart +++ /dev/null @@ -1,93 +0,0 @@ -// File created by -// Lung Razvan -// on 17/09/2018 - -library field_value; - -import 'dart:math'; - -import 'package:_firebase_database_collection_vm/_firebase_database_collection_vm.dart'; -import 'package:_firebase_internal_vm/_firebase_internal_vm.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/blob.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/geo_point.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/model/database_id.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/model/document_key.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/model/field_path.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/field_mask.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/transform_mutation.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/server_timestamp_behavior.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/util/assert.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/util/util.dart'; -import 'package:cloud_firestore_vm/src/firebase/timestamp.dart'; -import 'package:collection/collection.dart'; - -part 'array_value.dart'; -part 'blob_value.dart'; -part 'bool_value.dart'; -part 'double_value.dart'; -part 'geo_point_value.dart'; -part 'integer_value.dart'; -part 'null_value.dart'; -part 'number_value.dart'; -part 'object_value.dart'; -part 'reference_value.dart'; -part 'server_timestamp_value.dart'; -part 'string_value.dart'; -part 'timestamp_value.dart'; - -/// A field value represents a data type as stored by Firestore. 
-/// -/// Supported types are: -/// * Null -/// * Boolean -/// * Double -/// * Timestamp -/// * ServerTimestamp (a sentinel used in uncommitted writes) -/// * String -/// * Binary -/// * (Document) References -/// * GeoPoint -/// * Array -/// * Object -abstract class FieldValue implements Comparable { - const FieldValue(); - - static const int typeOrderNull = 0; - static const int typeOrderBool = 1; - static const int typeOrderNumber = 2; - static const int typeOrderTimestamp = 3; - static const int typeOrderString = 4; - static const int typeOrderBlob = 5; - static const int typeOrderReference = 6; - static const int typeOrderGeopoint = 7; - static const int typeOrderArray = 8; - static const int typeOrderObject = 9; - - int get typeOrder; - - /// Converts a [FieldValue] into the value that users will see in document snapshots using the - /// default deserialization options. - Object get value; - - @override - bool operator ==(Object other); - - @override - int get hashCode; - - @override - int compareTo(FieldValue other); - - @override - String toString() { - final Object val = value; - return val == null ? 'null' : val.toString(); - } - - int defaultCompareTo(FieldValue other) { - final int cmp = typeOrder.compareTo(other.typeOrder); - hardAssert(cmp != 0, - 'Default compareTo should not be used for values of same type.'); - return cmp; - } -} diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/geo_point_value.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/geo_point_value.dart deleted file mode 100644 index 0b475f89..00000000 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/geo_point_value.dart +++ /dev/null @@ -1,39 +0,0 @@ -// File created by -// Lung Razvan -// on 17/09/2018 - -part of field_value; - -/// A wrapper for geo point values in Firestore. -class GeoPointValue extends FieldValue { - const GeoPointValue(this._value); - - factory GeoPointValue.valueOf(GeoPoint value) => GeoPointValue(value); - - final GeoPoint _value; - - @override - int get typeOrder => FieldValue.typeOrderGeopoint; - - @override - GeoPoint get value => _value; - - @override - int compareTo(FieldValue other) { - if (other is GeoPointValue) { - return _value.compareTo(other._value); - } else { - return defaultCompareTo(other); - } - } - - @override - bool operator ==(Object other) => - identical(this, other) || - other is GeoPointValue && - runtimeType == other.runtimeType && - _value == other._value; - - @override - int get hashCode => _value.hashCode; -} diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/integer_value.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/integer_value.dart deleted file mode 100644 index 471b37a2..00000000 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/integer_value.dart +++ /dev/null @@ -1,48 +0,0 @@ -// File created by -// Lung Razvan -// on 17/09/2018 - -part of field_value; - -/// A wrapper for integer/long values in Firestore. 
-class IntegerValue extends NumberValue { - const IntegerValue(this._value); - - factory IntegerValue.valueOf(int value) => IntegerValue(value); - - static const int max = 9223372036854775807; - static const int min = -9223372036854775808; - - final int _value; - - @override - int get value => _value; - - @override - bool operator ==(Object other) => - identical(this, other) || - other is IntegerValue && - runtimeType == other.runtimeType && - _value == other._value; - - @override - int get hashCode => _value.hashCode; - - /// NOTE: - /// Comparing to a NaN should always return 1; - /// Comparing 0 with 0.0 should return 0; - @override - int compareTo(FieldValue other) { - if (other is NumberValue) { - if (other.value.isNaN) { - return 1; - } else if (other.value is double && other.value == 0.0) { - return value.compareTo(0); - } else { - return value.compareTo(other.value); - } - } else { - return defaultCompareTo(other); - } - } -} diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/null_value.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/null_value.dart deleted file mode 100644 index 7c185042..00000000 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/null_value.dart +++ /dev/null @@ -1,37 +0,0 @@ -// File created by -// Lung Razvan -// on 17/09/2018 - -part of field_value; - -/// A wrapper for null values in Firestore. -class NullValue extends FieldValue { - const NullValue._(); - - factory NullValue.nullValue() => _instance; - - static const NullValue _instance = NullValue._(); - - @override - int get typeOrder => FieldValue.typeOrderNull; - - @override - Object get value => null; - - @override - int compareTo(FieldValue other) { - if (other is NullValue || other == null) { - return 0; - } else { - return defaultCompareTo(other); - } - } - - @override - bool operator ==(Object other) => - identical(this, other) || - other is NullValue && runtimeType == other.runtimeType; - - @override - int get hashCode => -1; -} diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/number_value.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/number_value.dart deleted file mode 100644 index 76af4260..00000000 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/number_value.dart +++ /dev/null @@ -1,17 +0,0 @@ -// File created by -// Lung Razvan -// on 17/09/2018 - -part of field_value; - -/// Base class inherited from by IntegerValue and DoubleValue. It implements proper number -/// comparisons between the two types. 
-abstract class NumberValue extends FieldValue { - const NumberValue(); - - @override - num get value; - - @override - int get typeOrder => FieldValue.typeOrderNumber; -} diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/object_value.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/object_value.dart deleted file mode 100644 index 53ada25f..00000000 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/object_value.dart +++ /dev/null @@ -1,169 +0,0 @@ -// File created by -// Lung Razvan -// on 17/09/2018 - -part of field_value; - -class ObjectValue extends FieldValue { - const ObjectValue(this._value); - - factory ObjectValue.fromMap(Map value) { - return ObjectValue.fromImmutableMap( - ImmutableSortedMap.fromMap(value, comparator())); - } - - factory ObjectValue.fromImmutableMap( - ImmutableSortedMap value) { - if (value.isEmpty) { - return empty; - } else { - return ObjectValue(value); - } - } - - static final ObjectValue empty = ObjectValue( - ImmutableSortedMap.emptyMap(comparator())); - - final ImmutableSortedMap _value; - - @override - int get typeOrder => FieldValue.typeOrderObject; - - /// Recursively extracts the FieldPaths that are set in this ObjectValue. - FieldMask get fieldMask { - final Set fields = {}; - for (MapEntry entry in internalValue) { - final FieldPath currentPath = FieldPath.fromSingleSegment(entry.key); - final FieldValue value = entry.value; - if (value is ObjectValue) { - final FieldMask nestedMask = value.fieldMask; - final Set nestedFields = nestedMask.mask; - if (nestedFields.isEmpty) { - // Preserve the empty map by adding it to the FieldMask. - fields.add(currentPath); - } else { - // For nested and non-empty ObjectValues, add the FieldPath of the - // leaf nodes. - for (FieldPath nestedPath in nestedFields) { - fields.add(currentPath.appendField(nestedPath)); - } - } - } else { - fields.add(currentPath); - } - } - return FieldMask(fields); - } - - @override - Map get value { - final Map res = {}; - for (MapEntry entry in _value) { - res[entry.key] = entry.value.value; - } - return res; - } - - ImmutableSortedMap get internalValue => _value; - - @override - int compareTo(FieldValue other) { - if (other is ObjectValue) { - for (int i = 0; i < _value.length && i < other._value.length; i++) { - final MapEntry entry1 = _value.elementAt(i); - final MapEntry entry2 = other._value.elementAt(i); - - final int keyCompare = entry1.key.compareTo(entry2.key); - if (keyCompare != 0) { - return keyCompare; - } - - final int valueCompare = entry1.value.compareTo(entry2.value); - - if (valueCompare != 0) { - return valueCompare; - } - } - - return _value.length.compareTo(other._value.length); - } else { - return defaultCompareTo(other); - } - } - - /// Returns a new ObjectValue with the field at the named path set to value. - ObjectValue set(FieldPath path, FieldValue value) { - hardAssert( - path.isNotEmpty, 'Cannot set field for empty path on ObjectValue'); - - final String childName = path.first; - if (path.length == 1) { - return _setChild(childName, value); - } else { - final FieldValue child = internalValue[childName]; - ObjectValue obj; - if (child is ObjectValue) { - obj = child; - } else { - obj = ObjectValue.empty; - } - - final ObjectValue newChild = obj.set(path.popFirst(), value); - return _setChild(childName, newChild); - } - } - - /// Returns an ObjectValue with the field path deleted. If there is no field at the specified path - /// nothing is changed. 
- ObjectValue delete(FieldPath path) { - hardAssert( - path.isNotEmpty, 'Cannot delete field for empty path on ObjectValue'); - - final String childName = path.first; - if (path.length == 1) { - return ObjectValue.fromImmutableMap(_value.remove(childName)); - } else { - final FieldValue child = _value[childName]; - if (child is ObjectValue) { - final ObjectValue newChild = child.delete(path.popFirst()); - return _setChild(childName, newChild); - } else { - // Don't actually change a primitive value to an object for a delete. - return this; - } - } - } - - /// Returns the value at the given path or null - FieldValue get(FieldPath fieldPath) { - FieldValue current = this; - - for (int i = 0; i < fieldPath.length; i++) { - if (current is ObjectValue) { - final ObjectValue object = current; - current = object._value[fieldPath[i]]; - } else { - return null; - } - } - - return current; - } - - ObjectValue _setChild(String childName, FieldValue value) { - return ObjectValue.fromImmutableMap(internalValue.insert(childName, value)); - } - - @override - String toString() => _value.toString(); - - @override - bool operator ==(Object other) => - identical(this, other) || - other is ObjectValue && - runtimeType == other.runtimeType && - _value == other._value; - - @override - int get hashCode => _value.hashCode; -} diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/reference_value.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/reference_value.dart deleted file mode 100644 index c2203609..00000000 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/reference_value.dart +++ /dev/null @@ -1,44 +0,0 @@ -// File created by -// Lung Razvan -// on 17/09/2018 - -part of field_value; - -/// A wrapper for reference values in Firestore. -class ReferenceValue extends FieldValue { - const ReferenceValue(this.databaseId, this.key); - - factory ReferenceValue.valueOf(DatabaseId databaseId, DocumentKey key) { - return ReferenceValue(databaseId, key); - } - - final DatabaseId databaseId; - final DocumentKey key; - - @override - int get typeOrder => FieldValue.typeOrderReference; - - @override - DocumentKey get value => key; - - @override - int compareTo(FieldValue other) { - if (other is ReferenceValue) { - final int cmp = databaseId.compareTo(other.databaseId); - return cmp != 0 ? cmp : key.compareTo(other.key); - } else { - return defaultCompareTo(other); - } - } - - @override - bool operator ==(Object other) => - identical(this, other) || - other is ReferenceValue && - runtimeType == other.runtimeType && - databaseId == other.databaseId && - key == other.key; - - @override - int get hashCode => databaseId.hashCode ^ key.hashCode; -} diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/server_timestamp_value.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/server_timestamp_value.dart deleted file mode 100644 index 704f85a0..00000000 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/server_timestamp_value.dart +++ /dev/null @@ -1,72 +0,0 @@ -// File created by -// Lung Razvan -// on 17/09/2018 - -part of field_value; - -/// Represents a locally-applied Server Timestamp. -/// -/// Notes: -/// - ServerTimestampValue instances are created as the result of applying a [TransformMutation] -/// (see TransformMutation.applyTo methods). They can only exist in the local view of a document. 
-/// Therefore they do not need to be parsed or serialized. -/// - When evaluated locally (e.g. via [DocumentSnapshot.ata]), they evaluate to null. -/// - They sort after all [TimestampValue]s. With respect to other [ServerTimestampValue]s, they -/// sort by their [localWriteTime]. -class ServerTimestampValue extends FieldValue { - const ServerTimestampValue(this.localWriteTime, this._previousValue); - - final Timestamp localWriteTime; - final FieldValue _previousValue; - - @override - int get typeOrder => FieldValue.typeOrderTimestamp; - - @override - Object get value => null; - - /// Returns the value of the field before this ServerTimestamp was set. - /// - /// Preserving the previous values allows the user to display the last resoled - /// value until the backend responds with the timestamp - /// [ServerTimestampBehavior]. - Object get previousValue { - if (_previousValue is ServerTimestampValue) { - final ServerTimestampValue value = _previousValue; - return value._previousValue; - } - - return _previousValue != null ? _previousValue.value : null; - } - - @override - int compareTo(FieldValue other) { - if (other is ServerTimestampValue) { - return localWriteTime.compareTo(other.localWriteTime); - } else if (other is TimestampValue) { - // Server timestamps come after all concrete timestamps. - return 1; - } else { - return defaultCompareTo(other); - } - } - - @override - String toString() { - return (ToStringHelper(ServerTimestampValue) - ..add('localWriteTime', localWriteTime) - ..add('previousValue', previousValue)) - .toString(); - } - - @override - bool operator ==(Object other) => - identical(this, other) || - other is ServerTimestampValue && - runtimeType == other.runtimeType && - localWriteTime == other.localWriteTime && - previousValue == other.previousValue; - - @override - int get hashCode => localWriteTime.hashCode ^ previousValue.hashCode; -} diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/string_value.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/string_value.dart deleted file mode 100644 index 84c40824..00000000 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/string_value.dart +++ /dev/null @@ -1,43 +0,0 @@ -// File created by -// Lung Razvan -// on 17/09/2018 - -part of field_value; - -/// A wrapper for string values in Firestore. 
-// TODO(long1eu): Add truncation support -class StringValue extends FieldValue { - const StringValue(this._value); - - factory StringValue.valueOf(String value) { - return StringValue(value); - } - - final String _value; - - @override - int get typeOrder => FieldValue.typeOrderString; - - @override - String get value => _value; - - @override - int compareTo(FieldValue other) { - if (other is StringValue) { - return _value.compareTo(other._value); - } else { - return defaultCompareTo(other); - } - } - - @override - bool operator ==(Object other) { - return identical(this, other) || - other is StringValue && - runtimeType == other.runtimeType && - _value == other._value; - } - - @override - int get hashCode => _value.hashCode; -} diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/timestamp_value.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/timestamp_value.dart deleted file mode 100644 index 7b931b7e..00000000 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/value/timestamp_value.dart +++ /dev/null @@ -1,44 +0,0 @@ -// File created by -// Lung Razvan -// on 17/09/2018 - -part of field_value; - -class TimestampValue extends FieldValue { - const TimestampValue(this._value); - - factory TimestampValue.valueOf(Timestamp value) => TimestampValue(value); - - final Timestamp _value; - - @override - int get typeOrder => FieldValue.typeOrderTimestamp; - - @override - Timestamp get value => _value; - - @override - int compareTo(FieldValue other) { - if (other is TimestampValue) { - return _value.compareTo(other._value); - } else if (other is ServerTimestampValue) { - // Concrete timestamps come before server timestamps. - return -1; - } else { - return defaultCompareTo(other); - } - } - - @override - bool operator ==(Object other) => - identical(this, other) || - other is TimestampValue && - runtimeType == other.runtimeType && - _value == other._value; - - @override - int get hashCode => _value.hashCode; - - @override - String toString() => _value.toString(); -} diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/values.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/values.dart new file mode 100644 index 00000000..e11a5392 --- /dev/null +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/model/values.dart @@ -0,0 +1,406 @@ +// File created by +// Lung Razvan +// on 16/01/2021 + +import 'dart:collection'; +import 'dart:math' as math; + +import 'package:cloud_firestore_vm/src/firebase/firestore/model/database_id.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/document_key.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/server_timestamps.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/util/assert.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/util/util.dart'; +import 'package:cloud_firestore_vm/src/proto/google/firestore/v1/index.dart'; +import 'package:cloud_firestore_vm/src/proto/google/protobuf/struct.pbenum.dart'; +import 'package:cloud_firestore_vm/src/proto/google/protobuf/timestamp.pb.dart' as p; +import 'package:cloud_firestore_vm/src/proto/google/type/latlng.pb.dart' as p; +import 'package:fixnum/fixnum.dart'; + +final Value NAN_VALUE = Value(doubleValue: double.nan); +final Value NULL_VALUE = Value(nullValue: NullValue.NULL_VALUE); + +/// The order of types in Firestore. 
This order is based on the backend's ordering, but modified to +/// support server timestamps. +const int TYPE_ORDER_NULL = 0; + +const int TYPE_ORDER_bool = 1; +const int TYPE_ORDER_NUMBER = 2; +const int TYPE_ORDER_TIMESTAMP = 3; +const int TYPE_ORDER_SERVER_TIMESTAMP = 4; +const int TYPE_ORDER_STRING = 5; +const int TYPE_ORDER_BLOB = 6; +const int TYPE_ORDER_REFERENCE = 7; +const int TYPE_ORDER_GEOPOINT = 8; +const int TYPE_ORDER_ARRAY = 9; +const int TYPE_ORDER_MAP = 10; + +/// Returns the backend's type order of the given Value type. +int typeOrder(Value value) { + switch (value.whichValueType()) { + case Value_ValueType.nullValue: + return TYPE_ORDER_NULL; + case Value_ValueType.booleanValue: + return TYPE_ORDER_bool; + case Value_ValueType.integerValue: + return TYPE_ORDER_NUMBER; + case Value_ValueType.doubleValue: + return TYPE_ORDER_NUMBER; + case Value_ValueType.timestampValue: + return TYPE_ORDER_TIMESTAMP; + case Value_ValueType.stringValue: + return TYPE_ORDER_STRING; + case Value_ValueType.bytesValue: + return TYPE_ORDER_BLOB; + case Value_ValueType.referenceValue: + return TYPE_ORDER_REFERENCE; + case Value_ValueType.geoPointValue: + return TYPE_ORDER_GEOPOINT; + case Value_ValueType.arrayValue: + return TYPE_ORDER_ARRAY; + case Value_ValueType.mapValue: + if (ServerTimestamps.isServerTimestamp(value)) { + return TYPE_ORDER_SERVER_TIMESTAMP; + } + return TYPE_ORDER_MAP; + default: + throw fail('Invalid value type: ${value.whichValueType()}'); + } +} + +bool equals(Value left, Value right) { + if (left == null && right == null) { + return true; + } else if (left == null || right == null) { + return false; + } + + final int leftType = typeOrder(left); + final int rightType = typeOrder(right); + if (leftType != rightType) { + return false; + } + + switch (leftType) { + case TYPE_ORDER_NUMBER: + return _numberEquals(left, right); + case TYPE_ORDER_ARRAY: + return _arrayEquals(left, right); + case TYPE_ORDER_MAP: + return _objectEquals(left, right); + case TYPE_ORDER_SERVER_TIMESTAMP: + return ServerTimestamps.getLocalWriteTime(left) == ServerTimestamps.getLocalWriteTime(right); + default: + return left == right; + } +} + +bool _numberEquals(Value left, Value right) { + if (left.whichValueType() == Value_ValueType.integerValue && right.whichValueType() == Value_ValueType.integerValue) { + return left.integerValue == right.integerValue; + } else if (left.whichValueType() == Value_ValueType.doubleValue && + right.whichValueType() == Value_ValueType.doubleValue) { + return left.doubleValue == right.doubleValue; + } + + return false; +} + +bool _arrayEquals(Value left, Value right) { + final ArrayValue leftArray = left.arrayValue; + final ArrayValue rightArray = right.arrayValue; + + if (leftArray.values.length != rightArray.values.length) { + return false; + } + + for (int i = 0; i < leftArray.values.length; ++i) { + if (!equals(leftArray.values[i], rightArray.values[i])) { + return false; + } + } + + return true; +} + +bool _objectEquals(Value left, Value right) { + final MapValue leftMap = left.mapValue; + final MapValue rightMap = right.mapValue; + + if (leftMap.fields.length != rightMap.fields.length) { + return false; + } + + for (MapEntry entry in leftMap.fields.entries) { + final Value otherEntry = rightMap.fields[entry.key]; + if (!equals(entry.value, otherEntry)) { + return false; + } + } + + return true; +} + +/// Returns true if the Value list contains the specified element. 
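(Aside, not part of the patch: a small sketch of the semantics encoded by the typeOrder() and equals() helpers above. It assumes the top-level functions and constants of the new values.dart; the values themselves are made up.)

// Illustrative sketch only -- not part of this patch.
import 'package:cloud_firestore_vm/src/firebase/firestore/model/values.dart' as values;
import 'package:cloud_firestore_vm/src/proto/google/firestore/v1/index.dart';
import 'package:fixnum/fixnum.dart';

void main() {
  final Value one = Value(integerValue: Int64(1));
  final Value onePointZero = Value(doubleValue: 1.0);

  // Integers and doubles share one slot in the type ordering...
  assert(values.typeOrder(one) == values.TYPE_ORDER_NUMBER);
  assert(values.typeOrder(onePointZero) == values.TYPE_ORDER_NUMBER);

  // ...but equals() is strict about the wire representation, so an
  // integer 1 and a double 1.0 do not compare equal.
  assert(!values.equals(one, onePointZero));
}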
+bool contains(ArrayValue haystack, Value needle) { + for (Value haystackElement in haystack.values) { + if (equals(haystackElement, needle)) { + return true; + } + } + return false; +} + +int compare(Value left, Value right) { + final int leftType = typeOrder(left); + final int rightType = typeOrder(right); + + if (leftType != rightType) { + return leftType.compareTo(rightType); + } + + switch (leftType) { + case TYPE_ORDER_NULL: + return 0; + case TYPE_ORDER_bool: + return compareBools(left.booleanValue, right.booleanValue); + case TYPE_ORDER_NUMBER: + return _compareNumbers(left, right); + case TYPE_ORDER_TIMESTAMP: + return _compareTimestamps(left.timestampValue, right.timestampValue); + case TYPE_ORDER_SERVER_TIMESTAMP: + return _compareTimestamps(ServerTimestamps.getLocalWriteTime(left), ServerTimestamps.getLocalWriteTime(right)); + case TYPE_ORDER_STRING: + return left.stringValue.compareTo(right.stringValue); + case TYPE_ORDER_BLOB: + return compareBytes(left.bytesValue, right.bytesValue); + case TYPE_ORDER_REFERENCE: + return _compareReferences(left.referenceValue, right.referenceValue); + case TYPE_ORDER_GEOPOINT: + return _compareGeoPoints(left.geoPointValue, right.geoPointValue); + case TYPE_ORDER_ARRAY: + return _compareArrays(left.arrayValue, right.arrayValue); + case TYPE_ORDER_MAP: + return _compareMaps(left.mapValue, right.mapValue); + default: + throw fail('Invalid value type: $leftType'); + } +} + +int _compareNumbers(Value left, Value right) { + if (left.whichValueType() == Value_ValueType.doubleValue) { + final double leftDouble = left.doubleValue; + if (right.whichValueType() == Value_ValueType.doubleValue) { + return leftDouble.compareTo(right.doubleValue); + } else if (right.whichValueType() == Value_ValueType.integerValue) { + return leftDouble.compareTo(right.integerValue.toInt()); + } + } else if (left.whichValueType() == Value_ValueType.integerValue) { + final Int64 leftInt = left.integerValue; + if (right.whichValueType() == Value_ValueType.integerValue) { + return leftInt.compareTo(right.integerValue); + } else if (right.whichValueType() == Value_ValueType.doubleValue) { + return -1 * right.doubleValue.compareTo(leftInt.toDouble()); + } + } + + throw fail('Unexpected values: $left vs $right'); +} + +int _compareTimestamps(p.Timestamp left, p.Timestamp right) { + final int cmp = left.seconds.compareTo(right.seconds); + if (cmp != 0) { + return cmp; + } + + return left.nanos.compareTo(right.nanos); +} + +int _compareReferences(String leftPath, String rightPath) { + final List leftSegments = leftPath.split('/'); + final List rightSegments = rightPath.split('/'); + + final int minLength = math.min(leftSegments.length, rightSegments.length); + for (int i = 0; i < minLength; i++) { + final int cmp = leftSegments[i].compareTo(rightSegments[i]); + if (cmp != 0) { + return cmp; + } + } + + return leftSegments.length.compareTo(rightSegments.length); +} + +int _compareGeoPoints(p.LatLng left, p.LatLng right) { + final int comparison = left.latitude.compareTo(right.latitude); + if (comparison == 0) { + return left.longitude.compareTo(right.longitude); + } + return comparison; +} + +int _compareArrays(ArrayValue left, ArrayValue right) { + final int minLength = math.min(left.values.length, right.values.length); + for (int i = 0; i < minLength; i++) { + final int cmp = compare(left.values[i], right.values[i]); + if (cmp != 0) { + return cmp; + } + } + + return left.values.length.compareTo(right.values.length); +} + +int _compareMaps(MapValue left, MapValue right) { + final 
Iterator> iterator1 = SplayTreeMap.of(left.fields).entries.iterator; + final Iterator> iterator2 = SplayTreeMap.of(right.fields).entries.iterator; + while (iterator1.moveNext() && iterator2.moveNext()) { + final MapEntry entry1 = iterator1.current; + final MapEntry entry2 = iterator2.current; + final int keyCompare = entry1.key.compareTo(entry2.key); + if (keyCompare != 0) { + return keyCompare; + } + final int valueCompare = compare(entry1.value, entry2.value); + if (valueCompare != 0) { + return valueCompare; + } + } + + // Only equal if both iterators are exhausted. + return compareBools(iterator1.moveNext(), iterator2.moveNext()); +} + +/// Generate the canonical ID for the provided field value (as used in Target serialization). +String canonicalId(Value value) { + final StringBuffer buffer = StringBuffer(); + _canonifyValue(buffer, value); + return buffer.toString(); +} + +void _canonifyValue(StringBuffer buffer, Value value) { + switch (value.whichValueType()) { + case Value_ValueType.nullValue: + buffer.write('null'); + break; + case Value_ValueType.booleanValue: + buffer.write(value.booleanValue); + break; + case Value_ValueType.integerValue: + buffer.write(value.integerValue); + break; + case Value_ValueType.doubleValue: + buffer.write(value.doubleValue); + break; + case Value_ValueType.timestampValue: + _canonifyTimestamp(buffer, value.timestampValue); + break; + case Value_ValueType.stringValue: + buffer.write(value.stringValue); + break; + case Value_ValueType.bytesValue: + buffer.write(toDebugString(value.bytesValue)); + break; + case Value_ValueType.referenceValue: + _canonifyReference(buffer, value); + break; + case Value_ValueType.geoPointValue: + _canonifyGeoPoint(buffer, value.geoPointValue); + break; + case Value_ValueType.arrayValue: + _canonifyArray(buffer, value.arrayValue); + break; + case Value_ValueType.mapValue: + _canonifyObject(buffer, value.mapValue); + break; + default: + throw fail('Invalid value type: ${value.whichValueType()}'); + } +} + +void _canonifyTimestamp(StringBuffer buffer, p.Timestamp timestamp) { + buffer.write('time(${timestamp.seconds},${timestamp.nanos})'); +} + +void _canonifyGeoPoint(StringBuffer buffer, p.LatLng latLng) { + buffer.write('geo(${latLng.latitude},${latLng.longitude})'); +} + +void _canonifyReference(StringBuffer buffer, Value value) { + hardAssert(isReferenceValue(value), 'Value should be a ReferenceValue'); + buffer.write(DocumentKey.fromName(value.referenceValue)); +} + +void _canonifyObject(StringBuffer buffer, MapValue mapValue) { + // Even though MapValue are likely sorted correctly based on their insertion order (e.g. when + // received from the backend), local modifications can bring elements out of order. We need to + // re-sort the elements to ensure that canonical IDs are independent of insertion order. + final List keys = mapValue.fields.keys.toList()..sort(); + + buffer.write('{'); + bool first = true; + for (String key in keys) { + if (!first) { + buffer.write(','); + } else { + first = false; + } + buffer // + ..write(key) + ..write(':'); + _canonifyValue(buffer, mapValue.fields[key]); + } + buffer.write('}'); +} + +void _canonifyArray(StringBuffer buffer, ArrayValue arrayValue) { + buffer.write('['); + for (int i = 0; i < arrayValue.values.length; ++i) { + _canonifyValue(buffer, arrayValue.values[i]); + if (i != arrayValue.values.length - 1) { + buffer.write(','); + } + } + buffer.write(']'); +} + +/// Returns true if `value` is either a INTEGER_VALUE. 
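(Aside, not part of the patch: canonicalId() above re-sorts map keys, so canonical IDs are independent of insertion order. A minimal sketch under that assumption, using made-up values.)

// Illustrative sketch only -- not part of this patch.
import 'package:cloud_firestore_vm/src/firebase/firestore/model/values.dart' as values;
import 'package:cloud_firestore_vm/src/proto/google/firestore/v1/index.dart';
import 'package:fixnum/fixnum.dart';

void main() {
  final Value a = Value(
      mapValue: MapValue(fields: {
    'b': Value(integerValue: Int64(2)),
    'a': Value(integerValue: Int64(1)),
  }));
  final Value b = Value(
      mapValue: MapValue(fields: {
    'a': Value(integerValue: Int64(1)),
    'b': Value(integerValue: Int64(2)),
  }));

  // Both canonify to '{a:1,b:2}' and compare as equal, regardless of the
  // order in which the fields were inserted.
  assert(values.canonicalId(a) == values.canonicalId(b));
  assert(values.compare(a, b) == 0);
}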
+bool isInteger(Value value) { + return value != null && value.whichValueType() == Value_ValueType.integerValue; +} + +/// Returns true if `value` is either a DOUBLE_VALUE. +bool isDouble(Value value) { + return value != null && value.whichValueType() == Value_ValueType.doubleValue; +} + +/// Returns true if `value` is either a INTEGER_VALUE or a DOUBLE_VALUE. +bool isNumber(Value value) { + return isInteger(value) || isDouble(value); +} + +/// Returns true if `value` is an ARRAY_VALUE. +bool isArray(Value value) { + return value != null && value.whichValueType() == Value_ValueType.arrayValue; +} + +bool isReferenceValue(Value value) { + return value != null && value.whichValueType() == Value_ValueType.referenceValue; +} + +bool isNullValue(Value value) { + return value != null && value.whichValueType() == Value_ValueType.nullValue; +} + +bool isNanValue(Value value) { + return value != null && value.doubleValue.isNaN; +} + +bool isMapValue(Value value) { + return value != null && value.whichValueType() == Value_ValueType.mapValue; +} + +Value refValue(DatabaseId databaseId, DocumentKey key) { + return Value( + referenceValue: 'projects/${databaseId.projectId}/databases/${databaseId.databaseId}/documents/$key', + ); +} diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/query.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/query.dart index 56880af8..1ef88bca 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/query.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/query.dart @@ -264,7 +264,7 @@ class Query { if (op == FilterOperator.IN || op == FilterOperator.arrayContainsAny) { _validateDisjunctiveFilterElements(value, op); } - fieldValue = firestore.dataConverter.parseQueryValue(value); + fieldValue = firestore.userDataReader.parseQueryValue(value); } final Filter filter = FieldFilter(fieldPath.internalPath, op, fieldValue); _validateNewFilter(filter); @@ -629,7 +629,7 @@ class Query { components.add(ReferenceValue.valueOf(firestore.databaseId, key)); } else { final FieldValue wrapped = - firestore.dataConverter.parseQueryValue(rawValue); + firestore.userDataReader.parseQueryValue(rawValue); components.add(wrapped); } } diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/abstract_stream.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/abstract_stream.dart new file mode 100644 index 00000000..f028ffc8 --- /dev/null +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/abstract_stream.dart @@ -0,0 +1,327 @@ +// File created by +// Lung Razvan +// on 24/09/2018 + +import 'dart:async' hide Stream; + +import 'package:_firebase_internal_vm/_firebase_internal_vm.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/remote/datastore.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/remote/firestore_channel.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/remote/incoming_stream_observer.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/remote/stream.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/util/assert.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/util/async_task.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/util/exponential_backoff.dart'; +import 'package:grpc/grpc.dart'; +import 'package:meta/meta.dart'; +import 'package:protobuf/protobuf.dart'; + +/// An [AbstractStream] is an abstract base class that 
implements the [Stream] interface. +/// +/// [ReqT] The proto type that will be sent in this stream +/// [RespT] The proto type that is received through this stream +/// [CallbackT] The type which is used for stream specific callbacks. +abstract class AbstractStream implements Stream { + AbstractStream( + this._firestoreChannel, + this._methodDescriptor, + this._workerQueue, + TimerId connectionTimerId, + this._idleTimerId, + this.listener, + ) : backoff = ExponentialBackoff( + _workerQueue, + connectionTimerId, + initialDelay: _backoffInitialDelayMs, + backoffFactor: _backoffFactor, + maxDelay: _backoffMaxDelayMs, + ); + + /// Initial backoff time in milliseconds after an error. Set to 1s according to + /// https://cloud.google.com/apis/design/errors. + static const Duration _backoffInitialDelayMs = Duration(seconds: 1); + static const Duration _backoffMaxDelayMs = Duration(minutes: 1); + + static const double _backoffFactor = 1.5; + + /// The time a stream stays open after it is marked idle. + static const int _idleTimeoutMs = 60 * 1000; + + DelayedTask _idleTimer; + + final FirestoreChannel _firestoreChannel; + + final ClientMethod _methodDescriptor; + + final AsyncQueue _workerQueue; + + final TimerId _idleTimerId; + + StreamState _state = StreamState.initial; + + /// A close count that's incremented every time the stream is closed; used by [CloseGuardedRunner] + /// to invalidate callbacks that happen after close. + int _closeCount = 0; + + BidiChannel _call; + final ExponentialBackoff backoff; + final CallbackT listener; + + @override + bool get isStarted { + return _state == StreamState.starting || _state == StreamState.open || _state == StreamState.backoff; + } + + @override + bool get isOpen { + return _state == StreamState.open; + } + + @override + Future start() async { + hardAssert(_call == null, 'Last call still set'); + hardAssert(_idleTimer == null, 'Idle timer still set'); + + if (_state == StreamState.error) { + _performBackoff(); + return; + } + + hardAssert(_state == StreamState.initial, 'Already started'); + + final CloseGuardedRunner closeGuardedRunner = CloseGuardedRunner(_workerQueue, _closeCount, () => _closeCount); + final StreamObserver streamObserver = + StreamObserver(closeGuardedRunner, this); + + _call = _firestoreChannel.runBidiStreamingRpc(_methodDescriptor, streamObserver); + + // Note that Starting is only used as intermediate state until onOpen is called asynchronously, + // since auth handled transparently by gRPC + _state = StreamState.starting; + _state = StreamState.open; + + await listener.onOpen(); + } + + /// Closes the stream and cleans up as necessary: + /// * closes the underlying GRPC stream; + /// * calls the [onClose] handler with the given [status]; + /// * sets internal stream state to [finalState]; + /// * adjusts the backoff timer based on status + /// + /// A new stream can be opened by calling [start]. + Future _close(StreamState finalState, GrpcError status) async { + hardAssert(isStarted, 'Only started streams should be closed.'); + hardAssert(finalState == StreamState.error || status.code == StatusCode.ok, + 'Can\'t provide an error when not in an error state.'); + + // Cancel any outstanding timers (they're guaranteed not to execute). + _cancelIdleCheck(); + backoff.cancel(); + + // Invalidates any stream-related callbacks (e.g. from auth or the underlying stream), + // guaranteeing they won't execute. 
+ _closeCount++; + + final int code = status.code; + if (code == StatusCode.ok) { + // If this is an intentional close ensure we don't delay our next connection attempt. + backoff.reset(); + } else if (code == StatusCode.resourceExhausted) { + Log.d(runtimeType.toString(), '($hashCode) Using maximum backoff delay to prevent overloading the backend.'); + backoff.resetToMax(); + } else if (code == StatusCode.unauthenticated) { + // 'unauthenticated' error means the token was rejected. Try force refreshing it in case it + // just expired. + _firestoreChannel.invalidateToken(); + } + + if (finalState != StreamState.error) { + Log.d(runtimeType.toString(), '($hashCode) Performing stream teardown'); + tearDown(); + } + + if (_call != null) { + // Clean up the underlying RPC. If this [close()] is in response to an error, don't attempt to + // call half-close to avoid secondary failures. + if (status.code == StatusCode.ok) { + Log.d(runtimeType.toString(), '($hashCode) Closing stream client-side'); + _call.cancel(); + } + _call = null; + } + + // This state must be assigned before calling listener.onClose to allow the callback to inhibit + // backoff or otherwise manipulate the state in its non-started state. + _state = finalState; + + // Notify the listener that the stream closed. + await listener.onClose(status); + } + + /// Can be overridden to perform additional cleanup before the stream is closed. + void tearDown() {} + + @override + Future stop() async { + if (isStarted) { + await _close(StreamState.initial, GrpcError.ok()); + } + } + + @override + void inhibitBackoff() { + hardAssert(!isStarted, 'Can only inhibit backoff after in a stopped state'); + + _state = StreamState.initial; + backoff.reset(); + } + + void writeRequest(ReqT message) { + Log.d(runtimeType.toString(), '($hashCode) Stream sending: ${message.writeToJsonMap()}'); + _cancelIdleCheck(); + _call.add(message); + } + + /// Called by the idle timer when the stream should close due to inactivity. + Future _handleIdleCloseTimer() async { + if (isOpen) { + // When timing out an idle stream there's no reason to force the stream into backoff when it + // restarts so set the stream state to Initial instead of Error. + await _close(StreamState.initial, GrpcError.ok()); + } + } + + /// Called when GRPC closes the stream, which should always be due to some error. + @visibleForTesting + Future handleServerClose(GrpcError status) async { + hardAssert(isStarted, 'Can\'t handle server close on non-started stream!'); + + // In theory the stream could close cleanly, however, in our current model we never expect this + // to happen because if we stop a stream ourselves, this callback will never be called. To + // prevent cases where we retry without a backoff accidentally, we set the stream to error in + // all cases. + await _close(StreamState.error, status); + } + + Future onNext(RespT change); + + void _performBackoff() { + hardAssert(_state == StreamState.error, 'Should only perform backoff in an error state'); + _state = StreamState.backoff; + + backoff.backoffAndRun(() async { + hardAssert(_state == StreamState.backoff, 'State should still be backoff but was $_state'); + // Momentarily set state to Initial as start() expects it. + _state = StreamState.initial; + await start(); + hardAssert(isStarted, 'Stream should have started'); + }); + } + + /// Marks this stream as idle. 
If no further actions are performed on the stream for one minute, + /// the stream will automatically close itself and notify the stream's [onClose] handler with + /// [GrpcError.ok]. The stream will then be in a [!isStarted] state, requiring the caller to start + /// the stream again before further use. + /// + /// Only streams that are in state [StreamState.open] can be marked idle, as all other states + /// imply pending network operations. + void markIdle() { + // Starts the idle timer if we are in state [StreamState.Open] and are not yet already running a + // timer (in which case the previous idle timeout still applies). + if (isOpen && _idleTimer == null) { + _idleTimer = _workerQueue.enqueueAfterDelay( + _idleTimerId, + const Duration(milliseconds: _idleTimeoutMs), + _handleIdleCloseTimer, + ); + } + } + + void _cancelIdleCheck() { + if (_idleTimer != null) { + _idleTimer.cancel(); + _idleTimer = null; + } + } +} + +/// A 'runner' that runs operations but only if [closeCount] remains unchanged. This allows us to +/// turn auth / stream callbacks into no-ops if the stream is closed / re-opened, etc. +/// +/// PORTING NOTE: Because all the stream callbacks already happen on the [asyncQueue], we don't need +/// to dispatch onto the queue, and so we instead only expose a run() method which asserts that +/// we're already on the [asyncQueue]. +class CloseGuardedRunner { + CloseGuardedRunner( + this.asyncQueue, + this.initialCloseCount, + this.closeCount, + ); + + final AsyncQueue asyncQueue; + final int initialCloseCount; + final int Function() closeCount; + + Future run(Task task) async { + if (closeCount() == initialCloseCount) { + await asyncQueue.enqueue(task); + } else { + Log.d('AbstractStream', 'stream callback skipped by CloseGuardedRunner.'); + } + } +} + +/// Implementation of [IncomingStreamObserver] that runs callbacks via [CloseGuardedRunner]. 
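(Aside, not part of the patch: the close-count guard that CloseGuardedRunner above relies on, reduced to its essence. The names here are illustrative, not the real API.)

// Illustrative sketch only -- not part of this patch.
void main() {
  int closeCount = 0;

  // A callback scheduled while the stream is open captures the current count.
  final int capturedCount = closeCount;
  void guardedCallback() {
    if (capturedCount == closeCount) {
      print('stream callback runs');
    } else {
      print('stream callback skipped (stream was closed or re-opened)');
    }
  }

  // Simulate the stream being closed (and possibly re-opened) before the
  // callback gets to run; the stale callback then becomes a no-op.
  closeCount++;
  guardedCallback(); // prints the "skipped" branch
}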
+class StreamObserver + implements IncomingStreamObserver { + const StreamObserver(this._dispatcher, this.stream); + + final CloseGuardedRunner _dispatcher; + final AbstractStream stream; + + @override + void onHeaders(Map headers) { + _dispatcher.run(() async { + if (Log.isDebugEnabled) { + final Map whitelistedHeaders = {}; + for (String header in headers.keys) { + if (Datastore.whiteListedHeaders.contains(header.toLowerCase())) { + whitelistedHeaders[header] = headers[header]; + } + } + if (whitelistedHeaders.isNotEmpty) { + Log.d('AbstractStream', '($hashCode) Stream received headers: $whitelistedHeaders'); + } + } + }); + } + + @override + Future onNext(RespT response) async { + await _dispatcher.run(() async { + if (Log.isDebugEnabled) { + Log.d('AbstractStream', '($hashCode) Stream received: ${response.writeToJsonMap()}'); + } + await stream.onNext(response); + }); + } + + @override + void onOpen() { + _dispatcher.run(() async => Log.d('AbstractStream', '($hashCode) Stream is open')); + } + + @override + void onClose(GrpcError status) { + _dispatcher.run(() async { + if (status.code == StatusCode.ok) { + Log.d('AbstractStream', '($hashCode) Stream closed.'); + } else { + Log.d('AbstractStream', '($hashCode) Stream closed with status: $status.'); + } + await stream.handleServerClose(status); + }); + } +} diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/datastore.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/datastore.dart new file mode 100644 index 00000000..5894fb42 --- /dev/null +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/datastore.dart @@ -0,0 +1,219 @@ +// File created by +// Lung Razvan +// on 20/09/2018 + +import 'dart:async'; + +import 'package:cloud_firestore_vm/src/firebase/firestore/auth/credentials_provider.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/core/database_info.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/firestore_error.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/document_key.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/maybe_document.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/mutation.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/mutation_result.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/snapshot_version.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/remote/firebase_client_grpc_metadata_provider.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/remote/firestore_channel.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/remote/remote_serializer.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/remote/watch_stream.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/remote/write_stream.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/util/async_task.dart'; +import 'package:cloud_firestore_vm/src/proto/google/firestore/v1/index.dart'; +import 'package:grpc/grpc.dart'; +import 'package:meta/meta.dart'; +import 'package:protobuf/protobuf.dart'; + +/// Datastore represents a proxy for the remote server, hiding details of the RPC layer. 
It: +/// * Manages connections to the server +/// * Authenticates to the server +/// * Manages threading and keeps higher-level code running on the worker queue +/// * Serializes internal model objects to and from protocol buffers +/// +/// The Datastore is generally not responsible for understanding the higher-level protocol involved +/// in actually making changes or reading data, and is otherwise stateless. +class Datastore { + factory Datastore({ + @required DatabaseInfo databaseInfo, + @required AsyncQueue workerQueue, + @required CredentialsProvider credentialsProvider, + ClientChannel clientChannel, + GrpcMetadataProvider metadataProvider, + }) { + clientChannel ??= ClientChannel( + databaseInfo.host, + options: ChannelOptions( + credentials: databaseInfo.sslEnabled // + ? const ChannelCredentials.secure() + : const ChannelCredentials.insecure(), + ), + ); + + final FirestoreChannel channel = FirestoreChannel( + asyncQueue: workerQueue, + credentialsProvider: credentialsProvider, + channel: clientChannel, + databaseId: databaseInfo.databaseId, + metadataProvider: metadataProvider, + ); + + final RemoteSerializer serializer = RemoteSerializer(databaseInfo.databaseId); + + return Datastore.init(databaseInfo, workerQueue, serializer, channel); + } + + @visibleForTesting + Datastore.init(this.databaseInfo, this.workerQueue, this.serializer, this.channel); + + /// Set of lowercase, white-listed headers for logging purposes. + static final Set whiteListedHeaders = { + 'date', + 'x-google-backends', + 'x-google-netmon-label', + 'x-google-service', + 'x-google-gfe-request-trace' + }; + + final DatabaseInfo databaseInfo; + final AsyncQueue workerQueue; + final RemoteSerializer serializer; + final FirestoreChannel channel; + + Future shutdown() => channel.shutdown(); + + /// Creates a new [WatchStream] that is still unstarted but uses a common shared channel + WatchStream createWatchStream(WatchStreamCallback listener) { + return WatchStream(channel, workerQueue, serializer, listener); + } + + /// Creates a new [WriteStream] that is still unstarted but uses a common shared channel + WriteStream createWriteStream(WriteStreamCallback listener) { + return WriteStream(channel, workerQueue, serializer, listener); + } + + Future> commit(List mutations) async { + final CommitRequest builder = CommitRequest.create()..database = serializer.databaseName; + + for (Mutation mutation in mutations) { + builder.writes.add(serializer.encodeMutation(mutation)); + } + + try { + final CommitResponse response = await channel.runRpc( + ClientMethod( + 'firestore.googleapis.com/google.firestore.v1.Firestore/Commit', + (GeneratedMessage req) => req.writeToBuffer(), + (List req) => CommitResponse.fromBuffer(req), + ), + builder.freeze(), + ); + + final SnapshotVersion commitVersion = serializer.decodeVersion(response.commitTime); + + final int count = response.writeResults.length; + final List results = List(count); + for (int i = 0; i < count; i++) { + final WriteResult result = response.writeResults[i]; + results[i] = serializer.decodeMutationResult(result, commitVersion); + } + return results; + } catch (e) { + if (e is FirestoreError && e.code == FirestoreErrorCode.unauthenticated) { + channel.invalidateToken(); + } + + rethrow; + } + } + + Future> lookup(List keys) async { + final BatchGetDocumentsRequest builder = BatchGetDocumentsRequest.create()..database = serializer.databaseName; + for (DocumentKey key in keys) { + builder.documents.add(serializer.encodeKey(key)); + } + + try { + 
final List responses = await channel.runStreamingResponseRpc(
+          ClientMethod(
+            'firestore.googleapis.com/google.firestore.v1.Firestore/BatchGetDocuments',
+            (GeneratedMessage req) => req.writeToBuffer(),
+            (List res) => BatchGetDocumentsResponse.fromBuffer(res),
+          ),
+          builder.freeze());
+
+      final Map resultMap = {};
+      for (BatchGetDocumentsResponse response in responses) {
+        final MaybeDocument doc = serializer.decodeMaybeDocument(response);
+        resultMap[doc.key] = doc;
+      }
+      final List results = [];
+      for (DocumentKey key in keys) {
+        results.add(resultMap[key]);
+      }
+      return results;
+    } catch (e) {
+      if (e is FirestoreError && e.code == FirestoreErrorCode.unauthenticated) {
+        channel.invalidateToken();
+      }
+      rethrow;
+    }
+  }
+
+  /// Determines whether the given status has an error code that represents a permanent error when received in
+  /// response to a non-write operation.
+  ///
+  /// See [isPermanentWriteError] for classifying write errors.
+  static bool isPermanentGrpcError(GrpcError status) {
+    return isPermanentError(FirestoreErrorCode.fromValue(status));
+  }
+
+  /// Determines whether the given error code represents a permanent error when received in response to a non-write
+  /// operation.
+  ///
+  /// See [isPermanentWriteError] for classifying write errors.
+  static bool isPermanentError(FirestoreErrorCode code) {
+    // See go/firestore-client-errors
+    switch (code) {
+      case FirestoreErrorCode.ok:
+        throw ArgumentError('Treated status OK as error');
+      case FirestoreErrorCode.cancelled:
+      case FirestoreErrorCode.unknown:
+      case FirestoreErrorCode.deadlineExceeded:
+      case FirestoreErrorCode.resourceExhausted:
+      case FirestoreErrorCode.internal:
+      case FirestoreErrorCode.unavailable:
+      case FirestoreErrorCode.unauthenticated:
+        // Unauthenticated means something went wrong with our token and we need to retry with new credentials,
+        // which will happen automatically.
+        return false;
+      case FirestoreErrorCode.invalidArgument:
+      case FirestoreErrorCode.notFound:
+      case FirestoreErrorCode.alreadyExists:
+      case FirestoreErrorCode.permissionDenied:
+      case FirestoreErrorCode.failedPrecondition:
+      case FirestoreErrorCode.aborted:
+        // Aborted might be retried in some scenarios, but that is dependent on the context and should be handled
+        // individually by the calling code. See https://cloud.google.com/apis/design/errors.
+      case FirestoreErrorCode.outOfRange:
+      case FirestoreErrorCode.unimplemented:
+      case FirestoreErrorCode.dataLoss:
+        return true;
+      default:
+        throw ArgumentError('Unknown gRPC status code: $code');
+    }
+  }
+
+  /// Determines whether the given status has an error code that represents a permanent error when received in
+  /// response to a write operation.
+  ///
+  /// Write operations must be handled specially because, as of b/119437764, ABORTED errors on the write stream
+  /// should be retried too (even though ABORTED errors are not generally retryable).
+  ///
+  /// Note that during the initial handshake on the write stream an ABORTED error signals that we should discard our
+  /// stream token (i.e. it is permanent). This means a handshake error should be classified with [isPermanentError],
+  /// above.
+ static bool isPermanentWriteError(GrpcError status) { + return isPermanentGrpcError(status) && status.code != StatusCode.aborted; + } +} diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/datastore/base_stream.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/datastore/base_stream.dart deleted file mode 100644 index 33f996e8..00000000 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/datastore/base_stream.dart +++ /dev/null @@ -1,282 +0,0 @@ -// File created by -// Lung Razvan -// on 01/12/2019 - -part of datastore; - -abstract class BaseStream extends DelegatingStream { - BaseStream( - StreamController eventsController, - TaskScheduler scheduler, - TaskId idleTaskId, - TaskId connectionTaskId, - ) : assert(eventsController != null), - assert(scheduler != null), - assert(idleTaskId != null), - _backoff = ExponentialBackoff(scheduler, connectionTaskId), - _eventsController = eventsController, - _scheduler = scheduler, - _idleTimerId = idleTaskId, - super(eventsController.stream); - - final ExponentialBackoff _backoff; - final StreamController _eventsController; - final TaskScheduler _scheduler; - final TaskId _idleTimerId; - - State _state = State.initial; - TimerTask _idleTimer; - - ResponseStream _writeResponse; - StreamSubscription _responseSub; - StreamController _requestsController; - - bool get isStarted => - _state == State.starting || - _state == State.open || - _state == State.backoff; - - bool get isOpen => _state == State.open; - - @mustCallSuper - Future start() async { - assert(_requestsController == null, 'Last stream is still active'); - assert(_responseSub == null, 'Last subscription is still active'); - assert(_idleTimer == null, 'Idle timer still set'); - - if (_state == State.error) { - _performBackoff(); - return; - } - - assert(_state == State.initial, 'Already started'); - _changeState(State.starting); - _requestsController = StreamController.broadcast(); - _writeResponse = await _buildCall(_requestsController.stream); - - _responseSub = _writeResponse.listen(_onData, - onError: _onError, onDone: _onDone, cancelOnError: false); - unawaited(_writeResponse.headers.then(_onHeaders)); - _changeState(State.open); - } - - Future stop() async { - if (isStarted) { - return _close(State.initial, GrpcError.ok()); - } - } - - void tearDown() {} - - void inhibitBackoff() { - assert(!isStarted, 'Can inhibit backoff only after in a stopped state'); - _changeState(State.initial); - _backoff.reset(); - } - - void writeRequest(Req request) { - Log.d('$runtimeType', - '($hashCode) Stream sending: ${request.toProto3Json()}'); - _cancelIdleCheck(); - _requestsController.add(request); - } - - /// Marks this stream as idle. If no further actions are performed on the - /// stream for one minute, the stream will automatically close itself and - /// emit a [CloseEvent] with [GrpcError.ok]. The stream will then be in a - /// ![isStarted] state, requiring the caller to start the stream again before - /// further use. - /// - /// Only streams that are in state [State.open] can be marked idle, as all - /// other states imply pending network operations. - void markIdle() { - // Starts the idle timer if we are in state [State.open] and are not yet - // already running a timer (in which case the previous idle timeout still - // applies). 
- if (isOpen && _idleTimer == null) { - _idleTimer = _scheduler.add( - _idleTimerId, const Duration(seconds: 10), _handleIdleCloseTimer); - } - } - - /// Called when GRPC closes the stream, which should always be due to some error. - @visibleForTesting - Future handleServerClose(GrpcError status) async { - assert(isStarted, 'Can\'t handle server close on non-started stream!'); - - // In theory the stream could close cleanly, however, in our current model we never expect this to happen because - // if we stop a stream ourselves, this callback will never be called. To prevent cases where we retry without a - // backoff accidentally, we set the stream to error in all cases. - await _close(State.error, status); - } - - @visibleForTesting - void addEvent(StreamEvent event) { - _eventsController.add(event); - } - - Future> _buildCall(Stream requests); - - @mustCallSuper - void _onData(Res response) { - if (_state != State.closing) { - if (Log.isDebugEnabled) { - Log.d('$runtimeType', - '($hashCode) Stream received: ${response.toProto3Json()}'); - } - } - } - - void _onHeaders(Map headers) { - if (Log.isDebugEnabled) { - final Iterable> entries = headers.keys - .where((String key) => whiteListedHeaders.contains(key.toLowerCase())) - .map((String key) => MapEntry(key, headers[key])); - - final Map values = - Map.fromEntries(entries); - if (values.isNotEmpty) { - Log.d('$runtimeType', '($hashCode) Stream received headers: $values'); - } - } - } - - void _onError(dynamic error, StackTrace stackTrace) { - if (_state != State.closing) { - if (error is GrpcError) { - if (error.code == StatusCode.ok) { - Log.d('$runtimeType', '($hashCode) Stream closed.'); - } else { - Log.d( - '$runtimeType', '($hashCode) Stream closed with status: $error.'); - } - - handleServerClose(error); - } else { - Log.d('$runtimeType', - '($hashCode) Stream closed with status: ${StatusCode.unknown} $error.'); - handleServerClose(GrpcError.unknown(error.toString())); - } - } - } - - void _onDone() {} - - void _performBackoff() { - assert( - _state == State.error, 'Should only perform backoff in an error state'); - _changeState(State.backoff); - _backoff.backoffAndRun(_restart); - } - - Future _restart() async { - assert(_state == State.backoff, - 'State should still be backoff but was $_state'); - // Momentarily set state to Initial as start() expects it. - _changeState(State.initial); - await start(); - assert(isStarted, 'Stream should have started'); - } - - Future _close(State state, GrpcError grpcError) async { - assert(isStarted, 'Only started streams should be closed.'); - assert(state == State.error || grpcError.code == StatusCode.ok, - 'Can\'t provide an error when not in an error state.'); - - if (state != State.error) { - Log.d('$runtimeType', '($hashCode) Performing stream teardown'); - tearDown(); - } - - _changeState(State.closing); - - // Cancel any outstanding timers (they're guaranteed not to execute). - _cancelIdleCheck(); - _backoff.cancel(); - - final int code = grpcError.code; - if (code == StatusCode.ok) { - // If this is an intentional close ensure we don't delay our next - // connection attempt. - _backoff.reset(); - } else if (code == StatusCode.resourceExhausted) { - Log.d('$runtimeType', - '($hashCode) Using maximum backoff delay to prevent overloading the backend.'); - _backoff.resetToMax(); - } else if (code == StatusCode.unauthenticated) { - // 'unauthenticated' error means the token was rejected. 
Force refreshing - // was done by the FirestoreClient - Log.d('$runtimeType', - '($hashCode) Unauthenticated trying to refresh the token.'); - } - - if (grpcError.code == StatusCode.ok) { - Log.d('$runtimeType', '($hashCode) Closing stream client-side'); - } - - // this are null only when called from tests - - await _requestsController - ?.close() - ?.then((dynamic _) => _requestsController = null); - await _responseSub?.cancel()?.then((dynamic _) => _responseSub = null); - await _writeResponse?.cancel()?.then((_) => _writeResponse = null); - - // This state must be assigned before emitting [CloseEvent] to allow the - // callback to inhibit backoff or otherwise manipulate the state in its - // non-started state. - _changeState(state); - - // Notify the listener that the stream closed. - addEvent(CloseEvent(grpcError)); - } - - void _cancelIdleCheck() { - if (_idleTimer != null) { - _idleTimer.cancel(); - _idleTimer = null; - } - } - - /// Called by the idle timer when the stream should close due to inactivity. - Future _handleIdleCloseTimer() async { - if (isOpen) { - // When timing out an idle stream there's no reason to force the stream - // into backoff when it restarts so set the stream state to initial - // instead of error. - return _close(State.initial, GrpcError.ok()); - } - } - - void _changeState(State state) { - _state = state; - if (state == State.open) { - addEvent(const OpenEvent()); - } - } - - static final Set whiteListedHeaders = { - 'date', - 'x-google-backends', - 'x-google-netmon-label', - 'x-google-service', - 'x-google-gfe-request-trace' - }; -} - -enum State { initial, starting, open, error, backoff, closing } - -abstract class StreamEvent { - const StreamEvent(); -} - -class OpenEvent extends StreamEvent { - const OpenEvent(); -} - -class CloseEvent extends StreamEvent { - const CloseEvent(this.error); - - final GrpcError error; -} diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/datastore/channel_options_provider.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/datastore/channel_options_provider.dart deleted file mode 100644 index 70798556..00000000 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/datastore/channel_options_provider.dart +++ /dev/null @@ -1,48 +0,0 @@ -// File created by -// Lung Razvan -// on 01/12/2019 - -import 'dart:io'; - -import 'package:cloud_firestore_vm/src/firebase/firestore/auth/credentials_provider.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/core/version.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/model/database_id.dart'; -import 'package:grpc/grpc.dart'; -import 'package:meta/meta.dart'; - -import '../firestore_call_credentials.dart'; - -/// Helper class to provide the headers that gRPC needs -class ChannelOptionsProvider { - const ChannelOptionsProvider({ - @required DatabaseId databaseId, - @required CredentialsProvider credentialsProvider, - }) : assert(databaseId != null), - assert(credentialsProvider != null), - _databaseId = databaseId, - _credentialsProvider = credentialsProvider; - - final DatabaseId _databaseId; - final CredentialsProvider _credentialsProvider; - - CallOptions get callOptions { - return CallOptions( - providers: [ - FirestoreCallCredentials(_credentialsProvider).getRequestMetadata, - (Map map, String url) { - map['x-goog-api-client'] = _xGoogApiClientValue; - map['google-cloud-resource-prefix'] = _resourcePrefix; - } - ], - ); - } - - void invalidateToken() => 
_credentialsProvider.invalidateToken(); - - // This header is used to improve routing and project isolation by the backend. - String get _resourcePrefix => - 'projects/${_databaseId.projectId}/databases/${_databaseId.databaseId}'; - - static final String _xGoogApiClientValue = - 'gl-dart/${Platform.version} fire/${Version.sdkVersion} grpc/${Version.grpcVersion}'; -} diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/datastore/datastore.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/datastore/datastore.dart deleted file mode 100644 index 9aeff3ff..00000000 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/datastore/datastore.dart +++ /dev/null @@ -1,202 +0,0 @@ -// File created by -// Lung Razvan -// on 20/09/2018 - -library datastore; - -import 'dart:async'; -import 'dart:typed_data'; - -import 'package:_firebase_internal_vm/_firebase_internal_vm.dart'; -import 'package:async/async.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/auth/credentials_provider.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/core/database_info.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/firestore_error.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/query_data.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/model/document_key.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/model/maybe_document.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/mutation.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/mutation_result.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/model/snapshot_version.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/remote/datastore/channel_options_provider.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/remote/remote_serializer.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/remote/watch_change.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/util/assert.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/util/exponential_backoff.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/util/on_error_resume.dart.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/util/timer_task.dart'; -import 'package:cloud_firestore_vm/src/proto/index.dart' as proto; -import 'package:grpc/grpc.dart'; -import 'package:meta/meta.dart'; -import 'package:pedantic/pedantic.dart'; -import 'package:protobuf/protobuf.dart'; -import 'package:rxdart/rxdart.dart' hide OnErrorResumeStreamTransformer; - -import 'channel_options_provider.dart'; - -part 'base_stream.dart'; -part 'firestore_client.dart'; -part 'transaction_client.dart'; -part 'watch_stream.dart'; -part 'write_stream.dart'; - -/// Datastore represents a proxy for the remote server, hiding details of the -/// RPC layer. It: -/// * Manages connections to the server -/// * Authenticates to the server -/// * Serializes internal model objects to and from protocol buffers -/// -/// The Datastore is generally not responsible for understanding the -/// higher-level protocol involved in actually making changes or reading data, -/// and is otherwise stateless. 
-class Datastore { - factory Datastore( - TaskScheduler scheduler, - DatabaseInfo databaseInfo, - CredentialsProvider credentialsProvider, { - ClientChannel clientChannel, - }) { - clientChannel ??= ClientChannel( - databaseInfo.host, - port: databaseInfo.port, - options: ChannelOptions( - credentials: databaseInfo.sslEnabled // - ? const ChannelCredentials.secure() - : const ChannelCredentials.insecure(), - ), - ); - - final ChannelOptionsProvider optionsProvider = ChannelOptionsProvider( - databaseId: databaseInfo.databaseId, - credentialsProvider: credentialsProvider, - ); - final FirestoreClient client = - FirestoreClient(clientChannel, optionsProvider); - final RemoteSerializer serializer = - RemoteSerializer(databaseInfo.databaseId); - clientChannel - .getConnection() // - .then((dynamic connection) => connection.onStateChanged = - (dynamic c) => client.onStateChanged(c.state)); - return Datastore.test(scheduler, databaseInfo, serializer, client); - } - - @visibleForTesting - Datastore.test( - this._scheduler, - this._databaseInfo, - this._serializer, - this._client, - ); - - /// Set of lowercase, white-listed headers for logging purposes. - static final Set whiteListedHeaders = { - 'date', - 'x-google-backends', - 'x-google-netmon-label', - 'x-google-service', - 'x-google-gfe-request-trace' - }; - - final TaskScheduler _scheduler; - final DatabaseInfo _databaseInfo; - final RemoteSerializer _serializer; - final FirestoreClient _client; - - /// Creates a new [WatchStream] that is still unstarted but uses a common - /// shared channel - WatchStream get watchStream { - return WatchStream( - client: _client, - scheduler: _scheduler, - serializer: _serializer, - ); - } - - /// Creates a new [WriteStream] that is still unstarted but uses a common - /// shared channel - WriteStream get writeStream { - return WriteStream( - client: _client, - scheduler: _scheduler, - serializer: _serializer, - ); - } - - /// Creates a new [TransactionClient] that uses a common shared channel - TransactionClient get transactionClient => - TransactionClient(_client, _serializer); - - Future shutdown() { - return _client.shutdown(); - } - - /// Determines whether the given status has an error code that represents a - /// permanent error when received in response to a non-write operation. - /// - /// See [isPermanentWriteError] for classifying write errors. - static bool isPermanentGrpcError(GrpcError status) { - return isPermanentError(FirestoreErrorCode.fromValue(status)); - } - - /// Determines whether the given error code represents a permanent error when - /// received in response to a non-write operation. - /// - /// See [isPermanentWriteError] for classifying write errors. - static bool isPermanentError(FirestoreErrorCode code) { - // See go/firestore-client-errors - switch (code) { - case FirestoreErrorCode.ok: - throw ArgumentError('Treated status OK as error'); - case FirestoreErrorCode.cancelled: - case FirestoreErrorCode.unknown: - case FirestoreErrorCode.deadlineExceeded: - case FirestoreErrorCode.resourceExhausted: - case FirestoreErrorCode.internal: - case FirestoreErrorCode.unavailable: - case FirestoreErrorCode.unauthenticated: - // Unauthenticated means something went wrong with our token and we need - // to retry with new credentials which will happen automatically. 
- return false; - case FirestoreErrorCode.invalidArgument: - case FirestoreErrorCode.notFound: - case FirestoreErrorCode.alreadyExists: - case FirestoreErrorCode.permissionDenied: - case FirestoreErrorCode.failedPrecondition: - case FirestoreErrorCode.aborted: - // Aborted might be retried in some scenarios, but that is dependant on - // the context and should handled individually by the calling code. - // See https://cloud.google.com/apis/design/errors. - case FirestoreErrorCode.outOfRange: - case FirestoreErrorCode.unimplemented: - case FirestoreErrorCode.dataLoss: - return true; - default: - throw ArgumentError('Unknown gRPC status code: $code'); - } - } - - /// Determines whether the given status has an error code that represents a - /// permanent error when received in response to a write operation. - /// - /// Write operations must be handled specially because as of b/119437764, - /// ABORTED errors on the write stream should be retried too (even though - /// ABORTED errors are not generally retryable). - /// - /// Note that during the initial handshake on the write stream an ABORTED - /// error signals that we should discard our stream token (i.e. it is - /// permanent). This means a handshake error should be classified with - /// [isPermanentError], above. - static bool isPermanentWriteError(GrpcError status) { - return isPermanentGrpcError(status) && status.code != StatusCode.aborted; - } - - @override - String toString() { - return (ToStringHelper(Datastore) - ..add('databaseInfo', _databaseInfo) - ..add('serializer', _serializer) - ..add('client', _client)) - .toString(); - } -} diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/datastore/firestore_client.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/datastore/firestore_client.dart deleted file mode 100644 index 548b8b8f..00000000 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/datastore/firestore_client.dart +++ /dev/null @@ -1,202 +0,0 @@ -// File created by -// Lung Razvan -// on 02/12/2019 - -part of datastore; - -/// Helper class that wraps the gRPC calls and invalidates the auth token when [StatusCode.unauthenticated] is received -/// for both the [Future]s and the [Stream]s. -class FirestoreClient extends Client { - @visibleForTesting - FirestoreClient(ClientChannel channel, this._optionsProvider) - : assert(channel != null), - assert(_optionsProvider != null), - _channel = channel, - _state = BehaviorSubject(), - super(channel, options: _optionsProvider.callOptions); - - final BehaviorSubject _state; - final ChannelOptionsProvider _optionsProvider; - final ClientChannel _channel; - - void onStateChanged(ConnectionState state) { - Log.d('FirestoreClient', state.toString()); - _state.add(state); - } - - /// Gets multiple documents. - /// - /// Documents returned by this method are not guaranteed to be returned in the - /// same order that they were requested. - ResponseStream batchGetDocuments( - proto.BatchGetDocumentsRequest request, { - CallOptions options, - }) { - final ClientCall call = - $createCall(_batchGetDocuments, - Stream.value(request), - options: options); - return ResponseStream( - call, _buildStream(call)); - } - - /// Commits a transaction, while optionally updating documents. - ResponseFuture commit(proto.CommitRequest request, - {CallOptions options}) { - final ClientCall call = - $createCall(_commit, Stream.value(request), - options: options); - return _buildFuture(call); - } - - /// Listens to changes. 
- Future> listen( - Stream request) async { - final ClientCall call = - $createCall(_listen, request); - - if (_state.value == ConnectionState.ready) { - return ResponseStream(call, _buildStream(call)); - } else { - return _state - .where((ConnectionState state) => state == ConnectionState.ready) - .mapTo(ResponseStream(call, _buildStream(call))) - .first; - } - } - - /// Streams batches of document updates and deletes, in order. - Future> write( - Stream request) async { - final ClientCall call = - $createCall(_write, request); - - if (_state.value == ConnectionState.ready) { - return ResponseStream(call, _buildStream(call)); - } else { - return _state - .where((ConnectionState state) => state == ConnectionState.ready) - .mapTo(ResponseStream(call, _buildStream(call))) - .first; - } - } - - /// Terminates this connection. - /// - /// All open calls are terminated immediately, and no further calls may be made on this connection. - Future shutdown() => _channel.terminate(); - - Stream _buildStream(ClientCall call) { - return call.response.transform(DoStreamTransformer( - onError: (dynamic error, [StackTrace stackTrace]) { - if (error is GrpcError && error.code == StatusCode.unauthenticated) { - Log.d('FirestoreClient', - 'Received status ${error.code}. Invalidating the token.'); - _optionsProvider.invalidateToken(); - } - }, - )) - // TODO(long1eu): OnErrorResumeStreamTransformer doesn't provide the stacktrace - .transform(OnErrorResumeStreamTransformer( - (dynamic error, [StackTrace stackTrace]) => Stream.error( - error is GrpcError - ? FirestoreError( - error.message, FirestoreErrorCode.values[error.code]) - : FirestoreError( - error.toString(), FirestoreErrorCode.unknown), - stackTrace))); - } - - Future _buildFuture(ClientCall call) { - final Future future = _buildStream(call) // - .fold(null, _ensureOnlyOneResponse) - .then(_ensureOneResponse); - - return ResponseFuture(call, future); - } - - static R _ensureOnlyOneResponse(R previous, R element) { - if (previous != null) { - throw GrpcError.unimplemented('More than one response received'); - } - return element; - } - - static R _ensureOneResponse(R value) { - if (value == null) { - throw GrpcError.unimplemented('No responses received'); - } - return value; - } - - static final ClientMethod _batchGetDocuments = - ClientMethod( - '/google.firestore.v1.Firestore/BatchGetDocuments', - (proto.BatchGetDocumentsRequest value) => value.writeToBuffer(), - (List value) => proto.BatchGetDocumentsResponse.fromBuffer(value), - ); - - static final ClientMethod _commit = - ClientMethod( - '/google.firestore.v1.Firestore/Commit', - (proto.CommitRequest value) => value.writeToBuffer(), - (List value) => proto.CommitResponse.fromBuffer(value), - ); - - static final ClientMethod _listen = - ClientMethod( - '/google.firestore.v1.Firestore/Listen', - (proto.ListenRequest value) => value.writeToBuffer(), - (List value) => proto.ListenResponse.fromBuffer(value), - ); - - static final ClientMethod _write = - ClientMethod( - '/google.firestore.v1.Firestore/Write', - (proto.WriteRequest value) => value.writeToBuffer(), - (List value) => proto.WriteResponse.fromBuffer(value), - ); -} - -/// A gRPC response producing a stream of values. 
-class ResponseStream extends DelegatingStream - with _ResponseMixin { - ResponseStream(this._call, Stream response) : super(response); - - @override - final ClientCall _call; - - @override - Future> get headers => _call.headers; - - @override - Future> get trailers => _call.trailers; - - @override - Future cancel() => _call.cancel(); -} - -/// A gRPC response producing a single value. -class ResponseFuture extends DelegatingFuture - with _ResponseMixin { - ResponseFuture(this._call, Future future) : super(future); - - @override - final ClientCall _call; -} - -mixin _ResponseMixin implements Response { - ClientCall get _call; - - @override - Future> get headers => _call.headers; - - @override - Future> get trailers => _call.trailers; - - @override - Future cancel() => _call.cancel(); -} diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/datastore/transaction_client.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/datastore/transaction_client.dart deleted file mode 100644 index df6507c5..00000000 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/datastore/transaction_client.dart +++ /dev/null @@ -1,42 +0,0 @@ -// File created by -// Lung Razvan -// on 01/12/2019 - -part of datastore; - -/// Wrapper class for transaction specific gRPC methods -class TransactionClient { - const TransactionClient(this._client, this._serializer); - - final FirestoreClient _client; - final RemoteSerializer _serializer; - - String get _databaseName => _serializer.databaseName; - - Future> commit(List mutations) async { - final proto.CommitRequest builder = proto.CommitRequest() // - ..database = _databaseName - ..writes.addAll(mutations.map(_serializer.encodeMutation)); - - final proto.CommitResponse response = await _client.commit(builder); - final SnapshotVersion commitVersion = - _serializer.decodeVersion(response.commitTime); - return response.writeResults - .map((proto.WriteResult result) => - _serializer.decodeMutationResult(result, commitVersion)) - .toList(); - } - - Future> lookup(List keys) async { - final proto.BatchGetDocumentsRequest builder = - proto.BatchGetDocumentsRequest() // - ..database = _databaseName - ..documents.addAll(keys.map(_serializer.encodeKey)); - - return _client - .batchGetDocuments(builder) - .map(_serializer.decodeMaybeDocument) - .where((MaybeDocument doc) => keys.contains(doc.key)) - .toList(); - } -} diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/datastore/watch_stream.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/datastore/watch_stream.dart deleted file mode 100644 index a7817ccb..00000000 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/datastore/watch_stream.dart +++ /dev/null @@ -1,92 +0,0 @@ -// File created by -// Lung Razvan -// on 02/12/2019 - -part of datastore; - -class WatchStream - extends BaseStream { - factory WatchStream({ - @required FirestoreClient client, - @required TaskScheduler scheduler, - @required RemoteSerializer serializer, - }) { - // ignore: close_sinks - final StreamController controller = - StreamController.broadcast(); - return WatchStream.test(client, scheduler, serializer, controller); - } - - @visibleForTesting - WatchStream.test( - FirestoreClient client, - TaskScheduler scheduler, - RemoteSerializer serializer, - StreamController eventsController, - ) : assert(client != null), - assert(serializer != null), - _client = client, - _serializer = serializer, - super( - 
eventsController, - scheduler, - TaskId.listenStreamIdle, - TaskId.listenStreamConnectionBackoff, - ); - - final FirestoreClient _client; - final RemoteSerializer _serializer; - - @override - Future> _buildCall( - Stream requests) async { - return _client.listen(requests); - } - - /// Registers interest in the results of the given query. - /// - /// If the query includes a [resumeToken] it will be included in the request. - /// Results that affect the query will be streamed back as [WatchChange] - /// messages that reference the [targetId] included in query. - void watchQuery(QueryData queryData) { - hardAssert(isOpen, 'Watching queries requires an open stream'); - - final proto.ListenRequest request = proto.ListenRequest.create() - ..database = _serializer.databaseName - ..addTarget = _serializer.encodeTarget(queryData) - ..labels.addAll(_serializer.encodeListenRequestLabels(queryData)); - writeRequest(request); - } - - /// Unregisters interest in the results of the query associated with the given - /// target id. - void unwatchTarget(int targetId) { - hardAssert(isOpen, 'Unwatching targets requires an open stream'); - - final proto.ListenRequest request = proto.ListenRequest.create() - ..database = _serializer.databaseName - ..removeTarget = targetId - ..freeze(); - writeRequest(request); - } - - @override - void _onData(proto.ListenResponse response) { - super._onData(response); - // A successful response means the stream is healthy - _backoff.reset(); - - final WatchChange watchChange = _serializer.decodeWatchChange(response); - final SnapshotVersion snapshotVersion = - _serializer.decodeVersionFromListenResponse(response); - - addEvent(OnWatchChange(snapshotVersion, watchChange)); - } -} - -class OnWatchChange extends StreamEvent { - const OnWatchChange(this.snapshotVersion, this.watchChange); - - final SnapshotVersion snapshotVersion; - final WatchChange watchChange; -} diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/datastore/write_stream.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/datastore/write_stream.dart deleted file mode 100644 index 5cfdd177..00000000 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/datastore/write_stream.dart +++ /dev/null @@ -1,143 +0,0 @@ -// File created by -// Lung Razvan -// on 02/12/2019 - -part of datastore; - -class WriteStream extends BaseStream { - factory WriteStream({ - @required FirestoreClient client, - @required TaskScheduler scheduler, - @required RemoteSerializer serializer, - }) { - // ignore: close_sinks - final StreamController controller = - StreamController.broadcast(); - return WriteStream.test(client, scheduler, serializer, controller); - } - - @visibleForTesting - WriteStream.test( - FirestoreClient client, - TaskScheduler scheduler, - RemoteSerializer serializer, - StreamController eventsController, - ) : assert(client != null), - assert(serializer != null), - _client = client, - _serializer = serializer, - super( - eventsController, - scheduler, - TaskId.writeStreamIdle, - TaskId.writeStreamConnectionBackoff, - ); - - final FirestoreClient _client; - final RemoteSerializer _serializer; - - /// Last received stream token from the server. - /// - /// Used to acknowledge which responses the client has processed. Stream - /// tokens are opaque checkpoint markers whose only real value is their - /// inclusion in the next request. [BaseStream] implementations manage - /// propagating of this value from responses to the next request. 
- /// - /// NOTE: A null streamToken is not allowed: use the empty array for the unset - /// value. - Uint8List lastStreamToken = emptyStreamToken; - - /// Tracks whether or not a handshake has been successfully exchanged and the - /// stream is ready to accept mutations. - bool handshakeComplete = false; - - @override - Future> _buildCall( - Stream requests) { - return _client.write(requests); - } - - @override - Future start() { - handshakeComplete = false; - return super.start(); - } - - @override - void _onData(proto.WriteResponse response) { - super._onData(response); - if (_state != State.closing) { - lastStreamToken = Uint8List.fromList(response.streamToken); - - if (!handshakeComplete) { - // The first response is the handshake response - handshakeComplete = true; - - addEvent(const HandshakeCompleteEvent()); - } else { - // A successful first write response means the stream is healthy. - // - // Note, that we could consider a successful handshake healthy, however, - // the write itself might be causing an error we want to back off from. - _backoff.reset(); - - final SnapshotVersion commitVersion = - _serializer.decodeVersion(response.commitTime); - final List results = response.writeResults - .map((proto.WriteResult proto) => - _serializer.decodeMutationResult(proto, commitVersion)) - .toList(); - - addEvent(OnWriteResponse(commitVersion, results)); - } - } - } - - @override - void tearDown() { - if (handshakeComplete) { - // Send an empty write request to the backend to indicate imminent stream - // closure. This allows the backend to clean up resources. - writeMutations([]); - } - } - - /// Sends an initial streamToken to the server, performing the handshake - /// required. Subsequent [writeMutations] calls should wait until - /// [HandshakeCompleteEvent] is emitted. - void writeHandshake() { - assert(isOpen, 'Writing handshake requires an opened stream'); - assert(!handshakeComplete, 'Handshake already completed'); - - // TODO(long1eu): Support stream resumption. We intentionally do not set the - // stream token on the handshake, ignoring any stream token we might have. - writeRequest(proto.WriteRequest()..database = _serializer.databaseName); - } - - /// Sends a list of mutations to the Firestore backend to apply - void writeMutations(List mutations) { - assert(isOpen, 'Writing mutations requires an opened stream'); - assert(handshakeComplete, - 'Handshake must be complete before writing mutations'); - - final proto.WriteRequest request = proto.WriteRequest.create() - ..streamToken = lastStreamToken - ..writes.addAll(mutations.map(_serializer.encodeMutation)) - ..freeze(); - writeRequest(request); - } - - /// The empty stream token. 
- static final Uint8List emptyStreamToken = Uint8List.fromList([0]); -} - -class HandshakeCompleteEvent extends StreamEvent { - const HandshakeCompleteEvent(); -} - -class OnWriteResponse extends StreamEvent { - const OnWriteResponse(this.version, this.results); - - final SnapshotVersion version; - final List results; -} diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/firebase_client_grpc_metadata_provider.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/firebase_client_grpc_metadata_provider.dart new file mode 100644 index 00000000..44fd4ad2 --- /dev/null +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/firebase_client_grpc_metadata_provider.dart @@ -0,0 +1,56 @@ +// File created by +// Lung Razvan +// on 24/01/2021 + +import 'package:firebase_core_vm/firebase_core_vm.dart'; +import 'package:meta/meta.dart'; + +typedef GrpcMetadataProvider = Future Function(Map metadata); + +/// Provides an implementation of the GrpcMetadataProvider interface. +/// +/// This updates the metadata with platformInfo string and the heartBeatInfo code. +class FirebaseClientGrpcMetadataProvider { + FirebaseClientGrpcMetadataProvider({ + @required HeartBeatInfo heartBeatInfo, + @required FirebaseOptions firebaseOptions, + UserAgentPublisher userAgentPublisher, + }) : _heartBeatInfo = heartBeatInfo, + _firebaseOptions = firebaseOptions, + _userAgentPublisher = userAgentPublisher ?? UserAgentPublisher.instance; + + final HeartBeatInfo _heartBeatInfo; + final UserAgentPublisher _userAgentPublisher; + final FirebaseOptions _firebaseOptions; + + static const String _kHeartBeatKey = 'fire-fst'; + static const String _kHeartBeatHeader = 'x-firebase-client-log-type'; + static const String _kUserAgentHeader = 'x-firebase-client'; + static const String _kGmpAppIdHeader = 'x-firebase-gmpid'; + + Future call(Map metadata) async { + if (_heartBeatInfo == null || _userAgentPublisher == null) { + return; + } + + final int heartBeatCode = _heartBeatInfo.getHeartBeatCode(_kHeartBeatKey).code; + // Non-zero values indicate some kind of heartbeat should be sent. 
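// (The non-zero code is forwarded verbatim in the 'x-firebase-client-log-type' header
// below; a code of zero means no heartbeat is due for this request.)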
+ if (heartBeatCode != 0) { + metadata[_kHeartBeatHeader] = '$heartBeatCode'; + } + + metadata[_kUserAgentHeader] = _userAgentPublisher.userAgent; + _maybeAddGmpAppId(metadata); + } + + void _maybeAddGmpAppId(Map metadata) { + if (_firebaseOptions == null) { + return; + } + + final String gmpAppId = _firebaseOptions.appId; + if (gmpAppId.isNotEmpty) { + metadata[_kGmpAppIdHeader] = gmpAppId; + } + } +} diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/firestore_call_credentials.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/firestore_call_credentials.dart index 79ec54f0..219c1b84 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/firestore_call_credentials.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/firestore_call_credentials.dart @@ -17,8 +17,7 @@ class FirestoreCallCredentials { final CredentialsProvider credentialsProvider; - Future getRequestMetadata( - Map metadata, String uri) async { + Future getRequestMetadata(Map metadata, String uri) async { try { final String token = await credentialsProvider.token; if (token != null && token.isNotEmpty) { diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/firestore_channel.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/firestore_channel.dart new file mode 100644 index 00000000..0ce77497 --- /dev/null +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/firestore_channel.dart @@ -0,0 +1,194 @@ +// File created by +// Lung Razvan +// on 24/09/2018 + +import 'dart:async'; +import 'dart:io'; + +import 'package:_firebase_internal_vm/_firebase_internal_vm.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/auth/credentials_provider.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/core/version.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/firestore_error.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/database_id.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/remote/firebase_client_grpc_metadata_provider.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/remote/firestore_call_credentials.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/remote/incoming_stream_observer.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/util/async_task.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/util/util.dart'; +import 'package:firebase_core_vm/firebase_core_vm.dart'; +import 'package:grpc/grpc.dart'; +import 'package:meta/meta.dart'; + +/// Wrapper class around io.grpc.Channel that adds headers, exception handling and simplifies invoking RPCs. +class FirestoreChannel { + factory FirestoreChannel({ + @required AsyncQueue asyncQueue, + @required CredentialsProvider credentialsProvider, + @required ClientChannel channel, + @required DatabaseId databaseId, + @required GrpcMetadataProvider metadataProvider, + }) { + final CallOptions options = CallOptions( + providers: [ + FirestoreCallCredentials(credentialsProvider).getRequestMetadata, + (Map map, String url) => metadataProvider?.call(map), + (Map map, String url) { + map.addAll({ + _xGoogApiClientHeader: _xGoogApiClientValue, + // This header is used to improve routing and project isolation by the backend. 
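// For example, with a hypothetical project id the header value would look like
// 'projects/my-project/databases/(default)'.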
+ _resourcePrefixHeader: 'projects/${databaseId.projectId}/databases/${databaseId.databaseId}', + }); + } + ], + ); + + return FirestoreChannel._(asyncQueue, credentialsProvider, channel, options); + } + + FirestoreChannel._(this.asyncQueue, this._credentialsProvider, this._channel, this._callOptions); + + static const String _xGoogApiClientHeader = 'x-goog-api-client'; + + static const String _resourcePrefixHeader = 'google-cloud-resource-prefix'; + + static final String _xGoogApiClientValue = + 'gl-dart/${kIsWeb ? 'js' : Platform.version} fire/${Version.sdkVersion} grpc/${Version.grpcVersion}'; + + /// The async worker queue that is used to dispatch events. + final AsyncQueue asyncQueue; + + final CredentialsProvider _credentialsProvider; + + /// The underlying gRPC channel. + final ClientChannel _channel; + + /// Call options to be used when invoking RPCs. + final CallOptions _callOptions; + + /// Shuts down the grpc channel. This is not reversible and renders the FirestoreChannel unusable. + Future shutdown() async { + try { + await _channel.shutdown().timeout(const Duration(seconds: 1)); + } on TimeoutException catch (_) { + Log.d('FirestoreChannel', + 'Unable to gracefully shutdown the gRPC ManagedChannel. Will attempt an immediate shutdown.'); + try { + await _channel.terminate().timeout(const Duration(minutes: 1)); + } on TimeoutException catch (_) { + // Something bad has happened. We could assert, but this is just resource cleanup for a resource that is likely + // only released at the end of the execution. So instead, we'll just log the error. + Log.w('FirestoreChannel', 'Unable to forcefully shutdown the gRPC ManagedChannel.'); + } + } catch (e) { + // (Re-)Cancel if current thread also interrupted + await _channel.terminate(); + // Similar to above, something bad happened, but it's not worth asserting. Just log it. + Log.w('FirestoreChannel', 'Interrupted while shutting down the gRPC Managed Channel'); + } + } + + /// Creates and starts a new bi-directional streaming RPC. + BidiChannel runBidiStreamingRpc( + ClientMethod method, IncomingStreamObserver observer) { + // ignore: close_sinks + final StreamController controller = StreamController(); + + final ClientCall call = _channel.createCall(method, controller.stream, _callOptions) + ..headers.then((Map headers) { + _catchError(() => observer.onHeaders(headers)); + }) + ..response.listen( + (RespT data) => _catchError(() => observer.onNext(data)), + onDone: () => _catchError(() => observer.onClose(GrpcError.ok())), + onError: (dynamic e, StackTrace s) { + return _catchError(() => observer.onClose(e)); + }, + ); + + observer.onOpen(); + + return BidiChannel(controller, call); + } + + /// Creates and starts a streaming response RPC. 
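// Used by Datastore.lookup above: BatchGetDocuments streams one response per requested
// document, and this helper gathers those responses into a single list before completing.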
+ Future> runStreamingResponseRpc(ClientMethod method, ReqT request) async { + final Completer> completer = Completer>(); + final StreamController controller = StreamController(); + final ClientCall call = _channel.createCall(method, controller.stream, _callOptions); + + bool hadError = false; + final List results = []; + call.response.listen( + results.add, + onDone: () { + assert((hadError && completer.isCompleted) || !hadError && !completer.isCompleted); + if (!completer.isCompleted) { + completer.complete(results); + } + controller.close(); + }, + onError: (dynamic status) { + hadError = true; + controller.close(); + completer.completeError(exceptionFromStatus(status)); + }, + ); + + controller.add(request); + await controller.close(); + + return completer.future; + } + + /// Creates and starts a single response RPC. + Future runRpc(ClientMethod method, ReqT request) async { + final Completer completer = Completer(); + final StreamController controller = StreamController(); + final ClientCall call = _channel.createCall(method, controller.stream, _callOptions); + + call.response.listen( + (RespT message) { + completer.complete(message); + controller.close(); + }, + onDone: () { + if (!completer.isCompleted) { + completer.completeError( + FirestoreError('Received onClose with status OK, but no message.', FirestoreErrorCode.internal)); + } + }, + onError: (dynamic status) { + controller.close(); + completer.completeError(exceptionFromStatus(status)); + }, + ); + + controller.add(request); + return completer.future; + } + + void invalidateToken() => _credentialsProvider.invalidateToken(); + + static void _catchError(Function function) { + try { + function(); + } catch (t) { + AsyncQueue.panic(t); + } + } +} + +class BidiChannel { + const BidiChannel(this._sink, this._call); + + final Sink _sink; + final ClientCall _call; + + void add(ReqT data) => _sink.add(data); + + void listen(void Function(RespT event) onData, {Function onError, void Function() onDone, bool cancelOnError}) { + _call.response.listen(onData, onError: onError, onDone: onDone, cancelOnError: cancelOnError); + } + + void cancel() => _sink.close(); +} diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/util/incoming_stream_observer.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/incoming_stream_observer.dart similarity index 86% rename from cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/util/incoming_stream_observer.dart rename to cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/incoming_stream_observer.dart index aa10c2cf..514070cf 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/util/incoming_stream_observer.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/incoming_stream_observer.dart @@ -14,8 +14,8 @@ abstract class IncomingStreamObserver { /// A message was received on the stream. Future onNext(RespT response); - /// The stream is 'ready' (What the hell does that mean?!). - void onReady(); + /// The stream is open and able to accept messages. + void onOpen(); /// The stream has closed. Status.isOk() is false if there an error occurred. 
void onClose(GrpcError status); diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/online_state_tracker.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/online_state_tracker.dart index f4e338e8..7a9a3c4a 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/online_state_tracker.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/online_state_tracker.dart @@ -8,7 +8,7 @@ import 'package:_firebase_internal_vm/_firebase_internal_vm.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/core/online_state.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/remote/remote_store.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/util/assert.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/util/timer_task.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/util/async_task.dart'; import 'package:grpc/grpc.dart'; /// Called whenever the online state of the client changes. This is based on the watch stream for @@ -39,7 +39,7 @@ class OnlineStateTracker { /// A timer that elapses after [_onlineStateTimeoutMs], at which point we transition from /// [OnlineState.unknown] to [OnlineState.offline] without waiting for the stream to actually fail /// ([_maxWatchStreamFailures] times). - TimerTask _onlineStateTimer; + DelayedTask _onlineStateTimer; /// Whether the client should log a warning message if it fails to connect to the backend /// (initially true, cleared after a successful stream, or if we've logged the message already). @@ -47,7 +47,7 @@ class OnlineStateTracker { /// The callback to notify on OnlineState changes. final OnlineStateCallback _onlineStateCallback; - final TaskScheduler _scheduler; + final AsyncQueue _scheduler; /// Called by [RemoteStore] when a watch stream is started (including on each /// backoff attempt). @@ -58,16 +58,15 @@ class OnlineStateTracker { if (_watchStreamFailures == 0) { await _setAndBroadcastState(OnlineState.unknown); - hardAssert(_onlineStateTimer == null, - 'onlineStateTimer shouldn\'t be started yet'); + hardAssert(_onlineStateTimer == null, 'onlineStateTimer shouldn\'t be started yet'); - _onlineStateTimer = _scheduler.add( - TaskId.onlineStateTimeout, + _onlineStateTimer = _scheduler.enqueueAfterDelay( + TimerId.onlineStateTimeout, const Duration(milliseconds: _onlineStateTimeoutMs), () async { _onlineStateTimer = null; - hardAssert(_state == OnlineState.unknown, - 'Timer should be canceled if we transitioned to a different state.'); + hardAssert( + _state == OnlineState.unknown, 'Timer should be canceled if we transitioned to a different state.'); _logClientOfflineWarningIfNecessary( 'Backend didn\'t respond within ${_onlineStateTimeoutMs / 1000} seconds\n'); await _setAndBroadcastState(OnlineState.offline); @@ -129,8 +128,7 @@ class OnlineStateTracker { } void _logClientOfflineWarningIfNecessary(String reason) { - final String message = - 'Could not reach Cloud Firestore backend. $reason\nThis typically ' + final String message = 'Could not reach Cloud Firestore backend. $reason\nThis typically ' 'indicates that your device does not have a healthy Internet connection at the moment. 
The ' 'client will operate in offline mode until it is able to successfully connect to the ' 'backend.'; diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/remote_serializer.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/remote_serializer.dart index 10359ffd..ad22d0d3 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/remote_serializer.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/remote_serializer.dart @@ -4,13 +4,12 @@ import 'dart:typed_data'; -import 'package:cloud_firestore_vm/src/firebase/firestore/blob.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/core/bound.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/core/filter/filter.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/core/order_by.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/core/query.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/geo_point.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/query_data.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/core/target.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/target_data.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/local/query_purpose.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/database_id.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/document.dart'; @@ -28,18 +27,18 @@ import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/patch_m import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/precondition.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/server_timestamp_operation.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/set_mutation.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/transform_mutation.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/transform_operation.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/verify_mutation.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/no_document.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/object_value.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/resource_path.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/snapshot_version.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/model/value/field_value.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/values.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/remote/existence_filter.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/remote/watch_change.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/util/assert.dart'; import 'package:cloud_firestore_vm/src/firebase/timestamp.dart'; -import 'package:cloud_firestore_vm/src/proto/google/firestore/v1/index.dart' - as proto_v1; +import 'package:cloud_firestore_vm/src/proto/google/firestore/v1/index.dart' as proto_v1; import 'package:cloud_firestore_vm/src/proto/index.dart' as proto; import 'package:fixnum/fixnum.dart'; import 'package:grpc/grpc.dart'; @@ -47,8 +46,7 @@ import 'package:meta/meta.dart'; /// Serializer that converts to and from Firestore API protos. 
class RemoteSerializer { - RemoteSerializer(this.databaseId) - : databaseName = _encodedDatabaseId(databaseId).canonicalString; + RemoteSerializer(this.databaseId) : databaseName = _encodedDatabaseId(databaseId).canonicalString; final DatabaseId databaseId; final String databaseName; @@ -76,18 +74,6 @@ class RemoteSerializer { } } - // GeoPoint - - proto.LatLng _encodeGeoPoint(GeoPoint geoPoint) { - return proto.LatLng.create() - ..latitude = geoPoint.latitude - ..longitude = geoPoint.longitude; - } - - GeoPoint _decodeGeoPoint(proto.LatLng latLng) { - return GeoPoint(latLng.latitude, latLng.longitude); - } - // Names and Keys /// Encodes the given document [key] as a fully qualified name. This includes the [databaseId] from the constructor @@ -98,10 +84,8 @@ class RemoteSerializer { DocumentKey decodeKey(String name) { final ResourcePath resource = _decodeResourceName(name); - hardAssert(resource[1] == databaseId.projectId, - 'Tried to deserialize key from different project.'); - hardAssert(resource[3] == databaseId.databaseId, - 'Tried to deserialize key from different database.'); + hardAssert(resource[1] == databaseId.projectId, 'Tried to deserialize key from different project.'); + hardAssert(resource[3] == databaseId.databaseId, 'Tried to deserialize key from different database.'); return DocumentKey.fromPath(_extractLocalPathFromResourceName(resource)); } @@ -124,37 +108,27 @@ class RemoteSerializer { /// Encodes a [databaseId] and resource path into the following form: /// '/projects/$projectId/database/$databaseId/documents/$path' String _encodeResourceName(DatabaseId databaseId, ResourcePath path) { - return _encodedDatabaseId(databaseId) - .appendSegment('documents') - .appendField(path) - .canonicalString; + return _encodedDatabaseId(databaseId).appendSegment('documents').appendField(path).canonicalString; } /// Decodes a fully qualified resource name into a resource path and validates that there is a project and database /// encoded in the path. There are no guarantees that a local path is also encoded in this resource name. ResourcePath _decodeResourceName(String encoded) { final ResourcePath resource = ResourcePath.fromString(encoded); - hardAssert(_isValidResourceName(resource), - 'Tried to deserialize invalid key $resource'); + hardAssert(_isValidResourceName(resource), 'Tried to deserialize invalid key $resource'); return resource; } /// Creates the prefix for a fully qualified resource path, without a local path on the end. static ResourcePath _encodedDatabaseId(DatabaseId databaseId) { - return ResourcePath.fromSegments([ - 'projects', - databaseId.projectId, - 'databases', - databaseId.databaseId - ]); + return ResourcePath.fromSegments(['projects', databaseId.projectId, 'databases', databaseId.databaseId]); } /// Decodes a fully qualified resource name into a resource path and validates that there is a project and database /// encoded in the path along with a local path. 
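// A concrete example of the resource names handled here (hypothetical project id):
//   projects/my-project/databases/(default)/documents/users/alice
// The first four segments ('projects/my-project/databases/(default)') are the database
// prefix built by _encodedDatabaseId; _extractLocalPathFromResourceName drops those plus
// the following 'documents' segment and returns the remaining local path ('users/alice').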
- static ResourcePath _extractLocalPathFromResourceName( - ResourcePath resourceName) { - hardAssert(resourceName.length > 4 && resourceName[4] == 'documents', - 'Tried to deserialize invalid key $resourceName'); + static ResourcePath _extractLocalPathFromResourceName(ResourcePath resourceName) { + hardAssert( + resourceName.length > 4 && resourceName[4] == 'documents', 'Tried to deserialize invalid key $resourceName'); return resourceName.popFirst(5); } @@ -165,152 +139,13 @@ class RemoteSerializer { return path.length >= 4 && path[0] == 'projects' && path[2] == 'databases'; } - // Values - - /// Converts the [FieldValue] model passed into the [Value] proto equivalent. - proto_v1.Value encodeValue(FieldValue value) { - final proto_v1.Value builder = proto_v1.Value.create(); - - if (value is NullValue) { - return builder // - ..nullValue = proto.NullValue.NULL_VALUE; - } - - hardAssert(value.value != null, 'Encoded field value should not be null.'); - - if (value is BoolValue) { - builder.booleanValue = value.value; - } else if (value is IntegerValue) { - builder.integerValue = Int64(value.value); - } else if (value is DoubleValue) { - builder.doubleValue = value.value; - } else if (value is StringValue) { - builder.stringValue = value.value; - } else if (value is ArrayValue) { - builder.arrayValue = _encodeArrayValue(value); - } else if (value is ObjectValue) { - builder.mapValue = _encodeMapValue(value); - } else if (value is TimestampValue) { - builder.timestampValue = encodeTimestamp(value.value); - } else if (value is GeoPointValue) { - builder.geoPointValue = _encodeGeoPoint(value.value); - } else if (value is BlobValue) { - builder.bytesValue = value.value.bytes; - } else if (value is ReferenceValue) { - final DatabaseId id = value.databaseId; - final DocumentKey key = value.value; - builder.referenceValue = _encodeResourceName(id, key.path); - } else { - throw fail('Can\'t serialize $value'); - } - - return builder..freeze(); - } - - /// Converts from the proto [Value] format to the model [FieldValue] format - FieldValue decodeValue(proto_v1.Value proto) { - switch (proto.whichValueType()) { - case proto_v1.Value_ValueType.booleanValue: - return BoolValue.valueOf(proto.booleanValue); - case proto_v1.Value_ValueType.integerValue: - return IntegerValue.valueOf(proto.integerValue.toInt()); - case proto_v1.Value_ValueType.doubleValue: - return DoubleValue.valueOf(proto.doubleValue); - case proto_v1.Value_ValueType.referenceValue: - final ResourcePath resourceName = - _decodeResourceName(proto.referenceValue); - final DatabaseId id = - DatabaseId.forDatabase(resourceName[1], resourceName[3]); - final DocumentKey key = DocumentKey.fromPath( - _extractLocalPathFromResourceName(resourceName)); - return ReferenceValue.valueOf(id, key); - case proto_v1.Value_ValueType.mapValue: - return _decodeMapValue(proto.mapValue); - case proto_v1.Value_ValueType.geoPointValue: - final dynamic /*proto.LatLng*/ latLng = proto.geoPointValue; - return GeoPointValue.valueOf(_decodeGeoPoint(latLng)); - case proto_v1.Value_ValueType.arrayValue: - return _decodeArrayValue(proto.arrayValue); - case proto_v1.Value_ValueType.timestampValue: - final Timestamp timestamp = decodeTimestamp(proto.timestampValue); - return TimestampValue.valueOf(timestamp); - case proto_v1.Value_ValueType.nullValue: - return NullValue.nullValue(); - - case proto_v1.Value_ValueType.stringValue: - return StringValue.valueOf(proto.stringValue); - case proto_v1.Value_ValueType.bytesValue: - final Blob bytes = 
Blob(Uint8List.fromList(proto.bytesValue)); - return BlobValue.valueOf(bytes); - default: - throw fail('Unknown value $proto'); - } - } - - proto.ArrayValue _encodeArrayValue(ArrayValue value) { - final List internalValue = value.internalValue; - - final proto.ArrayValue arrayBuilder = proto.ArrayValue.create(); - for (FieldValue subValue in internalValue) { - arrayBuilder.values.add(encodeValue(subValue)); - } - - return arrayBuilder..freeze(); - } - - ArrayValue _decodeArrayValue(proto.ArrayValue protoArray) { - final int count = protoArray.values.length; - final List wrappedList = List(count); - for (int i = 0; i < count; i++) { - wrappedList[i] = decodeValue(protoArray.values[i]); - } - return ArrayValue.fromList(wrappedList); - } - - proto.MapValue _encodeMapValue(ObjectValue value) { - final proto.MapValue builder = proto.MapValue.create(); - for (MapEntry entry in value.internalValue) { - builder.fields[entry.key] = encodeValue(entry.value); - } - return builder..freeze(); - } - - ObjectValue _decodeMapValue(proto.MapValue value) { - return decodeMapFields(value.fields); - } - - // PORTING NOTE: There's no encodeFields here because there's no way to write it that doesn't - // involve creating a temporary map. - ObjectValue decodeMapFields(Map fields) { - ObjectValue result = ObjectValue.empty; - for (String key in fields.keys) { - final FieldPath path = FieldPath.fromSingleSegment(key); - final FieldValue value = decodeValue(fields[key]); - result = result.set(path, value); - } - return result; - } - - ObjectValue decodeDocumentFields(Map fields) { - ObjectValue result = ObjectValue.empty; - for (String key in fields.keys) { - final FieldPath path = FieldPath.fromSingleSegment(key); - final FieldValue value = decodeValue(fields[key]); - result = result.set(path, value); - } - return result; - } - // Documents proto.Document encodeDocument(DocumentKey key, ObjectValue value) { - final proto.Document builder = proto.Document.create() - ..name = encodeKey(key); - - for (MapEntry entry in value.internalValue) { - builder.fields[entry.key] = encodeValue(entry.value); - } - return builder..freeze(); + return proto.Document.create() + ..name = encodeKey(key) + ..fields.addAll(value.fields) + ..freeze(); } MaybeDocument decodeMaybeDocument(proto.BatchGetDocumentsResponse response) { @@ -324,23 +159,19 @@ class RemoteSerializer { } Document _decodeFoundDocument(proto.BatchGetDocumentsResponse response) { - hardAssert(response.hasFound(), - 'Tried to deserialize a found document from a missing document.'); + hardAssert(response.hasFound(), 'Tried to deserialize a found document from a missing document.'); final DocumentKey key = decodeKey(response.found.name); + final ObjectValue value = ObjectValue.fromMap(response.found.fields); final SnapshotVersion version = decodeVersion(response.found.updateTime); - hardAssert(version != SnapshotVersion.none, - 'Got a document response with no snapshot version'); - return Document.fromProto( - key, version, DocumentState.synced, response.found, decodeValue); + hardAssert(version != SnapshotVersion.none, 'Got a document response with no snapshot version'); + return Document(key, version, value, DocumentState.synced); } NoDocument _decodeMissingDocument(proto.BatchGetDocumentsResponse response) { - hardAssert(response.hasMissing(), - 'Tried to deserialize a missing document from a found document.'); + hardAssert(response.hasMissing(), 'Tried to deserialize a missing document from a found document.'); final DocumentKey key = decodeKey(response.missing); 
final SnapshotVersion version = decodeVersion(response.readTime); - hardAssert(version != SnapshotVersion.none, - 'Got a no document response with no snapshot version'); + hardAssert(version != SnapshotVersion.none, 'Got a no document response with no snapshot version'); return NoDocument(key, version, hasCommittedMutations: false); } @@ -355,22 +186,16 @@ class RemoteSerializer { builder ..update = encodeDocument(mutation.key, mutation.value) ..updateMask = _encodeDocumentMask(mutation.mask); - } else if (mutation is TransformMutation) { - final proto.DocumentTransform transformBuilder = - proto.DocumentTransform.create()..document = encodeKey(mutation.key); - - for (FieldTransform fieldTransform in mutation.fieldTransforms) { - transformBuilder.fieldTransforms - .add(_encodeFieldTransform(fieldTransform)); - } - - builder.transform = transformBuilder; } else if (mutation is DeleteMutation) { builder.delete = encodeKey(mutation.key); + } else if (mutation is VerifyMutation) { + builder.verify = encodeKey(mutation.key); } else { throw fail('unknown mutation type ${mutation.runtimeType}'); } + builder.updateTransforms.addAll(mutation.fieldTransforms.map(_encodeFieldTransform)); + if (!mutation.precondition.isNone) { builder.currentDocument = _encodePrecondition(mutation.precondition); } @@ -378,36 +203,36 @@ class RemoteSerializer { } Mutation decodeMutation(proto.Write mutation) { - final Precondition precondition = mutation.hasCurrentDocument() - ? _decodePrecondition(mutation.currentDocument) - : Precondition.none; + final Precondition precondition = + mutation.hasCurrentDocument() ? _decodePrecondition(mutation.currentDocument) : Precondition.none; + + final List fieldTransforms = mutation.updateTransforms.map(_decodeFieldTransform).toList(); - if (mutation.hasUpdate()) { - if (mutation.hasUpdateMask()) { - return PatchMutation( + switch (mutation.whichOperation()) { + case proto.Write_Operation.update: + if (mutation.hasUpdateMask()) { + return PatchMutation( decodeKey(mutation.update.name), - decodeDocumentFields(mutation.update.fields), + ObjectValue.fromMap(mutation.update.fields), _decodeDocumentMask(mutation.updateMask), - precondition); - } else { - return SetMutation(decodeKey(mutation.update.name), - decodeDocumentFields(mutation.update.fields), precondition); - } - } else if (mutation.hasDelete()) { - return DeleteMutation(decodeKey(mutation.delete), precondition); - } else if (mutation.hasTransform()) { - final List fieldTransforms = []; - for (proto.DocumentTransform_FieldTransform fieldTransform - in mutation.transform.fieldTransforms) { - fieldTransforms.add(_decodeFieldTransform(fieldTransform)); - } - final bool exists = precondition.exists; - hardAssert(exists != null && exists, - 'Transforms only support precondition \'exists == true\''); - return TransformMutation( - decodeKey(mutation.transform.document), fieldTransforms); - } else { - throw fail('Unknown mutation operation: $mutation'); + precondition, + fieldTransforms, + ); + } else { + return SetMutation( + decodeKey(mutation.update.name), + ObjectValue.fromMap(mutation.update.fields), + precondition, + fieldTransforms, + ); + } + break; + case proto.Write_Operation.delete: + return DeleteMutation(decodeKey(mutation.delete), precondition); + case proto.Write_Operation.verify: + return VerifyMutation(decodeKey(mutation.verify), precondition); + default: + throw fail('Unknown mutation operation: $mutation'); } } @@ -446,118 +271,89 @@ class RemoteSerializer { } FieldMask _decodeDocumentMask(proto.DocumentMask mask) 
{ - final Set paths = mask.fieldPaths - .map((String path) => FieldPath.fromServerFormat(path)) - .toSet(); + final Set paths = mask.fieldPaths.map((String path) => FieldPath.fromServerFormat(path)).toSet(); return FieldMask(paths); } - proto.DocumentTransform_FieldTransform _encodeFieldTransform( - FieldTransform fieldTransform) { + proto.DocumentTransform_FieldTransform _encodeFieldTransform(FieldTransform fieldTransform) { final TransformOperation transform = fieldTransform.operation; if (transform is ServerTimestampOperation) { return proto.DocumentTransform_FieldTransform.create() ..fieldPath = fieldTransform.fieldPath.canonicalString - ..setToServerValue = - proto.DocumentTransform_FieldTransform_ServerValue.REQUEST_TIME + ..setToServerValue = proto.DocumentTransform_FieldTransform_ServerValue.REQUEST_TIME ..freeze(); } else if (transform is ArrayTransformOperationUnion) { return proto.DocumentTransform_FieldTransform.create() ..fieldPath = fieldTransform.fieldPath.canonicalString - ..appendMissingElements = - _encodeArrayTransformElements(transform.elements) + ..appendMissingElements = proto.ArrayValue(values: transform.elements) ..freeze(); } else if (transform is ArrayTransformOperationRemove) { return proto.DocumentTransform_FieldTransform.create() ..fieldPath = fieldTransform.fieldPath.canonicalString - ..removeAllFromArray = _encodeArrayTransformElements(transform.elements) + ..removeAllFromArray = proto.ArrayValue(values: transform.elements) ..freeze(); } else if (transform is NumericIncrementTransformOperation) { return proto.DocumentTransform_FieldTransform.create() ..fieldPath = fieldTransform.fieldPath.canonicalString - ..increment = encodeValue(transform.operand) + ..increment = transform.operand ..freeze(); } else { throw fail('Unknown transform: $transform'); } } - proto.ArrayValue _encodeArrayTransformElements(List elements) { - final proto.ArrayValue arrayBuilder = proto.ArrayValue.create(); - for (FieldValue subValue in elements) { - arrayBuilder.values.add(encodeValue(subValue)); - } - return arrayBuilder..freeze(); - } - - FieldTransform _decodeFieldTransform( - proto.DocumentTransform_FieldTransform fieldTransform) { - if (fieldTransform.hasSetToServerValue()) { - hardAssert( - fieldTransform.setToServerValue == - proto.DocumentTransform_FieldTransform_ServerValue.REQUEST_TIME, - 'Unknown transform setToServerValue: ${fieldTransform.setToServerValue}'); - return FieldTransform( + FieldTransform _decodeFieldTransform(proto.DocumentTransform_FieldTransform fieldTransform) { + switch (fieldTransform.whichTransformType()) { + case proto.DocumentTransform_FieldTransform_TransformType.setToServerValue: + hardAssert( + fieldTransform.setToServerValue == proto.DocumentTransform_FieldTransform_ServerValue.REQUEST_TIME, + 'Unknown transform setToServerValue: ${fieldTransform.setToServerValue}', + ); + return FieldTransform( FieldPath.fromServerFormat(fieldTransform.fieldPath), - ServerTimestampOperation.sharedInstance); - } else if (fieldTransform.hasAppendMissingElements()) { - return FieldTransform( + ServerTimestampOperation.sharedInstance, + ); + break; + case proto.DocumentTransform_FieldTransform_TransformType.appendMissingElements: + return FieldTransform( FieldPath.fromServerFormat(fieldTransform.fieldPath), - ArrayTransformOperationUnion(_decodeArrayTransformElements( - fieldTransform.appendMissingElements))); - } else if (fieldTransform.hasRemoveAllFromArray()) { - return FieldTransform( + ArrayTransformOperationUnion(fieldTransform.appendMissingElements.values), 
+ ); + break; + case proto.DocumentTransform_FieldTransform_TransformType.removeAllFromArray: + return FieldTransform( FieldPath.fromServerFormat(fieldTransform.fieldPath), - ArrayTransformOperationRemove(_decodeArrayTransformElements( - fieldTransform.removeAllFromArray))); - } else if (fieldTransform.hasIncrement()) { - return FieldTransform( + ArrayTransformOperationRemove(fieldTransform.removeAllFromArray.values), + ); + break; + case proto.DocumentTransform_FieldTransform_TransformType.increment: + return FieldTransform( FieldPath.fromServerFormat(fieldTransform.fieldPath), - NumericIncrementTransformOperation( - decodeValue(fieldTransform.increment))); - } else { - throw fail( - 'Unknown FieldTransform proto: $fieldTransform', - ); - } - } - - List _decodeArrayTransformElements( - proto.ArrayValue elementsProto) { - final int count = elementsProto.values.length; - final List result = List(count); - for (int i = 0; i < count; i++) { - result[i] = decodeValue(elementsProto.values[i]); + NumericIncrementTransformOperation(fieldTransform.increment), + ); + break; + default: + throw fail('Unknown FieldTransform proto: $fieldTransform'); } - return result; } - MutationResult decodeMutationResult( - proto.WriteResult proto, SnapshotVersion commitVersion) { + MutationResult decodeMutationResult(proto.WriteResult writeProto, SnapshotVersion commitVersion) { // NOTE: Deletes don't have an [updateTime] but the commit timestamp from the containing [CommitResponse] or // [WriteResponse] indicates essentially that the delete happened no later than that. For our purposes we don't care // exactly when the delete happened so long as we can tell when an update on the watch stream is at or later than // that change. - SnapshotVersion version = decodeVersion(proto.updateTime); + SnapshotVersion version = decodeVersion(writeProto.updateTime); if (version == SnapshotVersion.none) { version = commitVersion; } - List transformResults; - final int transformResultsCount = proto.transformResults.length; - if (transformResultsCount > 0) { - transformResults = List(transformResultsCount); - for (int i = 0; i < transformResultsCount; i++) { - transformResults[i] = decodeValue(proto.transformResults[i]); - } - } - return MutationResult(version, transformResults); + return MutationResult(version, writeProto.transformResults.isEmpty ? 
null : writeProto.transformResults); } // Queries - Map encodeListenRequestLabels(QueryData queryData) { - final String value = _encodeLabel(queryData.purpose); + Map encodeListenRequestLabels(TargetData targetData) { + final String value = _encodeLabel(targetData.purpose); if (value == null) { return {}; } @@ -578,102 +374,97 @@ class RemoteSerializer { } } - proto_v1.Target encodeTarget(QueryData queryData) { + proto_v1.Target encodeTarget(TargetData targetData) { final proto_v1.Target builder = proto_v1.Target.create(); - final Query query = queryData.query; + final Target target = targetData.target; - if (query.isDocumentQuery) { - builder.documents = encodeDocumentsTarget(query); + if (target.isDocumentQuery) { + builder.documents = encodeDocumentsTarget(target); } else { - builder.query = encodeQueryTarget(query); + builder.query = encodeQueryTarget(target); } return builder - ..targetId = queryData.targetId - ..resumeToken = queryData.resumeToken + ..targetId = targetData.targetId + ..resumeToken = targetData.resumeToken ..freeze(); } - proto.Target_DocumentsTarget encodeDocumentsTarget(Query query) { + proto.Target_DocumentsTarget encodeDocumentsTarget(Target target) { return proto.Target_DocumentsTarget.create() - ..documents.add(_encodeQueryPath(query.path)) + ..documents.add(_encodeQueryPath(target.path)) ..freeze(); } - Query decodeDocumentsTarget(proto.Target_DocumentsTarget target) { + Target decodeDocumentsTarget(proto.Target_DocumentsTarget target) { final int count = target.documents.length; - hardAssert( - count == 1, 'DocumentsTarget contained other than 1 document $count'); + hardAssert(count == 1, 'DocumentsTarget contained other than 1 document $count'); final String name = target.documents[0]; - return Query(_decodeQueryPath(name)); + return Query(_decodeQueryPath(name)).toTarget(); } - proto.Target_QueryTarget encodeQueryTarget(Query query) { + proto.Target_QueryTarget encodeQueryTarget(Target target) { // Dissect the path into [parent], [collectionId], and optional [key] filter. 
final proto.Target_QueryTarget builder = proto.Target_QueryTarget.create(); - final proto.StructuredQuery structuredQueryBuilder = - proto.StructuredQuery.create(); - final ResourcePath path = query.path; - if (query.collectionGroup != null) { - hardAssert(path.length % 2 == 0, - 'Collection Group queries should be within a document path or root.'); + final proto.StructuredQuery structuredQueryBuilder = proto.StructuredQuery.create(); + final ResourcePath path = target.path; + if (target.collectionGroup != null) { + hardAssert(path.length % 2 == 0, 'Collection Group queries should be within a document path or root.'); builder.parent = _encodeQueryPath(path); - structuredQueryBuilder.from.add(proto.StructuredQuery_CollectionSelector() - ..collectionId = query.collectionGroup - ..allDescendants = true); + structuredQueryBuilder.from.add(proto.StructuredQuery_CollectionSelector( + collectionId: target.collectionGroup, + allDescendants: true, + )); } else { - hardAssert(path.length.remainder(2) != 0, - 'Document queries with filters are not supported.'); + hardAssert(path.length.remainder(2) != 0, 'Document queries with filters are not supported.'); builder.parent = _encodeQueryPath(path.popLast()); - final proto.StructuredQuery_CollectionSelector from = - proto.StructuredQuery_CollectionSelector.create() - ..collectionId = path.last; - structuredQueryBuilder.from.add(from); + structuredQueryBuilder.from.add(proto.StructuredQuery_CollectionSelector(collectionId: path.last)); } // Encode the filters. - if (query.filters.isNotEmpty) { - structuredQueryBuilder.where = _encodeFilters(query.filters); + if (target.filters.isNotEmpty) { + structuredQueryBuilder.where = _encodeFilters(target.filters); } // Encode the orders. - for (OrderBy orderBy in query.orderByConstraints) { - structuredQueryBuilder.orderBy.add(_encodeOrderBy(orderBy)); - } + structuredQueryBuilder.orderBy.addAll(target.orderBy.map(_encodeOrderBy)); // Encode the limit. 
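Before the limit and bound encoding below, here is a small self-contained sketch of the parent/from split that encodeQueryTarget performs: an ordinary collection query peels its last segment off into the collection selector, while a collection-group query keeps the whole document-aligned path as the parent and sets allDescendants. The types and function names here are made up for illustration and use plain string lists instead of ResourcePath.

// Sketch only: QueryTargetParts and the split helpers are illustrative, not library API.
class QueryTargetParts {
  QueryTargetParts(this.parent, this.collectionId, this.allDescendants);

  final List<String> parent;
  final String collectionId;
  final bool allDescendants;

  @override
  String toString() =>
      'parent=/${parent.join('/')} from=$collectionId allDescendants=$allDescendants';
}

QueryTargetParts splitCollectionQuery(List<String> path) {
  // Plain collection queries have an odd-length path ending in the collection id.
  assert(path.length.isOdd);
  return QueryTargetParts(path.sublist(0, path.length - 1), path.last, false);
}

QueryTargetParts splitCollectionGroupQuery(
    List<String> parent, String collectionGroup) {
  // Collection-group queries must sit at the root or under a document (even length).
  assert(parent.length.isEven);
  return QueryTargetParts(parent, collectionGroup, true);
}

void main() {
  print(splitCollectionQuery(<String>['rooms', 'eros', 'messages']));
  // parent=/rooms/eros from=messages allDescendants=false
  print(splitCollectionGroupQuery(<String>[], 'messages'));
  // parent=/ from=messages allDescendants=true
}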
- if (query.hasLimit) { - final proto.Int32Value limit = proto.Int32Value.create() - ..value = query.getLimit(); - structuredQueryBuilder.limit = limit; + if (target.hasLimit) { + structuredQueryBuilder.limit = proto.Int32Value(value: target.limit); } - if (query.getStartAt() != null) { - structuredQueryBuilder.startAt = _encodeBound(query.getStartAt()); + if (target.startAt != null) { + structuredQueryBuilder.startAt = _encodeBound(target.startAt); } - if (query.getEndAt() != null) { - structuredQueryBuilder.endAt = _encodeBound(query.getEndAt()); + if (target.endAt != null) { + structuredQueryBuilder.endAt = _encodeBound(target.endAt); } builder.structuredQuery = structuredQueryBuilder; return builder..freeze(); } - Query decodeQueryTarget(proto.Target_QueryTarget target) { + Target decodeQueryTarget(proto.Target_QueryTarget target) { ResourcePath path = _decodeQueryPath(target.parent); final proto.StructuredQuery query = target.structuredQuery; + + String collectionGroup; final int fromCount = query.from.length; if (fromCount > 0) { - hardAssert(fromCount == 1, - 'StructuredQuery.from with more than one collection is not supported.'); + hardAssert(fromCount == 1, 'StructuredQuery.from with more than one collection is not supported.'); - final proto.StructuredQuery_CollectionSelector from = query.from[0]; - path = path.appendSegment(from.collectionId); + final proto_v1.StructuredQuery_CollectionSelector from = query.from[0]; + if (from.allDescendants) { + collectionGroup = from.collectionId; + } else { + path = path.appendSegment(from.collectionId); + } } List filterBy; @@ -694,7 +485,7 @@ class RemoteSerializer { orderBy = []; } - int limit = Query.noLimit; + int limit = Target.kNoLimit; if (query.hasLimit()) { limit = query.limit.value; } @@ -711,19 +502,20 @@ class RemoteSerializer { return Query( path, + collectionGroup: collectionGroup, filters: filterBy, explicitSortOrder: orderBy, limit: limit, + limitType: QueryLimitType.limitToFirst, startAt: startAt, endAt: endAt, - ); + ).toTarget(); } // Filters proto.StructuredQuery_Filter _encodeFilters(List filters) { - final List protos = - List(filters.length); + final List protos = List(filters.length); int i = 0; for (Filter filter in filters) { if (filter is FieldFilter) { @@ -734,10 +526,9 @@ class RemoteSerializer { if (filters.length == 1) { return protos[0]; } else { - final proto.StructuredQuery_CompositeFilter composite = - proto.StructuredQuery_CompositeFilter.create() - ..op = proto.StructuredQuery_CompositeFilter_Operator.AND - ..filters.addAll(protos); + final proto.StructuredQuery_CompositeFilter composite = proto.StructuredQuery_CompositeFilter.create() + ..op = proto.StructuredQuery_CompositeFilter_Operator.AND + ..filters.addAll(protos); return proto.StructuredQuery_Filter.create() ..compositeFilter = composite @@ -748,9 +539,7 @@ class RemoteSerializer { List _decodeFilters(proto.StructuredQuery_Filter value) { List filters; if (value.hasCompositeFilter()) { - hardAssert( - value.compositeFilter.op == - proto.StructuredQuery_CompositeFilter_Operator.AND, + hardAssert(value.compositeFilter.op == proto.StructuredQuery_CompositeFilter_Operator.AND, 'Only AND-type composite filters are supported, got ${value.compositeFilter.op}'); filters = value.compositeFilter.filters; } else { @@ -777,28 +566,27 @@ class RemoteSerializer { @visibleForTesting proto.StructuredQuery_Filter encodeUnaryOrFieldFilter(FieldFilter filter) { - if (filter.operator == FilterOperator.equal) { - final proto.StructuredQuery_UnaryFilter unaryProto = - 
proto.StructuredQuery_UnaryFilter() - ..field_2 = _encodeFieldPath(filter.field); - - if (filter.value == DoubleValue.nan) { - unaryProto.op = proto_v1.StructuredQuery_UnaryFilter_Operator.IS_NAN; - return proto.StructuredQuery_Filter() - ..unaryFilter = unaryProto - ..freeze(); - } else if (filter.value == NullValue.nullValue()) { - unaryProto.op = proto_v1.StructuredQuery_UnaryFilter_Operator.IS_NULL; - return proto.StructuredQuery_Filter() - ..unaryFilter = unaryProto - ..freeze(); + if (filter.operator == FilterOperator.equal || filter.operator == FilterOperator.notEqual) { + final proto.StructuredQuery_UnaryFilter unaryProto = proto.StructuredQuery_UnaryFilter( + field_2: _encodeFieldPath(filter.field), + ); + + if (isNanValue(filter.value)) { + unaryProto.op = filter.operator == FilterOperator.equal + ? proto.StructuredQuery_UnaryFilter_Operator.IS_NAN + : proto.StructuredQuery_UnaryFilter_Operator.IS_NOT_NAN; + } else if (isNullValue(filter.value)) { + unaryProto.op = filter.operator == FilterOperator.equal + ? proto.StructuredQuery_UnaryFilter_Operator.IS_NULL + : proto.StructuredQuery_UnaryFilter_Operator.IS_NOT_NULL; } + + return proto.StructuredQuery_Filter(unaryFilter: unaryProto); } - final proto.StructuredQuery_FieldFilter builder = - proto.StructuredQuery_FieldFilter() - ..field_1 = _encodeFieldPath(filter.field) - ..op = _encodeFieldFilterOperator(filter.operator) - ..value = encodeValue(filter.value); + final proto.StructuredQuery_FieldFilter builder = proto.StructuredQuery_FieldFilter() + ..field_1 = _encodeFieldPath(filter.field) + ..op = _encodeFieldFilterOperator(filter.operator) + ..value = filter.value; return proto.StructuredQuery_Filter.create() ..fieldFilter = builder @@ -807,24 +595,26 @@ class RemoteSerializer { @visibleForTesting Filter decodeFieldFilter(proto.StructuredQuery_FieldFilter builder) { - final FieldPath fieldPath = - FieldPath.fromServerFormat(builder.field_1.fieldPath); + final FieldPath fieldPath = FieldPath.fromServerFormat(builder.field_1.fieldPath); - final FilterOperator filterOperator = - _decodeFieldFilterOperator(builder.op); - final FieldValue value = decodeValue(builder.value); - return FieldFilter(fieldPath, filterOperator, value); + final FilterOperator filterOperator = _decodeFieldFilterOperator(builder.op); + return FieldFilter(fieldPath, filterOperator, builder.value); } Filter _decodeUnaryFilter(proto.StructuredQuery_UnaryFilter value) { - final FieldPath fieldPath = - FieldPath.fromServerFormat(value.field_2.fieldPath); + final FieldPath fieldPath = FieldPath.fromServerFormat(value.field_2.fieldPath); switch (value.op) { case proto.StructuredQuery_UnaryFilter_Operator.IS_NAN: - return FieldFilter(fieldPath, FilterOperator.equal, DoubleValue.nan); + return FieldFilter(fieldPath, FilterOperator.equal, NAN_VALUE); case proto.StructuredQuery_UnaryFilter_Operator.IS_NULL: - return FieldFilter( - fieldPath, FilterOperator.equal, NullValue.nullValue()); + return FieldFilter(fieldPath, FilterOperator.equal, NULL_VALUE); + break; + case proto.StructuredQuery_UnaryFilter_Operator.IS_NOT_NAN: + return FieldFilter(fieldPath, FilterOperator.notEqual, NAN_VALUE); + break; + case proto.StructuredQuery_UnaryFilter_Operator.IS_NOT_NULL: + return FieldFilter(fieldPath, FilterOperator.notEqual, NULL_VALUE); + break; default: throw fail('Unrecognized UnaryFilter.operator ${value.op}'); } @@ -836,8 +626,7 @@ class RemoteSerializer { ..freeze(); } - proto.StructuredQuery_FieldFilter_Operator _encodeFieldFilterOperator( - FilterOperator operator) { + 
proto.StructuredQuery_FieldFilter_Operator _encodeFieldFilterOperator(FilterOperator operator) { switch (operator) { case FilterOperator.lessThan: return proto.StructuredQuery_FieldFilter_Operator.LESS_THAN; @@ -845,23 +634,26 @@ class RemoteSerializer { return proto.StructuredQuery_FieldFilter_Operator.LESS_THAN_OR_EQUAL; case FilterOperator.equal: return proto.StructuredQuery_FieldFilter_Operator.EQUAL; + case FilterOperator.notEqual: + return proto.StructuredQuery_FieldFilter_Operator.NOT_EQUAL; case FilterOperator.graterThan: return proto.StructuredQuery_FieldFilter_Operator.GREATER_THAN; case FilterOperator.graterThanOrEqual: return proto.StructuredQuery_FieldFilter_Operator.GREATER_THAN_OR_EQUAL; case FilterOperator.arrayContains: return proto.StructuredQuery_FieldFilter_Operator.ARRAY_CONTAINS; - case FilterOperator.arrayContainsAny: - return proto.StructuredQuery_FieldFilter_Operator.ARRAY_CONTAINS_ANY; case FilterOperator.IN: return proto.StructuredQuery_FieldFilter_Operator.IN; + case FilterOperator.arrayContainsAny: + return proto.StructuredQuery_FieldFilter_Operator.ARRAY_CONTAINS_ANY; + case FilterOperator.notIn: + return proto.StructuredQuery_FieldFilter_Operator.NOT_IN; default: throw fail('Unknown operator $operator'); } } - FilterOperator _decodeFieldFilterOperator( - proto.StructuredQuery_FieldFilter_Operator operator) { + FilterOperator _decodeFieldFilterOperator(proto.StructuredQuery_FieldFilter_Operator operator) { switch (operator) { case proto.StructuredQuery_FieldFilter_Operator.LESS_THAN: return FilterOperator.lessThan; @@ -869,16 +661,20 @@ class RemoteSerializer { return FilterOperator.lessThanOrEqual; case proto.StructuredQuery_FieldFilter_Operator.EQUAL: return FilterOperator.equal; + case proto.StructuredQuery_FieldFilter_Operator.NOT_EQUAL: + return FilterOperator.notEqual; case proto.StructuredQuery_FieldFilter_Operator.GREATER_THAN_OR_EQUAL: return FilterOperator.graterThanOrEqual; case proto.StructuredQuery_FieldFilter_Operator.GREATER_THAN: return FilterOperator.graterThan; case proto.StructuredQuery_FieldFilter_Operator.ARRAY_CONTAINS: return FilterOperator.arrayContains; - case proto.StructuredQuery_FieldFilter_Operator.ARRAY_CONTAINS_ANY: - return FilterOperator.arrayContainsAny; case proto.StructuredQuery_FieldFilter_Operator.IN: return FilterOperator.IN; + case proto.StructuredQuery_FieldFilter_Operator.ARRAY_CONTAINS_ANY: + return FilterOperator.arrayContainsAny; + case proto.StructuredQuery_FieldFilter_Operator.NOT_IN: + return FilterOperator.notIn; default: throw fail('Unhandled FieldFilter.operator $operator'); } @@ -887,8 +683,7 @@ class RemoteSerializer { // Property orders proto.StructuredQuery_Order _encodeOrderBy(OrderBy orderBy) { - final proto.StructuredQuery_Order builder = - proto.StructuredQuery_Order.create(); + final proto.StructuredQuery_Order builder = proto.StructuredQuery_Order.create(); if (orderBy.direction == OrderByDirection.ascending) { builder.direction = proto.StructuredQuery_Direction.ASCENDING; } else { @@ -901,8 +696,7 @@ class RemoteSerializer { } OrderBy _decodeOrderBy(proto.StructuredQuery_Order value) { - final FieldPath fieldPath = - FieldPath.fromServerFormat(value.field_1.fieldPath); + final FieldPath fieldPath = FieldPath.fromServerFormat(value.field_1.fieldPath); OrderByDirection direction; switch (value.direction) { @@ -921,101 +715,85 @@ class RemoteSerializer { // Bounds proto.Cursor _encodeBound(Bound bound) { - final proto.Cursor builder = proto.Cursor.create()..before = bound.before; - for (FieldValue 
component in bound.position) { - builder.values.add(encodeValue(component)); - } - return builder..freeze(); + return proto.Cursor( + before: bound.before, + values: bound.position, + ); } Bound _decodeBound(proto.Cursor value) { - final int valuesCount = value.values.length; - final List indexComponents = List(valuesCount); - - for (int i = 0; i < valuesCount; i++) { - final proto_v1.Value valueProto = value.values[i]; - indexComponents[i] = decodeValue(valueProto); - } - return Bound(position: indexComponents, before: value.before); + return Bound(position: value.values, before: value.before); } // Watch changes WatchChange decodeWatchChange(proto.ListenResponse protoChange) { - WatchChange watchChange; - - if (protoChange.hasTargetChange()) { - final proto.TargetChange targetChange = protoChange.targetChange; - WatchTargetChangeType changeType; - GrpcError cause; - switch (targetChange.targetChangeType) { - case proto.TargetChange_TargetChangeType.NO_CHANGE: - changeType = WatchTargetChangeType.noChange; - break; - case proto.TargetChange_TargetChangeType.ADD: - changeType = WatchTargetChangeType.added; - break; - case proto.TargetChange_TargetChangeType.REMOVE: - changeType = WatchTargetChangeType.removed; - cause = _fromStatus(targetChange.cause); - break; - case proto.TargetChange_TargetChangeType.CURRENT: - changeType = WatchTargetChangeType.current; - break; - case proto.TargetChange_TargetChangeType.RESET: - changeType = WatchTargetChangeType.reset; - break; - default: - throw ArgumentError('Unknown target change type'); - } - watchChange = WatchChangeWatchTargetChange( - changeType, - targetChange.targetIds, - Uint8List.fromList(targetChange.resumeToken), - cause, - ); - } else if (protoChange.hasDocumentChange()) { - final proto.DocumentChange docChange = protoChange.documentChange; - final List added = docChange.targetIds; - final List removed = docChange.removedTargetIds; - final DocumentKey key = decodeKey(docChange.document.name); - final SnapshotVersion version = - decodeVersion(docChange.document.updateTime); - hardAssert(version != SnapshotVersion.none, - 'Got a document change without an update time'); - final Document document = Document.fromProto( - key, version, DocumentState.synced, docChange.document, decodeValue); - watchChange = - WatchChangeDocumentChange(added, removed, document.key, document); - } else if (protoChange.hasDocumentDelete()) { - final proto.DocumentDelete docDelete = protoChange.documentDelete; - final List removed = docDelete.removedTargetIds; - final DocumentKey key = decodeKey(docDelete.document); - // Note that version might be unset in which case we use SnapshotVersion.none - final SnapshotVersion version = decodeVersion(docDelete.readTime); - final NoDocument doc = - NoDocument(key, version, hasCommittedMutations: false); - watchChange = WatchChangeDocumentChange([], removed, doc.key, doc); - } else if (protoChange.hasDocumentRemove()) { - final proto.DocumentRemove docRemove = protoChange.documentRemove; - final List removed = docRemove.removedTargetIds; - final DocumentKey key = decodeKey(docRemove.document); - watchChange = WatchChangeDocumentChange([], removed, key, null); - } else if (protoChange.hasFilter()) { - final proto.ExistenceFilter protoFilter = protoChange.filter; - // TODO(long1eu): implement existence filter parsing (see b/33076578) - final ExistenceFilter filter = ExistenceFilter(protoFilter.count); - final int targetId = protoFilter.targetId; - watchChange = WatchChangeExistenceFilterWatchChange(targetId, filter); - } else 
{ - throw ArgumentError('Unknown change type set'); + switch (protoChange.whichResponseType()) { + case proto.ListenResponse_ResponseType.targetChange: + final proto.TargetChange targetChange = protoChange.targetChange; + WatchTargetChangeType changeType; + GrpcError cause; + switch (targetChange.targetChangeType) { + case proto.TargetChange_TargetChangeType.NO_CHANGE: + changeType = WatchTargetChangeType.noChange; + break; + case proto.TargetChange_TargetChangeType.ADD: + changeType = WatchTargetChangeType.added; + break; + case proto.TargetChange_TargetChangeType.REMOVE: + changeType = WatchTargetChangeType.removed; + cause = _fromStatus(targetChange.cause); + break; + case proto.TargetChange_TargetChangeType.CURRENT: + changeType = WatchTargetChangeType.current; + break; + case proto.TargetChange_TargetChangeType.RESET: + changeType = WatchTargetChangeType.reset; + break; + default: + throw ArgumentError('Unknown target change type'); + } + return WatchChangeWatchTargetChange( + changeType, + targetChange.targetIds, + Uint8List.fromList(targetChange.resumeToken), + cause, + ); + case proto.ListenResponse_ResponseType.documentChange: + final proto.DocumentChange docChange = protoChange.documentChange; + final List added = docChange.targetIds; + final List removed = docChange.removedTargetIds; + final DocumentKey key = decodeKey(docChange.document.name); + final SnapshotVersion version = decodeVersion(docChange.document.updateTime); + hardAssert(version != SnapshotVersion.none, 'Got a document change without an update time'); + final ObjectValue data = ObjectValue.fromMap(docChange.document.fields); + final Document document = Document(key, version, data, DocumentState.synced); + return WatchChangeDocumentChange(added, removed, document.key, document); + case proto.ListenResponse_ResponseType.documentDelete: + final proto.DocumentDelete docDelete = protoChange.documentDelete; + final List removed = docDelete.removedTargetIds; + final DocumentKey key = decodeKey(docDelete.document); + // Note that version might be unset in which case we use SnapshotVersion.none + final SnapshotVersion version = decodeVersion(docDelete.readTime); + final NoDocument doc = NoDocument(key, version, hasCommittedMutations: false); + return WatchChangeDocumentChange([], removed, doc.key, doc); + case proto.ListenResponse_ResponseType.filter: + final proto.ExistenceFilter protoFilter = protoChange.filter; + // TODO(long1eu): implement existence filter parsing (see b/33076578) + final ExistenceFilter filter = ExistenceFilter(protoFilter.count); + final int targetId = protoFilter.targetId; + return WatchChangeExistenceFilterWatchChange(targetId, filter); + case proto.ListenResponse_ResponseType.documentRemove: + final proto.DocumentRemove docRemove = protoChange.documentRemove; + final List removed = docRemove.removedTargetIds; + final DocumentKey key = decodeKey(docRemove.document); + return WatchChangeDocumentChange([], removed, key, null); + default: + throw ArgumentError('Unknown change type set'); } - - return watchChange; } - SnapshotVersion decodeVersionFromListenResponse( - proto.ListenResponse watchChange) { + SnapshotVersion decodeVersionFromListenResponse(proto.ListenResponse watchChange) { // We have only reached a consistent snapshot for the entire stream if there is a [read_time] set and it applies to // all targets (i.e. the list of targets is empty). The backend is guaranteed to send such responses. 
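That consistency rule is small enough to show on its own. The sketch below is an illustration under stated assumptions, not the decoder itself: a made-up FakeTargetChange stands in for the generated proto, and an integer of 0 stands in for SnapshotVersion.none.

// Sketch only: stand-in types instead of the generated ListenResponse proto.
class FakeTargetChange {
  FakeTargetChange(this.targetIds, this.readTimeMicros);

  final List<int> targetIds;
  final int readTimeMicros; // 0 stands for "no read time" / SnapshotVersion.none
}

/// Returns the snapshot version to report, or 0 (none) when the response does
/// not represent a consistent snapshot for the whole stream.
int versionFromTargetChange(FakeTargetChange change) {
  if (change.targetIds.isNotEmpty) {
    return 0; // A change scoped to specific targets never advances the global snapshot.
  }
  return change.readTimeMicros;
}

void main() {
  print(versionFromTargetChange(FakeTargetChange(<int>[], 42))); // 42: applies to all targets
  print(versionFromTargetChange(FakeTargetChange(<int>[2], 42))); // 0: targeted change
}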
@@ -1031,7 +809,6 @@ class RemoteSerializer { GrpcError _fromStatus(proto.Status status) { // TODO(long1eu): Use details? - return GrpcError.custom( - status.code, status.hasMessage() ? status.message : null); + return GrpcError.custom(status.code, status.hasMessage() ? status.message : null); } } diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/remote_store.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/remote_store.dart index 018457ca..5d050e8f 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/remote_store.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/remote_store.dart @@ -11,78 +11,102 @@ import 'package:_firebase_internal_vm/_firebase_internal_vm.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/core/online_state.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/core/transaction.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/local/local_store.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/query_data.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/target_data.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/local/query_purpose.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/document_key.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/mutation_batch.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/mutation_batch_result.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/mutation_result.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/snapshot_version.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/remote/datastore/datastore.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/remote/datastore.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/remote/online_state_tracker.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/remote/remote_event.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/remote/target_change.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/remote/watch_change.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/remote/watch_change_aggregator.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/remote/watch_stream.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/remote/write_stream.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/util/assert.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/util/timer_task.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/util/async_task.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/util/util.dart'; import 'package:grpc/grpc.dart'; import 'package:meta/meta.dart'; import 'package:rxdart/rxdart.dart'; -/// [RemoteStore] handles all interaction with the backend through a simple, -/// clean interface. This class is not thread safe and should be only called -/// from the worker [AsyncQueue]. +/// [RemoteStore] handles all interaction with the backend through a simple, clean interface. This class is not thread +/// safe and should be only called from the worker [AsyncQueue]. 
class RemoteStore implements TargetMetadataProvider { RemoteStore( this._remoteStoreCallback, this._localStore, this._datastore, - this._onNetworkConnected, - TaskScheduler scheduler, - ) : _listenTargets = {}, + AsyncQueue workerQueue, + BehaviorSubject onNetworkConnected, + ) : _listenTargets = {}, _writePipeline = Queue(), _onlineStateTracker = OnlineStateTracker( - scheduler, _remoteStoreCallback.handleOnlineStateChange), - _watchStream = _datastore.watchStream, - _writeStream = _datastore.writeStream { - _watchStreamSub = _watchStream.listen(_watchEvents); - _writeStreamSub = _writeStream.listen(_writeEvents); - // we skip the seed value - _onNetworkConnectedSub = _onNetworkConnected - .skip(1) - .debounceTime(const Duration(seconds: 5)) - .listen(_networkEvents); + workerQueue, + _remoteStoreCallback.handleOnlineStateChange, + ) { + // Create new streams (but note they're not started yet). + _watchStream = _datastore.createWatchStream(WatchStreamCallback( + onOpen: _handleWatchStreamOpen, + onClose: _handleWatchStreamClose, + onWatchChange: _handleWatchChange, + )); + + _writeStream = _datastore.createWriteStream(WriteStreamCallback( + // we use this so that [_writeStream] is not null when called + onOpen: () => _writeStream.writeHandshake(), + onClose: _handleWriteStreamClose, + onHandshakeComplete: _handleWriteStreamHandshakeComplete, + onWriteResponse: _handleWriteStreamMutationResults, + )); + + _networkSub = onNetworkConnected.listen((bool value) { + // If the network has been explicitly disabled, make sure we don't accidentally + // re-enable it. + if (canUseNetwork()) { + // Tear down and re-create our network streams. This will ensure the backoffs are + // reset. + Log.d(_tag, 'Restarting streams for network reachability change.'); + _restartNetwork(); + } + }); } + /// The maximum number of pending writes to allow. + // TODO(long1eu): Negotiate this value with the backend. + static const int _maxPendingWrites = 10; + + /// The log tag to use for this class. + static const String _tag = 'RemoteStore'; + final RemoteStoreCallback _remoteStoreCallback; + final LocalStore _localStore; + final Datastore _datastore; - /// A mapping of watched targets that the client cares about tracking and the - /// user has explicitly called a 'listen' for this target. + /// A mapping of watched targets that the client cares about tracking and the user has explicitly called a 'listen' + /// for this target. /// - /// These targets may or may not have been sent to or acknowledged by the - /// server. On re-establishing the listen stream, these targets should be sent - /// to the server. The targets removed with unlistens are removed eagerly - /// without waiting for confirmation from the listen stream. - final Map _listenTargets; + /// These targets may or may not have been sent to or acknowledged by the server. On re-establishing the listen + /// stream, these targets should be sent to the server. The targets removed with unlistens are removed eagerly without + /// waiting for confirmation from the listen stream. 
+ final Map _listenTargets; + final OnlineStateTracker _onlineStateTracker; - final BehaviorSubject _onNetworkConnected; - final WatchStream _watchStream; - final WriteStream _writeStream; - StreamSubscription _watchStreamSub; - StreamSubscription _writeStreamSub; - StreamSubscription _onNetworkConnectedSub; + WatchStream _watchStream; + + WriteStream _writeStream; + + StreamSubscription _networkSub; bool _networkEnabled = false; - WatchChangeAggregator _watchChangeAggregator; - /// The maximum number of pending writes to allow. - // TODO(long1eu): Negotiate this value with the backend. - static const int _maxPendingWrites = 10; + WatchChangeAggregator _watchChangeAggregator; /// A list of up to [_maxPendingWrites] writes that we have fetched from the [LocalStore] via [fillWritePipeline] and /// have or will send to the write stream. @@ -94,17 +118,17 @@ class RemoteStore implements TargetMetadataProvider { /// re-sent if the stream is interrupted / restarted before they're acknowledged. /// /// Write responses from the backend are linked to their originating request purely based on order, and so we can just - /// poll writes from the front of the [_writePipeline] as we receive responses. + /// poll() writes from the front of the [_writePipeline] as we receive responses. final Queue _writePipeline; /// Re-enables the network. Only to be called as the counterpart to [disableNetwork]. Future enableNetwork() async { _networkEnabled = true; - if (_canUseNetwork) { + if (canUseNetwork()) { _writeStream.lastStreamToken = _localStore.lastStreamToken; - if (_shouldStartWatchStream) { + if (_shouldStartWatchStream()) { await _startWatchStream(); } else { await _onlineStateTracker.updateState(OnlineState.unknown); @@ -115,8 +139,16 @@ class RemoteStore implements TargetMetadataProvider { } } - /// Temporarily disables the network. The network can be re-enabled using - /// [enableNetwork]. + /// Re-enables the network, and forces the state to [OnlineState.online]. Without this, the state + /// will be [OnlineState.unknown]. If the [OnlineStateTracker] updates the state from + /// [OnlineState.unknown] to [OnlineState.unknown], then it doesn't trigger the callback. + @visibleForTesting + Future forceEnableNetwork() async { + await enableNetwork(); + await _onlineStateTracker.updateState(OnlineState.online); + } + + /// Temporarily disables the network. The network can be re-enabled using [enableNetwork]. Future disableNetwork() async { _networkEnabled = false; await _disableNetworkInternal(); @@ -126,20 +158,26 @@ class RemoteStore implements TargetMetadataProvider { } Future _disableNetworkInternal() async { - Log.d('RemoteStore', 'Performing write stream teardown'); - await _watchStream.stop(); await _writeStream.stop(); if (_writePipeline.isNotEmpty) { - Log.d('RemoteStore', - 'Stopping write stream with ${_writePipeline.length} pending writes'); + Log.d(_tag, 'Stopping write stream with ${_writePipeline.length} pending writes'); _writePipeline.clear(); } _cleanUpWatchStreamState(); } + Future _restartNetwork() async { + _networkEnabled = false; + await _disableNetworkInternal(); + await _onlineStateTracker.updateState(OnlineState.unknown); + _writeStream.inhibitBackoff(); + _watchStream.inhibitBackoff(); + await enableNetwork(); + } + /// Starts up the remote store, creating streams, restoring state from [LocalStore], etc. This should called before /// using any other API endpoints in this class. 
Future start() async { @@ -150,16 +188,14 @@ class RemoteStore implements TargetMetadataProvider { /// Shuts down the remote store, tearing down connections and otherwise cleaning up. This is not reversible and /// renders the Remote Store unusable. Future shutdown() async { - Log.d('RemoteStore', 'Shutting down'); + Log.d(_tag, 'Shutting down'); _networkEnabled = false; + await _networkSub.cancel(); await _disableNetworkInternal(); await _datastore.shutdown(); // Set the OnlineState to UNKNOWN (rather than OFFLINE) to avoid potentially triggering spurious listener events // with cached data, etc. await _onlineStateTracker.updateState(OnlineState.unknown); - await _watchStreamSub.cancel(); - await _writeStreamSub.cancel(); - await _onNetworkConnectedSub.cancel(); } /// Tells the [RemoteStore] that the currently authenticated user has changed. @@ -168,84 +204,47 @@ class RemoteStore implements TargetMetadataProvider { /// across users. Restarts the streams if appropriate. Future handleCredentialChange() async { // If the network has been explicitly disabled, make sure we don't accidentally re-enable it. - if (_canUseNetwork) { + if (canUseNetwork()) { // Tear down and re-create our network streams. This will ensure we get a fresh auth token for the new user and // re-fill the write pipeline with new mutations from the [LocalStore] (since mutations are per-user). - Log.d('RemoteStore', 'Restarting streams for new credential.'); + Log.d(_tag, 'Restarting streams for new credential.'); _networkEnabled = false; - await _disableNetworkInternal(); - await _onlineStateTracker.updateState(OnlineState.unknown); - await enableNetwork(); - } - } - - /// Re-enables the network, and forces the state to ONLINE. Without this, the - /// state will be [OnlineState.unknown]. If the [OnlineStateTracker] updates - /// the state from [OnlineState.unknown] to [OnlineState.unknown], then it - /// doesn't trigger the callback. - @visibleForTesting - void forceEnableNetwork() { - enableNetwork(); - _onlineStateTracker.updateState(OnlineState.online); - } - - void _restartNetwork() { - _networkEnabled = false; - _disableNetworkInternal(); - _onlineStateTracker.updateState(OnlineState.unknown); - enableNetwork(); - } - - void _networkEvents(bool isConnected) { - if (isConnected) { - // Tear down and re-create our network streams. This will ensure the - // backoffs are reset. - Log.d('$runtimeType', - 'Restarting streams for network reachability change.'); - _restartNetwork(); + await _restartNetwork(); } } // Watch Stream - void _watchEvents(StreamEvent event) { - if (event is OpenEvent) { - _handleWatchStreamOpen(); - } else if (event is CloseEvent) { - _handleWatchStreamClose(event.error); - } else if (event is OnWatchChange) { - _handleWatchChange(event.snapshotVersion, event.watchChange); - } - } - /// Listens to the target identified by the given [QueryData]. 
- Future listen(QueryData queryData) async { - final int targetId = queryData.targetId; - hardAssert(!_listenTargets.containsKey(targetId), - 'listen called with duplicate target ID: $targetId'); + Future listen(TargetData targetData) async { + final int targetId = targetData.targetId; + if (_listenTargets.containsKey(targetId)) { + return; + } - _listenTargets[targetId] = queryData; + _listenTargets[targetId] = targetData; - if (_shouldStartWatchStream) { + if (_shouldStartWatchStream()) { await _startWatchStream(); } else if (_watchStream.isOpen) { - _sendWatchRequest(queryData); + _sendWatchRequest(targetData); } } - void _sendWatchRequest(QueryData queryData) { - _watchChangeAggregator.recordPendingTargetRequest(queryData.targetId); - _watchStream.watchQuery(queryData); + void _sendWatchRequest(TargetData targetData) { + _watchChangeAggregator.recordPendingTargetRequest(targetData.targetId); + _watchStream.watchQuery(targetData); } /// Stops listening to the target with the given target ID. /// + /// It is an error if the given target id is not being listened to. + /// /// If this is called with the last active targetId, the watch stream enters idle mode and will be torn down after /// one minute of inactivity. Future stopListening(int targetId) async { - final QueryData queryData = _listenTargets.remove(targetId); - hardAssert(queryData != null, - 'stopListening called on target no currently watched: $targetId'); + final TargetData targetData = _listenTargets.remove(targetId); + hardAssert(targetData != null, 'stopListening called on target no currently watched: $targetId'); // The watch stream might not be started if we're in a disconnected state if (_watchStream.isOpen) { @@ -255,7 +254,7 @@ class RemoteStore implements TargetMetadataProvider { if (_listenTargets.isEmpty) { if (_watchStream.isOpen) { _watchStream.markIdle(); - } else if (_canUseNetwork) { + } else if (canUseNetwork()) { // Revert to [OnlineState.unknown] if the watch stream is not open and we have no listeners, since without any // listens to send we cannot confirm if the stream is healthy and upgrade to [OnlineState.online]. await _onlineStateTracker.updateState(OnlineState.unknown); @@ -269,16 +268,15 @@ class RemoteStore implements TargetMetadataProvider { } /// Returns true if the network is enabled, the write stream has not yet been started and there are pending writes. - bool get _shouldStartWriteStream { - return _canUseNetwork && - !_writeStream.isStarted && - _writePipeline.isNotEmpty; + bool _shouldStartWriteStream() { + return canUseNetwork() && !_writeStream.isStarted && _writePipeline.isNotEmpty; } /// Returns true if the network is enabled, the watch stream has not yet been started and there are active watch /// targets. 
- bool get _shouldStartWatchStream => - _canUseNetwork && !_watchStream.isStarted && _listenTargets.isNotEmpty; + bool _shouldStartWatchStream() { + return canUseNetwork() && !_watchStream.isStarted && _listenTargets.isNotEmpty; + } void _cleanUpWatchStreamState() { // If the connection is closed then we'll never get a snapshot version for the accumulated changes and so we'll @@ -288,8 +286,7 @@ class RemoteStore implements TargetMetadataProvider { } Future _startWatchStream() async { - hardAssert(_shouldStartWatchStream, - 'startWatchStream() called when shouldStartWatchStream() is false.'); + hardAssert(_shouldStartWatchStream(), 'startWatchStream() called when shouldStartWatchStream() is false.'); _watchChangeAggregator = WatchChangeAggregator(this); await _watchStream.start(); await _onlineStateTracker.handleWatchStreamStart(); @@ -300,8 +297,7 @@ class RemoteStore implements TargetMetadataProvider { _listenTargets.values.forEach(_sendWatchRequest); } - Future _handleWatchChange( - SnapshotVersion snapshotVersion, WatchChange watchChange) async { + Future _handleWatchChange(SnapshotVersion snapshotVersion, WatchChange watchChange) async { // Mark the connection as ONLINE because we got a message from the server. await _onlineStateTracker.updateState(OnlineState.online); @@ -328,8 +324,7 @@ class RemoteStore implements TargetMetadataProvider { } if (snapshotVersion != SnapshotVersion.none) { - final SnapshotVersion lastRemoteSnapshotVersion = - _localStore.getLastRemoteSnapshotVersion(); + final SnapshotVersion lastRemoteSnapshotVersion = _localStore.getLastRemoteSnapshotVersion(); if (snapshotVersion.compareTo(lastRemoteSnapshotVersion) >= 0) { // We have received a target change with a global snapshot if the snapshot version is not equal to @@ -343,14 +338,13 @@ class RemoteStore implements TargetMetadataProvider { Future _handleWatchStreamClose(GrpcError status) async { if (status.code == StatusCode.ok) { // Graceful stop (due to stop() or idle timeout). Make sure that's desirable. - hardAssert(!_shouldStartWatchStream, - 'Watch stream was stopped gracefully while still needed.'); + hardAssert(!_shouldStartWatchStream(), 'Watch stream was stopped gracefully while still needed.'); } _cleanUpWatchStreamState(); // If we still need the watch stream, retry the connection. - if (_shouldStartWatchStream) { + if (_shouldStartWatchStream()) { await _onlineStateTracker.handleWatchStreamFailure(status); await _startWatchStream(); @@ -361,7 +355,7 @@ class RemoteStore implements TargetMetadataProvider { } } - bool get _canUseNetwork { + bool canUseNetwork() { // PORTING NOTE: This method exists mostly because web also has to take into account primary vs. secondary state. return _networkEnabled; } @@ -369,25 +363,21 @@ class RemoteStore implements TargetMetadataProvider { /// Takes a batch of changes from the [Datastore], repackages them as a [RemoteEvent], and passes that on to the /// listener, which is typically the [SyncEngine]. Future _raiseWatchSnapshot(SnapshotVersion snapshotVersion) async { - hardAssert(snapshotVersion != SnapshotVersion.none, - 'Can\'t raise event for unknown SnapshotVersion'); - final RemoteEvent remoteEvent = - _watchChangeAggregator.createRemoteEvent(snapshotVersion); + hardAssert(snapshotVersion != SnapshotVersion.none, 'Can\'t raise event for unknown SnapshotVersion'); + final RemoteEvent remoteEvent = _watchChangeAggregator.createRemoteEvent(snapshotVersion); // Update in-memory resume tokens. 
[LocalStore] will update the persistent view of these when applying the // completed [RemoteEvent]. - for (MapEntry entry - in remoteEvent.targetChanges.entries) { + for (MapEntry entry in remoteEvent.targetChanges.entries) { final TargetChange targetChange = entry.value; if (targetChange.resumeToken.isNotEmpty) { final int targetId = entry.key; - final QueryData queryData = _listenTargets[targetId]; + final TargetData targetData = _listenTargets[targetId]; // A watched target might have been removed already. - if (queryData != null) { - _listenTargets[targetId] = queryData.copyWith( + if (targetData != null) { + _listenTargets[targetId] = targetData.copyWith( snapshotVersion: snapshotVersion, resumeToken: targetChange.resumeToken, - sequenceNumber: queryData.sequenceNumber, ); } } @@ -395,13 +385,12 @@ class RemoteStore implements TargetMetadataProvider { // Re-establish listens for the targets that have been invalidated by existence filter mismatches. for (int targetId in remoteEvent.targetMismatches) { - final QueryData queryData = _listenTargets[targetId]; + final TargetData targetData = _listenTargets[targetId]; // A watched target might have been removed already. - if (queryData != null) { + if (targetData != null) { // Clear the resume token for the query, since we're in a known mismatch state. - _listenTargets[targetId] = queryData.copyWith( - snapshotVersion: queryData.snapshotVersion, - sequenceNumber: queryData.sequenceNumber, + _listenTargets[targetId] = targetData.copyWith( + sequenceNumber: targetData.sequenceNumber, resumeToken: Uint8List.fromList([]), ); @@ -412,13 +401,13 @@ class RemoteStore implements TargetMetadataProvider { // Mark the query we send as being on behalf of an existence filter mismatch, but don't actually retain that in // [listenTargets]. This ensures that we flag the first re-listen this way without impacting future listens of // this target (that might happen e.g. on reconnect). - final QueryData requestQueryData = QueryData( - queryData.query, + final TargetData requestTargetData = TargetData( + targetData.target, targetId, - queryData.sequenceNumber, + targetData.sequenceNumber, QueryPurpose.existenceFilterMismatch, ); - _sendWatchRequest(requestQueryData); + _sendWatchRequest(requestTargetData); } } @@ -426,49 +415,31 @@ class RemoteStore implements TargetMetadataProvider { await _remoteStoreCallback.handleRemoteEvent(remoteEvent); } - Future _processTargetError( - WatchChangeWatchTargetChange targetChange) async { - hardAssert( - targetChange.cause != null, 'Processing target error without a cause'); + Future _processTargetError(WatchChangeWatchTargetChange targetChange) async { + hardAssert(targetChange.cause != null, 'Processing target error without a cause'); for (int targetId in targetChange.targetIds) { // Ignore targets that have been removed already. 
if (_listenTargets.containsKey(targetId)) { _listenTargets.remove(targetId); _watchChangeAggregator.removeTarget(targetId); - await _remoteStoreCallback.handleRejectedListen( - targetId, targetChange.cause); + await _remoteStoreCallback.handleRejectedListen(targetId, targetChange.cause); } } } // Write Stream - void _writeEvents(StreamEvent event) { - if (event is OpenEvent) { - _writeStream.writeHandshake(); - } else if (event is CloseEvent) { - _handleWriteStreamClose(event.error); - } else if (event is HandshakeCompleteEvent) { - _handleWriteStreamHandshakeComplete(); - } else if (event is OnWriteResponse) { - _handleWriteStreamMutationResults(event.version, event.results); - } - } - /// Attempts to fill our write pipeline with writes from the [LocalStore]. /// - /// Called internally to bootstrap or refill the write pipeline by - /// [SyncEngine] whenever there are new mutations to process. + /// Called internally to bootstrap or refill the write pipeline by [SyncEngine] whenever there are new mutations to + /// process. /// /// Starts the write stream if necessary. Future fillWritePipeline() async { - int lastBatchIdRetrieved = _writePipeline.isEmpty - ? MutationBatch.unknown - : _writePipeline.last.batchId; + int lastBatchIdRetrieved = _writePipeline.isEmpty ? MutationBatch.unknown : _writePipeline.last.batchId; while (_canAddToWritePipeline()) { - final MutationBatch batch = - await _localStore.getNextMutationBatch(lastBatchIdRetrieved); + final MutationBatch batch = await _localStore.getNextMutationBatch(lastBatchIdRetrieved); if (batch == null) { if (_writePipeline.isEmpty) { @@ -480,38 +451,34 @@ class RemoteStore implements TargetMetadataProvider { lastBatchIdRetrieved = batch.batchId; } - if (_shouldStartWriteStream) { - _startWriteStream(); + if (_shouldStartWriteStream()) { + await _startWriteStream(); } } - /// Returns true if we can add to the write pipeline (i.e. it is not full and - /// the network is enabled). + /// Returns true if we can add to the write pipeline (i.e. it is not full and the network is enabled). bool _canAddToWritePipeline() { - return _canUseNetwork && _writePipeline.length < _maxPendingWrites; + return canUseNetwork() && _writePipeline.length < _maxPendingWrites; } /// Queues additional writes to be sent to the write stream, sending them immediately if the write stream is /// established. void _addToWritePipeline(MutationBatch mutationBatch) { - hardAssert(_canAddToWritePipeline(), - 'addToWritePipeline called when pipeline is full'); + hardAssert(_canAddToWritePipeline(), 'addToWritePipeline called when pipeline is full'); _writePipeline.add(mutationBatch); - if (_writeStream.isOpen && _writeStream.handshakeComplete) { + if (_writeStream.isOpen && _writeStream.isHandshakeComplete) { _writeStream.writeMutations(mutationBatch.mutations); } } - void _startWriteStream() { - hardAssert(_shouldStartWriteStream, - 'startWriteStream() called when shouldStartWriteStream() is false.'); - _writeStream.start(); + Future _startWriteStream() async { + hardAssert(_shouldStartWriteStream(), 'startWriteStream() called when shouldStartWriteStream() is false.'); + await _writeStream.start(); } - /// Handles a successful handshake response from the server, which is our cue - /// to send any pending writes. + /// Handles a successful handshake response from the server, which is our cue to send any pending writes. Future _handleWriteStreamHandshakeComplete() async { // Record the stream token. 
await _localStore.setLastStreamToken(_writeStream.lastStreamToken); @@ -523,14 +490,13 @@ class RemoteStore implements TargetMetadataProvider { } /// Handles a successful [StreamingWriteResponse] from the server that contains a mutation result. - Future _handleWriteStreamMutationResults( - SnapshotVersion commitVersion, List results) async { + Future _handleWriteStreamMutationResults(SnapshotVersion commitVersion, List results) async { // This is a response to a write containing mutations and should be correlated to the first write in our write // pipeline. final MutationBatch batch = _writePipeline.removeFirst(); - final MutationBatchResult mutationBatchResult = MutationBatchResult.create( - batch, commitVersion, results, _writeStream.lastStreamToken); + final MutationBatchResult mutationBatchResult = + MutationBatchResult.create(batch, commitVersion, results, _writeStream.lastStreamToken); await _remoteStoreCallback.handleSuccessfulWrite(mutationBatchResult); // It's possible that with the completion of this mutation another slot has freed up. @@ -540,14 +506,13 @@ class RemoteStore implements TargetMetadataProvider { Future _handleWriteStreamClose(GrpcError status) async { if (status.code == StatusCode.ok) { // Graceful stop (due to stop() or idle timeout). Make sure that's desirable. - hardAssert(!_shouldStartWriteStream, - 'Write stream was stopped gracefully while still needed.'); + hardAssert(!_shouldStartWriteStream(), 'Write stream was stopped gracefully while still needed.'); } // If the write stream closed due to an error, invoke the error callbacks if there are pending writes. if (status.code != StatusCode.ok && _writePipeline.isNotEmpty) { // TODO(long1eu): handle UNAUTHENTICATED status, see go/firestore-client-errors - if (_writeStream.handshakeComplete) { + if (_writeStream.isHandshakeComplete) { // This error affects the actual writes await _handleWriteError(status); } else { @@ -559,28 +524,25 @@ class RemoteStore implements TargetMetadataProvider { // The write stream may have already been restarted by refilling the write pipeline for failed writes. In that case, // we don't want to start the write stream again. - if (_shouldStartWriteStream) { - _startWriteStream(); + if (_shouldStartWriteStream()) { + await _startWriteStream(); } } Future _handleWriteHandshakeError(GrpcError status) async { - hardAssert( - status.code != StatusCode.ok, 'Handling write error with status OK.'); + hardAssert(status.code != StatusCode.ok, 'Handling write error with status OK.'); // Reset the token if it's a permanent error, signaling the write stream is no longer valid. // Note that the handshake does not count as a write: see comments on isPermanentWriteError for details. if (Datastore.isPermanentGrpcError(status)) { final String token = toDebugString(_writeStream.lastStreamToken); - Log.d('RemoteStore', - 'RemoteStore error before completed handshake; resetting stream token $token: $status'); + Log.d(_tag, 'RemoteStore error before completed handshake; resetting stream token $token: $status'); _writeStream.lastStreamToken = WriteStream.emptyStreamToken; await _localStore.setLastStreamToken(WriteStream.emptyStreamToken); } } Future _handleWriteError(GrpcError status) async { - hardAssert( - status.code != StatusCode.ok, 'Handling write error with status OK.'); + hardAssert(status.code != StatusCode.ok, 'Handling write error with status OK.'); // Only handle permanent errors here. If it's transient, just let the retry logic kick in. 
if (Datastore.isPermanentWriteError(status)) { // If this was a permanent error, the request itself was the problem so it's not going to succeed if we resend it. @@ -597,19 +559,14 @@ class RemoteStore implements TargetMetadataProvider { } } - Transaction createTransaction() => Transaction(_datastore.transactionClient); + Transaction createTransaction() => Transaction(_datastore); @override - QueryData Function(int targetId) get getQueryDataForTarget { - return (int targetId) => _listenTargets[targetId]; - } + TargetData Function(int targetId) get getTargetDataForTarget => (int targetId) => _listenTargets[targetId]; @override - ImmutableSortedSet Function(int targetId) - get getRemoteKeysForTarget { - return (int targetId) { - return _remoteStoreCallback.getRemoteKeysForTarget(targetId); - }; + ImmutableSortedSet Function(int targetId) get getRemoteKeysForTarget { + return _remoteStoreCallback.getRemoteKeysForTarget; } } @@ -641,6 +598,5 @@ abstract class RemoteStoreCallback { /// assigned to the target when we received the last snapshot. /// /// Returns an empty set of document keys for unknown targets. - ImmutableSortedSet Function(int targetId) - get getRemoteKeysForTarget; + ImmutableSortedSet getRemoteKeysForTarget(int targetId); } diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/stream.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/stream.dart new file mode 100644 index 00000000..0b5c5778 --- /dev/null +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/stream.dart @@ -0,0 +1,113 @@ +// File created by +// Lung Razvan +// on 24/09/2018 + +import 'dart:async'; + +import 'package:cloud_firestore_vm/src/firebase/firestore/util/async_task.dart'; +import 'package:grpc/grpc.dart'; + +/// A Stream is an interface that represents a streaming RPC to the Firestore backend. It's built on +/// top of GRPC's own support for streaming RPCs, and adds several critical features for our +/// clients: +/// * Exponential backoff on failure +/// * Authentication via CredentialsProvider +/// * Dispatching all callbacks into the shared worker queue +/// * Closing idle streams after 60 seconds of inactivity +/// +/// Implementations of Stream should use AbstractStream and provide their own serialization of +/// models to and from the protocol buffers for a specific streaming RPC. +/// +/// ## Starting and Stopping +/// +/// Streaming RPCs are stateful and need to be [start]ed before messages can be sent and received. +/// The Stream will call its [onOpen] once the stream is ready to accept requests. +/// +/// Should a [start] fail, Stream will call the [onClose] method of the provided listener. +abstract class Stream { + /// Returns true if the RPC has been created locally and has started the process of connecting. + bool get isStarted; + + /// Returns true if the RPC will accept messages to send. + bool get isOpen; + + /// Starts the RPC. Only allowed if [isStarted] returns false. The stream is immediately ready for + /// use. + /// + /// When start returns, [isStarted] will return true. + Future start(); + + /// Stops the RPC. This is guaranteed *not* to call the [onClose] of the listener in order to + /// ensure that any recovery logic there does not attempt to reuse the stream. + /// + /// When stop returns [isStarted] will return false. + Future stop(); + + /// After an error the stream will usually back off on the next attempt to start it. 
If the error + /// warrants an immediate restart of the stream, the sender can use this to indicate that the + /// receiver should not back off. + /// + /// Each error will call the [onClose] method of the listener. That listener can decide to inhibit + /// backoff if required. + void inhibitBackoff(); +} + +/// AbstractStream can be in one of 5 states (each described in detail below) based on the following +/// state transition diagram: +/// +/// ``` +/// start() called auth & connection succeeded +/// INITIAL ----------------> STARTING -----------------------------> OPEN +/// ^ | | +/// | | error occurred | +/// | \-----------------------------v-----/ +/// | | +/// backoff | | +/// elapsed | start() called | +/// \--- BACKOFF <---------------- ERROR +/// +/// [any state] --------------------------> INITIAL +/// stop() called or +/// idle timer expired +/// ``` +enum StreamState { + /// The streaming RPC is not yet running and there is no error condition. Calling [Stream.start] + /// will start the stream immediately without backoff. While in this state [isStarted] will return + /// false. + initial, + + /// The stream is starting, either waiting for an auth token or for the stream to successfully + /// open. While in this state, [Stream.isStarted] will return true but [Stream.isOpen] will return + /// false. + /// + /// Porting Note: Auth is handled transparently by gRPC in this implementation, so this state is + /// used as intermediate state until the [Stream.onOpen] callback is called. + starting, + + /// The streaming RPC is up and running. Requests and responses can flow freely. Both + /// [Stream.isStarted] and [Stream.isOpen] will return true. + open, + + /// The stream encountered an error. The next start attempt will back off. While in this state + /// [Stream.isStarted] will return false. + error, + + /// An in-between state after an error where the stream is waiting before re-starting. After + /// waiting is complete, the stream will try to open. While in this state [Stream.isStarted] will + /// return true but [Stream.isOpen] will return false. + backoff, +} + +typedef OnClose = Future Function(GrpcError error); + +/// A (super-interface) for the stream callbacks. Implementations of Stream should provide their own +/// interface that extends this interface. +class StreamCallback { + const StreamCallback({this.onOpen, this.onClose}); + + /// The stream is now open and is accepting messages + final Task onOpen; + + /// The stream has closed. If there was an error, the status will be != OK. + final OnClose onClose; +} diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/target_change.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/target_change.dart index bb87aab0..a5dccb71 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/target_change.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/target_change.dart @@ -25,6 +25,12 @@ class TargetChange { @required this.current, }); + TargetChange.createSynthesizedTargetChangeForCurrentChange(this.current) + : resumeToken = Uint8List(0), + addedDocuments = DocumentKey.emptyKeySet, + modifiedDocuments = DocumentKey.emptyKeySet, + removedDocuments = DocumentKey.emptyKeySet; + /// Returns the opaque, server-assigned token that allows watching a query to /// be resumed after disconnecting without retransmitting all the data that /// matches the query. 
The resume token essentially identifies a point in time @@ -53,8 +59,7 @@ class TargetChange { identical(this, other) || other is TargetChange && runtimeType == other.runtimeType && - const DeepCollectionEquality() - .equals(resumeToken, other.resumeToken) && + const DeepCollectionEquality().equals(resumeToken, other.resumeToken) && current == other.current && addedDocuments == other.addedDocuments && modifiedDocuments == other.modifiedDocuments && diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/watch_change_aggregator.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/watch_change_aggregator.dart index 42491ae5..5b289e25 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/watch_change_aggregator.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/watch_change_aggregator.dart @@ -4,8 +4,8 @@ import 'package:_firebase_database_collection_vm/_firebase_database_collection_vm.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/core/document_view_change.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/core/query.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/query_data.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/core/target.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/target_data.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/local/query_purpose.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/document.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/document_key.dart'; @@ -29,12 +29,10 @@ class WatchChangeAggregator { final Map _targetStates = {}; /// Keeps track of the documents to update since the last raised snapshot. - Map _pendingDocumentUpdates = - {}; + Map _pendingDocumentUpdates = {}; /// A mapping of document keys to their set of target IDs. - Map> _pendingDocumentTargetMapping = - >{}; + Map> _pendingDocumentTargetMapping = >{}; /// A list of targets with existence filter mismatches. These targets are known to be inconsistent /// and their listens needs to be re-established by [RemoteStore]. @@ -54,8 +52,7 @@ class WatchChangeAggregator { } for (int targetId in documentChange.removedTargetIds) { - _removeDocumentFromTarget( - targetId, documentKey, documentChange.newDocument); + _removeDocumentFromTarget(targetId, documentKey, documentChange.newDocument); } } @@ -89,8 +86,7 @@ class WatchChangeAggregator { if (!targetState.isPending()) { removeTarget(targetId); } - hardAssert(targetChange.cause == null, - 'WatchChangeAggregator does not handle errored targets'); + hardAssert(targetChange.cause == null, 'WatchChangeAggregator does not handle errored targets'); break; case WatchTargetChangeType.current: if (_isActiveTarget(targetId)) { @@ -108,8 +104,7 @@ class WatchChangeAggregator { } break; default: - throw fail( - 'Unknown target watch change state: ${targetChange.changeType}'); + throw fail('Unknown target watch change state: ${targetChange.changeType}'); } } } @@ -121,27 +116,26 @@ class WatchChangeAggregator { if (targetIds.isNotEmpty) { return targetIds; } else { - return _targetStates.keys; + return _targetStates.keys.where(_isActiveTarget); } } /// Handles existence filters and synthesizes deletes for filter mismatches. Targets that are /// invalidated by filter mismatches are added to [pendingTargetResets]. 
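// [Editor's note: illustrative sketch, not part of this patch.] The existence-filter
// handling described above, reduced to a standalone decision function. The inputs are
// simplified stand-ins for the aggregator's state: `isDocumentQuery` comes from the
// target, `expectedCount` from the server's existence filter, and `currentCount` from
// the number of documents the client currently holds for the target.
enum ExistenceFilterOutcome { consistent, synthesizeDelete, resetTarget }

ExistenceFilterOutcome existenceFilterOutcome(
    bool isDocumentQuery, int expectedCount, int currentCount) {
  if (isDocumentQuery) {
    // A single-document target with an expected count of 0 means the backend says the
    // document does not exist, so the client synthesizes a deleted document for it.
    return expectedCount == 0
        ? ExistenceFilterOutcome.synthesizeDelete
        : ExistenceFilterOutcome.consistent;
  }
  // For every other target a count mismatch means the local view has drifted: the
  // resume token is cleared and the target is re-listened from scratch.
  return expectedCount != currentCount
      ? ExistenceFilterOutcome.resetTarget
      : ExistenceFilterOutcome.consistent;
}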
- void handleExistenceFilter( - WatchChangeExistenceFilterWatchChange watchChange) { + void handleExistenceFilter(WatchChangeExistenceFilterWatchChange watchChange) { final int targetId = watchChange.targetId; final int expectedCount = watchChange.existenceFilter.count; - final QueryData queryData = _queryDataForActiveTarget(targetId); - if (queryData != null) { - final Query query = queryData.query; - if (query.isDocumentQuery) { + final TargetData targetData = _queryDataForActiveTarget(targetId); + if (targetData != null) { + final Target target = targetData.target; + if (target.isDocumentQuery) { if (expectedCount == 0) { // The existence filter told us the document does not exist. We deduce that this document // does not exist and apply a deleted document to our updates. Without applying this // deleted document there might be another query that will raise this document as part of // a snapshot until it is resolved, essentially exposing inconsistency between queries. - final DocumentKey key = DocumentKey.fromPath(query.path); + final DocumentKey key = DocumentKey.fromPath(target.path); _removeDocumentFromTarget( targetId, key, @@ -152,8 +146,7 @@ class WatchChangeAggregator { ), ); } else { - hardAssert(expectedCount == 1, - 'Single document existence filter with count: $expectedCount'); + hardAssert(expectedCount == 1, 'Single document existence filter with count: $expectedCount'); } } else { final int currentSize = _getCurrentDocumentCountForTarget(targetId); @@ -176,16 +169,15 @@ class WatchChangeAggregator { final int targetId = entry.key; final TargetState targetState = entry.value; - final QueryData queryData = _queryDataForActiveTarget(targetId); - if (queryData != null) { - if (targetState.isCurrent && queryData.query.isDocumentQuery) { + final TargetData targetData = _queryDataForActiveTarget(targetId); + if (targetData != null) { + if (targetState.isCurrent && targetData.target.isDocumentQuery) { // Document queries for document that don't exist can produce an empty result set. To // update our local cache, we synthesize a document delete if we have not previously // received the document. This resolves the limbo state of the document, removing it from // [limboDocumentRefs]. - final DocumentKey key = DocumentKey.fromPath(queryData.query.path); - if (_pendingDocumentUpdates[key] == null && - !_targetContainsDocument(targetId, key)) { + final DocumentKey key = DocumentKey.fromPath(targetData.target.path); + if (_pendingDocumentUpdates[key] == null && !_targetContainsDocument(targetId, key)) { _removeDocumentFromTarget( targetId, key, @@ -209,17 +201,15 @@ class WatchChangeAggregator { // We extract the set of limbo-only document updates as the GC logic special-cases documents // that do not appear in the query cache. 
- for (MapEntry> entry - in _pendingDocumentTargetMapping.entries) { + for (MapEntry> entry in _pendingDocumentTargetMapping.entries) { final DocumentKey key = entry.key; final Set targets = entry.value; bool isOnlyLimboTarget = true; for (int targetId in targets) { - final QueryData queryData = _queryDataForActiveTarget(targetId); - if (queryData != null && - queryData.purpose != QueryPurpose.limboResolution) { + final TargetData targetData = _queryDataForActiveTarget(targetId); + if (targetData != null && targetData.purpose != QueryPurpose.limboResolution) { isOnlyLimboTarget = false; break; } @@ -234,8 +224,7 @@ class WatchChangeAggregator { snapshotVersion: snapshotVersion, targetChanges: Map.from(targetChanges), targetMismatches: Set.from(_pendingTargetResets), - documentUpdates: - Map.from(_pendingDocumentUpdates), + documentUpdates: Map.from(_pendingDocumentUpdates), resolvedLimboDocuments: Set.from(resolvedLimboDocuments), ); @@ -254,13 +243,11 @@ class WatchChangeAggregator { return; } - final DocumentViewChangeType changeType = - _targetContainsDocument(targetId, document.key) - ? DocumentViewChangeType.modified - : DocumentViewChangeType.added; + final DocumentViewChangeType changeType = _targetContainsDocument(targetId, document.key) + ? DocumentViewChangeType.modified + : DocumentViewChangeType.added; - final TargetState targetState = _ensureTargetState(targetId); - targetState.addDocumentChange(document.key, changeType); + _ensureTargetState(targetId).addDocumentChange(document.key, changeType); _pendingDocumentUpdates[document.key] = document; @@ -271,8 +258,7 @@ class WatchChangeAggregator { /// target, but the document's state is still known (e.g. we know that the document was deleted or /// we received the change that caused the filter mismatch), the new document can be provided to /// update the remote document cache. - void _removeDocumentFromTarget( - int targetId, DocumentKey key, MaybeDocument updatedDocument) { + void _removeDocumentFromTarget(int targetId, DocumentKey key, MaybeDocument updatedDocument) { if (!_isActiveTarget(targetId)) { return; } @@ -310,8 +296,7 @@ class WatchChangeAggregator { /// 'in-sync' with the client's active targets. void recordPendingTargetRequest(int targetId) { // For each request we get we need to record we need a response for it. - final TargetState targetState = _ensureTargetState(targetId); - targetState.recordPendingTargetRequest(); + _ensureTargetState(targetId).recordPendingTargetRequest(); } TargetState _ensureTargetState(int targetId) { @@ -329,33 +314,31 @@ class WatchChangeAggregator { return targetMapping; } - /// Verifies that the user is still interested in this target (by calling [getQueryDataForTarget]) + /// Verifies that the user is still interested in this target (by calling [getTargetDataForTarget]) /// and that we are not waiting for pending ADDs from watch. bool _isActiveTarget(int targetId) { return _queryDataForActiveTarget(targetId) != null; } - /// Returns the [QueryData] for an active target (i.e. a target that the user is still interested + /// Returns the [TargetData] for an active target (i.e. a target that the user is still interested /// in that has no outstanding target change requests). - QueryData _queryDataForActiveTarget(int targetId) { + TargetData _queryDataForActiveTarget(int targetId) { final TargetState targetState = _targetStates[targetId]; return targetState != null && targetState.isPending() ? 
null - : _targetMetadataProvider.getQueryDataForTarget(targetId); + : _targetMetadataProvider.getTargetDataForTarget(targetId); } /// Resets the state of a [Watch] target to its initial state (e.g. sets [current] to false, /// clears the resume token and removes its target mapping from all documents). void _resetTarget(int targetId) { hardAssert( - _targetStates[targetId] != null && !_targetStates[targetId].isPending(), - 'Should only reset active targets'); + _targetStates[targetId] != null && !_targetStates[targetId].isPending(), 'Should only reset active targets'); _targetStates[targetId] = TargetState(); // Trigger removal for any documents currently mapped to this target. These removals will be // part of the initial snapshot if [Watch] does not resend these documents. - final ImmutableSortedSet existingKeys = - _targetMetadataProvider.getRemoteKeysForTarget(targetId); + final ImmutableSortedSet existingKeys = _targetMetadataProvider.getRemoteKeysForTarget(targetId); for (DocumentKey key in existingKeys) { _removeDocumentFromTarget(targetId, key, null); } @@ -363,8 +346,7 @@ class WatchChangeAggregator { /// Returns whether the LocalStore considers the document to be part of the specified target. bool _targetContainsDocument(int targetId, DocumentKey key) { - final ImmutableSortedSet existingKeys = - _targetMetadataProvider.getRemoteKeysForTarget(targetId); + final ImmutableSortedSet existingKeys = _targetMetadataProvider.getRemoteKeysForTarget(targetId); return existingKeys.contains(key); } } @@ -373,15 +355,14 @@ class WatchChangeAggregator { class TargetMetadataProvider { const TargetMetadataProvider({ @required this.getRemoteKeysForTarget, - @required this.getQueryDataForTarget, + @required this.getTargetDataForTarget, }); /// Returns the set of remote document keys for the given target id as of the last raised snapshot /// or an empty set of document keys for unknown targets. - final ImmutableSortedSet Function(int targetId) - getRemoteKeysForTarget; + final ImmutableSortedSet Function(int targetId) getRemoteKeysForTarget; - /// Returns the [QueryData] for an active target id or 'null' if this query is unknown or has + /// Returns the [TargetData] for an active target id or 'null' if this query is unknown or has /// become inactive. 
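// [Editor's note: illustrative sketch, not part of this patch.] A toy version of the
// pending-request bookkeeping behind the "active target" rule above: every listen or
// unlisten request raises an outstanding-response count, every acknowledgement from
// Watch lowers it, and a target only counts as active (so TargetData lookups succeed)
// once the count is back to zero. The decrement method name is hypothetical.
class ToyTargetState {
  int _outstandingResponses = 0;

  bool get isPending => _outstandingResponses != 0;

  void recordPendingTargetRequest() => _outstandingResponses++;

  void recordTargetResponse() => _outstandingResponses--;
}

bool isActiveTargetSketch(ToyTargetState state, bool hasTargetData) =>
    hasTargetData && !state.isPending;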
- final QueryData Function(int targetId) getQueryDataForTarget; + final TargetData Function(int targetId) getTargetDataForTarget; } diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/watch_stream.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/watch_stream.dart new file mode 100644 index 00000000..b519c4b0 --- /dev/null +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/watch_stream.dart @@ -0,0 +1,102 @@ +// File created by +// Lung Razvan +// on 21/09/2018 + +import 'dart:async'; +import 'dart:typed_data'; + +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/target_data.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/snapshot_version.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/remote/abstract_stream.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/remote/firestore_channel.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/remote/remote_serializer.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/remote/stream.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/remote/watch_change.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/util/assert.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/util/async_task.dart'; +import 'package:cloud_firestore_vm/src/proto/index.dart' as proto; +import 'package:grpc/grpc.dart'; +import 'package:meta/meta.dart'; +import 'package:protobuf/protobuf.dart'; + +/// A Stream that implements the [StreamingWatch] RPC. +/// +/// Once the [WatchStream] has started, any number of [watchQuery] and [unwatchTargetId] calls can +/// be sent to control what changes will be sent from the server for [WatchChanges]. +/// +/// @see firestore.proto +class WatchStream extends AbstractStream { + WatchStream(FirestoreChannel channel, AsyncQueue workerQueue, this.serializer, WatchStreamCallback listener) + : super( + channel, + ClientMethod( + 'firestore.googleapis.com/google.firestore.v1.Firestore/Listen', + (GeneratedMessage req) => req.writeToBuffer(), + (List res) => proto.ListenResponse.fromBuffer(res), + ), + workerQueue, + TimerId.listenStreamConnectionBackoff, + TimerId.listenStreamIdle, + listener, + ); + + /// The empty stream token. + static final Uint8List emptyResumeToken = Uint8List.fromList([]); + + final RemoteSerializer serializer; + + /// Registers interest in the results of the given query. If the query includes a [resumeToken] it + /// will be included in the request. Results that affect the query will be streamed back as + /// [WatchChange] messages that reference the [targetId] included in query. + void watchQuery(TargetData targetData) { + hardAssert(isOpen, 'Watching queries requires an open stream'); + final proto.ListenRequest request = proto.ListenRequest.create() + ..database = serializer.databaseName + ..addTarget = serializer.encodeTarget(targetData); + + final Map labels = serializer.encodeListenRequestLabels(targetData); + if (labels != null) { + request.labels.addAll(labels); + } + + writeRequest(request..freeze()); + } + + /// Unregisters interest in the results of the query associated with the given target id. 
+ void unwatchTarget(int targetId) { + hardAssert(isOpen, 'Unwatching targets requires an open stream'); + + final proto.ListenRequest request = proto.ListenRequest.create() + ..database = serializer.databaseName + ..removeTarget = targetId + ..freeze(); + + writeRequest(request); + } + + @override + Future onNext(proto.ListenResponse change) async { + // A successful response means the stream is healthy + backoff.reset(); + + final WatchChange watchChange = serializer.decodeWatchChange(change); + final SnapshotVersion snapshotVersion = serializer.decodeVersionFromListenResponse(change); + + await listener.onWatchChange(snapshotVersion, watchChange); + } +} + +typedef OnWatchChange = Future Function(SnapshotVersion snapshotVersion, WatchChange watchChange); + +/// A callback interface for the set of events that can be emitted by the [WatchStream] +class WatchStreamCallback extends StreamCallback { + const WatchStreamCallback({ + @required Task onOpen, + @required OnClose onClose, + @required this.onWatchChange, + }) : super(onOpen: onOpen, onClose: onClose); + + /// A new change from the watch stream. Snapshot version will be non-null if it was set. + final OnWatchChange onWatchChange; +} diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/write_stream.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/write_stream.dart new file mode 100644 index 00000000..f6b5c0b0 --- /dev/null +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/remote/write_stream.dart @@ -0,0 +1,157 @@ +// File created by +// Lung Razvan +// on 21/09/2018 + +import 'dart:async'; +import 'dart:typed_data'; + +import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/mutation.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/mutation_result.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/model/snapshot_version.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/remote/abstract_stream.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/remote/firestore_channel.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/remote/remote_serializer.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/remote/stream.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/util/assert.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/util/async_task.dart'; +import 'package:cloud_firestore_vm/src/proto/google/firestore/v1/firestore.pb.dart'; +import 'package:cloud_firestore_vm/src/proto/google/firestore/v1/index.dart'; +import 'package:cloud_firestore_vm/src/proto/google/firestore/v1/write.pb.dart'; +import 'package:grpc/grpc.dart'; +import 'package:meta/meta.dart'; +import 'package:protobuf/protobuf.dart'; + +/// A Stream that implements the StreamingWrite RPC. +/// +/// The StreamingWrite RPC requires the caller to maintain special streamToken state in between calls, to help the +/// server understand which responses the client has processed by the time the next request is made. Every response may +/// contain a streamToken; this value must be passed to the next request. +/// +/// After calling [start] on this stream, the next request must be a handshake, containing whatever streamToken is on +/// hand. Once a response to this request is received, all pending mutations may be submitted. When submitting multiple +/// batches of mutations at the same time, it's okay to use the same streamToken for the calls to [writeMutations]. 
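// [Editor's note: illustrative sketch, not part of this patch.] The ordering rule
// described above, collapsed into a toy class: after start() the handshake must be the
// first request, its response supplies the stream token, and only then may mutations be
// sent. Real request/response plumbing, async callbacks and protos are omitted, and the
// names are hypothetical.
class ToyWriteStream {
  bool _open = false;
  bool _handshakeComplete = false;
  List<int> _streamToken = <int>[0];

  void start() {
    _open = true;
  }

  void writeHandshake(List<int> tokenFromResponse) {
    assert(_open && !_handshakeComplete, 'the handshake must be the first request');
    _streamToken = tokenFromResponse;
    _handshakeComplete = true;
  }

  void writeMutations(List<String> mutations) {
    assert(_handshakeComplete, 'mutations may only be sent after the handshake');
    print('sending ${mutations.length} writes with stream token $_streamToken');
  }
}

void toyWriteStreamDemo() {
  ToyWriteStream()
    ..start()
    ..writeHandshake(<int>[42])
    ..writeMutations(<String>['set users/alice', 'delete users/bob']);
}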
+/// +/// @see firestore.proto +class WriteStream extends AbstractStream { + WriteStream(FirestoreChannel channel, AsyncQueue workerQueue, this._serializer, WriteStreamCallback listener) + : super( + channel, + ClientMethod( + 'firestore.googleapis.com/google.firestore.v1.Firestore/Write', + (GeneratedMessage req) => req.writeToBuffer(), + (List res) => WriteResponse.fromBuffer(res), + ), + workerQueue, + TimerId.writeStreamConnectionBackoff, + TimerId.writeStreamIdle, + listener, + ); + + /// The empty stream token. + static final Uint8List emptyStreamToken = Uint8List.fromList([0]); + + final RemoteSerializer _serializer; + + /// Contains last received stream token from the server, used to acknowledge which responses the client has processed. + /// Stream tokens are opaque checkpoint markers whose only real value is their inclusion in the next request. + /// + /// WriteStream implementations manage propagating this value from responses to the next request. + /// + /// NOTE: A null streamToken is not allowed: use the empty array for the unset value. + Uint8List lastStreamToken = emptyStreamToken; + + @visibleForTesting + bool handshakeComplete = false; + + @override + Future start() async { + handshakeComplete = false; + await super.start(); + } + + @override + void tearDown() { + if (handshakeComplete) { + // Send an empty write request to the backend to indicate imminent stream closure. This allows the backend to + // clean up resources. + writeMutations([]); + } + } + + /// Tracks whether or not a handshake has been successfully exchanged and the stream is ready to accept mutations. + bool get isHandshakeComplete => handshakeComplete; + + /// Sends an initial streamToken to the server, performing the handshake required to make the StreamingWrite RPC work. + /// Subsequent [writeMutations] calls should wait until a response has been delivered to + /// [WriteStreamCallback.onHandshakeComplete]. + Future writeHandshake() async { + hardAssert(isOpen, 'Writing handshake requires an opened stream'); + hardAssert(!handshakeComplete, 'Handshake already completed'); + // TODO(long1eu): Support stream resumption. We intentionally do not set the stream token on the handshake, + // ignoring any stream token we might have. + final WriteRequest request = WriteRequest.create()..database = _serializer.databaseName; + + writeRequest(request..freeze()); + } + + /// Sends a list of mutations to the Firestore backend to apply + void writeMutations(List mutations) { + hardAssert(isOpen, 'Writing mutations requires an opened stream'); + hardAssert(handshakeComplete, 'Handshake must be complete before writing mutations'); + final WriteRequest request = WriteRequest.create()..streamToken = lastStreamToken; + + for (Mutation mutation in mutations) { + request.writes.add(_serializer.encodeMutation(mutation)); + } + + writeRequest(request..freeze()); + } + + @override + Future onNext(WriteResponse change) async { + lastStreamToken = Uint8List.fromList(change.streamToken); + + if (!handshakeComplete) { + // The first response is the handshake response + handshakeComplete = true; + + await listener.onHandshakeComplete(); + } else { + // A successful first write response means the stream is healthy. + // + // Note, that we could consider a successful handshake healthy, however, the write itself might be causing an + // error we want to back off from. 
+ backoff.reset(); + + final SnapshotVersion commitVersion = _serializer.decodeVersion(change.commitTime); + + final int count = change.writeResults.length; + final List results = List(count); + for (int i = 0; i < count; i++) { + final WriteResult result = change.writeResults[i]; + results[i] = _serializer.decodeMutationResult(result, commitVersion); + } + + await listener.onWriteResponse(commitVersion, results); + } + } +} + +typedef OnWriteResponse = Future Function(SnapshotVersion commitVersion, List mutationResults); + +/// A callback interface for the set of events that can be emitted by the [WriteStream] +class WriteStreamCallback extends StreamCallback { + const WriteStreamCallback({ + @required Task onOpen, + @required OnClose onClose, + @required this.onHandshakeComplete, + @required this.onWriteResponse, + }) : super(onOpen: onOpen, onClose: onClose); + + /// The handshake for this write stream has completed + final Task onHandshakeComplete; + + /// Response for the last write. + final OnWriteResponse onWriteResponse; +} diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/server_timestamp_behavior.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/server_timestamp_behavior.dart index 1589bd54..c981d83f 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/server_timestamp_behavior.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/server_timestamp_behavior.dart @@ -2,7 +2,6 @@ // Lung Razvan // on 17/09/2018 - /// Controls the return value for server timestamps that have not yet been set to their final value. enum ServerTimestampBehavior { /// Return 'null' for [FieldValue.serverTimestampServerTimestamps] that have not yet been set to diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/transaction.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/transaction.dart index 865d2d3c..d3cbc426 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/transaction.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/transaction.dart @@ -52,8 +52,8 @@ class Transaction { checkNotNull(data, 'Provided data must not be null.'); checkNotNull(options, 'Provided options must not be null.'); final UserDataParsedSetData parsed = options.merge - ? _firestore.dataConverter.parseMergeData(data, options.fieldMask) - : _firestore.dataConverter.parseSetData(data); + ? _firestore.userDataReader.parseMergeData(data, options.fieldMask) + : _firestore.userDataReader.parseSetData(data); _transaction.set(documentRef.key, parsed); return this; } @@ -67,7 +67,7 @@ class Transaction { /// /// Return this [Transaction] instance. Used for chaining method calls. Transaction updateFromList(DocumentReference documentRef, List data) { - final UserDataParsedUpdateData parsedData = _firestore.dataConverter + final UserDataParsedUpdateData parsedData = _firestore.userDataReader .parseUpdateDataFromList(collectUpdateArguments(1, data)); return _update(documentRef, parsedData); } @@ -82,7 +82,7 @@ class Transaction { /// Return this [Transaction] instance. Used for chaining method calls. 
Transaction update(DocumentReference documentRef, Map data) { final UserDataParsedUpdateData parsedData = - _firestore.dataConverter.parseUpdateData(data); + _firestore.userDataReader.parseUpdateData(data); return _update(documentRef, parsedData); } diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/util/async_task.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/util/async_task.dart new file mode 100644 index 00000000..9cd00616 --- /dev/null +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/util/async_task.dart @@ -0,0 +1,405 @@ +// File created by +// Lung Razvan +// on 18/09/2018 + +import 'dart:async'; +import 'dart:collection'; + +import 'package:cloud_firestore_vm/src/firebase/firestore/core/online_state.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/core/version.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/remote/online_state_tracker.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/util/assert.dart'; +import 'package:meta/meta.dart'; + +/// Well-known 'timer' IDs used when scheduling delayed tasks on the AsyncQueue. These IDs can then +/// be used from tests to check for the presence of tasks or to run them early. +class TimerId implements Comparable { + const TimerId._(this._i); + + final int _i; + + /// ALL can be used with runDelayedTasksUntil() to run all timers. + static const TimerId all = TimerId._(0); + + /// The following 4 timers are used with the listen and write streams. The IDLE timer is used to + /// close the stream due to inactivity. The CONNECTION_BACKOFF timer is used to restart a stream + /// once the appropriate backoff delay has elapsed. + static const TimerId listenStreamIdle = TimerId._(1); + static const TimerId listenStreamConnectionBackoff = TimerId._(2); + static const TimerId writeStreamIdle = TimerId._(3); + static const TimerId writeStreamConnectionBackoff = TimerId._(4); + + /// A timer used in [OnlineStateTracker] to transition from [OnlineState.unknown] to + /// [OnlineState.offline] after a set timeout, rather than waiting indefinitely for success or + /// failure. + static const TimerId onlineStateTimeout = TimerId._(5); + + /// A timer used to periodically attempt LRU Garbage collection + static const TimerId garbageCollection = TimerId._(6); + + /// A timer used to retry transactions. Since there can be multiple concurrent transactions, + /// multiple of these may be in the queue at a given time. + static const TimerId retryTransaction = TimerId._(7); + + /// A timer used to monitor when a connection attempt in gRPC is unsuccessful and retry + /// accordingly. 
+ static const TimerId connectivityAttemptTimer = TimerId._(8); + + @override + int compareTo(TimerId other) => _i.compareTo(other._i); + + bool operator >(TimerId other) => _i > other._i; + + bool operator >=(TimerId other) => _i >= other._i; + + bool operator <(TimerId other) => _i < other._i; + + bool operator <=(TimerId other) => _i <= other._i; + + @override + bool operator ==(Object other) => + identical(this, other) || other is TimerId && runtimeType == other.runtimeType && _i == other._i; + + @override + int get hashCode => _i.hashCode; + + static const List _values = [ + 'all', + 'listenStreamIdle', + 'listenStreamConnectionBackoff', + 'writeStreamIdle', + 'writeStreamConnectionBackoff', + 'onlineStateTimeout', + 'garbageCollection', + ]; + + @override + String toString() => _values[_i]; +} + +typedef Task = Future Function(); + +class _TaskQueueEntry { + _TaskQueueEntry(this.function) : completer = Completer(); + + Task function; + Completer completer; +} + +class TaskQueue { + final Queue<_TaskQueueEntry> _tasks = Queue<_TaskQueueEntry>(); + final List> _delayedTasks = >[]; + + Completer _recentActiveCompleter; + + Future _enqueue(Task function) async { + final _TaskQueueEntry taskEntry = _TaskQueueEntry(function); + + final bool listWasEmpty = _tasks.isEmpty; + _tasks.add(taskEntry); + + // Only run the just added task in case the queue hasn't been used yet or the last task has been + // executed + if (_recentActiveCompleter == null || _recentActiveCompleter.isCompleted && listWasEmpty) { + _runNext(); + } + + return taskEntry.completer.future; + } + + void _runNext() { + if (_tasks.isNotEmpty) { + final _TaskQueueEntry taskEntry = _tasks.first; + _recentActiveCompleter = taskEntry.completer; + + taskEntry.function().then((dynamic value) { + Future(() { + _tasks.removeFirst(); + _runNext(); + }); + taskEntry.completer.complete(value); + }).catchError((dynamic error, StackTrace s) { + Future(() { + _tasks.removeFirst(); + _runNext(); + }); + + taskEntry.completer.completeError(error, s); + }); + } + } + + bool _isShuttingDown = false; + + bool get isShuttingDown => _isShuttingDown; + + void execute(Task task) { + if (!_isShuttingDown) { + _enqueue(task); + } + } + + /// Initiate the shutdown process. + Future _executeAndInitiateShutdown(Task task) async { + if (isShuttingDown) { + return; + } + + final Future future = _executeAndReportResult(task); + // Mark the initiation of shut down. + _isShuttingDown = true; + return future; + } + + /// The [Task] will not be run if we started shutting down already. + /// + /// Returns a future that completes when the requested [Task] completes, or + /// throws an error when the [Task] runs into exceptions. + Future _executeAndReportResult(Task task) { + final Completer completer = Completer(); + execute(() => task() + .then(completer.complete) // + .catchError(completer.completeError)); + return completer.future; + } + + /// Execute the command, regardless if shutdown has been initiated. + void executeEvenAfterShutdown(Task task) { + _enqueue(task); + } + + /// Schedule a task after the specified delay. + /// + /// The returned [DelayedTask] can be used to cancel the task prior to its running. 
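// [Editor's note: illustrative sketch, not part of this patch.] A minimal serial queue
// with the same ordering guarantee the TaskQueue above provides: each task starts only
// after every previously enqueued task has completed, and the returned future carries
// that task's own result or error. It relies only on Completer/Future from dart:async,
// which this file already imports; the names are hypothetical.
class SerialQueueSketch {
  Future<void> _tail = Future<void>.value();

  Future<T> enqueue<T>(Future<T> Function() task) {
    final Completer<T> completer = Completer<T>();
    _tail = _tail.then((_) async {
      try {
        completer.complete(await task());
      } catch (error, stackTrace) {
        completer.completeError(error, stackTrace);
      }
    });
    return completer.future;
  }
}

Future<void> serialQueueDemo() async {
  final SerialQueueSketch queue = SerialQueueSketch();
  final Future<int> first = queue.enqueue(() async => 1);
  final Future<int> second = queue.enqueue(() async => 2);
  print(await second); // prints 2, but only after the first task has completed
  print(await first); // prints 1
}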
+ DelayedTask schedule(Task task, Duration delay) { + final DelayedTask delayedTask = DelayedTask._( + targetTimeMs: DateTime.now().add(delay), + task: task, + enqueue: _executeAndReportResult, + removeDelayedTask: _delayedTasks.remove, + ); + _delayedTasks.add(delayedTask); + return delayedTask; + } + + void shutdownNow() { + _isShuttingDown = true; + for (final DelayedTask task in _delayedTasks) { + task.cancel(); + } + _tasks.clear(); + } +} + +/// A helper class that allows to schedule/queue [Function]s on a single queue. +class AsyncQueue { + AsyncQueue(this._name); + + // ignore: unused_field + final String _name; + + // Tasks scheduled to be queued in the future. Tasks are automatically removed after they are run + // or canceled. + // + // NOTE: We disallow duplicates currently, so this could be a Set which might have better + // theoretical removal speed, except this list will always be small so List is fine. + final List> _delayedTasks = >[]; + final TaskQueue _taskQueue = TaskQueue(); + + // List of TimerIds to fast-forward delays for. + final List _timerIdsToSkip = []; + + /// Immediately stops running any scheduled tasks and causes a 'panic' (through crashing the app). + /// + /// Should only be used for unrecoverable exceptions. + static void panic(dynamic t) { + if (t is OutOfMemoryError) { + // OOMs can happen if developers try to load too much data at once. + // Instead of treating this as an internal error, give a hint that this + // might be due to excessive queries in Firestore. + throw t; + } else { + throw StateError('Internal error in Cloud Firestore (${Version.sdkVersion}). $t'); + } + } + + /// Schedules a task and returns a [Future] which will complete when the task has been finished. + /// + /// The task will be append to the queue and run after every task added before has been executed. + Future enqueue(Task task) { + return _taskQueue._executeAndReportResult(task); + } + + /// Queue a [Task] and immediately mark the initiation of shutdown process. Tasks queued after + /// this method is called are not run unless they explicitly are requested via + /// [enqueueAndForgetEvenAfterShutdown]. + Future enqueueAndInitiateShutdown(Task task) { + return _taskQueue._executeAndInitiateShutdown(task); + } + + /// Queue and run this [Task] immediately after every other already queued task, regardless + /// if shutdown has been initiated. + void enqueueAndForgetEvenAfterShutdown(Task task) { + _taskQueue.executeEvenAfterShutdown(task); + } + + /// Has the shutdown process been initiated. + bool get isShuttingDown => _taskQueue.isShuttingDown; + + /// Queue and run this Runnable task immediately after every other already queued task. Unlike [enqueue], returns void + /// instead of a Future for use when we have no need to 'wait' on the task completing. + void enqueueAndForget(Task task) => enqueue(task); + + /// Schedule a task after the specified delay. + /// + /// The returned [DelayedTask] can be used to cancel the task prior to its running. + DelayedTask enqueueAfterDelay(TimerId timerId, Duration delay, Task task) { + // Fast-forward delays for timerIds that have been overridden. + if (_timerIdsToSkip.contains(timerId)) { + delay = Duration.zero; + } + + final DelayedTask delayedTask = _createAndScheduleDelayedTask(timerId, delay, task); + _delayedTasks.add(delayedTask); + + return delayedTask; + } + + /// For Tests: Skip all subsequent delays for a timer id. 
+ @visibleForTesting + void skipDelaysForTimerId(TimerId timerId) { + _timerIdsToSkip.add(timerId); + } + + /// Determines if a delayed task with a particular timerId exists. */ + @visibleForTesting + bool containsDelayedTask(TimerId timerId) { + for (DelayedTask delayedTask in _delayedTasks) { + if (delayedTask.timerId == timerId) { + return true; + } + } + return false; + } + + /// Runs some or all delayed tasks early, blocking until completion. [lastTimerId] Only delayed tasks up to and + /// including one that was scheduled using this [TimerId] will be run. Method throws if no matching task exists. + /// Pass [TimerId.all] to run all delayed tasks. + @visibleForTesting + Future runDelayedTasksUntil(TimerId lastTimerId) async { + hardAssert(lastTimerId == TimerId.all || containsDelayedTask(lastTimerId), + 'Attempted to run tasks until missing TimerId: $lastTimerId'); + + // NOTE: For performance we could store the tasks sorted, but [runDelayedTasksUntil] is only called from tests, and + // the size is guaranteed to be small since we don't allow duplicate TimerIds. + _delayedTasks.sort(); + + // We copy the list before enumerating to avoid concurrent modification as we remove tasks + final List> result = >[]; + + for (DelayedTask task in _delayedTasks.toList()) { + task.cancel(); + result.add(task); + if (lastTimerId != TimerId.all && task.timerId == lastTimerId) { + break; + } + } + + await Future.wait(result.map((DelayedTask it) => enqueue(it.task))); + } + + /// Creates and returns a DelayedTask that has been scheduled to be executed on the provided queue after the provided + /// delay. + DelayedTask _createAndScheduleDelayedTask(TimerId timerId, Duration delay, Task task) { + return DelayedTask._( + timerId: timerId, + targetTimeMs: DateTime.now().add(delay), + task: task, + enqueue: enqueue, + removeDelayedTask: _removeDelayedTask, + ); + } + + /// Called by DelayedTask to remove itself from our list of pending delayed tasks. + void _removeDelayedTask(DelayedTask task) { + final bool found = _delayedTasks.remove(task); + hardAssert(found, 'Delayed task not found.'); + } +} + +/// Represents a Task scheduled to be run in the future on an AsyncQueue. Supports cancellation. +class DelayedTask implements Comparable> { + DelayedTask._({ + this.timerId, + @required this.targetTimeMs, + @required this.task, + @required this.enqueue, + this.removeDelayedTask, + }) : assert(task != null), + assert(targetTimeMs != null) + // assert(timerId != null) | both are null when used by the [TaskQueue.schedule] + // assert(removeDelayedTask != null), | + { + scheduledFuture = Timer(targetTimeMs.difference(DateTime.now()), _handleDelayElapsed); + } + + final TimerId timerId; + final DateTime targetTimeMs; + final Task task; + final Future Function(Task function) enqueue; + final void Function(DelayedTask task) removeDelayedTask; + + // It is set to null after the task has been run or canceled. + Timer scheduledFuture; + + /// Cancels the task if it hasn't already been executed or canceled. + /// + /// As long as the task has not yet been run, calling [cancel()] (from a task already running on + /// the AsyncQueue) provides a guarantee that the task will not be run. + void cancel() { + if (scheduledFuture != null) { + _markDone(); + } + } + + Future _handleDelayElapsed() async { + if (scheduledFuture != null) { + _markDone(); + return enqueue(task); + } + } + + /// Marks this delayed task as done, notifying the AsyncQueue that it should be removed. 
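// [Editor's note: illustrative usage sketch, not part of this patch.] How a test might
// combine the well-known timer ids with the AsyncQueue defined above: the delayed task
// is registered under a TimerId and then run immediately through runDelayedTasksUntil
// instead of waiting out the delay. The callback body is hypothetical.
Future<void> fastForwardExample(AsyncQueue queue) async {
  queue.enqueueAfterDelay(TimerId.onlineStateTimeout, const Duration(seconds: 10),
      () async => print('falling back to OnlineState.offline'));

  // In production the task would fire after ten seconds; a test can run it right away.
  await queue.runDelayedTasksUntil(TimerId.onlineStateTimeout);
}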
+ void _markDone() { + hardAssert(scheduledFuture != null, 'Caller should have verified scheduledFuture is non-null.'); + scheduledFuture.cancel(); + scheduledFuture = null; + removeDelayedTask?.call(this); + } + + @override + int compareTo(DelayedTask other) { + return targetTimeMs.compareTo(other.targetTimeMs); + } + + @override + bool operator ==(Object other) => + identical(this, other) || + other is DelayedTask && + runtimeType == other.runtimeType && + timerId == other.timerId && + targetTimeMs == other.targetTimeMs && + task == other.task && + enqueue == other.enqueue && + removeDelayedTask == other.removeDelayedTask && + scheduledFuture == other.scheduledFuture; + + @override + int get hashCode => + timerId.hashCode ^ + targetTimeMs.hashCode ^ + task.hashCode ^ + enqueue.hashCode ^ + removeDelayedTask.hashCode ^ + scheduledFuture.hashCode; +} diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/util/exponential_backoff.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/util/exponential_backoff.dart index 05910a04..3d2b03db 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/util/exponential_backoff.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/util/exponential_backoff.dart @@ -5,7 +5,7 @@ import 'dart:math'; import 'package:_firebase_internal_vm/_firebase_internal_vm.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/util/timer_task.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/util/async_task.dart'; /// Helper for running delayed tasks following an exponential backoff curve between attempts using the [backoffFactor] /// to determine the extended base delay after each attempt. @@ -18,35 +18,45 @@ import 'package:cloud_firestore_vm/src/firebase/firestore/util/timer_task.dart'; /// as little as 0.5 * [initialDelay] and as much as 1.5 * [maxDelay]. After [maxDelay] is reached no further backoff /// is performed. class ExponentialBackoff { - // Initial backoff set to 1s according to https://cloud.google.com/apis/design/errors. ExponentialBackoff( - TaskScheduler scheduler, - TaskId taskId, { - Duration initialDelay = const Duration(seconds: 1), - double backoffFactor = 1.5, - Duration maxDelay = const Duration(minutes: 1), - }) : assert(scheduler != null), - assert(taskId != null), + AsyncQueue asyncQueue, + TimerId timerId, { + Duration initialDelay = _kDefaultBackoffInitialDelay, + double backoffFactor = _kDefaultBackoffFactor, + Duration maxDelay = _kDefaultBackoffMaxDelay, + }) : assert(asyncQueue != null), + assert(timerId != null), assert(initialDelay != null), assert(backoffFactor != null), assert(maxDelay != null), - _scheduler = scheduler, - _taskId = taskId, + _asyncQueue = asyncQueue, + _timerId = timerId, _initialDelay = initialDelay, _backoffFactor = backoffFactor, _maxDelay = maxDelay, + _nextMaxDelay = maxDelay, _lastAttemptTime = DateTime.now(), _currentBase = Duration.zero; - final TaskScheduler _scheduler; - final TaskId _taskId; + /// Initial backoff time in milliseconds after an error. Set to 1s according to + /// https://cloud.google.com/apis/design/errors. 
+ static const Duration _kDefaultBackoffInitialDelay = Duration(seconds: 1); + static const double _kDefaultBackoffFactor = 1.5; + static const Duration _kDefaultBackoffMaxDelay = Duration(seconds: 60); + + final AsyncQueue _asyncQueue; + final TimerId _timerId; final Duration _initialDelay; final double _backoffFactor; final Duration _maxDelay; + /// The maximum backoff time used when calculating the next backoff. This value can be changed for + /// a single backoffAndRun call, after which it resets to maxDelayMs. + Duration _nextMaxDelay; + Duration _currentBase; DateTime _lastAttemptTime; - TimerTask _timerTask; + DelayedTask _timerTask; /// Resets the backoff delay. /// @@ -57,6 +67,12 @@ class ExponentialBackoff { /// Resets the backoff delay to the maximum delay (e.g. for use after a RESOURCE_EXHAUSTED error). void resetToMax() => _currentBase = _maxDelay; + /// Set the backoff's maximum delay for only the next call to backoffAndRun, after which the delay + /// will be reset to maxDelayMs. + set temporaryMaxDelay(Duration newMax) { + _nextMaxDelay = newMax; + } + /// Waits for [currentDelayMs], increases the delay and runs the specified task. If there was a pending backoff task /// waiting to run already, it will be canceled. void backoffAndRun(Function task) { @@ -69,13 +85,11 @@ class ExponentialBackoff { // Guard against lastAttemptTime being in the future due to a clock change. final Duration difference = DateTime.now().difference(_lastAttemptTime); - final Duration delaySoFar = - difference < Duration.zero ? Duration.zero : difference; + final Duration delaySoFar = difference < Duration.zero ? Duration.zero : difference; // Guard against the backoff delay already being past. final Duration remaining = desiredDelayWithJitter - delaySoFar; - final Duration remainingDelay = - remaining < Duration.zero ? Duration.zero : remaining; + final Duration remainingDelay = remaining < Duration.zero ? Duration.zero : remaining; if (_currentBase > Duration.zero) { Log.d( @@ -84,12 +98,12 @@ class ExponentialBackoff { 'last attempt: $delaySoFar ago)'); } - _timerTask = _scheduler.add( - _taskId, + _timerTask = _asyncQueue.enqueueAfterDelay( + _timerId, remainingDelay, - () { + () async { _lastAttemptTime = DateTime.now(); - task(); + await task(); }, ); @@ -97,9 +111,12 @@ class ExponentialBackoff { _currentBase = _currentBase * _backoffFactor; if (_currentBase < _initialDelay) { _currentBase = _initialDelay; - } else if (_currentBase > _maxDelay) { - _currentBase = _maxDelay; + } else if (_currentBase > _nextMaxDelay) { + _currentBase = _nextMaxDelay; } + + // Reset max delay to the default. 
+ _nextMaxDelay = _maxDelay; } void cancel() { @@ -112,7 +129,6 @@ class ExponentialBackoff { /// Returns a random value in the range [-currentBaseMs/2, currentBaseMs/2] Duration _jitterDelay() { final double value = Random().nextDouble() - 0.5; - return Duration( - milliseconds: (value * _currentBase.inMilliseconds).toInt()); + return Duration(milliseconds: (value * _currentBase.inMilliseconds).toInt()); } } diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/util/timer_task.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/util/timer_task.dart deleted file mode 100644 index 7c9c9194..00000000 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/util/timer_task.dart +++ /dev/null @@ -1,179 +0,0 @@ -// File created by -// Lung Razvan -// on 14/03/2020 - -import 'dart:async'; - -import 'package:_firebase_internal_vm/_firebase_internal_vm.dart'; -import 'package:meta/meta.dart'; - -typedef TaskFunction = FutureOr Function(); - -class TaskScheduler { - TaskScheduler(this._name) : _tasks = {}; - - final Map _tasks; - final String _name; - - int _index = 0; - - TimerTask add(TaskId id, Duration delay, TaskFunction function) { - _log('Adding task $id after $delay'); - if (_tasks.containsKey(id)) { - throw ArgumentError('The is already a task with this name'); - } - - final TimerTask task = TimerTask._( - index: ++_index, - id: id, - function: function, - timer: Timer(delay, () => _tasks[id]._execute()), - scheduler: this, - ); - - _log('Task add with index $_index'); - return _tasks[id] = task; - } - - TimerTask getTask(TaskId id) { - return _tasks[id]; - } - - void runUntil(TaskId id) { - _log('Run until $id'); - (_tasks.values.toList()..sort()) - .takeWhile((TimerTask value) => value._id == id) - .toList() - .forEach((TimerTask element) { - element._timer.cancel(); - element._execute(); - }); - } - - void _remove(TaskId id) { - _tasks.remove(id); - } - - void _log(String s) { - Log.d('$runtimeType${_name == null || _name.isEmpty ? '' : '-$_name'}', s); - } -} - -class TimerTask implements Comparable { - TimerTask._({ - @required int index, - @required TaskId id, - @required Timer timer, - @required TaskFunction function, - @required TaskScheduler scheduler, - }) : assert(index != null), - assert(id != null), - assert(timer != null), - assert(function != null), - assert(scheduler != null), - _index = index, - _id = id, - _timer = timer, - _function = function, - _scheduler = scheduler; - - final int _index; - final TaskId _id; - final Timer _timer; - final TaskFunction _function; - final TaskScheduler _scheduler; - - void cancel() { - _log('Canceling task $_id'); - _scheduler._remove(_id); - _timer.cancel(); - } - - void _execute() { - _log('Executing task $_id'); - _scheduler._remove(_id); - _function(); - } - - void _log(String s) { - Log.d('$runtimeType', '${_scheduler._name}:$_id - $s'); - } - - @override - int compareTo(TimerTask other) => _index.compareTo(other._index); - - @override - bool operator ==(Object other) => - identical(this, other) || - other is TimerTask && - runtimeType == other.runtimeType && - _id == other._id; - - @override - int get hashCode => _id.hashCode; - - @override - String toString() { - return (ToStringHelper(TimerTask) // - ..add('index', _index) - ..add('name', _id)) - .toString(); - } -} - -/// Well-known 'timer' IDs used when scheduling delayed tasks on the -/// [TaskScheduler]. These IDs can then be used from tests to check for the -/// presence of tasks or to run them early. 
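// [Editor's note: illustrative sketch, not part of this patch.] The delay sequence
// produced by the backoff rules described above: the base delay grows by backoffFactor
// per attempt, is clamped to [initialDelay, maxDelay], and each attempt is jittered by
// up to +/- half of the current base, so a delay ranges from 0.5x to 1.5x of the base.
// Only dart:math (already imported by this file) is needed; the fixed seed and the
// attempt count are arbitrary and exist just to make the sketch reproducible.
List<Duration> backoffDelays({
  int attempts = 6,
  Duration initialDelay = const Duration(seconds: 1),
  double backoffFactor = 1.5,
  Duration maxDelay = const Duration(seconds: 60),
}) {
  final Random random = Random(42);
  final List<Duration> delays = <Duration>[];
  // The base starts at zero, so the very first attempt is immediate.
  Duration base = Duration.zero;
  for (int i = 0; i < attempts; i++) {
    final double jitter = random.nextDouble() - 0.5;
    delays.add(base + Duration(milliseconds: (jitter * base.inMilliseconds).toInt()));

    base = base * backoffFactor;
    if (base < initialDelay) {
      base = initialDelay;
    } else if (base > maxDelay) {
      base = maxDelay;
    }
  }
  return delays;
}

void main() => backoffDelays().forEach(print);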
-class TaskId implements Comparable { - const TaskId._(this._i); - - final int _i; - - /// ALL can be used with [TaskScheduler.runUntil] to run all timers. - static const TaskId all = TaskId._(0); - - static const TaskId listenStreamIdle = TaskId._(1); - static const TaskId listenStreamConnectionBackoff = TaskId._(2); - static const TaskId writeStreamIdle = TaskId._(3); - static const TaskId writeStreamConnectionBackoff = TaskId._(4); - - /// A timer used in [OnlineStateTracker] to transition from - /// [OnlineState.unknown] to [OnlineState.offline] after a set timeout, rather - /// than waiting indefinitely for success or failure. - static const TaskId onlineStateTimeout = TaskId._(5); - - /// A timer used to periodically attempt LRU Garbage collection - static const TaskId garbageCollection = TaskId._(6); - - @override - int compareTo(TaskId other) => _i.compareTo(other._i); - - bool operator >(TaskId other) => _i > other._i; - - bool operator >=(TaskId other) => _i >= other._i; - - bool operator <(TaskId other) => _i < other._i; - - bool operator <=(TaskId other) => _i <= other._i; - - @override - bool operator ==(Object other) => - identical(this, other) || - other is TaskId && runtimeType == other.runtimeType && _i == other._i; - - @override - int get hashCode => _i.hashCode; - - static const List _values = [ - 'all', - 'listenStreamIdle', - 'listenStreamConnectionBackoff', - 'writeStreamIdle', - 'writeStreamConnectionBackoff', - 'onlineStateTimeout', - 'garbageCollection', - ]; - - @override - String toString() => _values[_i]; -} diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/util/util.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/util/util.dart index 176f2981..84afeca1 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/util/util.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/util/util.dart @@ -9,10 +9,12 @@ import 'package:cloud_firestore_vm/src/firebase/firestore/field_path.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/firestore_error.dart'; import 'package:grpc/grpc.dart'; +const int kMaxInt = 9223372036854775807; +const int kMinInt = -9223372036854775808; + const int _autoIdLength = 20; -const String _autoIdAlphabet = - 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789'; +const String _autoIdAlphabet = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789'; final Random rand = Random(); @@ -71,8 +73,7 @@ Future voidErrorTransformer(Future Function() operation) async { if (e is FirestoreError) { rethrow; } else { - return Future.error( - FirestoreError('$e', FirestoreErrorCode.unknown), s); + return Future.error(FirestoreError('$e', FirestoreErrorCode.unknown), s); } } } @@ -82,19 +83,16 @@ Future voidErrorTransformer(Future Function() operation) async { /// /// [fieldPathOffset] is the offset of the first field path in the original update API (used as the /// index in error messages) -List collectUpdateArguments( - int fieldPathOffset, List fieldsAndValues) { +List collectUpdateArguments(int fieldPathOffset, List fieldsAndValues) { if (fieldsAndValues.length % 2 == 1) { - throw ArgumentError( - 'Missing value in call to update(). There must be an even number of ' + throw ArgumentError('Missing value in call to update(). 
There must be an even number of ' 'arguments that alternate between field names and values'); } final List argumentList = fieldsAndValues.toList(growable: false); for (int i = 0; i < argumentList.length; i += 2) { final Object fieldPath = argumentList[i]; if (fieldPath is! String && fieldPath is! FieldPath) { - throw ArgumentError( - 'Excepted field name at argument position ${i + fieldPathOffset + 1} but ' + throw ArgumentError('Expected field name at argument position ${i + fieldPathOffset + 1} but ' 'got $fieldPath in call to update. The arguments to update should alternate between ' 'field names and values'); } @@ -102,3 +100,19 @@ List collectUpdateArguments( return argumentList; } + +int compareBytes(List b1, List b2) { + final int size = min(b1.length, b2.length); + for (int i = 0; i < size; i++) { + // Make sure the bytes are unsigned + final int thisByte = b1[i] & 0xff; + final int otherByte = b2[i] & 0xff; + if (thisByte < otherByte) { + return -1; + } else if (thisByte > otherByte) { + return 1; + } + // Byte values are equal, continue with comparison + } + return b1.length.compareTo(b2.length); +} diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/write_batch.dart b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/write_batch.dart index 6610b7f8..663ad740 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/write_batch.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/firebase/firestore/write_batch.dart @@ -53,8 +53,8 @@ class WriteBatch { checkNotNull(data, 'Provided data must not be null.'); _verifyNotCommitted(); final UserDataParsedSetData parsed = options.merge - ? _firestore.dataConverter.parseMergeData(data, options.fieldMask) - : _firestore.dataConverter.parseSetData(data); + ? _firestore.userDataReader.parseMergeData(data, options.fieldMask) + : _firestore.userDataReader.parseSetData(data); _mutations .addAll(parsed.toMutationList(documentRef.key, Precondition.none)); return this; @@ -70,7 +70,7 @@ class WriteBatch { /// /// Returns this [WriteBatch] instance. Used for chaining method calls. WriteBatch updateFromList(DocumentReference documentRef, List data) { - final UserDataParsedUpdateData parsedData = _firestore.dataConverter + final UserDataParsedUpdateData parsedData = _firestore.userDataReader .parseUpdateDataFromList(collectUpdateArguments(1, data)); _firestore.validateReference(documentRef); @@ -90,7 +90,7 @@ class WriteBatch { /// Returns this [WriteBatch] instance. Used for chaining method calls.
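As context for the userDataReader rename above: updateFromList forwards its raw argument list through collectUpdateArguments (util.dart above) with an offset of 1, since the DocumentReference is the first argument of the public update call and field names are therefore reported starting at position 1 in error messages. The helper requires the list to alternate field names and values, so an odd-length list, or a name slot that is neither a String nor a FieldPath, throws an ArgumentError. A small illustrative sketch of that shape (values are placeholders, not taken from this patch):

    // Assumes access to collectUpdateArguments from the util.dart shown above.
    void main() {
      // Alternating name/value pairs, as collectUpdateArguments expects.
      final List<dynamic> fieldsAndValues = <dynamic>[
        'name', 'Beijing',      // String field name, then its value
        'population', 21540000, // next name/value pair
      ];

      // Offset 1: the DocumentReference occupies argument 0 of update().
      // Returns the validated list; throws ArgumentError for an odd-length
      // list or when a name slot is neither a String nor a FieldPath.
      final List args = collectUpdateArguments(1, fieldsAndValues);
      print(args.length); // 4
    }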
WriteBatch update(DocumentReference documentRef, Map data) { final UserDataParsedUpdateData parsedData = - _firestore.dataConverter.parseUpdateData(data); + _firestore.userDataReader.parseUpdateData(data); _firestore.validateReference(documentRef); _verifyNotCommitted(); diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/index.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/index.dart index 8b137891..e69de29b 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/index.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/index.dart @@ -1 +0,0 @@ - diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/proto/bundle.pb.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/proto/bundle.pb.dart new file mode 100644 index 00000000..2acd257d --- /dev/null +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/proto/bundle.pb.dart @@ -0,0 +1,495 @@ +/// +// Generated code. Do not modify. +// source: firebase/firestore/proto/bundle.proto +// +// @dart = 2.7 +// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields + +import 'dart:core' as $core; + +import 'package:fixnum/fixnum.dart' as $fixnum; +import 'package:protobuf/protobuf.dart' as $pb; + +import '../../../google/firestore/v1/query.pb.dart' as $8; +import '../../../google/protobuf/timestamp.pb.dart' as $4; +import '../../../google/firestore/v1/document.pb.dart' as $1; + +import 'bundle.pbenum.dart'; + +export 'bundle.pbenum.dart'; + +enum BundledQuery_QueryType { + structuredQuery, + notSet +} + +class BundledQuery extends $pb.GeneratedMessage { + static const $core.Map<$core.int, BundledQuery_QueryType> _BundledQuery_QueryTypeByTag = { + 2 : BundledQuery_QueryType.structuredQuery, + 0 : BundledQuery_QueryType.notSet + }; + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'BundledQuery', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'firestore'), createEmptyInstance: create) + ..oo(0, [2]) + ..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'parent') + ..aOM<$8.StructuredQuery>(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'structuredQuery', subBuilder: $8.StructuredQuery.create) + ..e(3, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? 
'' : 'limitType', $pb.PbFieldType.OE, defaultOrMaker: BundledQuery_LimitType.FIRST, valueOf: BundledQuery_LimitType.valueOf, enumValues: BundledQuery_LimitType.values) + ..hasRequiredFields = false + ; + + BundledQuery._() : super(); + factory BundledQuery({ + $core.String parent, + $8.StructuredQuery structuredQuery, + BundledQuery_LimitType limitType, + }) { + final _result = create(); + if (parent != null) { + _result.parent = parent; + } + if (structuredQuery != null) { + _result.structuredQuery = structuredQuery; + } + if (limitType != null) { + _result.limitType = limitType; + } + return _result; + } + factory BundledQuery.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); + factory BundledQuery.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') + BundledQuery clone() => BundledQuery()..mergeFromMessage(this); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + BundledQuery copyWith(void Function(BundledQuery) updates) => super.copyWith((message) => updates(message as BundledQuery)); // ignore: deprecated_member_use + $pb.BuilderInfo get info_ => _i; + @$core.pragma('dart2js:noInline') + static BundledQuery create() => BundledQuery._(); + BundledQuery createEmptyInstance() => create(); + static $pb.PbList createRepeated() => $pb.PbList(); + @$core.pragma('dart2js:noInline') + static BundledQuery getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor(create); + static BundledQuery _defaultInstance; + + BundledQuery_QueryType whichQueryType() => _BundledQuery_QueryTypeByTag[$_whichOneof(0)]; + void clearQueryType() => clearField($_whichOneof(0)); + + @$pb.TagNumber(1) + $core.String get parent => $_getSZ(0); + @$pb.TagNumber(1) + set parent($core.String v) { $_setString(0, v); } + @$pb.TagNumber(1) + $core.bool hasParent() => $_has(0); + @$pb.TagNumber(1) + void clearParent() => clearField(1); + + @$pb.TagNumber(2) + $8.StructuredQuery get structuredQuery => $_getN(1); + @$pb.TagNumber(2) + set structuredQuery($8.StructuredQuery v) { setField(2, v); } + @$pb.TagNumber(2) + $core.bool hasStructuredQuery() => $_has(1); + @$pb.TagNumber(2) + void clearStructuredQuery() => clearField(2); + @$pb.TagNumber(2) + $8.StructuredQuery ensureStructuredQuery() => $_ensure(1); + + @$pb.TagNumber(3) + BundledQuery_LimitType get limitType => $_getN(2); + @$pb.TagNumber(3) + set limitType(BundledQuery_LimitType v) { setField(3, v); } + @$pb.TagNumber(3) + $core.bool hasLimitType() => $_has(2); + @$pb.TagNumber(3) + void clearLimitType() => clearField(3); +} + +class NamedQuery extends $pb.GeneratedMessage { + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'NamedQuery', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'firestore'), createEmptyInstance: create) + ..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'name') + ..aOM(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? 
'' : 'bundledQuery', subBuilder: BundledQuery.create) + ..aOM<$4.Timestamp>(3, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'readTime', subBuilder: $4.Timestamp.create) + ..hasRequiredFields = false + ; + + NamedQuery._() : super(); + factory NamedQuery({ + $core.String name, + BundledQuery bundledQuery, + $4.Timestamp readTime, + }) { + final _result = create(); + if (name != null) { + _result.name = name; + } + if (bundledQuery != null) { + _result.bundledQuery = bundledQuery; + } + if (readTime != null) { + _result.readTime = readTime; + } + return _result; + } + factory NamedQuery.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); + factory NamedQuery.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') + NamedQuery clone() => NamedQuery()..mergeFromMessage(this); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + NamedQuery copyWith(void Function(NamedQuery) updates) => super.copyWith((message) => updates(message as NamedQuery)); // ignore: deprecated_member_use + $pb.BuilderInfo get info_ => _i; + @$core.pragma('dart2js:noInline') + static NamedQuery create() => NamedQuery._(); + NamedQuery createEmptyInstance() => create(); + static $pb.PbList createRepeated() => $pb.PbList(); + @$core.pragma('dart2js:noInline') + static NamedQuery getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor(create); + static NamedQuery _defaultInstance; + + @$pb.TagNumber(1) + $core.String get name => $_getSZ(0); + @$pb.TagNumber(1) + set name($core.String v) { $_setString(0, v); } + @$pb.TagNumber(1) + $core.bool hasName() => $_has(0); + @$pb.TagNumber(1) + void clearName() => clearField(1); + + @$pb.TagNumber(2) + BundledQuery get bundledQuery => $_getN(1); + @$pb.TagNumber(2) + set bundledQuery(BundledQuery v) { setField(2, v); } + @$pb.TagNumber(2) + $core.bool hasBundledQuery() => $_has(1); + @$pb.TagNumber(2) + void clearBundledQuery() => clearField(2); + @$pb.TagNumber(2) + BundledQuery ensureBundledQuery() => $_ensure(1); + + @$pb.TagNumber(3) + $4.Timestamp get readTime => $_getN(2); + @$pb.TagNumber(3) + set readTime($4.Timestamp v) { setField(3, v); } + @$pb.TagNumber(3) + $core.bool hasReadTime() => $_has(2); + @$pb.TagNumber(3) + void clearReadTime() => clearField(3); + @$pb.TagNumber(3) + $4.Timestamp ensureReadTime() => $_ensure(2); +} + +class BundledDocumentMetadata extends $pb.GeneratedMessage { + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'BundledDocumentMetadata', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'firestore'), createEmptyInstance: create) + ..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'name') + ..aOM<$4.Timestamp>(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'readTime', subBuilder: $4.Timestamp.create) + ..aOB(3, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'exists') + ..pPS(4, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? 
'' : 'queries') + ..hasRequiredFields = false + ; + + BundledDocumentMetadata._() : super(); + factory BundledDocumentMetadata({ + $core.String name, + $4.Timestamp readTime, + $core.bool exists, + $core.Iterable<$core.String> queries, + }) { + final _result = create(); + if (name != null) { + _result.name = name; + } + if (readTime != null) { + _result.readTime = readTime; + } + if (exists != null) { + _result.exists = exists; + } + if (queries != null) { + _result.queries.addAll(queries); + } + return _result; + } + factory BundledDocumentMetadata.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); + factory BundledDocumentMetadata.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') + BundledDocumentMetadata clone() => BundledDocumentMetadata()..mergeFromMessage(this); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + BundledDocumentMetadata copyWith(void Function(BundledDocumentMetadata) updates) => super.copyWith((message) => updates(message as BundledDocumentMetadata)); // ignore: deprecated_member_use + $pb.BuilderInfo get info_ => _i; + @$core.pragma('dart2js:noInline') + static BundledDocumentMetadata create() => BundledDocumentMetadata._(); + BundledDocumentMetadata createEmptyInstance() => create(); + static $pb.PbList createRepeated() => $pb.PbList(); + @$core.pragma('dart2js:noInline') + static BundledDocumentMetadata getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor(create); + static BundledDocumentMetadata _defaultInstance; + + @$pb.TagNumber(1) + $core.String get name => $_getSZ(0); + @$pb.TagNumber(1) + set name($core.String v) { $_setString(0, v); } + @$pb.TagNumber(1) + $core.bool hasName() => $_has(0); + @$pb.TagNumber(1) + void clearName() => clearField(1); + + @$pb.TagNumber(2) + $4.Timestamp get readTime => $_getN(1); + @$pb.TagNumber(2) + set readTime($4.Timestamp v) { setField(2, v); } + @$pb.TagNumber(2) + $core.bool hasReadTime() => $_has(1); + @$pb.TagNumber(2) + void clearReadTime() => clearField(2); + @$pb.TagNumber(2) + $4.Timestamp ensureReadTime() => $_ensure(1); + + @$pb.TagNumber(3) + $core.bool get exists => $_getBF(2); + @$pb.TagNumber(3) + set exists($core.bool v) { $_setBool(2, v); } + @$pb.TagNumber(3) + $core.bool hasExists() => $_has(2); + @$pb.TagNumber(3) + void clearExists() => clearField(3); + + @$pb.TagNumber(4) + $core.List<$core.String> get queries => $_getList(3); +} + +class BundleMetadata extends $pb.GeneratedMessage { + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'BundleMetadata', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'firestore'), createEmptyInstance: create) + ..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'id') + ..aOM<$4.Timestamp>(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'createTime', subBuilder: $4.Timestamp.create) + ..a<$core.int>(3, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? 
'' : 'version', $pb.PbFieldType.OU3) + ..a<$core.int>(4, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'totalDocuments', $pb.PbFieldType.OU3) + ..a<$fixnum.Int64>(5, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'totalBytes', $pb.PbFieldType.OU6, defaultOrMaker: $fixnum.Int64.ZERO) + ..hasRequiredFields = false + ; + + BundleMetadata._() : super(); + factory BundleMetadata({ + $core.String id, + $4.Timestamp createTime, + $core.int version, + $core.int totalDocuments, + $fixnum.Int64 totalBytes, + }) { + final _result = create(); + if (id != null) { + _result.id = id; + } + if (createTime != null) { + _result.createTime = createTime; + } + if (version != null) { + _result.version = version; + } + if (totalDocuments != null) { + _result.totalDocuments = totalDocuments; + } + if (totalBytes != null) { + _result.totalBytes = totalBytes; + } + return _result; + } + factory BundleMetadata.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); + factory BundleMetadata.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') + BundleMetadata clone() => BundleMetadata()..mergeFromMessage(this); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + BundleMetadata copyWith(void Function(BundleMetadata) updates) => super.copyWith((message) => updates(message as BundleMetadata)); // ignore: deprecated_member_use + $pb.BuilderInfo get info_ => _i; + @$core.pragma('dart2js:noInline') + static BundleMetadata create() => BundleMetadata._(); + BundleMetadata createEmptyInstance() => create(); + static $pb.PbList createRepeated() => $pb.PbList(); + @$core.pragma('dart2js:noInline') + static BundleMetadata getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor(create); + static BundleMetadata _defaultInstance; + + @$pb.TagNumber(1) + $core.String get id => $_getSZ(0); + @$pb.TagNumber(1) + set id($core.String v) { $_setString(0, v); } + @$pb.TagNumber(1) + $core.bool hasId() => $_has(0); + @$pb.TagNumber(1) + void clearId() => clearField(1); + + @$pb.TagNumber(2) + $4.Timestamp get createTime => $_getN(1); + @$pb.TagNumber(2) + set createTime($4.Timestamp v) { setField(2, v); } + @$pb.TagNumber(2) + $core.bool hasCreateTime() => $_has(1); + @$pb.TagNumber(2) + void clearCreateTime() => clearField(2); + @$pb.TagNumber(2) + $4.Timestamp ensureCreateTime() => $_ensure(1); + + @$pb.TagNumber(3) + $core.int get version => $_getIZ(2); + @$pb.TagNumber(3) + set version($core.int v) { $_setUnsignedInt32(2, v); } + @$pb.TagNumber(3) + $core.bool hasVersion() => $_has(2); + @$pb.TagNumber(3) + void clearVersion() => clearField(3); + + @$pb.TagNumber(4) + $core.int get totalDocuments => $_getIZ(3); + @$pb.TagNumber(4) + set totalDocuments($core.int v) { $_setUnsignedInt32(3, v); } + @$pb.TagNumber(4) + $core.bool hasTotalDocuments() => $_has(3); + @$pb.TagNumber(4) + void clearTotalDocuments() => clearField(4); + + @$pb.TagNumber(5) + $fixnum.Int64 get totalBytes => $_getI64(4); + @$pb.TagNumber(5) + set totalBytes($fixnum.Int64 v) { $_setInt64(4, v); } + @$pb.TagNumber(5) + $core.bool hasTotalBytes() => 
$_has(4); + @$pb.TagNumber(5) + void clearTotalBytes() => clearField(5); +} + +enum BundleElement_ElementType { + metadata, + namedQuery, + documentMetadata, + document, + notSet +} + +class BundleElement extends $pb.GeneratedMessage { + static const $core.Map<$core.int, BundleElement_ElementType> _BundleElement_ElementTypeByTag = { + 1 : BundleElement_ElementType.metadata, + 2 : BundleElement_ElementType.namedQuery, + 3 : BundleElement_ElementType.documentMetadata, + 4 : BundleElement_ElementType.document, + 0 : BundleElement_ElementType.notSet + }; + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'BundleElement', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'firestore'), createEmptyInstance: create) + ..oo(0, [1, 2, 3, 4]) + ..aOM(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'metadata', subBuilder: BundleMetadata.create) + ..aOM(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'namedQuery', subBuilder: NamedQuery.create) + ..aOM(3, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'documentMetadata', subBuilder: BundledDocumentMetadata.create) + ..aOM<$1.Document>(4, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'document', subBuilder: $1.Document.create) + ..hasRequiredFields = false + ; + + BundleElement._() : super(); + factory BundleElement({ + BundleMetadata metadata, + NamedQuery namedQuery, + BundledDocumentMetadata documentMetadata, + $1.Document document, + }) { + final _result = create(); + if (metadata != null) { + _result.metadata = metadata; + } + if (namedQuery != null) { + _result.namedQuery = namedQuery; + } + if (documentMetadata != null) { + _result.documentMetadata = documentMetadata; + } + if (document != null) { + _result.document = document; + } + return _result; + } + factory BundleElement.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); + factory BundleElement.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') + BundleElement clone() => BundleElement()..mergeFromMessage(this); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. 
' + 'Will be removed in next major version') + BundleElement copyWith(void Function(BundleElement) updates) => super.copyWith((message) => updates(message as BundleElement)); // ignore: deprecated_member_use + $pb.BuilderInfo get info_ => _i; + @$core.pragma('dart2js:noInline') + static BundleElement create() => BundleElement._(); + BundleElement createEmptyInstance() => create(); + static $pb.PbList createRepeated() => $pb.PbList(); + @$core.pragma('dart2js:noInline') + static BundleElement getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor(create); + static BundleElement _defaultInstance; + + BundleElement_ElementType whichElementType() => _BundleElement_ElementTypeByTag[$_whichOneof(0)]; + void clearElementType() => clearField($_whichOneof(0)); + + @$pb.TagNumber(1) + BundleMetadata get metadata => $_getN(0); + @$pb.TagNumber(1) + set metadata(BundleMetadata v) { setField(1, v); } + @$pb.TagNumber(1) + $core.bool hasMetadata() => $_has(0); + @$pb.TagNumber(1) + void clearMetadata() => clearField(1); + @$pb.TagNumber(1) + BundleMetadata ensureMetadata() => $_ensure(0); + + @$pb.TagNumber(2) + NamedQuery get namedQuery => $_getN(1); + @$pb.TagNumber(2) + set namedQuery(NamedQuery v) { setField(2, v); } + @$pb.TagNumber(2) + $core.bool hasNamedQuery() => $_has(1); + @$pb.TagNumber(2) + void clearNamedQuery() => clearField(2); + @$pb.TagNumber(2) + NamedQuery ensureNamedQuery() => $_ensure(1); + + @$pb.TagNumber(3) + BundledDocumentMetadata get documentMetadata => $_getN(2); + @$pb.TagNumber(3) + set documentMetadata(BundledDocumentMetadata v) { setField(3, v); } + @$pb.TagNumber(3) + $core.bool hasDocumentMetadata() => $_has(2); + @$pb.TagNumber(3) + void clearDocumentMetadata() => clearField(3); + @$pb.TagNumber(3) + BundledDocumentMetadata ensureDocumentMetadata() => $_ensure(2); + + @$pb.TagNumber(4) + $1.Document get document => $_getN(3); + @$pb.TagNumber(4) + set document($1.Document v) { setField(4, v); } + @$pb.TagNumber(4) + $core.bool hasDocument() => $_has(3); + @$pb.TagNumber(4) + void clearDocument() => clearField(4); + @$pb.TagNumber(4) + $1.Document ensureDocument() => $_ensure(3); +} + diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/proto/bundle.pbenum.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/proto/bundle.pbenum.dart new file mode 100644 index 00000000..60ab6fd0 --- /dev/null +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/proto/bundle.pbenum.dart @@ -0,0 +1,26 @@ +/// +// Generated code. Do not modify. +// source: firebase/firestore/proto/bundle.proto +// +// @dart = 2.7 +// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields + +// ignore_for_file: UNDEFINED_SHOWN_NAME +import 'dart:core' as $core; +import 'package:protobuf/protobuf.dart' as $pb; + +class BundledQuery_LimitType extends $pb.ProtobufEnum { + static const BundledQuery_LimitType FIRST = BundledQuery_LimitType._(0, const $core.bool.fromEnvironment('protobuf.omit_enum_names') ? '' : 'FIRST'); + static const BundledQuery_LimitType LAST = BundledQuery_LimitType._(1, const $core.bool.fromEnvironment('protobuf.omit_enum_names') ? 
'' : 'LAST'); + + static const $core.List values = [ + FIRST, + LAST, + ]; + + static final $core.Map<$core.int, BundledQuery_LimitType> _byValue = $pb.ProtobufEnum.initByValue(values); + static BundledQuery_LimitType valueOf($core.int value) => _byValue[value]; + + const BundledQuery_LimitType._($core.int v, $core.String n) : super(v, n); +} + diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/proto/bundle.pbjson.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/proto/bundle.pbjson.dart new file mode 100644 index 00000000..e3d39023 --- /dev/null +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/proto/bundle.pbjson.dart @@ -0,0 +1,71 @@ +/// +// Generated code. Do not modify. +// source: firebase/firestore/proto/bundle.proto +// +// @dart = 2.7 +// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields + +const BundledQuery$json = const { + '1': 'BundledQuery', + '2': const [ + const {'1': 'parent', '3': 1, '4': 1, '5': 9, '10': 'parent'}, + const {'1': 'structured_query', '3': 2, '4': 1, '5': 11, '6': '.google.firestore.v1.StructuredQuery', '9': 0, '10': 'structuredQuery'}, + const {'1': 'limit_type', '3': 3, '4': 1, '5': 14, '6': '.firestore.BundledQuery.LimitType', '10': 'limitType'}, + ], + '4': const [BundledQuery_LimitType$json], + '8': const [ + const {'1': 'query_type'}, + ], +}; + +const BundledQuery_LimitType$json = const { + '1': 'LimitType', + '2': const [ + const {'1': 'FIRST', '2': 0}, + const {'1': 'LAST', '2': 1}, + ], +}; + +const NamedQuery$json = const { + '1': 'NamedQuery', + '2': const [ + const {'1': 'name', '3': 1, '4': 1, '5': 9, '10': 'name'}, + const {'1': 'bundled_query', '3': 2, '4': 1, '5': 11, '6': '.firestore.BundledQuery', '10': 'bundledQuery'}, + const {'1': 'read_time', '3': 3, '4': 1, '5': 11, '6': '.google.protobuf.Timestamp', '10': 'readTime'}, + ], +}; + +const BundledDocumentMetadata$json = const { + '1': 'BundledDocumentMetadata', + '2': const [ + const {'1': 'name', '3': 1, '4': 1, '5': 9, '10': 'name'}, + const {'1': 'read_time', '3': 2, '4': 1, '5': 11, '6': '.google.protobuf.Timestamp', '10': 'readTime'}, + const {'1': 'exists', '3': 3, '4': 1, '5': 8, '10': 'exists'}, + const {'1': 'queries', '3': 4, '4': 3, '5': 9, '10': 'queries'}, + ], +}; + +const BundleMetadata$json = const { + '1': 'BundleMetadata', + '2': const [ + const {'1': 'id', '3': 1, '4': 1, '5': 9, '10': 'id'}, + const {'1': 'create_time', '3': 2, '4': 1, '5': 11, '6': '.google.protobuf.Timestamp', '10': 'createTime'}, + const {'1': 'version', '3': 3, '4': 1, '5': 13, '10': 'version'}, + const {'1': 'total_documents', '3': 4, '4': 1, '5': 13, '10': 'totalDocuments'}, + const {'1': 'total_bytes', '3': 5, '4': 1, '5': 4, '10': 'totalBytes'}, + ], +}; + +const BundleElement$json = const { + '1': 'BundleElement', + '2': const [ + const {'1': 'metadata', '3': 1, '4': 1, '5': 11, '6': '.firestore.BundleMetadata', '9': 0, '10': 'metadata'}, + const {'1': 'named_query', '3': 2, '4': 1, '5': 11, '6': '.firestore.NamedQuery', '9': 0, '10': 'namedQuery'}, + const {'1': 'document_metadata', '3': 3, '4': 1, '5': 11, '6': '.firestore.BundledDocumentMetadata', '9': 0, '10': 'documentMetadata'}, + const {'1': 'document', '3': 4, '4': 1, '5': 11, '6': '.google.firestore.v1.Document', '9': 0, '10': 'document'}, + ], + '8': const [ + const {'1': 'element_type'}, + ], 
+}; + diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/proto/index.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/proto/index.dart index c1421c8b..9bf495db 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/proto/index.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/proto/index.dart @@ -1,3 +1,6 @@ +export 'bundle.pb.dart'; +export 'bundle.pbenum.dart'; +export 'bundle.pbjson.dart'; export 'maybe_document.pb.dart'; export 'maybe_document.pbenum.dart'; export 'maybe_document.pbjson.dart'; diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/proto/maybe_document.pb.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/proto/maybe_document.pb.dart index 8d8f83b6..bfbf684c 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/proto/maybe_document.pb.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/proto/maybe_document.pb.dart @@ -2,8 +2,8 @@ // Generated code. Do not modify. // source: firebase/firestore/proto/maybe_document.proto // -// @dart = 2.3 -// ignore_for_file: camel_case_types,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type +// @dart = 2.7 +// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields import 'dart:core' as $core; @@ -13,18 +13,38 @@ import '../../../google/protobuf/timestamp.pb.dart' as $4; import '../../../google/firestore/v1/document.pb.dart' as $1; class NoDocument extends $pb.GeneratedMessage { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('NoDocument', package: const $pb.PackageName('firestore.client'), createEmptyInstance: create) - ..aOS(1, 'name') - ..aOM<$4.Timestamp>(2, 'readTime', subBuilder: $4.Timestamp.create) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'NoDocument', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'firestore.client'), createEmptyInstance: create) + ..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'name') + ..aOM<$4.Timestamp>(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'readTime', subBuilder: $4.Timestamp.create) ..hasRequiredFields = false ; NoDocument._() : super(); - factory NoDocument() => create(); + factory NoDocument({ + $core.String name, + $4.Timestamp readTime, + }) { + final _result = create(); + if (name != null) { + _result.name = name; + } + if (readTime != null) { + _result.readTime = readTime; + } + return _result; + } factory NoDocument.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory NoDocument.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. 
' + 'Will be removed in next major version') NoDocument clone() => NoDocument()..mergeFromMessage(this); - NoDocument copyWith(void Function(NoDocument) updates) => super.copyWith((message) => updates(message as NoDocument)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + NoDocument copyWith(void Function(NoDocument) updates) => super.copyWith((message) => updates(message as NoDocument)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static NoDocument create() => NoDocument._(); @@ -56,18 +76,38 @@ class NoDocument extends $pb.GeneratedMessage { } class UnknownDocument extends $pb.GeneratedMessage { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('UnknownDocument', package: const $pb.PackageName('firestore.client'), createEmptyInstance: create) - ..aOS(1, 'name') - ..aOM<$4.Timestamp>(2, 'version', subBuilder: $4.Timestamp.create) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'UnknownDocument', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'firestore.client'), createEmptyInstance: create) + ..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'name') + ..aOM<$4.Timestamp>(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'version', subBuilder: $4.Timestamp.create) ..hasRequiredFields = false ; UnknownDocument._() : super(); - factory UnknownDocument() => create(); + factory UnknownDocument({ + $core.String name, + $4.Timestamp version, + }) { + final _result = create(); + if (name != null) { + _result.name = name; + } + if (version != null) { + _result.version = version; + } + return _result; + } factory UnknownDocument.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory UnknownDocument.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') UnknownDocument clone() => UnknownDocument()..mergeFromMessage(this); - UnknownDocument copyWith(void Function(UnknownDocument) updates) => super.copyWith((message) => updates(message as UnknownDocument)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + UnknownDocument copyWith(void Function(UnknownDocument) updates) => super.copyWith((message) => updates(message as UnknownDocument)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static UnknownDocument create() => UnknownDocument._(); @@ -112,21 +152,49 @@ class MaybeDocument extends $pb.GeneratedMessage { 3 : MaybeDocument_DocumentType.unknownDocument, 0 : MaybeDocument_DocumentType.notSet }; - static final $pb.BuilderInfo _i = $pb.BuilderInfo('MaybeDocument', package: const $pb.PackageName('firestore.client'), createEmptyInstance: create) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? 
'' : 'MaybeDocument', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'firestore.client'), createEmptyInstance: create) ..oo(0, [1, 2, 3]) - ..aOM(1, 'noDocument', subBuilder: NoDocument.create) - ..aOM<$1.Document>(2, 'document', subBuilder: $1.Document.create) - ..aOM(3, 'unknownDocument', subBuilder: UnknownDocument.create) - ..aOB(4, 'hasCommittedMutations') + ..aOM(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'noDocument', subBuilder: NoDocument.create) + ..aOM<$1.Document>(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'document', subBuilder: $1.Document.create) + ..aOM(3, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'unknownDocument', subBuilder: UnknownDocument.create) + ..aOB(4, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'hasCommittedMutations') ..hasRequiredFields = false ; MaybeDocument._() : super(); - factory MaybeDocument() => create(); + factory MaybeDocument({ + NoDocument noDocument, + $1.Document document, + UnknownDocument unknownDocument, + $core.bool hasCommittedMutations, + }) { + final _result = create(); + if (noDocument != null) { + _result.noDocument = noDocument; + } + if (document != null) { + _result.document = document; + } + if (unknownDocument != null) { + _result.unknownDocument = unknownDocument; + } + if (hasCommittedMutations != null) { + _result.hasCommittedMutations = hasCommittedMutations; + } + return _result; + } factory MaybeDocument.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory MaybeDocument.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') MaybeDocument clone() => MaybeDocument()..mergeFromMessage(this); - MaybeDocument copyWith(void Function(MaybeDocument) updates) => super.copyWith((message) => updates(message as MaybeDocument)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + MaybeDocument copyWith(void Function(MaybeDocument) updates) => super.copyWith((message) => updates(message as MaybeDocument)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static MaybeDocument create() => MaybeDocument._(); diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/proto/maybe_document.pbenum.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/proto/maybe_document.pbenum.dart index 5489fdac..6e0d1ba3 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/proto/maybe_document.pbenum.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/proto/maybe_document.pbenum.dart @@ -2,6 +2,6 @@ // Generated code. Do not modify. 
// source: firebase/firestore/proto/maybe_document.proto // -// @dart = 2.3 -// ignore_for_file: camel_case_types,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type +// @dart = 2.7 +// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/proto/maybe_document.pbjson.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/proto/maybe_document.pbjson.dart index 8569312b..4d7411d8 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/proto/maybe_document.pbjson.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/proto/maybe_document.pbjson.dart @@ -2,8 +2,8 @@ // Generated code. Do not modify. // source: firebase/firestore/proto/maybe_document.proto // -// @dart = 2.3 -// ignore_for_file: camel_case_types,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type +// @dart = 2.7 +// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields const NoDocument$json = const { '1': 'NoDocument', diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/proto/mutation.pb.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/proto/mutation.pb.dart index 3632667a..bd389623 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/proto/mutation.pb.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/proto/mutation.pb.dart @@ -2,29 +2,49 @@ // Generated code. Do not modify. // source: firebase/firestore/proto/mutation.proto // -// @dart = 2.3 -// ignore_for_file: camel_case_types,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type +// @dart = 2.7 +// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields import 'dart:core' as $core; import 'package:protobuf/protobuf.dart' as $pb; -import '../../../google/firestore/v1/write.pb.dart' as $8; +import '../../../google/firestore/v1/write.pb.dart' as $10; import '../../../google/protobuf/timestamp.pb.dart' as $4; class MutationQueue extends $pb.GeneratedMessage { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('MutationQueue', package: const $pb.PackageName('firestore.client'), createEmptyInstance: create) - ..a<$core.int>(1, 'lastAcknowledgedBatchId', $pb.PbFieldType.O3) - ..a<$core.List<$core.int>>(2, 'lastStreamToken', $pb.PbFieldType.OY) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'MutationQueue', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'firestore.client'), createEmptyInstance: create) + ..a<$core.int>(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'lastAcknowledgedBatchId', $pb.PbFieldType.O3) + ..a<$core.List<$core.int>>(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? 
'' : 'lastStreamToken', $pb.PbFieldType.OY) ..hasRequiredFields = false ; MutationQueue._() : super(); - factory MutationQueue() => create(); + factory MutationQueue({ + $core.int lastAcknowledgedBatchId, + $core.List<$core.int> lastStreamToken, + }) { + final _result = create(); + if (lastAcknowledgedBatchId != null) { + _result.lastAcknowledgedBatchId = lastAcknowledgedBatchId; + } + if (lastStreamToken != null) { + _result.lastStreamToken = lastStreamToken; + } + return _result; + } factory MutationQueue.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory MutationQueue.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') MutationQueue clone() => MutationQueue()..mergeFromMessage(this); - MutationQueue copyWith(void Function(MutationQueue) updates) => super.copyWith((message) => updates(message as MutationQueue)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + MutationQueue copyWith(void Function(MutationQueue) updates) => super.copyWith((message) => updates(message as MutationQueue)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static MutationQueue create() => MutationQueue._(); @@ -54,20 +74,48 @@ class MutationQueue extends $pb.GeneratedMessage { } class WriteBatch extends $pb.GeneratedMessage { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('WriteBatch', package: const $pb.PackageName('firestore.client'), createEmptyInstance: create) - ..a<$core.int>(1, 'batchId', $pb.PbFieldType.O3) - ..pc<$8.Write>(2, 'writes', $pb.PbFieldType.PM, subBuilder: $8.Write.create) - ..aOM<$4.Timestamp>(3, 'localWriteTime', subBuilder: $4.Timestamp.create) - ..pc<$8.Write>(4, 'baseWrites', $pb.PbFieldType.PM, subBuilder: $8.Write.create) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'WriteBatch', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'firestore.client'), createEmptyInstance: create) + ..a<$core.int>(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'batchId', $pb.PbFieldType.O3) + ..pc<$10.Write>(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'writes', $pb.PbFieldType.PM, subBuilder: $10.Write.create) + ..aOM<$4.Timestamp>(3, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'localWriteTime', subBuilder: $4.Timestamp.create) + ..pc<$10.Write>(4, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? 
'' : 'baseWrites', $pb.PbFieldType.PM, subBuilder: $10.Write.create) ..hasRequiredFields = false ; WriteBatch._() : super(); - factory WriteBatch() => create(); + factory WriteBatch({ + $core.int batchId, + $core.Iterable<$10.Write> writes, + $4.Timestamp localWriteTime, + $core.Iterable<$10.Write> baseWrites, + }) { + final _result = create(); + if (batchId != null) { + _result.batchId = batchId; + } + if (writes != null) { + _result.writes.addAll(writes); + } + if (localWriteTime != null) { + _result.localWriteTime = localWriteTime; + } + if (baseWrites != null) { + _result.baseWrites.addAll(baseWrites); + } + return _result; + } factory WriteBatch.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory WriteBatch.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') WriteBatch clone() => WriteBatch()..mergeFromMessage(this); - WriteBatch copyWith(void Function(WriteBatch) updates) => super.copyWith((message) => updates(message as WriteBatch)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + WriteBatch copyWith(void Function(WriteBatch) updates) => super.copyWith((message) => updates(message as WriteBatch)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static WriteBatch create() => WriteBatch._(); @@ -87,7 +135,7 @@ class WriteBatch extends $pb.GeneratedMessage { void clearBatchId() => clearField(1); @$pb.TagNumber(2) - $core.List<$8.Write> get writes => $_getList(1); + $core.List<$10.Write> get writes => $_getList(1); @$pb.TagNumber(3) $4.Timestamp get localWriteTime => $_getN(2); @@ -101,6 +149,6 @@ class WriteBatch extends $pb.GeneratedMessage { $4.Timestamp ensureLocalWriteTime() => $_ensure(2); @$pb.TagNumber(4) - $core.List<$8.Write> get baseWrites => $_getList(3); + $core.List<$10.Write> get baseWrites => $_getList(3); } diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/proto/mutation.pbenum.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/proto/mutation.pbenum.dart index 4ebdb11b..8f015f6a 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/proto/mutation.pbenum.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/proto/mutation.pbenum.dart @@ -2,6 +2,6 @@ // Generated code. Do not modify. 
// source: firebase/firestore/proto/mutation.proto // -// @dart = 2.3 -// ignore_for_file: camel_case_types,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type +// @dart = 2.7 +// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/proto/mutation.pbjson.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/proto/mutation.pbjson.dart index 2b0373dc..951f0b1c 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/proto/mutation.pbjson.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/proto/mutation.pbjson.dart @@ -2,8 +2,8 @@ // Generated code. Do not modify. // source: firebase/firestore/proto/mutation.proto // -// @dart = 2.3 -// ignore_for_file: camel_case_types,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type +// @dart = 2.7 +// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields const MutationQueue$json = const { '1': 'MutationQueue', diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/proto/target.pb.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/proto/target.pb.dart index 2bfcb65d..e684c6b6 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/proto/target.pb.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/proto/target.pb.dart @@ -2,8 +2,8 @@ // Generated code. Do not modify. // source: firebase/firestore/proto/target.proto // -// @dart = 2.3 -// ignore_for_file: camel_case_types,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type +// @dart = 2.7 +// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields import 'dart:core' as $core; @@ -25,23 +25,64 @@ class Target extends $pb.GeneratedMessage { 6 : Target_TargetType.documents, 0 : Target_TargetType.notSet }; - static final $pb.BuilderInfo _i = $pb.BuilderInfo('Target', package: const $pb.PackageName('firestore.client'), createEmptyInstance: create) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'Target', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'firestore.client'), createEmptyInstance: create) ..oo(0, [5, 6]) - ..a<$core.int>(1, 'targetId', $pb.PbFieldType.O3) - ..aOM<$4.Timestamp>(2, 'snapshotVersion', subBuilder: $4.Timestamp.create) - ..a<$core.List<$core.int>>(3, 'resumeToken', $pb.PbFieldType.OY) - ..aInt64(4, 'lastListenSequenceNumber') - ..aOM<$0.Target_QueryTarget>(5, 'query', subBuilder: $0.Target_QueryTarget.create) - ..aOM<$0.Target_DocumentsTarget>(6, 'documents', subBuilder: $0.Target_DocumentsTarget.create) + ..a<$core.int>(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? 
'' : 'targetId', $pb.PbFieldType.O3) + ..aOM<$4.Timestamp>(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'snapshotVersion', subBuilder: $4.Timestamp.create) + ..a<$core.List<$core.int>>(3, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'resumeToken', $pb.PbFieldType.OY) + ..aInt64(4, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'lastListenSequenceNumber') + ..aOM<$0.Target_QueryTarget>(5, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'query', subBuilder: $0.Target_QueryTarget.create) + ..aOM<$0.Target_DocumentsTarget>(6, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'documents', subBuilder: $0.Target_DocumentsTarget.create) + ..aOM<$4.Timestamp>(7, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'lastLimboFreeSnapshotVersion', subBuilder: $4.Timestamp.create) ..hasRequiredFields = false ; Target._() : super(); - factory Target() => create(); + factory Target({ + $core.int targetId, + $4.Timestamp snapshotVersion, + $core.List<$core.int> resumeToken, + $fixnum.Int64 lastListenSequenceNumber, + $0.Target_QueryTarget query, + $0.Target_DocumentsTarget documents, + $4.Timestamp lastLimboFreeSnapshotVersion, + }) { + final _result = create(); + if (targetId != null) { + _result.targetId = targetId; + } + if (snapshotVersion != null) { + _result.snapshotVersion = snapshotVersion; + } + if (resumeToken != null) { + _result.resumeToken = resumeToken; + } + if (lastListenSequenceNumber != null) { + _result.lastListenSequenceNumber = lastListenSequenceNumber; + } + if (query != null) { + _result.query = query; + } + if (documents != null) { + _result.documents = documents; + } + if (lastLimboFreeSnapshotVersion != null) { + _result.lastLimboFreeSnapshotVersion = lastLimboFreeSnapshotVersion; + } + return _result; + } factory Target.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory Target.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') Target clone() => Target()..mergeFromMessage(this); - Target copyWith(void Function(Target) updates) => super.copyWith((message) => updates(message as Target)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. 
' + 'Will be removed in next major version') + Target copyWith(void Function(Target) updates) => super.copyWith((message) => updates(message as Target)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static Target create() => Target._(); @@ -113,23 +154,62 @@ class Target extends $pb.GeneratedMessage { void clearDocuments() => clearField(6); @$pb.TagNumber(6) $0.Target_DocumentsTarget ensureDocuments() => $_ensure(5); + + @$pb.TagNumber(7) + $4.Timestamp get lastLimboFreeSnapshotVersion => $_getN(6); + @$pb.TagNumber(7) + set lastLimboFreeSnapshotVersion($4.Timestamp v) { setField(7, v); } + @$pb.TagNumber(7) + $core.bool hasLastLimboFreeSnapshotVersion() => $_has(6); + @$pb.TagNumber(7) + void clearLastLimboFreeSnapshotVersion() => clearField(7); + @$pb.TagNumber(7) + $4.Timestamp ensureLastLimboFreeSnapshotVersion() => $_ensure(6); } class TargetGlobal extends $pb.GeneratedMessage { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('TargetGlobal', package: const $pb.PackageName('firestore.client'), createEmptyInstance: create) - ..a<$core.int>(1, 'highestTargetId', $pb.PbFieldType.O3) - ..aInt64(2, 'highestListenSequenceNumber') - ..aOM<$4.Timestamp>(3, 'lastRemoteSnapshotVersion', subBuilder: $4.Timestamp.create) - ..a<$core.int>(4, 'targetCount', $pb.PbFieldType.O3) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'TargetGlobal', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'firestore.client'), createEmptyInstance: create) + ..a<$core.int>(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'highestTargetId', $pb.PbFieldType.O3) + ..aInt64(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'highestListenSequenceNumber') + ..aOM<$4.Timestamp>(3, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'lastRemoteSnapshotVersion', subBuilder: $4.Timestamp.create) + ..a<$core.int>(4, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'targetCount', $pb.PbFieldType.O3) ..hasRequiredFields = false ; TargetGlobal._() : super(); - factory TargetGlobal() => create(); + factory TargetGlobal({ + $core.int highestTargetId, + $fixnum.Int64 highestListenSequenceNumber, + $4.Timestamp lastRemoteSnapshotVersion, + $core.int targetCount, + }) { + final _result = create(); + if (highestTargetId != null) { + _result.highestTargetId = highestTargetId; + } + if (highestListenSequenceNumber != null) { + _result.highestListenSequenceNumber = highestListenSequenceNumber; + } + if (lastRemoteSnapshotVersion != null) { + _result.lastRemoteSnapshotVersion = lastRemoteSnapshotVersion; + } + if (targetCount != null) { + _result.targetCount = targetCount; + } + return _result; + } factory TargetGlobal.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory TargetGlobal.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. 
' + 'Will be removed in next major version') TargetGlobal clone() => TargetGlobal()..mergeFromMessage(this); - TargetGlobal copyWith(void Function(TargetGlobal) updates) => super.copyWith((message) => updates(message as TargetGlobal)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + TargetGlobal copyWith(void Function(TargetGlobal) updates) => super.copyWith((message) => updates(message as TargetGlobal)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static TargetGlobal create() => TargetGlobal._(); diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/proto/target.pbenum.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/proto/target.pbenum.dart index b6a62cc4..e461e676 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/proto/target.pbenum.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/proto/target.pbenum.dart @@ -2,6 +2,6 @@ // Generated code. Do not modify. // source: firebase/firestore/proto/target.proto // -// @dart = 2.3 -// ignore_for_file: camel_case_types,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type +// @dart = 2.7 +// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/proto/target.pbjson.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/proto/target.pbjson.dart index 847e8a8d..369bae16 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/proto/target.pbjson.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/firestore/proto/target.pbjson.dart @@ -2,8 +2,8 @@ // Generated code. Do not modify. 
// source: firebase/firestore/proto/target.proto // -// @dart = 2.3 -// ignore_for_file: camel_case_types,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type +// @dart = 2.7 +// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields const Target$json = const { '1': 'Target', @@ -14,6 +14,7 @@ const Target$json = const { const {'1': 'last_listen_sequence_number', '3': 4, '4': 1, '5': 3, '10': 'lastListenSequenceNumber'}, const {'1': 'query', '3': 5, '4': 1, '5': 11, '6': '.google.firestore.v1.Target.QueryTarget', '9': 0, '10': 'query'}, const {'1': 'documents', '3': 6, '4': 1, '5': 11, '6': '.google.firestore.v1.Target.DocumentsTarget', '9': 0, '10': 'documents'}, + const {'1': 'last_limbo_free_snapshot_version', '3': 7, '4': 1, '5': 11, '6': '.google.protobuf.Timestamp', '10': 'lastLimboFreeSnapshotVersion'}, ], '8': const [ const {'1': 'target_type'}, diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/index.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/index.dart index 8b137891..e69de29b 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/index.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/firebase/index.dart @@ -1 +0,0 @@ - diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/api/annotations.pb.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/api/annotations.pb.dart index 02375fb5..3465e99b 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/api/annotations.pb.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/api/annotations.pb.dart @@ -2,8 +2,8 @@ // Generated code. Do not modify. // source: google/api/annotations.proto // -// @dart = 2.3 -// ignore_for_file: camel_case_types,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type +// @dart = 2.7 +// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields import 'dart:core' as $core; @@ -12,7 +12,7 @@ import 'package:protobuf/protobuf.dart' as $pb; import 'http.pb.dart' as $3; class Annotations { - static final $pb.Extension http = $pb.Extension<$3.HttpRule>('google.protobuf.MethodOptions', 'http', 72295728, $pb.PbFieldType.OM, defaultOrMaker: $3.HttpRule.getDefault, subBuilder: $3.HttpRule.create); + static final http = $pb.Extension<$3.HttpRule>(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.protobuf.MethodOptions', const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'http', 72295728, $pb.PbFieldType.OM, defaultOrMaker: $3.HttpRule.getDefault, subBuilder: $3.HttpRule.create); static void registerAllExtensions($pb.ExtensionRegistry registry) { registry.add(http); } diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/api/annotations.pbenum.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/api/annotations.pbenum.dart index a6cc88b2..49a9dfbb 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/api/annotations.pbenum.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/api/annotations.pbenum.dart @@ -2,6 +2,6 @@ // Generated code. Do not modify. 
// source: google/api/annotations.proto // -// @dart = 2.3 -// ignore_for_file: camel_case_types,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type +// @dart = 2.7 +// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/api/annotations.pbjson.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/api/annotations.pbjson.dart index a6cc88b2..49a9dfbb 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/api/annotations.pbjson.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/api/annotations.pbjson.dart @@ -2,6 +2,6 @@ // Generated code. Do not modify. // source: google/api/annotations.proto // -// @dart = 2.3 -// ignore_for_file: camel_case_types,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type +// @dart = 2.7 +// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/api/http.pb.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/api/http.pb.dart index 430657de..da97f015 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/api/http.pb.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/api/http.pb.dart @@ -2,26 +2,46 @@ // Generated code. Do not modify. // source: google/api/http.proto // -// @dart = 2.3 -// ignore_for_file: camel_case_types,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type +// @dart = 2.7 +// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields import 'dart:core' as $core; import 'package:protobuf/protobuf.dart' as $pb; class Http extends $pb.GeneratedMessage { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('Http', package: const $pb.PackageName('google.api'), createEmptyInstance: create) - ..pc(1, 'rules', $pb.PbFieldType.PM, subBuilder: HttpRule.create) - ..aOB(2, 'fullyDecodeReservedExpansion') + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'Http', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.api'), createEmptyInstance: create) + ..pc(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'rules', $pb.PbFieldType.PM, subBuilder: HttpRule.create) + ..aOB(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? 
'' : 'fullyDecodeReservedExpansion') ..hasRequiredFields = false ; Http._() : super(); - factory Http() => create(); + factory Http({ + $core.Iterable rules, + $core.bool fullyDecodeReservedExpansion, + }) { + final _result = create(); + if (rules != null) { + _result.rules.addAll(rules); + } + if (fullyDecodeReservedExpansion != null) { + _result.fullyDecodeReservedExpansion = fullyDecodeReservedExpansion; + } + return _result; + } factory Http.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory Http.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') Http clone() => Http()..mergeFromMessage(this); - Http copyWith(void Function(Http) updates) => super.copyWith((message) => updates(message as Http)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + Http copyWith(void Function(Http) updates) => super.copyWith((message) => updates(message as Http)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static Http create() => Http._(); @@ -64,26 +84,74 @@ class HttpRule extends $pb.GeneratedMessage { 8 : HttpRule_Pattern.custom, 0 : HttpRule_Pattern.notSet }; - static final $pb.BuilderInfo _i = $pb.BuilderInfo('HttpRule', package: const $pb.PackageName('google.api'), createEmptyInstance: create) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'HttpRule', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.api'), createEmptyInstance: create) ..oo(0, [2, 3, 4, 5, 6, 8]) - ..aOS(1, 'selector') - ..aOS(2, 'get') - ..aOS(3, 'put') - ..aOS(4, 'post') - ..aOS(5, 'delete') - ..aOS(6, 'patch') - ..aOS(7, 'body') - ..aOM(8, 'custom', subBuilder: CustomHttpPattern.create) - ..pc(11, 'additionalBindings', $pb.PbFieldType.PM, subBuilder: HttpRule.create) + ..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'selector') + ..aOS(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'get') + ..aOS(3, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'put') + ..aOS(4, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'post') + ..aOS(5, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'delete') + ..aOS(6, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'patch') + ..aOS(7, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'body') + ..aOM(8, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'custom', subBuilder: CustomHttpPattern.create) + ..pc(11, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? 
'' : 'additionalBindings', $pb.PbFieldType.PM, subBuilder: HttpRule.create) ..hasRequiredFields = false ; HttpRule._() : super(); - factory HttpRule() => create(); + factory HttpRule({ + $core.String selector, + $core.String get, + $core.String put, + $core.String post, + $core.String delete, + $core.String patch, + $core.String body, + CustomHttpPattern custom, + $core.Iterable additionalBindings, + }) { + final _result = create(); + if (selector != null) { + _result.selector = selector; + } + if (get != null) { + _result.get = get; + } + if (put != null) { + _result.put = put; + } + if (post != null) { + _result.post = post; + } + if (delete != null) { + _result.delete = delete; + } + if (patch != null) { + _result.patch = patch; + } + if (body != null) { + _result.body = body; + } + if (custom != null) { + _result.custom = custom; + } + if (additionalBindings != null) { + _result.additionalBindings.addAll(additionalBindings); + } + return _result; + } factory HttpRule.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory HttpRule.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') HttpRule clone() => HttpRule()..mergeFromMessage(this); - HttpRule copyWith(void Function(HttpRule) updates) => super.copyWith((message) => updates(message as HttpRule)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + HttpRule copyWith(void Function(HttpRule) updates) => super.copyWith((message) => updates(message as HttpRule)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static HttpRule create() => HttpRule._(); @@ -175,18 +243,38 @@ class HttpRule extends $pb.GeneratedMessage { } class CustomHttpPattern extends $pb.GeneratedMessage { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('CustomHttpPattern', package: const $pb.PackageName('google.api'), createEmptyInstance: create) - ..aOS(1, 'kind') - ..aOS(2, 'path') + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'CustomHttpPattern', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.api'), createEmptyInstance: create) + ..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'kind') + ..aOS(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'path') ..hasRequiredFields = false ; CustomHttpPattern._() : super(); - factory CustomHttpPattern() => create(); + factory CustomHttpPattern({ + $core.String kind, + $core.String path, + }) { + final _result = create(); + if (kind != null) { + _result.kind = kind; + } + if (path != null) { + _result.path = path; + } + return _result; + } factory CustomHttpPattern.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory CustomHttpPattern.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. 
' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') CustomHttpPattern clone() => CustomHttpPattern()..mergeFromMessage(this); - CustomHttpPattern copyWith(void Function(CustomHttpPattern) updates) => super.copyWith((message) => updates(message as CustomHttpPattern)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + CustomHttpPattern copyWith(void Function(CustomHttpPattern) updates) => super.copyWith((message) => updates(message as CustomHttpPattern)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static CustomHttpPattern create() => CustomHttpPattern._(); diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/api/http.pbenum.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/api/http.pbenum.dart index c6038848..15b816f8 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/api/http.pbenum.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/api/http.pbenum.dart @@ -2,6 +2,6 @@ // Generated code. Do not modify. // source: google/api/http.proto // -// @dart = 2.3 -// ignore_for_file: camel_case_types,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type +// @dart = 2.7 +// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/api/http.pbjson.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/api/http.pbjson.dart index 1724cca3..b4e97d0a 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/api/http.pbjson.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/api/http.pbjson.dart @@ -2,8 +2,8 @@ // Generated code. Do not modify. // source: google/api/http.proto // -// @dart = 2.3 -// ignore_for_file: camel_case_types,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type +// @dart = 2.7 +// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields const Http$json = const { '1': 'Http', diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/index.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/index.dart index 8b137891..e69de29b 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/index.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/index.dart @@ -1 +0,0 @@ - diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/common.pb.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/common.pb.dart index b18e1238..470e240e 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/common.pb.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/common.pb.dart @@ -2,8 +2,8 @@ // Generated code. Do not modify. 
// source: google/firestore/v1/common.proto // -// @dart = 2.3 -// ignore_for_file: camel_case_types,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type +// @dart = 2.7 +// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields import 'dart:core' as $core; @@ -12,17 +12,33 @@ import 'package:protobuf/protobuf.dart' as $pb; import '../../protobuf/timestamp.pb.dart' as $4; class DocumentMask extends $pb.GeneratedMessage { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('DocumentMask', package: const $pb.PackageName('google.firestore.v1'), createEmptyInstance: create) - ..pPS(1, 'fieldPaths') + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'DocumentMask', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.firestore.v1'), createEmptyInstance: create) + ..pPS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'fieldPaths') ..hasRequiredFields = false ; DocumentMask._() : super(); - factory DocumentMask() => create(); + factory DocumentMask({ + $core.Iterable<$core.String> fieldPaths, + }) { + final _result = create(); + if (fieldPaths != null) { + _result.fieldPaths.addAll(fieldPaths); + } + return _result; + } factory DocumentMask.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory DocumentMask.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') DocumentMask clone() => DocumentMask()..mergeFromMessage(this); - DocumentMask copyWith(void Function(DocumentMask) updates) => super.copyWith((message) => updates(message as DocumentMask)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + DocumentMask copyWith(void Function(DocumentMask) updates) => super.copyWith((message) => updates(message as DocumentMask)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static DocumentMask create() => DocumentMask._(); @@ -48,19 +64,39 @@ class Precondition extends $pb.GeneratedMessage { 2 : Precondition_ConditionType.updateTime, 0 : Precondition_ConditionType.notSet }; - static final $pb.BuilderInfo _i = $pb.BuilderInfo('Precondition', package: const $pb.PackageName('google.firestore.v1'), createEmptyInstance: create) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'Precondition', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.firestore.v1'), createEmptyInstance: create) ..oo(0, [1, 2]) - ..aOB(1, 'exists') - ..aOM<$4.Timestamp>(2, 'updateTime', subBuilder: $4.Timestamp.create) + ..aOB(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'exists') + ..aOM<$4.Timestamp>(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? 
'' : 'updateTime', subBuilder: $4.Timestamp.create) ..hasRequiredFields = false ; Precondition._() : super(); - factory Precondition() => create(); + factory Precondition({ + $core.bool exists, + $4.Timestamp updateTime, + }) { + final _result = create(); + if (exists != null) { + _result.exists = exists; + } + if (updateTime != null) { + _result.updateTime = updateTime; + } + return _result; + } factory Precondition.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory Precondition.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') Precondition clone() => Precondition()..mergeFromMessage(this); - Precondition copyWith(void Function(Precondition) updates) => super.copyWith((message) => updates(message as Precondition)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + Precondition copyWith(void Function(Precondition) updates) => super.copyWith((message) => updates(message as Precondition)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static Precondition create() => Precondition._(); @@ -95,17 +131,33 @@ class Precondition extends $pb.GeneratedMessage { } class TransactionOptions_ReadWrite extends $pb.GeneratedMessage { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('TransactionOptions.ReadWrite', package: const $pb.PackageName('google.firestore.v1'), createEmptyInstance: create) - ..a<$core.List<$core.int>>(1, 'retryTransaction', $pb.PbFieldType.OY) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'TransactionOptions.ReadWrite', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.firestore.v1'), createEmptyInstance: create) + ..a<$core.List<$core.int>>(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'retryTransaction', $pb.PbFieldType.OY) ..hasRequiredFields = false ; TransactionOptions_ReadWrite._() : super(); - factory TransactionOptions_ReadWrite() => create(); + factory TransactionOptions_ReadWrite({ + $core.List<$core.int> retryTransaction, + }) { + final _result = create(); + if (retryTransaction != null) { + _result.retryTransaction = retryTransaction; + } + return _result; + } factory TransactionOptions_ReadWrite.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory TransactionOptions_ReadWrite.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. 
' + 'Will be removed in next major version') TransactionOptions_ReadWrite clone() => TransactionOptions_ReadWrite()..mergeFromMessage(this); - TransactionOptions_ReadWrite copyWith(void Function(TransactionOptions_ReadWrite) updates) => super.copyWith((message) => updates(message as TransactionOptions_ReadWrite)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + TransactionOptions_ReadWrite copyWith(void Function(TransactionOptions_ReadWrite) updates) => super.copyWith((message) => updates(message as TransactionOptions_ReadWrite)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static TransactionOptions_ReadWrite create() => TransactionOptions_ReadWrite._(); @@ -135,18 +187,34 @@ class TransactionOptions_ReadOnly extends $pb.GeneratedMessage { 2 : TransactionOptions_ReadOnly_ConsistencySelector.readTime, 0 : TransactionOptions_ReadOnly_ConsistencySelector.notSet }; - static final $pb.BuilderInfo _i = $pb.BuilderInfo('TransactionOptions.ReadOnly', package: const $pb.PackageName('google.firestore.v1'), createEmptyInstance: create) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'TransactionOptions.ReadOnly', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.firestore.v1'), createEmptyInstance: create) ..oo(0, [2]) - ..aOM<$4.Timestamp>(2, 'readTime', subBuilder: $4.Timestamp.create) + ..aOM<$4.Timestamp>(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'readTime', subBuilder: $4.Timestamp.create) ..hasRequiredFields = false ; TransactionOptions_ReadOnly._() : super(); - factory TransactionOptions_ReadOnly() => create(); + factory TransactionOptions_ReadOnly({ + $4.Timestamp readTime, + }) { + final _result = create(); + if (readTime != null) { + _result.readTime = readTime; + } + return _result; + } factory TransactionOptions_ReadOnly.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory TransactionOptions_ReadOnly.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') TransactionOptions_ReadOnly clone() => TransactionOptions_ReadOnly()..mergeFromMessage(this); - TransactionOptions_ReadOnly copyWith(void Function(TransactionOptions_ReadOnly) updates) => super.copyWith((message) => updates(message as TransactionOptions_ReadOnly)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. 
' + 'Will be removed in next major version') + TransactionOptions_ReadOnly copyWith(void Function(TransactionOptions_ReadOnly) updates) => super.copyWith((message) => updates(message as TransactionOptions_ReadOnly)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static TransactionOptions_ReadOnly create() => TransactionOptions_ReadOnly._(); @@ -183,19 +251,39 @@ class TransactionOptions extends $pb.GeneratedMessage { 3 : TransactionOptions_Mode.readWrite, 0 : TransactionOptions_Mode.notSet }; - static final $pb.BuilderInfo _i = $pb.BuilderInfo('TransactionOptions', package: const $pb.PackageName('google.firestore.v1'), createEmptyInstance: create) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'TransactionOptions', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.firestore.v1'), createEmptyInstance: create) ..oo(0, [2, 3]) - ..aOM(2, 'readOnly', subBuilder: TransactionOptions_ReadOnly.create) - ..aOM(3, 'readWrite', subBuilder: TransactionOptions_ReadWrite.create) + ..aOM(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'readOnly', subBuilder: TransactionOptions_ReadOnly.create) + ..aOM(3, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'readWrite', subBuilder: TransactionOptions_ReadWrite.create) ..hasRequiredFields = false ; TransactionOptions._() : super(); - factory TransactionOptions() => create(); + factory TransactionOptions({ + TransactionOptions_ReadOnly readOnly, + TransactionOptions_ReadWrite readWrite, + }) { + final _result = create(); + if (readOnly != null) { + _result.readOnly = readOnly; + } + if (readWrite != null) { + _result.readWrite = readWrite; + } + return _result; + } factory TransactionOptions.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory TransactionOptions.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') TransactionOptions clone() => TransactionOptions()..mergeFromMessage(this); - TransactionOptions copyWith(void Function(TransactionOptions) updates) => super.copyWith((message) => updates(message as TransactionOptions)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + TransactionOptions copyWith(void Function(TransactionOptions) updates) => super.copyWith((message) => updates(message as TransactionOptions)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static TransactionOptions create() => TransactionOptions._(); diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/common.pbenum.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/common.pbenum.dart index 2aac2d86..9b39f6d0 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/common.pbenum.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/common.pbenum.dart @@ -2,6 +2,6 @@ // Generated code. Do not modify. 
// source: google/firestore/v1/common.proto // -// @dart = 2.3 -// ignore_for_file: camel_case_types,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type +// @dart = 2.7 +// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/common.pbjson.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/common.pbjson.dart index 8eedca78..c33139e9 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/common.pbjson.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/common.pbjson.dart @@ -2,8 +2,8 @@ // Generated code. Do not modify. // source: google/firestore/v1/common.proto // -// @dart = 2.3 -// ignore_for_file: camel_case_types,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type +// @dart = 2.7 +// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields const DocumentMask$json = const { '1': 'DocumentMask', diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/document.pb.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/document.pb.dart index a0fe46ee..5194f524 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/document.pb.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/document.pb.dart @@ -2,8 +2,8 @@ // Generated code. Do not modify. // source: google/firestore/v1/document.proto // -// @dart = 2.3 -// ignore_for_file: camel_case_types,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type +// @dart = 2.7 +// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields import 'dart:core' as $core; @@ -16,20 +16,48 @@ import '../../type/latlng.pb.dart' as $5; import '../../protobuf/struct.pbenum.dart' as $6; class Document extends $pb.GeneratedMessage { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('Document', package: const $pb.PackageName('google.firestore.v1'), createEmptyInstance: create) - ..aOS(1, 'name') - ..m<$core.String, Value>(2, 'fields', entryClassName: 'Document.FieldsEntry', keyFieldType: $pb.PbFieldType.OS, valueFieldType: $pb.PbFieldType.OM, valueCreator: Value.create, packageName: const $pb.PackageName('google.firestore.v1')) - ..aOM<$4.Timestamp>(3, 'createTime', subBuilder: $4.Timestamp.create) - ..aOM<$4.Timestamp>(4, 'updateTime', subBuilder: $4.Timestamp.create) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'Document', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.firestore.v1'), createEmptyInstance: create) + ..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'name') + ..m<$core.String, Value>(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? 
'' : 'fields', entryClassName: 'Document.FieldsEntry', keyFieldType: $pb.PbFieldType.OS, valueFieldType: $pb.PbFieldType.OM, valueCreator: Value.create, packageName: const $pb.PackageName('google.firestore.v1')) + ..aOM<$4.Timestamp>(3, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'createTime', subBuilder: $4.Timestamp.create) + ..aOM<$4.Timestamp>(4, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'updateTime', subBuilder: $4.Timestamp.create) ..hasRequiredFields = false ; Document._() : super(); - factory Document() => create(); + factory Document({ + $core.String name, + $core.Map<$core.String, Value> fields, + $4.Timestamp createTime, + $4.Timestamp updateTime, + }) { + final _result = create(); + if (name != null) { + _result.name = name; + } + if (fields != null) { + _result.fields.addAll(fields); + } + if (createTime != null) { + _result.createTime = createTime; + } + if (updateTime != null) { + _result.updateTime = updateTime; + } + return _result; + } factory Document.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory Document.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') Document clone() => Document()..mergeFromMessage(this); - Document copyWith(void Function(Document) updates) => super.copyWith((message) => updates(message as Document)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + Document copyWith(void Function(Document) updates) => super.copyWith((message) => updates(message as Document)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static Document create() => Document._(); @@ -104,28 +132,84 @@ class Value extends $pb.GeneratedMessage { 18 : Value_ValueType.bytesValue, 0 : Value_ValueType.notSet }; - static final $pb.BuilderInfo _i = $pb.BuilderInfo('Value', package: const $pb.PackageName('google.firestore.v1'), createEmptyInstance: create) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'Value', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.firestore.v1'), createEmptyInstance: create) ..oo(0, [1, 2, 3, 5, 6, 8, 9, 10, 11, 17, 18]) - ..aOB(1, 'booleanValue') - ..aInt64(2, 'integerValue') - ..a<$core.double>(3, 'doubleValue', $pb.PbFieldType.OD) - ..aOS(5, 'referenceValue') - ..aOM(6, 'mapValue', subBuilder: MapValue.create) - ..aOM<$5.LatLng>(8, 'geoPointValue', subBuilder: $5.LatLng.create) - ..aOM(9, 'arrayValue', subBuilder: ArrayValue.create) - ..aOM<$4.Timestamp>(10, 'timestampValue', subBuilder: $4.Timestamp.create) - ..e<$6.NullValue>(11, 'nullValue', $pb.PbFieldType.OE, defaultOrMaker: $6.NullValue.NULL_VALUE, valueOf: $6.NullValue.valueOf, enumValues: $6.NullValue.values) - ..aOS(17, 'stringValue') - ..a<$core.List<$core.int>>(18, 'bytesValue', $pb.PbFieldType.OY) + ..aOB(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'booleanValue') + ..aInt64(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? 
'' : 'integerValue') + ..a<$core.double>(3, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'doubleValue', $pb.PbFieldType.OD) + ..aOS(5, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'referenceValue') + ..aOM(6, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'mapValue', subBuilder: MapValue.create) + ..aOM<$5.LatLng>(8, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'geoPointValue', subBuilder: $5.LatLng.create) + ..aOM(9, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'arrayValue', subBuilder: ArrayValue.create) + ..aOM<$4.Timestamp>(10, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'timestampValue', subBuilder: $4.Timestamp.create) + ..e<$6.NullValue>(11, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'nullValue', $pb.PbFieldType.OE, defaultOrMaker: $6.NullValue.NULL_VALUE, valueOf: $6.NullValue.valueOf, enumValues: $6.NullValue.values) + ..aOS(17, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'stringValue') + ..a<$core.List<$core.int>>(18, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'bytesValue', $pb.PbFieldType.OY) ..hasRequiredFields = false ; Value._() : super(); - factory Value() => create(); + factory Value({ + $core.bool booleanValue, + $fixnum.Int64 integerValue, + $core.double doubleValue, + $core.String referenceValue, + MapValue mapValue, + $5.LatLng geoPointValue, + ArrayValue arrayValue, + $4.Timestamp timestampValue, + $6.NullValue nullValue, + $core.String stringValue, + $core.List<$core.int> bytesValue, + }) { + final _result = create(); + if (booleanValue != null) { + _result.booleanValue = booleanValue; + } + if (integerValue != null) { + _result.integerValue = integerValue; + } + if (doubleValue != null) { + _result.doubleValue = doubleValue; + } + if (referenceValue != null) { + _result.referenceValue = referenceValue; + } + if (mapValue != null) { + _result.mapValue = mapValue; + } + if (geoPointValue != null) { + _result.geoPointValue = geoPointValue; + } + if (arrayValue != null) { + _result.arrayValue = arrayValue; + } + if (timestampValue != null) { + _result.timestampValue = timestampValue; + } + if (nullValue != null) { + _result.nullValue = nullValue; + } + if (stringValue != null) { + _result.stringValue = stringValue; + } + if (bytesValue != null) { + _result.bytesValue = bytesValue; + } + return _result; + } factory Value.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory Value.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') Value clone() => Value()..mergeFromMessage(this); - Value copyWith(void Function(Value) updates) => super.copyWith((message) => updates(message as Value)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. 
' + 'Will be removed in next major version') + Value copyWith(void Function(Value) updates) => super.copyWith((message) => updates(message as Value)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static Value create() => Value._(); @@ -247,17 +331,33 @@ class Value extends $pb.GeneratedMessage { } class ArrayValue extends $pb.GeneratedMessage { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('ArrayValue', package: const $pb.PackageName('google.firestore.v1'), createEmptyInstance: create) - ..pc(1, 'values', $pb.PbFieldType.PM, subBuilder: Value.create) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'ArrayValue', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.firestore.v1'), createEmptyInstance: create) + ..pc(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'values', $pb.PbFieldType.PM, subBuilder: Value.create) ..hasRequiredFields = false ; ArrayValue._() : super(); - factory ArrayValue() => create(); + factory ArrayValue({ + $core.Iterable values, + }) { + final _result = create(); + if (values != null) { + _result.values.addAll(values); + } + return _result; + } factory ArrayValue.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory ArrayValue.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') ArrayValue clone() => ArrayValue()..mergeFromMessage(this); - ArrayValue copyWith(void Function(ArrayValue) updates) => super.copyWith((message) => updates(message as ArrayValue)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + ArrayValue copyWith(void Function(ArrayValue) updates) => super.copyWith((message) => updates(message as ArrayValue)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static ArrayValue create() => ArrayValue._(); @@ -272,17 +372,33 @@ class ArrayValue extends $pb.GeneratedMessage { } class MapValue extends $pb.GeneratedMessage { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('MapValue', package: const $pb.PackageName('google.firestore.v1'), createEmptyInstance: create) - ..m<$core.String, Value>(1, 'fields', entryClassName: 'MapValue.FieldsEntry', keyFieldType: $pb.PbFieldType.OS, valueFieldType: $pb.PbFieldType.OM, valueCreator: Value.create, packageName: const $pb.PackageName('google.firestore.v1')) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'MapValue', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.firestore.v1'), createEmptyInstance: create) + ..m<$core.String, Value>(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? 
'' : 'fields', entryClassName: 'MapValue.FieldsEntry', keyFieldType: $pb.PbFieldType.OS, valueFieldType: $pb.PbFieldType.OM, valueCreator: Value.create, packageName: const $pb.PackageName('google.firestore.v1')) ..hasRequiredFields = false ; MapValue._() : super(); - factory MapValue() => create(); + factory MapValue({ + $core.Map<$core.String, Value> fields, + }) { + final _result = create(); + if (fields != null) { + _result.fields.addAll(fields); + } + return _result; + } factory MapValue.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory MapValue.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') MapValue clone() => MapValue()..mergeFromMessage(this); - MapValue copyWith(void Function(MapValue) updates) => super.copyWith((message) => updates(message as MapValue)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + MapValue copyWith(void Function(MapValue) updates) => super.copyWith((message) => updates(message as MapValue)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static MapValue create() => MapValue._(); diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/document.pbenum.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/document.pbenum.dart index 58ecbb8e..9e62d42a 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/document.pbenum.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/document.pbenum.dart @@ -2,6 +2,6 @@ // Generated code. Do not modify. // source: google/firestore/v1/document.proto // -// @dart = 2.3 -// ignore_for_file: camel_case_types,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type +// @dart = 2.7 +// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/document.pbjson.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/document.pbjson.dart index dcacb0d5..3c27dcb5 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/document.pbjson.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/document.pbjson.dart @@ -2,8 +2,8 @@ // Generated code. Do not modify. 
// source: google/firestore/v1/document.proto // -// @dart = 2.3 -// ignore_for_file: camel_case_types,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type +// @dart = 2.7 +// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields const Document$json = const { '1': 'Document', diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/firestore.pb.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/firestore.pb.dart index da509c24..5b10aaf7 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/firestore.pb.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/firestore.pb.dart @@ -2,18 +2,18 @@ // Generated code. Do not modify. // source: google/firestore/v1/firestore.proto // -// @dart = 2.3 -// ignore_for_file: camel_case_types,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type +// @dart = 2.7 +// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields import 'dart:core' as $core; import 'package:protobuf/protobuf.dart' as $pb; -import 'common.pb.dart' as $7; +import 'common.pb.dart' as $9; import '../../protobuf/timestamp.pb.dart' as $4; import 'document.pb.dart' as $1; -import 'write.pb.dart' as $8; -import 'query.pb.dart' as $11; +import 'write.pb.dart' as $10; +import 'query.pb.dart' as $8; import '../../rpc/status.pb.dart' as $12; import 'firestore.pbenum.dart'; @@ -32,21 +32,49 @@ class GetDocumentRequest extends $pb.GeneratedMessage { 5 : GetDocumentRequest_ConsistencySelector.readTime, 0 : GetDocumentRequest_ConsistencySelector.notSet }; - static final $pb.BuilderInfo _i = $pb.BuilderInfo('GetDocumentRequest', package: const $pb.PackageName('google.firestore.v1'), createEmptyInstance: create) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'GetDocumentRequest', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.firestore.v1'), createEmptyInstance: create) ..oo(0, [3, 5]) - ..aOS(1, 'name') - ..aOM<$7.DocumentMask>(2, 'mask', subBuilder: $7.DocumentMask.create) - ..a<$core.List<$core.int>>(3, 'transaction', $pb.PbFieldType.OY) - ..aOM<$4.Timestamp>(5, 'readTime', subBuilder: $4.Timestamp.create) + ..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'name') + ..aOM<$9.DocumentMask>(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'mask', subBuilder: $9.DocumentMask.create) + ..a<$core.List<$core.int>>(3, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'transaction', $pb.PbFieldType.OY) + ..aOM<$4.Timestamp>(5, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? 
'' : 'readTime', subBuilder: $4.Timestamp.create) ..hasRequiredFields = false ; GetDocumentRequest._() : super(); - factory GetDocumentRequest() => create(); + factory GetDocumentRequest({ + $core.String name, + $9.DocumentMask mask, + $core.List<$core.int> transaction, + $4.Timestamp readTime, + }) { + final _result = create(); + if (name != null) { + _result.name = name; + } + if (mask != null) { + _result.mask = mask; + } + if (transaction != null) { + _result.transaction = transaction; + } + if (readTime != null) { + _result.readTime = readTime; + } + return _result; + } factory GetDocumentRequest.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory GetDocumentRequest.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') GetDocumentRequest clone() => GetDocumentRequest()..mergeFromMessage(this); - GetDocumentRequest copyWith(void Function(GetDocumentRequest) updates) => super.copyWith((message) => updates(message as GetDocumentRequest)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + GetDocumentRequest copyWith(void Function(GetDocumentRequest) updates) => super.copyWith((message) => updates(message as GetDocumentRequest)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static GetDocumentRequest create() => GetDocumentRequest._(); @@ -69,15 +97,15 @@ class GetDocumentRequest extends $pb.GeneratedMessage { void clearName() => clearField(1); @$pb.TagNumber(2) - $7.DocumentMask get mask => $_getN(1); + $9.DocumentMask get mask => $_getN(1); @$pb.TagNumber(2) - set mask($7.DocumentMask v) { setField(2, v); } + set mask($9.DocumentMask v) { setField(2, v); } @$pb.TagNumber(2) $core.bool hasMask() => $_has(1); @$pb.TagNumber(2) void clearMask() => clearField(2); @$pb.TagNumber(2) - $7.DocumentMask ensureMask() => $_ensure(1); + $9.DocumentMask ensureMask() => $_ensure(1); @$pb.TagNumber(3) $core.List<$core.int> get transaction => $_getN(2); @@ -112,26 +140,74 @@ class ListDocumentsRequest extends $pb.GeneratedMessage { 10 : ListDocumentsRequest_ConsistencySelector.readTime, 0 : ListDocumentsRequest_ConsistencySelector.notSet }; - static final $pb.BuilderInfo _i = $pb.BuilderInfo('ListDocumentsRequest', package: const $pb.PackageName('google.firestore.v1'), createEmptyInstance: create) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'ListDocumentsRequest', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.firestore.v1'), createEmptyInstance: create) ..oo(0, [8, 10]) - ..aOS(1, 'parent') - ..aOS(2, 'collectionId') - ..a<$core.int>(3, 'pageSize', $pb.PbFieldType.O3) - ..aOS(4, 'pageToken') - ..aOS(6, 'orderBy') - ..aOM<$7.DocumentMask>(7, 'mask', subBuilder: $7.DocumentMask.create) - ..a<$core.List<$core.int>>(8, 'transaction', $pb.PbFieldType.OY) - ..aOM<$4.Timestamp>(10, 'readTime', subBuilder: $4.Timestamp.create) - ..aOB(12, 'showMissing') + ..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? 
'' : 'parent') + ..aOS(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'collectionId') + ..a<$core.int>(3, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'pageSize', $pb.PbFieldType.O3) + ..aOS(4, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'pageToken') + ..aOS(6, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'orderBy') + ..aOM<$9.DocumentMask>(7, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'mask', subBuilder: $9.DocumentMask.create) + ..a<$core.List<$core.int>>(8, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'transaction', $pb.PbFieldType.OY) + ..aOM<$4.Timestamp>(10, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'readTime', subBuilder: $4.Timestamp.create) + ..aOB(12, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'showMissing') ..hasRequiredFields = false ; ListDocumentsRequest._() : super(); - factory ListDocumentsRequest() => create(); + factory ListDocumentsRequest({ + $core.String parent, + $core.String collectionId, + $core.int pageSize, + $core.String pageToken, + $core.String orderBy, + $9.DocumentMask mask, + $core.List<$core.int> transaction, + $4.Timestamp readTime, + $core.bool showMissing, + }) { + final _result = create(); + if (parent != null) { + _result.parent = parent; + } + if (collectionId != null) { + _result.collectionId = collectionId; + } + if (pageSize != null) { + _result.pageSize = pageSize; + } + if (pageToken != null) { + _result.pageToken = pageToken; + } + if (orderBy != null) { + _result.orderBy = orderBy; + } + if (mask != null) { + _result.mask = mask; + } + if (transaction != null) { + _result.transaction = transaction; + } + if (readTime != null) { + _result.readTime = readTime; + } + if (showMissing != null) { + _result.showMissing = showMissing; + } + return _result; + } factory ListDocumentsRequest.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory ListDocumentsRequest.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') ListDocumentsRequest clone() => ListDocumentsRequest()..mergeFromMessage(this); - ListDocumentsRequest copyWith(void Function(ListDocumentsRequest) updates) => super.copyWith((message) => updates(message as ListDocumentsRequest)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. 
' + 'Will be removed in next major version') + ListDocumentsRequest copyWith(void Function(ListDocumentsRequest) updates) => super.copyWith((message) => updates(message as ListDocumentsRequest)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static ListDocumentsRequest create() => ListDocumentsRequest._(); @@ -190,15 +266,15 @@ class ListDocumentsRequest extends $pb.GeneratedMessage { void clearOrderBy() => clearField(6); @$pb.TagNumber(7) - $7.DocumentMask get mask => $_getN(5); + $9.DocumentMask get mask => $_getN(5); @$pb.TagNumber(7) - set mask($7.DocumentMask v) { setField(7, v); } + set mask($9.DocumentMask v) { setField(7, v); } @$pb.TagNumber(7) $core.bool hasMask() => $_has(5); @$pb.TagNumber(7) void clearMask() => clearField(7); @$pb.TagNumber(7) - $7.DocumentMask ensureMask() => $_ensure(5); + $9.DocumentMask ensureMask() => $_ensure(5); @$pb.TagNumber(8) $core.List<$core.int> get transaction => $_getN(6); @@ -231,18 +307,38 @@ class ListDocumentsRequest extends $pb.GeneratedMessage { } class ListDocumentsResponse extends $pb.GeneratedMessage { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('ListDocumentsResponse', package: const $pb.PackageName('google.firestore.v1'), createEmptyInstance: create) - ..pc<$1.Document>(1, 'documents', $pb.PbFieldType.PM, subBuilder: $1.Document.create) - ..aOS(2, 'nextPageToken') + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'ListDocumentsResponse', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.firestore.v1'), createEmptyInstance: create) + ..pc<$1.Document>(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'documents', $pb.PbFieldType.PM, subBuilder: $1.Document.create) + ..aOS(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'nextPageToken') ..hasRequiredFields = false ; ListDocumentsResponse._() : super(); - factory ListDocumentsResponse() => create(); + factory ListDocumentsResponse({ + $core.Iterable<$1.Document> documents, + $core.String nextPageToken, + }) { + final _result = create(); + if (documents != null) { + _result.documents.addAll(documents); + } + if (nextPageToken != null) { + _result.nextPageToken = nextPageToken; + } + return _result; + } factory ListDocumentsResponse.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory ListDocumentsResponse.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') ListDocumentsResponse clone() => ListDocumentsResponse()..mergeFromMessage(this); - ListDocumentsResponse copyWith(void Function(ListDocumentsResponse) updates) => super.copyWith((message) => updates(message as ListDocumentsResponse)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. 
' + 'Will be removed in next major version') + ListDocumentsResponse copyWith(void Function(ListDocumentsResponse) updates) => super.copyWith((message) => updates(message as ListDocumentsResponse)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static ListDocumentsResponse create() => ListDocumentsResponse._(); @@ -266,21 +362,53 @@ class ListDocumentsResponse extends $pb.GeneratedMessage { } class CreateDocumentRequest extends $pb.GeneratedMessage { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('CreateDocumentRequest', package: const $pb.PackageName('google.firestore.v1'), createEmptyInstance: create) - ..aOS(1, 'parent') - ..aOS(2, 'collectionId') - ..aOS(3, 'documentId') - ..aOM<$1.Document>(4, 'document', subBuilder: $1.Document.create) - ..aOM<$7.DocumentMask>(5, 'mask', subBuilder: $7.DocumentMask.create) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'CreateDocumentRequest', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.firestore.v1'), createEmptyInstance: create) + ..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'parent') + ..aOS(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'collectionId') + ..aOS(3, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'documentId') + ..aOM<$1.Document>(4, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'document', subBuilder: $1.Document.create) + ..aOM<$9.DocumentMask>(5, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'mask', subBuilder: $9.DocumentMask.create) ..hasRequiredFields = false ; CreateDocumentRequest._() : super(); - factory CreateDocumentRequest() => create(); + factory CreateDocumentRequest({ + $core.String parent, + $core.String collectionId, + $core.String documentId, + $1.Document document, + $9.DocumentMask mask, + }) { + final _result = create(); + if (parent != null) { + _result.parent = parent; + } + if (collectionId != null) { + _result.collectionId = collectionId; + } + if (documentId != null) { + _result.documentId = documentId; + } + if (document != null) { + _result.document = document; + } + if (mask != null) { + _result.mask = mask; + } + return _result; + } factory CreateDocumentRequest.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory CreateDocumentRequest.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') CreateDocumentRequest clone() => CreateDocumentRequest()..mergeFromMessage(this); - CreateDocumentRequest copyWith(void Function(CreateDocumentRequest) updates) => super.copyWith((message) => updates(message as CreateDocumentRequest)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. 
' + 'Will be removed in next major version') + CreateDocumentRequest copyWith(void Function(CreateDocumentRequest) updates) => super.copyWith((message) => updates(message as CreateDocumentRequest)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static CreateDocumentRequest create() => CreateDocumentRequest._(); @@ -329,32 +457,60 @@ class CreateDocumentRequest extends $pb.GeneratedMessage { $1.Document ensureDocument() => $_ensure(3); @$pb.TagNumber(5) - $7.DocumentMask get mask => $_getN(4); + $9.DocumentMask get mask => $_getN(4); @$pb.TagNumber(5) - set mask($7.DocumentMask v) { setField(5, v); } + set mask($9.DocumentMask v) { setField(5, v); } @$pb.TagNumber(5) $core.bool hasMask() => $_has(4); @$pb.TagNumber(5) void clearMask() => clearField(5); @$pb.TagNumber(5) - $7.DocumentMask ensureMask() => $_ensure(4); + $9.DocumentMask ensureMask() => $_ensure(4); } class UpdateDocumentRequest extends $pb.GeneratedMessage { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('UpdateDocumentRequest', package: const $pb.PackageName('google.firestore.v1'), createEmptyInstance: create) - ..aOM<$1.Document>(1, 'document', subBuilder: $1.Document.create) - ..aOM<$7.DocumentMask>(2, 'updateMask', subBuilder: $7.DocumentMask.create) - ..aOM<$7.DocumentMask>(3, 'mask', subBuilder: $7.DocumentMask.create) - ..aOM<$7.Precondition>(4, 'currentDocument', subBuilder: $7.Precondition.create) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'UpdateDocumentRequest', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.firestore.v1'), createEmptyInstance: create) + ..aOM<$1.Document>(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'document', subBuilder: $1.Document.create) + ..aOM<$9.DocumentMask>(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'updateMask', subBuilder: $9.DocumentMask.create) + ..aOM<$9.DocumentMask>(3, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'mask', subBuilder: $9.DocumentMask.create) + ..aOM<$9.Precondition>(4, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'currentDocument', subBuilder: $9.Precondition.create) ..hasRequiredFields = false ; UpdateDocumentRequest._() : super(); - factory UpdateDocumentRequest() => create(); + factory UpdateDocumentRequest({ + $1.Document document, + $9.DocumentMask updateMask, + $9.DocumentMask mask, + $9.Precondition currentDocument, + }) { + final _result = create(); + if (document != null) { + _result.document = document; + } + if (updateMask != null) { + _result.updateMask = updateMask; + } + if (mask != null) { + _result.mask = mask; + } + if (currentDocument != null) { + _result.currentDocument = currentDocument; + } + return _result; + } factory UpdateDocumentRequest.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory UpdateDocumentRequest.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. 
' + 'Will be removed in next major version') UpdateDocumentRequest clone() => UpdateDocumentRequest()..mergeFromMessage(this); - UpdateDocumentRequest copyWith(void Function(UpdateDocumentRequest) updates) => super.copyWith((message) => updates(message as UpdateDocumentRequest)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + UpdateDocumentRequest copyWith(void Function(UpdateDocumentRequest) updates) => super.copyWith((message) => updates(message as UpdateDocumentRequest)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static UpdateDocumentRequest create() => UpdateDocumentRequest._(); @@ -376,52 +532,72 @@ class UpdateDocumentRequest extends $pb.GeneratedMessage { $1.Document ensureDocument() => $_ensure(0); @$pb.TagNumber(2) - $7.DocumentMask get updateMask => $_getN(1); + $9.DocumentMask get updateMask => $_getN(1); @$pb.TagNumber(2) - set updateMask($7.DocumentMask v) { setField(2, v); } + set updateMask($9.DocumentMask v) { setField(2, v); } @$pb.TagNumber(2) $core.bool hasUpdateMask() => $_has(1); @$pb.TagNumber(2) void clearUpdateMask() => clearField(2); @$pb.TagNumber(2) - $7.DocumentMask ensureUpdateMask() => $_ensure(1); + $9.DocumentMask ensureUpdateMask() => $_ensure(1); @$pb.TagNumber(3) - $7.DocumentMask get mask => $_getN(2); + $9.DocumentMask get mask => $_getN(2); @$pb.TagNumber(3) - set mask($7.DocumentMask v) { setField(3, v); } + set mask($9.DocumentMask v) { setField(3, v); } @$pb.TagNumber(3) $core.bool hasMask() => $_has(2); @$pb.TagNumber(3) void clearMask() => clearField(3); @$pb.TagNumber(3) - $7.DocumentMask ensureMask() => $_ensure(2); + $9.DocumentMask ensureMask() => $_ensure(2); @$pb.TagNumber(4) - $7.Precondition get currentDocument => $_getN(3); + $9.Precondition get currentDocument => $_getN(3); @$pb.TagNumber(4) - set currentDocument($7.Precondition v) { setField(4, v); } + set currentDocument($9.Precondition v) { setField(4, v); } @$pb.TagNumber(4) $core.bool hasCurrentDocument() => $_has(3); @$pb.TagNumber(4) void clearCurrentDocument() => clearField(4); @$pb.TagNumber(4) - $7.Precondition ensureCurrentDocument() => $_ensure(3); + $9.Precondition ensureCurrentDocument() => $_ensure(3); } class DeleteDocumentRequest extends $pb.GeneratedMessage { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('DeleteDocumentRequest', package: const $pb.PackageName('google.firestore.v1'), createEmptyInstance: create) - ..aOS(1, 'name') - ..aOM<$7.Precondition>(2, 'currentDocument', subBuilder: $7.Precondition.create) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'DeleteDocumentRequest', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.firestore.v1'), createEmptyInstance: create) + ..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'name') + ..aOM<$9.Precondition>(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? 
'' : 'currentDocument', subBuilder: $9.Precondition.create) ..hasRequiredFields = false ; DeleteDocumentRequest._() : super(); - factory DeleteDocumentRequest() => create(); + factory DeleteDocumentRequest({ + $core.String name, + $9.Precondition currentDocument, + }) { + final _result = create(); + if (name != null) { + _result.name = name; + } + if (currentDocument != null) { + _result.currentDocument = currentDocument; + } + return _result; + } factory DeleteDocumentRequest.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory DeleteDocumentRequest.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') DeleteDocumentRequest clone() => DeleteDocumentRequest()..mergeFromMessage(this); - DeleteDocumentRequest copyWith(void Function(DeleteDocumentRequest) updates) => super.copyWith((message) => updates(message as DeleteDocumentRequest)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + DeleteDocumentRequest copyWith(void Function(DeleteDocumentRequest) updates) => super.copyWith((message) => updates(message as DeleteDocumentRequest)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static DeleteDocumentRequest create() => DeleteDocumentRequest._(); @@ -441,15 +617,15 @@ class DeleteDocumentRequest extends $pb.GeneratedMessage { void clearName() => clearField(1); @$pb.TagNumber(2) - $7.Precondition get currentDocument => $_getN(1); + $9.Precondition get currentDocument => $_getN(1); @$pb.TagNumber(2) - set currentDocument($7.Precondition v) { setField(2, v); } + set currentDocument($9.Precondition v) { setField(2, v); } @$pb.TagNumber(2) $core.bool hasCurrentDocument() => $_has(1); @$pb.TagNumber(2) void clearCurrentDocument() => clearField(2); @$pb.TagNumber(2) - $7.Precondition ensureCurrentDocument() => $_ensure(1); + $9.Precondition ensureCurrentDocument() => $_ensure(1); } enum BatchGetDocumentsRequest_ConsistencySelector { @@ -466,23 +642,59 @@ class BatchGetDocumentsRequest extends $pb.GeneratedMessage { 7 : BatchGetDocumentsRequest_ConsistencySelector.readTime, 0 : BatchGetDocumentsRequest_ConsistencySelector.notSet }; - static final $pb.BuilderInfo _i = $pb.BuilderInfo('BatchGetDocumentsRequest', package: const $pb.PackageName('google.firestore.v1'), createEmptyInstance: create) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'BatchGetDocumentsRequest', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.firestore.v1'), createEmptyInstance: create) ..oo(0, [4, 5, 7]) - ..aOS(1, 'database') - ..pPS(2, 'documents') - ..aOM<$7.DocumentMask>(3, 'mask', subBuilder: $7.DocumentMask.create) - ..a<$core.List<$core.int>>(4, 'transaction', $pb.PbFieldType.OY) - ..aOM<$7.TransactionOptions>(5, 'newTransaction', subBuilder: $7.TransactionOptions.create) - ..aOM<$4.Timestamp>(7, 'readTime', subBuilder: $4.Timestamp.create) + ..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? 
'' : 'database') + ..pPS(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'documents') + ..aOM<$9.DocumentMask>(3, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'mask', subBuilder: $9.DocumentMask.create) + ..a<$core.List<$core.int>>(4, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'transaction', $pb.PbFieldType.OY) + ..aOM<$9.TransactionOptions>(5, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'newTransaction', subBuilder: $9.TransactionOptions.create) + ..aOM<$4.Timestamp>(7, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'readTime', subBuilder: $4.Timestamp.create) ..hasRequiredFields = false ; BatchGetDocumentsRequest._() : super(); - factory BatchGetDocumentsRequest() => create(); + factory BatchGetDocumentsRequest({ + $core.String database, + $core.Iterable<$core.String> documents, + $9.DocumentMask mask, + $core.List<$core.int> transaction, + $9.TransactionOptions newTransaction, + $4.Timestamp readTime, + }) { + final _result = create(); + if (database != null) { + _result.database = database; + } + if (documents != null) { + _result.documents.addAll(documents); + } + if (mask != null) { + _result.mask = mask; + } + if (transaction != null) { + _result.transaction = transaction; + } + if (newTransaction != null) { + _result.newTransaction = newTransaction; + } + if (readTime != null) { + _result.readTime = readTime; + } + return _result; + } factory BatchGetDocumentsRequest.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory BatchGetDocumentsRequest.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') BatchGetDocumentsRequest clone() => BatchGetDocumentsRequest()..mergeFromMessage(this); - BatchGetDocumentsRequest copyWith(void Function(BatchGetDocumentsRequest) updates) => super.copyWith((message) => updates(message as BatchGetDocumentsRequest)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. 
' + 'Will be removed in next major version') + BatchGetDocumentsRequest copyWith(void Function(BatchGetDocumentsRequest) updates) => super.copyWith((message) => updates(message as BatchGetDocumentsRequest)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static BatchGetDocumentsRequest create() => BatchGetDocumentsRequest._(); @@ -508,15 +720,15 @@ class BatchGetDocumentsRequest extends $pb.GeneratedMessage { $core.List<$core.String> get documents => $_getList(1); @$pb.TagNumber(3) - $7.DocumentMask get mask => $_getN(2); + $9.DocumentMask get mask => $_getN(2); @$pb.TagNumber(3) - set mask($7.DocumentMask v) { setField(3, v); } + set mask($9.DocumentMask v) { setField(3, v); } @$pb.TagNumber(3) $core.bool hasMask() => $_has(2); @$pb.TagNumber(3) void clearMask() => clearField(3); @$pb.TagNumber(3) - $7.DocumentMask ensureMask() => $_ensure(2); + $9.DocumentMask ensureMask() => $_ensure(2); @$pb.TagNumber(4) $core.List<$core.int> get transaction => $_getN(3); @@ -528,15 +740,15 @@ class BatchGetDocumentsRequest extends $pb.GeneratedMessage { void clearTransaction() => clearField(4); @$pb.TagNumber(5) - $7.TransactionOptions get newTransaction => $_getN(4); + $9.TransactionOptions get newTransaction => $_getN(4); @$pb.TagNumber(5) - set newTransaction($7.TransactionOptions v) { setField(5, v); } + set newTransaction($9.TransactionOptions v) { setField(5, v); } @$pb.TagNumber(5) $core.bool hasNewTransaction() => $_has(4); @$pb.TagNumber(5) void clearNewTransaction() => clearField(5); @$pb.TagNumber(5) - $7.TransactionOptions ensureNewTransaction() => $_ensure(4); + $9.TransactionOptions ensureNewTransaction() => $_ensure(4); @$pb.TagNumber(7) $4.Timestamp get readTime => $_getN(5); @@ -562,21 +774,49 @@ class BatchGetDocumentsResponse extends $pb.GeneratedMessage { 2 : BatchGetDocumentsResponse_Result.missing, 0 : BatchGetDocumentsResponse_Result.notSet }; - static final $pb.BuilderInfo _i = $pb.BuilderInfo('BatchGetDocumentsResponse', package: const $pb.PackageName('google.firestore.v1'), createEmptyInstance: create) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'BatchGetDocumentsResponse', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.firestore.v1'), createEmptyInstance: create) ..oo(0, [1, 2]) - ..aOM<$1.Document>(1, 'found', subBuilder: $1.Document.create) - ..aOS(2, 'missing') - ..a<$core.List<$core.int>>(3, 'transaction', $pb.PbFieldType.OY) - ..aOM<$4.Timestamp>(4, 'readTime', subBuilder: $4.Timestamp.create) + ..aOM<$1.Document>(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'found', subBuilder: $1.Document.create) + ..aOS(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'missing') + ..a<$core.List<$core.int>>(3, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'transaction', $pb.PbFieldType.OY) + ..aOM<$4.Timestamp>(4, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? 
'' : 'readTime', subBuilder: $4.Timestamp.create) ..hasRequiredFields = false ; BatchGetDocumentsResponse._() : super(); - factory BatchGetDocumentsResponse() => create(); + factory BatchGetDocumentsResponse({ + $1.Document found, + $core.String missing, + $core.List<$core.int> transaction, + $4.Timestamp readTime, + }) { + final _result = create(); + if (found != null) { + _result.found = found; + } + if (missing != null) { + _result.missing = missing; + } + if (transaction != null) { + _result.transaction = transaction; + } + if (readTime != null) { + _result.readTime = readTime; + } + return _result; + } factory BatchGetDocumentsResponse.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory BatchGetDocumentsResponse.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') BatchGetDocumentsResponse clone() => BatchGetDocumentsResponse()..mergeFromMessage(this); - BatchGetDocumentsResponse copyWith(void Function(BatchGetDocumentsResponse) updates) => super.copyWith((message) => updates(message as BatchGetDocumentsResponse)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + BatchGetDocumentsResponse copyWith(void Function(BatchGetDocumentsResponse) updates) => super.copyWith((message) => updates(message as BatchGetDocumentsResponse)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static BatchGetDocumentsResponse create() => BatchGetDocumentsResponse._(); @@ -631,18 +871,38 @@ class BatchGetDocumentsResponse extends $pb.GeneratedMessage { } class BeginTransactionRequest extends $pb.GeneratedMessage { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('BeginTransactionRequest', package: const $pb.PackageName('google.firestore.v1'), createEmptyInstance: create) - ..aOS(1, 'database') - ..aOM<$7.TransactionOptions>(2, 'options', subBuilder: $7.TransactionOptions.create) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'BeginTransactionRequest', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.firestore.v1'), createEmptyInstance: create) + ..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'database') + ..aOM<$9.TransactionOptions>(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? 
'' : 'options', subBuilder: $9.TransactionOptions.create) ..hasRequiredFields = false ; BeginTransactionRequest._() : super(); - factory BeginTransactionRequest() => create(); + factory BeginTransactionRequest({ + $core.String database, + $9.TransactionOptions options, + }) { + final _result = create(); + if (database != null) { + _result.database = database; + } + if (options != null) { + _result.options = options; + } + return _result; + } factory BeginTransactionRequest.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory BeginTransactionRequest.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') BeginTransactionRequest clone() => BeginTransactionRequest()..mergeFromMessage(this); - BeginTransactionRequest copyWith(void Function(BeginTransactionRequest) updates) => super.copyWith((message) => updates(message as BeginTransactionRequest)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + BeginTransactionRequest copyWith(void Function(BeginTransactionRequest) updates) => super.copyWith((message) => updates(message as BeginTransactionRequest)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static BeginTransactionRequest create() => BeginTransactionRequest._(); @@ -662,29 +922,45 @@ class BeginTransactionRequest extends $pb.GeneratedMessage { void clearDatabase() => clearField(1); @$pb.TagNumber(2) - $7.TransactionOptions get options => $_getN(1); + $9.TransactionOptions get options => $_getN(1); @$pb.TagNumber(2) - set options($7.TransactionOptions v) { setField(2, v); } + set options($9.TransactionOptions v) { setField(2, v); } @$pb.TagNumber(2) $core.bool hasOptions() => $_has(1); @$pb.TagNumber(2) void clearOptions() => clearField(2); @$pb.TagNumber(2) - $7.TransactionOptions ensureOptions() => $_ensure(1); + $9.TransactionOptions ensureOptions() => $_ensure(1); } class BeginTransactionResponse extends $pb.GeneratedMessage { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('BeginTransactionResponse', package: const $pb.PackageName('google.firestore.v1'), createEmptyInstance: create) - ..a<$core.List<$core.int>>(1, 'transaction', $pb.PbFieldType.OY) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'BeginTransactionResponse', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.firestore.v1'), createEmptyInstance: create) + ..a<$core.List<$core.int>>(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? 
'' : 'transaction', $pb.PbFieldType.OY) ..hasRequiredFields = false ; BeginTransactionResponse._() : super(); - factory BeginTransactionResponse() => create(); + factory BeginTransactionResponse({ + $core.List<$core.int> transaction, + }) { + final _result = create(); + if (transaction != null) { + _result.transaction = transaction; + } + return _result; + } factory BeginTransactionResponse.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory BeginTransactionResponse.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') BeginTransactionResponse clone() => BeginTransactionResponse()..mergeFromMessage(this); - BeginTransactionResponse copyWith(void Function(BeginTransactionResponse) updates) => super.copyWith((message) => updates(message as BeginTransactionResponse)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + BeginTransactionResponse copyWith(void Function(BeginTransactionResponse) updates) => super.copyWith((message) => updates(message as BeginTransactionResponse)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static BeginTransactionResponse create() => BeginTransactionResponse._(); @@ -705,19 +981,43 @@ class BeginTransactionResponse extends $pb.GeneratedMessage { } class CommitRequest extends $pb.GeneratedMessage { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('CommitRequest', package: const $pb.PackageName('google.firestore.v1'), createEmptyInstance: create) - ..aOS(1, 'database') - ..pc<$8.Write>(2, 'writes', $pb.PbFieldType.PM, subBuilder: $8.Write.create) - ..a<$core.List<$core.int>>(3, 'transaction', $pb.PbFieldType.OY) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'CommitRequest', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.firestore.v1'), createEmptyInstance: create) + ..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'database') + ..pc<$10.Write>(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'writes', $pb.PbFieldType.PM, subBuilder: $10.Write.create) + ..a<$core.List<$core.int>>(3, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? 
'' : 'transaction', $pb.PbFieldType.OY) ..hasRequiredFields = false ; CommitRequest._() : super(); - factory CommitRequest() => create(); + factory CommitRequest({ + $core.String database, + $core.Iterable<$10.Write> writes, + $core.List<$core.int> transaction, + }) { + final _result = create(); + if (database != null) { + _result.database = database; + } + if (writes != null) { + _result.writes.addAll(writes); + } + if (transaction != null) { + _result.transaction = transaction; + } + return _result; + } factory CommitRequest.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory CommitRequest.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') CommitRequest clone() => CommitRequest()..mergeFromMessage(this); - CommitRequest copyWith(void Function(CommitRequest) updates) => super.copyWith((message) => updates(message as CommitRequest)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + CommitRequest copyWith(void Function(CommitRequest) updates) => super.copyWith((message) => updates(message as CommitRequest)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static CommitRequest create() => CommitRequest._(); @@ -737,7 +1037,7 @@ class CommitRequest extends $pb.GeneratedMessage { void clearDatabase() => clearField(1); @$pb.TagNumber(2) - $core.List<$8.Write> get writes => $_getList(1); + $core.List<$10.Write> get writes => $_getList(1); @$pb.TagNumber(3) $core.List<$core.int> get transaction => $_getN(2); @@ -750,18 +1050,38 @@ class CommitRequest extends $pb.GeneratedMessage { } class CommitResponse extends $pb.GeneratedMessage { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('CommitResponse', package: const $pb.PackageName('google.firestore.v1'), createEmptyInstance: create) - ..pc<$8.WriteResult>(1, 'writeResults', $pb.PbFieldType.PM, subBuilder: $8.WriteResult.create) - ..aOM<$4.Timestamp>(2, 'commitTime', subBuilder: $4.Timestamp.create) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'CommitResponse', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.firestore.v1'), createEmptyInstance: create) + ..pc<$10.WriteResult>(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'writeResults', $pb.PbFieldType.PM, subBuilder: $10.WriteResult.create) + ..aOM<$4.Timestamp>(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? 
'' : 'commitTime', subBuilder: $4.Timestamp.create) ..hasRequiredFields = false ; CommitResponse._() : super(); - factory CommitResponse() => create(); + factory CommitResponse({ + $core.Iterable<$10.WriteResult> writeResults, + $4.Timestamp commitTime, + }) { + final _result = create(); + if (writeResults != null) { + _result.writeResults.addAll(writeResults); + } + if (commitTime != null) { + _result.commitTime = commitTime; + } + return _result; + } factory CommitResponse.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory CommitResponse.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') CommitResponse clone() => CommitResponse()..mergeFromMessage(this); - CommitResponse copyWith(void Function(CommitResponse) updates) => super.copyWith((message) => updates(message as CommitResponse)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + CommitResponse copyWith(void Function(CommitResponse) updates) => super.copyWith((message) => updates(message as CommitResponse)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static CommitResponse create() => CommitResponse._(); @@ -772,7 +1092,7 @@ class CommitResponse extends $pb.GeneratedMessage { static CommitResponse _defaultInstance; @$pb.TagNumber(1) - $core.List<$8.WriteResult> get writeResults => $_getList(0); + $core.List<$10.WriteResult> get writeResults => $_getList(0); @$pb.TagNumber(2) $4.Timestamp get commitTime => $_getN(1); @@ -787,18 +1107,38 @@ class CommitResponse extends $pb.GeneratedMessage { } class RollbackRequest extends $pb.GeneratedMessage { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('RollbackRequest', package: const $pb.PackageName('google.firestore.v1'), createEmptyInstance: create) - ..aOS(1, 'database') - ..a<$core.List<$core.int>>(2, 'transaction', $pb.PbFieldType.OY) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'RollbackRequest', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.firestore.v1'), createEmptyInstance: create) + ..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'database') + ..a<$core.List<$core.int>>(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? 
'' : 'transaction', $pb.PbFieldType.OY) ..hasRequiredFields = false ; RollbackRequest._() : super(); - factory RollbackRequest() => create(); + factory RollbackRequest({ + $core.String database, + $core.List<$core.int> transaction, + }) { + final _result = create(); + if (database != null) { + _result.database = database; + } + if (transaction != null) { + _result.transaction = transaction; + } + return _result; + } factory RollbackRequest.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory RollbackRequest.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') RollbackRequest clone() => RollbackRequest()..mergeFromMessage(this); - RollbackRequest copyWith(void Function(RollbackRequest) updates) => super.copyWith((message) => updates(message as RollbackRequest)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + RollbackRequest copyWith(void Function(RollbackRequest) updates) => super.copyWith((message) => updates(message as RollbackRequest)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static RollbackRequest create() => RollbackRequest._(); @@ -850,23 +1190,55 @@ class RunQueryRequest extends $pb.GeneratedMessage { 7 : RunQueryRequest_ConsistencySelector.readTime, 0 : RunQueryRequest_ConsistencySelector.notSet }; - static final $pb.BuilderInfo _i = $pb.BuilderInfo('RunQueryRequest', package: const $pb.PackageName('google.firestore.v1'), createEmptyInstance: create) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'RunQueryRequest', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.firestore.v1'), createEmptyInstance: create) ..oo(0, [2]) ..oo(1, [5, 6, 7]) - ..aOS(1, 'parent') - ..aOM<$11.StructuredQuery>(2, 'structuredQuery', subBuilder: $11.StructuredQuery.create) - ..a<$core.List<$core.int>>(5, 'transaction', $pb.PbFieldType.OY) - ..aOM<$7.TransactionOptions>(6, 'newTransaction', subBuilder: $7.TransactionOptions.create) - ..aOM<$4.Timestamp>(7, 'readTime', subBuilder: $4.Timestamp.create) + ..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'parent') + ..aOM<$8.StructuredQuery>(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'structuredQuery', subBuilder: $8.StructuredQuery.create) + ..a<$core.List<$core.int>>(5, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'transaction', $pb.PbFieldType.OY) + ..aOM<$9.TransactionOptions>(6, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'newTransaction', subBuilder: $9.TransactionOptions.create) + ..aOM<$4.Timestamp>(7, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? 
'' : 'readTime', subBuilder: $4.Timestamp.create) ..hasRequiredFields = false ; RunQueryRequest._() : super(); - factory RunQueryRequest() => create(); + factory RunQueryRequest({ + $core.String parent, + $8.StructuredQuery structuredQuery, + $core.List<$core.int> transaction, + $9.TransactionOptions newTransaction, + $4.Timestamp readTime, + }) { + final _result = create(); + if (parent != null) { + _result.parent = parent; + } + if (structuredQuery != null) { + _result.structuredQuery = structuredQuery; + } + if (transaction != null) { + _result.transaction = transaction; + } + if (newTransaction != null) { + _result.newTransaction = newTransaction; + } + if (readTime != null) { + _result.readTime = readTime; + } + return _result; + } factory RunQueryRequest.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory RunQueryRequest.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') RunQueryRequest clone() => RunQueryRequest()..mergeFromMessage(this); - RunQueryRequest copyWith(void Function(RunQueryRequest) updates) => super.copyWith((message) => updates(message as RunQueryRequest)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + RunQueryRequest copyWith(void Function(RunQueryRequest) updates) => super.copyWith((message) => updates(message as RunQueryRequest)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static RunQueryRequest create() => RunQueryRequest._(); @@ -892,15 +1264,15 @@ class RunQueryRequest extends $pb.GeneratedMessage { void clearParent() => clearField(1); @$pb.TagNumber(2) - $11.StructuredQuery get structuredQuery => $_getN(1); + $8.StructuredQuery get structuredQuery => $_getN(1); @$pb.TagNumber(2) - set structuredQuery($11.StructuredQuery v) { setField(2, v); } + set structuredQuery($8.StructuredQuery v) { setField(2, v); } @$pb.TagNumber(2) $core.bool hasStructuredQuery() => $_has(1); @$pb.TagNumber(2) void clearStructuredQuery() => clearField(2); @$pb.TagNumber(2) - $11.StructuredQuery ensureStructuredQuery() => $_ensure(1); + $8.StructuredQuery ensureStructuredQuery() => $_ensure(1); @$pb.TagNumber(5) $core.List<$core.int> get transaction => $_getN(2); @@ -912,15 +1284,15 @@ class RunQueryRequest extends $pb.GeneratedMessage { void clearTransaction() => clearField(5); @$pb.TagNumber(6) - $7.TransactionOptions get newTransaction => $_getN(3); + $9.TransactionOptions get newTransaction => $_getN(3); @$pb.TagNumber(6) - set newTransaction($7.TransactionOptions v) { setField(6, v); } + set newTransaction($9.TransactionOptions v) { setField(6, v); } @$pb.TagNumber(6) $core.bool hasNewTransaction() => $_has(3); @$pb.TagNumber(6) void clearNewTransaction() => clearField(6); @$pb.TagNumber(6) - $7.TransactionOptions ensureNewTransaction() => $_ensure(3); + $9.TransactionOptions ensureNewTransaction() => $_ensure(3); @$pb.TagNumber(7) $4.Timestamp get readTime => $_getN(4); @@ -935,20 +1307,48 @@ class RunQueryRequest extends $pb.GeneratedMessage { } class RunQueryResponse extends $pb.GeneratedMessage { - static final $pb.BuilderInfo _i = 
$pb.BuilderInfo('RunQueryResponse', package: const $pb.PackageName('google.firestore.v1'), createEmptyInstance: create) - ..aOM<$1.Document>(1, 'document', subBuilder: $1.Document.create) - ..a<$core.List<$core.int>>(2, 'transaction', $pb.PbFieldType.OY) - ..aOM<$4.Timestamp>(3, 'readTime', subBuilder: $4.Timestamp.create) - ..a<$core.int>(4, 'skippedResults', $pb.PbFieldType.O3) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'RunQueryResponse', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.firestore.v1'), createEmptyInstance: create) + ..aOM<$1.Document>(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'document', subBuilder: $1.Document.create) + ..a<$core.List<$core.int>>(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'transaction', $pb.PbFieldType.OY) + ..aOM<$4.Timestamp>(3, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'readTime', subBuilder: $4.Timestamp.create) + ..a<$core.int>(4, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'skippedResults', $pb.PbFieldType.O3) ..hasRequiredFields = false ; RunQueryResponse._() : super(); - factory RunQueryResponse() => create(); + factory RunQueryResponse({ + $1.Document document, + $core.List<$core.int> transaction, + $4.Timestamp readTime, + $core.int skippedResults, + }) { + final _result = create(); + if (document != null) { + _result.document = document; + } + if (transaction != null) { + _result.transaction = transaction; + } + if (readTime != null) { + _result.readTime = readTime; + } + if (skippedResults != null) { + _result.skippedResults = skippedResults; + } + return _result; + } factory RunQueryResponse.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory RunQueryResponse.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') RunQueryResponse clone() => RunQueryResponse()..mergeFromMessage(this); - RunQueryResponse copyWith(void Function(RunQueryResponse) updates) => super.copyWith((message) => updates(message as RunQueryResponse)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. 
' + 'Will be removed in next major version') + RunQueryResponse copyWith(void Function(RunQueryResponse) updates) => super.copyWith((message) => updates(message as RunQueryResponse)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static RunQueryResponse create() => RunQueryResponse._(); @@ -1000,21 +1400,53 @@ class RunQueryResponse extends $pb.GeneratedMessage { } class WriteRequest extends $pb.GeneratedMessage { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('WriteRequest', package: const $pb.PackageName('google.firestore.v1'), createEmptyInstance: create) - ..aOS(1, 'database') - ..aOS(2, 'streamId') - ..pc<$8.Write>(3, 'writes', $pb.PbFieldType.PM, subBuilder: $8.Write.create) - ..a<$core.List<$core.int>>(4, 'streamToken', $pb.PbFieldType.OY) - ..m<$core.String, $core.String>(5, 'labels', entryClassName: 'WriteRequest.LabelsEntry', keyFieldType: $pb.PbFieldType.OS, valueFieldType: $pb.PbFieldType.OS, packageName: const $pb.PackageName('google.firestore.v1')) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'WriteRequest', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.firestore.v1'), createEmptyInstance: create) + ..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'database') + ..aOS(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'streamId') + ..pc<$10.Write>(3, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'writes', $pb.PbFieldType.PM, subBuilder: $10.Write.create) + ..a<$core.List<$core.int>>(4, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'streamToken', $pb.PbFieldType.OY) + ..m<$core.String, $core.String>(5, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'labels', entryClassName: 'WriteRequest.LabelsEntry', keyFieldType: $pb.PbFieldType.OS, valueFieldType: $pb.PbFieldType.OS, packageName: const $pb.PackageName('google.firestore.v1')) ..hasRequiredFields = false ; WriteRequest._() : super(); - factory WriteRequest() => create(); + factory WriteRequest({ + $core.String database, + $core.String streamId, + $core.Iterable<$10.Write> writes, + $core.List<$core.int> streamToken, + $core.Map<$core.String, $core.String> labels, + }) { + final _result = create(); + if (database != null) { + _result.database = database; + } + if (streamId != null) { + _result.streamId = streamId; + } + if (writes != null) { + _result.writes.addAll(writes); + } + if (streamToken != null) { + _result.streamToken = streamToken; + } + if (labels != null) { + _result.labels.addAll(labels); + } + return _result; + } factory WriteRequest.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory WriteRequest.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') WriteRequest clone() => WriteRequest()..mergeFromMessage(this); - WriteRequest copyWith(void Function(WriteRequest) updates) => super.copyWith((message) => updates(message as WriteRequest)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. 
' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + WriteRequest copyWith(void Function(WriteRequest) updates) => super.copyWith((message) => updates(message as WriteRequest)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static WriteRequest create() => WriteRequest._(); @@ -1043,7 +1475,7 @@ class WriteRequest extends $pb.GeneratedMessage { void clearStreamId() => clearField(2); @$pb.TagNumber(3) - $core.List<$8.Write> get writes => $_getList(2); + $core.List<$10.Write> get writes => $_getList(2); @$pb.TagNumber(4) $core.List<$core.int> get streamToken => $_getN(3); @@ -1059,20 +1491,48 @@ class WriteRequest extends $pb.GeneratedMessage { } class WriteResponse extends $pb.GeneratedMessage { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('WriteResponse', package: const $pb.PackageName('google.firestore.v1'), createEmptyInstance: create) - ..aOS(1, 'streamId') - ..a<$core.List<$core.int>>(2, 'streamToken', $pb.PbFieldType.OY) - ..pc<$8.WriteResult>(3, 'writeResults', $pb.PbFieldType.PM, subBuilder: $8.WriteResult.create) - ..aOM<$4.Timestamp>(4, 'commitTime', subBuilder: $4.Timestamp.create) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'WriteResponse', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.firestore.v1'), createEmptyInstance: create) + ..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'streamId') + ..a<$core.List<$core.int>>(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'streamToken', $pb.PbFieldType.OY) + ..pc<$10.WriteResult>(3, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'writeResults', $pb.PbFieldType.PM, subBuilder: $10.WriteResult.create) + ..aOM<$4.Timestamp>(4, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'commitTime', subBuilder: $4.Timestamp.create) ..hasRequiredFields = false ; WriteResponse._() : super(); - factory WriteResponse() => create(); + factory WriteResponse({ + $core.String streamId, + $core.List<$core.int> streamToken, + $core.Iterable<$10.WriteResult> writeResults, + $4.Timestamp commitTime, + }) { + final _result = create(); + if (streamId != null) { + _result.streamId = streamId; + } + if (streamToken != null) { + _result.streamToken = streamToken; + } + if (writeResults != null) { + _result.writeResults.addAll(writeResults); + } + if (commitTime != null) { + _result.commitTime = commitTime; + } + return _result; + } factory WriteResponse.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory WriteResponse.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') WriteResponse clone() => WriteResponse()..mergeFromMessage(this); - WriteResponse copyWith(void Function(WriteResponse) updates) => super.copyWith((message) => updates(message as WriteResponse)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. 
' + 'Will be removed in next major version') + WriteResponse copyWith(void Function(WriteResponse) updates) => super.copyWith((message) => updates(message as WriteResponse)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static WriteResponse create() => WriteResponse._(); @@ -1101,7 +1561,7 @@ class WriteResponse extends $pb.GeneratedMessage { void clearStreamToken() => clearField(2); @$pb.TagNumber(3) - $core.List<$8.WriteResult> get writeResults => $_getList(2); + $core.List<$10.WriteResult> get writeResults => $_getList(2); @$pb.TagNumber(4) $4.Timestamp get commitTime => $_getN(3); @@ -1127,21 +1587,49 @@ class ListenRequest extends $pb.GeneratedMessage { 3 : ListenRequest_TargetChange.removeTarget, 0 : ListenRequest_TargetChange.notSet }; - static final $pb.BuilderInfo _i = $pb.BuilderInfo('ListenRequest', package: const $pb.PackageName('google.firestore.v1'), createEmptyInstance: create) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'ListenRequest', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.firestore.v1'), createEmptyInstance: create) ..oo(0, [2, 3]) - ..aOS(1, 'database') - ..aOM(2, 'addTarget', subBuilder: Target.create) - ..a<$core.int>(3, 'removeTarget', $pb.PbFieldType.O3) - ..m<$core.String, $core.String>(4, 'labels', entryClassName: 'ListenRequest.LabelsEntry', keyFieldType: $pb.PbFieldType.OS, valueFieldType: $pb.PbFieldType.OS, packageName: const $pb.PackageName('google.firestore.v1')) + ..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'database') + ..aOM(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'addTarget', subBuilder: Target.create) + ..a<$core.int>(3, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'removeTarget', $pb.PbFieldType.O3) + ..m<$core.String, $core.String>(4, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'labels', entryClassName: 'ListenRequest.LabelsEntry', keyFieldType: $pb.PbFieldType.OS, valueFieldType: $pb.PbFieldType.OS, packageName: const $pb.PackageName('google.firestore.v1')) ..hasRequiredFields = false ; ListenRequest._() : super(); - factory ListenRequest() => create(); + factory ListenRequest({ + $core.String database, + Target addTarget, + $core.int removeTarget, + $core.Map<$core.String, $core.String> labels, + }) { + final _result = create(); + if (database != null) { + _result.database = database; + } + if (addTarget != null) { + _result.addTarget = addTarget; + } + if (removeTarget != null) { + _result.removeTarget = removeTarget; + } + if (labels != null) { + _result.labels.addAll(labels); + } + return _result; + } factory ListenRequest.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory ListenRequest.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. 
' + 'Will be removed in next major version') ListenRequest clone() => ListenRequest()..mergeFromMessage(this); - ListenRequest copyWith(void Function(ListenRequest) updates) => super.copyWith((message) => updates(message as ListenRequest)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + ListenRequest copyWith(void Function(ListenRequest) updates) => super.copyWith((message) => updates(message as ListenRequest)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static ListenRequest create() => ListenRequest._(); @@ -1205,22 +1693,54 @@ class ListenResponse extends $pb.GeneratedMessage { 6 : ListenResponse_ResponseType.documentRemove, 0 : ListenResponse_ResponseType.notSet }; - static final $pb.BuilderInfo _i = $pb.BuilderInfo('ListenResponse', package: const $pb.PackageName('google.firestore.v1'), createEmptyInstance: create) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'ListenResponse', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.firestore.v1'), createEmptyInstance: create) ..oo(0, [2, 3, 4, 5, 6]) - ..aOM(2, 'targetChange', subBuilder: TargetChange.create) - ..aOM<$8.DocumentChange>(3, 'documentChange', subBuilder: $8.DocumentChange.create) - ..aOM<$8.DocumentDelete>(4, 'documentDelete', subBuilder: $8.DocumentDelete.create) - ..aOM<$8.ExistenceFilter>(5, 'filter', subBuilder: $8.ExistenceFilter.create) - ..aOM<$8.DocumentRemove>(6, 'documentRemove', subBuilder: $8.DocumentRemove.create) + ..aOM(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'targetChange', subBuilder: TargetChange.create) + ..aOM<$10.DocumentChange>(3, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'documentChange', subBuilder: $10.DocumentChange.create) + ..aOM<$10.DocumentDelete>(4, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'documentDelete', subBuilder: $10.DocumentDelete.create) + ..aOM<$10.ExistenceFilter>(5, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'filter', subBuilder: $10.ExistenceFilter.create) + ..aOM<$10.DocumentRemove>(6, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? 
'' : 'documentRemove', subBuilder: $10.DocumentRemove.create) ..hasRequiredFields = false ; ListenResponse._() : super(); - factory ListenResponse() => create(); + factory ListenResponse({ + TargetChange targetChange, + $10.DocumentChange documentChange, + $10.DocumentDelete documentDelete, + $10.ExistenceFilter filter, + $10.DocumentRemove documentRemove, + }) { + final _result = create(); + if (targetChange != null) { + _result.targetChange = targetChange; + } + if (documentChange != null) { + _result.documentChange = documentChange; + } + if (documentDelete != null) { + _result.documentDelete = documentDelete; + } + if (filter != null) { + _result.filter = filter; + } + if (documentRemove != null) { + _result.documentRemove = documentRemove; + } + return _result; + } factory ListenResponse.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory ListenResponse.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') ListenResponse clone() => ListenResponse()..mergeFromMessage(this); - ListenResponse copyWith(void Function(ListenResponse) updates) => super.copyWith((message) => updates(message as ListenResponse)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + ListenResponse copyWith(void Function(ListenResponse) updates) => super.copyWith((message) => updates(message as ListenResponse)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static ListenResponse create() => ListenResponse._(); @@ -1245,62 +1765,78 @@ class ListenResponse extends $pb.GeneratedMessage { TargetChange ensureTargetChange() => $_ensure(0); @$pb.TagNumber(3) - $8.DocumentChange get documentChange => $_getN(1); + $10.DocumentChange get documentChange => $_getN(1); @$pb.TagNumber(3) - set documentChange($8.DocumentChange v) { setField(3, v); } + set documentChange($10.DocumentChange v) { setField(3, v); } @$pb.TagNumber(3) $core.bool hasDocumentChange() => $_has(1); @$pb.TagNumber(3) void clearDocumentChange() => clearField(3); @$pb.TagNumber(3) - $8.DocumentChange ensureDocumentChange() => $_ensure(1); + $10.DocumentChange ensureDocumentChange() => $_ensure(1); @$pb.TagNumber(4) - $8.DocumentDelete get documentDelete => $_getN(2); + $10.DocumentDelete get documentDelete => $_getN(2); @$pb.TagNumber(4) - set documentDelete($8.DocumentDelete v) { setField(4, v); } + set documentDelete($10.DocumentDelete v) { setField(4, v); } @$pb.TagNumber(4) $core.bool hasDocumentDelete() => $_has(2); @$pb.TagNumber(4) void clearDocumentDelete() => clearField(4); @$pb.TagNumber(4) - $8.DocumentDelete ensureDocumentDelete() => $_ensure(2); + $10.DocumentDelete ensureDocumentDelete() => $_ensure(2); @$pb.TagNumber(5) - $8.ExistenceFilter get filter => $_getN(3); + $10.ExistenceFilter get filter => $_getN(3); @$pb.TagNumber(5) - set filter($8.ExistenceFilter v) { setField(5, v); } + set filter($10.ExistenceFilter v) { setField(5, v); } @$pb.TagNumber(5) $core.bool hasFilter() => $_has(3); @$pb.TagNumber(5) void clearFilter() => clearField(5); @$pb.TagNumber(5) - $8.ExistenceFilter ensureFilter() => $_ensure(3); + 
$10.ExistenceFilter ensureFilter() => $_ensure(3); @$pb.TagNumber(6) - $8.DocumentRemove get documentRemove => $_getN(4); + $10.DocumentRemove get documentRemove => $_getN(4); @$pb.TagNumber(6) - set documentRemove($8.DocumentRemove v) { setField(6, v); } + set documentRemove($10.DocumentRemove v) { setField(6, v); } @$pb.TagNumber(6) $core.bool hasDocumentRemove() => $_has(4); @$pb.TagNumber(6) void clearDocumentRemove() => clearField(6); @$pb.TagNumber(6) - $8.DocumentRemove ensureDocumentRemove() => $_ensure(4); + $10.DocumentRemove ensureDocumentRemove() => $_ensure(4); } class Target_DocumentsTarget extends $pb.GeneratedMessage { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('Target.DocumentsTarget', package: const $pb.PackageName('google.firestore.v1'), createEmptyInstance: create) - ..pPS(2, 'documents') + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'Target.DocumentsTarget', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.firestore.v1'), createEmptyInstance: create) + ..pPS(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'documents') ..hasRequiredFields = false ; Target_DocumentsTarget._() : super(); - factory Target_DocumentsTarget() => create(); + factory Target_DocumentsTarget({ + $core.Iterable<$core.String> documents, + }) { + final _result = create(); + if (documents != null) { + _result.documents.addAll(documents); + } + return _result; + } factory Target_DocumentsTarget.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory Target_DocumentsTarget.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') Target_DocumentsTarget clone() => Target_DocumentsTarget()..mergeFromMessage(this); - Target_DocumentsTarget copyWith(void Function(Target_DocumentsTarget) updates) => super.copyWith((message) => updates(message as Target_DocumentsTarget)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + Target_DocumentsTarget copyWith(void Function(Target_DocumentsTarget) updates) => super.copyWith((message) => updates(message as Target_DocumentsTarget)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static Target_DocumentsTarget create() => Target_DocumentsTarget._(); @@ -1324,19 +1860,39 @@ class Target_QueryTarget extends $pb.GeneratedMessage { 2 : Target_QueryTarget_QueryType.structuredQuery, 0 : Target_QueryTarget_QueryType.notSet }; - static final $pb.BuilderInfo _i = $pb.BuilderInfo('Target.QueryTarget', package: const $pb.PackageName('google.firestore.v1'), createEmptyInstance: create) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'Target.QueryTarget', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? 
'' : 'google.firestore.v1'), createEmptyInstance: create) ..oo(0, [2]) - ..aOS(1, 'parent') - ..aOM<$11.StructuredQuery>(2, 'structuredQuery', subBuilder: $11.StructuredQuery.create) + ..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'parent') + ..aOM<$8.StructuredQuery>(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'structuredQuery', subBuilder: $8.StructuredQuery.create) ..hasRequiredFields = false ; Target_QueryTarget._() : super(); - factory Target_QueryTarget() => create(); + factory Target_QueryTarget({ + $core.String parent, + $8.StructuredQuery structuredQuery, + }) { + final _result = create(); + if (parent != null) { + _result.parent = parent; + } + if (structuredQuery != null) { + _result.structuredQuery = structuredQuery; + } + return _result; + } factory Target_QueryTarget.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory Target_QueryTarget.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') Target_QueryTarget clone() => Target_QueryTarget()..mergeFromMessage(this); - Target_QueryTarget copyWith(void Function(Target_QueryTarget) updates) => super.copyWith((message) => updates(message as Target_QueryTarget)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + Target_QueryTarget copyWith(void Function(Target_QueryTarget) updates) => super.copyWith((message) => updates(message as Target_QueryTarget)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static Target_QueryTarget create() => Target_QueryTarget._(); @@ -1359,15 +1915,15 @@ class Target_QueryTarget extends $pb.GeneratedMessage { void clearParent() => clearField(1); @$pb.TagNumber(2) - $11.StructuredQuery get structuredQuery => $_getN(1); + $8.StructuredQuery get structuredQuery => $_getN(1); @$pb.TagNumber(2) - set structuredQuery($11.StructuredQuery v) { setField(2, v); } + set structuredQuery($8.StructuredQuery v) { setField(2, v); } @$pb.TagNumber(2) $core.bool hasStructuredQuery() => $_has(1); @$pb.TagNumber(2) void clearStructuredQuery() => clearField(2); @$pb.TagNumber(2) - $11.StructuredQuery ensureStructuredQuery() => $_ensure(1); + $8.StructuredQuery ensureStructuredQuery() => $_ensure(1); } enum Target_TargetType { @@ -1393,24 +1949,60 @@ class Target extends $pb.GeneratedMessage { 11 : Target_ResumeType.readTime, 0 : Target_ResumeType.notSet }; - static final $pb.BuilderInfo _i = $pb.BuilderInfo('Target', package: const $pb.PackageName('google.firestore.v1'), createEmptyInstance: create) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'Target', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? 
'' : 'google.firestore.v1'), createEmptyInstance: create) ..oo(0, [2, 3]) ..oo(1, [4, 11]) - ..aOM(2, 'query', subBuilder: Target_QueryTarget.create) - ..aOM(3, 'documents', subBuilder: Target_DocumentsTarget.create) - ..a<$core.List<$core.int>>(4, 'resumeToken', $pb.PbFieldType.OY) - ..a<$core.int>(5, 'targetId', $pb.PbFieldType.O3) - ..aOB(6, 'once') - ..aOM<$4.Timestamp>(11, 'readTime', subBuilder: $4.Timestamp.create) + ..aOM(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'query', subBuilder: Target_QueryTarget.create) + ..aOM(3, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'documents', subBuilder: Target_DocumentsTarget.create) + ..a<$core.List<$core.int>>(4, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'resumeToken', $pb.PbFieldType.OY) + ..a<$core.int>(5, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'targetId', $pb.PbFieldType.O3) + ..aOB(6, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'once') + ..aOM<$4.Timestamp>(11, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'readTime', subBuilder: $4.Timestamp.create) ..hasRequiredFields = false ; Target._() : super(); - factory Target() => create(); + factory Target({ + Target_QueryTarget query, + Target_DocumentsTarget documents, + $core.List<$core.int> resumeToken, + $core.int targetId, + $core.bool once, + $4.Timestamp readTime, + }) { + final _result = create(); + if (query != null) { + _result.query = query; + } + if (documents != null) { + _result.documents = documents; + } + if (resumeToken != null) { + _result.resumeToken = resumeToken; + } + if (targetId != null) { + _result.targetId = targetId; + } + if (once != null) { + _result.once = once; + } + if (readTime != null) { + _result.readTime = readTime; + } + return _result; + } factory Target.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory Target.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') Target clone() => Target()..mergeFromMessage(this); - Target copyWith(void Function(Target) updates) => super.copyWith((message) => updates(message as Target)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. 
' + 'Will be removed in next major version') + Target copyWith(void Function(Target) updates) => super.copyWith((message) => updates(message as Target)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static Target create() => Target._(); @@ -1488,21 +2080,53 @@ class Target extends $pb.GeneratedMessage { } class TargetChange extends $pb.GeneratedMessage { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('TargetChange', package: const $pb.PackageName('google.firestore.v1'), createEmptyInstance: create) - ..e(1, 'targetChangeType', $pb.PbFieldType.OE, defaultOrMaker: TargetChange_TargetChangeType.NO_CHANGE, valueOf: TargetChange_TargetChangeType.valueOf, enumValues: TargetChange_TargetChangeType.values) - ..p<$core.int>(2, 'targetIds', $pb.PbFieldType.P3) - ..aOM<$12.Status>(3, 'cause', subBuilder: $12.Status.create) - ..a<$core.List<$core.int>>(4, 'resumeToken', $pb.PbFieldType.OY) - ..aOM<$4.Timestamp>(6, 'readTime', subBuilder: $4.Timestamp.create) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'TargetChange', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.firestore.v1'), createEmptyInstance: create) + ..e(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'targetChangeType', $pb.PbFieldType.OE, defaultOrMaker: TargetChange_TargetChangeType.NO_CHANGE, valueOf: TargetChange_TargetChangeType.valueOf, enumValues: TargetChange_TargetChangeType.values) + ..p<$core.int>(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'targetIds', $pb.PbFieldType.P3) + ..aOM<$12.Status>(3, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'cause', subBuilder: $12.Status.create) + ..a<$core.List<$core.int>>(4, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'resumeToken', $pb.PbFieldType.OY) + ..aOM<$4.Timestamp>(6, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'readTime', subBuilder: $4.Timestamp.create) ..hasRequiredFields = false ; TargetChange._() : super(); - factory TargetChange() => create(); + factory TargetChange({ + TargetChange_TargetChangeType targetChangeType, + $core.Iterable<$core.int> targetIds, + $12.Status cause, + $core.List<$core.int> resumeToken, + $4.Timestamp readTime, + }) { + final _result = create(); + if (targetChangeType != null) { + _result.targetChangeType = targetChangeType; + } + if (targetIds != null) { + _result.targetIds.addAll(targetIds); + } + if (cause != null) { + _result.cause = cause; + } + if (resumeToken != null) { + _result.resumeToken = resumeToken; + } + if (readTime != null) { + _result.readTime = readTime; + } + return _result; + } factory TargetChange.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory TargetChange.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') TargetChange clone() => TargetChange()..mergeFromMessage(this); - TargetChange copyWith(void Function(TargetChange) updates) => super.copyWith((message) => updates(message as TargetChange)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. 
' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + TargetChange copyWith(void Function(TargetChange) updates) => super.copyWith((message) => updates(message as TargetChange)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static TargetChange create() => TargetChange._(); @@ -1557,19 +2181,43 @@ class TargetChange extends $pb.GeneratedMessage { } class ListCollectionIdsRequest extends $pb.GeneratedMessage { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('ListCollectionIdsRequest', package: const $pb.PackageName('google.firestore.v1'), createEmptyInstance: create) - ..aOS(1, 'parent') - ..a<$core.int>(2, 'pageSize', $pb.PbFieldType.O3) - ..aOS(3, 'pageToken') + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'ListCollectionIdsRequest', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.firestore.v1'), createEmptyInstance: create) + ..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'parent') + ..a<$core.int>(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'pageSize', $pb.PbFieldType.O3) + ..aOS(3, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'pageToken') ..hasRequiredFields = false ; ListCollectionIdsRequest._() : super(); - factory ListCollectionIdsRequest() => create(); + factory ListCollectionIdsRequest({ + $core.String parent, + $core.int pageSize, + $core.String pageToken, + }) { + final _result = create(); + if (parent != null) { + _result.parent = parent; + } + if (pageSize != null) { + _result.pageSize = pageSize; + } + if (pageToken != null) { + _result.pageToken = pageToken; + } + return _result; + } factory ListCollectionIdsRequest.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory ListCollectionIdsRequest.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') ListCollectionIdsRequest clone() => ListCollectionIdsRequest()..mergeFromMessage(this); - ListCollectionIdsRequest copyWith(void Function(ListCollectionIdsRequest) updates) => super.copyWith((message) => updates(message as ListCollectionIdsRequest)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. 
' + 'Will be removed in next major version') + ListCollectionIdsRequest copyWith(void Function(ListCollectionIdsRequest) updates) => super.copyWith((message) => updates(message as ListCollectionIdsRequest)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static ListCollectionIdsRequest create() => ListCollectionIdsRequest._(); @@ -1608,18 +2256,38 @@ class ListCollectionIdsRequest extends $pb.GeneratedMessage { } class ListCollectionIdsResponse extends $pb.GeneratedMessage { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('ListCollectionIdsResponse', package: const $pb.PackageName('google.firestore.v1'), createEmptyInstance: create) - ..pPS(1, 'collectionIds') - ..aOS(2, 'nextPageToken') + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'ListCollectionIdsResponse', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.firestore.v1'), createEmptyInstance: create) + ..pPS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'collectionIds') + ..aOS(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'nextPageToken') ..hasRequiredFields = false ; ListCollectionIdsResponse._() : super(); - factory ListCollectionIdsResponse() => create(); + factory ListCollectionIdsResponse({ + $core.Iterable<$core.String> collectionIds, + $core.String nextPageToken, + }) { + final _result = create(); + if (collectionIds != null) { + _result.collectionIds.addAll(collectionIds); + } + if (nextPageToken != null) { + _result.nextPageToken = nextPageToken; + } + return _result; + } factory ListCollectionIdsResponse.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory ListCollectionIdsResponse.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') ListCollectionIdsResponse clone() => ListCollectionIdsResponse()..mergeFromMessage(this); - ListCollectionIdsResponse copyWith(void Function(ListCollectionIdsResponse) updates) => super.copyWith((message) => updates(message as ListCollectionIdsResponse)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + ListCollectionIdsResponse copyWith(void Function(ListCollectionIdsResponse) updates) => super.copyWith((message) => updates(message as ListCollectionIdsResponse)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static ListCollectionIdsResponse create() => ListCollectionIdsResponse._(); diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/firestore.pbenum.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/firestore.pbenum.dart index 4110f204..56f9c31c 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/firestore.pbenum.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/firestore.pbenum.dart @@ -2,19 +2,19 @@ // Generated code. Do not modify. 
// source: google/firestore/v1/firestore.proto
//
-// @dart = 2.3
-// ignore_for_file: camel_case_types,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type
+// @dart = 2.7
+// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields

-// ignore_for_file: UNDEFINED_SHOWN_NAME,UNUSED_SHOWN_NAME
+// ignore_for_file: UNDEFINED_SHOWN_NAME

import 'dart:core' as $core;

import 'package:protobuf/protobuf.dart' as $pb;

class TargetChange_TargetChangeType extends $pb.ProtobufEnum {
-  static const TargetChange_TargetChangeType NO_CHANGE = TargetChange_TargetChangeType._(0, 'NO_CHANGE');
-  static const TargetChange_TargetChangeType ADD = TargetChange_TargetChangeType._(1, 'ADD');
-  static const TargetChange_TargetChangeType REMOVE = TargetChange_TargetChangeType._(2, 'REMOVE');
-  static const TargetChange_TargetChangeType CURRENT = TargetChange_TargetChangeType._(3, 'CURRENT');
-  static const TargetChange_TargetChangeType RESET = TargetChange_TargetChangeType._(4, 'RESET');
+  static const TargetChange_TargetChangeType NO_CHANGE = TargetChange_TargetChangeType._(0, const $core.bool.fromEnvironment('protobuf.omit_enum_names') ? '' : 'NO_CHANGE');
+  static const TargetChange_TargetChangeType ADD = TargetChange_TargetChangeType._(1, const $core.bool.fromEnvironment('protobuf.omit_enum_names') ? '' : 'ADD');
+  static const TargetChange_TargetChangeType REMOVE = TargetChange_TargetChangeType._(2, const $core.bool.fromEnvironment('protobuf.omit_enum_names') ? '' : 'REMOVE');
+  static const TargetChange_TargetChangeType CURRENT = TargetChange_TargetChangeType._(3, const $core.bool.fromEnvironment('protobuf.omit_enum_names') ? '' : 'CURRENT');
+  static const TargetChange_TargetChangeType RESET = TargetChange_TargetChangeType._(4, const $core.bool.fromEnvironment('protobuf.omit_enum_names') ? '' : 'RESET');

  static const $core.List values = [
    NO_CHANGE,
diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/firestore.pbgrpc.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/firestore.pbgrpc.dart
index 3fff14ed..cb22ed14 100644
--- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/firestore.pbgrpc.dart
+++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/firestore.pbgrpc.dart
@@ -2,8 +2,8 @@
// Generated code. Do not modify.
// source: google/firestore/v1/firestore.proto // -// @dart = 2.3 -// ignore_for_file: camel_case_types,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type +// @dart = 2.7 +// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields import 'dart:async' as $async; @@ -85,113 +85,87 @@ class FirestoreClient extends $grpc.Client { ($core.List<$core.int> value) => $0.ListCollectionIdsResponse.fromBuffer(value)); - FirestoreClient($grpc.ClientChannel channel, {$grpc.CallOptions options}) - : super(channel, options: options); + FirestoreClient($grpc.ClientChannel channel, + {$grpc.CallOptions options, + $core.Iterable<$grpc.ClientInterceptor> interceptors}) + : super(channel, options: options, interceptors: interceptors); $grpc.ResponseFuture<$1.Document> getDocument($0.GetDocumentRequest request, {$grpc.CallOptions options}) { - final call = $createCall( - _$getDocument, $async.Stream.fromIterable([request]), - options: options); - return $grpc.ResponseFuture(call); + return $createUnaryCall(_$getDocument, request, options: options); } $grpc.ResponseFuture<$0.ListDocumentsResponse> listDocuments( $0.ListDocumentsRequest request, {$grpc.CallOptions options}) { - final call = $createCall( - _$listDocuments, $async.Stream.fromIterable([request]), - options: options); - return $grpc.ResponseFuture(call); + return $createUnaryCall(_$listDocuments, request, options: options); } $grpc.ResponseFuture<$1.Document> createDocument( $0.CreateDocumentRequest request, {$grpc.CallOptions options}) { - final call = $createCall( - _$createDocument, $async.Stream.fromIterable([request]), - options: options); - return $grpc.ResponseFuture(call); + return $createUnaryCall(_$createDocument, request, options: options); } $grpc.ResponseFuture<$1.Document> updateDocument( $0.UpdateDocumentRequest request, {$grpc.CallOptions options}) { - final call = $createCall( - _$updateDocument, $async.Stream.fromIterable([request]), - options: options); - return $grpc.ResponseFuture(call); + return $createUnaryCall(_$updateDocument, request, options: options); } $grpc.ResponseFuture<$2.Empty> deleteDocument( $0.DeleteDocumentRequest request, {$grpc.CallOptions options}) { - final call = $createCall( - _$deleteDocument, $async.Stream.fromIterable([request]), - options: options); - return $grpc.ResponseFuture(call); + return $createUnaryCall(_$deleteDocument, request, options: options); } $grpc.ResponseStream<$0.BatchGetDocumentsResponse> batchGetDocuments( $0.BatchGetDocumentsRequest request, {$grpc.CallOptions options}) { - final call = $createCall( + return $createStreamingCall( _$batchGetDocuments, $async.Stream.fromIterable([request]), options: options); - return $grpc.ResponseStream(call); } $grpc.ResponseFuture<$0.BeginTransactionResponse> beginTransaction( $0.BeginTransactionRequest request, {$grpc.CallOptions options}) { - final call = $createCall( - _$beginTransaction, $async.Stream.fromIterable([request]), - options: options); - return $grpc.ResponseFuture(call); + return $createUnaryCall(_$beginTransaction, request, options: options); } $grpc.ResponseFuture<$0.CommitResponse> commit($0.CommitRequest request, {$grpc.CallOptions options}) { - final call = $createCall(_$commit, $async.Stream.fromIterable([request]), - options: options); - return $grpc.ResponseFuture(call); + return $createUnaryCall(_$commit, request, 
options: options); } $grpc.ResponseFuture<$2.Empty> rollback($0.RollbackRequest request, {$grpc.CallOptions options}) { - final call = $createCall(_$rollback, $async.Stream.fromIterable([request]), - options: options); - return $grpc.ResponseFuture(call); + return $createUnaryCall(_$rollback, request, options: options); } $grpc.ResponseStream<$0.RunQueryResponse> runQuery($0.RunQueryRequest request, {$grpc.CallOptions options}) { - final call = $createCall(_$runQuery, $async.Stream.fromIterable([request]), + return $createStreamingCall( + _$runQuery, $async.Stream.fromIterable([request]), options: options); - return $grpc.ResponseStream(call); } $grpc.ResponseStream<$0.WriteResponse> write( $async.Stream<$0.WriteRequest> request, {$grpc.CallOptions options}) { - final call = $createCall(_$write, request, options: options); - return $grpc.ResponseStream(call); + return $createStreamingCall(_$write, request, options: options); } $grpc.ResponseStream<$0.ListenResponse> listen( $async.Stream<$0.ListenRequest> request, {$grpc.CallOptions options}) { - final call = $createCall(_$listen, request, options: options); - return $grpc.ResponseStream(call); + return $createStreamingCall(_$listen, request, options: options); } $grpc.ResponseFuture<$0.ListCollectionIdsResponse> listCollectionIds( $0.ListCollectionIdsRequest request, {$grpc.CallOptions options}) { - final call = $createCall( - _$listCollectionIds, $async.Stream.fromIterable([request]), - options: options); - return $grpc.ResponseFuture(call); + return $createUnaryCall(_$listCollectionIds, request, options: options); } } diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/firestore.pbjson.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/firestore.pbjson.dart index 42773e18..0d424bd4 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/firestore.pbjson.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/firestore.pbjson.dart @@ -2,8 +2,8 @@ // Generated code. Do not modify. // source: google/firestore/v1/firestore.proto // -// @dart = 2.3 -// ignore_for_file: camel_case_types,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type +// @dart = 2.7 +// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields const GetDocumentRequest$json = const { '1': 'GetDocumentRequest', diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/query.pb.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/query.pb.dart index cba50e78..511d676b 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/query.pb.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/query.pb.dart @@ -2,14 +2,14 @@ // Generated code. Do not modify. 
// source: google/firestore/v1/query.proto // -// @dart = 2.3 -// ignore_for_file: camel_case_types,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type +// @dart = 2.7 +// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields import 'dart:core' as $core; import 'package:protobuf/protobuf.dart' as $pb; -import '../../protobuf/wrappers.pb.dart' as $9; +import '../../protobuf/wrappers.pb.dart' as $7; import 'document.pb.dart' as $1; import 'query.pbenum.dart'; @@ -17,18 +17,38 @@ import 'query.pbenum.dart'; export 'query.pbenum.dart'; class StructuredQuery_CollectionSelector extends $pb.GeneratedMessage { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('StructuredQuery.CollectionSelector', package: const $pb.PackageName('google.firestore.v1'), createEmptyInstance: create) - ..aOS(2, 'collectionId') - ..aOB(3, 'allDescendants') + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'StructuredQuery.CollectionSelector', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.firestore.v1'), createEmptyInstance: create) + ..aOS(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'collectionId') + ..aOB(3, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'allDescendants') ..hasRequiredFields = false ; StructuredQuery_CollectionSelector._() : super(); - factory StructuredQuery_CollectionSelector() => create(); + factory StructuredQuery_CollectionSelector({ + $core.String collectionId, + $core.bool allDescendants, + }) { + final _result = create(); + if (collectionId != null) { + _result.collectionId = collectionId; + } + if (allDescendants != null) { + _result.allDescendants = allDescendants; + } + return _result; + } factory StructuredQuery_CollectionSelector.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory StructuredQuery_CollectionSelector.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') StructuredQuery_CollectionSelector clone() => StructuredQuery_CollectionSelector()..mergeFromMessage(this); - StructuredQuery_CollectionSelector copyWith(void Function(StructuredQuery_CollectionSelector) updates) => super.copyWith((message) => updates(message as StructuredQuery_CollectionSelector)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. 
' + 'Will be removed in next major version') + StructuredQuery_CollectionSelector copyWith(void Function(StructuredQuery_CollectionSelector) updates) => super.copyWith((message) => updates(message as StructuredQuery_CollectionSelector)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static StructuredQuery_CollectionSelector create() => StructuredQuery_CollectionSelector._(); @@ -71,20 +91,44 @@ class StructuredQuery_Filter extends $pb.GeneratedMessage { 3 : StructuredQuery_Filter_FilterType.unaryFilter, 0 : StructuredQuery_Filter_FilterType.notSet }; - static final $pb.BuilderInfo _i = $pb.BuilderInfo('StructuredQuery.Filter', package: const $pb.PackageName('google.firestore.v1'), createEmptyInstance: create) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'StructuredQuery.Filter', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.firestore.v1'), createEmptyInstance: create) ..oo(0, [1, 2, 3]) - ..aOM(1, 'compositeFilter', subBuilder: StructuredQuery_CompositeFilter.create) - ..aOM(2, 'fieldFilter', subBuilder: StructuredQuery_FieldFilter.create) - ..aOM(3, 'unaryFilter', subBuilder: StructuredQuery_UnaryFilter.create) + ..aOM(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'compositeFilter', subBuilder: StructuredQuery_CompositeFilter.create) + ..aOM(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'fieldFilter', subBuilder: StructuredQuery_FieldFilter.create) + ..aOM(3, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'unaryFilter', subBuilder: StructuredQuery_UnaryFilter.create) ..hasRequiredFields = false ; StructuredQuery_Filter._() : super(); - factory StructuredQuery_Filter() => create(); + factory StructuredQuery_Filter({ + StructuredQuery_CompositeFilter compositeFilter, + StructuredQuery_FieldFilter fieldFilter, + StructuredQuery_UnaryFilter unaryFilter, + }) { + final _result = create(); + if (compositeFilter != null) { + _result.compositeFilter = compositeFilter; + } + if (fieldFilter != null) { + _result.fieldFilter = fieldFilter; + } + if (unaryFilter != null) { + _result.unaryFilter = unaryFilter; + } + return _result; + } factory StructuredQuery_Filter.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory StructuredQuery_Filter.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') StructuredQuery_Filter clone() => StructuredQuery_Filter()..mergeFromMessage(this); - StructuredQuery_Filter copyWith(void Function(StructuredQuery_Filter) updates) => super.copyWith((message) => updates(message as StructuredQuery_Filter)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. 
' + 'Will be removed in next major version') + StructuredQuery_Filter copyWith(void Function(StructuredQuery_Filter) updates) => super.copyWith((message) => updates(message as StructuredQuery_Filter)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static StructuredQuery_Filter create() => StructuredQuery_Filter._(); @@ -132,18 +176,38 @@ class StructuredQuery_Filter extends $pb.GeneratedMessage { } class StructuredQuery_CompositeFilter extends $pb.GeneratedMessage { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('StructuredQuery.CompositeFilter', package: const $pb.PackageName('google.firestore.v1'), createEmptyInstance: create) - ..e(1, 'op', $pb.PbFieldType.OE, defaultOrMaker: StructuredQuery_CompositeFilter_Operator.OPERATOR_UNSPECIFIED, valueOf: StructuredQuery_CompositeFilter_Operator.valueOf, enumValues: StructuredQuery_CompositeFilter_Operator.values) - ..pc(2, 'filters', $pb.PbFieldType.PM, subBuilder: StructuredQuery_Filter.create) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'StructuredQuery.CompositeFilter', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.firestore.v1'), createEmptyInstance: create) + ..e(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'op', $pb.PbFieldType.OE, defaultOrMaker: StructuredQuery_CompositeFilter_Operator.OPERATOR_UNSPECIFIED, valueOf: StructuredQuery_CompositeFilter_Operator.valueOf, enumValues: StructuredQuery_CompositeFilter_Operator.values) + ..pc(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'filters', $pb.PbFieldType.PM, subBuilder: StructuredQuery_Filter.create) ..hasRequiredFields = false ; StructuredQuery_CompositeFilter._() : super(); - factory StructuredQuery_CompositeFilter() => create(); + factory StructuredQuery_CompositeFilter({ + StructuredQuery_CompositeFilter_Operator op, + $core.Iterable filters, + }) { + final _result = create(); + if (op != null) { + _result.op = op; + } + if (filters != null) { + _result.filters.addAll(filters); + } + return _result; + } factory StructuredQuery_CompositeFilter.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory StructuredQuery_CompositeFilter.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') StructuredQuery_CompositeFilter clone() => StructuredQuery_CompositeFilter()..mergeFromMessage(this); - StructuredQuery_CompositeFilter copyWith(void Function(StructuredQuery_CompositeFilter) updates) => super.copyWith((message) => updates(message as StructuredQuery_CompositeFilter)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. 
' + 'Will be removed in next major version') + StructuredQuery_CompositeFilter copyWith(void Function(StructuredQuery_CompositeFilter) updates) => super.copyWith((message) => updates(message as StructuredQuery_CompositeFilter)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static StructuredQuery_CompositeFilter create() => StructuredQuery_CompositeFilter._(); @@ -167,19 +231,43 @@ class StructuredQuery_CompositeFilter extends $pb.GeneratedMessage { } class StructuredQuery_FieldFilter extends $pb.GeneratedMessage { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('StructuredQuery.FieldFilter', package: const $pb.PackageName('google.firestore.v1'), createEmptyInstance: create) - ..aOM(1, 'field', subBuilder: StructuredQuery_FieldReference.create) - ..e(2, 'op', $pb.PbFieldType.OE, defaultOrMaker: StructuredQuery_FieldFilter_Operator.OPERATOR_UNSPECIFIED, valueOf: StructuredQuery_FieldFilter_Operator.valueOf, enumValues: StructuredQuery_FieldFilter_Operator.values) - ..aOM<$1.Value>(3, 'value', subBuilder: $1.Value.create) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'StructuredQuery.FieldFilter', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.firestore.v1'), createEmptyInstance: create) + ..aOM(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'field', subBuilder: StructuredQuery_FieldReference.create) + ..e(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'op', $pb.PbFieldType.OE, defaultOrMaker: StructuredQuery_FieldFilter_Operator.OPERATOR_UNSPECIFIED, valueOf: StructuredQuery_FieldFilter_Operator.valueOf, enumValues: StructuredQuery_FieldFilter_Operator.values) + ..aOM<$1.Value>(3, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'value', subBuilder: $1.Value.create) ..hasRequiredFields = false ; StructuredQuery_FieldFilter._() : super(); - factory StructuredQuery_FieldFilter() => create(); + factory StructuredQuery_FieldFilter({ + StructuredQuery_FieldReference field_1, + StructuredQuery_FieldFilter_Operator op, + $1.Value value, + }) { + final _result = create(); + if (field_1 != null) { + _result.field_1 = field_1; + } + if (op != null) { + _result.op = op; + } + if (value != null) { + _result.value = value; + } + return _result; + } factory StructuredQuery_FieldFilter.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory StructuredQuery_FieldFilter.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') StructuredQuery_FieldFilter clone() => StructuredQuery_FieldFilter()..mergeFromMessage(this); - StructuredQuery_FieldFilter copyWith(void Function(StructuredQuery_FieldFilter) updates) => super.copyWith((message) => updates(message as StructuredQuery_FieldFilter)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. 
' + 'Will be removed in next major version') + StructuredQuery_FieldFilter copyWith(void Function(StructuredQuery_FieldFilter) updates) => super.copyWith((message) => updates(message as StructuredQuery_FieldFilter)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static StructuredQuery_FieldFilter create() => StructuredQuery_FieldFilter._(); @@ -231,19 +319,39 @@ class StructuredQuery_UnaryFilter extends $pb.GeneratedMessage { 2 : StructuredQuery_UnaryFilter_OperandType.field_2, 0 : StructuredQuery_UnaryFilter_OperandType.notSet }; - static final $pb.BuilderInfo _i = $pb.BuilderInfo('StructuredQuery.UnaryFilter', package: const $pb.PackageName('google.firestore.v1'), createEmptyInstance: create) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'StructuredQuery.UnaryFilter', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.firestore.v1'), createEmptyInstance: create) ..oo(0, [2]) - ..e(1, 'op', $pb.PbFieldType.OE, defaultOrMaker: StructuredQuery_UnaryFilter_Operator.OPERATOR_UNSPECIFIED, valueOf: StructuredQuery_UnaryFilter_Operator.valueOf, enumValues: StructuredQuery_UnaryFilter_Operator.values) - ..aOM(2, 'field', subBuilder: StructuredQuery_FieldReference.create) + ..e(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'op', $pb.PbFieldType.OE, defaultOrMaker: StructuredQuery_UnaryFilter_Operator.OPERATOR_UNSPECIFIED, valueOf: StructuredQuery_UnaryFilter_Operator.valueOf, enumValues: StructuredQuery_UnaryFilter_Operator.values) + ..aOM(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'field', subBuilder: StructuredQuery_FieldReference.create) ..hasRequiredFields = false ; StructuredQuery_UnaryFilter._() : super(); - factory StructuredQuery_UnaryFilter() => create(); + factory StructuredQuery_UnaryFilter({ + StructuredQuery_UnaryFilter_Operator op, + StructuredQuery_FieldReference field_2, + }) { + final _result = create(); + if (op != null) { + _result.op = op; + } + if (field_2 != null) { + _result.field_2 = field_2; + } + return _result; + } factory StructuredQuery_UnaryFilter.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory StructuredQuery_UnaryFilter.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') StructuredQuery_UnaryFilter clone() => StructuredQuery_UnaryFilter()..mergeFromMessage(this); - StructuredQuery_UnaryFilter copyWith(void Function(StructuredQuery_UnaryFilter) updates) => super.copyWith((message) => updates(message as StructuredQuery_UnaryFilter)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. 
' + 'Will be removed in next major version') + StructuredQuery_UnaryFilter copyWith(void Function(StructuredQuery_UnaryFilter) updates) => super.copyWith((message) => updates(message as StructuredQuery_UnaryFilter)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static StructuredQuery_UnaryFilter create() => StructuredQuery_UnaryFilter._(); @@ -278,18 +386,38 @@ class StructuredQuery_UnaryFilter extends $pb.GeneratedMessage { } class StructuredQuery_Order extends $pb.GeneratedMessage { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('StructuredQuery.Order', package: const $pb.PackageName('google.firestore.v1'), createEmptyInstance: create) - ..aOM(1, 'field', subBuilder: StructuredQuery_FieldReference.create) - ..e(2, 'direction', $pb.PbFieldType.OE, defaultOrMaker: StructuredQuery_Direction.DIRECTION_UNSPECIFIED, valueOf: StructuredQuery_Direction.valueOf, enumValues: StructuredQuery_Direction.values) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'StructuredQuery.Order', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.firestore.v1'), createEmptyInstance: create) + ..aOM(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'field', subBuilder: StructuredQuery_FieldReference.create) + ..e(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'direction', $pb.PbFieldType.OE, defaultOrMaker: StructuredQuery_Direction.DIRECTION_UNSPECIFIED, valueOf: StructuredQuery_Direction.valueOf, enumValues: StructuredQuery_Direction.values) ..hasRequiredFields = false ; StructuredQuery_Order._() : super(); - factory StructuredQuery_Order() => create(); + factory StructuredQuery_Order({ + StructuredQuery_FieldReference field_1, + StructuredQuery_Direction direction, + }) { + final _result = create(); + if (field_1 != null) { + _result.field_1 = field_1; + } + if (direction != null) { + _result.direction = direction; + } + return _result; + } factory StructuredQuery_Order.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory StructuredQuery_Order.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') StructuredQuery_Order clone() => StructuredQuery_Order()..mergeFromMessage(this); - StructuredQuery_Order copyWith(void Function(StructuredQuery_Order) updates) => super.copyWith((message) => updates(message as StructuredQuery_Order)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. 
' + 'Will be removed in next major version') + StructuredQuery_Order copyWith(void Function(StructuredQuery_Order) updates) => super.copyWith((message) => updates(message as StructuredQuery_Order)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static StructuredQuery_Order create() => StructuredQuery_Order._(); @@ -321,17 +449,33 @@ class StructuredQuery_Order extends $pb.GeneratedMessage { } class StructuredQuery_FieldReference extends $pb.GeneratedMessage { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('StructuredQuery.FieldReference', package: const $pb.PackageName('google.firestore.v1'), createEmptyInstance: create) - ..aOS(2, 'fieldPath') + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'StructuredQuery.FieldReference', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.firestore.v1'), createEmptyInstance: create) + ..aOS(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'fieldPath') ..hasRequiredFields = false ; StructuredQuery_FieldReference._() : super(); - factory StructuredQuery_FieldReference() => create(); + factory StructuredQuery_FieldReference({ + $core.String fieldPath, + }) { + final _result = create(); + if (fieldPath != null) { + _result.fieldPath = fieldPath; + } + return _result; + } factory StructuredQuery_FieldReference.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory StructuredQuery_FieldReference.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') StructuredQuery_FieldReference clone() => StructuredQuery_FieldReference()..mergeFromMessage(this); - StructuredQuery_FieldReference copyWith(void Function(StructuredQuery_FieldReference) updates) => super.copyWith((message) => updates(message as StructuredQuery_FieldReference)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + StructuredQuery_FieldReference copyWith(void Function(StructuredQuery_FieldReference) updates) => super.copyWith((message) => updates(message as StructuredQuery_FieldReference)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static StructuredQuery_FieldReference create() => StructuredQuery_FieldReference._(); @@ -352,17 +496,33 @@ class StructuredQuery_FieldReference extends $pb.GeneratedMessage { } class StructuredQuery_Projection extends $pb.GeneratedMessage { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('StructuredQuery.Projection', package: const $pb.PackageName('google.firestore.v1'), createEmptyInstance: create) - ..pc(2, 'fields', $pb.PbFieldType.PM, subBuilder: StructuredQuery_FieldReference.create) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'StructuredQuery.Projection', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? 
'' : 'google.firestore.v1'), createEmptyInstance: create) + ..pc(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'fields', $pb.PbFieldType.PM, subBuilder: StructuredQuery_FieldReference.create) ..hasRequiredFields = false ; StructuredQuery_Projection._() : super(); - factory StructuredQuery_Projection() => create(); + factory StructuredQuery_Projection({ + $core.Iterable fields, + }) { + final _result = create(); + if (fields != null) { + _result.fields.addAll(fields); + } + return _result; + } factory StructuredQuery_Projection.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory StructuredQuery_Projection.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') StructuredQuery_Projection clone() => StructuredQuery_Projection()..mergeFromMessage(this); - StructuredQuery_Projection copyWith(void Function(StructuredQuery_Projection) updates) => super.copyWith((message) => updates(message as StructuredQuery_Projection)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + StructuredQuery_Projection copyWith(void Function(StructuredQuery_Projection) updates) => super.copyWith((message) => updates(message as StructuredQuery_Projection)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static StructuredQuery_Projection create() => StructuredQuery_Projection._(); @@ -377,24 +537,68 @@ class StructuredQuery_Projection extends $pb.GeneratedMessage { } class StructuredQuery extends $pb.GeneratedMessage { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('StructuredQuery', package: const $pb.PackageName('google.firestore.v1'), createEmptyInstance: create) - ..aOM(1, 'select', subBuilder: StructuredQuery_Projection.create) - ..pc(2, 'from', $pb.PbFieldType.PM, subBuilder: StructuredQuery_CollectionSelector.create) - ..aOM(3, 'where', subBuilder: StructuredQuery_Filter.create) - ..pc(4, 'orderBy', $pb.PbFieldType.PM, subBuilder: StructuredQuery_Order.create) - ..aOM<$9.Int32Value>(5, 'limit', subBuilder: $9.Int32Value.create) - ..a<$core.int>(6, 'offset', $pb.PbFieldType.O3) - ..aOM(7, 'startAt', subBuilder: Cursor.create) - ..aOM(8, 'endAt', subBuilder: Cursor.create) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'StructuredQuery', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.firestore.v1'), createEmptyInstance: create) + ..aOM(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'select', subBuilder: StructuredQuery_Projection.create) + ..pc(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'from', $pb.PbFieldType.PM, subBuilder: StructuredQuery_CollectionSelector.create) + ..aOM(3, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'where', subBuilder: StructuredQuery_Filter.create) + ..pc(4, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? 
'' : 'orderBy', $pb.PbFieldType.PM, subBuilder: StructuredQuery_Order.create) + ..aOM<$7.Int32Value>(5, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'limit', subBuilder: $7.Int32Value.create) + ..a<$core.int>(6, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'offset', $pb.PbFieldType.O3) + ..aOM(7, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'startAt', subBuilder: Cursor.create) + ..aOM(8, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'endAt', subBuilder: Cursor.create) ..hasRequiredFields = false ; StructuredQuery._() : super(); - factory StructuredQuery() => create(); + factory StructuredQuery({ + StructuredQuery_Projection select, + $core.Iterable from, + StructuredQuery_Filter where, + $core.Iterable orderBy, + $7.Int32Value limit, + $core.int offset, + Cursor startAt, + Cursor endAt, + }) { + final _result = create(); + if (select != null) { + _result.select = select; + } + if (from != null) { + _result.from.addAll(from); + } + if (where != null) { + _result.where = where; + } + if (orderBy != null) { + _result.orderBy.addAll(orderBy); + } + if (limit != null) { + _result.limit = limit; + } + if (offset != null) { + _result.offset = offset; + } + if (startAt != null) { + _result.startAt = startAt; + } + if (endAt != null) { + _result.endAt = endAt; + } + return _result; + } factory StructuredQuery.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory StructuredQuery.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') StructuredQuery clone() => StructuredQuery()..mergeFromMessage(this); - StructuredQuery copyWith(void Function(StructuredQuery) updates) => super.copyWith((message) => updates(message as StructuredQuery)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. 
' + 'Will be removed in next major version') + StructuredQuery copyWith(void Function(StructuredQuery) updates) => super.copyWith((message) => updates(message as StructuredQuery)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static StructuredQuery create() => StructuredQuery._(); @@ -433,15 +637,15 @@ class StructuredQuery extends $pb.GeneratedMessage { $core.List get orderBy => $_getList(3); @$pb.TagNumber(5) - $9.Int32Value get limit => $_getN(4); + $7.Int32Value get limit => $_getN(4); @$pb.TagNumber(5) - set limit($9.Int32Value v) { setField(5, v); } + set limit($7.Int32Value v) { setField(5, v); } @$pb.TagNumber(5) $core.bool hasLimit() => $_has(4); @$pb.TagNumber(5) void clearLimit() => clearField(5); @$pb.TagNumber(5) - $9.Int32Value ensureLimit() => $_ensure(4); + $7.Int32Value ensureLimit() => $_ensure(4); @$pb.TagNumber(6) $core.int get offset => $_getIZ(5); @@ -476,18 +680,38 @@ class StructuredQuery extends $pb.GeneratedMessage { } class Cursor extends $pb.GeneratedMessage { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('Cursor', package: const $pb.PackageName('google.firestore.v1'), createEmptyInstance: create) - ..pc<$1.Value>(1, 'values', $pb.PbFieldType.PM, subBuilder: $1.Value.create) - ..aOB(2, 'before') + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'Cursor', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.firestore.v1'), createEmptyInstance: create) + ..pc<$1.Value>(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'values', $pb.PbFieldType.PM, subBuilder: $1.Value.create) + ..aOB(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'before') ..hasRequiredFields = false ; Cursor._() : super(); - factory Cursor() => create(); + factory Cursor({ + $core.Iterable<$1.Value> values, + $core.bool before, + }) { + final _result = create(); + if (values != null) { + _result.values.addAll(values); + } + if (before != null) { + _result.before = before; + } + return _result; + } factory Cursor.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory Cursor.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') Cursor clone() => Cursor()..mergeFromMessage(this); - Cursor copyWith(void Function(Cursor) updates) => super.copyWith((message) => updates(message as Cursor)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. 
' + 'Will be removed in next major version') + Cursor copyWith(void Function(Cursor) updates) => super.copyWith((message) => updates(message as Cursor)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static Cursor create() => Cursor._(); diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/query.pbenum.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/query.pbenum.dart index 0c1dde3b..e3ad5856 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/query.pbenum.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/query.pbenum.dart @@ -2,17 +2,17 @@ // Generated code. Do not modify. // source: google/firestore/v1/query.proto // -// @dart = 2.3 -// ignore_for_file: camel_case_types,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type +// @dart = 2.7 +// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields -// ignore_for_file: UNDEFINED_SHOWN_NAME,UNUSED_SHOWN_NAME +// ignore_for_file: UNDEFINED_SHOWN_NAME import 'dart:core' as $core; import 'package:protobuf/protobuf.dart' as $pb; class StructuredQuery_Direction extends $pb.ProtobufEnum { - static const StructuredQuery_Direction DIRECTION_UNSPECIFIED = StructuredQuery_Direction._(0, 'DIRECTION_UNSPECIFIED'); - static const StructuredQuery_Direction ASCENDING = StructuredQuery_Direction._(1, 'ASCENDING'); - static const StructuredQuery_Direction DESCENDING = StructuredQuery_Direction._(2, 'DESCENDING'); + static const StructuredQuery_Direction DIRECTION_UNSPECIFIED = StructuredQuery_Direction._(0, const $core.bool.fromEnvironment('protobuf.omit_enum_names') ? '' : 'DIRECTION_UNSPECIFIED'); + static const StructuredQuery_Direction ASCENDING = StructuredQuery_Direction._(1, const $core.bool.fromEnvironment('protobuf.omit_enum_names') ? '' : 'ASCENDING'); + static const StructuredQuery_Direction DESCENDING = StructuredQuery_Direction._(2, const $core.bool.fromEnvironment('protobuf.omit_enum_names') ? '' : 'DESCENDING'); static const $core.List values = [ DIRECTION_UNSPECIFIED, @@ -27,8 +27,8 @@ class StructuredQuery_Direction extends $pb.ProtobufEnum { } class StructuredQuery_CompositeFilter_Operator extends $pb.ProtobufEnum { - static const StructuredQuery_CompositeFilter_Operator OPERATOR_UNSPECIFIED = StructuredQuery_CompositeFilter_Operator._(0, 'OPERATOR_UNSPECIFIED'); - static const StructuredQuery_CompositeFilter_Operator AND = StructuredQuery_CompositeFilter_Operator._(1, 'AND'); + static const StructuredQuery_CompositeFilter_Operator OPERATOR_UNSPECIFIED = StructuredQuery_CompositeFilter_Operator._(0, const $core.bool.fromEnvironment('protobuf.omit_enum_names') ? '' : 'OPERATOR_UNSPECIFIED'); + static const StructuredQuery_CompositeFilter_Operator AND = StructuredQuery_CompositeFilter_Operator._(1, const $core.bool.fromEnvironment('protobuf.omit_enum_names') ? 
'' : 'AND'); static const $core.List values = [ OPERATOR_UNSPECIFIED, @@ -42,15 +42,17 @@ class StructuredQuery_CompositeFilter_Operator extends $pb.ProtobufEnum { } class StructuredQuery_FieldFilter_Operator extends $pb.ProtobufEnum { - static const StructuredQuery_FieldFilter_Operator OPERATOR_UNSPECIFIED = StructuredQuery_FieldFilter_Operator._(0, 'OPERATOR_UNSPECIFIED'); - static const StructuredQuery_FieldFilter_Operator LESS_THAN = StructuredQuery_FieldFilter_Operator._(1, 'LESS_THAN'); - static const StructuredQuery_FieldFilter_Operator LESS_THAN_OR_EQUAL = StructuredQuery_FieldFilter_Operator._(2, 'LESS_THAN_OR_EQUAL'); - static const StructuredQuery_FieldFilter_Operator GREATER_THAN = StructuredQuery_FieldFilter_Operator._(3, 'GREATER_THAN'); - static const StructuredQuery_FieldFilter_Operator GREATER_THAN_OR_EQUAL = StructuredQuery_FieldFilter_Operator._(4, 'GREATER_THAN_OR_EQUAL'); - static const StructuredQuery_FieldFilter_Operator EQUAL = StructuredQuery_FieldFilter_Operator._(5, 'EQUAL'); - static const StructuredQuery_FieldFilter_Operator ARRAY_CONTAINS = StructuredQuery_FieldFilter_Operator._(7, 'ARRAY_CONTAINS'); - static const StructuredQuery_FieldFilter_Operator IN = StructuredQuery_FieldFilter_Operator._(8, 'IN'); - static const StructuredQuery_FieldFilter_Operator ARRAY_CONTAINS_ANY = StructuredQuery_FieldFilter_Operator._(9, 'ARRAY_CONTAINS_ANY'); + static const StructuredQuery_FieldFilter_Operator OPERATOR_UNSPECIFIED = StructuredQuery_FieldFilter_Operator._(0, const $core.bool.fromEnvironment('protobuf.omit_enum_names') ? '' : 'OPERATOR_UNSPECIFIED'); + static const StructuredQuery_FieldFilter_Operator LESS_THAN = StructuredQuery_FieldFilter_Operator._(1, const $core.bool.fromEnvironment('protobuf.omit_enum_names') ? '' : 'LESS_THAN'); + static const StructuredQuery_FieldFilter_Operator LESS_THAN_OR_EQUAL = StructuredQuery_FieldFilter_Operator._(2, const $core.bool.fromEnvironment('protobuf.omit_enum_names') ? '' : 'LESS_THAN_OR_EQUAL'); + static const StructuredQuery_FieldFilter_Operator GREATER_THAN = StructuredQuery_FieldFilter_Operator._(3, const $core.bool.fromEnvironment('protobuf.omit_enum_names') ? '' : 'GREATER_THAN'); + static const StructuredQuery_FieldFilter_Operator GREATER_THAN_OR_EQUAL = StructuredQuery_FieldFilter_Operator._(4, const $core.bool.fromEnvironment('protobuf.omit_enum_names') ? '' : 'GREATER_THAN_OR_EQUAL'); + static const StructuredQuery_FieldFilter_Operator EQUAL = StructuredQuery_FieldFilter_Operator._(5, const $core.bool.fromEnvironment('protobuf.omit_enum_names') ? '' : 'EQUAL'); + static const StructuredQuery_FieldFilter_Operator NOT_EQUAL = StructuredQuery_FieldFilter_Operator._(6, const $core.bool.fromEnvironment('protobuf.omit_enum_names') ? '' : 'NOT_EQUAL'); + static const StructuredQuery_FieldFilter_Operator ARRAY_CONTAINS = StructuredQuery_FieldFilter_Operator._(7, const $core.bool.fromEnvironment('protobuf.omit_enum_names') ? '' : 'ARRAY_CONTAINS'); + static const StructuredQuery_FieldFilter_Operator IN = StructuredQuery_FieldFilter_Operator._(8, const $core.bool.fromEnvironment('protobuf.omit_enum_names') ? '' : 'IN'); + static const StructuredQuery_FieldFilter_Operator ARRAY_CONTAINS_ANY = StructuredQuery_FieldFilter_Operator._(9, const $core.bool.fromEnvironment('protobuf.omit_enum_names') ? '' : 'ARRAY_CONTAINS_ANY'); + static const StructuredQuery_FieldFilter_Operator NOT_IN = StructuredQuery_FieldFilter_Operator._(10, const $core.bool.fromEnvironment('protobuf.omit_enum_names') ? 
'' : 'NOT_IN'); static const $core.List values = [ OPERATOR_UNSPECIFIED, @@ -59,9 +61,11 @@ class StructuredQuery_FieldFilter_Operator extends $pb.ProtobufEnum { GREATER_THAN, GREATER_THAN_OR_EQUAL, EQUAL, + NOT_EQUAL, ARRAY_CONTAINS, IN, ARRAY_CONTAINS_ANY, + NOT_IN, ]; static final $core.Map<$core.int, StructuredQuery_FieldFilter_Operator> _byValue = $pb.ProtobufEnum.initByValue(values); @@ -71,14 +75,18 @@ class StructuredQuery_FieldFilter_Operator extends $pb.ProtobufEnum { } class StructuredQuery_UnaryFilter_Operator extends $pb.ProtobufEnum { - static const StructuredQuery_UnaryFilter_Operator OPERATOR_UNSPECIFIED = StructuredQuery_UnaryFilter_Operator._(0, 'OPERATOR_UNSPECIFIED'); - static const StructuredQuery_UnaryFilter_Operator IS_NAN = StructuredQuery_UnaryFilter_Operator._(2, 'IS_NAN'); - static const StructuredQuery_UnaryFilter_Operator IS_NULL = StructuredQuery_UnaryFilter_Operator._(3, 'IS_NULL'); + static const StructuredQuery_UnaryFilter_Operator OPERATOR_UNSPECIFIED = StructuredQuery_UnaryFilter_Operator._(0, const $core.bool.fromEnvironment('protobuf.omit_enum_names') ? '' : 'OPERATOR_UNSPECIFIED'); + static const StructuredQuery_UnaryFilter_Operator IS_NAN = StructuredQuery_UnaryFilter_Operator._(2, const $core.bool.fromEnvironment('protobuf.omit_enum_names') ? '' : 'IS_NAN'); + static const StructuredQuery_UnaryFilter_Operator IS_NULL = StructuredQuery_UnaryFilter_Operator._(3, const $core.bool.fromEnvironment('protobuf.omit_enum_names') ? '' : 'IS_NULL'); + static const StructuredQuery_UnaryFilter_Operator IS_NOT_NAN = StructuredQuery_UnaryFilter_Operator._(4, const $core.bool.fromEnvironment('protobuf.omit_enum_names') ? '' : 'IS_NOT_NAN'); + static const StructuredQuery_UnaryFilter_Operator IS_NOT_NULL = StructuredQuery_UnaryFilter_Operator._(5, const $core.bool.fromEnvironment('protobuf.omit_enum_names') ? '' : 'IS_NOT_NULL'); static const $core.List values = [ OPERATOR_UNSPECIFIED, IS_NAN, IS_NULL, + IS_NOT_NAN, + IS_NOT_NULL, ]; static final $core.Map<$core.int, StructuredQuery_UnaryFilter_Operator> _byValue = $pb.ProtobufEnum.initByValue(values); diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/query.pbjson.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/query.pbjson.dart index cc73ddbf..7f752ffe 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/query.pbjson.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/query.pbjson.dart @@ -2,8 +2,8 @@ // Generated code. Do not modify. 
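
Editorial note, not part of the generated patch: the regenerated query messages above switch to named-parameter factories and add the v1 operators NOT_EQUAL / NOT_IN (field filters) and IS_NOT_NAN / IS_NOT_NULL (unary filters). A minimal usage sketch under those assumptions (function names and the 'name' field path below are hypothetical):

import 'query.pb.dart';
import 'query.pbenum.dart';

StructuredQuery buildNameQuery() {
  // Named-parameter factories replace the old zero-argument constructors.
  return StructuredQuery(
    select: StructuredQuery_Projection(
      fields: [StructuredQuery_FieldReference(fieldPath: 'name')],
    ),
    offset: 10,
    startAt: Cursor(before: true),
  );
}

void checkNewOperators() {
  // NOT_IN (tag 10) and IS_NOT_NULL (tag 5) are new in this regeneration.
  assert(StructuredQuery_FieldFilter_Operator.valueOf(10) ==
      StructuredQuery_FieldFilter_Operator.NOT_IN);
  assert(StructuredQuery_UnaryFilter_Operator.valueOf(5) ==
      StructuredQuery_UnaryFilter_Operator.IS_NOT_NULL);
}
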
// source: google/firestore/v1/query.proto // -// @dart = 2.3 -// ignore_for_file: camel_case_types,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type +// @dart = 2.7 +// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields const StructuredQuery$json = const { '1': 'StructuredQuery', @@ -77,9 +77,11 @@ const StructuredQuery_FieldFilter_Operator$json = const { const {'1': 'GREATER_THAN', '2': 3}, const {'1': 'GREATER_THAN_OR_EQUAL', '2': 4}, const {'1': 'EQUAL', '2': 5}, + const {'1': 'NOT_EQUAL', '2': 6}, const {'1': 'ARRAY_CONTAINS', '2': 7}, const {'1': 'IN', '2': 8}, const {'1': 'ARRAY_CONTAINS_ANY', '2': 9}, + const {'1': 'NOT_IN', '2': 10}, ], }; @@ -101,6 +103,8 @@ const StructuredQuery_UnaryFilter_Operator$json = const { const {'1': 'OPERATOR_UNSPECIFIED', '2': 0}, const {'1': 'IS_NAN', '2': 2}, const {'1': 'IS_NULL', '2': 3}, + const {'1': 'IS_NOT_NAN', '2': 4}, + const {'1': 'IS_NOT_NULL', '2': 5}, ], }; diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/write.pb.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/write.pb.dart index 623dc00c..9df82076 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/write.pb.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/write.pb.dart @@ -2,15 +2,15 @@ // Generated code. Do not modify. // source: google/firestore/v1/write.proto // -// @dart = 2.3 -// ignore_for_file: camel_case_types,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type +// @dart = 2.7 +// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields import 'dart:core' as $core; import 'package:protobuf/protobuf.dart' as $pb; import 'document.pb.dart' as $1; -import 'common.pb.dart' as $7; +import 'common.pb.dart' as $9; import '../../protobuf/timestamp.pb.dart' as $4; import 'write.pbenum.dart'; @@ -20,6 +20,7 @@ export 'write.pbenum.dart'; enum Write_Operation { update, delete, + verify, transform, notSet } @@ -28,25 +29,68 @@ class Write extends $pb.GeneratedMessage { static const $core.Map<$core.int, Write_Operation> _Write_OperationByTag = { 1 : Write_Operation.update, 2 : Write_Operation.delete, + 5 : Write_Operation.verify, 6 : Write_Operation.transform, 0 : Write_Operation.notSet }; - static final $pb.BuilderInfo _i = $pb.BuilderInfo('Write', package: const $pb.PackageName('google.firestore.v1'), createEmptyInstance: create) - ..oo(0, [1, 2, 6]) - ..aOM<$1.Document>(1, 'update', subBuilder: $1.Document.create) - ..aOS(2, 'delete') - ..aOM<$7.DocumentMask>(3, 'updateMask', subBuilder: $7.DocumentMask.create) - ..aOM<$7.Precondition>(4, 'currentDocument', subBuilder: $7.Precondition.create) - ..aOM(6, 'transform', subBuilder: DocumentTransform.create) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'Write', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.firestore.v1'), createEmptyInstance: create) + ..oo(0, [1, 2, 5, 6]) + ..aOM<$1.Document>(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? 
'' : 'update', subBuilder: $1.Document.create) + ..aOS(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'delete') + ..aOM<$9.DocumentMask>(3, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'updateMask', subBuilder: $9.DocumentMask.create) + ..aOM<$9.Precondition>(4, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'currentDocument', subBuilder: $9.Precondition.create) + ..aOS(5, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'verify') + ..aOM(6, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'transform', subBuilder: DocumentTransform.create) + ..pc(7, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'updateTransforms', $pb.PbFieldType.PM, subBuilder: DocumentTransform_FieldTransform.create) ..hasRequiredFields = false ; Write._() : super(); - factory Write() => create(); + factory Write({ + $1.Document update, + $core.String delete, + $9.DocumentMask updateMask, + $9.Precondition currentDocument, + $core.String verify, + DocumentTransform transform, + $core.Iterable updateTransforms, + }) { + final _result = create(); + if (update != null) { + _result.update = update; + } + if (delete != null) { + _result.delete = delete; + } + if (updateMask != null) { + _result.updateMask = updateMask; + } + if (currentDocument != null) { + _result.currentDocument = currentDocument; + } + if (verify != null) { + _result.verify = verify; + } + if (transform != null) { + _result.transform = transform; + } + if (updateTransforms != null) { + _result.updateTransforms.addAll(updateTransforms); + } + return _result; + } factory Write.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory Write.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') Write clone() => Write()..mergeFromMessage(this); - Write copyWith(void Function(Write) updates) => super.copyWith((message) => updates(message as Write)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. 
' + 'Will be removed in next major version') + Write copyWith(void Function(Write) updates) => super.copyWith((message) => updates(message as Write)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static Write create() => Write._(); @@ -80,37 +124,49 @@ class Write extends $pb.GeneratedMessage { void clearDelete() => clearField(2); @$pb.TagNumber(3) - $7.DocumentMask get updateMask => $_getN(2); + $9.DocumentMask get updateMask => $_getN(2); @$pb.TagNumber(3) - set updateMask($7.DocumentMask v) { setField(3, v); } + set updateMask($9.DocumentMask v) { setField(3, v); } @$pb.TagNumber(3) $core.bool hasUpdateMask() => $_has(2); @$pb.TagNumber(3) void clearUpdateMask() => clearField(3); @$pb.TagNumber(3) - $7.DocumentMask ensureUpdateMask() => $_ensure(2); + $9.DocumentMask ensureUpdateMask() => $_ensure(2); @$pb.TagNumber(4) - $7.Precondition get currentDocument => $_getN(3); + $9.Precondition get currentDocument => $_getN(3); @$pb.TagNumber(4) - set currentDocument($7.Precondition v) { setField(4, v); } + set currentDocument($9.Precondition v) { setField(4, v); } @$pb.TagNumber(4) $core.bool hasCurrentDocument() => $_has(3); @$pb.TagNumber(4) void clearCurrentDocument() => clearField(4); @$pb.TagNumber(4) - $7.Precondition ensureCurrentDocument() => $_ensure(3); + $9.Precondition ensureCurrentDocument() => $_ensure(3); + + @$pb.TagNumber(5) + $core.String get verify => $_getSZ(4); + @$pb.TagNumber(5) + set verify($core.String v) { $_setString(4, v); } + @$pb.TagNumber(5) + $core.bool hasVerify() => $_has(4); + @$pb.TagNumber(5) + void clearVerify() => clearField(5); @$pb.TagNumber(6) - DocumentTransform get transform => $_getN(4); + DocumentTransform get transform => $_getN(5); @$pb.TagNumber(6) set transform(DocumentTransform v) { setField(6, v); } @$pb.TagNumber(6) - $core.bool hasTransform() => $_has(4); + $core.bool hasTransform() => $_has(5); @$pb.TagNumber(6) void clearTransform() => clearField(6); @$pb.TagNumber(6) - DocumentTransform ensureTransform() => $_ensure(4); + DocumentTransform ensureTransform() => $_ensure(5); + + @$pb.TagNumber(7) + $core.List get updateTransforms => $_getList(6); } enum DocumentTransform_FieldTransform_TransformType { @@ -133,24 +189,64 @@ class DocumentTransform_FieldTransform extends $pb.GeneratedMessage { 7 : DocumentTransform_FieldTransform_TransformType.removeAllFromArray, 0 : DocumentTransform_FieldTransform_TransformType.notSet }; - static final $pb.BuilderInfo _i = $pb.BuilderInfo('DocumentTransform.FieldTransform', package: const $pb.PackageName('google.firestore.v1'), createEmptyInstance: create) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'DocumentTransform.FieldTransform', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? 
'' : 'google.firestore.v1'), createEmptyInstance: create) ..oo(0, [2, 3, 4, 5, 6, 7]) - ..aOS(1, 'fieldPath') - ..e(2, 'setToServerValue', $pb.PbFieldType.OE, defaultOrMaker: DocumentTransform_FieldTransform_ServerValue.SERVER_VALUE_UNSPECIFIED, valueOf: DocumentTransform_FieldTransform_ServerValue.valueOf, enumValues: DocumentTransform_FieldTransform_ServerValue.values) - ..aOM<$1.Value>(3, 'increment', subBuilder: $1.Value.create) - ..aOM<$1.Value>(4, 'maximum', subBuilder: $1.Value.create) - ..aOM<$1.Value>(5, 'minimum', subBuilder: $1.Value.create) - ..aOM<$1.ArrayValue>(6, 'appendMissingElements', subBuilder: $1.ArrayValue.create) - ..aOM<$1.ArrayValue>(7, 'removeAllFromArray', subBuilder: $1.ArrayValue.create) + ..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'fieldPath') + ..e(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'setToServerValue', $pb.PbFieldType.OE, defaultOrMaker: DocumentTransform_FieldTransform_ServerValue.SERVER_VALUE_UNSPECIFIED, valueOf: DocumentTransform_FieldTransform_ServerValue.valueOf, enumValues: DocumentTransform_FieldTransform_ServerValue.values) + ..aOM<$1.Value>(3, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'increment', subBuilder: $1.Value.create) + ..aOM<$1.Value>(4, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'maximum', subBuilder: $1.Value.create) + ..aOM<$1.Value>(5, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'minimum', subBuilder: $1.Value.create) + ..aOM<$1.ArrayValue>(6, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'appendMissingElements', subBuilder: $1.ArrayValue.create) + ..aOM<$1.ArrayValue>(7, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'removeAllFromArray', subBuilder: $1.ArrayValue.create) ..hasRequiredFields = false ; DocumentTransform_FieldTransform._() : super(); - factory DocumentTransform_FieldTransform() => create(); + factory DocumentTransform_FieldTransform({ + $core.String fieldPath, + DocumentTransform_FieldTransform_ServerValue setToServerValue, + $1.Value increment, + $1.Value maximum, + $1.Value minimum, + $1.ArrayValue appendMissingElements, + $1.ArrayValue removeAllFromArray, + }) { + final _result = create(); + if (fieldPath != null) { + _result.fieldPath = fieldPath; + } + if (setToServerValue != null) { + _result.setToServerValue = setToServerValue; + } + if (increment != null) { + _result.increment = increment; + } + if (maximum != null) { + _result.maximum = maximum; + } + if (minimum != null) { + _result.minimum = minimum; + } + if (appendMissingElements != null) { + _result.appendMissingElements = appendMissingElements; + } + if (removeAllFromArray != null) { + _result.removeAllFromArray = removeAllFromArray; + } + return _result; + } factory DocumentTransform_FieldTransform.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory DocumentTransform_FieldTransform.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. 
' + 'Will be removed in next major version') DocumentTransform_FieldTransform clone() => DocumentTransform_FieldTransform()..mergeFromMessage(this); - DocumentTransform_FieldTransform copyWith(void Function(DocumentTransform_FieldTransform) updates) => super.copyWith((message) => updates(message as DocumentTransform_FieldTransform)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + DocumentTransform_FieldTransform copyWith(void Function(DocumentTransform_FieldTransform) updates) => super.copyWith((message) => updates(message as DocumentTransform_FieldTransform)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static DocumentTransform_FieldTransform create() => DocumentTransform_FieldTransform._(); @@ -238,18 +334,38 @@ class DocumentTransform_FieldTransform extends $pb.GeneratedMessage { } class DocumentTransform extends $pb.GeneratedMessage { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('DocumentTransform', package: const $pb.PackageName('google.firestore.v1'), createEmptyInstance: create) - ..aOS(1, 'document') - ..pc(2, 'fieldTransforms', $pb.PbFieldType.PM, subBuilder: DocumentTransform_FieldTransform.create) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'DocumentTransform', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.firestore.v1'), createEmptyInstance: create) + ..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'document') + ..pc(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'fieldTransforms', $pb.PbFieldType.PM, subBuilder: DocumentTransform_FieldTransform.create) ..hasRequiredFields = false ; DocumentTransform._() : super(); - factory DocumentTransform() => create(); + factory DocumentTransform({ + $core.String document, + $core.Iterable fieldTransforms, + }) { + final _result = create(); + if (document != null) { + _result.document = document; + } + if (fieldTransforms != null) { + _result.fieldTransforms.addAll(fieldTransforms); + } + return _result; + } factory DocumentTransform.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory DocumentTransform.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') DocumentTransform clone() => DocumentTransform()..mergeFromMessage(this); - DocumentTransform copyWith(void Function(DocumentTransform) updates) => super.copyWith((message) => updates(message as DocumentTransform)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. 
' + 'Will be removed in next major version') + DocumentTransform copyWith(void Function(DocumentTransform) updates) => super.copyWith((message) => updates(message as DocumentTransform)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static DocumentTransform create() => DocumentTransform._(); @@ -273,18 +389,38 @@ class DocumentTransform extends $pb.GeneratedMessage { } class WriteResult extends $pb.GeneratedMessage { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('WriteResult', package: const $pb.PackageName('google.firestore.v1'), createEmptyInstance: create) - ..aOM<$4.Timestamp>(1, 'updateTime', subBuilder: $4.Timestamp.create) - ..pc<$1.Value>(2, 'transformResults', $pb.PbFieldType.PM, subBuilder: $1.Value.create) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'WriteResult', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.firestore.v1'), createEmptyInstance: create) + ..aOM<$4.Timestamp>(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'updateTime', subBuilder: $4.Timestamp.create) + ..pc<$1.Value>(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'transformResults', $pb.PbFieldType.PM, subBuilder: $1.Value.create) ..hasRequiredFields = false ; WriteResult._() : super(); - factory WriteResult() => create(); + factory WriteResult({ + $4.Timestamp updateTime, + $core.Iterable<$1.Value> transformResults, + }) { + final _result = create(); + if (updateTime != null) { + _result.updateTime = updateTime; + } + if (transformResults != null) { + _result.transformResults.addAll(transformResults); + } + return _result; + } factory WriteResult.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory WriteResult.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') WriteResult clone() => WriteResult()..mergeFromMessage(this); - WriteResult copyWith(void Function(WriteResult) updates) => super.copyWith((message) => updates(message as WriteResult)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + WriteResult copyWith(void Function(WriteResult) updates) => super.copyWith((message) => updates(message as WriteResult)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static WriteResult create() => WriteResult._(); @@ -310,19 +446,43 @@ class WriteResult extends $pb.GeneratedMessage { } class DocumentChange extends $pb.GeneratedMessage { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('DocumentChange', package: const $pb.PackageName('google.firestore.v1'), createEmptyInstance: create) - ..aOM<$1.Document>(1, 'document', subBuilder: $1.Document.create) - ..p<$core.int>(5, 'targetIds', $pb.PbFieldType.P3) - ..p<$core.int>(6, 'removedTargetIds', $pb.PbFieldType.P3) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? 
'' : 'DocumentChange', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.firestore.v1'), createEmptyInstance: create) + ..aOM<$1.Document>(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'document', subBuilder: $1.Document.create) + ..p<$core.int>(5, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'targetIds', $pb.PbFieldType.P3) + ..p<$core.int>(6, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'removedTargetIds', $pb.PbFieldType.P3) ..hasRequiredFields = false ; DocumentChange._() : super(); - factory DocumentChange() => create(); + factory DocumentChange({ + $1.Document document, + $core.Iterable<$core.int> targetIds, + $core.Iterable<$core.int> removedTargetIds, + }) { + final _result = create(); + if (document != null) { + _result.document = document; + } + if (targetIds != null) { + _result.targetIds.addAll(targetIds); + } + if (removedTargetIds != null) { + _result.removedTargetIds.addAll(removedTargetIds); + } + return _result; + } factory DocumentChange.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory DocumentChange.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') DocumentChange clone() => DocumentChange()..mergeFromMessage(this); - DocumentChange copyWith(void Function(DocumentChange) updates) => super.copyWith((message) => updates(message as DocumentChange)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + DocumentChange copyWith(void Function(DocumentChange) updates) => super.copyWith((message) => updates(message as DocumentChange)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static DocumentChange create() => DocumentChange._(); @@ -351,19 +511,43 @@ class DocumentChange extends $pb.GeneratedMessage { } class DocumentDelete extends $pb.GeneratedMessage { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('DocumentDelete', package: const $pb.PackageName('google.firestore.v1'), createEmptyInstance: create) - ..aOS(1, 'document') - ..aOM<$4.Timestamp>(4, 'readTime', subBuilder: $4.Timestamp.create) - ..p<$core.int>(6, 'removedTargetIds', $pb.PbFieldType.P3) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'DocumentDelete', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.firestore.v1'), createEmptyInstance: create) + ..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'document') + ..aOM<$4.Timestamp>(4, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'readTime', subBuilder: $4.Timestamp.create) + ..p<$core.int>(6, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? 
'' : 'removedTargetIds', $pb.PbFieldType.P3) ..hasRequiredFields = false ; DocumentDelete._() : super(); - factory DocumentDelete() => create(); + factory DocumentDelete({ + $core.String document, + $4.Timestamp readTime, + $core.Iterable<$core.int> removedTargetIds, + }) { + final _result = create(); + if (document != null) { + _result.document = document; + } + if (readTime != null) { + _result.readTime = readTime; + } + if (removedTargetIds != null) { + _result.removedTargetIds.addAll(removedTargetIds); + } + return _result; + } factory DocumentDelete.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory DocumentDelete.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') DocumentDelete clone() => DocumentDelete()..mergeFromMessage(this); - DocumentDelete copyWith(void Function(DocumentDelete) updates) => super.copyWith((message) => updates(message as DocumentDelete)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + DocumentDelete copyWith(void Function(DocumentDelete) updates) => super.copyWith((message) => updates(message as DocumentDelete)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static DocumentDelete create() => DocumentDelete._(); @@ -398,19 +582,43 @@ class DocumentDelete extends $pb.GeneratedMessage { } class DocumentRemove extends $pb.GeneratedMessage { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('DocumentRemove', package: const $pb.PackageName('google.firestore.v1'), createEmptyInstance: create) - ..aOS(1, 'document') - ..p<$core.int>(2, 'removedTargetIds', $pb.PbFieldType.P3) - ..aOM<$4.Timestamp>(4, 'readTime', subBuilder: $4.Timestamp.create) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'DocumentRemove', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.firestore.v1'), createEmptyInstance: create) + ..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'document') + ..p<$core.int>(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'removedTargetIds', $pb.PbFieldType.P3) + ..aOM<$4.Timestamp>(4, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? 
'' : 'readTime', subBuilder: $4.Timestamp.create) ..hasRequiredFields = false ; DocumentRemove._() : super(); - factory DocumentRemove() => create(); + factory DocumentRemove({ + $core.String document, + $core.Iterable<$core.int> removedTargetIds, + $4.Timestamp readTime, + }) { + final _result = create(); + if (document != null) { + _result.document = document; + } + if (removedTargetIds != null) { + _result.removedTargetIds.addAll(removedTargetIds); + } + if (readTime != null) { + _result.readTime = readTime; + } + return _result; + } factory DocumentRemove.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory DocumentRemove.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') DocumentRemove clone() => DocumentRemove()..mergeFromMessage(this); - DocumentRemove copyWith(void Function(DocumentRemove) updates) => super.copyWith((message) => updates(message as DocumentRemove)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + DocumentRemove copyWith(void Function(DocumentRemove) updates) => super.copyWith((message) => updates(message as DocumentRemove)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static DocumentRemove create() => DocumentRemove._(); @@ -445,18 +653,38 @@ class DocumentRemove extends $pb.GeneratedMessage { } class ExistenceFilter extends $pb.GeneratedMessage { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('ExistenceFilter', package: const $pb.PackageName('google.firestore.v1'), createEmptyInstance: create) - ..a<$core.int>(1, 'targetId', $pb.PbFieldType.O3) - ..a<$core.int>(2, 'count', $pb.PbFieldType.O3) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'ExistenceFilter', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.firestore.v1'), createEmptyInstance: create) + ..a<$core.int>(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'targetId', $pb.PbFieldType.O3) + ..a<$core.int>(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'count', $pb.PbFieldType.O3) ..hasRequiredFields = false ; ExistenceFilter._() : super(); - factory ExistenceFilter() => create(); + factory ExistenceFilter({ + $core.int targetId, + $core.int count, + }) { + final _result = create(); + if (targetId != null) { + _result.targetId = targetId; + } + if (count != null) { + _result.count = count; + } + return _result; + } factory ExistenceFilter.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory ExistenceFilter.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. 
' + 'Will be removed in next major version') ExistenceFilter clone() => ExistenceFilter()..mergeFromMessage(this); - ExistenceFilter copyWith(void Function(ExistenceFilter) updates) => super.copyWith((message) => updates(message as ExistenceFilter)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + ExistenceFilter copyWith(void Function(ExistenceFilter) updates) => super.copyWith((message) => updates(message as ExistenceFilter)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static ExistenceFilter create() => ExistenceFilter._(); diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/write.pbenum.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/write.pbenum.dart index 0cb64d3e..a905a563 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/write.pbenum.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/write.pbenum.dart @@ -2,16 +2,16 @@ // Generated code. Do not modify. // source: google/firestore/v1/write.proto // -// @dart = 2.3 -// ignore_for_file: camel_case_types,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type +// @dart = 2.7 +// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields -// ignore_for_file: UNDEFINED_SHOWN_NAME,UNUSED_SHOWN_NAME +// ignore_for_file: UNDEFINED_SHOWN_NAME import 'dart:core' as $core; import 'package:protobuf/protobuf.dart' as $pb; class DocumentTransform_FieldTransform_ServerValue extends $pb.ProtobufEnum { - static const DocumentTransform_FieldTransform_ServerValue SERVER_VALUE_UNSPECIFIED = DocumentTransform_FieldTransform_ServerValue._(0, 'SERVER_VALUE_UNSPECIFIED'); - static const DocumentTransform_FieldTransform_ServerValue REQUEST_TIME = DocumentTransform_FieldTransform_ServerValue._(1, 'REQUEST_TIME'); + static const DocumentTransform_FieldTransform_ServerValue SERVER_VALUE_UNSPECIFIED = DocumentTransform_FieldTransform_ServerValue._(0, const $core.bool.fromEnvironment('protobuf.omit_enum_names') ? '' : 'SERVER_VALUE_UNSPECIFIED'); + static const DocumentTransform_FieldTransform_ServerValue REQUEST_TIME = DocumentTransform_FieldTransform_ServerValue._(1, const $core.bool.fromEnvironment('protobuf.omit_enum_names') ? '' : 'REQUEST_TIME'); static const $core.List values = [ SERVER_VALUE_UNSPECIFIED, diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/write.pbjson.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/write.pbjson.dart index a57164f5..c0d70fde 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/write.pbjson.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/firestore/v1/write.pbjson.dart @@ -2,16 +2,18 @@ // Generated code. Do not modify. 
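
Editorial note, not part of the generated patch: the regenerated Write message adds a `verify` member to the operation oneof (tag 5) and a repeated `updateTransforms` field (tag 7) outside the oneof. A hedged sketch of how the new surface would be used (function names and the document path are hypothetical):

import 'write.pb.dart';

Write verifyWrite(String documentName) {
  // `verify` names a document whose precondition is checked without
  // modifying it, e.g. 'projects/p/databases/(default)/documents/users/alice'.
  return Write(verify: documentName);
}

void addServerTimestampTransform(Write updateWrite) {
  // `updateTransforms` is repeated and sits outside the operation oneof,
  // so transforms can ride along with an update write. This assumes the
  // caller has already populated `updateWrite.update`.
  updateWrite.updateTransforms.add(
    DocumentTransform_FieldTransform(
      fieldPath: 'updatedAt',
      setToServerValue:
          DocumentTransform_FieldTransform_ServerValue.REQUEST_TIME,
    ),
  );
}
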
// source: google/firestore/v1/write.proto // -// @dart = 2.3 -// ignore_for_file: camel_case_types,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type +// @dart = 2.7 +// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields const Write$json = const { '1': 'Write', '2': const [ const {'1': 'update', '3': 1, '4': 1, '5': 11, '6': '.google.firestore.v1.Document', '9': 0, '10': 'update'}, const {'1': 'delete', '3': 2, '4': 1, '5': 9, '9': 0, '10': 'delete'}, + const {'1': 'verify', '3': 5, '4': 1, '5': 9, '9': 0, '10': 'verify'}, const {'1': 'transform', '3': 6, '4': 1, '5': 11, '6': '.google.firestore.v1.DocumentTransform', '9': 0, '10': 'transform'}, const {'1': 'update_mask', '3': 3, '4': 1, '5': 11, '6': '.google.firestore.v1.DocumentMask', '10': 'updateMask'}, + const {'1': 'update_transforms', '3': 7, '4': 3, '5': 11, '6': '.google.firestore.v1.DocumentTransform.FieldTransform', '10': 'updateTransforms'}, const {'1': 'current_document', '3': 4, '4': 1, '5': 11, '6': '.google.firestore.v1.Precondition', '10': 'currentDocument'}, ], '8': const [ diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/index.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/index.dart index 8b137891..e69de29b 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/index.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/index.dart @@ -1 +0,0 @@ - diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/any.pb.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/any.pb.dart index ce43319b..862db95d 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/any.pb.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/any.pb.dart @@ -2,8 +2,8 @@ // Generated code. Do not modify. // source: google/protobuf/any.proto // -// @dart = 2.3 -// ignore_for_file: camel_case_types,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type +// @dart = 2.7 +// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields import 'dart:core' as $core; @@ -12,18 +12,38 @@ import 'package:protobuf/protobuf.dart' as $pb; import 'package:protobuf/src/protobuf/mixins/well_known.dart' as $mixin; class Any extends $pb.GeneratedMessage with $mixin.AnyMixin { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('Any', package: const $pb.PackageName('google.protobuf'), createEmptyInstance: create, toProto3Json: $mixin.AnyMixin.toProto3JsonHelper, fromProto3Json: $mixin.AnyMixin.fromProto3JsonHelper) - ..aOS(1, 'typeUrl') - ..a<$core.List<$core.int>>(2, 'value', $pb.PbFieldType.OY) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'Any', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.protobuf'), createEmptyInstance: create, toProto3Json: $mixin.AnyMixin.toProto3JsonHelper, fromProto3Json: $mixin.AnyMixin.fromProto3JsonHelper) + ..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? 
'' : 'typeUrl') + ..a<$core.List<$core.int>>(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'value', $pb.PbFieldType.OY) ..hasRequiredFields = false ; Any._() : super(); - factory Any() => create(); + factory Any({ + $core.String typeUrl, + $core.List<$core.int> value, + }) { + final _result = create(); + if (typeUrl != null) { + _result.typeUrl = typeUrl; + } + if (value != null) { + _result.value = value; + } + return _result; + } factory Any.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory Any.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') Any clone() => Any()..mergeFromMessage(this); - Any copyWith(void Function(Any) updates) => super.copyWith((message) => updates(message as Any)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + Any copyWith(void Function(Any) updates) => super.copyWith((message) => updates(message as Any)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static Any create() => Any._(); diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/any.pbenum.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/any.pbenum.dart index 93ec58ff..54856113 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/any.pbenum.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/any.pbenum.dart @@ -2,6 +2,6 @@ // Generated code. Do not modify. // source: google/protobuf/any.proto // -// @dart = 2.3 -// ignore_for_file: camel_case_types,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type +// @dart = 2.7 +// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/any.pbjson.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/any.pbjson.dart index 3bc7dc64..18361924 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/any.pbjson.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/any.pbjson.dart @@ -2,8 +2,8 @@ // Generated code. Do not modify. 
// source: google/protobuf/any.proto // -// @dart = 2.3 -// ignore_for_file: camel_case_types,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type +// @dart = 2.7 +// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields const Any$json = const { '1': 'Any', diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/empty.pb.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/empty.pb.dart index 3b5ab868..73adf03d 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/empty.pb.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/empty.pb.dart @@ -2,15 +2,15 @@ // Generated code. Do not modify. // source: google/protobuf/empty.proto // -// @dart = 2.3 -// ignore_for_file: camel_case_types,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type +// @dart = 2.7 +// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields import 'dart:core' as $core; import 'package:protobuf/protobuf.dart' as $pb; class Empty extends $pb.GeneratedMessage { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('Empty', package: const $pb.PackageName('google.protobuf'), createEmptyInstance: create) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'Empty', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.protobuf'), createEmptyInstance: create) ..hasRequiredFields = false ; @@ -18,8 +18,16 @@ class Empty extends $pb.GeneratedMessage { factory Empty() => create(); factory Empty.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory Empty.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') Empty clone() => Empty()..mergeFromMessage(this); - Empty copyWith(void Function(Empty) updates) => super.copyWith((message) => updates(message as Empty)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + Empty copyWith(void Function(Empty) updates) => super.copyWith((message) => updates(message as Empty)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static Empty create() => Empty._(); diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/empty.pbenum.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/empty.pbenum.dart index 1e757315..97dd20c9 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/empty.pbenum.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/empty.pbenum.dart @@ -2,6 +2,6 @@ // Generated code. Do not modify. 
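
Editorial note, not part of the generated patch: `Any` now also exposes a named-parameter factory (`typeUrl`, `value`). A small sketch, packing an `Empty` message by hand (the wrapper function is hypothetical):

import 'any.pb.dart';
import 'empty.pb.dart';

Any wrapEmpty() {
  // 'type.googleapis.com/...' is the conventional Any type URL prefix.
  return Any(
    typeUrl: 'type.googleapis.com/google.protobuf.Empty',
    value: Empty().writeToBuffer(),
  );
}
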
// source: google/protobuf/empty.proto // -// @dart = 2.3 -// ignore_for_file: camel_case_types,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type +// @dart = 2.7 +// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/empty.pbjson.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/empty.pbjson.dart index 0b6c4bd7..8bc54726 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/empty.pbjson.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/empty.pbjson.dart @@ -2,8 +2,8 @@ // Generated code. Do not modify. // source: google/protobuf/empty.proto // -// @dart = 2.3 -// ignore_for_file: camel_case_types,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type +// @dart = 2.7 +// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields const Empty$json = const { '1': 'Empty', diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/struct.pb.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/struct.pb.dart index ebcb0fbe..554511b8 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/struct.pb.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/struct.pb.dart @@ -2,8 +2,8 @@ // Generated code. Do not modify. // source: google/protobuf/struct.proto // -// @dart = 2.3 -// ignore_for_file: camel_case_types,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type +// @dart = 2.7 +// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields import 'dart:core' as $core; @@ -16,17 +16,33 @@ import 'struct.pbenum.dart'; export 'struct.pbenum.dart'; class Struct extends $pb.GeneratedMessage with $mixin.StructMixin { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('Struct', package: const $pb.PackageName('google.protobuf'), createEmptyInstance: create, toProto3Json: $mixin.StructMixin.toProto3JsonHelper, fromProto3Json: $mixin.StructMixin.fromProto3JsonHelper) - ..m<$core.String, Value>(1, 'fields', entryClassName: 'Struct.FieldsEntry', keyFieldType: $pb.PbFieldType.OS, valueFieldType: $pb.PbFieldType.OM, valueCreator: Value.create, packageName: const $pb.PackageName('google.protobuf')) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'Struct', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.protobuf'), createEmptyInstance: create, toProto3Json: $mixin.StructMixin.toProto3JsonHelper, fromProto3Json: $mixin.StructMixin.fromProto3JsonHelper) + ..m<$core.String, Value>(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? 
'' : 'fields', entryClassName: 'Struct.FieldsEntry', keyFieldType: $pb.PbFieldType.OS, valueFieldType: $pb.PbFieldType.OM, valueCreator: Value.create, packageName: const $pb.PackageName('google.protobuf')) ..hasRequiredFields = false ; Struct._() : super(); - factory Struct() => create(); + factory Struct({ + $core.Map<$core.String, Value> fields, + }) { + final _result = create(); + if (fields != null) { + _result.fields.addAll(fields); + } + return _result; + } factory Struct.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory Struct.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') Struct clone() => Struct()..mergeFromMessage(this); - Struct copyWith(void Function(Struct) updates) => super.copyWith((message) => updates(message as Struct)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + Struct copyWith(void Function(Struct) updates) => super.copyWith((message) => updates(message as Struct)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static Struct create() => Struct._(); @@ -60,23 +76,59 @@ class Value extends $pb.GeneratedMessage with $mixin.ValueMixin { 6 : Value_Kind.listValue, 0 : Value_Kind.notSet }; - static final $pb.BuilderInfo _i = $pb.BuilderInfo('Value', package: const $pb.PackageName('google.protobuf'), createEmptyInstance: create, toProto3Json: $mixin.ValueMixin.toProto3JsonHelper, fromProto3Json: $mixin.ValueMixin.fromProto3JsonHelper) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'Value', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.protobuf'), createEmptyInstance: create, toProto3Json: $mixin.ValueMixin.toProto3JsonHelper, fromProto3Json: $mixin.ValueMixin.fromProto3JsonHelper) ..oo(0, [1, 2, 3, 4, 5, 6]) - ..e(1, 'nullValue', $pb.PbFieldType.OE, defaultOrMaker: NullValue.NULL_VALUE, valueOf: NullValue.valueOf, enumValues: NullValue.values) - ..a<$core.double>(2, 'numberValue', $pb.PbFieldType.OD) - ..aOS(3, 'stringValue') - ..aOB(4, 'boolValue') - ..aOM(5, 'structValue', subBuilder: Struct.create) - ..aOM(6, 'listValue', subBuilder: ListValue.create) + ..e(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'nullValue', $pb.PbFieldType.OE, defaultOrMaker: NullValue.NULL_VALUE, valueOf: NullValue.valueOf, enumValues: NullValue.values) + ..a<$core.double>(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'numberValue', $pb.PbFieldType.OD) + ..aOS(3, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'stringValue') + ..aOB(4, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'boolValue') + ..aOM(5, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'structValue', subBuilder: Struct.create) + ..aOM(6, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? 
'' : 'listValue', subBuilder: ListValue.create) ..hasRequiredFields = false ; Value._() : super(); - factory Value() => create(); + factory Value({ + NullValue nullValue, + $core.double numberValue, + $core.String stringValue, + $core.bool boolValue, + Struct structValue, + ListValue listValue, + }) { + final _result = create(); + if (nullValue != null) { + _result.nullValue = nullValue; + } + if (numberValue != null) { + _result.numberValue = numberValue; + } + if (stringValue != null) { + _result.stringValue = stringValue; + } + if (boolValue != null) { + _result.boolValue = boolValue; + } + if (structValue != null) { + _result.structValue = structValue; + } + if (listValue != null) { + _result.listValue = listValue; + } + return _result; + } factory Value.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory Value.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') Value clone() => Value()..mergeFromMessage(this); - Value copyWith(void Function(Value) updates) => super.copyWith((message) => updates(message as Value)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + Value copyWith(void Function(Value) updates) => super.copyWith((message) => updates(message as Value)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static Value create() => Value._(); @@ -149,17 +201,33 @@ class Value extends $pb.GeneratedMessage with $mixin.ValueMixin { } class ListValue extends $pb.GeneratedMessage with $mixin.ListValueMixin { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('ListValue', package: const $pb.PackageName('google.protobuf'), createEmptyInstance: create, toProto3Json: $mixin.ListValueMixin.toProto3JsonHelper, fromProto3Json: $mixin.ListValueMixin.fromProto3JsonHelper) - ..pc(1, 'values', $pb.PbFieldType.PM, subBuilder: Value.create) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'ListValue', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.protobuf'), createEmptyInstance: create, toProto3Json: $mixin.ListValueMixin.toProto3JsonHelper, fromProto3Json: $mixin.ListValueMixin.fromProto3JsonHelper) + ..pc(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'values', $pb.PbFieldType.PM, subBuilder: Value.create) ..hasRequiredFields = false ; ListValue._() : super(); - factory ListValue() => create(); + factory ListValue({ + $core.Iterable values, + }) { + final _result = create(); + if (values != null) { + _result.values.addAll(values); + } + return _result; + } factory ListValue.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory ListValue.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. 
' + 'Will be removed in next major version') ListValue clone() => ListValue()..mergeFromMessage(this); - ListValue copyWith(void Function(ListValue) updates) => super.copyWith((message) => updates(message as ListValue)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + ListValue copyWith(void Function(ListValue) updates) => super.copyWith((message) => updates(message as ListValue)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static ListValue create() => ListValue._(); diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/struct.pbenum.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/struct.pbenum.dart index 0d3111f5..a5669461 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/struct.pbenum.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/struct.pbenum.dart @@ -2,15 +2,15 @@ // Generated code. Do not modify. // source: google/protobuf/struct.proto // -// @dart = 2.3 -// ignore_for_file: camel_case_types,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type +// @dart = 2.7 +// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields -// ignore_for_file: UNDEFINED_SHOWN_NAME,UNUSED_SHOWN_NAME +// ignore_for_file: UNDEFINED_SHOWN_NAME import 'dart:core' as $core; import 'package:protobuf/protobuf.dart' as $pb; class NullValue extends $pb.ProtobufEnum { - static const NullValue NULL_VALUE = NullValue._(0, 'NULL_VALUE'); + static const NullValue NULL_VALUE = NullValue._(0, const $core.bool.fromEnvironment('protobuf.omit_enum_names') ? '' : 'NULL_VALUE'); static const $core.List values = [ NULL_VALUE, diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/struct.pbjson.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/struct.pbjson.dart index b6ead23f..3dbcf94a 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/struct.pbjson.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/struct.pbjson.dart @@ -2,8 +2,8 @@ // Generated code. Do not modify. // source: google/protobuf/struct.proto // -// @dart = 2.3 -// ignore_for_file: camel_case_types,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type +// @dart = 2.7 +// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields const NullValue$json = const { '1': 'NullValue', diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/timestamp.pb.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/timestamp.pb.dart index 69af8ebf..b9c9e24e 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/timestamp.pb.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/timestamp.pb.dart @@ -2,8 +2,8 @@ // Generated code. Do not modify. 
// source: google/protobuf/timestamp.proto // -// @dart = 2.3 -// ignore_for_file: camel_case_types,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type +// @dart = 2.7 +// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields import 'dart:core' as $core; @@ -13,18 +13,38 @@ import 'package:protobuf/protobuf.dart' as $pb; import 'package:protobuf/src/protobuf/mixins/well_known.dart' as $mixin; class Timestamp extends $pb.GeneratedMessage with $mixin.TimestampMixin { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('Timestamp', package: const $pb.PackageName('google.protobuf'), createEmptyInstance: create, toProto3Json: $mixin.TimestampMixin.toProto3JsonHelper, fromProto3Json: $mixin.TimestampMixin.fromProto3JsonHelper) - ..aInt64(1, 'seconds') - ..a<$core.int>(2, 'nanos', $pb.PbFieldType.O3) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'Timestamp', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.protobuf'), createEmptyInstance: create, toProto3Json: $mixin.TimestampMixin.toProto3JsonHelper, fromProto3Json: $mixin.TimestampMixin.fromProto3JsonHelper) + ..aInt64(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'seconds') + ..a<$core.int>(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'nanos', $pb.PbFieldType.O3) ..hasRequiredFields = false ; Timestamp._() : super(); - factory Timestamp() => create(); + factory Timestamp({ + $fixnum.Int64 seconds, + $core.int nanos, + }) { + final _result = create(); + if (seconds != null) { + _result.seconds = seconds; + } + if (nanos != null) { + _result.nanos = nanos; + } + return _result; + } factory Timestamp.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory Timestamp.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') Timestamp clone() => Timestamp()..mergeFromMessage(this); - Timestamp copyWith(void Function(Timestamp) updates) => super.copyWith((message) => updates(message as Timestamp)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + Timestamp copyWith(void Function(Timestamp) updates) => super.copyWith((message) => updates(message as Timestamp)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static Timestamp create() => Timestamp._(); diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/timestamp.pbenum.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/timestamp.pbenum.dart index d4604ae9..8b6b6734 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/timestamp.pbenum.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/timestamp.pbenum.dart @@ -2,6 +2,6 @@ // Generated code. Do not modify. 
// source: google/protobuf/timestamp.proto // -// @dart = 2.3 -// ignore_for_file: camel_case_types,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type +// @dart = 2.7 +// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/timestamp.pbjson.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/timestamp.pbjson.dart index b67a6858..4680bf33 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/timestamp.pbjson.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/timestamp.pbjson.dart @@ -2,8 +2,8 @@ // Generated code. Do not modify. // source: google/protobuf/timestamp.proto // -// @dart = 2.3 -// ignore_for_file: camel_case_types,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type +// @dart = 2.7 +// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields const Timestamp$json = const { '1': 'Timestamp', diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/wrappers.pb.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/wrappers.pb.dart index a13dab6a..9a4396bd 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/wrappers.pb.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/wrappers.pb.dart @@ -2,8 +2,8 @@ // Generated code. Do not modify. // source: google/protobuf/wrappers.proto // -// @dart = 2.3 -// ignore_for_file: camel_case_types,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type +// @dart = 2.7 +// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields import 'dart:core' as $core; @@ -13,17 +13,33 @@ import 'package:protobuf/protobuf.dart' as $pb; import 'package:protobuf/src/protobuf/mixins/well_known.dart' as $mixin; class DoubleValue extends $pb.GeneratedMessage with $mixin.DoubleValueMixin { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('DoubleValue', package: const $pb.PackageName('google.protobuf'), createEmptyInstance: create, toProto3Json: $mixin.DoubleValueMixin.toProto3JsonHelper, fromProto3Json: $mixin.DoubleValueMixin.fromProto3JsonHelper) - ..a<$core.double>(1, 'value', $pb.PbFieldType.OD) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'DoubleValue', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.protobuf'), createEmptyInstance: create, toProto3Json: $mixin.DoubleValueMixin.toProto3JsonHelper, fromProto3Json: $mixin.DoubleValueMixin.fromProto3JsonHelper) + ..a<$core.double>(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? 
'' : 'value', $pb.PbFieldType.OD) ..hasRequiredFields = false ; DoubleValue._() : super(); - factory DoubleValue() => create(); + factory DoubleValue({ + $core.double value, + }) { + final _result = create(); + if (value != null) { + _result.value = value; + } + return _result; + } factory DoubleValue.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory DoubleValue.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') DoubleValue clone() => DoubleValue()..mergeFromMessage(this); - DoubleValue copyWith(void Function(DoubleValue) updates) => super.copyWith((message) => updates(message as DoubleValue)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + DoubleValue copyWith(void Function(DoubleValue) updates) => super.copyWith((message) => updates(message as DoubleValue)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static DoubleValue create() => DoubleValue._(); @@ -44,17 +60,33 @@ class DoubleValue extends $pb.GeneratedMessage with $mixin.DoubleValueMixin { } class FloatValue extends $pb.GeneratedMessage with $mixin.FloatValueMixin { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('FloatValue', package: const $pb.PackageName('google.protobuf'), createEmptyInstance: create, toProto3Json: $mixin.FloatValueMixin.toProto3JsonHelper, fromProto3Json: $mixin.FloatValueMixin.fromProto3JsonHelper) - ..a<$core.double>(1, 'value', $pb.PbFieldType.OF) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'FloatValue', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.protobuf'), createEmptyInstance: create, toProto3Json: $mixin.FloatValueMixin.toProto3JsonHelper, fromProto3Json: $mixin.FloatValueMixin.fromProto3JsonHelper) + ..a<$core.double>(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'value', $pb.PbFieldType.OF) ..hasRequiredFields = false ; FloatValue._() : super(); - factory FloatValue() => create(); + factory FloatValue({ + $core.double value, + }) { + final _result = create(); + if (value != null) { + _result.value = value; + } + return _result; + } factory FloatValue.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory FloatValue.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') FloatValue clone() => FloatValue()..mergeFromMessage(this); - FloatValue copyWith(void Function(FloatValue) updates) => super.copyWith((message) => updates(message as FloatValue)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. 
' + 'Will be removed in next major version') + FloatValue copyWith(void Function(FloatValue) updates) => super.copyWith((message) => updates(message as FloatValue)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static FloatValue create() => FloatValue._(); @@ -75,17 +107,33 @@ class FloatValue extends $pb.GeneratedMessage with $mixin.FloatValueMixin { } class Int64Value extends $pb.GeneratedMessage with $mixin.Int64ValueMixin { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('Int64Value', package: const $pb.PackageName('google.protobuf'), createEmptyInstance: create, toProto3Json: $mixin.Int64ValueMixin.toProto3JsonHelper, fromProto3Json: $mixin.Int64ValueMixin.fromProto3JsonHelper) - ..aInt64(1, 'value') + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'Int64Value', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.protobuf'), createEmptyInstance: create, toProto3Json: $mixin.Int64ValueMixin.toProto3JsonHelper, fromProto3Json: $mixin.Int64ValueMixin.fromProto3JsonHelper) + ..aInt64(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'value') ..hasRequiredFields = false ; Int64Value._() : super(); - factory Int64Value() => create(); + factory Int64Value({ + $fixnum.Int64 value, + }) { + final _result = create(); + if (value != null) { + _result.value = value; + } + return _result; + } factory Int64Value.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory Int64Value.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') Int64Value clone() => Int64Value()..mergeFromMessage(this); - Int64Value copyWith(void Function(Int64Value) updates) => super.copyWith((message) => updates(message as Int64Value)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + Int64Value copyWith(void Function(Int64Value) updates) => super.copyWith((message) => updates(message as Int64Value)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static Int64Value create() => Int64Value._(); @@ -106,17 +154,33 @@ class Int64Value extends $pb.GeneratedMessage with $mixin.Int64ValueMixin { } class UInt64Value extends $pb.GeneratedMessage with $mixin.UInt64ValueMixin { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('UInt64Value', package: const $pb.PackageName('google.protobuf'), createEmptyInstance: create, toProto3Json: $mixin.UInt64ValueMixin.toProto3JsonHelper, fromProto3Json: $mixin.UInt64ValueMixin.fromProto3JsonHelper) - ..a<$fixnum.Int64>(1, 'value', $pb.PbFieldType.OU6, defaultOrMaker: $fixnum.Int64.ZERO) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'UInt64Value', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? 
'' : 'google.protobuf'), createEmptyInstance: create, toProto3Json: $mixin.UInt64ValueMixin.toProto3JsonHelper, fromProto3Json: $mixin.UInt64ValueMixin.fromProto3JsonHelper) + ..a<$fixnum.Int64>(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'value', $pb.PbFieldType.OU6, defaultOrMaker: $fixnum.Int64.ZERO) ..hasRequiredFields = false ; UInt64Value._() : super(); - factory UInt64Value() => create(); + factory UInt64Value({ + $fixnum.Int64 value, + }) { + final _result = create(); + if (value != null) { + _result.value = value; + } + return _result; + } factory UInt64Value.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory UInt64Value.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') UInt64Value clone() => UInt64Value()..mergeFromMessage(this); - UInt64Value copyWith(void Function(UInt64Value) updates) => super.copyWith((message) => updates(message as UInt64Value)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + UInt64Value copyWith(void Function(UInt64Value) updates) => super.copyWith((message) => updates(message as UInt64Value)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static UInt64Value create() => UInt64Value._(); @@ -137,17 +201,33 @@ class UInt64Value extends $pb.GeneratedMessage with $mixin.UInt64ValueMixin { } class Int32Value extends $pb.GeneratedMessage with $mixin.Int32ValueMixin { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('Int32Value', package: const $pb.PackageName('google.protobuf'), createEmptyInstance: create, toProto3Json: $mixin.Int32ValueMixin.toProto3JsonHelper, fromProto3Json: $mixin.Int32ValueMixin.fromProto3JsonHelper) - ..a<$core.int>(1, 'value', $pb.PbFieldType.O3) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'Int32Value', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.protobuf'), createEmptyInstance: create, toProto3Json: $mixin.Int32ValueMixin.toProto3JsonHelper, fromProto3Json: $mixin.Int32ValueMixin.fromProto3JsonHelper) + ..a<$core.int>(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'value', $pb.PbFieldType.O3) ..hasRequiredFields = false ; Int32Value._() : super(); - factory Int32Value() => create(); + factory Int32Value({ + $core.int value, + }) { + final _result = create(); + if (value != null) { + _result.value = value; + } + return _result; + } factory Int32Value.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory Int32Value.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. 
' + 'Will be removed in next major version') Int32Value clone() => Int32Value()..mergeFromMessage(this); - Int32Value copyWith(void Function(Int32Value) updates) => super.copyWith((message) => updates(message as Int32Value)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + Int32Value copyWith(void Function(Int32Value) updates) => super.copyWith((message) => updates(message as Int32Value)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static Int32Value create() => Int32Value._(); @@ -168,17 +248,33 @@ class Int32Value extends $pb.GeneratedMessage with $mixin.Int32ValueMixin { } class UInt32Value extends $pb.GeneratedMessage with $mixin.UInt32ValueMixin { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('UInt32Value', package: const $pb.PackageName('google.protobuf'), createEmptyInstance: create, toProto3Json: $mixin.UInt32ValueMixin.toProto3JsonHelper, fromProto3Json: $mixin.UInt32ValueMixin.fromProto3JsonHelper) - ..a<$core.int>(1, 'value', $pb.PbFieldType.OU3) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'UInt32Value', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.protobuf'), createEmptyInstance: create, toProto3Json: $mixin.UInt32ValueMixin.toProto3JsonHelper, fromProto3Json: $mixin.UInt32ValueMixin.fromProto3JsonHelper) + ..a<$core.int>(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'value', $pb.PbFieldType.OU3) ..hasRequiredFields = false ; UInt32Value._() : super(); - factory UInt32Value() => create(); + factory UInt32Value({ + $core.int value, + }) { + final _result = create(); + if (value != null) { + _result.value = value; + } + return _result; + } factory UInt32Value.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory UInt32Value.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') UInt32Value clone() => UInt32Value()..mergeFromMessage(this); - UInt32Value copyWith(void Function(UInt32Value) updates) => super.copyWith((message) => updates(message as UInt32Value)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. 
' + 'Will be removed in next major version') + UInt32Value copyWith(void Function(UInt32Value) updates) => super.copyWith((message) => updates(message as UInt32Value)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static UInt32Value create() => UInt32Value._(); @@ -199,17 +295,33 @@ class UInt32Value extends $pb.GeneratedMessage with $mixin.UInt32ValueMixin { } class BoolValue extends $pb.GeneratedMessage with $mixin.BoolValueMixin { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('BoolValue', package: const $pb.PackageName('google.protobuf'), createEmptyInstance: create, toProto3Json: $mixin.BoolValueMixin.toProto3JsonHelper, fromProto3Json: $mixin.BoolValueMixin.fromProto3JsonHelper) - ..aOB(1, 'value') + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'BoolValue', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.protobuf'), createEmptyInstance: create, toProto3Json: $mixin.BoolValueMixin.toProto3JsonHelper, fromProto3Json: $mixin.BoolValueMixin.fromProto3JsonHelper) + ..aOB(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'value') ..hasRequiredFields = false ; BoolValue._() : super(); - factory BoolValue() => create(); + factory BoolValue({ + $core.bool value, + }) { + final _result = create(); + if (value != null) { + _result.value = value; + } + return _result; + } factory BoolValue.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory BoolValue.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') BoolValue clone() => BoolValue()..mergeFromMessage(this); - BoolValue copyWith(void Function(BoolValue) updates) => super.copyWith((message) => updates(message as BoolValue)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + BoolValue copyWith(void Function(BoolValue) updates) => super.copyWith((message) => updates(message as BoolValue)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static BoolValue create() => BoolValue._(); @@ -230,17 +342,33 @@ class BoolValue extends $pb.GeneratedMessage with $mixin.BoolValueMixin { } class StringValue extends $pb.GeneratedMessage with $mixin.StringValueMixin { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('StringValue', package: const $pb.PackageName('google.protobuf'), createEmptyInstance: create, toProto3Json: $mixin.StringValueMixin.toProto3JsonHelper, fromProto3Json: $mixin.StringValueMixin.fromProto3JsonHelper) - ..aOS(1, 'value') + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'StringValue', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.protobuf'), createEmptyInstance: create, toProto3Json: $mixin.StringValueMixin.toProto3JsonHelper, fromProto3Json: $mixin.StringValueMixin.fromProto3JsonHelper) + ..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? 
'' : 'value') ..hasRequiredFields = false ; StringValue._() : super(); - factory StringValue() => create(); + factory StringValue({ + $core.String value, + }) { + final _result = create(); + if (value != null) { + _result.value = value; + } + return _result; + } factory StringValue.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory StringValue.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') StringValue clone() => StringValue()..mergeFromMessage(this); - StringValue copyWith(void Function(StringValue) updates) => super.copyWith((message) => updates(message as StringValue)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + StringValue copyWith(void Function(StringValue) updates) => super.copyWith((message) => updates(message as StringValue)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static StringValue create() => StringValue._(); @@ -261,17 +389,33 @@ class StringValue extends $pb.GeneratedMessage with $mixin.StringValueMixin { } class BytesValue extends $pb.GeneratedMessage with $mixin.BytesValueMixin { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('BytesValue', package: const $pb.PackageName('google.protobuf'), createEmptyInstance: create, toProto3Json: $mixin.BytesValueMixin.toProto3JsonHelper, fromProto3Json: $mixin.BytesValueMixin.fromProto3JsonHelper) - ..a<$core.List<$core.int>>(1, 'value', $pb.PbFieldType.OY) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'BytesValue', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.protobuf'), createEmptyInstance: create, toProto3Json: $mixin.BytesValueMixin.toProto3JsonHelper, fromProto3Json: $mixin.BytesValueMixin.fromProto3JsonHelper) + ..a<$core.List<$core.int>>(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'value', $pb.PbFieldType.OY) ..hasRequiredFields = false ; BytesValue._() : super(); - factory BytesValue() => create(); + factory BytesValue({ + $core.List<$core.int> value, + }) { + final _result = create(); + if (value != null) { + _result.value = value; + } + return _result; + } factory BytesValue.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory BytesValue.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') BytesValue clone() => BytesValue()..mergeFromMessage(this); - BytesValue copyWith(void Function(BytesValue) updates) => super.copyWith((message) => updates(message as BytesValue)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. 
' + 'Will be removed in next major version') + BytesValue copyWith(void Function(BytesValue) updates) => super.copyWith((message) => updates(message as BytesValue)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static BytesValue create() => BytesValue._(); diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/wrappers.pbenum.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/wrappers.pbenum.dart index 44ef9054..bcd8bb67 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/wrappers.pbenum.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/wrappers.pbenum.dart @@ -2,6 +2,6 @@ // Generated code. Do not modify. // source: google/protobuf/wrappers.proto // -// @dart = 2.3 -// ignore_for_file: camel_case_types,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type +// @dart = 2.7 +// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/wrappers.pbjson.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/wrappers.pbjson.dart index a0ba6887..b0bad915 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/wrappers.pbjson.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/protobuf/wrappers.pbjson.dart @@ -2,8 +2,8 @@ // Generated code. Do not modify. // source: google/protobuf/wrappers.proto // -// @dart = 2.3 -// ignore_for_file: camel_case_types,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type +// @dart = 2.7 +// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields const DoubleValue$json = const { '1': 'DoubleValue', diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/rpc/status.pb.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/rpc/status.pb.dart index b4570d24..026d36bd 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/rpc/status.pb.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/rpc/status.pb.dart @@ -2,29 +2,53 @@ // Generated code. Do not modify. 
// source: google/rpc/status.proto // -// @dart = 2.3 -// ignore_for_file: camel_case_types,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type +// @dart = 2.7 +// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields import 'dart:core' as $core; import 'package:protobuf/protobuf.dart' as $pb; -import '../protobuf/any.pb.dart' as $10; +import '../protobuf/any.pb.dart' as $11; class Status extends $pb.GeneratedMessage { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('Status', package: const $pb.PackageName('google.rpc'), createEmptyInstance: create) - ..a<$core.int>(1, 'code', $pb.PbFieldType.O3) - ..aOS(2, 'message') - ..pc<$10.Any>(3, 'details', $pb.PbFieldType.PM, subBuilder: $10.Any.create) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'Status', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.rpc'), createEmptyInstance: create) + ..a<$core.int>(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'code', $pb.PbFieldType.O3) + ..aOS(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'message') + ..pc<$11.Any>(3, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'details', $pb.PbFieldType.PM, subBuilder: $11.Any.create) ..hasRequiredFields = false ; Status._() : super(); - factory Status() => create(); + factory Status({ + $core.int code, + $core.String message, + $core.Iterable<$11.Any> details, + }) { + final _result = create(); + if (code != null) { + _result.code = code; + } + if (message != null) { + _result.message = message; + } + if (details != null) { + _result.details.addAll(details); + } + return _result; + } factory Status.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory Status.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') Status clone() => Status()..mergeFromMessage(this); - Status copyWith(void Function(Status) updates) => super.copyWith((message) => updates(message as Status)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. 
' + 'Will be removed in next major version') + Status copyWith(void Function(Status) updates) => super.copyWith((message) => updates(message as Status)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static Status create() => Status._(); @@ -53,6 +77,6 @@ class Status extends $pb.GeneratedMessage { void clearMessage() => clearField(2); @$pb.TagNumber(3) - $core.List<$10.Any> get details => $_getList(2); + $core.List<$11.Any> get details => $_getList(2); } diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/rpc/status.pbenum.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/rpc/status.pbenum.dart index 9671cc38..e3bc3b70 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/rpc/status.pbenum.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/rpc/status.pbenum.dart @@ -2,6 +2,6 @@ // Generated code. Do not modify. // source: google/rpc/status.proto // -// @dart = 2.3 -// ignore_for_file: camel_case_types,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type +// @dart = 2.7 +// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/rpc/status.pbjson.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/rpc/status.pbjson.dart index 5dbcf786..18987faf 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/rpc/status.pbjson.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/rpc/status.pbjson.dart @@ -2,8 +2,8 @@ // Generated code. Do not modify. // source: google/rpc/status.proto // -// @dart = 2.3 -// ignore_for_file: camel_case_types,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type +// @dart = 2.7 +// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields const Status$json = const { '1': 'Status', diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/type/latlng.pb.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/type/latlng.pb.dart index fc8a44d1..9836ebcd 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/type/latlng.pb.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/type/latlng.pb.dart @@ -2,26 +2,46 @@ // Generated code. Do not modify. 
// source: google/type/latlng.proto // -// @dart = 2.3 -// ignore_for_file: camel_case_types,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type +// @dart = 2.7 +// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields import 'dart:core' as $core; import 'package:protobuf/protobuf.dart' as $pb; class LatLng extends $pb.GeneratedMessage { - static final $pb.BuilderInfo _i = $pb.BuilderInfo('LatLng', package: const $pb.PackageName('google.type'), createEmptyInstance: create) - ..a<$core.double>(1, 'latitude', $pb.PbFieldType.OD) - ..a<$core.double>(2, 'longitude', $pb.PbFieldType.OD) + static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'LatLng', package: const $pb.PackageName(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'google.type'), createEmptyInstance: create) + ..a<$core.double>(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'latitude', $pb.PbFieldType.OD) + ..a<$core.double>(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'longitude', $pb.PbFieldType.OD) ..hasRequiredFields = false ; LatLng._() : super(); - factory LatLng() => create(); + factory LatLng({ + $core.double latitude, + $core.double longitude, + }) { + final _result = create(); + if (latitude != null) { + _result.latitude = latitude; + } + if (longitude != null) { + _result.longitude = longitude; + } + return _result; + } factory LatLng.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); factory LatLng.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') LatLng clone() => LatLng()..mergeFromMessage(this); - LatLng copyWith(void Function(LatLng) updates) => super.copyWith((message) => updates(message as LatLng)); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + LatLng copyWith(void Function(LatLng) updates) => super.copyWith((message) => updates(message as LatLng)); // ignore: deprecated_member_use $pb.BuilderInfo get info_ => _i; @$core.pragma('dart2js:noInline') static LatLng create() => LatLng._(); diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/type/latlng.pbenum.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/type/latlng.pbenum.dart index 81d6295e..8bdd382f 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/type/latlng.pbenum.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/type/latlng.pbenum.dart @@ -2,6 +2,6 @@ // Generated code. Do not modify. 
// source: google/type/latlng.proto // -// @dart = 2.3 -// ignore_for_file: camel_case_types,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type +// @dart = 2.7 +// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields diff --git a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/type/latlng.pbjson.dart b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/type/latlng.pbjson.dart index b2e1cb00..4d2a93f1 100644 --- a/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/type/latlng.pbjson.dart +++ b/cloud_firestore/cloud_firestore_vm/lib/src/proto/google/type/latlng.pbjson.dart @@ -2,8 +2,8 @@ // Generated code. Do not modify. // source: google/type/latlng.proto // -// @dart = 2.3 -// ignore_for_file: camel_case_types,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type +// @dart = 2.7 +// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields const LatLng$json = const { '1': 'LatLng', diff --git a/cloud_firestore/cloud_firestore_vm/pubspec.lock b/cloud_firestore/cloud_firestore_vm/pubspec.lock index c2ebbd29..10e235a9 100644 --- a/cloud_firestore/cloud_firestore_vm/pubspec.lock +++ b/cloud_firestore/cloud_firestore_vm/pubspec.lock @@ -7,28 +7,28 @@ packages: name: _fe_analyzer_shared url: "https://pub.dartlang.org" source: hosted - version: "1.0.3" + version: "12.0.0" _firebase_database_collection_vm: dependency: "direct main" description: - name: _firebase_database_collection_vm - url: "https://pub.dartlang.org" - source: hosted - version: "0.0.2" + path: "../../firebase_core/_firebase_database_collection_vm" + relative: true + source: path + version: "0.0.3" _firebase_internal_vm: dependency: "direct main" description: name: _firebase_internal_vm url: "https://pub.dartlang.org" source: hosted - version: "0.0.2" + version: "0.0.2+1" analyzer: dependency: transitive description: name: analyzer url: "https://pub.dartlang.org" source: hosted - version: "0.39.4" + version: "0.40.6" args: dependency: transitive description: @@ -42,28 +42,63 @@ packages: name: async url: "https://pub.dartlang.org" source: hosted - version: "2.4.1" + version: "2.5.0-nullsafety.3" boolean_selector: dependency: transitive description: name: boolean_selector url: "https://pub.dartlang.org" source: hosted - version: "1.0.5" + version: "2.1.0-nullsafety.3" + build: + dependency: transitive + description: + name: build + url: "https://pub.dartlang.org" + source: hosted + version: "1.6.1" + built_collection: + dependency: transitive + description: + name: built_collection + url: "https://pub.dartlang.org" + source: hosted + version: "4.3.2" + built_value: + dependency: transitive + description: + name: built_value + url: "https://pub.dartlang.org" + source: hosted + version: "7.1.0" charcode: dependency: transitive description: name: charcode url: "https://pub.dartlang.org" source: hosted - version: "1.1.2" + version: "1.2.0-nullsafety.3" + cli_util: + dependency: transitive + description: + name: cli_util + url: "https://pub.dartlang.org" + source: hosted + version: "0.2.0" + code_builder: + dependency: transitive + description: + name: code_builder + url: "https://pub.dartlang.org" + source: hosted + 
version: "3.6.0" collection: dependency: "direct main" description: name: collection url: "https://pub.dartlang.org" source: hosted - version: "1.14.12" + version: "1.15.0-nullsafety.5" convert: dependency: transitive description: @@ -84,14 +119,14 @@ packages: name: crypto url: "https://pub.dartlang.org" source: hosted - version: "2.1.3" - csslib: + version: "2.1.5" + dart_style: dependency: transitive description: - name: csslib + name: dart_style url: "https://pub.dartlang.org" source: hosted - version: "0.16.1" + version: "1.3.10" ffi: dependency: transitive description: @@ -102,10 +137,10 @@ packages: firebase_core_vm: dependency: "direct main" description: - name: firebase_core_vm - url: "https://pub.dartlang.org" - source: hosted - version: "0.0.4" + path: "../../firebase_core/firebase_core_vm" + relative: true + source: path + version: "0.0.10" fixnum: dependency: "direct main" description: @@ -133,14 +168,14 @@ packages: name: grpc url: "https://pub.dartlang.org" source: hosted - version: "2.1.3" - html: + version: "2.8.0" + hive: dependency: transitive description: - name: html + name: hive url: "https://pub.dartlang.org" source: hosted - version: "0.14.0+3" + version: "1.4.4+1" http: dependency: transitive description: @@ -169,6 +204,13 @@ packages: url: "https://pub.dartlang.org" source: hosted version: "3.1.3" + intl: + dependency: transitive + description: + name: intl + url: "https://pub.dartlang.org" + source: hosted + version: "0.16.1" io: dependency: transitive description: @@ -182,7 +224,7 @@ packages: name: js url: "https://pub.dartlang.org" source: hosted - version: "0.6.1+1" + version: "0.6.3-nullsafety.3" logging: dependency: transitive description: @@ -196,14 +238,14 @@ packages: name: matcher url: "https://pub.dartlang.org" source: hosted - version: "0.12.6" + version: "0.12.10-nullsafety.3" meta: dependency: "direct main" description: name: meta url: "https://pub.dartlang.org" source: hosted - version: "1.1.8" + version: "1.3.0-nullsafety.6" mime: dependency: transitive description: @@ -217,28 +259,21 @@ packages: name: mockito url: "https://pub.dartlang.org" source: hosted - version: "4.1.1" + version: "4.1.3" moor: dependency: transitive description: name: moor url: "https://pub.dartlang.org" source: hosted - version: "2.4.2" + version: "3.4.0" moor_ffi: dependency: "direct dev" description: name: moor_ffi url: "https://pub.dartlang.org" source: hosted - version: "0.4.0" - multi_server_socket: - dependency: transitive - description: - name: multi_server_socket - url: "https://pub.dartlang.org" - source: hosted - version: "1.0.2" + version: "0.8.0" node_interop: dependency: transitive description: @@ -266,35 +301,35 @@ packages: name: package_config url: "https://pub.dartlang.org" source: hosted - version: "1.9.1" + version: "1.9.3" path: dependency: "direct main" description: name: path url: "https://pub.dartlang.org" source: hosted - version: "1.7.0" + version: "1.8.0-nullsafety.3" pedantic: dependency: "direct main" description: name: pedantic url: "https://pub.dartlang.org" source: hosted - version: "1.9.0" + version: "1.10.0-nullsafety.3" pool: dependency: transitive description: name: pool url: "https://pub.dartlang.org" source: hosted - version: "1.4.0" + version: "1.5.0-nullsafety.3" protobuf: dependency: "direct main" description: name: protobuf url: "https://pub.dartlang.org" source: hosted - version: "1.0.1" + version: "1.1.0" pub_semver: dependency: transitive description: @@ -302,13 +337,20 @@ packages: url: "https://pub.dartlang.org" source: hosted 
version: "1.4.3" + quiver: + dependency: transitive + description: + name: quiver + url: "https://pub.dartlang.org" + source: hosted + version: "2.1.5" rxdart: dependency: "direct main" description: name: rxdart url: "https://pub.dartlang.org" source: hosted - version: "0.23.1" + version: "0.24.1" semaphore: dependency: "direct main" description: @@ -344,48 +386,69 @@ packages: url: "https://pub.dartlang.org" source: hosted version: "0.2.3" + source_gen: + dependency: transitive + description: + name: source_gen + url: "https://pub.dartlang.org" + source: hosted + version: "0.9.10+1" source_map_stack_trace: dependency: transitive description: name: source_map_stack_trace url: "https://pub.dartlang.org" source: hosted - version: "2.0.0" + version: "2.1.0-nullsafety.4" source_maps: dependency: transitive description: name: source_maps url: "https://pub.dartlang.org" source: hosted - version: "0.10.9" + version: "0.10.10-nullsafety.3" source_span: dependency: transitive description: name: source_span url: "https://pub.dartlang.org" source: hosted - version: "1.5.5" + version: "1.8.0-nullsafety.4" + sqlite3: + dependency: transitive + description: + name: sqlite3 + url: "https://pub.dartlang.org" + source: hosted + version: "0.1.8" + sqlite3_flutter_libs: + dependency: transitive + description: + name: sqlite3_flutter_libs + url: "https://pub.dartlang.org" + source: hosted + version: "0.2.0" stack_trace: dependency: transitive description: name: stack_trace url: "https://pub.dartlang.org" source: hosted - version: "1.9.3" + version: "1.10.0-nullsafety.6" stream_channel: dependency: transitive description: name: stream_channel url: "https://pub.dartlang.org" source: hosted - version: "2.0.0" + version: "2.1.0-nullsafety.3" string_scanner: dependency: transitive description: name: string_scanner url: "https://pub.dartlang.org" source: hosted - version: "1.0.5" + version: "1.1.0-nullsafety.3" synchronized: dependency: transitive description: @@ -399,42 +462,42 @@ packages: name: term_glyph url: "https://pub.dartlang.org" source: hosted - version: "1.1.0" + version: "1.2.0-nullsafety.3" test: dependency: "direct dev" description: name: test url: "https://pub.dartlang.org" source: hosted - version: "1.14.2" + version: "1.16.0-nullsafety.15" test_api: dependency: transitive description: name: test_api url: "https://pub.dartlang.org" source: hosted - version: "0.2.15" + version: "0.2.19-nullsafety.6" test_core: dependency: transitive description: name: test_core url: "https://pub.dartlang.org" source: hosted - version: "0.3.3" + version: "0.3.12-nullsafety.14" typed_data: dependency: transitive description: name: typed_data url: "https://pub.dartlang.org" source: hosted - version: "1.1.6" + version: "1.3.0-nullsafety.5" uuid: dependency: "direct dev" description: name: uuid url: "https://pub.dartlang.org" source: hosted - version: "2.0.4" + version: "2.2.2" vm_service: dependency: transitive description: @@ -471,4 +534,5 @@ packages: source: hosted version: "2.2.0" sdks: - dart: ">=2.7.0 <3.0.0" + dart: ">=2.12.0-0.0 <3.0.0" + flutter: ">=1.10.1" diff --git a/cloud_firestore/cloud_firestore_vm/pubspec.yaml b/cloud_firestore/cloud_firestore_vm/pubspec.yaml index 82c2b410..53836510 100644 --- a/cloud_firestore/cloud_firestore_vm/pubspec.yaml +++ b/cloud_firestore/cloud_firestore_vm/pubspec.yaml @@ -4,27 +4,31 @@ version: 0.0.7 homepage: https://github.com/fluttercommunity/firebase_dart_sdk/tree/develop/cloud_firestore/cloud_firestore_vm environment: - sdk: ">=2.7.0 <3.0.0" + sdk: ">=2.10.0 <3.0.0" 
dependencies: - _firebase_database_collection_vm: ^0.0.2 - _firebase_internal_vm: ^0.0.2 - firebase_core_vm: ^0.0.4 - protobuf: ^1.0.1 - grpc: ^2.1.3 - rxdart: ^0.23.1 + + protobuf: ^1.1.0 + grpc: ^2.8.0 + rxdart: ^0.24.1 semaphore: ^0.1.4 - path: ^1.6.4 + path: ^1.7.0 fixnum: ^0.10.11 - meta: ^1.1.8 - collection: ^1.14.12 - pedantic: ^1.9.0 - async: ^2.4.1 + meta: ^1.2.4 + collection: ^1.14.13 + pedantic: ^1.9.2 + async: ^2.4.2 + + _firebase_internal_vm: ^0.0.2+1 + _firebase_database_collection_vm: + path: ../../firebase_core/_firebase_database_collection_vm + firebase_core_vm: + path: ../../firebase_core/firebase_core_vm dev_dependencies: - test: ^1.14.2 - mockito: ^4.1.1 - moor_ffi: ^0.4.0 - uuid: ^2.0.4 + test: ^1.15.7 + mockito: ^4.1.3 + moor_ffi: ^0.8.0 + uuid: ^2.2.2 latest_commit: 576da00e85e3950bd53049f800a552f28a0c6d99 \ No newline at end of file diff --git a/cloud_firestore/cloud_firestore_vm/res/protos/firebase/firestore/proto/bundle.proto b/cloud_firestore/cloud_firestore_vm/res/protos/firebase/firestore/proto/bundle.proto new file mode 100644 index 00000000..edc34462 --- /dev/null +++ b/cloud_firestore/cloud_firestore_vm/res/protos/firebase/firestore/proto/bundle.proto @@ -0,0 +1,120 @@ +// Copyright 2021 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +// This file defines the format of Firestore bundle file/stream. It is not a part of the +// Firestore API, only a specification used by Server and Client SDK to write and read +// bundles. + +syntax = "proto3"; + +package firestore; + +import "google/firestore/v1/document.proto"; +import "google/firestore/v1/query.proto"; +import "google/protobuf/timestamp.proto"; + +option csharp_namespace = "Firestore.Proto"; +option go_package = "google.golang.org/genproto/firestore/proto;firestore"; +option java_multiple_files = true; +option java_outer_classname = "BundleProto"; +option java_package = "com.google.firestore.proto"; +option objc_class_prefix = "FSTPB"; +option php_namespace = "Firestore\\Proto"; + +// Encodes a query saved in the bundle. +message BundledQuery { + // The parent resource name. + string parent = 1; + + // The query to run. + oneof query_type { + // A structured query. + google.firestore.v1.StructuredQuery structured_query = 2; + } + + // If the query is a limit query, should the limit be applied to the beginning or + // the end of results. + enum LimitType { + FIRST = 0; + LAST = 1; + } + LimitType limit_type = 3; +} + +// A Query associated with a name, created as part of the bundle file, and can be read +// by client SDKs once the bundle containing them is loaded. +message NamedQuery { + // Name of the query, such that client can use the name to load this query + // from bundle, and resume from when the query results are materialized + // into this bundle. + string name = 1; + + // The query saved in the bundle. + BundledQuery bundled_query = 2; + + // The read time of the query, when it is used to build the bundle. 
This is useful to + // resume the query from the bundle, once it is loaded by client SDKs. + google.protobuf.Timestamp read_time = 3; +} + +// Metadata describing a Firestore document saved in the bundle. +message BundledDocumentMetadata { + // The document key of a bundled document. + string name = 1; + + // The snapshot version of the document data bundled. + google.protobuf.Timestamp read_time = 2; + + // Whether the document exists. + bool exists = 3; + + // The names of the queries in this bundle that this document matches to. + repeated string queries = 4; +} + +// Metadata describing the bundle file/stream. +message BundleMetadata { + // The ID of the bundle. + string id = 1; + + // Time at which the documents snapshot is taken for this bundle. + google.protobuf.Timestamp create_time = 2; + + // The schema version of the bundle. + uint32 version = 3; + + // The number of documents in the bundle. + uint32 total_documents = 4; + + // The size of the bundle in bytes, excluding this `BundleMetadata`. + uint64 total_bytes = 5; +} + +// A Firestore bundle is a length-prefixed stream of JSON representations of +// `BundleElement`. +// Only one `BundleMetadata` is expected, and it should be the first element. +// The named queries follow after `metadata`. Every `document_metadata` is +// immediately followed by a `document`. +message BundleElement { + oneof element_type { + BundleMetadata metadata = 1; + + NamedQuery named_query = 2; + + BundledDocumentMetadata document_metadata = 3; + + google.firestore.v1.Document document = 4; + } +} diff --git a/cloud_firestore/cloud_firestore_vm/res/protos/firebase/firestore/proto/target.proto b/cloud_firestore/cloud_firestore_vm/res/protos/firebase/firestore/proto/target.proto index eb8453f4..8bca7e4a 100644 --- a/cloud_firestore/cloud_firestore_vm/res/protos/firebase/firestore/proto/target.proto +++ b/cloud_firestore/cloud_firestore_vm/res/protos/firebase/firestore/proto/target.proto @@ -78,6 +78,10 @@ message Target { // A target specified by a set of document names. google.firestore.v1.Target.DocumentsTarget documents = 6; } + + // Denotes the maximum snapshot version at which the associated query view + // contained no limbo documents. + google.protobuf.Timestamp last_limbo_free_snapshot_version = 7; } // Global state tracked across all Targets, tracked separately to avoid the diff --git a/cloud_firestore/cloud_firestore_vm/res/protos/generate_proto.sh b/cloud_firestore/cloud_firestore_vm/res/protos/generate_proto.sh index b980bd6d..5f3dd653 100755 --- a/cloud_firestore/cloud_firestore_vm/res/protos/generate_proto.sh +++ b/cloud_firestore/cloud_firestore_vm/res/protos/generate_proto.sh @@ -1,2 +1,2 @@ #!/bin/sh -protoc **/**.proto --dart_out=grpc:../../lib/src/proto \ No newline at end of file +protoc -- **/**.proto --dart_out=grpc:../../lib/src/proto \ No newline at end of file diff --git a/cloud_firestore/cloud_firestore_vm/res/protos/google/firestore/v1/query.proto b/cloud_firestore/cloud_firestore_vm/res/protos/google/firestore/v1/query.proto index a88d851a..4f648e24 100644 --- a/cloud_firestore/cloud_firestore_vm/res/protos/google/firestore/v1/query.proto +++ b/cloud_firestore/cloud_firestore_vm/res/protos/google/firestore/v1/query.proto @@ -1,4 +1,4 @@ -// Copyright 2018 Google LLC. +// Copyright 2019 Google LLC. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -85,32 +85,74 @@ message StructuredQuery { // Unspecified. This value must not be used. 
       OPERATOR_UNSPECIFIED = 0;

-      // Less than. Requires that the field come first in `order_by`.
+      // The given `field` is less than the given `value`.
+      //
+      // Requires:
+      //
+      // * That `field` come first in `order_by`.
       LESS_THAN = 1;

-      // Less than or equal. Requires that the field come first in `order_by`.
+      // The given `field` is less than or equal to the given `value`.
+      //
+      // Requires:
+      //
+      // * That `field` come first in `order_by`.
       LESS_THAN_OR_EQUAL = 2;

-      // Greater than. Requires that the field come first in `order_by`.
+      // The given `field` is greater than the given `value`.
+      //
+      // Requires:
+      //
+      // * That `field` come first in `order_by`.
       GREATER_THAN = 3;

-      // Greater than or equal. Requires that the field come first in
-      // `order_by`.
+      // The given `field` is greater than or equal to the given `value`.
+      //
+      // Requires:
+      //
+      // * That `field` come first in `order_by`.
       GREATER_THAN_OR_EQUAL = 4;

-      // Equal.
+      // The given `field` is equal to the given `value`.
       EQUAL = 5;

-      // Contains. Requires that the field is an array.
+      // The given `field` is not equal to the given `value`.
+      //
+      // Requires:
+      //
+      // * No other `NOT_EQUAL`, `NOT_IN`, `IS_NOT_NULL`, or `IS_NOT_NAN`.
+      // * That `field` comes first in the `order_by`.
+      NOT_EQUAL = 6;
+
+      // The given `field` is an array that contains the given `value`.
       ARRAY_CONTAINS = 7;

-      // In. Requires that `value` is a non-empty ArrayValue with at most 10
-      // values.
+      // The given `field` is equal to at least one value in the given array.
+      //
+      // Requires:
+      //
+      // * That `value` is a non-empty `ArrayValue` with at most 10 values.
+      // * No other `IN` or `ARRAY_CONTAINS_ANY`. (-- or `NOT_IN` --)
       IN = 8;

-      // Contains any. Requires that the field is an array and
-      // `value` is a non-empty ArrayValue with at most 10 values.
+      // The given `field` is an array that contains any of the values in the
+      // given array.
+      //
+      // Requires:
+      //
+      // * That `value` is a non-empty `ArrayValue` with at most 10 values.
+      // * No other `IN` or `ARRAY_CONTAINS_ANY`. (-- or `NOT_IN` --)
       ARRAY_CONTAINS_ANY = 9;
+
+      // The value of the `field` is not in the given array.
+      //
+      // Requires:
+      //
+      // * That `value` is a non-empty `ArrayValue` with at most 10 values.
+      // * No other `IN`, `ARRAY_CONTAINS_ANY`, `NOT_IN`, `NOT_EQUAL`,
+      //   `IS_NOT_NULL`, or `IS_NOT_NAN`.
+      // * That `field` comes first in the `order_by`.
+      NOT_IN = 10;
     }

     // The field to filter by.
@@ -130,11 +172,27 @@ message StructuredQuery {
       // Unspecified. This value must not be used.
       OPERATOR_UNSPECIFIED = 0;

-      // Test if a field is equal to NaN.
+      // The given `field` is equal to `NaN`.
       IS_NAN = 2;

-      // Test if an exprestion evaluates to Null.
+      // The given `field` is equal to `NULL`.
       IS_NULL = 3;
+
+      // The given `field` is not equal to `NaN`.
+      //
+      // Requires:
+      //
+      // * No other `NOT_EQUAL`, `NOT_IN`, `IS_NOT_NULL`, or `IS_NOT_NAN`.
+      // * That `field` comes first in the `order_by`.
+      IS_NOT_NAN = 4;
+
+      // The given `field` is not equal to `NULL`.
+      //
+      // Requires:
+      //
+      // * A single `NOT_EQUAL`, `NOT_IN`, `IS_NOT_NULL`, or `IS_NOT_NAN`.
+      // * That `field` comes first in the `order_by`.
+      IS_NOT_NULL = 5;
     }

     // The unary operator to apply.
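The operator documentation above is the contract that the SDK's new filter classes have to honour. As a rough illustration only (the names below are hypothetical and this code is not part of the patch), the key behavioural difference of the negated operators is that a document whose field is missing never matches; a minimal Dart sketch of that rule, with a missing field modelled as null:

// Hypothetical sketch of NOT_EQUAL / NOT_IN matching; the SDK's own
// FieldFilter implementations are the source of truth.
enum FilterOp { equal, notEqual, inList, notInList }

bool matchesFilter(FilterOp op, dynamic fieldValue, dynamic filterValue) {
  switch (op) {
    case FilterOp.equal:
      return fieldValue == filterValue;
    case FilterOp.notEqual:
      // A missing field (null in this sketch) never satisfies NOT_EQUAL.
      return fieldValue != null && fieldValue != filterValue;
    case FilterOp.inList:
      return (filterValue as List).contains(fieldValue);
    case FilterOp.notInList:
      // A missing field never satisfies NOT_IN either.
      return fieldValue != null && !(filterValue as List).contains(fieldValue);
  }
  return false;
}

void main() {
  print(matchesFilter(FilterOp.notEqual, 'red', 'blue'));             // true
  print(matchesFilter(FilterOp.notInList, 'red', ['blue', 'green'])); // true
  print(matchesFilter(FilterOp.notInList, null, ['blue', 'green']));  // false
}

A real implementation works on the SDK's field value model rather than raw Dart objects; the sketch only mirrors the `Requires` notes in the proto comments above.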
diff --git a/cloud_firestore/cloud_firestore_vm/res/protos/google/firestore/v1/write.proto b/cloud_firestore/cloud_firestore_vm/res/protos/google/firestore/v1/write.proto index bffd0790..59b2dafe 100644 --- a/cloud_firestore/cloud_firestore_vm/res/protos/google/firestore/v1/write.proto +++ b/cloud_firestore/cloud_firestore_vm/res/protos/google/firestore/v1/write.proto @@ -42,10 +42,12 @@ message Write { // `projects/{project_id}/databases/{database_id}/documents/{document_path}`. string delete = 2; + // The name of a document on which to verify the `current_document` + // precondition. + // This only requires read access to the document. + string verify = 5; + // Applies a tranformation to a document. - // At most one `transform` per document is allowed in a given request. - // An `update` cannot follow a `transform` on the same document in a given - // request. DocumentTransform transform = 6; } @@ -61,6 +63,13 @@ message Write { // The field paths in this mask must not contain a reserved field name. DocumentMask update_mask = 3; + // The transforms to perform after update. + // + // This field can be set only when the operation is `update`. If present, this + // write is equivalent to performing `update` and `transform` to the same + // document atomically and in order. + repeated DocumentTransform.FieldTransform update_transforms = 7; + // An optional precondition on the document. // // The write will fail if this is set and not met by the target document. diff --git a/cloud_firestore/cloud_firestore_vm/test/integration/firebase/firestore/firestore_test.dart b/cloud_firestore/cloud_firestore_vm/test/integration/firebase/firestore/firestore_test.dart index c8b9a494..b83e746a 100644 --- a/cloud_firestore/cloud_firestore_vm/test/integration/firebase/firestore/firestore_test.dart +++ b/cloud_firestore/cloud_firestore_vm/test/integration/firebase/firestore/firestore_test.dart @@ -16,7 +16,7 @@ import 'package:cloud_firestore_vm/src/firebase/firestore/query.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/query_snapshot.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/set_options.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/source.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/util/timer_task.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/util/async_task.dart'; import 'package:cloud_firestore_vm/src/firebase/timestamp.dart'; import 'package:test/test.dart'; @@ -1075,7 +1075,7 @@ void main() { test('testWriteStreamReconnectsAfterIdle', () async { final DocumentReference doc = await testDocument(); final Firestore firestore = doc.firestore; - final TaskScheduler scheduler = firestore.scheduler; + final AsyncQueue scheduler = firestore.scheduler; await doc.set(map(['foo', 'bar'])); await Future.delayed(const Duration(milliseconds: 250)); @@ -1087,7 +1087,7 @@ void main() { test('testWatchStreamReconnectsAfterIdle', () async { final DocumentReference doc = await testDocument(); final Firestore firestore = doc.firestore; - final TaskScheduler scheduler = firestore.scheduler; + final AsyncQueue scheduler = firestore.scheduler; await waitForOnlineSnapshot(doc); await Future.delayed(const Duration(milliseconds: 250)); diff --git a/cloud_firestore/cloud_firestore_vm/test/integration/firebase/firestore/remote/stream_test.dart b/cloud_firestore/cloud_firestore_vm/test/integration/firebase/firestore/remote/stream_test.dart index ae301c69..8d17d66f 100644 --- 
a/cloud_firestore/cloud_firestore_vm/test/integration/firebase/firestore/remote/stream_test.dart +++ b/cloud_firestore/cloud_firestore_vm/test/integration/firebase/firestore/remote/stream_test.dart @@ -7,7 +7,7 @@ import 'dart:async'; import 'package:cloud_firestore_vm/src/firebase/firestore/auth/empty_credentials_provider.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/mutation.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/remote/datastore/datastore.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/util/timer_task.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/util/async_task.dart'; import 'package:pedantic/pedantic.dart'; import 'package:test/test.dart'; @@ -45,7 +45,7 @@ void main() { } /// Creates a WriteStream and gets it in a state that accepts mutations. - Future createAndOpenWriteStream(TaskScheduler scheduler) async { + Future createAndOpenWriteStream(AsyncQueue scheduler) async { final Datastore datastore = Datastore( scheduler, IntegrationTestUtil.testEnvDatabaseInfo(), @@ -59,7 +59,7 @@ void main() { test('testWatchStreamStopBeforeHandshake', () async { final Datastore datastore = Datastore( - TaskScheduler(''), + AsyncQueue(''), IntegrationTestUtil.testEnvDatabaseInfo(), EmptyCredentialsProvider(), ); @@ -92,7 +92,7 @@ void main() { test('testWriteStreamStopAfterHandshake', () async { final Datastore datastore = Datastore( - TaskScheduler(''), + AsyncQueue(''), IntegrationTestUtil.testEnvDatabaseInfo(), EmptyCredentialsProvider(), ); @@ -135,7 +135,7 @@ void main() { /// [WriteStream.stop]. test('testWriteStreamStopPartial', () async { final Datastore datastore = Datastore( - TaskScheduler(''), + AsyncQueue(''), IntegrationTestUtil.testEnvDatabaseInfo(), EmptyCredentialsProvider(), ); @@ -167,7 +167,7 @@ void main() { }); test('testWriteStreamStop', () async { - final TaskScheduler scheduler = TaskScheduler(''); + final AsyncQueue scheduler = AsyncQueue(''); final WriteStream writeStream = await createAndOpenWriteStream(scheduler); writeStream.stop(); @@ -175,7 +175,7 @@ void main() { }); test('testStreamClosesWhenIdle', () async { - final TaskScheduler scheduler = TaskScheduler(''); + final AsyncQueue scheduler = AsyncQueue(''); final WriteStream writeStream = await createAndOpenWriteStream(scheduler); writeStream.markIdle(); expect(scheduler.getTask(TaskId.writeStreamIdle), isNotNull); @@ -186,7 +186,7 @@ void main() { }); test('testStreamCancelsIdleOnWrite', () async { - final TaskScheduler scheduler = TaskScheduler(''); + final AsyncQueue scheduler = AsyncQueue(''); final WriteStream writeStream = await createAndOpenWriteStream(scheduler); writeStream @@ -197,7 +197,7 @@ void main() { }); test('testStreamStaysIdle', () async { - final TaskScheduler scheduler = TaskScheduler(''); + final AsyncQueue scheduler = AsyncQueue(''); final WriteStream writeStream = await createAndOpenWriteStream(scheduler); writeStream // diff --git a/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/core/target_id_generator_test.dart b/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/core/target_id_generator_test.dart index a28fbbe6..65ba0875 100644 --- a/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/core/target_id_generator_test.dart +++ b/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/core/target_id_generator_test.dart @@ -7,7 +7,7 @@ import 'package:test/test.dart'; void main() { test('testConstructor', () { - 
expect(TargetIdGenerator.forQueryCache(0).nextId, 2); + expect(TargetIdGenerator.forTargetCache(0).nextId, 2); expect(TargetIdGenerator.forSyncEngine().nextId, 1); }); diff --git a/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/local/accumulating_stats_collector.dart b/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/local/accumulating_stats_collector.dart index 942ff4fe..26718a57 100644 --- a/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/local/accumulating_stats_collector.dart +++ b/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/local/accumulating_stats_collector.dart @@ -2,7 +2,7 @@ // Lung Razvan // on 22/03/2020 -import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistance/stats_collector.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/stats_collector.dart'; /// A test-only collector of operation counts from the persistence layer. class AccumulatingStatsCollector extends StatsCollector { diff --git a/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/local/cases/index_manager_test_case.dart b/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/local/cases/index_manager_test_case.dart index 9098a1af..4c5ad93c 100644 --- a/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/local/cases/index_manager_test_case.dart +++ b/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/local/cases/index_manager_test_case.dart @@ -4,8 +4,8 @@ import 'dart:async'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistance/index_manager.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistance/persistence.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/index_manager.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/persistence.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/resource_path.dart'; import 'package:test/test.dart'; diff --git a/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/local/cases/local_store_test_case.dart b/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/local/cases/local_store_test_case.dart index 3a0a31db..acf93770 100644 --- a/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/local/cases/local_store_test_case.dart +++ b/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/local/cases/local_store_test_case.dart @@ -11,9 +11,9 @@ import 'package:cloud_firestore_vm/src/firebase/firestore/field_value.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/local/local_store.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/local/local_view_changes.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/local/local_write_result.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistance/mutation_queue.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistance/persistence.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistance/remote_document_cache.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/mutation_queue.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/persistence.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/remote_document_cache.dart'; import 
'package:cloud_firestore_vm/src/firebase/firestore/local/query_data.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/local/query_purpose.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/document.dart'; @@ -922,10 +922,10 @@ class LocalStoreTestCase { await _applyRemoteEvent(remoteEvent); // Stop listening so that the query should become inactive (but persistent) - await _localStore.releaseQuery(_query); + await _localStore.releaseTarget(_query); // Should come back with the same resume token - final QueryData queryData2 = await _localStore.allocateQuery(_query); + final QueryData queryData2 = await _localStore.allocateTarget(_query); expect(queryData2.resumeToken, _resumeToken); } @@ -969,10 +969,10 @@ class LocalStoreTestCase { await _applyRemoteEvent(remoteEvent2); // Stop listening so that the query should become inactive (but persistent) - await _localStore.releaseQuery(_query); + await _localStore.releaseTarget(_query); // Should come back with the same resume token - final QueryData queryData2 = await _localStore.allocateQuery(_query); + final QueryData queryData2 = await _localStore.allocateTarget(_query); expect(queryData2.resumeToken, _resumeToken); } @@ -1316,13 +1316,13 @@ class LocalStoreTestCase { } Future _allocateQuery(Query query) async { - final QueryData queryData = await _localStore.allocateQuery(query); + final QueryData queryData = await _localStore.allocateTarget(query); _lastTargetId = queryData.targetId; return queryData.targetId; } Future _releaseQuery(Query query) async { - await _localStore.releaseQuery(query); + await _localStore.releaseTarget(query); } /// Asserts that the last target ID is the given number. diff --git a/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/local/cases/lru_garbage_collector_test_case.dart b/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/local/cases/lru_garbage_collector_test_case.dart index 727f7a2b..134629de 100644 --- a/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/local/cases/lru_garbage_collector_test_case.dart +++ b/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/local/cases/lru_garbage_collector_test_case.dart @@ -6,15 +6,15 @@ import 'dart:async'; import 'dart:typed_data'; import 'package:cloud_firestore_vm/src/firebase/firestore/auth/user.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/core/listent_sequence.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/core/listen_sequence.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/core/query.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/local/lru_delegate.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/local/lru_garbage_collector.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistance/mutation_queue.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistance/persistence.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistance/query_cache.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistance/remote_document_cache.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/query_data.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/mutation_queue.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/persistence.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/target_cache.dart'; 
+import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/remote_document_cache.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/target_data.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/local/query_purpose.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/local/reference_set.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/document.dart'; @@ -164,7 +164,7 @@ class LruGarbageCollectorTestCase { await _addNextQuery(); } await _persistence.runTransaction('query with a mutation', () async { - final QueryData queryData = await _addNextQueryInTransaction(); + final TargetData queryData = await _addNextQueryInTransaction(); await _addDocumentToTarget(docInQuery, queryData.targetId); }); @@ -175,9 +175,9 @@ class LruGarbageCollectorTestCase { @testMethod Future testRemoveQueriesUpThroughSequenceNumber() async { - final Map activeTargetIds = {}; + final Map activeTargetIds = {}; for (int i = 0; i < 100; i++) { - final QueryData queryData = await _addNextQuery(); + final TargetData queryData = await _addNextQuery(); // Mark odd queries as live so we can test filtering out live queries. final int targetId = queryData.targetId; if (targetId % 2 == 1) { @@ -195,7 +195,7 @@ class LruGarbageCollectorTestCase { // Make sure we removed the even targets with targetID <= 20. await _persistence .runTransaction('verify remaining targets are > 20 or odd', () async { - return _queryCache.forEachTarget((QueryData queryData) { + return _queryCache.forEachTarget((TargetData queryData) { final bool isOdd = queryData.targetId.remainder(2) == 1; final bool isOver20 = queryData.targetId > 20; expect(isOdd || isOver20, isTrue); @@ -214,7 +214,7 @@ class LruGarbageCollectorTestCase { await _persistence .runTransaction('add a target and add two documents to it', () async { // Add two documents to first target, queue a mutation on the second document - final QueryData queryData = await _addNextQueryInTransaction(); + final TargetData queryData = await _addNextQueryInTransaction(); final Document doc1 = await _cacheADocumentInTransaction(); await _addDocumentToTarget(doc1.key, queryData.targetId); expectedRetained.add(doc1.key); @@ -227,7 +227,7 @@ class LruGarbageCollectorTestCase { // Add a second query and register a third document on it await _persistence.runTransaction('second query', () async { - final QueryData queryData = await _addNextQueryInTransaction(); + final TargetData queryData = await _addNextQueryInTransaction(); final Document doc3 = await _cacheADocumentInTransaction(); await _addDocumentToTarget(doc3.key, queryData.targetId); expectedRetained.add(doc3.key); @@ -301,9 +301,9 @@ class LruGarbageCollectorTestCase { // Add oldest target, 5 documents, and add those documents to the target. This target will not be removed, so all // documents that are part of it will be retained. - final QueryData oldestTarget = await _persistence + final TargetData oldestTarget = await _persistence .runTransactionAndReturn('Add oldest target and docs', () async { - final QueryData queryData = await _addNextQueryInTransaction(); + final TargetData queryData = await _addNextQueryInTransaction(); for (int i = 0; i < 5; i++) { final Document doc = await _cacheADocumentInTransaction(); expectedRetained.add(doc.key); @@ -316,9 +316,9 @@ class LruGarbageCollectorTestCase { final Set middleDocsToRemove = {}; // This will be the document in this target that gets an update later. 
DocumentKey middleDocToUpdateHolder; - final QueryData middleTarget = await _persistence + final TargetData middleTarget = await _persistence .runTransactionAndReturn('Add middle target and docs', () async { - final QueryData queryData = await _addNextQueryInTransaction(); + final TargetData queryData = await _addNextQueryInTransaction(); // These docs will be removed from this target later, triggering a bump to their sequence numbers. Since they will // not be a part of the target, we expect them to be removed. for (int i = 0; i < 2; i++) { @@ -350,7 +350,7 @@ class LruGarbageCollectorTestCase { // since this target will be removed. final Set newestDocsToAddToOldest = {}; await _persistence.runTransaction('Add newest target and docs', () async { - final QueryData queryData = await _addNextQueryInTransaction(); + final TargetData queryData = await _addNextQueryInTransaction(); // These documents are only in this target. They are expected to be removed because this target will also be // removed. for (int i = 0; i < 3; i++) { @@ -525,7 +525,7 @@ class LruGarbageCollectorTestCase { // Use separate transactions so that each target and associated documents get their own sequence number. await _persistence.runTransaction('Add a target and some documents', () async { - final QueryData queryData = await _addNextQueryInTransaction(); + final TargetData queryData = await _addNextQueryInTransaction(); for (int j = 0; j < 10; j++) { final Document doc = await _cacheADocumentInTransaction(); await _addDocumentToTarget(doc.key, queryData.targetId); @@ -551,7 +551,7 @@ class LruGarbageCollectorTestCase { _persistence = await _getPersistence(params); _persistence.referenceDelegate.inMemoryPins = ReferenceSet(); - _queryCache = _persistence.queryCache; + _queryCache = _persistence.targetCache; _documentCache = _persistence.remoteDocumentCache; const User user = User('user'); _mutationQueue = _persistence.getMutationQueue(user); @@ -564,32 +564,32 @@ class LruGarbageCollectorTestCase { _lruParams = params; } - Future _nextQueryData() async { + Future _nextQueryData() async { final int targetId = ++_previousTargetId; final int sequenceNumber = _persistence.referenceDelegate.currentSequenceNumber; final Query _query = query('path$targetId'); - return QueryData(_query, targetId, sequenceNumber, QueryPurpose.listen); + return TargetData(_query, targetId, sequenceNumber, QueryPurpose.listen); } - Future _updateTargetInTransaction(QueryData queryData) async { + Future _updateTargetInTransaction(TargetData queryData) async { final SnapshotVersion _version = version(2); final Uint8List _resumeToken = resumeToken(2); - final QueryData updated = queryData.copyWith( + final TargetData updated = queryData.copyWith( sequenceNumber: _persistence.referenceDelegate.currentSequenceNumber, snapshotVersion: _version, resumeToken: _resumeToken, ); - await _queryCache.updateQueryData(updated); + await _queryCache.updateTargetData(updated); } - Future _addNextQueryInTransaction() async { - final QueryData queryData = await _nextQueryData(); - await _queryCache.addQueryData(queryData); + Future _addNextQueryInTransaction() async { + final TargetData queryData = await _nextQueryData(); + await _queryCache.addTargetData(queryData); return queryData; } - Future _addNextQuery() { + Future _addNextQuery() { return _persistence.runTransactionAndReturn( 'Add query', _addNextQueryInTransaction); } diff --git a/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/local/cases/mutation_queue_test_case.dart 
b/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/local/cases/mutation_queue_test_case.dart index 5a68b58f..a01013c3 100644 --- a/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/local/cases/mutation_queue_test_case.dart +++ b/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/local/cases/mutation_queue_test_case.dart @@ -8,8 +8,8 @@ import 'dart:typed_data'; import 'package:_firebase_database_collection_vm/_firebase_database_collection_vm.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/auth/user.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/core/query.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistance/mutation_queue.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistance/persistence.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/mutation_queue.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/persistence.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/local/reference_set.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/document_key.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/mutation.dart'; diff --git a/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/local/cases/query_cache_test_case.dart b/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/local/cases/query_cache_test_case.dart index 2dcb7cfa..948e44d9 100644 --- a/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/local/cases/query_cache_test_case.dart +++ b/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/local/cases/query_cache_test_case.dart @@ -6,8 +6,9 @@ import 'dart:async'; import 'package:_firebase_database_collection_vm/_firebase_database_collection_vm.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/core/query.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistance/persistence.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistance/query_cache.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/persistence.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/target_cache.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/persistence.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/local/query_data.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/local/query_purpose.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/local/sqlite/sqlite_persistence.dart'; @@ -27,7 +28,7 @@ class QueryCacheTestCase { int _previousSequenceNumber; void setUp() { - _queryCache = _persistence.queryCache; + _queryCache = _persistence.targetCache; _previousSequenceNumber = 1000; } @@ -35,7 +36,7 @@ class QueryCacheTestCase { @testMethod Future testReadQueryNotInCache() async { - expect(await _queryCache.getQueryData(query('rooms')), isNull); + expect(await _queryCache.getTargetData(query('rooms')), isNull); } @testMethod @@ -43,9 +44,9 @@ class QueryCacheTestCase { final QueryData queryData = _newQueryData(query('rooms'), 1, 1); await _addQueryData(queryData); - final QueryData result = await _queryCache.getQueryData(query('rooms')); + final QueryData result = await _queryCache.getTargetData(query('rooms')); expect(result, isNotNull); - expect(result.query, queryData.query); + expect(result.target, 
queryData.target); expect(result.targetId, queryData.targetId); expect(result.resumeToken, queryData.resumeToken); } @@ -62,24 +63,24 @@ class QueryCacheTestCase { await _addQueryData(data1); // Using the other query should not return the query cache entry despite equal canonicalIDs. - expect(await _queryCache.getQueryData(q2), isNull); - expect(await _queryCache.getQueryData(q1), data1); + expect(await _queryCache.getTargetData(q2), isNull); + expect(await _queryCache.getTargetData(q1), data1); final QueryData data2 = _newQueryData(q2, 2, 1); await _addQueryData(data2); expect(_queryCache.targetCount, 2); - expect(await _queryCache.getQueryData(q1), data1); - expect(await _queryCache.getQueryData(q2), data2); + expect(await _queryCache.getTargetData(q1), data1); + expect(await _queryCache.getTargetData(q2), data2); await _removeQueryData(data1); - expect(await _queryCache.getQueryData(q1), isNull); - expect(await _queryCache.getQueryData(q2), data2); + expect(await _queryCache.getTargetData(q1), isNull); + expect(await _queryCache.getTargetData(q2), data2); expect(_queryCache.targetCount, 1); await _removeQueryData(data2); - expect(await _queryCache.getQueryData(q1), isNull); - expect(await _queryCache.getQueryData(q2), isNull); + expect(await _queryCache.getTargetData(q1), isNull); + expect(await _queryCache.getTargetData(q2), isNull); expect(_queryCache.targetCount, 0); } @@ -91,7 +92,7 @@ class QueryCacheTestCase { final QueryData queryData2 = _newQueryData(query('rooms'), 1, 2); await _addQueryData(queryData2); - final QueryData result = await _queryCache.getQueryData(query('rooms')); + final QueryData result = await _queryCache.getTargetData(query('rooms')); // There's no assertArrayNotEquals expect(queryData2.resumeToken, isNot(queryData1.resumeToken)); @@ -108,7 +109,7 @@ class QueryCacheTestCase { await _removeQueryData(queryData1); - final QueryData result = await _queryCache.getQueryData(query('rooms')); + final QueryData result = await _queryCache.getTargetData(query('rooms')); expect(result, isNull); } @@ -116,7 +117,7 @@ class QueryCacheTestCase { Future testRemoveNonExistentQuery() async { // no-op, but make sure it doesn't throw. try { - await _queryCache.getQueryData(query('rooms')); + await _queryCache.getTargetData(query('rooms')); expect(true, true); } catch (e) { assert(false, 'This should not thow'); @@ -283,7 +284,7 @@ class QueryCacheTestCase { await _persistence.start(); } - _queryCache = _persistence.queryCache; + _queryCache = _persistence.targetCache; expect(_queryCache.highestTargetId, 42); } @@ -310,7 +311,7 @@ class QueryCacheTestCase { await _persistence.start(); } - _queryCache = _persistence.queryCache; + _queryCache = _persistence.targetCache; expect(_queryCache.lastRemoteSnapshotVersion, version(42)); } @@ -325,14 +326,14 @@ class QueryCacheTestCase { /// Adds the given query data to the [_queryCache] under test, committing immediately. Future _addQueryData(QueryData queryData) async { await _persistence.runTransaction( - 'addQueryData', () => _queryCache.addQueryData(queryData)); + 'addQueryData', () => _queryCache.addTargetData(queryData)); return queryData; } /// Removes the given query data from the queryCache under test, committing immediately. 
Future _removeQueryData(QueryData queryData) async { await _persistence.runTransaction( - 'removeQueryData', () => _queryCache.removeQueryData(queryData)); + 'removeQueryData', () => _queryCache.removeTargetData(queryData)); } Future _addMatchingKey(DocumentKey key, int targetId) async { diff --git a/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/local/cases/remote_document_cache_test_case.dart b/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/local/cases/remote_document_cache_test_case.dart index 8fecb80b..941ca585 100644 --- a/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/local/cases/remote_document_cache_test_case.dart +++ b/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/local/cases/remote_document_cache_test_case.dart @@ -6,8 +6,8 @@ import 'dart:async'; import 'package:_firebase_database_collection_vm/_firebase_database_collection_vm.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/core/query.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistance/persistence.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistance/remote_document_cache.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/persistence.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/remote_document_cache.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/document.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/document_key.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/maybe_document.dart'; diff --git a/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/local/local_serializer_test.dart b/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/local/local_serializer_test.dart index 0e4438c7..f80d99e0 100644 --- a/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/local/local_serializer_test.dart +++ b/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/local/local_serializer_test.dart @@ -6,7 +6,7 @@ import 'dart:typed_data'; import 'package:cloud_firestore_vm/src/firebase/firestore/core/query.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/local/local_serializer.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/query_data.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/target_data.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/local/query_purpose.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/database_id.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/document.dart'; @@ -167,7 +167,7 @@ void main() { final SnapshotVersion _version = version(1039); final Uint8List _resumeToken = resumeToken(1039); - final QueryData queryData = QueryData( + final TargetData queryData = TargetData( _query, targetId, sequenceNumber, @@ -192,8 +192,8 @@ void main() { ..structuredQuery = queryTarget.structuredQuery)) .freeze(); - expect(serializer.encodeQueryData(queryData), expected); - final QueryData decoded = serializer.decodeQueryData(expected); + expect(serializer.encodeTargetData(queryData), expected); + final TargetData decoded = serializer.decodeTargetData(expected); expect(decoded, queryData); }); } diff --git a/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/local/persistence_test_helpers.dart 
b/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/local/persistence_test_helpers.dart index 7e619af4..81dfb667 100644 --- a/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/local/persistence_test_helpers.dart +++ b/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/local/persistence_test_helpers.dart @@ -7,7 +7,7 @@ import 'dart:async'; import 'package:cloud_firestore_vm/src/firebase/firestore/local/local_serializer.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/local/lru_garbage_collector.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/local/memory/memory_persistence.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistance/stats_collector.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/stats_collector.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/local/sqlite/sqlite_persistence.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/database_id.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/remote/remote_serializer.dart'; diff --git a/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/remote/mock_datastore.dart b/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/remote/mock_datastore.dart index c1118610..6967b88b 100644 --- a/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/remote/mock_datastore.dart +++ b/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/remote/mock_datastore.dart @@ -6,7 +6,7 @@ import 'dart:async'; import 'package:cloud_firestore_vm/src/firebase/firestore/auth/empty_credentials_provider.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/core/database_info.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/query_data.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/target_data.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/database_id.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/mutation.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/mutation/mutation_result.dart'; @@ -16,7 +16,7 @@ import 'package:cloud_firestore_vm/src/firebase/firestore/remote/datastore/datas import 'package:cloud_firestore_vm/src/firebase/firestore/remote/remote_serializer.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/remote/watch_change.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/util/assert.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/util/timer_task.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/util/async_task.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/util/util.dart'; import 'package:grpc/grpc.dart'; import 'package:grpc/src/shared/status.dart'; @@ -26,7 +26,7 @@ import '../spec/spec_test_case.dart'; /// A mock version of [Datastore] for SpecTest that allows the test to control /// the parts that would normally be sent from the backend. 
class MockDatastore extends Datastore { - factory MockDatastore(TaskScheduler scheduler) { + factory MockDatastore(AsyncQueue scheduler) { final DatabaseId databaseId = DatabaseId.forDatabase('project', 'database'); final RemoteSerializer serializer = RemoteSerializer(databaseId); final DatabaseInfo databaseInfo = @@ -50,7 +50,7 @@ class MockDatastore extends Datastore { MockDatastore._( DatabaseInfo databaseInfo, - TaskScheduler scheduler, + AsyncQueue scheduler, RemoteSerializer serializer, FirestoreClient client, ) : _serializer = serializer, @@ -59,7 +59,7 @@ class MockDatastore extends Datastore { super.test(scheduler, databaseInfo, serializer, client); final FirestoreClient _client; - final TaskScheduler _scheduler; + final AsyncQueue _scheduler; final RemoteSerializer _serializer; _MockWatchStream _watchStream; @@ -112,10 +112,10 @@ class MockDatastore extends Datastore { } /// Returns the map of active targets on the watch stream, keyed by target ID. - Map get activeTargets { + Map get activeTargets { // Make a defensive copy as the watch stream continues to modify the Map of // active targets. - return Map.from(_watchStream._activeTargets); + return Map.from(_watchStream._activeTargets); } /// Helper method to expose stream state to verify in tests. @@ -126,7 +126,7 @@ class _MockWatchStream extends WatchStream { _MockWatchStream( this._datastore, RemoteSerializer serializer, - TaskScheduler scheduler, + AsyncQueue scheduler, ) : super.test( _datastore._client, scheduler, @@ -139,7 +139,7 @@ class _MockWatchStream extends WatchStream { bool _open = false; /// Tracks the currently active watch targets as sent over the watch stream. - final Map _activeTargets = {}; + final Map _activeTargets = {}; @override // ignore: must_call_super @@ -168,12 +168,12 @@ class _MockWatchStream extends WatchStream { } @override - void watchQuery(QueryData queryData) { + void watchQuery(TargetData queryData) { final String resumeToken = toDebugString(queryData.resumeToken); SpecTestCase.log( - ' watchQuery(${queryData.query}, ${queryData.targetId}, $resumeToken)'); + ' watchQuery(${queryData.target}, ${queryData.targetId}, $resumeToken)'); // Snapshot version is ignored on the wire - final QueryData sentQueryData = queryData.copyWith( + final TargetData sentQueryData = queryData.copyWith( snapshotVersion: SnapshotVersion.none, resumeToken: queryData.resumeToken, sequenceNumber: queryData.sequenceNumber); @@ -225,7 +225,7 @@ class _MockWatchStream extends WatchStream { class _MockWriteStream extends WriteStream { _MockWriteStream( - this._datastore, RemoteSerializer serializer, TaskScheduler scheduler) + this._datastore, RemoteSerializer serializer, AsyncQueue scheduler) : _sentWrites = >[], super.test( _datastore._client, diff --git a/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/remote/remote_serializer_test.dart b/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/remote/remote_serializer_test.dart index eb7c0542..ca2b5fc4 100644 --- a/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/remote/remote_serializer_test.dart +++ b/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/remote/remote_serializer_test.dart @@ -11,7 +11,7 @@ import 'package:cloud_firestore_vm/src/firebase/firestore/document_reference.dar import 'package:cloud_firestore_vm/src/firebase/firestore/field_value.dart' as firestore; import 'package:cloud_firestore_vm/src/firebase/firestore/geo_point.dart'; -import 
'package:cloud_firestore_vm/src/firebase/firestore/local/query_data.dart'; +import 'package:cloud_firestore_vm/src/firebase/firestore/local/target_data.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/local/query_purpose.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/database_id.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/model/document_key.dart'; @@ -108,8 +108,8 @@ void main() { /// Wraps the given query in [QueryData]. This is useful because the APIs /// we're testing accept [QueryData], but for the most part we're just testing /// variations on [Query]. - QueryData wrapQueryData(Query query) { - return QueryData( + TargetData wrapQueryData(Query query) { + return TargetData( query, 1, 2, QueryPurpose.listen, SnapshotVersion.none, Uint8List(0)); } @@ -486,19 +486,19 @@ void main() { test('testEncodesListenRequestLabels', () { final Query _query = query('collection/key'); - QueryData queryData = QueryData(_query, 2, 3, QueryPurpose.listen); + TargetData queryData = TargetData(_query, 2, 3, QueryPurpose.listen); final Map encoded = serializer.encodeListenRequestLabels(queryData); expect(encoded, isEmpty); - queryData = QueryData(_query, 2, 3, QueryPurpose.limboResolution); + queryData = TargetData(_query, 2, 3, QueryPurpose.limboResolution); MapEntry result = serializer.encodeListenRequestLabels(queryData).entries.first; expect(result.key, 'goog-listen-tags'); expect(result.value, 'limbo-document'); - queryData = QueryData(_query, 2, 3, QueryPurpose.existenceFilterMismatch); + queryData = TargetData(_query, 2, 3, QueryPurpose.existenceFilterMismatch); result = serializer.encodeListenRequestLabels(queryData).entries.first; expect(result.key, 'goog-listen-tags'); expect(result.value, 'existence-filter-mismatch'); @@ -506,7 +506,7 @@ void main() { test('testEncodesFirstLevelKeyQueries', () { final Query q = Query(ResourcePath.fromString('docs/1')); - final proto_.Target actual = serializer.encodeTarget(QueryData(q, 1, 2, + final proto_.Target actual = serializer.encodeTarget(TargetData(q, 1, 2, QueryPurpose.limboResolution, SnapshotVersion.none, Uint8List(0))); final proto_.Target_DocumentsTarget docs = @@ -883,7 +883,7 @@ void main() { test('testEncodesResumeTokens', () { final Query q = Query(ResourcePath.fromString('docs')); - final proto_.Target actual = serializer.encodeTarget(QueryData( + final proto_.Target actual = serializer.encodeTarget(TargetData( q, 1, 2, QueryPurpose.listen, SnapshotVersion.none, resumeToken(1000))); final proto.StructuredQuery structuredQueryBuilder = diff --git a/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/remote/remote_store_test.dart b/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/remote/remote_store_test.dart index 56ecc906..f11de5b1 100644 --- a/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/remote/remote_store_test.dart +++ b/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/remote/remote_store_test.dart @@ -6,10 +6,10 @@ import 'package:cloud_firestore_vm/src/firebase/firestore/auth/empty_credentials import 'package:cloud_firestore_vm/src/firebase/firestore/auth/user.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/local/local_store.dart'; import 'package:cloud_firestore_vm/src/firebase/firestore/local/memory/memory_persistence.dart'; -import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistance/persistence.dart'; +import 
    'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/persistence.dart';
 import 'package:cloud_firestore_vm/src/firebase/firestore/remote/datastore/datastore.dart';
 import 'package:cloud_firestore_vm/src/firebase/firestore/remote/remote_store.dart';
-import 'package:cloud_firestore_vm/src/firebase/firestore/util/timer_task.dart';
+import 'package:cloud_firestore_vm/src/firebase/firestore/util/async_task.dart';
 import 'package:mockito/mockito.dart';
 import 'package:rxdart/subjects.dart';
 import 'package:test/test.dart';
@@ -19,7 +19,7 @@ import '../../../../util/integration_test_util.dart';
 // ignore_for_file: cascade_invocations
 void main() {
   test('testRemoteStoreStreamStopsWhenNetworkUnreachable', () async {
-    final TaskScheduler scheduler = TaskScheduler('');
+    final AsyncQueue scheduler = AsyncQueue('');
     final Datastore datastore = Datastore(
       scheduler,
       IntegrationTestUtil.testEnvDatabaseInfo(),
diff --git a/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/spec/spec_test_case.dart b/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/spec/spec_test_case.dart
index a7f24a83..349f8d38 100644
--- a/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/spec/spec_test_case.dart
+++ b/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/spec/spec_test_case.dart
@@ -18,7 +18,7 @@ import 'package:cloud_firestore_vm/src/firebase/firestore/core/sync_engine.dart'
 import 'package:cloud_firestore_vm/src/firebase/firestore/core/view_snapshot.dart';
 import 'package:cloud_firestore_vm/src/firebase/firestore/firestore_error.dart';
 import 'package:cloud_firestore_vm/src/firebase/firestore/local/local_store.dart';
-import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistance/persistence.dart';
+import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/persistence.dart';
 import 'package:cloud_firestore_vm/src/firebase/firestore/local/query_data.dart';
 import 'package:cloud_firestore_vm/src/firebase/firestore/local/query_purpose.dart';
 import 'package:cloud_firestore_vm/src/firebase/firestore/local/sqlite/sqlite_persistence.dart';
@@ -36,7 +36,7 @@ import 'package:cloud_firestore_vm/src/firebase/firestore/remote/remote_store.da
 import 'package:cloud_firestore_vm/src/firebase/firestore/remote/watch_change.dart';
 import 'package:cloud_firestore_vm/src/firebase/firestore/util/assert.dart'
     as asserts;
-import 'package:cloud_firestore_vm/src/firebase/firestore/util/timer_task.dart';
+import 'package:cloud_firestore_vm/src/firebase/firestore/util/async_task.dart';
 import 'package:grpc/grpc.dart';
 import 'package:rxdart/subjects.dart';
 import 'package:test/test.dart';
@@ -77,7 +77,7 @@ class SpecTestCase implements RemoteStoreCallback {
 
   // Parts of the Firestore system that the spec tests need to control.
   Persistence _localPersistence;
-  TaskScheduler _queue;
+  AsyncQueue _queue;
   MockDatastore _datastore;
   RemoteStore _remoteStore;
   SyncEngine _syncEngine;
@@ -188,7 +188,7 @@ class SpecTestCase implements RemoteStoreCallback {
 
     final LocalStore localStore = LocalStore(_localPersistence, _currentUser);
-    _queue = TaskScheduler('');
+    _queue = AsyncQueue('');
 
     // Set up the sync engine and various stores.
     _datastore = MockDatastore(_queue);
@@ -874,7 +874,7 @@ class SpecTestCase implements RemoteStoreCallback {
   void _validateLimboDocs() {
     // Make a copy so it can modified while checking against the expected limbo docs.
     final Map actualLimboDocs =
-        Map.from(_syncEngine.getCurrentLimboDocuments());
+        Map.from(_syncEngine.getActiveLimboDocumentResolutions());
 
     // Validate that each limbo doc has an expected active target
     for (MapEntry limboDoc in actualLimboDocs.entries) {
@@ -904,7 +904,7 @@ class SpecTestCase implements RemoteStoreCallback {
       // only validate properties that can be validated.
      // expect(actualTarget, expectedTarget);
-      expect(actualTarget.query, expectedTarget.query);
+      expect(actualTarget.target, expectedTarget.target);
       expect(actualTarget.targetId, expectedTarget.targetId);
       expect(actualTarget.snapshotVersion, expectedTarget.snapshotVersion);
       expect(utf8.decode(actualTarget.resumeToken),
diff --git a/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/test_util.dart b/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/test_util.dart
index abbef31b..8473928b 100644
--- a/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/test_util.dart
+++ b/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/test_util.dart
@@ -9,7 +9,7 @@ import 'package:cloud_firestore_vm/src/firebase/firestore/core/view_snapshot.dar
 import 'package:cloud_firestore_vm/src/firebase/firestore/document_reference.dart';
 import 'package:cloud_firestore_vm/src/firebase/firestore/document_snapshot.dart';
 import 'package:cloud_firestore_vm/src/firebase/firestore/firestore.dart';
-import 'package:cloud_firestore_vm/src/firebase/firestore/local/query_data.dart';
+import 'package:cloud_firestore_vm/src/firebase/firestore/local/target_data.dart';
 import 'package:cloud_firestore_vm/src/firebase/firestore/model/document.dart';
 import 'package:cloud_firestore_vm/src/firebase/firestore/model/document_key.dart';
 import 'package:cloud_firestore_vm/src/firebase/firestore/model/document_set.dart';
diff --git a/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/util/test_target_metadata_provider.dart b/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/util/test_target_metadata_provider.dart
index 4bbc37a5..94221530 100644
--- a/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/util/test_target_metadata_provider.dart
+++ b/cloud_firestore/cloud_firestore_vm/test/unit/firebase/firestore/util/test_target_metadata_provider.dart
@@ -3,7 +3,7 @@
 // on 22/03/2020
 
 import 'package:_firebase_database_collection_vm/_firebase_database_collection_vm.dart';
-import 'package:cloud_firestore_vm/src/firebase/firestore/local/query_data.dart';
+import 'package:cloud_firestore_vm/src/firebase/firestore/local/target_data.dart';
 import 'package:cloud_firestore_vm/src/firebase/firestore/model/document_key.dart';
 import 'package:cloud_firestore_vm/src/firebase/firestore/remote/watch_change_aggregator.dart';
 import 'package:meta/meta.dart';
@@ -11,7 +11,7 @@ import 'package:meta/meta.dart';
 TestTargetMetadataProvider get testTargetMetadataProvider {
   final Map<int, ImmutableSortedSet<DocumentKey>> syncedKeys =
       <int, ImmutableSortedSet<DocumentKey>>{};
-  final Map<int, QueryData> queryDataMap = {};
+  final Map<int, TargetData> queryDataMap = {};
 
   return TestTargetMetadataProvider(
     syncedKeys,
@@ -33,19 +33,19 @@ class TestTargetMetadataProvider extends TargetMetadataProvider {
       ImmutableSortedSet Function(int targetId) getRemoteKeysForTarget,
       @required
-          QueryData Function(int targetId) getQueryDataForTarget})
+          TargetData Function(int targetId) getQueryDataForTarget})
       : super(
           getRemoteKeysForTarget: getRemoteKeysForTarget,
-          getQueryDataForTarget: getQueryDataForTarget,
+          getTargetDataForTarget: getQueryDataForTarget,
         );
 
   final Map<int, ImmutableSortedSet<DocumentKey>> syncedKeys;
-  final Map<int, QueryData> queryDataMap;
+  final Map<int, TargetData> queryDataMap;
 
   /// Sets or replaces the local state for the provided query data.
   void setSyncedKeys(
-      QueryData queryData, ImmutableSortedSet keys) {
+      TargetData queryData, ImmutableSortedSet keys) {
     queryDataMap[queryData.targetId] = queryData;
     syncedKeys[queryData.targetId] = keys;
   }
diff --git a/cloud_firestore/cloud_firestore_vm/test/util/integration_test_util.dart b/cloud_firestore/cloud_firestore_vm/test/util/integration_test_util.dart
index a6f8a671..20f2e24b 100644
--- a/cloud_firestore/cloud_firestore_vm/test/util/integration_test_util.dart
+++ b/cloud_firestore/cloud_firestore_vm/test/util/integration_test_util.dart
@@ -15,12 +15,12 @@ import 'package:cloud_firestore_vm/src/firebase/firestore/document_reference.dar
 import 'package:cloud_firestore_vm/src/firebase/firestore/document_snapshot.dart';
 import 'package:cloud_firestore_vm/src/firebase/firestore/firestore.dart';
 import 'package:cloud_firestore_vm/src/firebase/firestore/firestore_settings.dart';
-import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistance/persistence.dart';
+import 'package:cloud_firestore_vm/src/firebase/firestore/local/persistence/persistence.dart';
 import 'package:cloud_firestore_vm/src/firebase/firestore/metadata_change.dart';
 import 'package:cloud_firestore_vm/src/firebase/firestore/model/database_id.dart';
 import 'package:cloud_firestore_vm/src/firebase/firestore/query_snapshot.dart';
 import 'package:cloud_firestore_vm/src/firebase/firestore/util/database.dart';
-import 'package:cloud_firestore_vm/src/firebase/firestore/util/timer_task.dart';
+import 'package:cloud_firestore_vm/src/firebase/firestore/util/async_task.dart';
 import 'package:cloud_firestore_vm/src/firebase/firestore/util/util.dart';
 import 'package:rxdart/subjects.dart';
 import 'package:test/test.dart';
@@ -102,7 +102,7 @@ class IntegrationTestUtil {
       DatabaseId databaseId,
       String persistenceKey,
       CredentialsProvider provider,
-      TaskScheduler scheduler,
+      AsyncQueue scheduler,
       OpenDatabase openDatabase,
       FirestoreSettings settings) async {
     final DatabaseInfo databaseInfo = DatabaseInfo(
@@ -143,7 +143,7 @@ class IntegrationTestUtil {
     _clearPersistence(dbFullPath);
 
-    final TaskScheduler scheduler = TaskScheduler('');
+    final AsyncQueue scheduler = AsyncQueue('');
     final Firestore firestore = await forTests(
       databaseId,
       persistenceKey,
diff --git a/cloud_firestore/cloud_firestore_vm/test/util/test_util.dart b/cloud_firestore/cloud_firestore_vm/test/util/test_util.dart
index 1163471f..cbb19156 100644
--- a/cloud_firestore/cloud_firestore_vm/test/util/test_util.dart
+++ b/cloud_firestore/cloud_firestore_vm/test/util/test_util.dart
@@ -14,7 +14,7 @@ import 'package:cloud_firestore_vm/src/firebase/firestore/core/query.dart';
 import 'package:cloud_firestore_vm/src/firebase/firestore/core/user_data.dart';
 import 'package:cloud_firestore_vm/src/firebase/firestore/document_reference.dart';
 import 'package:cloud_firestore_vm/src/firebase/firestore/local/local_view_changes.dart';
-import 'package:cloud_firestore_vm/src/firebase/firestore/local/query_data.dart';
+import 'package:cloud_firestore_vm/src/firebase/firestore/local/target_data.dart';
 import 'package:cloud_firestore_vm/src/firebase/firestore/local/query_purpose.dart';
 import 'package:cloud_firestore_vm/src/firebase/firestore/model/database_id.dart';
 import 'package:cloud_firestore_vm/src/firebase/firestore/model/document.dart';
@@ -255,8 +255,8 @@ void testEquality(List> equalityGroups) {
   }
 }
 
-QueryData queryData(int targetId, QueryPurpose queryPurpose, String path) {
-  return QueryData(
+TargetData queryData(int targetId, QueryPurpose queryPurpose, String path) {
+  return TargetData(
       query(path), targetId, arbitrarySequenceNumber, queryPurpose);
 }
@@ -314,22 +314,22 @@ TargetChange ackTarget([List docs]) {
       current: true);
 }
 
-Map<int, QueryData> activeQueries([List targets = const []]) {
+Map<int, TargetData> activeQueries([List targets = const []]) {
   final Query theQuery = query('foo');
-  final Map<int, QueryData> listenMap = {};
+  final Map<int, TargetData> listenMap = {};
   for (int targetId in targets) {
-    final QueryData queryData = QueryData(
+    final TargetData queryData = TargetData(
        theQuery, targetId, arbitrarySequenceNumber, QueryPurpose.listen);
     listenMap[targetId] = queryData;
   }
   return listenMap;
 }
 
-Map<int, QueryData> activeLimboQueries(String docKey, Iterable targets) {
+Map<int, TargetData> activeLimboQueries(String docKey, Iterable targets) {
   final Query theQuery = query(docKey);
-  final Map<int, QueryData> listenMap = {};
+  final Map<int, TargetData> listenMap = {};
   for (int targetId in targets) {
-    final QueryData queryData = QueryData(theQuery, targetId,
+    final TargetData queryData = TargetData(theQuery, targetId,
        arbitrarySequenceNumber, QueryPurpose.limboResolution);
     listenMap[targetId] = queryData;
   }
@@ -343,7 +343,7 @@ RemoteEvent addedRemoteEvent(MaybeDocument doc, List updatedInTargets,
   final WatchChangeAggregator aggregator =
       WatchChangeAggregator(TargetMetadataProvider(
     getRemoteKeysForTarget: (int targetId) => DocumentKey.emptyKeySet,
-    getQueryDataForTarget: (int targetId) =>
+    getTargetDataForTarget: (int targetId) =>
        queryData(targetId, QueryPurpose.listen, doc.key.toString()),
   ))
     ..handleDocumentChange(change);
@@ -360,7 +360,7 @@ RemoteEvent updateRemoteEvent(
     getRemoteKeysForTarget: (int targetId) {
       return DocumentKey.emptyKeySet.insert(doc.key);
     },
-    getQueryDataForTarget: (int targetId) {
+    getTargetDataForTarget: (int targetId) {
       final bool isLimbo = !(updatedInTargets.contains(targetId) ||
           removedFromTargets.contains(targetId));
       final QueryPurpose purpose =