{"version":3,"file":"firebase-compat.js","sources":["../util/src/constants.ts","../util/src/assert.ts","../util/src/crypt.ts","../util/src/deepCopy.ts","../util/src/global.ts","../util/src/defaults.ts","../logger/src/logger.ts","../../node_modules/idb/build/index.js","../util/src/deferred.ts","../util/src/emulator.ts","../util/src/environment.ts","../util/src/errors.ts","../util/src/json.ts","../util/src/jwt.ts","../util/src/obj.ts","../util/src/query.ts","../util/src/sha1.ts","../util/src/subscribe.ts","../util/src/validation.ts","../util/src/uuid.ts","../util/src/utf8.ts","../util/src/exponential_backoff.ts","../util/src/compat.ts","../component/src/component.ts","../component/src/constants.ts","../component/src/provider.ts","../component/src/component_container.ts","../../node_modules/idb/build/wrap-idb-value.js","../app/src/platformLoggerService.ts","../app/src/logger.ts","../app/src/registerCoreComponents.ts","../app/src/constants.ts","../app/src/internal.ts","../app/src/errors.ts","../app/src/firebaseApp.ts","../app/src/api.ts","../app/src/indexeddb.ts","../app/src/heartbeatService.ts","../app/src/index.ts","../app-compat/src/firebaseApp.ts","../app-compat/src/errors.ts","../app-compat/src/firebaseNamespaceCore.ts","../app-compat/src/firebaseNamespace.ts","../app-compat/src/logger.ts","../app-compat/src/index.ts","../app-compat/src/registerCoreComponents.ts","compat/app/index.ts","../installations/src/util/constants.ts","../installations/src/util/errors.ts","../installations/src/functions/common.ts","../installations/src/util/sleep.ts","../installations/src/helpers/generate-fid.ts","../installations/src/helpers/buffer-to-base64-url-safe.ts","../installations/src/util/get-key.ts","../installations/src/helpers/fid-changed.ts","../installations/src/helpers/idb-manager.ts","../installations/src/helpers/get-installation-entry.ts","../installations/src/functions/create-installation-request.ts","../installations/src/functions/generate-auth-token-request.ts","../installations/src/helpers/refresh-auth-token.ts","../installations/src/api/get-token.ts","../installations/src/helpers/extract-app-config.ts","../installations/src/functions/config.ts","../installations/src/api/get-id.ts","../installations/src/index.ts","../analytics/src/constants.ts","../analytics/src/logger.ts","../analytics/src/errors.ts","../analytics/src/helpers.ts","../analytics/src/get-config.ts","../analytics/src/initialize-analytics.ts","../analytics/src/factory.ts","../analytics/src/api.ts","../analytics/src/functions.ts","../analytics/src/index.ts","../analytics-compat/src/constants.ts","../analytics-compat/src/index.ts","../analytics-compat/src/service.ts","../app-check/src/state.ts","../app-check/src/constants.ts","../app-check/src/proactive-refresh.ts","../app-check/src/errors.ts","../app-check/src/util.ts","../app-check/src/client.ts","../app-check/src/indexeddb.ts","../app-check/src/logger.ts","../app-check/src/storage.ts","../app-check/src/debug.ts","../app-check/src/internal-api.ts","../app-check/src/factory.ts","../app-check/src/recaptcha.ts","../app-check/src/providers.ts","../app-check/src/api.ts","../app-check/src/index.ts","../app-check-compat/src/errors.ts","../app-check-compat/src/service.ts","../app-check-compat/src/index.ts","../../node_modules/tslib/tslib.es6.js","../auth/src/model/enum_maps.ts","../auth/src/core/errors.ts","../auth/src/core/util/log.ts","../auth/src/core/util/assert.ts","../auth/src/core/util/location.ts","../auth/src/core/util/delay.ts","../auth/src/core/util/navigator.ts","../auth/src/co
re/util/emulator.ts","../auth/src/core/util/fetch_provider.ts","../auth/src/api/errors.ts","../auth/src/api/index.ts","../auth/src/platform_browser/recaptcha/recaptcha.ts","../auth/src/core/util/time.ts","../auth/src/core/user/id_token_result.ts","../auth/src/core/user/invalidation.ts","../auth/src/core/user/proactive_refresh.ts","../auth/src/core/user/user_metadata.ts","../auth/src/core/user/reload.ts","../auth/src/api/account_management/account.ts","../auth/src/core/user/token_manager.ts","../auth/src/api/authentication/token.ts","../auth/src/core/user/user_impl.ts","../auth/src/core/util/instantiator.ts","../auth/src/core/persistence/in_memory.ts","../auth/src/core/persistence/persistence_user_manager.ts","../auth/src/core/util/browser.ts","../auth/src/core/util/version.ts","../auth/src/core/auth/middleware.ts","../auth/src/core/auth/password_policy_impl.ts","../auth/src/core/auth/auth_impl.ts","../auth/src/api/password_policy/get_password_policy.ts","../auth/src/platform_browser/load_js.ts","../auth/src/platform_browser/recaptcha/recaptcha_enterprise_verifier.ts","../auth/src/api/authentication/recaptcha.ts","../auth/src/core/auth/emulator.ts","../auth/src/core/credentials/auth_credential.ts","../auth/src/api/account_management/email_and_password.ts","../auth/src/api/authentication/email_and_password.ts","../auth/src/core/credentials/email.ts","../auth/src/api/authentication/email_link.ts","../auth/src/api/authentication/idp.ts","../auth/src/core/credentials/oauth.ts","../auth/src/api/authentication/sms.ts","../auth/src/core/credentials/phone.ts","../auth/src/core/action_code_url.ts","../auth/src/core/providers/email.ts","../auth/src/core/providers/federated.ts","../auth/src/core/providers/oauth.ts","../auth/src/core/providers/facebook.ts","../auth/src/core/providers/google.ts","../auth/src/core/providers/github.ts","../auth/src/core/credentials/saml.ts","../auth/src/core/providers/saml.ts","../auth/src/core/providers/twitter.ts","../auth/src/api/authentication/sign_up.ts","../auth/src/core/user/user_credential_impl.ts","../auth/src/mfa/mfa_error.ts","../auth/src/core/util/providers.ts","../auth/src/core/user/link_unlink.ts","../auth/src/core/user/reauthenticate.ts","../auth/src/core/strategies/credential.ts","../auth/src/core/strategies/custom_token.ts","../auth/src/api/authentication/custom_token.ts","../auth/src/mfa/mfa_info.ts","../auth/src/core/strategies/action_code_settings.ts","../auth/src/core/strategies/email_and_password.ts","../auth/src/core/strategies/email.ts","../auth/src/api/authentication/create_auth_uri.ts","../auth/src/core/user/account_info.ts","../auth/src/api/account_management/profile.ts","../auth/src/core/user/additional_user_info.ts","../auth/src/mfa/mfa_session.ts","../auth/src/mfa/mfa_resolver.ts","../auth/src/mfa/mfa_user.ts","../auth/src/api/account_management/mfa.ts","../auth/src/core/persistence/index.ts","../auth/src/platform_browser/persistence/browser.ts","../auth/src/platform_browser/persistence/local_storage.ts","../auth/src/platform_browser/persistence/session_storage.ts","../auth/src/platform_browser/messagechannel/receiver.ts","../auth/src/platform_browser/messagechannel/promise.ts","../auth/src/core/util/event_id.ts","../auth/src/platform_browser/messagechannel/sender.ts","../auth/src/platform_browser/auth_window.ts","../auth/src/platform_browser/util/worker.ts","../auth/src/platform_browser/persistence/indexed_db.ts","../auth/src/platform_browser/recaptcha/recaptcha_mock.ts","../auth/src/platform_browser/recaptcha/recaptcha_loader.ts","../auth/sr
c/platform_browser/recaptcha/recaptcha_verifier.ts","../auth/src/platform_browser/strategies/phone.ts","../auth/src/api/authentication/mfa.ts","../auth/src/platform_browser/providers/phone.ts","../auth/src/core/util/resolver.ts","../auth/src/core/strategies/idp.ts","../auth/src/core/strategies/abstract_popup_redirect_operation.ts","../auth/src/platform_browser/strategies/popup.ts","../auth/src/core/strategies/redirect.ts","../auth/src/platform_browser/strategies/redirect.ts","../auth/src/core/auth/auth_event_manager.ts","../auth/src/api/project_config/get_project_config.ts","../auth/src/core/util/validate_origin.ts","../auth/src/platform_browser/iframe/gapi.ts","../auth/src/platform_browser/iframe/iframe.ts","../auth/src/platform_browser/util/popup.ts","../auth/src/core/util/handler.ts","../auth/src/platform_browser/popup_redirect.ts","../auth/src/platform_browser/mfa/assertions/phone.ts","../auth/src/mfa/mfa_assertion.ts","../auth/src/core/auth/firebase_internal.ts","../auth/src/platform_browser/index.ts","../auth/src/core/auth/register.ts","../auth/src/platform_cordova/plugins.ts","../auth/src/core/auth/initialize.ts","../auth/src/platform_cordova/popup_redirect/utils.ts","../auth/src/platform_cordova/popup_redirect/events.ts","../auth/src/platform_cordova/popup_redirect/popup_redirect.ts","../auth-compat/index.ts","../auth-compat/src/platform.ts","../auth-compat/src/persistence.ts","../auth-compat/src/popup_redirect.ts","../auth-compat/src/wrap.ts","../auth-compat/src/user_credential.ts","../auth-compat/src/user.ts","../auth-compat/src/auth.ts","../auth/src/core/strategies/email_link.ts","../auth/internal/index.ts","../auth/src/core/strategies/anonymous.ts","../auth-compat/src/phone_auth_provider.ts","../auth-compat/src/recaptcha_verifier.ts","../database/src/core/version.ts","../database/src/core/storage/DOMStorageWrapper.ts","../database/src/core/storage/MemoryStorage.ts","../database/src/core/storage/storage.ts","../database/src/core/snap/snap.ts","../database/src/core/snap/indexes/PriorityIndex.ts","../database/src/core/operation/Operation.ts","../database/src/core/SyncPoint.ts","../database/src/core/SyncTree.ts","../database/src/register.ts","../database/src/core/util/util.ts","../database/src/core/AppCheckTokenProvider.ts","../database/src/core/AuthTokenProvider.ts","../database/src/realtime/Constants.ts","../database/src/core/RepoInfo.ts","../database/src/core/stats/StatsCollection.ts","../database/src/core/stats/StatsManager.ts","../database/src/realtime/polling/PacketReceiver.ts","../database/src/realtime/BrowserPollConnection.ts","../database/src/realtime/WebSocketConnection.ts","../database/src/realtime/TransportManager.ts","../database/src/realtime/Connection.ts","../database/src/core/ServerActions.ts","../database/src/core/util/EventEmitter.ts","../database/src/core/util/OnlineMonitor.ts","../database/src/core/util/Path.ts","../database/src/core/util/VisibilityMonitor.ts","../database/src/core/PersistentConnection.ts","../database/src/core/snap/Node.ts","../database/src/core/snap/indexes/Index.ts","../database/src/core/snap/indexes/KeyIndex.ts","../database/src/core/util/SortedMap.ts","../database/src/core/snap/comparators.ts","../database/src/core/snap/LeafNode.ts","../database/src/core/snap/childSet.ts","../database/src/core/snap/IndexMap.ts","../database/src/core/snap/ChildrenNode.ts","../database/src/core/snap/nodeFromJSON.ts","../database/src/core/snap/indexes/PathIndex.ts","../database/src/core/snap/indexes/ValueIndex.ts","../database/src/core/view/Change.ts","../datab
ase/src/core/view/filter/IndexedFilter.ts","../database/src/core/view/filter/RangedFilter.ts","../database/src/core/view/filter/LimitedFilter.ts","../database/src/core/view/QueryParams.ts","../database/src/core/ReadonlyRestClient.ts","../database/src/core/SnapshotHolder.ts","../database/src/core/SparseSnapshotTree.ts","../database/src/core/stats/StatsListener.ts","../database/src/core/stats/StatsReporter.ts","../database/src/core/operation/AckUserWrite.ts","../database/src/core/operation/ListenComplete.ts","../database/src/core/operation/Overwrite.ts","../database/src/core/operation/Merge.ts","../database/src/core/view/CacheNode.ts","../database/src/core/view/EventGenerator.ts","../database/src/core/view/ViewCache.ts","../database/src/core/util/ImmutableTree.ts","../database/src/core/CompoundWrite.ts","../database/src/core/WriteTree.ts","../database/src/core/view/ChildChangeAccumulator.ts","../database/src/core/view/CompleteChildSource.ts","../database/src/core/view/ViewProcessor.ts","../database/src/core/view/View.ts","../database/src/core/util/ServerValues.ts","../database/src/core/util/Tree.ts","../database/src/core/util/validation.ts","../database/src/core/view/EventQueue.ts","../database/src/core/Repo.ts","../database/src/core/util/libs/parser.ts","../database/src/core/util/NextPushId.ts","../database/src/core/view/Event.ts","../database/src/core/view/EventRegistration.ts","../database/src/api/OnDisconnect.ts","../database/src/api/Reference_impl.ts","../database/src/api/Database.ts","../database/src/api/ServerValue.ts","../database/src/api/Transaction.ts","../database/src/api/test_access.ts","../database-compat/src/util/util.ts","../database-compat/src/api/onDisconnect.ts","../database-compat/src/api/TransactionResult.ts","../database-compat/src/api/Reference.ts","../database-compat/src/util/validation.ts","../database-compat/src/api/Database.ts","../database-compat/src/index.ts","../database-compat/src/api/internal.ts","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/base.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/disposable/disposable.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/array/array.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/events/event.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/events/browserfeature.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/string/internal.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/labs/useragent/util.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/reflect/reflect.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/useragent/useragent.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/labs/useragent/browser.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/labs/useragent/engine.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/events/browserevent.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/events/eventtype.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/events/listenable.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/events/listenablekey.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/events/listener.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/events/listenermap.js",".
./webchannel-wrapper/node_modules/google-closure-library/closure/goog/object/object.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/events/events.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/events/eventtarget.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/json/json.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/async/workqueue.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/async/freelist.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/async/run.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/async/throwexception.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/timer/timer.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/async/throttle.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/events/eventhandler.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/labs/net/webchannel/webchanneldebug.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/labs/net/webchannel/requeststats.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/net/errorcode.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/net/eventtype.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/net/xmlhttpfactory.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/labs/net/webchannel.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/net/xmlhttp.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/labs/net/webchannel/channelrequest.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/labs/net/webchannel/environment.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/labs/net/webchannel/webchannelbase.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/uri/uri.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/disposable/dispose.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/labs/net/webchannel/wirev8.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/net/xhrio.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/labs/net/webchannel/forwardchannelrequestpool.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/structs/structs.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/string/string.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/uri/utils.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/labs/net/webchannel/wire.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/json/nativejsonprocessor.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/functions/functions.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/labs/net/webchannel/netutils.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/net/fetchxmlhttpfactory.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/json/hybrid.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/net/httpstatus.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/net/rpc/httpcors.js","../webchann
el-wrapper/node_modules/google-closure-library/closure/goog/labs/net/webchannel/webchannelbasetransport.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/crypt/md5.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/crypt/hash.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/math/integer.js","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/labs/net/webchanneltransport.js","../webchannel-wrapper/dist/temp/src/index.js","../firestore/src/remote/rpc_error.ts","../firestore/src/auth/user.ts","../firestore/src/core/version.ts","../firestore/src/util/log.ts","../firestore/src/platform/browser/format_json.ts","../firestore/src/util/assert.ts","../firestore/src/util/error.ts","../firestore/src/util/promise.ts","../firestore/src/api/credentials.ts","../firestore/src/util/misc.ts","../firestore/src/platform/browser/random_bytes.ts","../firestore/src/lite-api/timestamp.ts","../firestore/src/core/snapshot_version.ts","../firestore/src/model/path.ts","../firestore/src/model/document_key.ts","../firestore/src/model/field_index.ts","../firestore/src/local/persistence_transaction.ts","../firestore/src/local/local_store.ts","../firestore/src/local/persistence_promise.ts","../firestore/src/local/simple_db.ts","../firestore/src/local/index_backfiller.ts","../firestore/src/core/listen_sequence.ts","../firestore/src/util/types.ts","../firestore/src/local/encoded_resource_path.ts","../firestore/src/local/indexeddb_sentinels.ts","../firestore/src/local/indexeddb_schema_legacy.ts","../firestore/src/local/indexeddb_transaction.ts","../firestore/src/util/obj.ts","../firestore/src/util/sorted_map.ts","../firestore/src/util/sorted_set.ts","../firestore/src/model/field_mask.ts","../firestore/src/util/base64_decode_error.ts","../firestore/src/util/byte_string.ts","../firestore/src/platform/browser/base64.ts","../firestore/src/model/normalize.ts","../firestore/src/model/server_timestamps.ts","../firestore/src/core/database_info.ts","../firestore/src/model/values.ts","../firestore/src/model/object_value.ts","../firestore/src/model/document.ts","../firestore/src/core/bound.ts","../firestore/src/core/order_by.ts","../firestore/src/core/filter.ts","../firestore/src/core/target.ts","../firestore/src/core/query.ts","../firestore/src/util/obj_map.ts","../firestore/src/model/collections.ts","../firestore/src/remote/number_serializer.ts","../firestore/src/model/transform_operation.ts","../firestore/src/model/mutation.ts","../firestore/src/model/mutation_batch.ts","../firestore/src/model/overlay.ts","../firestore/src/remote/existence_filter.ts","../firestore/src/platform/browser/text_serializer.ts","../firestore/src/remote/bloom_filter.ts","../firestore/src/remote/remote_event.ts","../firestore/src/remote/watch_change.ts","../firestore/src/remote/serializer.ts","../firestore/src/local/target_data.ts","../firestore/src/local/local_serializer.ts","../firestore/src/local/indexeddb_bundle_cache.ts","../firestore/src/local/indexeddb_document_overlay_cache.ts","../firestore/src/index/firestore_index_value_writer.ts","../firestore/src/index/ordered_code_writer.ts","../firestore/src/index/index_byte_encoder.ts","../firestore/src/index/index_entry.ts","../firestore/src/model/target_index_matcher.ts","../firestore/src/util/logic_utils.ts","../firestore/src/local/memory_index_manager.ts","../firestore/src/local/indexeddb_index_manager.ts","../firestore/src/local/lru_garbage_collector.ts","../firestore/src/local/indexeddb_mutation_batch_impl.ts
","../firestore/src/local/indexeddb_mutation_queue.ts","../firestore/src/core/target_id_generator.ts","../firestore/src/local/indexeddb_target_cache.ts","../firestore/src/local/lru_garbage_collector_impl.ts","../firestore/src/local/indexeddb_lru_delegate_impl.ts","../firestore/src/local/remote_document_change_buffer.ts","../firestore/src/local/indexeddb_remote_document_cache.ts","../firestore/src/local/overlayed_document.ts","../firestore/src/local/local_documents_view.ts","../firestore/src/local/memory_bundle_cache.ts","../firestore/src/local/memory_document_overlay_cache.ts","../firestore/src/local/reference_set.ts","../firestore/src/local/memory_mutation_queue.ts","../firestore/src/local/memory_remote_document_cache.ts","../firestore/src/local/memory_target_cache.ts","../firestore/src/local/memory_persistence.ts","../firestore/src/local/indexeddb_schema_converter.ts","../firestore/src/local/indexeddb_persistence.ts","../firestore/src/local/indexeddb_schema.ts","../firestore/src/local/local_view_changes.ts","../firestore/src/local/query_context.ts","../firestore/src/local/query_engine.ts","../firestore/src/local/local_store_impl.ts","../firestore/src/local/shared_client_state_schema.ts","../firestore/src/local/shared_client_state.ts","../firestore/src/remote/connectivity_monitor_noop.ts","../firestore/src/platform/browser/connectivity_monitor.ts","../firestore/src/util/debug_uid.ts","../firestore/src/remote/rest_connection.ts","../firestore/src/remote/stream_bridge.ts","../firestore/src/platform/browser/webchannel_connection.ts","../webchannel-wrapper/node_modules/google-closure-library/closure/goog/labs/net/webchanneltransportfactory.js","../firestore/src/platform/browser/dom.ts","../firestore/src/platform/browser/serializer.ts","../firestore/src/remote/backoff.ts","../firestore/src/remote/persistent_stream.ts","../firestore/src/remote/datastore.ts","../firestore/src/remote/online_state_tracker.ts","../firestore/src/remote/remote_store.ts","../firestore/src/util/async_queue.ts","../firestore/src/model/document_set.ts","../firestore/src/core/view_snapshot.ts","../firestore/src/core/event_manager.ts","../firestore/src/util/bundle_reader.ts","../firestore/src/core/bundle_impl.ts","../firestore/src/core/view.ts","../firestore/src/core/sync_engine_impl.ts","../firestore/src/core/component_provider.ts","../firestore/src/platform/browser/connection.ts","../firestore/src/util/byte_stream.ts","../firestore/src/util/async_observer.ts","../firestore/src/util/bundle_reader_impl.ts","../firestore/src/core/transaction.ts","../firestore/src/core/transaction_runner.ts","../firestore/src/core/firestore_client.ts","../firestore/src/platform/browser/byte_stream_reader.ts","../firestore/src/api/long_polling_options.ts","../firestore/src/lite-api/components.ts","../firestore/src/util/input_validation.ts","../firestore/src/lite-api/settings.ts","../firestore/src/lite-api/database.ts","../firestore/src/lite-api/reference.ts","../firestore/src/util/async_queue_impl.ts","../firestore/src/api/observer.ts","../firestore/src/api/bundle.ts","../firestore/src/api/database.ts","../firestore/src/register.ts","../firestore-compat/src/index.ts","../firestore-compat/src/config.ts","../functions/src/config.ts","../firestore/src/lite-api/bytes.ts","../firestore/src/lite-api/field_path.ts","../firestore/src/lite-api/field_value.ts","../firestore/src/lite-api/geo_point.ts","../firestore/src/lite-api/user_data_reader.ts","../firestore/src/lite-api/snapshot.ts","../firestore/src/lite-api/query.ts","../firestore/src/lite-api/use
r_data_writer.ts","../firestore/src/lite-api/reference_impl.ts","../firestore/src/api/snapshot.ts","../firestore/src/api/reference_impl.ts","../firestore/src/core/transaction_options.ts","../firestore/src/lite-api/write_batch.ts","../firestore/src/api/transaction.ts","../firestore/src/lite-api/transaction.ts","../firestore-compat/src/util/input_validation.ts","../firestore-compat/src/api/blob.ts","../firestore-compat/src/api/observer.ts","../firestore-compat/src/api/database.ts","../firestore-compat/src/api/field_path.ts","../firestore-compat/src/api/field_value.ts","../firestore/src/lite-api/field_value_impl.ts","../functions/src/serializer.ts","../functions/src/constants.ts","../functions/src/error.ts","../functions/src/context.ts","../functions/src/service.ts","../functions/src/api.ts","../functions/src/index.ts","../functions-compat/src/register.ts","../messaging/src/util/constants.ts","../messaging/src/interfaces/internal-message-payload.ts","../functions-compat/src/service.ts","../functions-compat/src/index.ts","../messaging/src/helpers/array-base64-translator.ts","../messaging/src/helpers/migrate-old-database.ts","../messaging/src/internals/idb-manager.ts","../messaging/src/util/errors.ts","../messaging/src/internals/requests.ts","../messaging/src/internals/token-manager.ts","../messaging/src/helpers/externalizePayload.ts","../messaging/src/helpers/logToFirelog.ts","../messaging/src/helpers/extract-app-config.ts","../messaging/src/messaging-service.ts","../messaging/src/helpers/registerDefaultSw.ts","../messaging/src/api/getToken.ts","../messaging/src/helpers/updateVapidKey.ts","../messaging/src/helpers/updateSwReg.ts","../messaging/src/helpers/logToScion.ts","../messaging/src/listeners/window-listener.ts","../messaging/src/helpers/is-console-message.ts","../messaging/src/helpers/register.ts","../messaging/src/api.ts","../messaging/src/api/deleteToken.ts","../messaging/src/api/onMessage.ts","../messaging/src/listeners/sw-listeners.ts","../messaging/src/helpers/sleep.ts","../messaging/src/api/onBackgroundMessage.ts","../storage/src/implementation/error.ts","../storage/src/implementation/connection.ts","../storage-compat/src/index.ts","../messaging-compat/src/messaging-compat.ts","../messaging-compat/src/registerMessagingCompat.ts","../messaging-compat/src/index.ts","../storage/src/implementation/constants.ts","../storage/src/implementation/location.ts","../storage/src/implementation/failrequest.ts","../storage/src/implementation/type.ts","../storage/src/implementation/url.ts","../storage/src/implementation/utils.ts","../storage/src/implementation/request.ts","../storage/src/implementation/backoff.ts","../storage/src/implementation/fs.ts","../storage/src/platform/browser/base64.ts","../storage/src/implementation/string.ts","../storage/src/implementation/blob.ts","../storage/src/implementation/json.ts","../storage/src/implementation/path.ts","../storage/src/implementation/metadata.ts","../storage/src/implementation/list.ts","../storage/src/implementation/requestinfo.ts","../storage/src/implementation/requests.ts","../storage/src/implementation/taskenums.ts","../storage/src/implementation/observer.ts","../storage/src/implementation/async.ts","../storage/src/platform/browser/connection.ts","../storage/src/task.ts","../storage/src/reference.ts","../storage/src/service.ts","../storage/src/api.ts","../storage/src/index.ts","../storage/src/constants.ts","../storage-compat/src/tasksnapshot.ts","../storage-compat/src/task.ts","../storage-compat/src/list.ts","../storage-compat/src/reference.ts",
"../storage-compat/src/service.ts","../performance/src/constants.ts","../performance/src/utils/attributes_utils.ts","../performance-compat/src/index.ts","../remote-config-compat/src/index.ts","../performance/src/utils/errors.ts","../performance/src/utils/console_logger.ts","../performance/src/services/api_service.ts","../performance/src/services/iid_service.ts","../performance/src/utils/string_merger.ts","../performance/src/services/settings_service.ts","../performance/src/utils/app_utils.ts","../performance/src/services/remote_config_service.ts","../performance/src/services/initialization_service.ts","../performance/src/services/transport_service.ts","../performance/src/services/perf_logger.ts","../performance/src/utils/metric_utils.ts","../performance/src/resources/trace.ts","../performance/src/resources/network_request.ts","../performance/src/services/oob_resources_service.ts","../performance/src/controllers/perf.ts","../performance/src/index.ts","../performance-compat/src/performance.ts","../remote-config/src/client/remote_config_fetch_client.ts","../remote-config/src/constants.ts","../remote-config/src/errors.ts","../remote-config/src/value.ts","../remote-config/src/api.ts","../remote-config/src/client/caching_client.ts","../remote-config/src/client/rest_client.ts","../remote-config/src/language.ts","../remote-config/src/client/retrying_client.ts","../remote-config/src/remote_config.ts","../remote-config/src/storage/storage.ts","../remote-config/src/storage/storage_cache.ts","../remote-config/src/api2.ts","../remote-config/src/register.ts","../remote-config-compat/src/remoteConfig.ts","compat/index.cdn.ts"],"sourcesContent":["/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n/**\n * @fileoverview Firebase constants. 
Some of these (@defines) can be overridden at compile-time.\n */\n\nexport const CONSTANTS = {\n /**\n * @define {boolean} Whether this is the client Node.js SDK.\n */\n NODE_CLIENT: false,\n /**\n * @define {boolean} Whether this is the Admin Node.js SDK.\n */\n NODE_ADMIN: false,\n\n /**\n * Firebase SDK Version\n */\n SDK_VERSION: '${JSCORE_VERSION}'\n};\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { CONSTANTS } from './constants';\n\n/**\n * Throws an error if the provided assertion is falsy\n */\nexport const assert = function (assertion: unknown, message: string): void {\n if (!assertion) {\n throw assertionError(message);\n }\n};\n\n/**\n * Returns an Error object suitable for throwing.\n */\nexport const assertionError = function (message: string): Error {\n return new Error(\n 'Firebase Database (' +\n CONSTANTS.SDK_VERSION +\n ') INTERNAL ASSERT FAILED: ' +\n message\n );\n};\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nconst stringToByteArray = function (str: string): number[] {\n // TODO(user): Use native implementations if/when available\n const out: number[] = [];\n let p = 0;\n for (let i = 0; i < str.length; i++) {\n let c = str.charCodeAt(i);\n if (c < 128) {\n out[p++] = c;\n } else if (c < 2048) {\n out[p++] = (c >> 6) | 192;\n out[p++] = (c & 63) | 128;\n } else if (\n (c & 0xfc00) === 0xd800 &&\n i + 1 < str.length &&\n (str.charCodeAt(i + 1) & 0xfc00) === 0xdc00\n ) {\n // Surrogate Pair\n c = 0x10000 + ((c & 0x03ff) << 10) + (str.charCodeAt(++i) & 0x03ff);\n out[p++] = (c >> 18) | 240;\n out[p++] = ((c >> 12) & 63) | 128;\n out[p++] = ((c >> 6) & 63) | 128;\n out[p++] = (c & 63) | 128;\n } else {\n out[p++] = (c >> 12) | 224;\n out[p++] = ((c >> 6) & 63) | 128;\n out[p++] = (c & 63) | 128;\n }\n }\n return out;\n};\n\n/**\n * Turns an array of numbers into the string given by the concatenation of the\n * characters to which the numbers correspond.\n * @param bytes Array of numbers representing characters.\n * @return Stringification of the array.\n */\nconst byteArrayToString = function (bytes: number[]): string {\n // TODO(user): Use native implementations if/when available\n const out: string[] = [];\n let pos = 0,\n c = 0;\n while (pos < bytes.length) {\n const c1 = bytes[pos++];\n if (c1 < 128) {\n out[c++] = String.fromCharCode(c1);\n } else if (c1 > 191 && c1 < 224) {\n const c2 = bytes[pos++];\n out[c++] = 
String.fromCharCode(((c1 & 31) << 6) | (c2 & 63));\n } else if (c1 > 239 && c1 < 365) {\n // Surrogate Pair\n const c2 = bytes[pos++];\n const c3 = bytes[pos++];\n const c4 = bytes[pos++];\n const u =\n (((c1 & 7) << 18) | ((c2 & 63) << 12) | ((c3 & 63) << 6) | (c4 & 63)) -\n 0x10000;\n out[c++] = String.fromCharCode(0xd800 + (u >> 10));\n out[c++] = String.fromCharCode(0xdc00 + (u & 1023));\n } else {\n const c2 = bytes[pos++];\n const c3 = bytes[pos++];\n out[c++] = String.fromCharCode(\n ((c1 & 15) << 12) | ((c2 & 63) << 6) | (c3 & 63)\n );\n }\n }\n return out.join('');\n};\n\ninterface Base64 {\n byteToCharMap_: { [key: number]: string } | null;\n charToByteMap_: { [key: string]: number } | null;\n byteToCharMapWebSafe_: { [key: number]: string } | null;\n charToByteMapWebSafe_: { [key: string]: number } | null;\n ENCODED_VALS_BASE: string;\n readonly ENCODED_VALS: string;\n readonly ENCODED_VALS_WEBSAFE: string;\n HAS_NATIVE_SUPPORT: boolean;\n encodeByteArray(input: number[] | Uint8Array, webSafe?: boolean): string;\n encodeString(input: string, webSafe?: boolean): string;\n decodeString(input: string, webSafe: boolean): string;\n decodeStringToByteArray(input: string, webSafe: boolean): number[];\n init_(): void;\n}\n\n// We define it as an object literal instead of a class because a class compiled down to es5 can't\n// be treeshaked. https://github.com/rollup/rollup/issues/1691\n// Static lookup maps, lazily populated by init_()\nexport const base64: Base64 = {\n /**\n * Maps bytes to characters.\n */\n byteToCharMap_: null,\n\n /**\n * Maps characters to bytes.\n */\n charToByteMap_: null,\n\n /**\n * Maps bytes to websafe characters.\n * @private\n */\n byteToCharMapWebSafe_: null,\n\n /**\n * Maps websafe characters to bytes.\n * @private\n */\n charToByteMapWebSafe_: null,\n\n /**\n * Our default alphabet, shared between\n * ENCODED_VALS and ENCODED_VALS_WEBSAFE\n */\n ENCODED_VALS_BASE:\n 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' + 'abcdefghijklmnopqrstuvwxyz' + '0123456789',\n\n /**\n * Our default alphabet. Value 64 (=) is special; it means \"nothing.\"\n */\n get ENCODED_VALS() {\n return this.ENCODED_VALS_BASE + '+/=';\n },\n\n /**\n * Our websafe alphabet.\n */\n get ENCODED_VALS_WEBSAFE() {\n return this.ENCODED_VALS_BASE + '-_.';\n },\n\n /**\n * Whether this browser supports the atob and btoa functions. This extension\n * started at Mozilla but is now implemented by many browsers. We use the\n * ASSUME_* variables to avoid pulling in the full useragent detection library\n * but still allowing the standard per-browser compilations.\n *\n */\n HAS_NATIVE_SUPPORT: typeof atob === 'function',\n\n /**\n * Base64-encode an array of bytes.\n *\n * @param input An array of bytes (numbers with\n * value in [0, 255]) to encode.\n * @param webSafe Boolean indicating we should use the\n * alternative alphabet.\n * @return The base64 encoded string.\n */\n encodeByteArray(input: number[] | Uint8Array, webSafe?: boolean): string {\n if (!Array.isArray(input)) {\n throw Error('encodeByteArray takes an array as a parameter');\n }\n\n this.init_();\n\n const byteToCharMap = webSafe\n ? this.byteToCharMapWebSafe_!\n : this.byteToCharMap_!;\n\n const output = [];\n\n for (let i = 0; i < input.length; i += 3) {\n const byte1 = input[i];\n const haveByte2 = i + 1 < input.length;\n const byte2 = haveByte2 ? input[i + 1] : 0;\n const haveByte3 = i + 2 < input.length;\n const byte3 = haveByte3 ? 
input[i + 2] : 0;\n\n const outByte1 = byte1 >> 2;\n const outByte2 = ((byte1 & 0x03) << 4) | (byte2 >> 4);\n let outByte3 = ((byte2 & 0x0f) << 2) | (byte3 >> 6);\n let outByte4 = byte3 & 0x3f;\n\n if (!haveByte3) {\n outByte4 = 64;\n\n if (!haveByte2) {\n outByte3 = 64;\n }\n }\n\n output.push(\n byteToCharMap[outByte1],\n byteToCharMap[outByte2],\n byteToCharMap[outByte3],\n byteToCharMap[outByte4]\n );\n }\n\n return output.join('');\n },\n\n /**\n * Base64-encode a string.\n *\n * @param input A string to encode.\n * @param webSafe If true, we should use the\n * alternative alphabet.\n * @return The base64 encoded string.\n */\n encodeString(input: string, webSafe?: boolean): string {\n // Shortcut for Mozilla browsers that implement\n // a native base64 encoder in the form of \"btoa/atob\"\n if (this.HAS_NATIVE_SUPPORT && !webSafe) {\n return btoa(input);\n }\n return this.encodeByteArray(stringToByteArray(input), webSafe);\n },\n\n /**\n * Base64-decode a string.\n *\n * @param input to decode.\n * @param webSafe True if we should use the\n * alternative alphabet.\n * @return string representing the decoded value.\n */\n decodeString(input: string, webSafe: boolean): string {\n // Shortcut for Mozilla browsers that implement\n // a native base64 encoder in the form of \"btoa/atob\"\n if (this.HAS_NATIVE_SUPPORT && !webSafe) {\n return atob(input);\n }\n return byteArrayToString(this.decodeStringToByteArray(input, webSafe));\n },\n\n /**\n * Base64-decode a string.\n *\n * In base-64 decoding, groups of four characters are converted into three\n * bytes. If the encoder did not apply padding, the input length may not\n * be a multiple of 4.\n *\n * In this case, the last group will have fewer than 4 characters, and\n * padding will be inferred. If the group has one or two characters, it decodes\n * to one byte. If the group has three characters, it decodes to two bytes.\n *\n * @param input Input to decode.\n * @param webSafe True if we should use the web-safe alphabet.\n * @return bytes representing the decoded value.\n */\n decodeStringToByteArray(input: string, webSafe: boolean): number[] {\n this.init_();\n\n const charToByteMap = webSafe\n ? this.charToByteMapWebSafe_!\n : this.charToByteMap_!;\n\n const output: number[] = [];\n\n for (let i = 0; i < input.length; ) {\n const byte1 = charToByteMap[input.charAt(i++)];\n\n const haveByte2 = i < input.length;\n const byte2 = haveByte2 ? charToByteMap[input.charAt(i)] : 0;\n ++i;\n\n const haveByte3 = i < input.length;\n const byte3 = haveByte3 ? charToByteMap[input.charAt(i)] : 64;\n ++i;\n\n const haveByte4 = i < input.length;\n const byte4 = haveByte4 ? charToByteMap[input.charAt(i)] : 64;\n ++i;\n\n if (byte1 == null || byte2 == null || byte3 == null || byte4 == null) {\n throw new DecodeBase64StringError();\n }\n\n const outByte1 = (byte1 << 2) | (byte2 >> 4);\n output.push(outByte1);\n\n if (byte3 !== 64) {\n const outByte2 = ((byte2 << 4) & 0xf0) | (byte3 >> 2);\n output.push(outByte2);\n\n if (byte4 !== 64) {\n const outByte3 = ((byte3 << 6) & 0xc0) | byte4;\n output.push(outByte3);\n }\n }\n }\n\n return output;\n },\n\n /**\n * Lazy static initialization function. 
Called before\n * accessing any of the static map variables.\n * @private\n */\n init_() {\n if (!this.byteToCharMap_) {\n this.byteToCharMap_ = {};\n this.charToByteMap_ = {};\n this.byteToCharMapWebSafe_ = {};\n this.charToByteMapWebSafe_ = {};\n\n // We want quick mappings back and forth, so we precompute two maps.\n for (let i = 0; i < this.ENCODED_VALS.length; i++) {\n this.byteToCharMap_[i] = this.ENCODED_VALS.charAt(i);\n this.charToByteMap_[this.byteToCharMap_[i]] = i;\n this.byteToCharMapWebSafe_[i] = this.ENCODED_VALS_WEBSAFE.charAt(i);\n this.charToByteMapWebSafe_[this.byteToCharMapWebSafe_[i]] = i;\n\n // Be forgiving when decoding and correctly decode both encodings.\n if (i >= this.ENCODED_VALS_BASE.length) {\n this.charToByteMap_[this.ENCODED_VALS_WEBSAFE.charAt(i)] = i;\n this.charToByteMapWebSafe_[this.ENCODED_VALS.charAt(i)] = i;\n }\n }\n }\n }\n};\n\n/**\n * An error encountered while decoding base64 string.\n */\nexport class DecodeBase64StringError extends Error {\n readonly name = 'DecodeBase64StringError';\n}\n\n/**\n * URL-safe base64 encoding\n */\nexport const base64Encode = function (str: string): string {\n const utf8Bytes = stringToByteArray(str);\n return base64.encodeByteArray(utf8Bytes, true);\n};\n\n/**\n * URL-safe base64 encoding (without \".\" padding in the end).\n * e.g. Used in JSON Web Token (JWT) parts.\n */\nexport const base64urlEncodeWithoutPadding = function (str: string): string {\n // Use base64url encoding and remove padding in the end (dot characters).\n return base64Encode(str).replace(/\\./g, '');\n};\n\n/**\n * URL-safe base64 decoding\n *\n * NOTE: DO NOT use the global atob() function - it does NOT support the\n * base64Url variant encoding.\n *\n * @param str To be decoded\n * @return Decoded result, if possible\n */\nexport const base64Decode = function (str: string): string | null {\n try {\n return base64.decodeString(str, true);\n } catch (e) {\n console.error('base64Decode failed: ', e);\n }\n return null;\n};\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n/**\n * Do a deep-copy of basic JavaScript Objects or Arrays.\n */\nexport function deepCopy(value: T): T {\n return deepExtend(undefined, value) as T;\n}\n\n/**\n * Copy properties from source to target (recursively allows extension\n * of Objects and Arrays). 
Scalar values in the target are over-written.\n * If target is undefined, an object of the appropriate type will be created\n * (and returned).\n *\n * We recursively copy all child properties of plain Objects in the source- so\n * that namespace- like dictionaries are merged.\n *\n * Note that the target can be a function, in which case the properties in\n * the source Object are copied onto it as static properties of the Function.\n *\n * Note: we don't merge __proto__ to prevent prototype pollution\n */\nexport function deepExtend(target: unknown, source: unknown): unknown {\n if (!(source instanceof Object)) {\n return source;\n }\n\n switch (source.constructor) {\n case Date:\n // Treat Dates like scalars; if the target date object had any child\n // properties - they will be lost!\n const dateValue = source as Date;\n return new Date(dateValue.getTime());\n\n case Object:\n if (target === undefined) {\n target = {};\n }\n break;\n case Array:\n // Always copy the array source and overwrite the target.\n target = [];\n break;\n\n default:\n // Not a plain Object - treat it as a scalar.\n return source;\n }\n\n for (const prop in source) {\n // use isValidKey to guard against prototype pollution. See https://snyk.io/vuln/SNYK-JS-LODASH-450202\n if (!source.hasOwnProperty(prop) || !isValidKey(prop)) {\n continue;\n }\n (target as Record)[prop] = deepExtend(\n (target as Record)[prop],\n (source as Record)[prop]\n );\n }\n\n return target;\n}\n\nfunction isValidKey(key: string): boolean {\n return key !== '__proto__';\n}\n","/**\n * @license\n * Copyright 2022 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n/**\n * Polyfill for `globalThis` object.\n * @returns the `globalThis` object for the given environment.\n * @public\n */\nexport function getGlobal(): typeof globalThis {\n if (typeof self !== 'undefined') {\n return self;\n }\n if (typeof window !== 'undefined') {\n return window;\n }\n if (typeof global !== 'undefined') {\n return global;\n }\n throw new Error('Unable to locate global object.');\n}\n","/**\n * @license\n * Copyright 2022 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { base64Decode } from './crypt';\nimport { getGlobal } from './global';\n\n/**\n * Keys for experimental properties on the `FirebaseDefaults` object.\n * @public\n */\nexport type ExperimentalKey = 'authTokenSyncURL' | 'authIdTokenMaxAge';\n\n/**\n * An object that can be injected into the environment as __FIREBASE_DEFAULTS__,\n * 
either as a property of globalThis, a shell environment variable, or a\n * cookie.\n *\n * This object can be used to automatically configure and initialize\n * a Firebase app as well as any emulators.\n *\n * @public\n */\nexport interface FirebaseDefaults {\n config?: Record;\n emulatorHosts?: Record;\n _authTokenSyncURL?: string;\n _authIdTokenMaxAge?: number;\n /**\n * Override Firebase's runtime environment detection and\n * force the SDK to act as if it were in the specified environment.\n */\n forceEnvironment?: 'browser' | 'node';\n [key: string]: unknown;\n}\n\ndeclare global {\n // Need `var` for this to work.\n // eslint-disable-next-line no-var\n var __FIREBASE_DEFAULTS__: FirebaseDefaults | undefined;\n}\n\nconst getDefaultsFromGlobal = (): FirebaseDefaults | undefined =>\n getGlobal().__FIREBASE_DEFAULTS__;\n\n/**\n * Attempt to read defaults from a JSON string provided to\n * process(.)env(.)__FIREBASE_DEFAULTS__ or a JSON file whose path is in\n * process(.)env(.)__FIREBASE_DEFAULTS_PATH__\n * The dots are in parens because certain compilers (Vite?) cannot\n * handle seeing that variable in comments.\n * See https://github.com/firebase/firebase-js-sdk/issues/6838\n */\nconst getDefaultsFromEnvVariable = (): FirebaseDefaults | undefined => {\n if (typeof process === 'undefined' || typeof process.env === 'undefined') {\n return;\n }\n const defaultsJsonString = process.env.__FIREBASE_DEFAULTS__;\n if (defaultsJsonString) {\n return JSON.parse(defaultsJsonString);\n }\n};\n\nconst getDefaultsFromCookie = (): FirebaseDefaults | undefined => {\n if (typeof document === 'undefined') {\n return;\n }\n let match;\n try {\n match = document.cookie.match(/__FIREBASE_DEFAULTS__=([^;]+)/);\n } catch (e) {\n // Some environments such as Angular Universal SSR have a\n // `document` object but error on accessing `document.cookie`.\n return;\n }\n const decoded = match && base64Decode(match[1]);\n return decoded && JSON.parse(decoded);\n};\n\n/**\n * Get the __FIREBASE_DEFAULTS__ object. It checks in order:\n * (1) if such an object exists as a property of `globalThis`\n * (2) if such an object was provided on a shell environment variable\n * (3) if such an object exists in a cookie\n * @public\n */\nexport const getDefaults = (): FirebaseDefaults | undefined => {\n try {\n return (\n getDefaultsFromGlobal() ||\n getDefaultsFromEnvVariable() ||\n getDefaultsFromCookie()\n );\n } catch (e) {\n /**\n * Catch-all for being unable to get __FIREBASE_DEFAULTS__ due\n * to any environment case we have not accounted for. 
Log to\n * info instead of swallowing so we can find these unknown cases\n * and add paths for them if needed.\n */\n console.info(`Unable to get __FIREBASE_DEFAULTS__ due to: ${e}`);\n return;\n }\n};\n\n/**\n * Returns emulator host stored in the __FIREBASE_DEFAULTS__ object\n * for the given product.\n * @returns a URL host formatted like `127.0.0.1:9999` or `[::1]:4000` if available\n * @public\n */\nexport const getDefaultEmulatorHost = (\n productName: string\n): string | undefined => getDefaults()?.emulatorHosts?.[productName];\n\n/**\n * Returns emulator hostname and port stored in the __FIREBASE_DEFAULTS__ object\n * for the given product.\n * @returns a pair of hostname and port like `[\"::1\", 4000]` if available\n * @public\n */\nexport const getDefaultEmulatorHostnameAndPort = (\n productName: string\n): [hostname: string, port: number] | undefined => {\n const host = getDefaultEmulatorHost(productName);\n if (!host) {\n return undefined;\n }\n const separatorIndex = host.lastIndexOf(':'); // Finding the last since IPv6 addr also has colons.\n if (separatorIndex <= 0 || separatorIndex + 1 === host.length) {\n throw new Error(`Invalid host ${host} with no separate hostname and port!`);\n }\n // eslint-disable-next-line no-restricted-globals\n const port = parseInt(host.substring(separatorIndex + 1), 10);\n if (host[0] === '[') {\n // Bracket-quoted `[ipv6addr]:port` => return \"ipv6addr\" (without brackets).\n return [host.substring(1, separatorIndex - 1), port];\n } else {\n return [host.substring(0, separatorIndex), port];\n }\n};\n\n/**\n * Returns Firebase app config stored in the __FIREBASE_DEFAULTS__ object.\n * @public\n */\nexport const getDefaultAppConfig = (): Record | undefined =>\n getDefaults()?.config;\n\n/**\n * Returns an experimental setting on the __FIREBASE_DEFAULTS__ object (properties\n * prefixed by \"_\")\n * @public\n */\nexport const getExperimentalSetting = (\n name: T\n): FirebaseDefaults[`_${T}`] =>\n getDefaults()?.[`_${name}`] as FirebaseDefaults[`_${T}`];\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport type LogLevelString =\n | 'debug'\n | 'verbose'\n | 'info'\n | 'warn'\n | 'error'\n | 'silent';\n\nexport interface LogOptions {\n level: LogLevelString;\n}\n\nexport type LogCallback = (callbackParams: LogCallbackParams) => void;\n\nexport interface LogCallbackParams {\n level: LogLevelString;\n message: string;\n args: unknown[];\n type: string;\n}\n\n/**\n * A container for all of the Logger instances\n */\nexport const instances: Logger[] = [];\n\n/**\n * The JS SDK supports 5 log levels and also allows a user the ability to\n * silence the logs altogether.\n *\n * The order is a follows:\n * DEBUG < VERBOSE < INFO < WARN < ERROR\n *\n * All of the log types above the current log level will be captured (i.e. 
if\n * you set the log level to `INFO`, errors will still be logged, but `DEBUG` and\n * `VERBOSE` logs will not)\n */\nexport enum LogLevel {\n DEBUG,\n VERBOSE,\n INFO,\n WARN,\n ERROR,\n SILENT\n}\n\nconst levelStringToEnum: { [key in LogLevelString]: LogLevel } = {\n 'debug': LogLevel.DEBUG,\n 'verbose': LogLevel.VERBOSE,\n 'info': LogLevel.INFO,\n 'warn': LogLevel.WARN,\n 'error': LogLevel.ERROR,\n 'silent': LogLevel.SILENT\n};\n\n/**\n * The default log level\n */\nconst defaultLogLevel: LogLevel = LogLevel.INFO;\n\n/**\n * We allow users the ability to pass their own log handler. We will pass the\n * type of log, the current log level, and any other arguments passed (i.e. the\n * messages that the user wants to log) to this function.\n */\nexport type LogHandler = (\n loggerInstance: Logger,\n logType: LogLevel,\n ...args: unknown[]\n) => void;\n\n/**\n * By default, `console.debug` is not displayed in the developer console (in\n * chrome). To avoid forcing users to have to opt-in to these logs twice\n * (i.e. once for firebase, and once in the console), we are sending `DEBUG`\n * logs to the `console.log` function.\n */\nconst ConsoleMethod = {\n [LogLevel.DEBUG]: 'log',\n [LogLevel.VERBOSE]: 'log',\n [LogLevel.INFO]: 'info',\n [LogLevel.WARN]: 'warn',\n [LogLevel.ERROR]: 'error'\n};\n\n/**\n * The default log handler will forward DEBUG, VERBOSE, INFO, WARN, and ERROR\n * messages on to their corresponding console counterparts (if the log method\n * is supported by the current log level)\n */\nconst defaultLogHandler: LogHandler = (instance, logType, ...args): void => {\n if (logType < instance.logLevel) {\n return;\n }\n const now = new Date().toISOString();\n const method = ConsoleMethod[logType as keyof typeof ConsoleMethod];\n if (method) {\n console[method as 'log' | 'info' | 'warn' | 'error'](\n `[${now}] ${instance.name}:`,\n ...args\n );\n } else {\n throw new Error(\n `Attempted to log a message with an invalid logType (value: ${logType})`\n );\n }\n};\n\nexport class Logger {\n /**\n * Gives you an instance of a Logger to capture messages according to\n * Firebase's logging scheme.\n *\n * @param name The name that the logs will be associated with\n */\n constructor(public name: string) {\n /**\n * Capture the current instance for later use\n */\n instances.push(this);\n }\n\n /**\n * The log level of the given Logger instance.\n */\n private _logLevel = defaultLogLevel;\n\n get logLevel(): LogLevel {\n return this._logLevel;\n }\n\n set logLevel(val: LogLevel) {\n if (!(val in LogLevel)) {\n throw new TypeError(`Invalid value \"${val}\" assigned to \\`logLevel\\``);\n }\n this._logLevel = val;\n }\n\n // Workaround for setter/getter having to be the same type.\n setLogLevel(val: LogLevel | LogLevelString): void {\n this._logLevel = typeof val === 'string' ? 
levelStringToEnum[val] : val;\n }\n\n /**\n * The main (internal) log handler for the Logger instance.\n * Can be set to a new function in internal package code but not by user.\n */\n private _logHandler: LogHandler = defaultLogHandler;\n get logHandler(): LogHandler {\n return this._logHandler;\n }\n set logHandler(val: LogHandler) {\n if (typeof val !== 'function') {\n throw new TypeError('Value assigned to `logHandler` must be a function');\n }\n this._logHandler = val;\n }\n\n /**\n * The optional, additional, user-defined log handler for the Logger instance.\n */\n private _userLogHandler: LogHandler | null = null;\n get userLogHandler(): LogHandler | null {\n return this._userLogHandler;\n }\n set userLogHandler(val: LogHandler | null) {\n this._userLogHandler = val;\n }\n\n /**\n * The functions below are all based on the `console` interface\n */\n\n debug(...args: unknown[]): void {\n this._userLogHandler && this._userLogHandler(this, LogLevel.DEBUG, ...args);\n this._logHandler(this, LogLevel.DEBUG, ...args);\n }\n log(...args: unknown[]): void {\n this._userLogHandler &&\n this._userLogHandler(this, LogLevel.VERBOSE, ...args);\n this._logHandler(this, LogLevel.VERBOSE, ...args);\n }\n info(...args: unknown[]): void {\n this._userLogHandler && this._userLogHandler(this, LogLevel.INFO, ...args);\n this._logHandler(this, LogLevel.INFO, ...args);\n }\n warn(...args: unknown[]): void {\n this._userLogHandler && this._userLogHandler(this, LogLevel.WARN, ...args);\n this._logHandler(this, LogLevel.WARN, ...args);\n }\n error(...args: unknown[]): void {\n this._userLogHandler && this._userLogHandler(this, LogLevel.ERROR, ...args);\n this._logHandler(this, LogLevel.ERROR, ...args);\n }\n}\n\nexport function setLogLevel(level: LogLevelString | LogLevel): void {\n instances.forEach(inst => {\n inst.setLogLevel(level);\n });\n}\n\nexport function setUserLogHandler(\n logCallback: LogCallback | null,\n options?: LogOptions\n): void {\n for (const instance of instances) {\n let customLogLevel: LogLevel | null = null;\n if (options && options.level) {\n customLogLevel = levelStringToEnum[options.level];\n }\n if (logCallback === null) {\n instance.userLogHandler = null;\n } else {\n instance.userLogHandler = (\n instance: Logger,\n level: LogLevel,\n ...args: unknown[]\n ) => {\n const message = args\n .map(arg => {\n if (arg == null) {\n return null;\n } else if (typeof arg === 'string') {\n return arg;\n } else if (typeof arg === 'number' || typeof arg === 'boolean') {\n return arg.toString();\n } else if (arg instanceof Error) {\n return arg.message;\n } else {\n try {\n return JSON.stringify(arg);\n } catch (ignored) {\n return null;\n }\n }\n })\n .filter(arg => arg)\n .join(' ');\n if (level >= (customLogLevel ?? 
instance.logLevel)) {\n logCallback({\n level: LogLevel[level].toLowerCase() as LogLevelString,\n message,\n args,\n type: instance.name\n });\n }\n };\n }\n }\n}\n","import { w as wrap, r as replaceTraps } from './wrap-idb-value.js';\nexport { u as unwrap, w as wrap } from './wrap-idb-value.js';\n\n/**\n * Open a database.\n *\n * @param name Name of the database.\n * @param version Schema version.\n * @param callbacks Additional callbacks.\n */\nfunction openDB(name, version, { blocked, upgrade, blocking, terminated } = {}) {\n const request = indexedDB.open(name, version);\n const openPromise = wrap(request);\n if (upgrade) {\n request.addEventListener('upgradeneeded', (event) => {\n upgrade(wrap(request.result), event.oldVersion, event.newVersion, wrap(request.transaction), event);\n });\n }\n if (blocked) {\n request.addEventListener('blocked', (event) => blocked(\n // Casting due to https://github.com/microsoft/TypeScript-DOM-lib-generator/pull/1405\n event.oldVersion, event.newVersion, event));\n }\n openPromise\n .then((db) => {\n if (terminated)\n db.addEventListener('close', () => terminated());\n if (blocking) {\n db.addEventListener('versionchange', (event) => blocking(event.oldVersion, event.newVersion, event));\n }\n })\n .catch(() => { });\n return openPromise;\n}\n/**\n * Delete a database.\n *\n * @param name Name of the database.\n */\nfunction deleteDB(name, { blocked } = {}) {\n const request = indexedDB.deleteDatabase(name);\n if (blocked) {\n request.addEventListener('blocked', (event) => blocked(\n // Casting due to https://github.com/microsoft/TypeScript-DOM-lib-generator/pull/1405\n event.oldVersion, event));\n }\n return wrap(request).then(() => undefined);\n}\n\nconst readMethods = ['get', 'getKey', 'getAll', 'getAllKeys', 'count'];\nconst writeMethods = ['put', 'add', 'delete', 'clear'];\nconst cachedMethods = new Map();\nfunction getMethod(target, prop) {\n if (!(target instanceof IDBDatabase &&\n !(prop in target) &&\n typeof prop === 'string')) {\n return;\n }\n if (cachedMethods.get(prop))\n return cachedMethods.get(prop);\n const targetFuncName = prop.replace(/FromIndex$/, '');\n const useIndex = prop !== targetFuncName;\n const isWrite = writeMethods.includes(targetFuncName);\n if (\n // Bail if the target doesn't exist on the target. Eg, getAll isn't in Edge.\n !(targetFuncName in (useIndex ? IDBIndex : IDBObjectStore).prototype) ||\n !(isWrite || readMethods.includes(targetFuncName))) {\n return;\n }\n const method = async function (storeName, ...args) {\n // isWrite ? 'readwrite' : undefined gzipps better, but fails in Edge :(\n const tx = this.transaction(storeName, isWrite ? 
'readwrite' : 'readonly');\n let target = tx.store;\n if (useIndex)\n target = target.index(args.shift());\n // Must reject if op rejects.\n // If it's a write operation, must reject if tx.done rejects.\n // Must reject with op rejection first.\n // Must resolve with op value.\n // Must handle both promises (no unhandled rejections)\n return (await Promise.all([\n target[targetFuncName](...args),\n isWrite && tx.done,\n ]))[0];\n };\n cachedMethods.set(prop, method);\n return method;\n}\nreplaceTraps((oldTraps) => ({\n ...oldTraps,\n get: (target, prop, receiver) => getMethod(target, prop) || oldTraps.get(target, prop, receiver),\n has: (target, prop) => !!getMethod(target, prop) || oldTraps.has(target, prop),\n}));\n\nexport { deleteDB, openDB };\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport class Deferred {\n promise: Promise;\n reject: (value?: unknown) => void = () => {};\n resolve: (value?: unknown) => void = () => {};\n constructor() {\n this.promise = new Promise((resolve, reject) => {\n this.resolve = resolve as (value?: unknown) => void;\n this.reject = reject as (value?: unknown) => void;\n });\n }\n\n /**\n * Our API internals are not promiseified and cannot because our callback APIs have subtle expectations around\n * invoking promises inline, which Promises are forbidden to do. This method accepts an optional node-style callback\n * and returns a node-style callback which will resolve or reject the Deferred's promise.\n */\n wrapCallback(\n callback?: (error?: unknown, value?: unknown) => void\n ): (error: unknown, value?: unknown) => void {\n return (error, value?) 
=> {\n if (error) {\n this.reject(error);\n } else {\n this.resolve(value);\n }\n if (typeof callback === 'function') {\n // Attaching noop handler just in case developer wasn't expecting\n // promises\n this.promise.catch(() => {});\n\n // Some of our callbacks don't expect a value and our own tests\n // assert that the parameter length is 1\n if (callback.length === 1) {\n callback(error);\n } else {\n callback(error, value);\n }\n }\n };\n }\n}\n","/**\n * @license\n * Copyright 2021 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { base64urlEncodeWithoutPadding } from './crypt';\n\n// Firebase Auth tokens contain snake_case claims following the JWT standard / convention.\n/* eslint-disable camelcase */\n\nexport type FirebaseSignInProvider =\n | 'custom'\n | 'email'\n | 'password'\n | 'phone'\n | 'anonymous'\n | 'google.com'\n | 'facebook.com'\n | 'github.com'\n | 'twitter.com'\n | 'microsoft.com'\n | 'apple.com';\n\ninterface FirebaseIdToken {\n // Always set to https://securetoken.google.com/PROJECT_ID\n iss: string;\n\n // Always set to PROJECT_ID\n aud: string;\n\n // The user's unique ID\n sub: string;\n\n // The token issue time, in seconds since epoch\n iat: number;\n\n // The token expiry time, normally 'iat' + 3600\n exp: number;\n\n // The user's unique ID. Must be equal to 'sub'\n user_id: string;\n\n // The time the user authenticated, normally 'iat'\n auth_time: number;\n\n // The sign in provider, only set when the provider is 'anonymous'\n provider_id?: 'anonymous';\n\n // The user's primary email\n email?: string;\n\n // The user's email verification status\n email_verified?: boolean;\n\n // The user's primary phone number\n phone_number?: string;\n\n // The user's display name\n name?: string;\n\n // The user's profile photo URL\n picture?: string;\n\n // Information on all identities linked to this user\n firebase: {\n // The primary sign-in provider\n sign_in_provider: FirebaseSignInProvider;\n\n // A map of providers to the user's list of unique identifiers from\n // each provider\n identities?: { [provider in FirebaseSignInProvider]?: string[] };\n };\n\n // Custom claims set by the developer\n [claim: string]: unknown;\n\n uid?: never; // Try to catch a common mistake of \"uid\" (should be \"sub\" instead).\n}\n\nexport type EmulatorMockTokenOptions = ({ user_id: string } | { sub: string }) &\n Partial;\n\nexport function createMockUserToken(\n token: EmulatorMockTokenOptions,\n projectId?: string\n): string {\n if (token.uid) {\n throw new Error(\n 'The \"uid\" field is no longer supported by mockUserToken. 
Please use \"sub\" instead for Firebase Auth User ID.'\n );\n }\n // Unsecured JWTs use \"none\" as the algorithm.\n const header = {\n alg: 'none',\n type: 'JWT'\n };\n\n const project = projectId || 'demo-project';\n const iat = token.iat || 0;\n const sub = token.sub || token.user_id;\n if (!sub) {\n throw new Error(\"mockUserToken must contain 'sub' or 'user_id' field!\");\n }\n\n const payload: FirebaseIdToken = {\n // Set all required fields to decent defaults\n iss: `https://securetoken.google.com/${project}`,\n aud: project,\n iat,\n exp: iat + 3600,\n auth_time: iat,\n sub,\n user_id: sub,\n firebase: {\n sign_in_provider: 'custom',\n identities: {}\n },\n\n // Override with user options\n ...token\n };\n\n // Unsecured JWTs use the empty string as a signature.\n const signature = '';\n return [\n base64urlEncodeWithoutPadding(JSON.stringify(header)),\n base64urlEncodeWithoutPadding(JSON.stringify(payload)),\n signature\n ].join('.');\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { CONSTANTS } from './constants';\nimport { getDefaults } from './defaults';\n\n/**\n * Returns navigator.userAgent string or '' if it's not defined.\n * @return user agent string\n */\nexport function getUA(): string {\n if (\n typeof navigator !== 'undefined' &&\n typeof navigator['userAgent'] === 'string'\n ) {\n return navigator['userAgent'];\n } else {\n return '';\n }\n}\n\n/**\n * Detect Cordova / PhoneGap / Ionic frameworks on a mobile device.\n *\n * Deliberately does not rely on checking `file://` URLs (as this fails PhoneGap\n * in the Ripple emulator) nor Cordova `onDeviceReady`, which would normally\n * wait for a callback.\n */\nexport function isMobileCordova(): boolean {\n return (\n typeof window !== 'undefined' &&\n // @ts-ignore Setting up an broadly applicable index signature for Window\n // just to deal with this case would probably be a bad idea.\n !!(window['cordova'] || window['phonegap'] || window['PhoneGap']) &&\n /ios|iphone|ipod|ipad|android|blackberry|iemobile/i.test(getUA())\n );\n}\n\n/**\n * Detect Node.js.\n *\n * @return true if Node.js environment is detected or specified.\n */\n// Node detection logic from: https://github.com/iliakan/detect-node/\nexport function isNode(): boolean {\n const forceEnvironment = getDefaults()?.forceEnvironment;\n if (forceEnvironment === 'node') {\n return true;\n } else if (forceEnvironment === 'browser') {\n return false;\n }\n\n try {\n return (\n Object.prototype.toString.call(global.process) === '[object process]'\n );\n } catch (e) {\n return false;\n }\n}\n\n/**\n * Detect Browser Environment\n */\nexport function isBrowser(): boolean {\n return typeof self === 'object' && self.self === self;\n}\n\n/**\n * Detect browser extensions (Chrome and Firefox at least).\n */\ninterface BrowserRuntime {\n id?: unknown;\n}\ndeclare const chrome: { runtime?: BrowserRuntime };\ndeclare const browser: { runtime?: BrowserRuntime };\nexport function 
isBrowserExtension(): boolean {\n const runtime =\n typeof chrome === 'object'\n ? chrome.runtime\n : typeof browser === 'object'\n ? browser.runtime\n : undefined;\n return typeof runtime === 'object' && runtime.id !== undefined;\n}\n\n/**\n * Detect React Native.\n *\n * @return true if ReactNative environment is detected.\n */\nexport function isReactNative(): boolean {\n return (\n typeof navigator === 'object' && navigator['product'] === 'ReactNative'\n );\n}\n\n/** Detects Electron apps. */\nexport function isElectron(): boolean {\n return getUA().indexOf('Electron/') >= 0;\n}\n\n/** Detects Internet Explorer. */\nexport function isIE(): boolean {\n const ua = getUA();\n return ua.indexOf('MSIE ') >= 0 || ua.indexOf('Trident/') >= 0;\n}\n\n/** Detects Universal Windows Platform apps. */\nexport function isUWP(): boolean {\n return getUA().indexOf('MSAppHost/') >= 0;\n}\n\n/**\n * Detect whether the current SDK build is the Node version.\n *\n * @return true if it's the Node SDK build.\n */\nexport function isNodeSdk(): boolean {\n return CONSTANTS.NODE_CLIENT === true || CONSTANTS.NODE_ADMIN === true;\n}\n\n/** Returns true if we are running in Safari. */\nexport function isSafari(): boolean {\n return (\n !isNode() &&\n !!navigator.userAgent &&\n navigator.userAgent.includes('Safari') &&\n !navigator.userAgent.includes('Chrome')\n );\n}\n\n/**\n * This method checks if indexedDB is supported by current browser/service worker context\n * @return true if indexedDB is supported by current browser/service worker context\n */\nexport function isIndexedDBAvailable(): boolean {\n try {\n return typeof indexedDB === 'object';\n } catch (e) {\n return false;\n }\n}\n\n/**\n * This method validates browser/sw context for indexedDB by opening a dummy indexedDB database and reject\n * if errors occur during the database open operation.\n *\n * @throws exception if current browser/sw context can't run idb.open (ex: Safari iframe, Firefox\n * private browsing)\n */\nexport function validateIndexedDBOpenable(): Promise {\n return new Promise((resolve, reject) => {\n try {\n let preExist: boolean = true;\n const DB_CHECK_NAME =\n 'validate-browser-context-for-indexeddb-analytics-module';\n const request = self.indexedDB.open(DB_CHECK_NAME);\n request.onsuccess = () => {\n request.result.close();\n // delete database only when it doesn't pre-exist\n if (!preExist) {\n self.indexedDB.deleteDatabase(DB_CHECK_NAME);\n }\n resolve(true);\n };\n request.onupgradeneeded = () => {\n preExist = false;\n };\n\n request.onerror = () => {\n reject(request.error?.message || '');\n };\n } catch (error) {\n reject(error);\n }\n });\n}\n\n/**\n *\n * This method checks whether cookie is enabled within current browser\n * @return true if cookie is enabled within current browser\n */\nexport function areCookiesEnabled(): boolean {\n if (typeof navigator === 'undefined' || !navigator.cookieEnabled) {\n return false;\n }\n return true;\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * 
limitations under the License.\n */\n/**\n * @fileoverview Standardized Firebase Error.\n *\n * Usage:\n *\n * // Typescript string literals for type-safe codes\n * type Err =\n * 'unknown' |\n * 'object-not-found'\n * ;\n *\n * // Closure enum for type-safe error codes\n * // at-enum {string}\n * var Err = {\n * UNKNOWN: 'unknown',\n * OBJECT_NOT_FOUND: 'object-not-found',\n * }\n *\n * let errors: Map = {\n * 'generic-error': \"Unknown error\",\n * 'file-not-found': \"Could not find file: {$file}\",\n * };\n *\n * // Type-safe function - must pass a valid error code as param.\n * let error = new ErrorFactory('service', 'Service', errors);\n *\n * ...\n * throw error.create(Err.GENERIC);\n * ...\n * throw error.create(Err.FILE_NOT_FOUND, {'file': fileName});\n * ...\n * // Service: Could not file file: foo.txt (service/file-not-found).\n *\n * catch (e) {\n * assert(e.message === \"Could not find file: foo.txt.\");\n * if ((e as FirebaseError)?.code === 'service/file-not-found') {\n * console.log(\"Could not read file: \" + e['file']);\n * }\n * }\n */\n\nexport type ErrorMap = {\n readonly [K in ErrorCode]: string;\n};\n\nconst ERROR_NAME = 'FirebaseError';\n\nexport interface StringLike {\n toString(): string;\n}\n\nexport interface ErrorData {\n [key: string]: unknown;\n}\n\n// Based on code from:\n// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Error#Custom_Error_Types\nexport class FirebaseError extends Error {\n /** The custom name for all FirebaseErrors. */\n readonly name: string = ERROR_NAME;\n\n constructor(\n /** The error code for this error. */\n readonly code: string,\n message: string,\n /** Custom data for this error. */\n public customData?: Record\n ) {\n super(message);\n\n // Fix For ES5\n // https://github.com/Microsoft/TypeScript-wiki/blob/master/Breaking-Changes.md#extending-built-ins-like-error-array-and-map-may-no-longer-work\n Object.setPrototypeOf(this, FirebaseError.prototype);\n\n // Maintains proper stack trace for where our error was thrown.\n // Only available on V8.\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, ErrorFactory.prototype.create);\n }\n }\n}\n\nexport class ErrorFactory<\n ErrorCode extends string,\n ErrorParams extends { readonly [K in ErrorCode]?: ErrorData } = {}\n> {\n constructor(\n private readonly service: string,\n private readonly serviceName: string,\n private readonly errors: ErrorMap\n ) {}\n\n create(\n code: K,\n ...data: K extends keyof ErrorParams ? [ErrorParams[K]] : []\n ): FirebaseError {\n const customData = (data[0] as ErrorData) || {};\n const fullCode = `${this.service}/${code}`;\n const template = this.errors[code];\n\n const message = template ? replaceTemplate(template, customData) : 'Error';\n // Service Name: Error message (service/code).\n const fullMessage = `${this.serviceName}: ${message} (${fullCode}).`;\n\n const error = new FirebaseError(fullCode, fullMessage, customData);\n\n return error;\n }\n}\n\nfunction replaceTemplate(template: string, data: ErrorData): string {\n return template.replace(PATTERN, (_, key) => {\n const value = data[key];\n return value != null ? 
String(value) : `<${key}?>`;\n });\n}\n\nconst PATTERN = /\\{\\$([^}]+)}/g;\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n/**\n * Evaluates a JSON string into a javascript object.\n *\n * @param {string} str A string containing JSON.\n * @return {*} The javascript object representing the specified JSON.\n */\nexport function jsonEval(str: string): unknown {\n return JSON.parse(str);\n}\n\n/**\n * Returns JSON representing a javascript object.\n * @param {*} data Javascript object to be stringified.\n * @return {string} The JSON contents of the object.\n */\nexport function stringify(data: unknown): string {\n return JSON.stringify(data);\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { base64Decode } from './crypt';\nimport { jsonEval } from './json';\n\ninterface Claims {\n [key: string]: {};\n}\n\ninterface DecodedToken {\n header: object;\n claims: Claims;\n data: object;\n signature: string;\n}\n\n/**\n * Decodes a Firebase auth. token into constituent parts.\n *\n * Notes:\n * - May return with invalid / incomplete claims if there's no native base64 decoding support.\n * - Doesn't check if the token is actually valid.\n */\nexport const decode = function (token: string): DecodedToken {\n let header = {},\n claims: Claims = {},\n data = {},\n signature = '';\n\n try {\n const parts = token.split('.');\n header = jsonEval(base64Decode(parts[0]) || '') as object;\n claims = jsonEval(base64Decode(parts[1]) || '') as Claims;\n signature = parts[2];\n data = claims['d'] || {};\n delete claims['d'];\n } catch (e) {}\n\n return {\n header,\n claims,\n data,\n signature\n };\n};\n\ninterface DecodedToken {\n header: object;\n claims: Claims;\n data: object;\n signature: string;\n}\n\n/**\n * Decodes a Firebase auth. token and checks the validity of its time-based claims. 
Will return true if the\n * token is within the time window authorized by the 'nbf' (not-before) and 'iat' (issued-at) claims.\n *\n * Notes:\n * - May return a false negative if there's no native base64 decoding support.\n * - Doesn't check if the token is actually valid.\n */\nexport const isValidTimestamp = function (token: string): boolean {\n const claims: Claims = decode(token).claims;\n const now: number = Math.floor(new Date().getTime() / 1000);\n let validSince: number = 0,\n validUntil: number = 0;\n\n if (typeof claims === 'object') {\n if (claims.hasOwnProperty('nbf')) {\n validSince = claims['nbf'] as number;\n } else if (claims.hasOwnProperty('iat')) {\n validSince = claims['iat'] as number;\n }\n\n if (claims.hasOwnProperty('exp')) {\n validUntil = claims['exp'] as number;\n } else {\n // token will expire after 24h by default\n validUntil = validSince + 86400;\n }\n }\n\n return (\n !!now &&\n !!validSince &&\n !!validUntil &&\n now >= validSince &&\n now <= validUntil\n );\n};\n\n/**\n * Decodes a Firebase auth. token and returns its issued at time if valid, null otherwise.\n *\n * Notes:\n * - May return null if there's no native base64 decoding support.\n * - Doesn't check if the token is actually valid.\n */\nexport const issuedAtTime = function (token: string): number | null {\n const claims: Claims = decode(token).claims;\n if (typeof claims === 'object' && claims.hasOwnProperty('iat')) {\n return claims['iat'] as number;\n }\n return null;\n};\n\n/**\n * Decodes a Firebase auth. token and checks the validity of its format. Expects a valid issued-at time.\n *\n * Notes:\n * - May return a false negative if there's no native base64 decoding support.\n * - Doesn't check if the token is actually valid.\n */\nexport const isValidFormat = function (token: string): boolean {\n const decoded = decode(token),\n claims = decoded.claims;\n\n return !!claims && typeof claims === 'object' && claims.hasOwnProperty('iat');\n};\n\n/**\n * Attempts to peer into an auth token and determine if it's an admin auth token by looking at the claims portion.\n *\n * Notes:\n * - May return a false negative if there's no native base64 decoding support.\n * - Doesn't check if the token is actually valid.\n */\nexport const isAdmin = function (token: string): boolean {\n const claims: Claims = decode(token).claims;\n return typeof claims === 'object' && claims['admin'] === true;\n};\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport function contains(obj: T, key: string): boolean {\n return Object.prototype.hasOwnProperty.call(obj, key);\n}\n\nexport function safeGet(\n obj: T,\n key: K\n): T[K] | undefined {\n if (Object.prototype.hasOwnProperty.call(obj, key)) {\n return obj[key];\n } else {\n return undefined;\n }\n}\n\nexport function isEmpty(obj: object): obj is {} {\n for (const key in obj) {\n if (Object.prototype.hasOwnProperty.call(obj, key)) {\n return false;\n }\n }\n return true;\n}\n\nexport function map(\n 
obj: { [key in K]: V },\n fn: (value: V, key: K, obj: { [key in K]: V }) => U,\n contextObj?: unknown\n): { [key in K]: U } {\n const res: Partial<{ [key in K]: U }> = {};\n for (const key in obj) {\n if (Object.prototype.hasOwnProperty.call(obj, key)) {\n res[key] = fn.call(contextObj, obj[key], key, obj);\n }\n }\n return res as { [key in K]: U };\n}\n\n/**\n * Deep equal two objects. Support Arrays and Objects.\n */\nexport function deepEqual(a: object, b: object): boolean {\n if (a === b) {\n return true;\n }\n\n const aKeys = Object.keys(a);\n const bKeys = Object.keys(b);\n for (const k of aKeys) {\n if (!bKeys.includes(k)) {\n return false;\n }\n\n const aProp = (a as Record)[k];\n const bProp = (b as Record)[k];\n if (isObject(aProp) && isObject(bProp)) {\n if (!deepEqual(aProp, bProp)) {\n return false;\n }\n } else if (aProp !== bProp) {\n return false;\n }\n }\n\n for (const k of bKeys) {\n if (!aKeys.includes(k)) {\n return false;\n }\n }\n return true;\n}\n\nfunction isObject(thing: unknown): thing is object {\n return thing !== null && typeof thing === 'object';\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n/**\n * Returns a querystring-formatted string (e.g. &arg=val&arg2=val2) from a\n * params object (e.g. {arg: 'val', arg2: 'val2'})\n * Note: You must prepend it with ? when adding it to a URL.\n */\nexport function querystring(querystringParams: {\n [key: string]: string | number;\n}): string {\n const params = [];\n for (const [key, value] of Object.entries(querystringParams)) {\n if (Array.isArray(value)) {\n value.forEach(arrayVal => {\n params.push(\n encodeURIComponent(key) + '=' + encodeURIComponent(arrayVal)\n );\n });\n } else {\n params.push(encodeURIComponent(key) + '=' + encodeURIComponent(value));\n }\n }\n return params.length ? '&' + params.join('&') : '';\n}\n\n/**\n * Decodes a querystring (e.g. ?arg=val&arg2=val2) into a params object\n * (e.g. {arg: 'val', arg2: 'val2'})\n */\nexport function querystringDecode(querystring: string): Record {\n const obj: Record = {};\n const tokens = querystring.replace(/^\\?/, '').split('&');\n\n tokens.forEach(token => {\n if (token) {\n const [key, value] = token.split('=');\n obj[decodeURIComponent(key)] = decodeURIComponent(value);\n }\n });\n return obj;\n}\n\n/**\n * Extract the query string part of a URL, including the leading question mark (if present).\n */\nexport function extractQuerystring(url: string): string {\n const queryStart = url.indexOf('?');\n if (!queryStart) {\n return '';\n }\n const fragmentStart = url.indexOf('#', queryStart);\n return url.substring(\n queryStart,\n fragmentStart > 0 ? 
fragmentStart : undefined\n );\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n/**\n * @fileoverview SHA-1 cryptographic hash.\n * Variable names follow the notation in FIPS PUB 180-3:\n * http://csrc.nist.gov/publications/fips/fips180-3/fips180-3_final.pdf.\n *\n * Usage:\n * var sha1 = new sha1();\n * sha1.update(bytes);\n * var hash = sha1.digest();\n *\n * Performance:\n * Chrome 23: ~400 Mbit/s\n * Firefox 16: ~250 Mbit/s\n *\n */\n\n/**\n * SHA-1 cryptographic hash constructor.\n *\n * The properties declared here are discussed in the above algorithm document.\n * @constructor\n * @final\n * @struct\n */\nexport class Sha1 {\n /**\n * Holds the previous values of accumulated variables a-e in the compress_\n * function.\n * @private\n */\n private chain_: number[] = [];\n\n /**\n * A buffer holding the partially computed hash result.\n * @private\n */\n private buf_: number[] = [];\n\n /**\n * An array of 80 bytes, each a part of the message to be hashed. Referred to\n * as the message schedule in the docs.\n * @private\n */\n private W_: number[] = [];\n\n /**\n * Contains data needed to pad messages less than 64 bytes.\n * @private\n */\n private pad_: number[] = [];\n\n /**\n * @private {number}\n */\n private inbuf_: number = 0;\n\n /**\n * @private {number}\n */\n private total_: number = 0;\n\n blockSize: number;\n\n constructor() {\n this.blockSize = 512 / 8;\n\n this.pad_[0] = 128;\n for (let i = 1; i < this.blockSize; ++i) {\n this.pad_[i] = 0;\n }\n\n this.reset();\n }\n\n reset(): void {\n this.chain_[0] = 0x67452301;\n this.chain_[1] = 0xefcdab89;\n this.chain_[2] = 0x98badcfe;\n this.chain_[3] = 0x10325476;\n this.chain_[4] = 0xc3d2e1f0;\n\n this.inbuf_ = 0;\n this.total_ = 0;\n }\n\n /**\n * Internal compress helper function.\n * @param buf Block to compress.\n * @param offset Offset of the block in the buffer.\n * @private\n */\n compress_(buf: number[] | Uint8Array | string, offset?: number): void {\n if (!offset) {\n offset = 0;\n }\n\n const W = this.W_;\n\n // get 16 big endian words\n if (typeof buf === 'string') {\n for (let i = 0; i < 16; i++) {\n // TODO(user): [bug 8140122] Recent versions of Safari for Mac OS and iOS\n // have a bug that turns the post-increment ++ operator into pre-increment\n // during JIT compilation. We have code that depends heavily on SHA-1 for\n // correctness and which is affected by this bug, so I've removed all uses\n // of post-increment ++ in which the result value is used. 
We can revert\n // this change once the Safari bug\n // (https://bugs.webkit.org/show_bug.cgi?id=109036) has been fixed and\n // most clients have been updated.\n W[i] =\n (buf.charCodeAt(offset) << 24) |\n (buf.charCodeAt(offset + 1) << 16) |\n (buf.charCodeAt(offset + 2) << 8) |\n buf.charCodeAt(offset + 3);\n offset += 4;\n }\n } else {\n for (let i = 0; i < 16; i++) {\n W[i] =\n (buf[offset] << 24) |\n (buf[offset + 1] << 16) |\n (buf[offset + 2] << 8) |\n buf[offset + 3];\n offset += 4;\n }\n }\n\n // expand to 80 words\n for (let i = 16; i < 80; i++) {\n const t = W[i - 3] ^ W[i - 8] ^ W[i - 14] ^ W[i - 16];\n W[i] = ((t << 1) | (t >>> 31)) & 0xffffffff;\n }\n\n let a = this.chain_[0];\n let b = this.chain_[1];\n let c = this.chain_[2];\n let d = this.chain_[3];\n let e = this.chain_[4];\n let f, k;\n\n // TODO(user): Try to unroll this loop to speed up the computation.\n for (let i = 0; i < 80; i++) {\n if (i < 40) {\n if (i < 20) {\n f = d ^ (b & (c ^ d));\n k = 0x5a827999;\n } else {\n f = b ^ c ^ d;\n k = 0x6ed9eba1;\n }\n } else {\n if (i < 60) {\n f = (b & c) | (d & (b | c));\n k = 0x8f1bbcdc;\n } else {\n f = b ^ c ^ d;\n k = 0xca62c1d6;\n }\n }\n\n const t = (((a << 5) | (a >>> 27)) + f + e + k + W[i]) & 0xffffffff;\n e = d;\n d = c;\n c = ((b << 30) | (b >>> 2)) & 0xffffffff;\n b = a;\n a = t;\n }\n\n this.chain_[0] = (this.chain_[0] + a) & 0xffffffff;\n this.chain_[1] = (this.chain_[1] + b) & 0xffffffff;\n this.chain_[2] = (this.chain_[2] + c) & 0xffffffff;\n this.chain_[3] = (this.chain_[3] + d) & 0xffffffff;\n this.chain_[4] = (this.chain_[4] + e) & 0xffffffff;\n }\n\n update(bytes?: number[] | Uint8Array | string, length?: number): void {\n // TODO(johnlenz): tighten the function signature and remove this check\n if (bytes == null) {\n return;\n }\n\n if (length === undefined) {\n length = bytes.length;\n }\n\n const lengthMinusBlock = length - this.blockSize;\n let n = 0;\n // Using local instead of member variables gives ~5% speedup on Firefox 16.\n const buf = this.buf_;\n let inbuf = this.inbuf_;\n\n // The outer while loop should execute at most twice.\n while (n < length) {\n // When we have no data in the block to top up, we can directly process the\n // input buffer (assuming it contains sufficient data). 
This gives ~25%\n // speedup on Chrome 23 and ~15% speedup on Firefox 16, but requires that\n // the data is provided in large chunks (or in multiples of 64 bytes).\n if (inbuf === 0) {\n while (n <= lengthMinusBlock) {\n this.compress_(bytes, n);\n n += this.blockSize;\n }\n }\n\n if (typeof bytes === 'string') {\n while (n < length) {\n buf[inbuf] = bytes.charCodeAt(n);\n ++inbuf;\n ++n;\n if (inbuf === this.blockSize) {\n this.compress_(buf);\n inbuf = 0;\n // Jump to the outer loop so we use the full-block optimization.\n break;\n }\n }\n } else {\n while (n < length) {\n buf[inbuf] = bytes[n];\n ++inbuf;\n ++n;\n if (inbuf === this.blockSize) {\n this.compress_(buf);\n inbuf = 0;\n // Jump to the outer loop so we use the full-block optimization.\n break;\n }\n }\n }\n }\n\n this.inbuf_ = inbuf;\n this.total_ += length;\n }\n\n /** @override */\n digest(): number[] {\n const digest: number[] = [];\n let totalBits = this.total_ * 8;\n\n // Add pad 0x80 0x00*.\n if (this.inbuf_ < 56) {\n this.update(this.pad_, 56 - this.inbuf_);\n } else {\n this.update(this.pad_, this.blockSize - (this.inbuf_ - 56));\n }\n\n // Add # bits.\n for (let i = this.blockSize - 1; i >= 56; i--) {\n this.buf_[i] = totalBits & 255;\n totalBits /= 256; // Don't use bit-shifting here!\n }\n\n this.compress_(this.buf_);\n\n let n = 0;\n for (let i = 0; i < 5; i++) {\n for (let j = 24; j >= 0; j -= 8) {\n digest[n] = (this.chain_[i] >> j) & 255;\n ++n;\n }\n }\n return digest;\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nexport type NextFn = (value: T) => void;\nexport type ErrorFn = (error: Error) => void;\nexport type CompleteFn = () => void;\n\nexport interface Observer {\n // Called once for each value in a stream of values.\n next: NextFn;\n\n // A stream terminates by a single call to EITHER error() or complete().\n error: ErrorFn;\n\n // No events will be sent to next() once complete() is called.\n complete: CompleteFn;\n}\n\nexport type PartialObserver = Partial>;\n\n// TODO: Support also Unsubscribe.unsubscribe?\nexport type Unsubscribe = () => void;\n\n/**\n * The Subscribe interface has two forms - passing the inline function\n * callbacks, or a object interface with callback properties.\n */\nexport interface Subscribe {\n (next?: NextFn, error?: ErrorFn, complete?: CompleteFn): Unsubscribe;\n (observer: PartialObserver): Unsubscribe;\n}\n\nexport interface Observable {\n // Subscribe method\n subscribe: Subscribe;\n}\n\nexport type Executor = (observer: Observer) => void;\n\n/**\n * Helper to make a Subscribe function (just like Promise helps make a\n * Thenable).\n *\n * @param executor Function which can make calls to a single Observer\n * as a proxy.\n * @param onNoObservers Callback when count of Observers goes to zero.\n */\nexport function createSubscribe(\n executor: Executor,\n onNoObservers?: Executor\n): Subscribe {\n const proxy = new ObserverProxy(executor, onNoObservers);\n return 
proxy.subscribe.bind(proxy);\n}\n\n/**\n * Implement fan-out for any number of Observers attached via a subscribe\n * function.\n */\nclass ObserverProxy implements Observer {\n private observers: Array> | undefined = [];\n private unsubscribes: Unsubscribe[] = [];\n private onNoObservers: Executor | undefined;\n private observerCount = 0;\n // Micro-task scheduling by calling task.then().\n private task = Promise.resolve();\n private finalized = false;\n private finalError?: Error;\n\n /**\n * @param executor Function which can make calls to a single Observer\n * as a proxy.\n * @param onNoObservers Callback when count of Observers goes to zero.\n */\n constructor(executor: Executor, onNoObservers?: Executor) {\n this.onNoObservers = onNoObservers;\n // Call the executor asynchronously so subscribers that are called\n // synchronously after the creation of the subscribe function\n // can still receive the very first value generated in the executor.\n this.task\n .then(() => {\n executor(this);\n })\n .catch(e => {\n this.error(e);\n });\n }\n\n next(value: T): void {\n this.forEachObserver((observer: Observer) => {\n observer.next(value);\n });\n }\n\n error(error: Error): void {\n this.forEachObserver((observer: Observer) => {\n observer.error(error);\n });\n this.close(error);\n }\n\n complete(): void {\n this.forEachObserver((observer: Observer) => {\n observer.complete();\n });\n this.close();\n }\n\n /**\n * Subscribe function that can be used to add an Observer to the fan-out list.\n *\n * - We require that no event is sent to a subscriber sychronously to their\n * call to subscribe().\n */\n subscribe(\n nextOrObserver?: NextFn | PartialObserver,\n error?: ErrorFn,\n complete?: CompleteFn\n ): Unsubscribe {\n let observer: Observer;\n\n if (\n nextOrObserver === undefined &&\n error === undefined &&\n complete === undefined\n ) {\n throw new Error('Missing Observer.');\n }\n\n // Assemble an Observer object when passed as callback functions.\n if (\n implementsAnyMethods(nextOrObserver as { [key: string]: unknown }, [\n 'next',\n 'error',\n 'complete'\n ])\n ) {\n observer = nextOrObserver as Observer;\n } else {\n observer = {\n next: nextOrObserver as NextFn,\n error,\n complete\n } as Observer;\n }\n\n if (observer.next === undefined) {\n observer.next = noop as NextFn;\n }\n if (observer.error === undefined) {\n observer.error = noop as ErrorFn;\n }\n if (observer.complete === undefined) {\n observer.complete = noop as CompleteFn;\n }\n\n const unsub = this.unsubscribeOne.bind(this, this.observers!.length);\n\n // Attempt to subscribe to a terminated Observable - we\n // just respond to the Observer with the final error or complete\n // event.\n if (this.finalized) {\n // eslint-disable-next-line @typescript-eslint/no-floating-promises\n this.task.then(() => {\n try {\n if (this.finalError) {\n observer.error(this.finalError);\n } else {\n observer.complete();\n }\n } catch (e) {\n // nothing\n }\n return;\n });\n }\n\n this.observers!.push(observer as Observer);\n\n return unsub;\n }\n\n // Unsubscribe is synchronous - we guarantee that no events are sent to\n // any unsubscribed Observer.\n private unsubscribeOne(i: number): void {\n if (this.observers === undefined || this.observers[i] === undefined) {\n return;\n }\n\n delete this.observers[i];\n\n this.observerCount -= 1;\n if (this.observerCount === 0 && this.onNoObservers !== undefined) {\n this.onNoObservers(this);\n }\n }\n\n private forEachObserver(fn: (observer: Observer) => void): void {\n if (this.finalized) {\n // 
Already closed by previous event....just eat the additional values.\n return;\n }\n\n // Since sendOne calls asynchronously - there is no chance that\n // this.observers will become undefined.\n for (let i = 0; i < this.observers!.length; i++) {\n this.sendOne(i, fn);\n }\n }\n\n // Call the Observer via one of it's callback function. We are careful to\n // confirm that the observe has not been unsubscribed since this asynchronous\n // function had been queued.\n private sendOne(i: number, fn: (observer: Observer) => void): void {\n // Execute the callback asynchronously\n // eslint-disable-next-line @typescript-eslint/no-floating-promises\n this.task.then(() => {\n if (this.observers !== undefined && this.observers[i] !== undefined) {\n try {\n fn(this.observers[i]);\n } catch (e) {\n // Ignore exceptions raised in Observers or missing methods of an\n // Observer.\n // Log error to console. b/31404806\n if (typeof console !== 'undefined' && console.error) {\n console.error(e);\n }\n }\n }\n });\n }\n\n private close(err?: Error): void {\n if (this.finalized) {\n return;\n }\n this.finalized = true;\n if (err !== undefined) {\n this.finalError = err;\n }\n // Proxy is no longer needed - garbage collect references\n // eslint-disable-next-line @typescript-eslint/no-floating-promises\n this.task.then(() => {\n this.observers = undefined;\n this.onNoObservers = undefined;\n });\n }\n}\n\n/** Turn synchronous function into one called asynchronously. */\n// eslint-disable-next-line @typescript-eslint/ban-types\nexport function async(fn: Function, onError?: ErrorFn): Function {\n return (...args: unknown[]) => {\n Promise.resolve(true)\n .then(() => {\n fn(...args);\n })\n .catch((error: Error) => {\n if (onError) {\n onError(error);\n }\n });\n };\n}\n\n/**\n * Return true if the object passed in implements any of the named methods.\n */\nfunction implementsAnyMethods(\n obj: { [key: string]: unknown },\n methods: string[]\n): boolean {\n if (typeof obj !== 'object' || obj === null) {\n return false;\n }\n\n for (const method of methods) {\n if (method in obj && typeof obj[method] === 'function') {\n return true;\n }\n }\n\n return false;\n}\n\nfunction noop(): void {\n // do nothing\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n/**\n * Check to make sure the appropriate number of arguments are provided for a public function.\n * Throws an error if it fails.\n *\n * @param fnName The function name\n * @param minCount The minimum number of arguments to allow for the function call\n * @param maxCount The maximum number of argument to allow for the function call\n * @param argCount The actual number of arguments provided.\n */\nexport const validateArgCount = function (\n fnName: string,\n minCount: number,\n maxCount: number,\n argCount: number\n): void {\n let argError;\n if (argCount < minCount) {\n argError = 'at least ' + minCount;\n } else if (argCount > maxCount) {\n argError = maxCount === 0 ? 
'none' : 'no more than ' + maxCount;\n }\n if (argError) {\n const error =\n fnName +\n ' failed: Was called with ' +\n argCount +\n (argCount === 1 ? ' argument.' : ' arguments.') +\n ' Expects ' +\n argError +\n '.';\n throw new Error(error);\n }\n};\n\n/**\n * Generates a string to prefix an error message about failed argument validation\n *\n * @param fnName The function name\n * @param argName The name of the argument\n * @return The prefix to add to the error thrown for validation.\n */\nexport function errorPrefix(fnName: string, argName: string): string {\n return `${fnName} failed: ${argName} argument `;\n}\n\n/**\n * @param fnName\n * @param argumentNumber\n * @param namespace\n * @param optional\n */\nexport function validateNamespace(\n fnName: string,\n namespace: string,\n optional: boolean\n): void {\n if (optional && !namespace) {\n return;\n }\n if (typeof namespace !== 'string') {\n //TODO: I should do more validation here. We only allow certain chars in namespaces.\n throw new Error(\n errorPrefix(fnName, 'namespace') + 'must be a valid firebase namespace.'\n );\n }\n}\n\nexport function validateCallback(\n fnName: string,\n argumentName: string,\n // eslint-disable-next-line @typescript-eslint/ban-types\n callback: Function,\n optional: boolean\n): void {\n if (optional && !callback) {\n return;\n }\n if (typeof callback !== 'function') {\n throw new Error(\n errorPrefix(fnName, argumentName) + 'must be a valid function.'\n );\n }\n}\n\nexport function validateContextObject(\n fnName: string,\n argumentName: string,\n context: unknown,\n optional: boolean\n): void {\n if (optional && !context) {\n return;\n }\n if (typeof context !== 'object' || context === null) {\n throw new Error(\n errorPrefix(fnName, argumentName) + 'must be a valid context object.'\n );\n }\n}\n","/**\n * @license\n * Copyright 2022 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n/**\n * Copied from https://stackoverflow.com/a/2117523\n * Generates a new uuid.\n * @public\n */\nexport const uuidv4 = function (): string {\n return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, c => {\n const r = (Math.random() * 16) | 0,\n v = c === 'x' ? 
r : (r & 0x3) | 0x8;\n return v.toString(16);\n });\n};\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { assert } from './assert';\n\n// Code originally came from goog.crypt.stringToUtf8ByteArray, but for some reason they\n// automatically replaced '\\r\\n' with '\\n', and they didn't handle surrogate pairs,\n// so it's been modified.\n\n// Note that not all Unicode characters appear as single characters in JavaScript strings.\n// fromCharCode returns the UTF-16 encoding of a character - so some Unicode characters\n// use 2 characters in Javascript. All 4-byte UTF-8 characters begin with a first\n// character in the range 0xD800 - 0xDBFF (the first character of a so-called surrogate\n// pair).\n// See http://www.ecma-international.org/ecma-262/5.1/#sec-15.1.3\n\n/**\n * @param {string} str\n * @return {Array}\n */\nexport const stringToByteArray = function (str: string): number[] {\n const out: number[] = [];\n let p = 0;\n for (let i = 0; i < str.length; i++) {\n let c = str.charCodeAt(i);\n\n // Is this the lead surrogate in a surrogate pair?\n if (c >= 0xd800 && c <= 0xdbff) {\n const high = c - 0xd800; // the high 10 bits.\n i++;\n assert(i < str.length, 'Surrogate pair missing trail surrogate.');\n const low = str.charCodeAt(i) - 0xdc00; // the low 10 bits.\n c = 0x10000 + (high << 10) + low;\n }\n\n if (c < 128) {\n out[p++] = c;\n } else if (c < 2048) {\n out[p++] = (c >> 6) | 192;\n out[p++] = (c & 63) | 128;\n } else if (c < 65536) {\n out[p++] = (c >> 12) | 224;\n out[p++] = ((c >> 6) & 63) | 128;\n out[p++] = (c & 63) | 128;\n } else {\n out[p++] = (c >> 18) | 240;\n out[p++] = ((c >> 12) & 63) | 128;\n out[p++] = ((c >> 6) & 63) | 128;\n out[p++] = (c & 63) | 128;\n }\n }\n return out;\n};\n\n/**\n * Calculate length without actually converting; useful for doing cheaper validation.\n * @param {string} str\n * @return {number}\n */\nexport const stringLength = function (str: string): number {\n let p = 0;\n for (let i = 0; i < str.length; i++) {\n const c = str.charCodeAt(i);\n if (c < 128) {\n p++;\n } else if (c < 2048) {\n p += 2;\n } else if (c >= 0xd800 && c <= 0xdbff) {\n // Lead surrogate of a surrogate pair. 
The pair together will take 4 bytes to represent.\n p += 4;\n i++; // skip trail surrogate.\n } else {\n p += 3;\n }\n }\n return p;\n};\n","/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n/**\n * The amount of milliseconds to exponentially increase.\n */\nconst DEFAULT_INTERVAL_MILLIS = 1000;\n\n/**\n * The factor to backoff by.\n * Should be a number greater than 1.\n */\nconst DEFAULT_BACKOFF_FACTOR = 2;\n\n/**\n * The maximum milliseconds to increase to.\n *\n *
Visible for testing\n */\nexport const MAX_VALUE_MILLIS = 4 * 60 * 60 * 1000; // Four hours, like iOS and Android.\n\n/**\n * The percentage of backoff time to randomize by.\n * See\n * http://go/safe-client-behavior#step-1-determine-the-appropriate-retry-interval-to-handle-spike-traffic\n * for context.\n *\n *
Visible for testing\n */\nexport const RANDOM_FACTOR = 0.5;\n\n/**\n * Based on the backoff method from\n * https://github.com/google/closure-library/blob/master/closure/goog/math/exponentialbackoff.js.\n * Extracted here so we don't need to pass metadata and a stateful ExponentialBackoff object around.\n */\nexport function calculateBackoffMillis(\n backoffCount: number,\n intervalMillis: number = DEFAULT_INTERVAL_MILLIS,\n backoffFactor: number = DEFAULT_BACKOFF_FACTOR\n): number {\n // Calculates an exponentially increasing value.\n // Deviation: calculates value from count and a constant interval, so we only need to save value\n // and count to restore state.\n const currBaseValue = intervalMillis * Math.pow(backoffFactor, backoffCount);\n\n // A random \"fuzz\" to avoid waves of retries.\n // Deviation: randomFactor is required.\n const randomWait = Math.round(\n // A fraction of the backoff value to add/subtract.\n // Deviation: changes multiplication order to improve readability.\n RANDOM_FACTOR *\n currBaseValue *\n // A random float (rounded to int by Math.round above) in the range [-1, 1]. Determines\n // if we add or subtract.\n (Math.random() - 0.5) *\n 2\n );\n\n // Limits backoff to max to avoid effectively permanent backoff.\n return Math.min(MAX_VALUE_MILLIS, currBaseValue + randomWait);\n}\n","/**\n * @license\n * Copyright 2021 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport interface Compat {\n _delegate: T;\n}\n\nexport function getModularInstance(\n service: Compat | ExpService\n): ExpService {\n if (service && (service as Compat)._delegate) {\n return (service as Compat)._delegate;\n } else {\n return service as ExpService;\n }\n}\n","/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport {\n InstantiationMode,\n InstanceFactory,\n ComponentType,\n Dictionary,\n Name,\n onInstanceCreatedCallback\n} from './types';\n\n/**\n * Component for service name T, e.g. `auth`, `auth-internal`\n */\nexport class Component {\n multipleInstances = false;\n /**\n * Properties to be added to the service namespace\n */\n serviceProps: Dictionary = {};\n\n instantiationMode = InstantiationMode.LAZY;\n\n onInstanceCreated: onInstanceCreatedCallback | null = null;\n\n /**\n *\n * @param name The public service name, e.g. 
app, auth, firestore, database\n * @param instanceFactory Service factory responsible for creating the public interface\n * @param type whether the service provided by the component is public or private\n */\n constructor(\n readonly name: T,\n readonly instanceFactory: InstanceFactory,\n readonly type: ComponentType\n ) {}\n\n setInstantiationMode(mode: InstantiationMode): this {\n this.instantiationMode = mode;\n return this;\n }\n\n setMultipleInstances(multipleInstances: boolean): this {\n this.multipleInstances = multipleInstances;\n return this;\n }\n\n setServiceProps(props: Dictionary): this {\n this.serviceProps = props;\n return this;\n }\n\n setInstanceCreatedCallback(callback: onInstanceCreatedCallback): this {\n this.onInstanceCreated = callback;\n return this;\n }\n}\n","/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport const DEFAULT_ENTRY_NAME = '[DEFAULT]';\n","/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Deferred } from '@firebase/util';\nimport { ComponentContainer } from './component_container';\nimport { DEFAULT_ENTRY_NAME } from './constants';\nimport {\n InitializeOptions,\n InstantiationMode,\n Name,\n NameServiceMapping,\n OnInitCallBack\n} from './types';\nimport { Component } from './component';\n\n/**\n * Provider for instance for service name T, e.g. 
'auth', 'auth-internal'\n * NameServiceMapping[T] is an alias for the type of the instance\n */\nexport class Provider {\n private component: Component | null = null;\n private readonly instances: Map = new Map();\n private readonly instancesDeferred: Map<\n string,\n Deferred\n > = new Map();\n private readonly instancesOptions: Map> =\n new Map();\n private onInitCallbacks: Map>> = new Map();\n\n constructor(\n private readonly name: T,\n private readonly container: ComponentContainer\n ) {}\n\n /**\n * @param identifier A provider can provide mulitple instances of a service\n * if this.component.multipleInstances is true.\n */\n get(identifier?: string): Promise {\n // if multipleInstances is not supported, use the default name\n const normalizedIdentifier = this.normalizeInstanceIdentifier(identifier);\n\n if (!this.instancesDeferred.has(normalizedIdentifier)) {\n const deferred = new Deferred();\n this.instancesDeferred.set(normalizedIdentifier, deferred);\n\n if (\n this.isInitialized(normalizedIdentifier) ||\n this.shouldAutoInitialize()\n ) {\n // initialize the service if it can be auto-initialized\n try {\n const instance = this.getOrInitializeService({\n instanceIdentifier: normalizedIdentifier\n });\n if (instance) {\n deferred.resolve(instance);\n }\n } catch (e) {\n // when the instance factory throws an exception during get(), it should not cause\n // a fatal error. We just return the unresolved promise in this case.\n }\n }\n }\n\n return this.instancesDeferred.get(normalizedIdentifier)!.promise;\n }\n\n /**\n *\n * @param options.identifier A provider can provide mulitple instances of a service\n * if this.component.multipleInstances is true.\n * @param options.optional If optional is false or not provided, the method throws an error when\n * the service is not immediately available.\n * If optional is true, the method returns null if the service is not immediately available.\n */\n getImmediate(options: {\n identifier?: string;\n optional: true;\n }): NameServiceMapping[T] | null;\n getImmediate(options?: {\n identifier?: string;\n optional?: false;\n }): NameServiceMapping[T];\n getImmediate(options?: {\n identifier?: string;\n optional?: boolean;\n }): NameServiceMapping[T] | null {\n // if multipleInstances is not supported, use the default name\n const normalizedIdentifier = this.normalizeInstanceIdentifier(\n options?.identifier\n );\n const optional = options?.optional ?? 
false;\n\n if (\n this.isInitialized(normalizedIdentifier) ||\n this.shouldAutoInitialize()\n ) {\n try {\n return this.getOrInitializeService({\n instanceIdentifier: normalizedIdentifier\n });\n } catch (e) {\n if (optional) {\n return null;\n } else {\n throw e;\n }\n }\n } else {\n // In case a component is not initialized and should/can not be auto-initialized at the moment, return null if the optional flag is set, or throw\n if (optional) {\n return null;\n } else {\n throw Error(`Service ${this.name} is not available`);\n }\n }\n }\n\n getComponent(): Component | null {\n return this.component;\n }\n\n setComponent(component: Component): void {\n if (component.name !== this.name) {\n throw Error(\n `Mismatching Component ${component.name} for Provider ${this.name}.`\n );\n }\n\n if (this.component) {\n throw Error(`Component for ${this.name} has already been provided`);\n }\n\n this.component = component;\n\n // return early without attempting to initialize the component if the component requires explicit initialization (calling `Provider.initialize()`)\n if (!this.shouldAutoInitialize()) {\n return;\n }\n\n // if the service is eager, initialize the default instance\n if (isComponentEager(component)) {\n try {\n this.getOrInitializeService({ instanceIdentifier: DEFAULT_ENTRY_NAME });\n } catch (e) {\n // when the instance factory for an eager Component throws an exception during the eager\n // initialization, it should not cause a fatal error.\n // TODO: Investigate if we need to make it configurable, because some component may want to cause\n // a fatal error in this case?\n }\n }\n\n // Create service instances for the pending promises and resolve them\n // NOTE: if this.multipleInstances is false, only the default instance will be created\n // and all promises with resolve with it regardless of the identifier.\n for (const [\n instanceIdentifier,\n instanceDeferred\n ] of this.instancesDeferred.entries()) {\n const normalizedIdentifier =\n this.normalizeInstanceIdentifier(instanceIdentifier);\n\n try {\n // `getOrInitializeService()` should always return a valid instance since a component is guaranteed. use ! to make typescript happy.\n const instance = this.getOrInitializeService({\n instanceIdentifier: normalizedIdentifier\n })!;\n instanceDeferred.resolve(instance);\n } catch (e) {\n // when the instance factory throws an exception, it should not cause\n // a fatal error. 
We just leave the promise unresolved.\n }\n }\n }\n\n clearInstance(identifier: string = DEFAULT_ENTRY_NAME): void {\n this.instancesDeferred.delete(identifier);\n this.instancesOptions.delete(identifier);\n this.instances.delete(identifier);\n }\n\n // app.delete() will call this method on every provider to delete the services\n // TODO: should we mark the provider as deleted?\n async delete(): Promise {\n const services = Array.from(this.instances.values());\n\n await Promise.all([\n ...services\n .filter(service => 'INTERNAL' in service) // legacy services\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n .map(service => (service as any).INTERNAL!.delete()),\n ...services\n .filter(service => '_delete' in service) // modularized services\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n .map(service => (service as any)._delete())\n ]);\n }\n\n isComponentSet(): boolean {\n return this.component != null;\n }\n\n isInitialized(identifier: string = DEFAULT_ENTRY_NAME): boolean {\n return this.instances.has(identifier);\n }\n\n getOptions(identifier: string = DEFAULT_ENTRY_NAME): Record {\n return this.instancesOptions.get(identifier) || {};\n }\n\n initialize(opts: InitializeOptions = {}): NameServiceMapping[T] {\n const { options = {} } = opts;\n const normalizedIdentifier = this.normalizeInstanceIdentifier(\n opts.instanceIdentifier\n );\n if (this.isInitialized(normalizedIdentifier)) {\n throw Error(\n `${this.name}(${normalizedIdentifier}) has already been initialized`\n );\n }\n\n if (!this.isComponentSet()) {\n throw Error(`Component ${this.name} has not been registered yet`);\n }\n\n const instance = this.getOrInitializeService({\n instanceIdentifier: normalizedIdentifier,\n options\n })!;\n\n // resolve any pending promise waiting for the service instance\n for (const [\n instanceIdentifier,\n instanceDeferred\n ] of this.instancesDeferred.entries()) {\n const normalizedDeferredIdentifier =\n this.normalizeInstanceIdentifier(instanceIdentifier);\n if (normalizedIdentifier === normalizedDeferredIdentifier) {\n instanceDeferred.resolve(instance);\n }\n }\n\n return instance;\n }\n\n /**\n *\n * @param callback - a function that will be invoked after the provider has been initialized by calling provider.initialize().\n * The function is invoked SYNCHRONOUSLY, so it should not execute any longrunning tasks in order to not block the program.\n *\n * @param identifier An optional instance identifier\n * @returns a function to unregister the callback\n */\n onInit(callback: OnInitCallBack, identifier?: string): () => void {\n const normalizedIdentifier = this.normalizeInstanceIdentifier(identifier);\n const existingCallbacks =\n this.onInitCallbacks.get(normalizedIdentifier) ??\n new Set>();\n existingCallbacks.add(callback);\n this.onInitCallbacks.set(normalizedIdentifier, existingCallbacks);\n\n const existingInstance = this.instances.get(normalizedIdentifier);\n if (existingInstance) {\n callback(existingInstance, normalizedIdentifier);\n }\n\n return () => {\n existingCallbacks.delete(callback);\n };\n }\n\n /**\n * Invoke onInit callbacks synchronously\n * @param instance the service instance`\n */\n private invokeOnInitCallbacks(\n instance: NameServiceMapping[T],\n identifier: string\n ): void {\n const callbacks = this.onInitCallbacks.get(identifier);\n if (!callbacks) {\n return;\n }\n for (const callback of callbacks) {\n try {\n callback(instance, identifier);\n } catch {\n // ignore errors in the onInit callback\n }\n }\n }\n\n private 
getOrInitializeService({\n instanceIdentifier,\n options = {}\n }: {\n instanceIdentifier: string;\n options?: Record;\n }): NameServiceMapping[T] | null {\n let instance = this.instances.get(instanceIdentifier);\n if (!instance && this.component) {\n instance = this.component.instanceFactory(this.container, {\n instanceIdentifier: normalizeIdentifierForFactory(instanceIdentifier),\n options\n });\n this.instances.set(instanceIdentifier, instance);\n this.instancesOptions.set(instanceIdentifier, options);\n\n /**\n * Invoke onInit listeners.\n * Note this.component.onInstanceCreated is different, which is used by the component creator,\n * while onInit listeners are registered by consumers of the provider.\n */\n this.invokeOnInitCallbacks(instance, instanceIdentifier);\n\n /**\n * Order is important\n * onInstanceCreated() should be called after this.instances.set(instanceIdentifier, instance); which\n * makes `isInitialized()` return true.\n */\n if (this.component.onInstanceCreated) {\n try {\n this.component.onInstanceCreated(\n this.container,\n instanceIdentifier,\n instance\n );\n } catch {\n // ignore errors in the onInstanceCreatedCallback\n }\n }\n }\n\n return instance || null;\n }\n\n private normalizeInstanceIdentifier(\n identifier: string = DEFAULT_ENTRY_NAME\n ): string {\n if (this.component) {\n return this.component.multipleInstances ? identifier : DEFAULT_ENTRY_NAME;\n } else {\n return identifier; // assume multiple instances are supported before the component is provided.\n }\n }\n\n private shouldAutoInitialize(): boolean {\n return (\n !!this.component &&\n this.component.instantiationMode !== InstantiationMode.EXPLICIT\n );\n }\n}\n\n// undefined should be passed to the service factory for the default instance\nfunction normalizeIdentifierForFactory(identifier: string): string | undefined {\n return identifier === DEFAULT_ENTRY_NAME ? undefined : identifier;\n}\n\nfunction isComponentEager(component: Component): boolean {\n return component.instantiationMode === InstantiationMode.EAGER;\n}\n","/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Provider } from './provider';\nimport { Component } from './component';\nimport { Name } from './types';\n\n/**\n * ComponentContainer that provides Providers for service name T, e.g. `auth`, `auth-internal`\n */\nexport class ComponentContainer {\n private readonly providers = new Map>();\n\n constructor(private readonly name: string) {}\n\n /**\n *\n * @param component Component being added\n * @param overwrite When a component with the same name has already been registered,\n * if overwrite is true: overwrite the existing component with the new component and create a new\n * provider with the new component. 
It can be useful in tests where you want to use different mocks\n * for different tests.\n * if overwrite is false: throw an exception\n */\n addComponent(component: Component): void {\n const provider = this.getProvider(component.name);\n if (provider.isComponentSet()) {\n throw new Error(\n `Component ${component.name} has already been registered with ${this.name}`\n );\n }\n\n provider.setComponent(component);\n }\n\n addOrOverwriteComponent(component: Component): void {\n const provider = this.getProvider(component.name);\n if (provider.isComponentSet()) {\n // delete the existing provider from the container, so we can register the new component\n this.providers.delete(component.name);\n }\n\n this.addComponent(component);\n }\n\n /**\n * getProvider provides a type safe interface where it can only be called with a field name\n * present in NameServiceMapping interface.\n *\n * Firebase SDKs providing services should extend NameServiceMapping interface to register\n * themselves.\n */\n getProvider(name: T): Provider {\n if (this.providers.has(name)) {\n return this.providers.get(name) as unknown as Provider;\n }\n\n // create a Provider for a service that hasn't registered with Firebase\n const provider = new Provider(name, this);\n this.providers.set(name, provider as unknown as Provider);\n\n return provider as Provider;\n }\n\n getProviders(): Array> {\n return Array.from(this.providers.values());\n }\n}\n","const instanceOfAny = (object, constructors) => constructors.some((c) => object instanceof c);\n\nlet idbProxyableTypes;\nlet cursorAdvanceMethods;\n// This is a function to prevent it throwing up in node environments.\nfunction getIdbProxyableTypes() {\n return (idbProxyableTypes ||\n (idbProxyableTypes = [\n IDBDatabase,\n IDBObjectStore,\n IDBIndex,\n IDBCursor,\n IDBTransaction,\n ]));\n}\n// This is a function to prevent it throwing up in node environments.\nfunction getCursorAdvanceMethods() {\n return (cursorAdvanceMethods ||\n (cursorAdvanceMethods = [\n IDBCursor.prototype.advance,\n IDBCursor.prototype.continue,\n IDBCursor.prototype.continuePrimaryKey,\n ]));\n}\nconst cursorRequestMap = new WeakMap();\nconst transactionDoneMap = new WeakMap();\nconst transactionStoreNamesMap = new WeakMap();\nconst transformCache = new WeakMap();\nconst reverseTransformCache = new WeakMap();\nfunction promisifyRequest(request) {\n const promise = new Promise((resolve, reject) => {\n const unlisten = () => {\n request.removeEventListener('success', success);\n request.removeEventListener('error', error);\n };\n const success = () => {\n resolve(wrap(request.result));\n unlisten();\n };\n const error = () => {\n reject(request.error);\n unlisten();\n };\n request.addEventListener('success', success);\n request.addEventListener('error', error);\n });\n promise\n .then((value) => {\n // Since cursoring reuses the IDBRequest (*sigh*), we cache it for later retrieval\n // (see wrapFunction).\n if (value instanceof IDBCursor) {\n cursorRequestMap.set(value, request);\n }\n // Catching to avoid \"Uncaught Promise exceptions\"\n })\n .catch(() => { });\n // This mapping exists in reverseTransformCache but doesn't doesn't exist in transformCache. 
This\n // is because we create many promises from a single IDBRequest.\n reverseTransformCache.set(promise, request);\n return promise;\n}\nfunction cacheDonePromiseForTransaction(tx) {\n // Early bail if we've already created a done promise for this transaction.\n if (transactionDoneMap.has(tx))\n return;\n const done = new Promise((resolve, reject) => {\n const unlisten = () => {\n tx.removeEventListener('complete', complete);\n tx.removeEventListener('error', error);\n tx.removeEventListener('abort', error);\n };\n const complete = () => {\n resolve();\n unlisten();\n };\n const error = () => {\n reject(tx.error || new DOMException('AbortError', 'AbortError'));\n unlisten();\n };\n tx.addEventListener('complete', complete);\n tx.addEventListener('error', error);\n tx.addEventListener('abort', error);\n });\n // Cache it for later retrieval.\n transactionDoneMap.set(tx, done);\n}\nlet idbProxyTraps = {\n get(target, prop, receiver) {\n if (target instanceof IDBTransaction) {\n // Special handling for transaction.done.\n if (prop === 'done')\n return transactionDoneMap.get(target);\n // Polyfill for objectStoreNames because of Edge.\n if (prop === 'objectStoreNames') {\n return target.objectStoreNames || transactionStoreNamesMap.get(target);\n }\n // Make tx.store return the only store in the transaction, or undefined if there are many.\n if (prop === 'store') {\n return receiver.objectStoreNames[1]\n ? undefined\n : receiver.objectStore(receiver.objectStoreNames[0]);\n }\n }\n // Else transform whatever we get back.\n return wrap(target[prop]);\n },\n set(target, prop, value) {\n target[prop] = value;\n return true;\n },\n has(target, prop) {\n if (target instanceof IDBTransaction &&\n (prop === 'done' || prop === 'store')) {\n return true;\n }\n return prop in target;\n },\n};\nfunction replaceTraps(callback) {\n idbProxyTraps = callback(idbProxyTraps);\n}\nfunction wrapFunction(func) {\n // Due to expected object equality (which is enforced by the caching in `wrap`), we\n // only create one new func per func.\n // Edge doesn't support objectStoreNames (booo), so we polyfill it here.\n if (func === IDBDatabase.prototype.transaction &&\n !('objectStoreNames' in IDBTransaction.prototype)) {\n return function (storeNames, ...args) {\n const tx = func.call(unwrap(this), storeNames, ...args);\n transactionStoreNamesMap.set(tx, storeNames.sort ? storeNames.sort() : [storeNames]);\n return wrap(tx);\n };\n }\n // Cursor methods are special, as the behaviour is a little more different to standard IDB. In\n // IDB, you advance the cursor and wait for a new 'success' on the IDBRequest that gave you the\n // cursor. It's kinda like a promise that can resolve with many values. 
That doesn't make sense\n // with real promises, so each advance methods returns a new promise for the cursor object, or\n // undefined if the end of the cursor has been reached.\n if (getCursorAdvanceMethods().includes(func)) {\n return function (...args) {\n // Calling the original function with the proxy as 'this' causes ILLEGAL INVOCATION, so we use\n // the original object.\n func.apply(unwrap(this), args);\n return wrap(cursorRequestMap.get(this));\n };\n }\n return function (...args) {\n // Calling the original function with the proxy as 'this' causes ILLEGAL INVOCATION, so we use\n // the original object.\n return wrap(func.apply(unwrap(this), args));\n };\n}\nfunction transformCachableValue(value) {\n if (typeof value === 'function')\n return wrapFunction(value);\n // This doesn't return, it just creates a 'done' promise for the transaction,\n // which is later returned for transaction.done (see idbObjectHandler).\n if (value instanceof IDBTransaction)\n cacheDonePromiseForTransaction(value);\n if (instanceOfAny(value, getIdbProxyableTypes()))\n return new Proxy(value, idbProxyTraps);\n // Return the same value back if we're not going to transform it.\n return value;\n}\nfunction wrap(value) {\n // We sometimes generate multiple promises from a single IDBRequest (eg when cursoring), because\n // IDB is weird and a single IDBRequest can yield many responses, so these can't be cached.\n if (value instanceof IDBRequest)\n return promisifyRequest(value);\n // If we've already transformed this value before, reuse the transformed value.\n // This is faster, but it also provides object equality.\n if (transformCache.has(value))\n return transformCache.get(value);\n const newValue = transformCachableValue(value);\n // Not all types are transformed.\n // These may be primitive types, so they can't be WeakMap keys.\n if (newValue !== value) {\n transformCache.set(value, newValue);\n reverseTransformCache.set(newValue, value);\n }\n return newValue;\n}\nconst unwrap = (value) => reverseTransformCache.get(value);\n\nexport { reverseTransformCache as a, instanceOfAny as i, replaceTraps as r, unwrap as u, wrap as w };\n","/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n ComponentContainer,\n ComponentType,\n Provider,\n Name\n} from '@firebase/component';\nimport { PlatformLoggerService, VersionService } from './types';\n\nexport class PlatformLoggerServiceImpl implements PlatformLoggerService {\n constructor(private readonly container: ComponentContainer) {}\n // In initial implementation, this will be called by installations on\n // auth token refresh, and installations will send this string.\n getPlatformInfoString(): string {\n const providers = this.container.getProviders();\n // Loop through providers and get library/version pairs from any that are\n // version components.\n return providers\n .map(provider => {\n if (isVersionServiceProvider(provider)) {\n const service = provider.getImmediate() as 
VersionService;\n return `${service.library}/${service.version}`;\n } else {\n return null;\n }\n })\n .filter(logString => logString)\n .join(' ');\n }\n}\n/**\n *\n * @param provider check if this provider provides a VersionService\n *\n * NOTE: Using Provider<'app-version'> is a hack to indicate that the provider\n * provides VersionService. The provider is not necessarily a 'app-version'\n * provider.\n */\nfunction isVersionServiceProvider(provider: Provider): boolean {\n const component = provider.getComponent();\n return component?.type === ComponentType.VERSION;\n}\n","/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Logger } from '@firebase/logger';\n\nexport const logger = new Logger('@firebase/app');\n","/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Component, ComponentType } from '@firebase/component';\nimport { PlatformLoggerServiceImpl } from './platformLoggerService';\nimport { name, version } from '../package.json';\nimport { _registerComponent } from './internal';\nimport { registerVersion } from './api';\nimport { HeartbeatServiceImpl } from './heartbeatService';\n\nexport function registerCoreComponents(variant?: string): void {\n _registerComponent(\n new Component(\n 'platform-logger',\n container => new PlatformLoggerServiceImpl(container),\n ComponentType.PRIVATE\n )\n );\n _registerComponent(\n new Component(\n 'heartbeat',\n container => new HeartbeatServiceImpl(container),\n ComponentType.PRIVATE\n )\n );\n\n // Register `app` package.\n registerVersion(name, version, variant);\n // BUILD_TARGET will be replaced by values like esm5, esm2017, cjs5, etc during the compilation\n registerVersion(name, version, '__BUILD_TARGET__');\n // Register platform SDK identifier (no version).\n registerVersion('fire-js', '');\n}\n","/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the 
License.\n */\n\nimport { name as appName } from '../package.json';\nimport { name as appCompatName } from '../../app-compat/package.json';\nimport { name as analyticsCompatName } from '../../../packages/analytics-compat/package.json';\nimport { name as analyticsName } from '../../../packages/analytics/package.json';\nimport { name as appCheckCompatName } from '../../../packages/app-check-compat/package.json';\nimport { name as appCheckName } from '../../../packages/app-check/package.json';\nimport { name as authName } from '../../../packages/auth/package.json';\nimport { name as authCompatName } from '../../../packages/auth-compat/package.json';\nimport { name as databaseName } from '../../../packages/database/package.json';\nimport { name as databaseCompatName } from '../../../packages/database-compat/package.json';\nimport { name as functionsName } from '../../../packages/functions/package.json';\nimport { name as functionsCompatName } from '../../../packages/functions-compat/package.json';\nimport { name as installationsName } from '../../../packages/installations/package.json';\nimport { name as installationsCompatName } from '../../../packages/installations-compat/package.json';\nimport { name as messagingName } from '../../../packages/messaging/package.json';\nimport { name as messagingCompatName } from '../../../packages/messaging-compat/package.json';\nimport { name as performanceName } from '../../../packages/performance/package.json';\nimport { name as performanceCompatName } from '../../../packages/performance-compat/package.json';\nimport { name as remoteConfigName } from '../../../packages/remote-config/package.json';\nimport { name as remoteConfigCompatName } from '../../../packages/remote-config-compat/package.json';\nimport { name as storageName } from '../../../packages/storage/package.json';\nimport { name as storageCompatName } from '../../../packages/storage-compat/package.json';\nimport { name as firestoreName } from '../../../packages/firestore/package.json';\nimport { name as firestoreCompatName } from '../../../packages/firestore-compat/package.json';\nimport { name as packageName } from '../../../packages/firebase/package.json';\n\n/**\n * The default app name\n *\n * @internal\n */\nexport const DEFAULT_ENTRY_NAME = '[DEFAULT]';\n\nexport const PLATFORM_LOG_STRING = {\n [appName]: 'fire-core',\n [appCompatName]: 'fire-core-compat',\n [analyticsName]: 'fire-analytics',\n [analyticsCompatName]: 'fire-analytics-compat',\n [appCheckName]: 'fire-app-check',\n [appCheckCompatName]: 'fire-app-check-compat',\n [authName]: 'fire-auth',\n [authCompatName]: 'fire-auth-compat',\n [databaseName]: 'fire-rtdb',\n [databaseCompatName]: 'fire-rtdb-compat',\n [functionsName]: 'fire-fn',\n [functionsCompatName]: 'fire-fn-compat',\n [installationsName]: 'fire-iid',\n [installationsCompatName]: 'fire-iid-compat',\n [messagingName]: 'fire-fcm',\n [messagingCompatName]: 'fire-fcm-compat',\n [performanceName]: 'fire-perf',\n [performanceCompatName]: 'fire-perf-compat',\n [remoteConfigName]: 'fire-rc',\n [remoteConfigCompatName]: 'fire-rc-compat',\n [storageName]: 'fire-gcs',\n [storageCompatName]: 'fire-gcs-compat',\n [firestoreName]: 'fire-fst',\n [firestoreCompatName]: 'fire-fst-compat',\n 'fire-js': 'fire-js', // Platform identifier for JS SDK.\n [packageName]: 'fire-js-all'\n} as const;\n","/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { FirebaseApp } from './public-types';\nimport { Component, Provider, Name } from '@firebase/component';\nimport { logger } from './logger';\nimport { DEFAULT_ENTRY_NAME } from './constants';\nimport { FirebaseAppImpl } from './firebaseApp';\n\n/**\n * @internal\n */\nexport const _apps = new Map();\n\n/**\n * Registered components.\n *\n * @internal\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nexport const _components = new Map>();\n\n/**\n * @param component - the component being added to this app's container\n *\n * @internal\n */\nexport function _addComponent(\n app: FirebaseApp,\n component: Component\n): void {\n try {\n (app as FirebaseAppImpl).container.addComponent(component);\n } catch (e) {\n logger.debug(\n `Component ${component.name} failed to register with FirebaseApp ${app.name}`,\n e\n );\n }\n}\n\n/**\n *\n * @internal\n */\nexport function _addOrOverwriteComponent(\n app: FirebaseApp,\n component: Component\n): void {\n (app as FirebaseAppImpl).container.addOrOverwriteComponent(component);\n}\n\n/**\n *\n * @param component - the component to register\n * @returns whether or not the component is registered successfully\n *\n * @internal\n */\nexport function _registerComponent(\n component: Component\n): boolean {\n const componentName = component.name;\n if (_components.has(componentName)) {\n logger.debug(\n `There were multiple attempts to register component ${componentName}.`\n );\n\n return false;\n }\n\n _components.set(componentName, component);\n\n // add the component to existing app instances\n for (const app of _apps.values()) {\n _addComponent(app as FirebaseAppImpl, component);\n }\n\n return true;\n}\n\n/**\n *\n * @param app - FirebaseApp instance\n * @param name - service name\n *\n * @returns the provider for the service with the matching name\n *\n * @internal\n */\nexport function _getProvider(\n app: FirebaseApp,\n name: T\n): Provider {\n const heartbeatController = (app as FirebaseAppImpl).container\n .getProvider('heartbeat')\n .getImmediate({ optional: true });\n if (heartbeatController) {\n void heartbeatController.triggerHeartbeat();\n }\n return (app as FirebaseAppImpl).container.getProvider(name);\n}\n\n/**\n *\n * @param app - FirebaseApp instance\n * @param name - service name\n * @param instanceIdentifier - service instance identifier in case the service supports multiple instances\n *\n * @internal\n */\nexport function _removeServiceInstance(\n app: FirebaseApp,\n name: T,\n instanceIdentifier: string = DEFAULT_ENTRY_NAME\n): void {\n _getProvider(app, name).clearInstance(instanceIdentifier);\n}\n\n/**\n * Test only\n *\n * @internal\n */\nexport function _clearComponents(): void {\n _components.clear();\n}\n\n/**\n * Exported in order to be used in app-compat package\n */\nexport { DEFAULT_ENTRY_NAME as _DEFAULT_ENTRY_NAME };\n","/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * 
http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { ErrorFactory, ErrorMap } from '@firebase/util';\n\nexport const enum AppError {\n NO_APP = 'no-app',\n BAD_APP_NAME = 'bad-app-name',\n DUPLICATE_APP = 'duplicate-app',\n APP_DELETED = 'app-deleted',\n NO_OPTIONS = 'no-options',\n INVALID_APP_ARGUMENT = 'invalid-app-argument',\n INVALID_LOG_ARGUMENT = 'invalid-log-argument',\n IDB_OPEN = 'idb-open',\n IDB_GET = 'idb-get',\n IDB_WRITE = 'idb-set',\n IDB_DELETE = 'idb-delete'\n}\n\nconst ERRORS: ErrorMap = {\n [AppError.NO_APP]:\n \"No Firebase App '{$appName}' has been created - \" +\n 'call initializeApp() first',\n [AppError.BAD_APP_NAME]: \"Illegal App name: '{$appName}\",\n [AppError.DUPLICATE_APP]:\n \"Firebase App named '{$appName}' already exists with different options or config\",\n [AppError.APP_DELETED]: \"Firebase App named '{$appName}' already deleted\",\n [AppError.NO_OPTIONS]:\n 'Need to provide options, when not being deployed to hosting via source.',\n [AppError.INVALID_APP_ARGUMENT]:\n 'firebase.{$appName}() takes either no argument or a ' +\n 'Firebase App instance.',\n [AppError.INVALID_LOG_ARGUMENT]:\n 'First argument to `onLog` must be null or a function.',\n [AppError.IDB_OPEN]:\n 'Error thrown when opening IndexedDB. Original error: {$originalErrorMessage}.',\n [AppError.IDB_GET]:\n 'Error thrown when reading from IndexedDB. Original error: {$originalErrorMessage}.',\n [AppError.IDB_WRITE]:\n 'Error thrown when writing to IndexedDB. Original error: {$originalErrorMessage}.',\n [AppError.IDB_DELETE]:\n 'Error thrown when deleting from IndexedDB. 
Original error: {$originalErrorMessage}.'\n};\n\ninterface ErrorParams {\n [AppError.NO_APP]: { appName: string };\n [AppError.BAD_APP_NAME]: { appName: string };\n [AppError.DUPLICATE_APP]: { appName: string };\n [AppError.APP_DELETED]: { appName: string };\n [AppError.INVALID_APP_ARGUMENT]: { appName: string };\n [AppError.IDB_OPEN]: { originalErrorMessage?: string };\n [AppError.IDB_GET]: { originalErrorMessage?: string };\n [AppError.IDB_WRITE]: { originalErrorMessage?: string };\n [AppError.IDB_DELETE]: { originalErrorMessage?: string };\n}\n\nexport const ERROR_FACTORY = new ErrorFactory(\n 'app',\n 'Firebase',\n ERRORS\n);\n","/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n FirebaseApp,\n FirebaseOptions,\n FirebaseAppSettings\n} from './public-types';\nimport {\n ComponentContainer,\n Component,\n ComponentType\n} from '@firebase/component';\nimport { ERROR_FACTORY, AppError } from './errors';\n\nexport class FirebaseAppImpl implements FirebaseApp {\n private readonly _options: FirebaseOptions;\n private readonly _name: string;\n /**\n * Original config values passed in as a constructor parameter.\n * It is only used to compare with another config object to support idempotent initializeApp().\n *\n * Updating automaticDataCollectionEnabled on the App instance will not change its value in _config.\n */\n private readonly _config: Required;\n private _automaticDataCollectionEnabled: boolean;\n private _isDeleted = false;\n private readonly _container: ComponentContainer;\n\n constructor(\n options: FirebaseOptions,\n config: Required,\n container: ComponentContainer\n ) {\n this._options = { ...options };\n this._config = { ...config };\n this._name = config.name;\n this._automaticDataCollectionEnabled =\n config.automaticDataCollectionEnabled;\n this._container = container;\n this.container.addComponent(\n new Component('app', () => this, ComponentType.PUBLIC)\n );\n }\n\n get automaticDataCollectionEnabled(): boolean {\n this.checkDestroyed();\n return this._automaticDataCollectionEnabled;\n }\n\n set automaticDataCollectionEnabled(val: boolean) {\n this.checkDestroyed();\n this._automaticDataCollectionEnabled = val;\n }\n\n get name(): string {\n this.checkDestroyed();\n return this._name;\n }\n\n get options(): FirebaseOptions {\n this.checkDestroyed();\n return this._options;\n }\n\n get config(): Required {\n this.checkDestroyed();\n return this._config;\n }\n\n get container(): ComponentContainer {\n return this._container;\n }\n\n get isDeleted(): boolean {\n return this._isDeleted;\n }\n\n set isDeleted(val: boolean) {\n this._isDeleted = val;\n }\n\n /**\n * This function will throw an Error if the App has already been deleted -\n * use before performing API actions on the App.\n */\n private checkDestroyed(): void {\n if (this.isDeleted) {\n throw ERROR_FACTORY.create(AppError.APP_DELETED, { appName: this._name });\n }\n }\n}\n","/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed 
under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n FirebaseApp,\n FirebaseOptions,\n FirebaseAppSettings\n} from './public-types';\nimport { DEFAULT_ENTRY_NAME, PLATFORM_LOG_STRING } from './constants';\nimport { ERROR_FACTORY, AppError } from './errors';\nimport {\n ComponentContainer,\n Component,\n Name,\n ComponentType\n} from '@firebase/component';\nimport { version } from '../../firebase/package.json';\nimport { FirebaseAppImpl } from './firebaseApp';\nimport { _apps, _components, _registerComponent } from './internal';\nimport { logger } from './logger';\nimport {\n LogLevelString,\n setLogLevel as setLogLevelImpl,\n LogCallback,\n LogOptions,\n setUserLogHandler\n} from '@firebase/logger';\nimport { deepEqual, getDefaultAppConfig } from '@firebase/util';\n\nexport { FirebaseError } from '@firebase/util';\n\n/**\n * The current SDK version.\n *\n * @public\n */\nexport const SDK_VERSION = version;\n\n/**\n * Creates and initializes a {@link @firebase/app#FirebaseApp} instance.\n *\n * See\n * {@link\n * https://firebase.google.com/docs/web/setup#add_firebase_to_your_app\n * | Add Firebase to your app} and\n * {@link\n * https://firebase.google.com/docs/web/setup#multiple-projects\n * | Initialize multiple projects} for detailed documentation.\n *\n * @example\n * ```javascript\n *\n * // Initialize default app\n * // Retrieve your own options values by adding a web app on\n * // https://console.firebase.google.com\n * initializeApp({\n * apiKey: \"AIza....\", // Auth / General Use\n * authDomain: \"YOUR_APP.firebaseapp.com\", // Auth with popup/redirect\n * databaseURL: \"https://YOUR_APP.firebaseio.com\", // Realtime Database\n * storageBucket: \"YOUR_APP.appspot.com\", // Storage\n * messagingSenderId: \"123456789\" // Cloud Messaging\n * });\n * ```\n *\n * @example\n * ```javascript\n *\n * // Initialize another app\n * const otherApp = initializeApp({\n * databaseURL: \"https://.firebaseio.com\",\n * storageBucket: \".appspot.com\"\n * }, \"otherApp\");\n * ```\n *\n * @param options - Options to configure the app's services.\n * @param name - Optional name of the app to initialize. 
If no name\n * is provided, the default is `\"[DEFAULT]\"`.\n *\n * @returns The initialized app.\n *\n * @public\n */\nexport function initializeApp(\n options: FirebaseOptions,\n name?: string\n): FirebaseApp;\n/**\n * Creates and initializes a FirebaseApp instance.\n *\n * @param options - Options to configure the app's services.\n * @param config - FirebaseApp Configuration\n *\n * @public\n */\nexport function initializeApp(\n options: FirebaseOptions,\n config?: FirebaseAppSettings\n): FirebaseApp;\n/**\n * Creates and initializes a FirebaseApp instance.\n *\n * @public\n */\nexport function initializeApp(): FirebaseApp;\nexport function initializeApp(\n _options?: FirebaseOptions,\n rawConfig = {}\n): FirebaseApp {\n let options = _options;\n\n if (typeof rawConfig !== 'object') {\n const name = rawConfig;\n rawConfig = { name };\n }\n\n const config: Required = {\n name: DEFAULT_ENTRY_NAME,\n automaticDataCollectionEnabled: false,\n ...rawConfig\n };\n const name = config.name;\n\n if (typeof name !== 'string' || !name) {\n throw ERROR_FACTORY.create(AppError.BAD_APP_NAME, {\n appName: String(name)\n });\n }\n\n options ||= getDefaultAppConfig();\n\n if (!options) {\n throw ERROR_FACTORY.create(AppError.NO_OPTIONS);\n }\n\n const existingApp = _apps.get(name) as FirebaseAppImpl;\n if (existingApp) {\n // return the existing app if options and config deep equal the ones in the existing app.\n if (\n deepEqual(options, existingApp.options) &&\n deepEqual(config, existingApp.config)\n ) {\n return existingApp;\n } else {\n throw ERROR_FACTORY.create(AppError.DUPLICATE_APP, { appName: name });\n }\n }\n\n const container = new ComponentContainer(name);\n for (const component of _components.values()) {\n container.addComponent(component);\n }\n\n const newApp = new FirebaseAppImpl(options, config, container);\n\n _apps.set(name, newApp);\n\n return newApp;\n}\n\n/**\n * Retrieves a {@link @firebase/app#FirebaseApp} instance.\n *\n * When called with no arguments, the default app is returned. When an app name\n * is provided, the app corresponding to that name is returned.\n *\n * An exception is thrown if the app being retrieved has not yet been\n * initialized.\n *\n * @example\n * ```javascript\n * // Return the default app\n * const app = getApp();\n * ```\n *\n * @example\n * ```javascript\n * // Return a named app\n * const otherApp = getApp(\"otherApp\");\n * ```\n *\n * @param name - Optional name of the app to return. 
If no name is\n * provided, the default is `\"[DEFAULT]\"`.\n *\n * @returns The app corresponding to the provided app name.\n * If no app name is provided, the default app is returned.\n *\n * @public\n */\nexport function getApp(name: string = DEFAULT_ENTRY_NAME): FirebaseApp {\n const app = _apps.get(name);\n if (!app && name === DEFAULT_ENTRY_NAME && getDefaultAppConfig()) {\n return initializeApp();\n }\n if (!app) {\n throw ERROR_FACTORY.create(AppError.NO_APP, { appName: name });\n }\n\n return app;\n}\n\n/**\n * A (read-only) array of all initialized apps.\n * @public\n */\nexport function getApps(): FirebaseApp[] {\n return Array.from(_apps.values());\n}\n\n/**\n * Renders this app unusable and frees the resources of all associated\n * services.\n *\n * @example\n * ```javascript\n * deleteApp(app)\n * .then(function() {\n * console.log(\"App deleted successfully\");\n * })\n * .catch(function(error) {\n * console.log(\"Error deleting app:\", error);\n * });\n * ```\n *\n * @public\n */\nexport async function deleteApp(app: FirebaseApp): Promise {\n const name = app.name;\n if (_apps.has(name)) {\n _apps.delete(name);\n await Promise.all(\n (app as FirebaseAppImpl).container\n .getProviders()\n .map(provider => provider.delete())\n );\n (app as FirebaseAppImpl).isDeleted = true;\n }\n}\n\n/**\n * Registers a library's name and version for platform logging purposes.\n * @param library - Name of 1p or 3p library (e.g. firestore, angularfire)\n * @param version - Current version of that library.\n * @param variant - Bundle variant, e.g., node, rn, etc.\n *\n * @public\n */\nexport function registerVersion(\n libraryKeyOrName: string,\n version: string,\n variant?: string\n): void {\n // TODO: We can use this check to whitelist strings when/if we set up\n // a good whitelist system.\n let library = PLATFORM_LOG_STRING[libraryKeyOrName] ?? libraryKeyOrName;\n if (variant) {\n library += `-${variant}`;\n }\n const libraryMismatch = library.match(/\\s|\\//);\n const versionMismatch = version.match(/\\s|\\//);\n if (libraryMismatch || versionMismatch) {\n const warning = [\n `Unable to register library \"${library}\" with version \"${version}\":`\n ];\n if (libraryMismatch) {\n warning.push(\n `library name \"${library}\" contains illegal characters (whitespace or \"/\")`\n );\n }\n if (libraryMismatch && versionMismatch) {\n warning.push('and');\n }\n if (versionMismatch) {\n warning.push(\n `version name \"${version}\" contains illegal characters (whitespace or \"/\")`\n );\n }\n logger.warn(warning.join(' '));\n return;\n }\n _registerComponent(\n new Component(\n `${library}-version` as Name,\n () => ({ library, version }),\n ComponentType.VERSION\n )\n );\n}\n\n/**\n * Sets log handler for all Firebase SDKs.\n * @param logCallback - An optional custom log handler that executes user code whenever\n * the Firebase SDK makes a logging call.\n *\n * @public\n */\nexport function onLog(\n logCallback: LogCallback | null,\n options?: LogOptions\n): void {\n if (logCallback !== null && typeof logCallback !== 'function') {\n throw ERROR_FACTORY.create(AppError.INVALID_LOG_ARGUMENT);\n }\n setUserLogHandler(logCallback, options);\n}\n\n/**\n * Sets log level for all Firebase SDKs.\n *\n * All of the log types above the current log level are captured (i.e. 
if\n * you set the log level to `info`, errors are logged, but `debug` and\n * `verbose` logs are not).\n *\n * @public\n */\nexport function setLogLevel(logLevel: LogLevelString): void {\n setLogLevelImpl(logLevel);\n}\n","/**\n * @license\n * Copyright 2021 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { FirebaseError } from '@firebase/util';\nimport { DBSchema, openDB, IDBPDatabase } from 'idb';\nimport { AppError, ERROR_FACTORY } from './errors';\nimport { FirebaseApp } from './public-types';\nimport { HeartbeatsInIndexedDB } from './types';\nimport { logger } from './logger';\n\nconst DB_NAME = 'firebase-heartbeat-database';\nconst DB_VERSION = 1;\nconst STORE_NAME = 'firebase-heartbeat-store';\n\ninterface AppDB extends DBSchema {\n 'firebase-heartbeat-store': {\n key: string;\n value: HeartbeatsInIndexedDB;\n };\n}\n\nlet dbPromise: Promise> | null = null;\nfunction getDbPromise(): Promise> {\n if (!dbPromise) {\n dbPromise = openDB(DB_NAME, DB_VERSION, {\n upgrade: (db, oldVersion) => {\n // We don't use 'break' in this switch statement, the fall-through\n // behavior is what we want, because if there are multiple versions between\n // the old version and the current version, we want ALL the migrations\n // that correspond to those versions to run, not only the last one.\n // eslint-disable-next-line default-case\n switch (oldVersion) {\n case 0:\n try {\n db.createObjectStore(STORE_NAME);\n } catch (e) {\n // Safari/iOS browsers throw occasional exceptions on\n // db.createObjectStore() that may be a bug. 
Avoid blocking\n // the rest of the app functionality.\n console.warn(e);\n }\n }\n }\n }).catch(e => {\n throw ERROR_FACTORY.create(AppError.IDB_OPEN, {\n originalErrorMessage: e.message\n });\n });\n }\n return dbPromise;\n}\n\nexport async function readHeartbeatsFromIndexedDB(\n app: FirebaseApp\n): Promise {\n try {\n const db = await getDbPromise();\n const tx = db.transaction(STORE_NAME);\n const result = await tx.objectStore(STORE_NAME).get(computeKey(app));\n // We already have the value but tx.done can throw,\n // so we need to await it here to catch errors\n await tx.done;\n return result;\n } catch (e) {\n if (e instanceof FirebaseError) {\n logger.warn(e.message);\n } else {\n const idbGetError = ERROR_FACTORY.create(AppError.IDB_GET, {\n originalErrorMessage: (e as Error)?.message\n });\n logger.warn(idbGetError.message);\n }\n }\n}\n\nexport async function writeHeartbeatsToIndexedDB(\n app: FirebaseApp,\n heartbeatObject: HeartbeatsInIndexedDB\n): Promise {\n try {\n const db = await getDbPromise();\n const tx = db.transaction(STORE_NAME, 'readwrite');\n const objectStore = tx.objectStore(STORE_NAME);\n await objectStore.put(heartbeatObject, computeKey(app));\n await tx.done;\n } catch (e) {\n if (e instanceof FirebaseError) {\n logger.warn(e.message);\n } else {\n const idbGetError = ERROR_FACTORY.create(AppError.IDB_WRITE, {\n originalErrorMessage: (e as Error)?.message\n });\n logger.warn(idbGetError.message);\n }\n }\n}\n\nfunction computeKey(app: FirebaseApp): string {\n return `${app.name}!${app.options.appId}`;\n}\n","/**\n * @license\n * Copyright 2021 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { ComponentContainer } from '@firebase/component';\nimport {\n base64urlEncodeWithoutPadding,\n isIndexedDBAvailable,\n validateIndexedDBOpenable\n} from '@firebase/util';\nimport {\n readHeartbeatsFromIndexedDB,\n writeHeartbeatsToIndexedDB\n} from './indexeddb';\nimport { FirebaseApp } from './public-types';\nimport {\n HeartbeatsByUserAgent,\n HeartbeatService,\n HeartbeatsInIndexedDB,\n HeartbeatStorage,\n SingleDateHeartbeat\n} from './types';\n\nconst MAX_HEADER_BYTES = 1024;\n// 30 days\nconst STORED_HEARTBEAT_RETENTION_MAX_MILLIS = 30 * 24 * 60 * 60 * 1000;\n\nexport class HeartbeatServiceImpl implements HeartbeatService {\n /**\n * The persistence layer for heartbeats\n * Leave public for easier testing.\n */\n _storage: HeartbeatStorageImpl;\n\n /**\n * In-memory cache for heartbeats, used by getHeartbeatsHeader() to generate\n * the header string.\n * Stores one record per date. 
This will be consolidated into the standard\n * format of one record per user agent string before being sent as a header.\n * Populated from indexedDB when the controller is instantiated and should\n * be kept in sync with indexedDB.\n * Leave public for easier testing.\n */\n _heartbeatsCache: HeartbeatsInIndexedDB | null = null;\n\n /**\n * the initialization promise for populating heartbeatCache.\n * If getHeartbeatsHeader() is called before the promise resolves\n * (hearbeatsCache == null), it should wait for this promise\n * Leave public for easier testing.\n */\n _heartbeatsCachePromise: Promise;\n constructor(private readonly container: ComponentContainer) {\n const app = this.container.getProvider('app').getImmediate();\n this._storage = new HeartbeatStorageImpl(app);\n this._heartbeatsCachePromise = this._storage.read().then(result => {\n this._heartbeatsCache = result;\n return result;\n });\n }\n\n /**\n * Called to report a heartbeat. The function will generate\n * a HeartbeatsByUserAgent object, update heartbeatsCache, and persist it\n * to IndexedDB.\n * Note that we only store one heartbeat per day. So if a heartbeat for today is\n * already logged, subsequent calls to this function in the same day will be ignored.\n */\n async triggerHeartbeat(): Promise {\n const platformLogger = this.container\n .getProvider('platform-logger')\n .getImmediate();\n\n // This is the \"Firebase user agent\" string from the platform logger\n // service, not the browser user agent.\n const agent = platformLogger.getPlatformInfoString();\n const date = getUTCDateString();\n if (this._heartbeatsCache?.heartbeats == null) {\n this._heartbeatsCache = await this._heartbeatsCachePromise;\n // If we failed to construct a heartbeats cache, then return immediately.\n if (this._heartbeatsCache?.heartbeats == null) {\n return;\n }\n }\n // Do not store a heartbeat if one is already stored for this day\n // or if a header has already been sent today.\n if (\n this._heartbeatsCache.lastSentHeartbeatDate === date ||\n this._heartbeatsCache.heartbeats.some(\n singleDateHeartbeat => singleDateHeartbeat.date === date\n )\n ) {\n return;\n } else {\n // There is no entry for this date. 
Create one.\n this._heartbeatsCache.heartbeats.push({ date, agent });\n }\n // Remove entries older than 30 days.\n this._heartbeatsCache.heartbeats = this._heartbeatsCache.heartbeats.filter(\n singleDateHeartbeat => {\n const hbTimestamp = new Date(singleDateHeartbeat.date).valueOf();\n const now = Date.now();\n return now - hbTimestamp <= STORED_HEARTBEAT_RETENTION_MAX_MILLIS;\n }\n );\n return this._storage.overwrite(this._heartbeatsCache);\n }\n\n /**\n * Returns a base64 encoded string which can be attached to the heartbeat-specific header directly.\n * It also clears all heartbeats from memory as well as in IndexedDB.\n *\n * NOTE: Consuming product SDKs should not send the header if this method\n * returns an empty string.\n */\n async getHeartbeatsHeader(): Promise {\n if (this._heartbeatsCache === null) {\n await this._heartbeatsCachePromise;\n }\n // If it's still null or the array is empty, there is no data to send.\n if (\n this._heartbeatsCache?.heartbeats == null ||\n this._heartbeatsCache.heartbeats.length === 0\n ) {\n return '';\n }\n const date = getUTCDateString();\n // Extract as many heartbeats from the cache as will fit under the size limit.\n const { heartbeatsToSend, unsentEntries } = extractHeartbeatsForHeader(\n this._heartbeatsCache.heartbeats\n );\n const headerString = base64urlEncodeWithoutPadding(\n JSON.stringify({ version: 2, heartbeats: heartbeatsToSend })\n );\n // Store last sent date to prevent another being logged/sent for the same day.\n this._heartbeatsCache.lastSentHeartbeatDate = date;\n if (unsentEntries.length > 0) {\n // Store any unsent entries if they exist.\n this._heartbeatsCache.heartbeats = unsentEntries;\n // This seems more likely than emptying the array (below) to lead to some odd state\n // since the cache isn't empty and this will be called again on the next request,\n // and is probably safest if we await it.\n await this._storage.overwrite(this._heartbeatsCache);\n } else {\n this._heartbeatsCache.heartbeats = [];\n // Do not wait for this, to reduce latency.\n void this._storage.overwrite(this._heartbeatsCache);\n }\n return headerString;\n }\n}\n\nfunction getUTCDateString(): string {\n const today = new Date();\n // Returns date format 'YYYY-MM-DD'\n return today.toISOString().substring(0, 10);\n}\n\nexport function extractHeartbeatsForHeader(\n heartbeatsCache: SingleDateHeartbeat[],\n maxSize = MAX_HEADER_BYTES\n): {\n heartbeatsToSend: HeartbeatsByUserAgent[];\n unsentEntries: SingleDateHeartbeat[];\n} {\n // Heartbeats grouped by user agent in the standard format to be sent in\n // the header.\n const heartbeatsToSend: HeartbeatsByUserAgent[] = [];\n // Single date format heartbeats that are not sent.\n let unsentEntries = heartbeatsCache.slice();\n for (const singleDateHeartbeat of heartbeatsCache) {\n // Look for an existing entry with the same user agent.\n const heartbeatEntry = heartbeatsToSend.find(\n hb => hb.agent === singleDateHeartbeat.agent\n );\n if (!heartbeatEntry) {\n // If no entry for this user agent exists, create one.\n heartbeatsToSend.push({\n agent: singleDateHeartbeat.agent,\n dates: [singleDateHeartbeat.date]\n });\n if (countBytes(heartbeatsToSend) > maxSize) {\n // If the header would exceed max size, remove the added heartbeat\n // entry and stop adding to the header.\n heartbeatsToSend.pop();\n break;\n }\n } else {\n heartbeatEntry.dates.push(singleDateHeartbeat.date);\n // If the header would exceed max size, remove the added date\n // and stop adding to the header.\n if 
(countBytes(heartbeatsToSend) > maxSize) {\n heartbeatEntry.dates.pop();\n break;\n }\n }\n // Pop unsent entry from queue. (Skipped if adding the entry exceeded\n // quota and the loop breaks early.)\n unsentEntries = unsentEntries.slice(1);\n }\n return {\n heartbeatsToSend,\n unsentEntries\n };\n}\n\nexport class HeartbeatStorageImpl implements HeartbeatStorage {\n private _canUseIndexedDBPromise: Promise;\n constructor(public app: FirebaseApp) {\n this._canUseIndexedDBPromise = this.runIndexedDBEnvironmentCheck();\n }\n async runIndexedDBEnvironmentCheck(): Promise {\n if (!isIndexedDBAvailable()) {\n return false;\n } else {\n return validateIndexedDBOpenable()\n .then(() => true)\n .catch(() => false);\n }\n }\n /**\n * Read all heartbeats.\n */\n async read(): Promise {\n const canUseIndexedDB = await this._canUseIndexedDBPromise;\n if (!canUseIndexedDB) {\n return { heartbeats: [] };\n } else {\n const idbHeartbeatObject = await readHeartbeatsFromIndexedDB(this.app);\n if (idbHeartbeatObject?.heartbeats) {\n return idbHeartbeatObject;\n } else {\n return { heartbeats: [] };\n }\n }\n }\n // overwrite the storage with the provided heartbeats\n async overwrite(heartbeatsObject: HeartbeatsInIndexedDB): Promise {\n const canUseIndexedDB = await this._canUseIndexedDBPromise;\n if (!canUseIndexedDB) {\n return;\n } else {\n const existingHeartbeatsObject = await this.read();\n return writeHeartbeatsToIndexedDB(this.app, {\n lastSentHeartbeatDate:\n heartbeatsObject.lastSentHeartbeatDate ??\n existingHeartbeatsObject.lastSentHeartbeatDate,\n heartbeats: heartbeatsObject.heartbeats\n });\n }\n }\n // add heartbeats\n async add(heartbeatsObject: HeartbeatsInIndexedDB): Promise {\n const canUseIndexedDB = await this._canUseIndexedDBPromise;\n if (!canUseIndexedDB) {\n return;\n } else {\n const existingHeartbeatsObject = await this.read();\n return writeHeartbeatsToIndexedDB(this.app, {\n lastSentHeartbeatDate:\n heartbeatsObject.lastSentHeartbeatDate ??\n existingHeartbeatsObject.lastSentHeartbeatDate,\n heartbeats: [\n ...existingHeartbeatsObject.heartbeats,\n ...heartbeatsObject.heartbeats\n ]\n });\n }\n }\n}\n\n/**\n * Calculate bytes of a HeartbeatsByUserAgent array after being wrapped\n * in a platform logging header JSON object, stringified, and converted\n * to base 64.\n */\nexport function countBytes(heartbeatsCache: HeartbeatsByUserAgent[]): number {\n // base64 has a restricted set of characters, all of which should be 1 byte.\n return base64urlEncodeWithoutPadding(\n // heartbeatsCache wrapper properties\n JSON.stringify({ version: 2, heartbeats: heartbeatsCache })\n ).length;\n}\n","/**\n * Firebase App\n *\n * @remarks This package coordinates the communication between the different Firebase components\n * @packageDocumentation\n */\n\n/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { registerCoreComponents } from './registerCoreComponents';\n\nexport * from './api';\nexport * from './internal';\nexport 
* from './public-types';\n\nregisterCoreComponents('__RUNTIME_ENV__');\n","/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { FirebaseOptions } from './public-types';\nimport {\n Component,\n ComponentContainer,\n ComponentType,\n InstantiationMode,\n Name\n} from '@firebase/component';\nimport {\n deleteApp,\n _addComponent,\n _addOrOverwriteComponent,\n _DEFAULT_ENTRY_NAME,\n _FirebaseAppInternal as _FirebaseAppExp\n} from '@firebase/app';\nimport { _FirebaseService, _FirebaseNamespace } from './types';\nimport { Compat } from '@firebase/util';\n\n// eslint-disable-next-line @typescript-eslint/naming-convention\nexport interface _FirebaseApp {\n /**\n * The (read-only) name (identifier) for this App. '[DEFAULT]' is the default\n * App.\n */\n name: string;\n\n /**\n * The (read-only) configuration options from the app initialization.\n */\n options: FirebaseOptions;\n\n /**\n * The settable config flag for GDPR opt-in/opt-out\n */\n automaticDataCollectionEnabled: boolean;\n\n /**\n * Make the given App unusable and free resources.\n */\n delete(): Promise;\n}\n/**\n * Global context object for a collection of services using\n * a shared authentication state.\n *\n * marked as internal because it references internal types exported from @firebase/app\n * @internal\n */\nexport class FirebaseAppImpl implements Compat<_FirebaseAppExp>, _FirebaseApp {\n private container: ComponentContainer;\n\n constructor(\n readonly _delegate: _FirebaseAppExp,\n private readonly firebase: _FirebaseNamespace\n ) {\n // add itself to container\n _addComponent(\n _delegate,\n new Component('app-compat', () => this, ComponentType.PUBLIC)\n );\n\n this.container = _delegate.container;\n }\n\n get automaticDataCollectionEnabled(): boolean {\n return this._delegate.automaticDataCollectionEnabled;\n }\n\n set automaticDataCollectionEnabled(val) {\n this._delegate.automaticDataCollectionEnabled = val;\n }\n\n get name(): string {\n return this._delegate.name;\n }\n\n get options(): FirebaseOptions {\n return this._delegate.options;\n }\n\n delete(): Promise {\n return new Promise(resolve => {\n this._delegate.checkDestroyed();\n resolve();\n }).then(() => {\n this.firebase.INTERNAL.removeApp(this.name);\n return deleteApp(this._delegate);\n });\n }\n\n /**\n * Return a service instance associated with this app (creating it\n * on demand), identified by the passed instanceIdentifier.\n *\n * NOTE: Currently storage and functions are the only ones that are leveraging this\n * functionality. 
They invoke it by calling:\n *\n * ```javascript\n * firebase.app().storage('STORAGE BUCKET ID')\n * ```\n *\n * The service name is passed to this already\n * @internal\n */\n _getService(\n name: string,\n instanceIdentifier: string = _DEFAULT_ENTRY_NAME\n ): _FirebaseService {\n this._delegate.checkDestroyed();\n\n // Initialize instance if InstatiationMode is `EXPLICIT`.\n const provider = this._delegate.container.getProvider(name as Name);\n if (\n !provider.isInitialized() &&\n provider.getComponent()?.instantiationMode === InstantiationMode.EXPLICIT\n ) {\n provider.initialize();\n }\n\n // getImmediate will always succeed because _getService is only called for registered components.\n return provider.getImmediate({\n identifier: instanceIdentifier\n }) as unknown as _FirebaseService;\n }\n\n /**\n * Remove a service instance from the cache, so we will create a new instance for this service\n * when people try to get it again.\n *\n * NOTE: currently only firestore uses this functionality to support firestore shutdown.\n *\n * @param name The service name\n * @param instanceIdentifier instance identifier in case multiple instances are allowed\n * @internal\n */\n _removeServiceInstance(\n name: string,\n instanceIdentifier: string = _DEFAULT_ENTRY_NAME\n ): void {\n this._delegate.container\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n .getProvider(name as any)\n .clearInstance(instanceIdentifier);\n }\n\n /**\n * @param component the component being added to this app's container\n * @internal\n */\n _addComponent(component: Component): void {\n _addComponent(this._delegate, component);\n }\n\n _addOrOverwriteComponent(component: Component): void {\n _addOrOverwriteComponent(this._delegate, component);\n }\n\n toJSON(): object {\n return {\n name: this.name,\n automaticDataCollectionEnabled: this.automaticDataCollectionEnabled,\n options: this.options\n };\n }\n}\n\n// TODO: investigate why the following needs to be commented out\n// Prevent dead-code elimination of these methods w/o invalid property\n// copying.\n// (FirebaseAppImpl.prototype.name && FirebaseAppImpl.prototype.options) ||\n// FirebaseAppImpl.prototype.delete ||\n// console.log('dc');\n","/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { ErrorFactory, ErrorMap } from '@firebase/util';\n\nexport const enum AppError {\n NO_APP = 'no-app',\n INVALID_APP_ARGUMENT = 'invalid-app-argument'\n}\n\nconst ERRORS: ErrorMap = {\n [AppError.NO_APP]:\n \"No Firebase App '{$appName}' has been created - \" +\n 'call Firebase App.initializeApp()',\n [AppError.INVALID_APP_ARGUMENT]:\n 'firebase.{$appName}() takes either no argument or a ' +\n 'Firebase App instance.'\n};\n\ntype ErrorParams = { [key in AppError]: { appName: string } };\n\nexport const ERROR_FACTORY = new ErrorFactory(\n 'app-compat',\n 'Firebase',\n ERRORS\n);\n","/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the 
\"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { FirebaseApp, FirebaseOptions } from './public-types';\nimport {\n _FirebaseNamespace,\n _FirebaseService,\n FirebaseServiceNamespace\n} from './types';\nimport * as modularAPIs from '@firebase/app';\nimport { _FirebaseAppInternal as _FirebaseAppExp } from '@firebase/app';\nimport { Component, ComponentType, Name } from '@firebase/component';\n\nimport { deepExtend, contains } from '@firebase/util';\nimport { FirebaseAppImpl } from './firebaseApp';\nimport { ERROR_FACTORY, AppError } from './errors';\nimport { FirebaseAppLiteImpl } from './lite/firebaseAppLite';\n\n/**\n * Because auth can't share code with other components, we attach the utility functions\n * in an internal namespace to share code.\n * This function return a firebase namespace object without\n * any utility functions, so it can be shared between the regular firebaseNamespace and\n * the lite version.\n */\nexport function createFirebaseNamespaceCore(\n firebaseAppImpl: typeof FirebaseAppImpl | typeof FirebaseAppLiteImpl\n): _FirebaseNamespace {\n const apps: { [name: string]: FirebaseApp } = {};\n // // eslint-disable-next-line @typescript-eslint/no-explicit-any\n // const components = new Map>();\n\n // A namespace is a plain JavaScript Object.\n const namespace: _FirebaseNamespace = {\n // Hack to prevent Babel from modifying the object returned\n // as the firebase namespace.\n // @ts-ignore\n __esModule: true,\n initializeApp: initializeAppCompat,\n // @ts-ignore\n app,\n registerVersion: modularAPIs.registerVersion,\n setLogLevel: modularAPIs.setLogLevel,\n onLog: modularAPIs.onLog,\n // @ts-ignore\n apps: null,\n SDK_VERSION: modularAPIs.SDK_VERSION,\n INTERNAL: {\n registerComponent: registerComponentCompat,\n removeApp,\n useAsService,\n modularAPIs\n }\n };\n\n // Inject a circular default export to allow Babel users who were previously\n // using:\n //\n // import firebase from 'firebase';\n // which becomes: var firebase = require('firebase').default;\n //\n // instead of\n //\n // import * as firebase from 'firebase';\n // which becomes: var firebase = require('firebase');\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n (namespace as any)['default'] = namespace;\n\n // firebase.apps is a read-only getter.\n Object.defineProperty(namespace, 'apps', {\n get: getApps\n });\n\n /**\n * Called by App.delete() - but before any services associated with the App\n * are deleted.\n */\n function removeApp(name: string): void {\n delete apps[name];\n }\n\n /**\n * Get the App object for a given name (or DEFAULT).\n */\n function app(name?: string): FirebaseApp {\n name = name || modularAPIs._DEFAULT_ENTRY_NAME;\n if (!contains(apps, name)) {\n throw ERROR_FACTORY.create(AppError.NO_APP, { appName: name });\n }\n return apps[name];\n }\n\n // @ts-ignore\n app['App'] = firebaseAppImpl;\n\n /**\n * Create a new App instance (name must be unique).\n *\n * This function is idempotent. 
It can be called more than once and return the same instance using the same options and config.\n */\n function initializeAppCompat(\n options: FirebaseOptions,\n rawConfig = {}\n ): FirebaseApp {\n const app = modularAPIs.initializeApp(\n options,\n rawConfig\n ) as _FirebaseAppExp;\n\n if (contains(apps, app.name)) {\n return apps[app.name];\n }\n\n const appCompat = new firebaseAppImpl(app, namespace);\n apps[app.name] = appCompat;\n return appCompat;\n }\n\n /*\n * Return an array of all the non-deleted FirebaseApps.\n */\n function getApps(): FirebaseApp[] {\n // Make a copy so caller cannot mutate the apps list.\n return Object.keys(apps).map(name => apps[name]);\n }\n\n function registerComponentCompat(\n component: Component\n ): FirebaseServiceNamespace<_FirebaseService> | null {\n const componentName = component.name;\n const componentNameWithoutCompat = componentName.replace('-compat', '');\n if (\n modularAPIs._registerComponent(component) &&\n component.type === ComponentType.PUBLIC\n ) {\n // create service namespace for public components\n // The Service namespace is an accessor function ...\n const serviceNamespace = (\n appArg: FirebaseApp = app()\n ): _FirebaseService => {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n if (typeof (appArg as any)[componentNameWithoutCompat] !== 'function') {\n // Invalid argument.\n // This happens in the following case: firebase.storage('gs:/')\n throw ERROR_FACTORY.create(AppError.INVALID_APP_ARGUMENT, {\n appName: componentName\n });\n }\n\n // Forward service instance lookup to the FirebaseApp.\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n return (appArg as any)[componentNameWithoutCompat]();\n };\n\n // ... and a container for service-level properties.\n if (component.serviceProps !== undefined) {\n deepExtend(serviceNamespace, component.serviceProps);\n }\n\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n (namespace as any)[componentNameWithoutCompat] = serviceNamespace;\n\n // Patch the FirebaseAppImpl prototype\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n (firebaseAppImpl.prototype as any)[componentNameWithoutCompat] =\n // TODO: The eslint disable can be removed and the 'ignoreRestArgs'\n // option added to the no-explicit-any rule when ESlint releases it.\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n function (...args: any) {\n const serviceFxn = this._getService.bind(this, componentName);\n return serviceFxn.apply(\n this,\n component.multipleInstances ? args : []\n );\n };\n }\n\n return component.type === ComponentType.PUBLIC\n ? 
// eslint-disable-next-line @typescript-eslint/no-explicit-any\n (namespace as any)[componentNameWithoutCompat]\n : null;\n }\n\n // Map the requested service to a registered service name\n // (used to map auth to serverAuth service when needed).\n function useAsService(app: FirebaseApp, name: string): string | null {\n if (name === 'serverAuth') {\n return null;\n }\n\n const useService = name;\n\n return useService;\n }\n\n return namespace;\n}\n","/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { FirebaseNamespace } from './public-types';\nimport { createSubscribe, deepExtend, ErrorFactory } from '@firebase/util';\nimport { FirebaseAppImpl } from './firebaseApp';\nimport { createFirebaseNamespaceCore } from './firebaseNamespaceCore';\n\n/**\n * Return a firebase namespace object.\n *\n * In production, this will be called exactly once and the result\n * assigned to the 'firebase' global. It may be called multiple times\n * in unit tests.\n */\nexport function createFirebaseNamespace(): FirebaseNamespace {\n const namespace = createFirebaseNamespaceCore(FirebaseAppImpl);\n namespace.INTERNAL = {\n ...namespace.INTERNAL,\n createFirebaseNamespace,\n extendNamespace,\n createSubscribe,\n ErrorFactory,\n deepExtend\n };\n\n /**\n * Patch the top-level firebase namespace with additional properties.\n *\n * firebase.INTERNAL.extendNamespace()\n */\n function extendNamespace(props: { [prop: string]: unknown }): void {\n deepExtend(namespace, props);\n }\n\n return namespace;\n}\n\nexport const firebase = createFirebaseNamespace();\n","/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Logger } from '@firebase/logger';\n\nexport const logger = new Logger('@firebase/app-compat');\n","/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { FirebaseNamespace } from 
'./public-types';\nimport { isBrowser } from '@firebase/util';\nimport { firebase as firebaseNamespace } from './firebaseNamespace';\nimport { logger } from './logger';\nimport { registerCoreComponents } from './registerCoreComponents';\n\n// Firebase Lite detection\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nif (isBrowser() && (self as any).firebase !== undefined) {\n logger.warn(`\n Warning: Firebase is already defined in the global scope. Please make sure\n Firebase library is only loaded once.\n `);\n\n // eslint-disable-next-line\n const sdkVersion = ((self as any).firebase as FirebaseNamespace).SDK_VERSION;\n if (sdkVersion && sdkVersion.indexOf('LITE') >= 0) {\n logger.warn(`\n Warning: You are trying to load Firebase while using Firebase Performance standalone script.\n You should load Firebase Performance with this instance of Firebase to avoid loading duplicate code.\n `);\n }\n}\n\nconst firebase = firebaseNamespace;\n\nregisterCoreComponents();\n\n// eslint-disable-next-line import/no-default-export\nexport default firebase;\n\nexport { _FirebaseNamespace, _FirebaseService } from './types';\nexport { FirebaseApp, FirebaseNamespace } from './public-types';\n","/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { registerVersion } from '@firebase/app';\n\nimport { name, version } from '../package.json';\n\nexport function registerCoreComponents(variant?: string): void {\n // Register `app` package.\n registerVersion(name, version, variant);\n}\n","/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport firebase from '@firebase/app-compat';\nimport { name, version } from '../../package.json';\n\nfirebase.registerVersion(name, version, 'app-compat');\n\nexport default firebase;\n","/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { version } from 
'../../package.json';\n\nexport const PENDING_TIMEOUT_MS = 10000;\n\nexport const PACKAGE_VERSION = `w:${version}`;\nexport const INTERNAL_AUTH_VERSION = 'FIS_v2';\n\nexport const INSTALLATIONS_API_URL =\n 'https://firebaseinstallations.googleapis.com/v1';\n\nexport const TOKEN_EXPIRATION_BUFFER = 60 * 60 * 1000; // One hour\n\nexport const SERVICE = 'installations';\nexport const SERVICE_NAME = 'Installations';\n","/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { ErrorFactory, FirebaseError } from '@firebase/util';\nimport { SERVICE, SERVICE_NAME } from './constants';\n\nexport const enum ErrorCode {\n MISSING_APP_CONFIG_VALUES = 'missing-app-config-values',\n NOT_REGISTERED = 'not-registered',\n INSTALLATION_NOT_FOUND = 'installation-not-found',\n REQUEST_FAILED = 'request-failed',\n APP_OFFLINE = 'app-offline',\n DELETE_PENDING_REGISTRATION = 'delete-pending-registration'\n}\n\nconst ERROR_DESCRIPTION_MAP: { readonly [key in ErrorCode]: string } = {\n [ErrorCode.MISSING_APP_CONFIG_VALUES]:\n 'Missing App configuration value: \"{$valueName}\"',\n [ErrorCode.NOT_REGISTERED]: 'Firebase Installation is not registered.',\n [ErrorCode.INSTALLATION_NOT_FOUND]: 'Firebase Installation not found.',\n [ErrorCode.REQUEST_FAILED]:\n '{$requestName} request failed with error \"{$serverCode} {$serverStatus}: {$serverMessage}\"',\n [ErrorCode.APP_OFFLINE]: 'Could not process request. Application offline.',\n [ErrorCode.DELETE_PENDING_REGISTRATION]:\n \"Can't delete installation while there is a pending registration request.\"\n};\n\ninterface ErrorParams {\n [ErrorCode.MISSING_APP_CONFIG_VALUES]: {\n valueName: string;\n };\n [ErrorCode.REQUEST_FAILED]: {\n requestName: string;\n [index: string]: string | number; // to make Typescript 3.8 happy\n } & ServerErrorData;\n}\n\nexport const ERROR_FACTORY = new ErrorFactory(\n SERVICE,\n SERVICE_NAME,\n ERROR_DESCRIPTION_MAP\n);\n\nexport interface ServerErrorData {\n serverCode: number;\n serverMessage: string;\n serverStatus: string;\n}\n\nexport type ServerError = FirebaseError & { customData: ServerErrorData };\n\n/** Returns true if error is a FirebaseError that is based on an error from the server. 
*/\nexport function isServerError(error: unknown): error is ServerError {\n return (\n error instanceof FirebaseError &&\n error.code.includes(ErrorCode.REQUEST_FAILED)\n );\n}\n","/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { FirebaseError } from '@firebase/util';\nimport { GenerateAuthTokenResponse } from '../interfaces/api-response';\nimport {\n CompletedAuthToken,\n RegisteredInstallationEntry,\n RequestStatus\n} from '../interfaces/installation-entry';\nimport {\n INSTALLATIONS_API_URL,\n INTERNAL_AUTH_VERSION\n} from '../util/constants';\nimport { ERROR_FACTORY, ErrorCode } from '../util/errors';\nimport { AppConfig } from '../interfaces/installation-impl';\n\nexport function getInstallationsEndpoint({ projectId }: AppConfig): string {\n return `${INSTALLATIONS_API_URL}/projects/${projectId}/installations`;\n}\n\nexport function extractAuthTokenInfoFromResponse(\n response: GenerateAuthTokenResponse\n): CompletedAuthToken {\n return {\n token: response.token,\n requestStatus: RequestStatus.COMPLETED,\n expiresIn: getExpiresInFromResponseExpiresIn(response.expiresIn),\n creationTime: Date.now()\n };\n}\n\nexport async function getErrorFromResponse(\n requestName: string,\n response: Response\n): Promise {\n const responseJson: ErrorResponse = await response.json();\n const errorData = responseJson.error;\n return ERROR_FACTORY.create(ErrorCode.REQUEST_FAILED, {\n requestName,\n serverCode: errorData.code,\n serverMessage: errorData.message,\n serverStatus: errorData.status\n });\n}\n\nexport function getHeaders({ apiKey }: AppConfig): Headers {\n return new Headers({\n 'Content-Type': 'application/json',\n Accept: 'application/json',\n 'x-goog-api-key': apiKey\n });\n}\n\nexport function getHeadersWithAuth(\n appConfig: AppConfig,\n { refreshToken }: RegisteredInstallationEntry\n): Headers {\n const headers = getHeaders(appConfig);\n headers.append('Authorization', getAuthorizationHeader(refreshToken));\n return headers;\n}\n\nexport interface ErrorResponse {\n error: {\n code: number;\n message: string;\n status: string;\n };\n}\n\n/**\n * Calls the passed in fetch wrapper and returns the response.\n * If the returned response has a status of 5xx, re-runs the function once and\n * returns the response.\n */\nexport async function retryIfServerError(\n fn: () => Promise\n): Promise {\n const result = await fn();\n\n if (result.status >= 500 && result.status < 600) {\n // Internal Server Error. 
Retry request.\n return fn();\n }\n\n return result;\n}\n\nfunction getExpiresInFromResponseExpiresIn(responseExpiresIn: string): number {\n // This works because the server will never respond with fractions of a second.\n return Number(responseExpiresIn.replace('s', '000'));\n}\n\nfunction getAuthorizationHeader(refreshToken: string): string {\n return `${INTERNAL_AUTH_VERSION} ${refreshToken}`;\n}\n","/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n/** Returns a promise that resolves after given time passes. */\nexport function sleep(ms: number): Promise {\n return new Promise(resolve => {\n setTimeout(resolve, ms);\n });\n}\n","/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { bufferToBase64UrlSafe } from './buffer-to-base64-url-safe';\n\nexport const VALID_FID_PATTERN = /^[cdef][\\w-]{21}$/;\nexport const INVALID_FID = '';\n\n/**\n * Generates a new FID using random values from Web Crypto API.\n * Returns an empty string if FID generation fails for any reason.\n */\nexport function generateFid(): string {\n try {\n // A valid FID has exactly 22 base64 characters, which is 132 bits, or 16.5\n // bytes. our implementation generates a 17 byte array instead.\n const fidByteArray = new Uint8Array(17);\n const crypto =\n self.crypto || (self as unknown as { msCrypto: Crypto }).msCrypto;\n crypto.getRandomValues(fidByteArray);\n\n // Replace the first 4 random bits with the constant FID header of 0b0111.\n fidByteArray[0] = 0b01110000 + (fidByteArray[0] % 0b00010000);\n\n const fid = encode(fidByteArray);\n\n return VALID_FID_PATTERN.test(fid) ? fid : INVALID_FID;\n } catch {\n // FID generation errored\n return INVALID_FID;\n }\n}\n\n/** Converts a FID Uint8Array to a base64 string representation. 
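// Illustrative sketch, not part of the SDK source: the retry policy that
// retryIfServerError() above implements, re-issuing a request exactly once
// when the first response has a 5xx status. The URL below is a placeholder.
async function fetchWithOneRetry(url: string, init: RequestInit): Promise<Response> {
  const first = await fetch(url, init);
  if (first.status >= 500 && first.status < 600) {
    // Internal server error: try one more time and return whatever comes back.
    return fetch(url, init);
  }
  return first;
}

// Usage sketch:
// const response = await fetchWithOneRetry('https://example.invalid/v1/ping', { method: 'GET' });

// The expiresIn conversion above relies on server values such as '604800s':
// '604800s'.replace('s', '000') === '604800000'  // seconds -> milliseconds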
*/\nfunction encode(fidByteArray: Uint8Array): string {\n const b64String = bufferToBase64UrlSafe(fidByteArray);\n\n // Remove the 23rd character that was added because of the extra 4 bits at the\n // end of our 17 byte array, and the '=' padding.\n return b64String.substr(0, 22);\n}\n","/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport function bufferToBase64UrlSafe(array: Uint8Array): string {\n const b64 = btoa(String.fromCharCode(...array));\n return b64.replace(/\\+/g, '-').replace(/\\//g, '_');\n}\n","/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { AppConfig } from '../interfaces/installation-impl';\n\n/** Returns a string key that can be used to identify the app. 
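// Illustrative worked example, not part of the SDK source, of the FID shape
// produced by generateFid()/encode() above: 17 bytes base64-encode to 23
// significant characters plus '=' padding, the output is cut to 22 characters,
// and forcing the first byte into 0b0111xxxx makes the first character one of
// 'c'..'f' (base64 alphabet indices 28-31), matching VALID_FID_PATTERN.
function sketchFid(): string {
  const bytes = new Uint8Array(17);
  crypto.getRandomValues(bytes);
  bytes[0] = 0b01110000 + (bytes[0] % 0b00010000); // constant header bits 0111
  const b64url = btoa(String.fromCharCode(...bytes))
    .replace(/\+/g, '-')
    .replace(/\//g, '_');
  return b64url.substr(0, 22); // drops the 23rd character and the '=' padding
}

console.log(/^[cdef][\w-]{21}$/.test(sketchFid())); // true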
*/\nexport function getKey(appConfig: AppConfig): string {\n return `${appConfig.appName}!${appConfig.appId}`;\n}\n","/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { getKey } from '../util/get-key';\nimport { AppConfig } from '../interfaces/installation-impl';\nimport { IdChangeCallbackFn } from '../api';\n\nconst fidChangeCallbacks: Map> = new Map();\n\n/**\n * Calls the onIdChange callbacks with the new FID value, and broadcasts the\n * change to other tabs.\n */\nexport function fidChanged(appConfig: AppConfig, fid: string): void {\n const key = getKey(appConfig);\n\n callFidChangeCallbacks(key, fid);\n broadcastFidChange(key, fid);\n}\n\nexport function addCallback(\n appConfig: AppConfig,\n callback: IdChangeCallbackFn\n): void {\n // Open the broadcast channel if it's not already open,\n // to be able to listen to change events from other tabs.\n getBroadcastChannel();\n\n const key = getKey(appConfig);\n\n let callbackSet = fidChangeCallbacks.get(key);\n if (!callbackSet) {\n callbackSet = new Set();\n fidChangeCallbacks.set(key, callbackSet);\n }\n callbackSet.add(callback);\n}\n\nexport function removeCallback(\n appConfig: AppConfig,\n callback: IdChangeCallbackFn\n): void {\n const key = getKey(appConfig);\n\n const callbackSet = fidChangeCallbacks.get(key);\n\n if (!callbackSet) {\n return;\n }\n\n callbackSet.delete(callback);\n if (callbackSet.size === 0) {\n fidChangeCallbacks.delete(key);\n }\n\n // Close broadcast channel if there are no more callbacks.\n closeBroadcastChannel();\n}\n\nfunction callFidChangeCallbacks(key: string, fid: string): void {\n const callbacks = fidChangeCallbacks.get(key);\n if (!callbacks) {\n return;\n }\n\n for (const callback of callbacks) {\n callback(fid);\n }\n}\n\nfunction broadcastFidChange(key: string, fid: string): void {\n const channel = getBroadcastChannel();\n if (channel) {\n channel.postMessage({ key, fid });\n }\n closeBroadcastChannel();\n}\n\nlet broadcastChannel: BroadcastChannel | null = null;\n/** Opens and returns a BroadcastChannel if it is supported by the browser. 
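// Illustrative sketch, not part of the SDK source, of the cross-tab fan-out
// used above: one lazily created BroadcastChannel forwards FID-change messages
// into a local listener registry so every open tab is notified. The channel
// name and message shape are simplified placeholders (the real code also keys
// listeners by app).
const listeners = new Set<(fid: string) => void>();
let channel: BroadcastChannel | null = null;

function ensureChannel(): BroadcastChannel | null {
  if (!channel && 'BroadcastChannel' in self) {
    channel = new BroadcastChannel('example-fid-change');
    channel.onmessage = event => {
      for (const listener of listeners) {
        listener(event.data.fid);
      }
    };
  }
  return channel;
}

function announceFidChange(fid: string): void {
  // Notify this tab directly and other tabs via the channel.
  for (const listener of listeners) {
    listener(fid);
  }
  ensureChannel()?.postMessage({ fid });
}

listeners.add(fid => console.log('FID changed to', fid));
announceFidChange('example-fid');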
*/\nfunction getBroadcastChannel(): BroadcastChannel | null {\n if (!broadcastChannel && 'BroadcastChannel' in self) {\n broadcastChannel = new BroadcastChannel('[Firebase] FID Change');\n broadcastChannel.onmessage = e => {\n callFidChangeCallbacks(e.data.key, e.data.fid);\n };\n }\n return broadcastChannel;\n}\n\nfunction closeBroadcastChannel(): void {\n if (fidChangeCallbacks.size === 0 && broadcastChannel) {\n broadcastChannel.close();\n broadcastChannel = null;\n }\n}\n","/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { DBSchema, IDBPDatabase, openDB } from 'idb';\nimport { AppConfig } from '../interfaces/installation-impl';\nimport { InstallationEntry } from '../interfaces/installation-entry';\nimport { getKey } from '../util/get-key';\nimport { fidChanged } from './fid-changed';\n\nconst DATABASE_NAME = 'firebase-installations-database';\nconst DATABASE_VERSION = 1;\nconst OBJECT_STORE_NAME = 'firebase-installations-store';\n\ninterface InstallationsDB extends DBSchema {\n 'firebase-installations-store': {\n key: string;\n value: InstallationEntry | undefined;\n };\n}\n\nlet dbPromise: Promise> | null = null;\nfunction getDbPromise(): Promise> {\n if (!dbPromise) {\n dbPromise = openDB(DATABASE_NAME, DATABASE_VERSION, {\n upgrade: (db, oldVersion) => {\n // We don't use 'break' in this switch statement, the fall-through\n // behavior is what we want, because if there are multiple versions between\n // the old version and the current version, we want ALL the migrations\n // that correspond to those versions to run, not only the last one.\n // eslint-disable-next-line default-case\n switch (oldVersion) {\n case 0:\n db.createObjectStore(OBJECT_STORE_NAME);\n }\n }\n });\n }\n return dbPromise;\n}\n\n/** Gets record(s) from the objectStore that match the given key. */\nexport async function get(\n appConfig: AppConfig\n): Promise {\n const key = getKey(appConfig);\n const db = await getDbPromise();\n return db\n .transaction(OBJECT_STORE_NAME)\n .objectStore(OBJECT_STORE_NAME)\n .get(key) as Promise;\n}\n\n/** Assigns or overwrites the record for the given key with the given value. */\nexport async function set(\n appConfig: AppConfig,\n value: ValueType\n): Promise {\n const key = getKey(appConfig);\n const db = await getDbPromise();\n const tx = db.transaction(OBJECT_STORE_NAME, 'readwrite');\n const objectStore = tx.objectStore(OBJECT_STORE_NAME);\n const oldValue = (await objectStore.get(key)) as InstallationEntry;\n await objectStore.put(value, key);\n await tx.done;\n\n if (!oldValue || oldValue.fid !== value.fid) {\n fidChanged(appConfig, value.fid);\n }\n\n return value;\n}\n\n/** Removes record(s) from the objectStore that match the given key. 
*/\nexport async function remove(appConfig: AppConfig): Promise {\n const key = getKey(appConfig);\n const db = await getDbPromise();\n const tx = db.transaction(OBJECT_STORE_NAME, 'readwrite');\n await tx.objectStore(OBJECT_STORE_NAME).delete(key);\n await tx.done;\n}\n\n/**\n * Atomically updates a record with the result of updateFn, which gets\n * called with the current value. If newValue is undefined, the record is\n * deleted instead.\n * @return Updated value\n */\nexport async function update(\n appConfig: AppConfig,\n updateFn: (previousValue: InstallationEntry | undefined) => ValueType\n): Promise {\n const key = getKey(appConfig);\n const db = await getDbPromise();\n const tx = db.transaction(OBJECT_STORE_NAME, 'readwrite');\n const store = tx.objectStore(OBJECT_STORE_NAME);\n const oldValue: InstallationEntry | undefined = (await store.get(\n key\n )) as InstallationEntry;\n const newValue = updateFn(oldValue);\n\n if (newValue === undefined) {\n await store.delete(key);\n } else {\n await store.put(newValue, key);\n }\n await tx.done;\n\n if (newValue && (!oldValue || oldValue.fid !== newValue.fid)) {\n fidChanged(appConfig, newValue.fid);\n }\n\n return newValue;\n}\n\nexport async function clear(): Promise {\n const db = await getDbPromise();\n const tx = db.transaction(OBJECT_STORE_NAME, 'readwrite');\n await tx.objectStore(OBJECT_STORE_NAME).clear();\n await tx.done;\n}\n","/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { createInstallationRequest } from '../functions/create-installation-request';\nimport {\n AppConfig,\n FirebaseInstallationsImpl\n} from '../interfaces/installation-impl';\nimport {\n InProgressInstallationEntry,\n InstallationEntry,\n RegisteredInstallationEntry,\n RequestStatus\n} from '../interfaces/installation-entry';\nimport { PENDING_TIMEOUT_MS } from '../util/constants';\nimport { ERROR_FACTORY, ErrorCode, isServerError } from '../util/errors';\nimport { sleep } from '../util/sleep';\nimport { generateFid, INVALID_FID } from './generate-fid';\nimport { remove, set, update } from './idb-manager';\n\nexport interface InstallationEntryWithRegistrationPromise {\n installationEntry: InstallationEntry;\n /** Exist iff the installationEntry is not registered. 
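// Illustrative sketch, not part of the SDK source, of the read-modify-write
// pattern that update() above implements with the 'idb' wrapper: read the old
// value, apply an update function, write the result, and await tx.done before
// reporting. Database and store names are placeholders.
import { openDB } from 'idb';

async function updateRecord<T>(
  key: string,
  updateFn: (previous: T | undefined) => T
): Promise<T> {
  const db = await openDB('example-db', 1, {
    upgrade: database => {
      database.createObjectStore('example-store');
    }
  });
  const tx = db.transaction('example-store', 'readwrite');
  const store = tx.objectStore('example-store');
  const previous = (await store.get(key)) as T | undefined;
  const next = updateFn(previous);
  await store.put(next, key);
  await tx.done; // the transaction commits here
  return next;
}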
*/\n registrationPromise?: Promise;\n}\n\n/**\n * Updates and returns the InstallationEntry from the database.\n * Also triggers a registration request if it is necessary and possible.\n */\nexport async function getInstallationEntry(\n installations: FirebaseInstallationsImpl\n): Promise {\n let registrationPromise: Promise | undefined;\n\n const installationEntry = await update(installations.appConfig, oldEntry => {\n const installationEntry = updateOrCreateInstallationEntry(oldEntry);\n const entryWithPromise = triggerRegistrationIfNecessary(\n installations,\n installationEntry\n );\n registrationPromise = entryWithPromise.registrationPromise;\n return entryWithPromise.installationEntry;\n });\n\n if (installationEntry.fid === INVALID_FID) {\n // FID generation failed. Waiting for the FID from the server.\n return { installationEntry: await registrationPromise! };\n }\n\n return {\n installationEntry,\n registrationPromise\n };\n}\n\n/**\n * Creates a new Installation Entry if one does not exist.\n * Also clears timed out pending requests.\n */\nfunction updateOrCreateInstallationEntry(\n oldEntry: InstallationEntry | undefined\n): InstallationEntry {\n const entry: InstallationEntry = oldEntry || {\n fid: generateFid(),\n registrationStatus: RequestStatus.NOT_STARTED\n };\n\n return clearTimedOutRequest(entry);\n}\n\n/**\n * If the Firebase Installation is not registered yet, this will trigger the\n * registration and return an InProgressInstallationEntry.\n *\n * If registrationPromise does not exist, the installationEntry is guaranteed\n * to be registered.\n */\nfunction triggerRegistrationIfNecessary(\n installations: FirebaseInstallationsImpl,\n installationEntry: InstallationEntry\n): InstallationEntryWithRegistrationPromise {\n if (installationEntry.registrationStatus === RequestStatus.NOT_STARTED) {\n if (!navigator.onLine) {\n // Registration required but app is offline.\n const registrationPromiseWithError = Promise.reject(\n ERROR_FACTORY.create(ErrorCode.APP_OFFLINE)\n );\n return {\n installationEntry,\n registrationPromise: registrationPromiseWithError\n };\n }\n\n // Try registering. Change status to IN_PROGRESS.\n const inProgressEntry: InProgressInstallationEntry = {\n fid: installationEntry.fid,\n registrationStatus: RequestStatus.IN_PROGRESS,\n registrationTime: Date.now()\n };\n const registrationPromise = registerInstallation(\n installations,\n inProgressEntry\n );\n return { installationEntry: inProgressEntry, registrationPromise };\n } else if (\n installationEntry.registrationStatus === RequestStatus.IN_PROGRESS\n ) {\n return {\n installationEntry,\n registrationPromise: waitUntilFidRegistration(installations)\n };\n } else {\n return { installationEntry };\n }\n}\n\n/** This will be executed only once for each new Firebase Installation. */\nasync function registerInstallation(\n installations: FirebaseInstallationsImpl,\n installationEntry: InProgressInstallationEntry\n): Promise {\n try {\n const registeredInstallationEntry = await createInstallationRequest(\n installations,\n installationEntry\n );\n return set(installations.appConfig, registeredInstallationEntry);\n } catch (e) {\n if (isServerError(e) && e.customData.serverCode === 409) {\n // Server returned a \"FID can not be used\" error.\n // Generate a new ID next time.\n await remove(installations.appConfig);\n } else {\n // Registration failed. 
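// Illustrative summary, not part of the SDK source, of the three-way branch in
// triggerRegistrationIfNecessary() above, reduced to plain strings. The enum
// mirrors RequestStatus.
enum SketchStatus {
  NOT_STARTED,
  IN_PROGRESS,
  COMPLETED
}

function nextStep(status: SketchStatus, online: boolean): string {
  switch (status) {
    case SketchStatus.NOT_STARTED:
      // Offline registrations fail immediately with an app-offline error.
      return online ? 'send createInstallation request' : 'reject: app-offline';
    case SketchStatus.IN_PROGRESS:
      // Another call or tab owns the request; poll IndexedDB until it settles.
      return 'wait for the pending registration';
    default:
      return 'already registered; nothing to do';
  }
}

console.log(nextStep(SketchStatus.NOT_STARTED, navigator.onLine));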
Set FID as not registered.\n await set(installations.appConfig, {\n fid: installationEntry.fid,\n registrationStatus: RequestStatus.NOT_STARTED\n });\n }\n throw e;\n }\n}\n\n/** Call if FID registration is pending in another request. */\nasync function waitUntilFidRegistration(\n installations: FirebaseInstallationsImpl\n): Promise {\n // Unfortunately, there is no way of reliably observing when a value in\n // IndexedDB changes (yet, see https://github.com/WICG/indexed-db-observers),\n // so we need to poll.\n\n let entry: InstallationEntry = await updateInstallationRequest(\n installations.appConfig\n );\n while (entry.registrationStatus === RequestStatus.IN_PROGRESS) {\n // createInstallation request still in progress.\n await sleep(100);\n\n entry = await updateInstallationRequest(installations.appConfig);\n }\n\n if (entry.registrationStatus === RequestStatus.NOT_STARTED) {\n // The request timed out or failed in a different call. Try again.\n const { installationEntry, registrationPromise } =\n await getInstallationEntry(installations);\n\n if (registrationPromise) {\n return registrationPromise;\n } else {\n // if there is no registrationPromise, entry is registered.\n return installationEntry as RegisteredInstallationEntry;\n }\n }\n\n return entry;\n}\n\n/**\n * Called only if there is a CreateInstallation request in progress.\n *\n * Updates the InstallationEntry in the DB based on the status of the\n * CreateInstallation request.\n *\n * Returns the updated InstallationEntry.\n */\nfunction updateInstallationRequest(\n appConfig: AppConfig\n): Promise {\n return update(appConfig, oldEntry => {\n if (!oldEntry) {\n throw ERROR_FACTORY.create(ErrorCode.INSTALLATION_NOT_FOUND);\n }\n return clearTimedOutRequest(oldEntry);\n });\n}\n\nfunction clearTimedOutRequest(entry: InstallationEntry): InstallationEntry {\n if (hasInstallationRequestTimedOut(entry)) {\n return {\n fid: entry.fid,\n registrationStatus: RequestStatus.NOT_STARTED\n };\n }\n\n return entry;\n}\n\nfunction hasInstallationRequestTimedOut(\n installationEntry: InstallationEntry\n): boolean {\n return (\n installationEntry.registrationStatus === RequestStatus.IN_PROGRESS &&\n installationEntry.registrationTime + PENDING_TIMEOUT_MS < Date.now()\n );\n}\n","/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { CreateInstallationResponse } from '../interfaces/api-response';\nimport {\n InProgressInstallationEntry,\n RegisteredInstallationEntry,\n RequestStatus\n} from '../interfaces/installation-entry';\nimport { INTERNAL_AUTH_VERSION, PACKAGE_VERSION } from '../util/constants';\nimport {\n extractAuthTokenInfoFromResponse,\n getErrorFromResponse,\n getHeaders,\n getInstallationsEndpoint,\n retryIfServerError\n} from './common';\nimport { FirebaseInstallationsImpl } from '../interfaces/installation-impl';\n\nexport async function createInstallationRequest(\n { appConfig, heartbeatServiceProvider }: FirebaseInstallationsImpl,\n { fid }: 
InProgressInstallationEntry\n): Promise {\n const endpoint = getInstallationsEndpoint(appConfig);\n\n const headers = getHeaders(appConfig);\n\n // If heartbeat service exists, add the heartbeat string to the header.\n const heartbeatService = heartbeatServiceProvider.getImmediate({\n optional: true\n });\n if (heartbeatService) {\n const heartbeatsHeader = await heartbeatService.getHeartbeatsHeader();\n if (heartbeatsHeader) {\n headers.append('x-firebase-client', heartbeatsHeader);\n }\n }\n\n const body = {\n fid,\n authVersion: INTERNAL_AUTH_VERSION,\n appId: appConfig.appId,\n sdkVersion: PACKAGE_VERSION\n };\n\n const request: RequestInit = {\n method: 'POST',\n headers,\n body: JSON.stringify(body)\n };\n\n const response = await retryIfServerError(() => fetch(endpoint, request));\n if (response.ok) {\n const responseValue: CreateInstallationResponse = await response.json();\n const registeredInstallationEntry: RegisteredInstallationEntry = {\n fid: responseValue.fid || fid,\n registrationStatus: RequestStatus.COMPLETED,\n refreshToken: responseValue.refreshToken,\n authToken: extractAuthTokenInfoFromResponse(responseValue.authToken)\n };\n return registeredInstallationEntry;\n } else {\n throw await getErrorFromResponse('Create Installation', response);\n }\n}\n","/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { GenerateAuthTokenResponse } from '../interfaces/api-response';\nimport {\n CompletedAuthToken,\n RegisteredInstallationEntry\n} from '../interfaces/installation-entry';\nimport { PACKAGE_VERSION } from '../util/constants';\nimport {\n extractAuthTokenInfoFromResponse,\n getErrorFromResponse,\n getHeadersWithAuth,\n getInstallationsEndpoint,\n retryIfServerError\n} from './common';\nimport {\n FirebaseInstallationsImpl,\n AppConfig\n} from '../interfaces/installation-impl';\n\nexport async function generateAuthTokenRequest(\n { appConfig, heartbeatServiceProvider }: FirebaseInstallationsImpl,\n installationEntry: RegisteredInstallationEntry\n): Promise {\n const endpoint = getGenerateAuthTokenEndpoint(appConfig, installationEntry);\n\n const headers = getHeadersWithAuth(appConfig, installationEntry);\n\n // If heartbeat service exists, add the heartbeat string to the header.\n const heartbeatService = heartbeatServiceProvider.getImmediate({\n optional: true\n });\n if (heartbeatService) {\n const heartbeatsHeader = await heartbeatService.getHeartbeatsHeader();\n if (heartbeatsHeader) {\n headers.append('x-firebase-client', heartbeatsHeader);\n }\n }\n\n const body = {\n installation: {\n sdkVersion: PACKAGE_VERSION,\n appId: appConfig.appId\n }\n };\n\n const request: RequestInit = {\n method: 'POST',\n headers,\n body: JSON.stringify(body)\n };\n\n const response = await retryIfServerError(() => fetch(endpoint, request));\n if (response.ok) {\n const responseValue: GenerateAuthTokenResponse = await response.json();\n const completedAuthToken: CompletedAuthToken =\n 
extractAuthTokenInfoFromResponse(responseValue);\n return completedAuthToken;\n } else {\n throw await getErrorFromResponse('Generate Auth Token', response);\n }\n}\n\nfunction getGenerateAuthTokenEndpoint(\n appConfig: AppConfig,\n { fid }: RegisteredInstallationEntry\n): string {\n return `${getInstallationsEndpoint(appConfig)}/${fid}/authTokens:generate`;\n}\n","/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { generateAuthTokenRequest } from '../functions/generate-auth-token-request';\nimport {\n AppConfig,\n FirebaseInstallationsImpl\n} from '../interfaces/installation-impl';\nimport {\n AuthToken,\n CompletedAuthToken,\n InProgressAuthToken,\n InstallationEntry,\n RegisteredInstallationEntry,\n RequestStatus\n} from '../interfaces/installation-entry';\nimport { PENDING_TIMEOUT_MS, TOKEN_EXPIRATION_BUFFER } from '../util/constants';\nimport { ERROR_FACTORY, ErrorCode, isServerError } from '../util/errors';\nimport { sleep } from '../util/sleep';\nimport { remove, set, update } from './idb-manager';\n\n/**\n * Returns a valid authentication token for the installation. Generates a new\n * token if one doesn't exist, is expired or about to expire.\n *\n * Should only be called if the Firebase Installation is registered.\n */\nexport async function refreshAuthToken(\n installations: FirebaseInstallationsImpl,\n forceRefresh = false\n): Promise {\n let tokenPromise: Promise | undefined;\n const entry = await update(installations.appConfig, oldEntry => {\n if (!isEntryRegistered(oldEntry)) {\n throw ERROR_FACTORY.create(ErrorCode.NOT_REGISTERED);\n }\n\n const oldAuthToken = oldEntry.authToken;\n if (!forceRefresh && isAuthTokenValid(oldAuthToken)) {\n // There is a valid token in the DB.\n return oldEntry;\n } else if (oldAuthToken.requestStatus === RequestStatus.IN_PROGRESS) {\n // There already is a token request in progress.\n tokenPromise = waitUntilAuthTokenRequest(installations, forceRefresh);\n return oldEntry;\n } else {\n // No token or token expired.\n if (!navigator.onLine) {\n throw ERROR_FACTORY.create(ErrorCode.APP_OFFLINE);\n }\n\n const inProgressEntry = makeAuthTokenRequestInProgressEntry(oldEntry);\n tokenPromise = fetchAuthTokenFromServer(installations, inProgressEntry);\n return inProgressEntry;\n }\n });\n\n const authToken = tokenPromise\n ? await tokenPromise\n : (entry.authToken as CompletedAuthToken);\n return authToken;\n}\n\n/**\n * Call only if FID is registered and Auth Token request is in progress.\n *\n * Waits until the current pending request finishes. 
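// Illustrative worked example, not part of the SDK source, of the endpoint
// strings built by getInstallationsEndpoint() and getGenerateAuthTokenEndpoint()
// above. The project ID and FID values are placeholders.
const apiUrl = 'https://firebaseinstallations.googleapis.com/v1';
const projectId = 'demo-project';
const fid = 'c' + 'A'.repeat(21); // a syntactically valid 22-character FID placeholder

const installationsEndpoint = `${apiUrl}/projects/${projectId}/installations`;
// https://firebaseinstallations.googleapis.com/v1/projects/demo-project/installations
const authTokenEndpoint = `${installationsEndpoint}/${fid}/authTokens:generate`;
// .../projects/demo-project/installations/<fid>/authTokens:generate

console.log(installationsEndpoint, authTokenEndpoint);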
If the request times out,\n * tries once in this thread as well.\n */\nasync function waitUntilAuthTokenRequest(\n installations: FirebaseInstallationsImpl,\n forceRefresh: boolean\n): Promise {\n // Unfortunately, there is no way of reliably observing when a value in\n // IndexedDB changes (yet, see https://github.com/WICG/indexed-db-observers),\n // so we need to poll.\n\n let entry = await updateAuthTokenRequest(installations.appConfig);\n while (entry.authToken.requestStatus === RequestStatus.IN_PROGRESS) {\n // generateAuthToken still in progress.\n await sleep(100);\n\n entry = await updateAuthTokenRequest(installations.appConfig);\n }\n\n const authToken = entry.authToken;\n if (authToken.requestStatus === RequestStatus.NOT_STARTED) {\n // The request timed out or failed in a different call. Try again.\n return refreshAuthToken(installations, forceRefresh);\n } else {\n return authToken;\n }\n}\n\n/**\n * Called only if there is a GenerateAuthToken request in progress.\n *\n * Updates the InstallationEntry in the DB based on the status of the\n * GenerateAuthToken request.\n *\n * Returns the updated InstallationEntry.\n */\nfunction updateAuthTokenRequest(\n appConfig: AppConfig\n): Promise {\n return update(appConfig, oldEntry => {\n if (!isEntryRegistered(oldEntry)) {\n throw ERROR_FACTORY.create(ErrorCode.NOT_REGISTERED);\n }\n\n const oldAuthToken = oldEntry.authToken;\n if (hasAuthTokenRequestTimedOut(oldAuthToken)) {\n return {\n ...oldEntry,\n authToken: { requestStatus: RequestStatus.NOT_STARTED }\n };\n }\n\n return oldEntry;\n });\n}\n\nasync function fetchAuthTokenFromServer(\n installations: FirebaseInstallationsImpl,\n installationEntry: RegisteredInstallationEntry\n): Promise {\n try {\n const authToken = await generateAuthTokenRequest(\n installations,\n installationEntry\n );\n const updatedInstallationEntry: RegisteredInstallationEntry = {\n ...installationEntry,\n authToken\n };\n await set(installations.appConfig, updatedInstallationEntry);\n return authToken;\n } catch (e) {\n if (\n isServerError(e) &&\n (e.customData.serverCode === 401 || e.customData.serverCode === 404)\n ) {\n // Server returned a \"FID not found\" or a \"Invalid authentication\" error.\n // Generate a new ID next time.\n await remove(installations.appConfig);\n } else {\n const updatedInstallationEntry: RegisteredInstallationEntry = {\n ...installationEntry,\n authToken: { requestStatus: RequestStatus.NOT_STARTED }\n };\n await set(installations.appConfig, updatedInstallationEntry);\n }\n throw e;\n }\n}\n\nfunction isEntryRegistered(\n installationEntry: InstallationEntry | undefined\n): installationEntry is RegisteredInstallationEntry {\n return (\n installationEntry !== undefined &&\n installationEntry.registrationStatus === RequestStatus.COMPLETED\n );\n}\n\nfunction isAuthTokenValid(authToken: AuthToken): boolean {\n return (\n authToken.requestStatus === RequestStatus.COMPLETED &&\n !isAuthTokenExpired(authToken)\n );\n}\n\nfunction isAuthTokenExpired(authToken: CompletedAuthToken): boolean {\n const now = Date.now();\n return (\n now < authToken.creationTime ||\n authToken.creationTime + authToken.expiresIn < now + TOKEN_EXPIRATION_BUFFER\n );\n}\n\n/** Returns an updated InstallationEntry with an InProgressAuthToken. 
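// Illustrative worked example, not part of the SDK source, of the expiry rule
// in isAuthTokenExpired() above: with TOKEN_EXPIRATION_BUFFER at one hour, a
// token counts as expired once less than an hour of validity remains (or its
// creationTime lies in the future).
const ONE_HOUR_MS = 60 * 60 * 1000;

function expiresSoon(creationTime: number, expiresIn: number, now: number): boolean {
  return now < creationTime || creationTime + expiresIn < now + ONE_HOUR_MS;
}

const nowMs = Date.now();
console.log(expiresSoon(nowMs, 2 * ONE_HOUR_MS, nowMs)); // false: two hours of validity left
console.log(expiresSoon(nowMs - 90 * 60 * 1000, 2 * ONE_HOUR_MS, nowMs)); // true: only 30 minutes left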
*/\nfunction makeAuthTokenRequestInProgressEntry(\n oldEntry: RegisteredInstallationEntry\n): RegisteredInstallationEntry {\n const inProgressAuthToken: InProgressAuthToken = {\n requestStatus: RequestStatus.IN_PROGRESS,\n requestTime: Date.now()\n };\n return {\n ...oldEntry,\n authToken: inProgressAuthToken\n };\n}\n\nfunction hasAuthTokenRequestTimedOut(authToken: AuthToken): boolean {\n return (\n authToken.requestStatus === RequestStatus.IN_PROGRESS &&\n authToken.requestTime + PENDING_TIMEOUT_MS < Date.now()\n );\n}\n","/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { getInstallationEntry } from '../helpers/get-installation-entry';\nimport { refreshAuthToken } from '../helpers/refresh-auth-token';\nimport { FirebaseInstallationsImpl } from '../interfaces/installation-impl';\nimport { Installations } from '../interfaces/public-types';\n\n/**\n * Returns a Firebase Installations auth token, identifying the current\n * Firebase Installation.\n * @param installations - The `Installations` instance.\n * @param forceRefresh - Force refresh regardless of token expiration.\n *\n * @public\n */\nexport async function getToken(\n installations: Installations,\n forceRefresh = false\n): Promise {\n const installationsImpl = installations as FirebaseInstallationsImpl;\n await completeInstallationRegistration(installationsImpl);\n\n // At this point we either have a Registered Installation in the DB, or we've\n // already thrown an error.\n const authToken = await refreshAuthToken(installationsImpl, forceRefresh);\n return authToken.token;\n}\n\nasync function completeInstallationRegistration(\n installations: FirebaseInstallationsImpl\n): Promise {\n const { registrationPromise } = await getInstallationEntry(installations);\n\n if (registrationPromise) {\n // A createInstallation request is in progress. 
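// Illustrative usage sketch, not part of the SDK source, for the getToken()
// defined here and the getId() defined later in this bundle. It assumes an
// already initialized FirebaseApp and that getInstallations() from this
// package's public API is available (not shown in this excerpt).
import { getInstallations, getId, getToken } from '@firebase/installations';

declare const app: import('@firebase/app').FirebaseApp; // assumed to exist

async function logInstallationInfo(): Promise<void> {
  const installations = getInstallations(app);
  console.log('Installation ID:', await getId(installations));
  console.log('FIS auth token:', await getToken(installations, /* forceRefresh */ false));
}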
Wait until it finishes.\n await registrationPromise;\n }\n}\n","/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { FirebaseApp, FirebaseOptions } from '@firebase/app';\nimport { FirebaseError } from '@firebase/util';\nimport { AppConfig } from '../interfaces/installation-impl';\nimport { ERROR_FACTORY, ErrorCode } from '../util/errors';\n\nexport function extractAppConfig(app: FirebaseApp): AppConfig {\n if (!app || !app.options) {\n throw getMissingValueError('App Configuration');\n }\n\n if (!app.name) {\n throw getMissingValueError('App Name');\n }\n\n // Required app config keys\n const configKeys: Array = [\n 'projectId',\n 'apiKey',\n 'appId'\n ];\n\n for (const keyName of configKeys) {\n if (!app.options[keyName]) {\n throw getMissingValueError(keyName);\n }\n }\n\n return {\n appName: app.name,\n projectId: app.options.projectId!,\n apiKey: app.options.apiKey!,\n appId: app.options.appId!\n };\n}\n\nfunction getMissingValueError(valueName: string): FirebaseError {\n return ERROR_FACTORY.create(ErrorCode.MISSING_APP_CONFIG_VALUES, {\n valueName\n });\n}\n","/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { _registerComponent, _getProvider } from '@firebase/app';\nimport {\n Component,\n ComponentType,\n InstanceFactory,\n ComponentContainer\n} from '@firebase/component';\nimport { getId, getToken } from '../api/index';\nimport { _FirebaseInstallationsInternal } from '../interfaces/public-types';\nimport { FirebaseInstallationsImpl } from '../interfaces/installation-impl';\nimport { extractAppConfig } from '../helpers/extract-app-config';\n\nconst INSTALLATIONS_NAME = 'installations';\nconst INSTALLATIONS_NAME_INTERNAL = 'installations-internal';\n\nconst publicFactory: InstanceFactory<'installations'> = (\n container: ComponentContainer\n) => {\n const app = container.getProvider('app').getImmediate();\n // Throws if app isn't configured properly.\n const appConfig = extractAppConfig(app);\n const heartbeatServiceProvider = _getProvider(app, 'heartbeat');\n\n const installationsImpl: FirebaseInstallationsImpl = {\n app,\n appConfig,\n heartbeatServiceProvider,\n _delete: () => Promise.resolve()\n };\n return installationsImpl;\n};\n\nconst internalFactory: InstanceFactory<'installations-internal'> = (\n container: ComponentContainer\n) => {\n const app = container.getProvider('app').getImmediate();\n // Internal FIS instance relies on 
public FIS instance.\n const installations = _getProvider(app, INSTALLATIONS_NAME).getImmediate();\n\n const installationsInternal: _FirebaseInstallationsInternal = {\n getId: () => getId(installations),\n getToken: (forceRefresh?: boolean) => getToken(installations, forceRefresh)\n };\n return installationsInternal;\n};\n\nexport function registerInstallations(): void {\n _registerComponent(\n new Component(INSTALLATIONS_NAME, publicFactory, ComponentType.PUBLIC)\n );\n _registerComponent(\n new Component(\n INSTALLATIONS_NAME_INTERNAL,\n internalFactory,\n ComponentType.PRIVATE\n )\n );\n}\n","/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { getInstallationEntry } from '../helpers/get-installation-entry';\nimport { refreshAuthToken } from '../helpers/refresh-auth-token';\nimport { FirebaseInstallationsImpl } from '../interfaces/installation-impl';\nimport { Installations } from '../interfaces/public-types';\n\n/**\n * Creates a Firebase Installation if there isn't one for the app and\n * returns the Installation ID.\n * @param installations - The `Installations` instance.\n *\n * @public\n */\nexport async function getId(installations: Installations): Promise {\n const installationsImpl = installations as FirebaseInstallationsImpl;\n const { installationEntry, registrationPromise } = await getInstallationEntry(\n installationsImpl\n );\n\n if (registrationPromise) {\n registrationPromise.catch(console.error);\n } else {\n // If the installation is already registered, update the authentication\n // token if needed.\n refreshAuthToken(installationsImpl).catch(console.error);\n }\n\n return installationEntry.fid;\n}\n","/**\n * The Firebase Installations Web SDK.\n * This SDK does not work in a Node.js environment.\n *\n * @packageDocumentation\n */\n\n/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { registerInstallations } from './functions/config';\nimport { registerVersion } from '@firebase/app';\nimport { name, version } from '../package.json';\n\nexport * from './api';\nexport * from './interfaces/public-types';\n\nregisterInstallations();\nregisterVersion(name, version);\n// BUILD_TARGET will be replaced by values like esm5, esm2017, cjs5, etc during the compilation\nregisterVersion(name, version, '__BUILD_TARGET__');\n","/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the 
\"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n/**\n * Type constant for Firebase Analytics.\n */\nexport const ANALYTICS_TYPE = 'analytics';\n\n// Key to attach FID to in gtag params.\nexport const GA_FID_KEY = 'firebase_id';\nexport const ORIGIN_KEY = 'origin';\n\nexport const FETCH_TIMEOUT_MILLIS = 60 * 1000;\n\nexport const DYNAMIC_CONFIG_URL =\n 'https://firebase.googleapis.com/v1alpha/projects/-/apps/{app-id}/webConfig';\n\nexport const GTAG_URL = 'https://www.googletagmanager.com/gtag/js';\n\nexport const enum GtagCommand {\n EVENT = 'event',\n SET = 'set',\n CONFIG = 'config',\n CONSENT = 'consent',\n GET = 'get'\n}\n","/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Logger } from '@firebase/logger';\n\nexport const logger = new Logger('@firebase/analytics');\n","/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { ErrorFactory, ErrorMap } from '@firebase/util';\n\nexport const enum AnalyticsError {\n ALREADY_EXISTS = 'already-exists',\n ALREADY_INITIALIZED = 'already-initialized',\n ALREADY_INITIALIZED_SETTINGS = 'already-initialized-settings',\n INTEROP_COMPONENT_REG_FAILED = 'interop-component-reg-failed',\n INVALID_ANALYTICS_CONTEXT = 'invalid-analytics-context',\n INDEXEDDB_UNAVAILABLE = 'indexeddb-unavailable',\n FETCH_THROTTLE = 'fetch-throttle',\n CONFIG_FETCH_FAILED = 'config-fetch-failed',\n NO_API_KEY = 'no-api-key',\n NO_APP_ID = 'no-app-id',\n NO_CLIENT_ID = 'no-client-id',\n INVALID_GTAG_RESOURCE = 'invalid-gtag-resource'\n}\n\nconst ERRORS: ErrorMap = {\n [AnalyticsError.ALREADY_EXISTS]:\n 'A Firebase Analytics instance with the appId {$id} ' +\n ' already exists. ' +\n 'Only one Firebase Analytics instance can be created for each appId.',\n [AnalyticsError.ALREADY_INITIALIZED]:\n 'initializeAnalytics() cannot be called again with different options than those ' +\n 'it was initially called with. 
It can be called again with the same options to ' +\n 'return the existing instance, or getAnalytics() can be used ' +\n 'to get a reference to the already-intialized instance.',\n [AnalyticsError.ALREADY_INITIALIZED_SETTINGS]:\n 'Firebase Analytics has already been initialized.' +\n 'settings() must be called before initializing any Analytics instance' +\n 'or it will have no effect.',\n [AnalyticsError.INTEROP_COMPONENT_REG_FAILED]:\n 'Firebase Analytics Interop Component failed to instantiate: {$reason}',\n [AnalyticsError.INVALID_ANALYTICS_CONTEXT]:\n 'Firebase Analytics is not supported in this environment. ' +\n 'Wrap initialization of analytics in analytics.isSupported() ' +\n 'to prevent initialization in unsupported environments. Details: {$errorInfo}',\n [AnalyticsError.INDEXEDDB_UNAVAILABLE]:\n 'IndexedDB unavailable or restricted in this environment. ' +\n 'Wrap initialization of analytics in analytics.isSupported() ' +\n 'to prevent initialization in unsupported environments. Details: {$errorInfo}',\n [AnalyticsError.FETCH_THROTTLE]:\n 'The config fetch request timed out while in an exponential backoff state.' +\n ' Unix timestamp in milliseconds when fetch request throttling ends: {$throttleEndTimeMillis}.',\n [AnalyticsError.CONFIG_FETCH_FAILED]:\n 'Dynamic config fetch failed: [{$httpStatus}] {$responseMessage}',\n [AnalyticsError.NO_API_KEY]:\n 'The \"apiKey\" field is empty in the local Firebase config. Firebase Analytics requires this field to' +\n 'contain a valid API key.',\n [AnalyticsError.NO_APP_ID]:\n 'The \"appId\" field is empty in the local Firebase config. Firebase Analytics requires this field to' +\n 'contain a valid app ID.',\n [AnalyticsError.NO_CLIENT_ID]: 'The \"client_id\" field is empty.',\n [AnalyticsError.INVALID_GTAG_RESOURCE]:\n 'Trusted Types detected an invalid gtag resource: {$gtagURL}.'\n};\n\ninterface ErrorParams {\n [AnalyticsError.ALREADY_EXISTS]: { id: string };\n [AnalyticsError.INTEROP_COMPONENT_REG_FAILED]: { reason: Error };\n [AnalyticsError.FETCH_THROTTLE]: { throttleEndTimeMillis: number };\n [AnalyticsError.CONFIG_FETCH_FAILED]: {\n httpStatus: number;\n responseMessage: string;\n };\n [AnalyticsError.INVALID_ANALYTICS_CONTEXT]: { errorInfo: string };\n [AnalyticsError.INDEXEDDB_UNAVAILABLE]: { errorInfo: string };\n [AnalyticsError.INVALID_GTAG_RESOURCE]: { gtagURL: string };\n}\n\nexport const ERROR_FACTORY = new ErrorFactory(\n 'analytics',\n 'Analytics',\n ERRORS\n);\n","/**\n * @license\n * Copyright 2019 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n CustomParams,\n ControlParams,\n EventParams,\n ConsentSettings\n} from './public-types';\nimport { DynamicConfig, DataLayer, Gtag, MinimalDynamicConfig } from './types';\nimport { GtagCommand, GTAG_URL } from './constants';\nimport { logger } from './logger';\nimport { AnalyticsError, ERROR_FACTORY } from './errors';\n\n// Possible parameter types for gtag 'event' and 'config' commands\ntype GtagConfigOrEventParams = ControlParams & 
EventParams & CustomParams;\n\n/**\n * Verifies and creates a TrustedScriptURL.\n */\nexport function createGtagTrustedTypesScriptURL(url: string): string {\n if (!url.startsWith(GTAG_URL)) {\n const err = ERROR_FACTORY.create(AnalyticsError.INVALID_GTAG_RESOURCE, {\n gtagURL: url\n });\n logger.warn(err.message);\n return '';\n }\n return url;\n}\n\n/**\n * Makeshift polyfill for Promise.allSettled(). Resolves when all promises\n * have either resolved or rejected.\n *\n * @param promises Array of promises to wait for.\n */\nexport function promiseAllSettled(\n promises: Array>\n): Promise {\n return Promise.all(promises.map(promise => promise.catch(e => e)));\n}\n\n/**\n * Creates a TrustedTypePolicy object that implements the rules passed as policyOptions.\n *\n * @param policyName A string containing the name of the policy\n * @param policyOptions Object containing implementations of instance methods for TrustedTypesPolicy, see {@link https://developer.mozilla.org/en-US/docs/Web/API/TrustedTypePolicy#instance_methods\n * | the TrustedTypePolicy reference documentation}.\n */\nexport function createTrustedTypesPolicy(\n policyName: string,\n policyOptions: Partial\n): Partial | undefined {\n // Create a TrustedTypes policy that we can use for updating src\n // properties\n let trustedTypesPolicy: Partial | undefined;\n if (window.trustedTypes) {\n trustedTypesPolicy = window.trustedTypes.createPolicy(\n policyName,\n policyOptions\n );\n }\n return trustedTypesPolicy;\n}\n\n/**\n * Inserts gtag script tag into the page to asynchronously download gtag.\n * @param dataLayerName Name of datalayer (most often the default, \"_dataLayer\").\n */\nexport function insertScriptTag(\n dataLayerName: string,\n measurementId: string\n): void {\n const trustedTypesPolicy = createTrustedTypesPolicy(\n 'firebase-js-sdk-policy',\n {\n createScriptURL: createGtagTrustedTypesScriptURL\n }\n );\n\n const script = document.createElement('script');\n // We are not providing an analyticsId in the URL because it would trigger a `page_view`\n // without fid. We will initialize ga-id using gtag (config) command together with fid.\n\n const gtagScriptURL = `${GTAG_URL}?l=${dataLayerName}&id=${measurementId}`;\n (script.src as string | TrustedScriptURL) = trustedTypesPolicy\n ? 
(trustedTypesPolicy as TrustedTypePolicy)?.createScriptURL(gtagScriptURL)\n : gtagScriptURL;\n\n script.async = true;\n document.head.appendChild(script);\n}\n\n/**\n * Get reference to, or create, global datalayer.\n * @param dataLayerName Name of datalayer (most often the default, \"_dataLayer\").\n */\nexport function getOrCreateDataLayer(dataLayerName: string): DataLayer {\n // Check for existing dataLayer and create if needed.\n let dataLayer: DataLayer = [];\n if (Array.isArray(window[dataLayerName])) {\n dataLayer = window[dataLayerName] as DataLayer;\n } else {\n window[dataLayerName] = dataLayer;\n }\n return dataLayer;\n}\n\n/**\n * Wrapped gtag logic when gtag is called with 'config' command.\n *\n * @param gtagCore Basic gtag function that just appends to dataLayer.\n * @param initializationPromisesMap Map of appIds to their initialization promises.\n * @param dynamicConfigPromisesList Array of dynamic config fetch promises.\n * @param measurementIdToAppId Map of GA measurementIDs to corresponding Firebase appId.\n * @param measurementId GA Measurement ID to set config for.\n * @param gtagParams Gtag config params to set.\n */\nasync function gtagOnConfig(\n gtagCore: Gtag,\n initializationPromisesMap: { [appId: string]: Promise },\n dynamicConfigPromisesList: Array<\n Promise\n >,\n measurementIdToAppId: { [measurementId: string]: string },\n measurementId: string,\n gtagParams?: ControlParams & EventParams & CustomParams\n): Promise {\n // If config is already fetched, we know the appId and can use it to look up what FID promise we\n /// are waiting for, and wait only on that one.\n const correspondingAppId = measurementIdToAppId[measurementId as string];\n try {\n if (correspondingAppId) {\n await initializationPromisesMap[correspondingAppId];\n } else {\n // If config is not fetched yet, wait for all configs (we don't know which one we need) and\n // find the appId (if any) corresponding to this measurementId. If there is one, wait on\n // that appId's initialization promise. 
If there is none, promise resolves and gtag\n // call goes through.\n const dynamicConfigResults = await promiseAllSettled(\n dynamicConfigPromisesList\n );\n const foundConfig = dynamicConfigResults.find(\n config => config.measurementId === measurementId\n );\n if (foundConfig) {\n await initializationPromisesMap[foundConfig.appId];\n }\n }\n } catch (e) {\n logger.error(e);\n }\n gtagCore(GtagCommand.CONFIG, measurementId, gtagParams);\n}\n\n/**\n * Wrapped gtag logic when gtag is called with 'event' command.\n *\n * @param gtagCore Basic gtag function that just appends to dataLayer.\n * @param initializationPromisesMap Map of appIds to their initialization promises.\n * @param dynamicConfigPromisesList Array of dynamic config fetch promises.\n * @param measurementId GA Measurement ID to log event to.\n * @param gtagParams Params to log with this event.\n */\nasync function gtagOnEvent(\n gtagCore: Gtag,\n initializationPromisesMap: { [appId: string]: Promise },\n dynamicConfigPromisesList: Array<\n Promise\n >,\n measurementId: string,\n gtagParams?: ControlParams & EventParams & CustomParams\n): Promise {\n try {\n let initializationPromisesToWaitFor: Array> = [];\n\n // If there's a 'send_to' param, check if any ID specified matches\n // an initializeIds() promise we are waiting for.\n if (gtagParams && gtagParams['send_to']) {\n let gaSendToList: string | string[] = gtagParams['send_to'];\n // Make it an array if is isn't, so it can be dealt with the same way.\n if (!Array.isArray(gaSendToList)) {\n gaSendToList = [gaSendToList];\n }\n // Checking 'send_to' fields requires having all measurement ID results back from\n // the dynamic config fetch.\n const dynamicConfigResults = await promiseAllSettled(\n dynamicConfigPromisesList\n );\n for (const sendToId of gaSendToList) {\n // Any fetched dynamic measurement ID that matches this 'send_to' ID\n const foundConfig = dynamicConfigResults.find(\n config => config.measurementId === sendToId\n );\n const initializationPromise =\n foundConfig && initializationPromisesMap[foundConfig.appId];\n if (initializationPromise) {\n initializationPromisesToWaitFor.push(initializationPromise);\n } else {\n // Found an item in 'send_to' that is not associated\n // directly with an FID, possibly a group. Empty this array,\n // exit the loop early, and let it get populated below.\n initializationPromisesToWaitFor = [];\n break;\n }\n }\n }\n\n // This will be unpopulated if there was no 'send_to' field , or\n // if not all entries in the 'send_to' field could be mapped to\n // a FID. 
In these cases, wait on all pending initialization promises.\n if (initializationPromisesToWaitFor.length === 0) {\n initializationPromisesToWaitFor = Object.values(\n initializationPromisesMap\n );\n }\n\n // Run core gtag function with args after all relevant initialization\n // promises have been resolved.\n await Promise.all(initializationPromisesToWaitFor);\n // Workaround for http://b/141370449 - third argument cannot be undefined.\n gtagCore(GtagCommand.EVENT, measurementId, gtagParams || {});\n } catch (e) {\n logger.error(e);\n }\n}\n\n/**\n * Wraps a standard gtag function with extra code to wait for completion of\n * relevant initialization promises before sending requests.\n *\n * @param gtagCore Basic gtag function that just appends to dataLayer.\n * @param initializationPromisesMap Map of appIds to their initialization promises.\n * @param dynamicConfigPromisesList Array of dynamic config fetch promises.\n * @param measurementIdToAppId Map of GA measurementIDs to corresponding Firebase appId.\n */\nfunction wrapGtag(\n gtagCore: Gtag,\n /**\n * Allows wrapped gtag calls to wait on whichever intialization promises are required,\n * depending on the contents of the gtag params' `send_to` field, if any.\n */\n initializationPromisesMap: { [appId: string]: Promise },\n /**\n * Wrapped gtag calls sometimes require all dynamic config fetches to have returned\n * before determining what initialization promises (which include FIDs) to wait for.\n */\n dynamicConfigPromisesList: Array<\n Promise\n >,\n /**\n * Wrapped gtag config calls can narrow down which initialization promise (with FID)\n * to wait for if the measurementId is already fetched, by getting the corresponding appId,\n * which is the key for the initialization promises map.\n */\n measurementIdToAppId: { [measurementId: string]: string }\n): Gtag {\n /**\n * Wrapper around gtag that ensures FID is sent with gtag calls.\n * @param command Gtag command type.\n * @param idOrNameOrParams Measurement ID if command is EVENT/CONFIG, params if command is SET.\n * @param gtagParams Params if event is EVENT/CONFIG.\n */\n async function gtagWrapper(\n command: 'config' | 'set' | 'event' | 'consent' | 'get' | string,\n ...args: unknown[]\n ): Promise {\n try {\n // If event, check that relevant initialization promises have completed.\n if (command === GtagCommand.EVENT) {\n const [measurementId, gtagParams] = args;\n // If EVENT, second arg must be measurementId.\n await gtagOnEvent(\n gtagCore,\n initializationPromisesMap,\n dynamicConfigPromisesList,\n measurementId as string,\n gtagParams as GtagConfigOrEventParams\n );\n } else if (command === GtagCommand.CONFIG) {\n const [measurementId, gtagParams] = args;\n // If CONFIG, second arg must be measurementId.\n await gtagOnConfig(\n gtagCore,\n initializationPromisesMap,\n dynamicConfigPromisesList,\n measurementIdToAppId,\n measurementId as string,\n gtagParams as GtagConfigOrEventParams\n );\n } else if (command === GtagCommand.CONSENT) {\n const [gtagParams] = args;\n gtagCore(GtagCommand.CONSENT, 'update', gtagParams as ConsentSettings);\n } else if (command === GtagCommand.GET) {\n const [measurementId, fieldName, callback] = args;\n gtagCore(\n GtagCommand.GET,\n measurementId as string,\n fieldName as string,\n callback as (...args: unknown[]) => void\n );\n } else if (command === GtagCommand.SET) {\n const [customParams] = args;\n // If SET, second arg must be params.\n gtagCore(GtagCommand.SET, customParams as CustomParams);\n } else {\n gtagCore(command, ...args);\n 
}\n } catch (e) {\n logger.error(e);\n }\n }\n return gtagWrapper as Gtag;\n}\n\n/**\n * Creates global gtag function or wraps existing one if found.\n * This wrapped function attaches Firebase instance ID (FID) to gtag 'config' and\n * 'event' calls that belong to the GAID associated with this Firebase instance.\n *\n * @param initializationPromisesMap Map of appIds to their initialization promises.\n * @param dynamicConfigPromisesList Array of dynamic config fetch promises.\n * @param measurementIdToAppId Map of GA measurementIDs to corresponding Firebase appId.\n * @param dataLayerName Name of global GA datalayer array.\n * @param gtagFunctionName Name of global gtag function (\"gtag\" if not user-specified).\n */\nexport function wrapOrCreateGtag(\n initializationPromisesMap: { [appId: string]: Promise },\n dynamicConfigPromisesList: Array<\n Promise\n >,\n measurementIdToAppId: { [measurementId: string]: string },\n dataLayerName: string,\n gtagFunctionName: string\n): {\n gtagCore: Gtag;\n wrappedGtag: Gtag;\n} {\n // Create a basic core gtag function\n let gtagCore: Gtag = function (..._args: unknown[]) {\n // Must push IArguments object, not an array.\n (window[dataLayerName] as DataLayer).push(arguments);\n };\n\n // Replace it with existing one if found\n if (\n window[gtagFunctionName] &&\n typeof window[gtagFunctionName] === 'function'\n ) {\n // @ts-ignore\n gtagCore = window[gtagFunctionName];\n }\n\n window[gtagFunctionName] = wrapGtag(\n gtagCore,\n initializationPromisesMap,\n dynamicConfigPromisesList,\n measurementIdToAppId\n );\n\n return {\n gtagCore,\n wrappedGtag: window[gtagFunctionName] as Gtag\n };\n}\n\n/**\n * Returns the script tag in the DOM matching both the gtag url pattern\n * and the provided data layer name.\n */\nexport function findGtagScriptOnPage(\n dataLayerName: string\n): HTMLScriptElement | null {\n const scriptTags = window.document.getElementsByTagName('script');\n for (const tag of Object.values(scriptTags)) {\n if (\n tag.src &&\n tag.src.includes(GTAG_URL) &&\n tag.src.includes(dataLayerName)\n ) {\n return tag;\n }\n }\n return null;\n}\n","/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n/**\n * @fileoverview Most logic is copied from packages/remote-config/src/client/retrying_client.ts\n */\n\nimport { FirebaseApp } from '@firebase/app';\nimport { DynamicConfig, ThrottleMetadata, MinimalDynamicConfig } from './types';\nimport { FirebaseError, calculateBackoffMillis } from '@firebase/util';\nimport { AnalyticsError, ERROR_FACTORY } from './errors';\nimport { DYNAMIC_CONFIG_URL, FETCH_TIMEOUT_MILLIS } from './constants';\nimport { logger } from './logger';\n\n// App config fields needed by analytics.\nexport interface AppFields {\n appId: string;\n apiKey: string;\n measurementId?: string;\n}\n\n/**\n * Backoff factor for 503 errors, which we want to be conservative about\n * to avoid overloading servers. 
Each retry interval will be\n * BASE_INTERVAL_MILLIS * LONG_RETRY_FACTOR ^ retryCount, so the second one\n * will be ~30 seconds (with fuzzing).\n */\nexport const LONG_RETRY_FACTOR = 30;\n\n/**\n * Base wait interval to multiplied by backoffFactor^backoffCount.\n */\nconst BASE_INTERVAL_MILLIS = 1000;\n\n/**\n * Stubbable retry data storage class.\n */\nclass RetryData {\n constructor(\n public throttleMetadata: { [appId: string]: ThrottleMetadata } = {},\n public intervalMillis: number = BASE_INTERVAL_MILLIS\n ) {}\n\n getThrottleMetadata(appId: string): ThrottleMetadata {\n return this.throttleMetadata[appId];\n }\n\n setThrottleMetadata(appId: string, metadata: ThrottleMetadata): void {\n this.throttleMetadata[appId] = metadata;\n }\n\n deleteThrottleMetadata(appId: string): void {\n delete this.throttleMetadata[appId];\n }\n}\n\nconst defaultRetryData = new RetryData();\n\n/**\n * Set GET request headers.\n * @param apiKey App API key.\n */\nfunction getHeaders(apiKey: string): Headers {\n return new Headers({\n Accept: 'application/json',\n 'x-goog-api-key': apiKey\n });\n}\n\n/**\n * Fetches dynamic config from backend.\n * @param app Firebase app to fetch config for.\n */\nexport async function fetchDynamicConfig(\n appFields: AppFields\n): Promise {\n const { appId, apiKey } = appFields;\n const request: RequestInit = {\n method: 'GET',\n headers: getHeaders(apiKey)\n };\n const appUrl = DYNAMIC_CONFIG_URL.replace('{app-id}', appId);\n const response = await fetch(appUrl, request);\n if (response.status !== 200 && response.status !== 304) {\n let errorMessage = '';\n try {\n // Try to get any error message text from server response.\n const jsonResponse = (await response.json()) as {\n error?: { message?: string };\n };\n if (jsonResponse.error?.message) {\n errorMessage = jsonResponse.error.message;\n }\n } catch (_ignored) {}\n throw ERROR_FACTORY.create(AnalyticsError.CONFIG_FETCH_FAILED, {\n httpStatus: response.status,\n responseMessage: errorMessage\n });\n }\n return response.json();\n}\n\n/**\n * Fetches dynamic config from backend, retrying if failed.\n * @param app Firebase app to fetch config for.\n */\nexport async function fetchDynamicConfigWithRetry(\n app: FirebaseApp,\n // retryData and timeoutMillis are parameterized to allow passing a different value for testing.\n retryData: RetryData = defaultRetryData,\n timeoutMillis?: number\n): Promise {\n const { appId, apiKey, measurementId } = app.options;\n\n if (!appId) {\n throw ERROR_FACTORY.create(AnalyticsError.NO_APP_ID);\n }\n\n if (!apiKey) {\n if (measurementId) {\n return {\n measurementId,\n appId\n };\n }\n throw ERROR_FACTORY.create(AnalyticsError.NO_API_KEY);\n }\n\n const throttleMetadata: ThrottleMetadata = retryData.getThrottleMetadata(\n appId\n ) || {\n backoffCount: 0,\n throttleEndTimeMillis: Date.now()\n };\n\n const signal = new AnalyticsAbortSignal();\n\n setTimeout(\n async () => {\n // Note a very low delay, eg < 10ms, can elapse before listeners are initialized.\n signal.abort();\n },\n timeoutMillis !== undefined ? 
timeoutMillis : FETCH_TIMEOUT_MILLIS\n );\n\n return attemptFetchDynamicConfigWithRetry(\n { appId, apiKey, measurementId },\n throttleMetadata,\n signal,\n retryData\n );\n}\n\n/**\n * Runs one retry attempt.\n * @param appFields Necessary app config fields.\n * @param throttleMetadata Ongoing metadata to determine throttling times.\n * @param signal Abort signal.\n */\nasync function attemptFetchDynamicConfigWithRetry(\n appFields: AppFields,\n { throttleEndTimeMillis, backoffCount }: ThrottleMetadata,\n signal: AnalyticsAbortSignal,\n retryData: RetryData = defaultRetryData // for testing\n): Promise {\n const { appId, measurementId } = appFields;\n // Starts with a (potentially zero) timeout to support resumption from stored state.\n // Ensures the throttle end time is honored if the last attempt timed out.\n // Note the SDK will never make a request if the fetch timeout expires at this point.\n try {\n await setAbortableTimeout(signal, throttleEndTimeMillis);\n } catch (e) {\n if (measurementId) {\n logger.warn(\n `Timed out fetching this Firebase app's measurement ID from the server.` +\n ` Falling back to the measurement ID ${measurementId}` +\n ` provided in the \"measurementId\" field in the local Firebase config. [${\n (e as Error)?.message\n }]`\n );\n return { appId, measurementId };\n }\n throw e;\n }\n\n try {\n const response = await fetchDynamicConfig(appFields);\n\n // Note the SDK only clears throttle state if response is success or non-retriable.\n retryData.deleteThrottleMetadata(appId);\n\n return response;\n } catch (e) {\n const error = e as Error;\n if (!isRetriableError(error)) {\n retryData.deleteThrottleMetadata(appId);\n if (measurementId) {\n logger.warn(\n `Failed to fetch this Firebase app's measurement ID from the server.` +\n ` Falling back to the measurement ID ${measurementId}` +\n ` provided in the \"measurementId\" field in the local Firebase config. [${error?.message}]`\n );\n return { appId, measurementId };\n } else {\n throw e;\n }\n }\n\n const backoffMillis =\n Number(error?.customData?.httpStatus) === 503\n ? calculateBackoffMillis(\n backoffCount,\n retryData.intervalMillis,\n LONG_RETRY_FACTOR\n )\n : calculateBackoffMillis(backoffCount, retryData.intervalMillis);\n\n // Increments backoff state.\n const throttleMetadata = {\n throttleEndTimeMillis: Date.now() + backoffMillis,\n backoffCount: backoffCount + 1\n };\n\n // Persists state.\n retryData.setThrottleMetadata(appId, throttleMetadata);\n logger.debug(`Calling attemptFetch again in ${backoffMillis} millis`);\n\n return attemptFetchDynamicConfigWithRetry(\n appFields,\n throttleMetadata,\n signal,\n retryData\n );\n }\n}\n\n/**\n * Supports waiting on a backoff by:\n *\n *
<ul>\n *   <li>Promisifying setTimeout, so we can set a timeout in our Promise chain</li>\n *   <li>Listening on a signal bus for abort events, just like the Fetch API</li>\n *   <li>Failing in the same way the Fetch API fails, so timing out a live request and a throttled\n *       request appear the same.</li>\n * </ul>\n *\n * <p>
Visible for testing.\n */\nfunction setAbortableTimeout(\n signal: AnalyticsAbortSignal,\n throttleEndTimeMillis: number\n): Promise {\n return new Promise((resolve, reject) => {\n // Derives backoff from given end time, normalizing negative numbers to zero.\n const backoffMillis = Math.max(throttleEndTimeMillis - Date.now(), 0);\n\n const timeout = setTimeout(resolve, backoffMillis);\n\n // Adds listener, rather than sets onabort, because signal is a shared object.\n signal.addEventListener(() => {\n clearTimeout(timeout);\n // If the request completes before this timeout, the rejection has no effect.\n reject(\n ERROR_FACTORY.create(AnalyticsError.FETCH_THROTTLE, {\n throttleEndTimeMillis\n })\n );\n });\n });\n}\n\ntype RetriableError = FirebaseError & { customData: { httpStatus: string } };\n\n/**\n * Returns true if the {@link Error} indicates a fetch request may succeed later.\n */\nfunction isRetriableError(e: Error): e is RetriableError {\n if (!(e instanceof FirebaseError) || !e.customData) {\n return false;\n }\n\n // Uses string index defined by ErrorData, which FirebaseError implements.\n const httpStatus = Number(e.customData['httpStatus']);\n\n return (\n httpStatus === 429 ||\n httpStatus === 500 ||\n httpStatus === 503 ||\n httpStatus === 504\n );\n}\n\n/**\n * Shims a minimal AbortSignal (copied from Remote Config).\n *\n *
<p>
AbortController's AbortSignal conveniently decouples fetch timeout logic from other aspects\n * of networking, such as retries. Firebase doesn't use AbortController enough to justify a\n * polyfill recommendation, like we do with the Fetch API, but this minimal shim can easily be\n * swapped out if/when we do.\n */\nexport class AnalyticsAbortSignal {\n listeners: Array<() => void> = [];\n addEventListener(listener: () => void): void {\n this.listeners.push(listener);\n }\n abort(): void {\n this.listeners.forEach(listener => listener());\n }\n}\n","/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { DynamicConfig, Gtag, MinimalDynamicConfig } from './types';\nimport { GtagCommand, GA_FID_KEY, ORIGIN_KEY } from './constants';\nimport { _FirebaseInstallationsInternal } from '@firebase/installations';\nimport { fetchDynamicConfigWithRetry } from './get-config';\nimport { logger } from './logger';\nimport { FirebaseApp } from '@firebase/app';\nimport {\n isIndexedDBAvailable,\n validateIndexedDBOpenable\n} from '@firebase/util';\nimport { ERROR_FACTORY, AnalyticsError } from './errors';\nimport { findGtagScriptOnPage, insertScriptTag } from './helpers';\nimport { AnalyticsSettings } from './public-types';\nimport {\n defaultConsentSettingsForInit,\n _setConsentDefaultForInit,\n defaultEventParametersForInit,\n _setDefaultEventParametersForInit\n} from './functions';\n\nasync function validateIndexedDB(): Promise {\n if (!isIndexedDBAvailable()) {\n logger.warn(\n ERROR_FACTORY.create(AnalyticsError.INDEXEDDB_UNAVAILABLE, {\n errorInfo: 'IndexedDB is not available in this environment.'\n }).message\n );\n return false;\n } else {\n try {\n await validateIndexedDBOpenable();\n } catch (e) {\n logger.warn(\n ERROR_FACTORY.create(AnalyticsError.INDEXEDDB_UNAVAILABLE, {\n errorInfo: (e as Error)?.toString()\n }).message\n );\n return false;\n }\n }\n return true;\n}\n\n/**\n * Initialize the analytics instance in gtag.js by calling config command with fid.\n *\n * NOTE: We combine analytics initialization and setting fid together because we want fid to be\n * part of the `page_view` event that's sent during the initialization\n * @param app Firebase app\n * @param gtagCore The gtag function that's not wrapped.\n * @param dynamicConfigPromisesList Array of all dynamic config promises.\n * @param measurementIdToAppId Maps measurementID to appID.\n * @param installations _FirebaseInstallationsInternal instance.\n *\n * @returns Measurement ID.\n */\nexport async function _initializeAnalytics(\n app: FirebaseApp,\n dynamicConfigPromisesList: Array<\n Promise\n >,\n measurementIdToAppId: { [key: string]: string },\n installations: _FirebaseInstallationsInternal,\n gtagCore: Gtag,\n dataLayerName: string,\n options?: AnalyticsSettings\n): Promise {\n const dynamicConfigPromise = fetchDynamicConfigWithRetry(app);\n // Once fetched, map measurementIds to appId, for ease of lookup in wrapped gtag function.\n dynamicConfigPromise\n 
.then(config => {\n measurementIdToAppId[config.measurementId] = config.appId;\n if (\n app.options.measurementId &&\n config.measurementId !== app.options.measurementId\n ) {\n logger.warn(\n `The measurement ID in the local Firebase config (${app.options.measurementId})` +\n ` does not match the measurement ID fetched from the server (${config.measurementId}).` +\n ` To ensure analytics events are always sent to the correct Analytics property,` +\n ` update the` +\n ` measurement ID field in the local config or remove it from the local config.`\n );\n }\n })\n .catch(e => logger.error(e));\n // Add to list to track state of all dynamic config promises.\n dynamicConfigPromisesList.push(dynamicConfigPromise);\n\n const fidPromise: Promise = validateIndexedDB().then(\n envIsValid => {\n if (envIsValid) {\n return installations.getId();\n } else {\n return undefined;\n }\n }\n );\n\n const [dynamicConfig, fid] = await Promise.all([\n dynamicConfigPromise,\n fidPromise\n ]);\n\n // Detect if user has already put the gtag ';\n }\n const iframeContents = '' + script + '';\n try {\n this.myIFrame.doc.open();\n this.myIFrame.doc.write(iframeContents);\n this.myIFrame.doc.close();\n } catch (e) {\n log('frame writing exception');\n if (e.stack) {\n log(e.stack);\n }\n log(e);\n }\n } else {\n this.commandCB = commandCB;\n this.onMessageCB = onMessageCB;\n }\n }\n\n /**\n * Each browser has its own funny way to handle iframes. Here we mush them all together into one object that I can\n * actually use.\n */\n private static createIFrame_(): IFrameElement {\n const iframe = document.createElement('iframe') as IFrameElement;\n iframe.style.display = 'none';\n\n // This is necessary in order to initialize the document inside the iframe\n if (document.body) {\n document.body.appendChild(iframe);\n try {\n // If document.domain has been modified in IE, this will throw an error, and we need to set the\n // domain of the iframe's document manually. We can do this via a javascript: url as the src attribute\n // Also note that we must do this *after* the iframe has been appended to the page. Otherwise it doesn't work.\n const a = iframe.contentWindow.document;\n if (!a) {\n // Apologies for the log-spam, I need to do something to keep closure from optimizing out the assignment above.\n log('No IE domain setting required');\n }\n } catch (e) {\n const domain = document.domain;\n iframe.src =\n \"javascript:void((function(){document.open();document.domain='\" +\n domain +\n \"';document.close();})())\";\n }\n } else {\n // LongPollConnection attempts to delay initialization until the document is ready, so hopefully this\n // never gets hit.\n throw 'Document body has not initialized. 
Wait to initialize Firebase until after the document is ready.';\n }\n\n // Get the document of the iframe in a browser-specific way.\n if (iframe.contentDocument) {\n iframe.doc = iframe.contentDocument; // Firefox, Opera, Safari\n } else if (iframe.contentWindow) {\n iframe.doc = iframe.contentWindow.document; // Internet Explorer\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n } else if ((iframe as any).document) {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n iframe.doc = (iframe as any).document; //others?\n }\n\n return iframe;\n }\n\n /**\n * Cancel all outstanding queries and remove the frame.\n */\n close() {\n //Mark this iframe as dead, so no new requests are sent.\n this.alive = false;\n\n if (this.myIFrame) {\n //We have to actually remove all of the html inside this iframe before removing it from the\n //window, or IE will continue loading and executing the script tags we've already added, which\n //can lead to some errors being thrown. Setting textContent seems to be the safest way to do this.\n this.myIFrame.doc.body.textContent = '';\n setTimeout(() => {\n if (this.myIFrame !== null) {\n document.body.removeChild(this.myIFrame);\n this.myIFrame = null;\n }\n }, Math.floor(0));\n }\n\n // Protect from being called recursively.\n const onDisconnect = this.onDisconnect;\n if (onDisconnect) {\n this.onDisconnect = null;\n onDisconnect();\n }\n }\n\n /**\n * Actually start the long-polling session by adding the first script tag(s) to the iframe.\n * @param id - The ID of this connection\n * @param pw - The password for this connection\n */\n startLongPoll(id: string, pw: string) {\n this.myID = id;\n this.myPW = pw;\n this.alive = true;\n\n //send the initial request. If there are requests queued, make sure that we transmit as many as we are currently able to.\n while (this.newRequest_()) {}\n }\n\n /**\n * This is called any time someone might want a script tag to be added. It adds a script tag when there aren't\n * too many outstanding requests and we are still alive.\n *\n * If there are outstanding packet segments to send, it sends one. If there aren't, it sends a long-poll anyways if\n * needed.\n */\n private newRequest_() {\n // We keep one outstanding request open all the time to receive data, but if we need to send data\n // (pendingSegs.length > 0) then we create a new request to send the data. The server will automatically\n // close the old request.\n if (\n this.alive &&\n this.sendNewPolls &&\n this.outstandingRequests.size < (this.pendingSegs.length > 0 ? 2 : 1)\n ) {\n //construct our url\n this.currentSerial++;\n const urlParams: { [k: string]: string | number } = {};\n urlParams[FIREBASE_LONGPOLL_ID_PARAM] = this.myID;\n urlParams[FIREBASE_LONGPOLL_PW_PARAM] = this.myPW;\n urlParams[FIREBASE_LONGPOLL_SERIAL_PARAM] = this.currentSerial;\n let theURL = this.urlFn(urlParams);\n //Now add as much data as we can.\n let curDataString = '';\n let i = 0;\n\n while (this.pendingSegs.length > 0) {\n //first, lets see if the next segment will fit.\n const nextSeg = this.pendingSegs[0];\n if (\n (nextSeg.d as unknown[]).length +\n SEG_HEADER_SIZE +\n curDataString.length <=\n MAX_URL_DATA_SIZE\n ) {\n //great, the segment will fit. 
Lets append it.\n const theSeg = this.pendingSegs.shift();\n curDataString =\n curDataString +\n '&' +\n FIREBASE_LONGPOLL_SEGMENT_NUM_PARAM +\n i +\n '=' +\n theSeg.seg +\n '&' +\n FIREBASE_LONGPOLL_SEGMENTS_IN_PACKET +\n i +\n '=' +\n theSeg.ts +\n '&' +\n FIREBASE_LONGPOLL_DATA_PARAM +\n i +\n '=' +\n theSeg.d;\n i++;\n } else {\n break;\n }\n }\n\n theURL = theURL + curDataString;\n this.addLongPollTag_(theURL, this.currentSerial);\n\n return true;\n } else {\n return false;\n }\n }\n\n /**\n * Queue a packet for transmission to the server.\n * @param segnum - A sequential id for this packet segment used for reassembly\n * @param totalsegs - The total number of segments in this packet\n * @param data - The data for this segment.\n */\n enqueueSegment(segnum: number, totalsegs: number, data: unknown) {\n //add this to the queue of segments to send.\n this.pendingSegs.push({ seg: segnum, ts: totalsegs, d: data });\n\n //send the data immediately if there isn't already data being transmitted, unless\n //startLongPoll hasn't been called yet.\n if (this.alive) {\n this.newRequest_();\n }\n }\n\n /**\n * Add a script tag for a regular long-poll request.\n * @param url - The URL of the script tag.\n * @param serial - The serial number of the request.\n */\n private addLongPollTag_(url: string, serial: number) {\n //remember that we sent this request.\n this.outstandingRequests.add(serial);\n\n const doNewRequest = () => {\n this.outstandingRequests.delete(serial);\n this.newRequest_();\n };\n\n // If this request doesn't return on its own accord (by the server sending us some data), we'll\n // create a new one after the KEEPALIVE interval to make sure we always keep a fresh request open.\n const keepaliveTimeout = setTimeout(\n doNewRequest,\n Math.floor(KEEPALIVE_REQUEST_INTERVAL)\n );\n\n const readyStateCB = () => {\n // Request completed. 
Cancel the keepalive.\n clearTimeout(keepaliveTimeout);\n\n // Trigger a new request so we can continue receiving data.\n doNewRequest();\n };\n\n this.addTag(url, readyStateCB);\n }\n\n /**\n * Add an arbitrary script tag to the iframe.\n * @param url - The URL for the script tag source.\n * @param loadCB - A callback to be triggered once the script has loaded.\n */\n addTag(url: string, loadCB: () => void) {\n if (isNodeSdk()) {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n (this as any).doNodeLongPoll(url, loadCB);\n } else {\n setTimeout(() => {\n try {\n // if we're already closed, don't add this poll\n if (!this.sendNewPolls) {\n return;\n }\n const newScript = this.myIFrame.doc.createElement('script');\n newScript.type = 'text/javascript';\n newScript.async = true;\n newScript.src = url;\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n newScript.onload = (newScript as any).onreadystatechange =\n function () {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n const rstate = (newScript as any).readyState;\n if (!rstate || rstate === 'loaded' || rstate === 'complete') {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n newScript.onload = (newScript as any).onreadystatechange = null;\n if (newScript.parentNode) {\n newScript.parentNode.removeChild(newScript);\n }\n loadCB();\n }\n };\n newScript.onerror = () => {\n log('Long-poll script failed to load: ' + url);\n this.sendNewPolls = false;\n this.close();\n };\n this.myIFrame.doc.body.appendChild(newScript);\n } catch (e) {\n // TODO: we should make this error visible somehow\n }\n }, Math.floor(1));\n }\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { assert, isNodeSdk, jsonEval, stringify } from '@firebase/util';\n\nimport { RepoInfo, repoInfoConnectionURL } from '../core/RepoInfo';\nimport { StatsCollection } from '../core/stats/StatsCollection';\nimport { statsManagerGetCollection } from '../core/stats/StatsManager';\nimport { PersistentStorage } from '../core/storage/storage';\nimport { logWrapper, splitStringBySize } from '../core/util/util';\nimport { SDK_VERSION } from '../core/version';\n\nimport {\n APPLICATION_ID_PARAM,\n APP_CHECK_TOKEN_PARAM,\n FORGE_DOMAIN_RE,\n FORGE_REF,\n LAST_SESSION_PARAM,\n PROTOCOL_VERSION,\n REFERER_PARAM,\n TRANSPORT_SESSION_PARAM,\n VERSION_PARAM,\n WEBSOCKET\n} from './Constants';\nimport { Transport } from './Transport';\n\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\ndeclare const MozWebSocket: any;\n\nconst WEBSOCKET_MAX_FRAME_SIZE = 16384;\nconst WEBSOCKET_KEEPALIVE_INTERVAL = 45000;\n\nlet WebSocketImpl = null;\nif (typeof MozWebSocket !== 'undefined') {\n WebSocketImpl = MozWebSocket;\n} else if (typeof WebSocket !== 'undefined') {\n WebSocketImpl = WebSocket;\n}\n\nexport function setWebSocketImpl(impl) {\n WebSocketImpl = impl;\n}\n\n/**\n * Create a new websocket connection with the given callbacks.\n 
*/\nexport class WebSocketConnection implements Transport {\n keepaliveTimer: number | null = null;\n frames: string[] | null = null;\n totalFrames = 0;\n bytesSent = 0;\n bytesReceived = 0;\n connURL: string;\n onDisconnect: (a?: boolean) => void;\n onMessage: (msg: {}) => void;\n mySock: WebSocket | null;\n private log_: (...a: unknown[]) => void;\n private stats_: StatsCollection;\n private everConnected_: boolean;\n private isClosed_: boolean;\n private nodeAdmin: boolean;\n\n /**\n * @param connId identifier for this transport\n * @param repoInfo The info for the websocket endpoint.\n * @param applicationId The Firebase App ID for this project.\n * @param appCheckToken The App Check Token for this client.\n * @param authToken The Auth Token for this client.\n * @param transportSessionId Optional transportSessionId if this is connecting\n * to an existing transport session\n * @param lastSessionId Optional lastSessionId if there was a previous\n * connection\n */\n constructor(\n public connId: string,\n repoInfo: RepoInfo,\n private applicationId?: string,\n private appCheckToken?: string,\n private authToken?: string,\n transportSessionId?: string,\n lastSessionId?: string\n ) {\n this.log_ = logWrapper(this.connId);\n this.stats_ = statsManagerGetCollection(repoInfo);\n this.connURL = WebSocketConnection.connectionURL_(\n repoInfo,\n transportSessionId,\n lastSessionId,\n appCheckToken,\n applicationId\n );\n this.nodeAdmin = repoInfo.nodeAdmin;\n }\n\n /**\n * @param repoInfo - The info for the websocket endpoint.\n * @param transportSessionId - Optional transportSessionId if this is connecting to an existing transport\n * session\n * @param lastSessionId - Optional lastSessionId if there was a previous connection\n * @returns connection url\n */\n private static connectionURL_(\n repoInfo: RepoInfo,\n transportSessionId?: string,\n lastSessionId?: string,\n appCheckToken?: string,\n applicationId?: string\n ): string {\n const urlParams: { [k: string]: string } = {};\n urlParams[VERSION_PARAM] = PROTOCOL_VERSION;\n\n if (\n !isNodeSdk() &&\n typeof location !== 'undefined' &&\n location.hostname &&\n FORGE_DOMAIN_RE.test(location.hostname)\n ) {\n urlParams[REFERER_PARAM] = FORGE_REF;\n }\n if (transportSessionId) {\n urlParams[TRANSPORT_SESSION_PARAM] = transportSessionId;\n }\n if (lastSessionId) {\n urlParams[LAST_SESSION_PARAM] = lastSessionId;\n }\n if (appCheckToken) {\n urlParams[APP_CHECK_TOKEN_PARAM] = appCheckToken;\n }\n if (applicationId) {\n urlParams[APPLICATION_ID_PARAM] = applicationId;\n }\n\n return repoInfoConnectionURL(repoInfo, WEBSOCKET, urlParams);\n }\n\n /**\n * @param onMessage - Callback when messages arrive\n * @param onDisconnect - Callback with connection lost.\n */\n open(onMessage: (msg: {}) => void, onDisconnect: (a?: boolean) => void) {\n this.onDisconnect = onDisconnect;\n this.onMessage = onMessage;\n\n this.log_('Websocket connecting to ' + this.connURL);\n\n this.everConnected_ = false;\n // Assume failure until proven otherwise.\n PersistentStorage.set('previous_websocket_failure', true);\n\n try {\n let options: { [k: string]: object };\n if (isNodeSdk()) {\n const device = this.nodeAdmin ? 
'AdminNode' : 'Node';\n // UA Format: Firebase////\n options = {\n headers: {\n 'User-Agent': `Firebase/${PROTOCOL_VERSION}/${SDK_VERSION}/${process.platform}/${device}`,\n 'X-Firebase-GMPID': this.applicationId || ''\n }\n };\n\n // If using Node with admin creds, AppCheck-related checks are unnecessary.\n // Note that we send the credentials here even if they aren't admin credentials, which is\n // not a problem.\n // Note that this header is just used to bypass appcheck, and the token should still be sent\n // through the websocket connection once it is established.\n if (this.authToken) {\n options.headers['Authorization'] = `Bearer ${this.authToken}`;\n }\n if (this.appCheckToken) {\n options.headers['X-Firebase-AppCheck'] = this.appCheckToken;\n }\n\n // Plumb appropriate http_proxy environment variable into faye-websocket if it exists.\n const env = process['env'];\n const proxy =\n this.connURL.indexOf('wss://') === 0\n ? env['HTTPS_PROXY'] || env['https_proxy']\n : env['HTTP_PROXY'] || env['http_proxy'];\n\n if (proxy) {\n options['proxy'] = { origin: proxy };\n }\n }\n this.mySock = new WebSocketImpl(this.connURL, [], options);\n } catch (e) {\n this.log_('Error instantiating WebSocket.');\n const error = e.message || e.data;\n if (error) {\n this.log_(error);\n }\n this.onClosed_();\n return;\n }\n\n this.mySock.onopen = () => {\n this.log_('Websocket connected.');\n this.everConnected_ = true;\n };\n\n this.mySock.onclose = () => {\n this.log_('Websocket connection was disconnected.');\n this.mySock = null;\n this.onClosed_();\n };\n\n this.mySock.onmessage = m => {\n this.handleIncomingFrame(m as {});\n };\n\n this.mySock.onerror = e => {\n this.log_('WebSocket error. Closing connection.');\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n const error = (e as any).message || (e as any).data;\n if (error) {\n this.log_(error);\n }\n this.onClosed_();\n };\n }\n\n /**\n * No-op for websockets, we don't need to do anything once the connection is confirmed as open\n */\n start() {}\n\n static forceDisallow_: boolean;\n\n static forceDisallow() {\n WebSocketConnection.forceDisallow_ = true;\n }\n\n static isAvailable(): boolean {\n let isOldAndroid = false;\n if (typeof navigator !== 'undefined' && navigator.userAgent) {\n const oldAndroidRegex = /Android ([0-9]{0,}\\.[0-9]{0,})/;\n const oldAndroidMatch = navigator.userAgent.match(oldAndroidRegex);\n if (oldAndroidMatch && oldAndroidMatch.length > 1) {\n if (parseFloat(oldAndroidMatch[1]) < 4.4) {\n isOldAndroid = true;\n }\n }\n }\n\n return (\n !isOldAndroid &&\n WebSocketImpl !== null &&\n !WebSocketConnection.forceDisallow_\n );\n }\n\n /**\n * Number of response before we consider the connection \"healthy.\"\n */\n static responsesRequiredToBeHealthy = 2;\n\n /**\n * Time to wait for the connection te become healthy before giving up.\n */\n static healthyTimeout = 30000;\n\n /**\n * Returns true if we previously failed to connect with this transport.\n */\n static previouslyFailed(): boolean {\n // If our persistent storage is actually only in-memory storage,\n // we default to assuming that it previously failed to be safe.\n return (\n PersistentStorage.isInMemoryStorage ||\n PersistentStorage.get('previous_websocket_failure') === true\n );\n }\n\n markConnectionHealthy() {\n PersistentStorage.remove('previous_websocket_failure');\n }\n\n private appendFrame_(data: string) {\n this.frames.push(data);\n if (this.frames.length === this.totalFrames) {\n const fullMess = this.frames.join('');\n this.frames = 
null;\n      const jsonMess = jsonEval(fullMess) as object;\n\n      //handle the message\n      this.onMessage(jsonMess);\n    }\n  }\n\n  /**\n   * @param frameCount - The number of frames we are expecting from the server\n   */\n  private handleNewFrameCount_(frameCount: number) {\n    this.totalFrames = frameCount;\n    this.frames = [];\n  }\n\n  /**\n   * Attempts to parse a frame count out of some text. If it can't, assumes a value of 1\n   * @returns Any remaining data to be processed, or null if there is none\n   */\n  private extractFrameCount_(data: string): string | null {\n    assert(this.frames === null, 'We already have a frame buffer');\n    // TODO: The server is only supposed to send up to 9999 frames (i.e. length <= 4), but that isn't being enforced\n    // currently.  So allowing larger frame counts (length <= 6).  See https://app.asana.com/0/search/8688598998380/8237608042508\n    if (data.length <= 6) {\n      const frameCount = Number(data);\n      if (!isNaN(frameCount)) {\n        this.handleNewFrameCount_(frameCount);\n        return null;\n      }\n    }\n    this.handleNewFrameCount_(1);\n    return data;\n  }\n\n  /**\n   * Process a websocket frame that has arrived from the server.\n   * @param mess - The frame data\n   */\n  handleIncomingFrame(mess: { [k: string]: unknown }) {\n    if (this.mySock === null) {\n      return; // Chrome apparently delivers incoming packets even after we .close() the connection sometimes.\n    }\n    const data = mess['data'] as string;\n    this.bytesReceived += data.length;\n    this.stats_.incrementCounter('bytes_received', data.length);\n\n    this.resetKeepAlive();\n\n    if (this.frames !== null) {\n      // we're buffering\n      this.appendFrame_(data);\n    } else {\n      // try to parse out a frame count, otherwise, assume 1 and process it\n      const remainingData = this.extractFrameCount_(data);\n      if (remainingData !== null) {\n        this.appendFrame_(remainingData);\n      }\n    }\n  }\n\n  /**\n   * Send a message to the server\n   * @param data - The JSON object to transmit\n   */\n  send(data: {}) {\n    this.resetKeepAlive();\n\n    const dataStr = stringify(data);\n    this.bytesSent += dataStr.length;\n    this.stats_.incrementCounter('bytes_sent', dataStr.length);\n\n    //We can only fit a certain amount in each websocket frame, so we need to split this request\n    //up into multiple pieces if it doesn't fit in one request.\n\n    const dataSegs = splitStringBySize(dataStr, WEBSOCKET_MAX_FRAME_SIZE);\n\n    //Send the length header\n    if (dataSegs.length > 1) {\n      this.sendString_(String(dataSegs.length));\n    }\n\n    //Send the actual data in segments.\n    for (let i = 0; i < dataSegs.length; i++) {\n      this.sendString_(dataSegs[i]);\n    }\n  }\n\n  private shutdown_() {\n    this.isClosed_ = true;\n    if (this.keepaliveTimer) {\n      clearInterval(this.keepaliveTimer);\n      this.keepaliveTimer = null;\n    }\n\n    if (this.mySock) {\n      this.mySock.close();\n      this.mySock = null;\n    }\n  }\n\n  private onClosed_() {\n    if (!this.isClosed_) {\n      this.log_('WebSocket is closing itself');\n      this.shutdown_();\n\n      // since this is an internal close, trigger the close listener\n      if (this.onDisconnect) {\n        this.onDisconnect(this.everConnected_);\n        this.onDisconnect = null;\n      }\n    }\n  }\n\n  /**\n   * External-facing close handler.\n   * Close the websocket and kill the connection.\n   */\n  close() {\n    if (!this.isClosed_) {\n      this.log_('WebSocket is being closed');\n      this.shutdown_();\n    }\n  }\n\n  /**\n   * Kill the current keepalive timer and start a new one, to ensure that it always fires N seconds after\n   * the last activity.\n   */\n  resetKeepAlive() {\n    clearInterval(this.keepaliveTimer);\n    this.keepaliveTimer = setInterval(() => {\n      //If there has 
been no websocket activity for a while, send a no-op\n if (this.mySock) {\n this.sendString_('0');\n }\n this.resetKeepAlive();\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n }, Math.floor(WEBSOCKET_KEEPALIVE_INTERVAL)) as any;\n }\n\n /**\n * Send a string over the websocket.\n *\n * @param str - String to send.\n */\n private sendString_(str: string) {\n // Firefox seems to sometimes throw exceptions (NS_ERROR_UNEXPECTED) from websocket .send()\n // calls for some unknown reason. We treat these as an error and disconnect.\n // See https://app.asana.com/0/58926111402292/68021340250410\n try {\n this.mySock.send(str);\n } catch (e) {\n this.log_(\n 'Exception thrown from WebSocket.send():',\n e.message || e.data,\n 'Closing connection.'\n );\n setTimeout(this.onClosed_.bind(this), 0);\n }\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { RepoInfo } from '../core/RepoInfo';\nimport { warn } from '../core/util/util';\n\nimport { BrowserPollConnection } from './BrowserPollConnection';\nimport { TransportConstructor } from './Transport';\nimport { WebSocketConnection } from './WebSocketConnection';\n\n/**\n * Currently simplistic, this class manages what transport a Connection should use at various stages of its\n * lifecycle.\n *\n * It starts with longpolling in a browser, and httppolling on node. It then upgrades to websockets if\n * they are available.\n */\nexport class TransportManager {\n private transports_: TransportConstructor[];\n\n // Keeps track of whether the TransportManager has already chosen a transport to use\n static globalTransportInitialized_ = false;\n\n static get ALL_TRANSPORTS() {\n return [BrowserPollConnection, WebSocketConnection];\n }\n\n /**\n * Returns whether transport has been selected to ensure WebSocketConnection or BrowserPollConnection are not called after\n * TransportManager has already set up transports_\n */\n static get IS_TRANSPORT_INITIALIZED() {\n return this.globalTransportInitialized_;\n }\n\n /**\n * @param repoInfo - Metadata around the namespace we're connecting to\n */\n constructor(repoInfo: RepoInfo) {\n this.initTransports_(repoInfo);\n }\n\n private initTransports_(repoInfo: RepoInfo) {\n const isWebSocketsAvailable: boolean =\n WebSocketConnection && WebSocketConnection['isAvailable']();\n let isSkipPollConnection =\n isWebSocketsAvailable && !WebSocketConnection.previouslyFailed();\n\n if (repoInfo.webSocketOnly) {\n if (!isWebSocketsAvailable) {\n warn(\n \"wss:// URL used, but browser isn't known to support websockets. 
Trying anyway.\"\n );\n }\n\n isSkipPollConnection = true;\n }\n\n if (isSkipPollConnection) {\n this.transports_ = [WebSocketConnection];\n } else {\n const transports = (this.transports_ = [] as TransportConstructor[]);\n for (const transport of TransportManager.ALL_TRANSPORTS) {\n if (transport && transport['isAvailable']()) {\n transports.push(transport);\n }\n }\n TransportManager.globalTransportInitialized_ = true;\n }\n }\n\n /**\n * @returns The constructor for the initial transport to use\n */\n initialTransport(): TransportConstructor {\n if (this.transports_.length > 0) {\n return this.transports_[0];\n } else {\n throw new Error('No transports available');\n }\n }\n\n /**\n * @returns The constructor for the next transport, or null\n */\n upgradeTransport(): TransportConstructor | null {\n if (this.transports_.length > 1) {\n return this.transports_[1];\n } else {\n return null;\n }\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { RepoInfo } from '../core/RepoInfo';\nimport { PersistentStorage } from '../core/storage/storage';\nimport { Indexable } from '../core/util/misc';\nimport {\n error,\n logWrapper,\n requireKey,\n setTimeoutNonBlocking,\n warn\n} from '../core/util/util';\n\nimport { PROTOCOL_VERSION } from './Constants';\nimport { Transport, TransportConstructor } from './Transport';\nimport { TransportManager } from './TransportManager';\n\n// Abort upgrade attempt if it takes longer than 60s.\nconst UPGRADE_TIMEOUT = 60000;\n\n// For some transports (WebSockets), we need to \"validate\" the transport by exchanging a few requests and responses.\n// If we haven't sent enough requests within 5s, we'll start sending noop ping requests.\nconst DELAY_BEFORE_SENDING_EXTRA_REQUESTS = 5000;\n\n// If the initial data sent triggers a lot of bandwidth (i.e. it's a large put or a listen for a large amount of data)\n// then we may not be able to exchange our ping/pong requests within the healthy timeout. 
So if we reach the timeout\n// but we've sent/received enough bytes, we don't cancel the connection.\nconst BYTES_SENT_HEALTHY_OVERRIDE = 10 * 1024;\nconst BYTES_RECEIVED_HEALTHY_OVERRIDE = 100 * 1024;\n\nconst enum RealtimeState {\n CONNECTING,\n CONNECTED,\n DISCONNECTED\n}\n\nconst MESSAGE_TYPE = 't';\nconst MESSAGE_DATA = 'd';\nconst CONTROL_SHUTDOWN = 's';\nconst CONTROL_RESET = 'r';\nconst CONTROL_ERROR = 'e';\nconst CONTROL_PONG = 'o';\nconst SWITCH_ACK = 'a';\nconst END_TRANSMISSION = 'n';\nconst PING = 'p';\n\nconst SERVER_HELLO = 'h';\n\n/**\n * Creates a new real-time connection to the server using whichever method works\n * best in the current browser.\n */\nexport class Connection {\n connectionCount = 0;\n pendingDataMessages: unknown[] = [];\n sessionId: string;\n\n private conn_: Transport;\n private healthyTimeout_: number;\n private isHealthy_: boolean;\n private log_: (...args: unknown[]) => void;\n private primaryResponsesRequired_: number;\n private rx_: Transport;\n private secondaryConn_: Transport;\n private secondaryResponsesRequired_: number;\n private state_ = RealtimeState.CONNECTING;\n private transportManager_: TransportManager;\n private tx_: Transport;\n\n /**\n * @param id - an id for this connection\n * @param repoInfo_ - the info for the endpoint to connect to\n * @param applicationId_ - the Firebase App ID for this project\n * @param appCheckToken_ - The App Check Token for this device.\n * @param authToken_ - The auth token for this session.\n * @param onMessage_ - the callback to be triggered when a server-push message arrives\n * @param onReady_ - the callback to be triggered when this connection is ready to send messages.\n * @param onDisconnect_ - the callback to be triggered when a connection was lost\n * @param onKill_ - the callback to be triggered when this connection has permanently shut down.\n * @param lastSessionId - last session id in persistent connection. is used to clean up old session in real-time server\n */\n constructor(\n public id: string,\n private repoInfo_: RepoInfo,\n private applicationId_: string | undefined,\n private appCheckToken_: string | undefined,\n private authToken_: string | undefined,\n private onMessage_: (a: {}) => void,\n private onReady_: (a: number, b: string) => void,\n private onDisconnect_: () => void,\n private onKill_: (a: string) => void,\n public lastSessionId?: string\n ) {\n this.log_ = logWrapper('c:' + this.id + ':');\n this.transportManager_ = new TransportManager(repoInfo_);\n this.log_('Connection created');\n this.start_();\n }\n\n /**\n * Starts a connection attempt\n */\n private start_(): void {\n const conn = this.transportManager_.initialTransport();\n this.conn_ = new conn(\n this.nextTransportId_(),\n this.repoInfo_,\n this.applicationId_,\n this.appCheckToken_,\n this.authToken_,\n null,\n this.lastSessionId\n );\n\n // For certain transports (WebSockets), we need to send and receive several messages back and forth before we\n // can consider the transport healthy.\n this.primaryResponsesRequired_ = conn['responsesRequiredToBeHealthy'] || 0;\n\n const onMessageReceived = this.connReceiver_(this.conn_);\n const onConnectionLost = this.disconnReceiver_(this.conn_);\n this.tx_ = this.conn_;\n this.rx_ = this.conn_;\n this.secondaryConn_ = null;\n this.isHealthy_ = false;\n\n /*\n * Firefox doesn't like when code from one iframe tries to create another iframe by way of the parent frame.\n * This can occur in the case of a redirect, i.e. 
we guessed wrong on what server to connect to and received a reset.\n * Somehow, setTimeout seems to make this ok. That doesn't make sense from a security perspective, since you should\n * still have the context of your originating frame.\n */\n setTimeout(() => {\n // this.conn_ gets set to null in some of the tests. Check to make sure it still exists before using it\n this.conn_ && this.conn_.open(onMessageReceived, onConnectionLost);\n }, Math.floor(0));\n\n const healthyTimeoutMS = conn['healthyTimeout'] || 0;\n if (healthyTimeoutMS > 0) {\n this.healthyTimeout_ = setTimeoutNonBlocking(() => {\n this.healthyTimeout_ = null;\n if (!this.isHealthy_) {\n if (\n this.conn_ &&\n this.conn_.bytesReceived > BYTES_RECEIVED_HEALTHY_OVERRIDE\n ) {\n this.log_(\n 'Connection exceeded healthy timeout but has received ' +\n this.conn_.bytesReceived +\n ' bytes. Marking connection healthy.'\n );\n this.isHealthy_ = true;\n this.conn_.markConnectionHealthy();\n } else if (\n this.conn_ &&\n this.conn_.bytesSent > BYTES_SENT_HEALTHY_OVERRIDE\n ) {\n this.log_(\n 'Connection exceeded healthy timeout but has sent ' +\n this.conn_.bytesSent +\n ' bytes. Leaving connection alive.'\n );\n // NOTE: We don't want to mark it healthy, since we have no guarantee that the bytes have made it to\n // the server.\n } else {\n this.log_('Closing unhealthy connection after timeout.');\n this.close();\n }\n }\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n }, Math.floor(healthyTimeoutMS)) as any;\n }\n }\n\n private nextTransportId_(): string {\n return 'c:' + this.id + ':' + this.connectionCount++;\n }\n\n private disconnReceiver_(conn) {\n return everConnected => {\n if (conn === this.conn_) {\n this.onConnectionLost_(everConnected);\n } else if (conn === this.secondaryConn_) {\n this.log_('Secondary connection lost.');\n this.onSecondaryConnectionLost_();\n } else {\n this.log_('closing an old connection');\n }\n };\n }\n\n private connReceiver_(conn: Transport) {\n return (message: Indexable) => {\n if (this.state_ !== RealtimeState.DISCONNECTED) {\n if (conn === this.rx_) {\n this.onPrimaryMessageReceived_(message);\n } else if (conn === this.secondaryConn_) {\n this.onSecondaryMessageReceived_(message);\n } else {\n this.log_('message on old connection');\n }\n }\n };\n }\n\n /**\n * @param dataMsg - An arbitrary data message to be sent to the server\n */\n sendRequest(dataMsg: object) {\n // wrap in a data message envelope and send it on\n const msg = { t: 'd', d: dataMsg };\n this.sendData_(msg);\n }\n\n tryCleanupConnection() {\n if (this.tx_ === this.secondaryConn_ && this.rx_ === this.secondaryConn_) {\n this.log_(\n 'cleaning up and promoting a connection: ' + this.secondaryConn_.connId\n );\n this.conn_ = this.secondaryConn_;\n this.secondaryConn_ = null;\n // the server will shutdown the old connection\n }\n }\n\n private onSecondaryControl_(controlData: { [k: string]: unknown }) {\n if (MESSAGE_TYPE in controlData) {\n const cmd = controlData[MESSAGE_TYPE] as string;\n if (cmd === SWITCH_ACK) {\n this.upgradeIfSecondaryHealthy_();\n } else if (cmd === CONTROL_RESET) {\n // Most likely the session wasn't valid. 
Abandon the switch attempt\n        this.log_('Got a reset on secondary, closing it');\n        this.secondaryConn_.close();\n        // If we were already using this connection for something, then we need to fully close\n        if (\n          this.tx_ === this.secondaryConn_ ||\n          this.rx_ === this.secondaryConn_\n        ) {\n          this.close();\n        }\n      } else if (cmd === CONTROL_PONG) {\n        this.log_('got pong on secondary.');\n        this.secondaryResponsesRequired_--;\n        this.upgradeIfSecondaryHealthy_();\n      }\n    }\n  }\n\n  private onSecondaryMessageReceived_(parsedData: Indexable) {\n    const layer: string = requireKey('t', parsedData) as string;\n    const data: unknown = requireKey('d', parsedData);\n    if (layer === 'c') {\n      this.onSecondaryControl_(data as Indexable);\n    } else if (layer === 'd') {\n      // got a data message, but we're still on the secondary connection. Need to buffer it up\n      this.pendingDataMessages.push(data);\n    } else {\n      throw new Error('Unknown protocol layer: ' + layer);\n    }\n  }\n\n  private upgradeIfSecondaryHealthy_() {\n    if (this.secondaryResponsesRequired_ <= 0) {\n      this.log_('Secondary connection is healthy.');\n      this.isHealthy_ = true;\n      this.secondaryConn_.markConnectionHealthy();\n      this.proceedWithUpgrade_();\n    } else {\n      // Send a ping to make sure the connection is healthy.\n      this.log_('sending ping on secondary.');\n      this.secondaryConn_.send({ t: 'c', d: { t: PING, d: {} } });\n    }\n  }\n\n  private proceedWithUpgrade_() {\n    // tell this connection to consider itself open\n    this.secondaryConn_.start();\n    // send ack\n    this.log_('sending client ack on secondary');\n    this.secondaryConn_.send({ t: 'c', d: { t: SWITCH_ACK, d: {} } });\n\n    // send end packet on primary transport, switch to sending on this one\n    // can receive on this one, buffer responses until end received on primary transport\n    this.log_('Ending transmission on primary');\n    this.conn_.send({ t: 'c', d: { t: END_TRANSMISSION, d: {} } });\n    this.tx_ = this.secondaryConn_;\n\n    this.tryCleanupConnection();\n  }\n\n  private onPrimaryMessageReceived_(parsedData: { [k: string]: unknown }) {\n    // Must refer to parsedData properties in quotes, so closure doesn't touch them.\n    const layer: string = requireKey('t', parsedData) as string;\n    const data: unknown = requireKey('d', parsedData);\n    if (layer === 'c') {\n      this.onControl_(data as { [k: string]: unknown });\n    } else if (layer === 'd') {\n      this.onDataMessage_(data);\n    }\n  }\n\n  private onDataMessage_(message: unknown) {\n    this.onPrimaryResponse_();\n\n    // We don't do anything with data messages, just kick them up a level\n    this.onMessage_(message);\n  }\n\n  private onPrimaryResponse_() {\n    if (!this.isHealthy_) {\n      this.primaryResponsesRequired_--;\n      if (this.primaryResponsesRequired_ <= 0) {\n        this.log_('Primary connection is healthy.');\n        this.isHealthy_ = true;\n        this.conn_.markConnectionHealthy();\n      }\n    }\n  }\n\n  private onControl_(controlData: { [k: string]: unknown }) {\n    const cmd: string = requireKey(MESSAGE_TYPE, controlData) as string;\n    if (MESSAGE_DATA in controlData) {\n      const payload = controlData[MESSAGE_DATA];\n      if (cmd === SERVER_HELLO) {\n        const handshakePayload = {\n          ...(payload as {\n            ts: number;\n            v: string;\n            h: string;\n            s: string;\n          })\n        };\n        if (this.repoInfo_.isUsingEmulator) {\n          // Upon connecting, the emulator will pass the hostname that it's aware of, but we prefer the user's set hostname via `connectDatabaseEmulator` over what the emulator passes.\n          handshakePayload.h = this.repoInfo_.host;\n        }\n        this.onHandshake_(handshakePayload);\n      } else if (cmd === END_TRANSMISSION) {\n        this.log_('recvd end 
transmission on primary');\n this.rx_ = this.secondaryConn_;\n for (let i = 0; i < this.pendingDataMessages.length; ++i) {\n this.onDataMessage_(this.pendingDataMessages[i]);\n }\n this.pendingDataMessages = [];\n this.tryCleanupConnection();\n } else if (cmd === CONTROL_SHUTDOWN) {\n // This was previously the 'onKill' callback passed to the lower-level connection\n // payload in this case is the reason for the shutdown. Generally a human-readable error\n this.onConnectionShutdown_(payload as string);\n } else if (cmd === CONTROL_RESET) {\n // payload in this case is the host we should contact\n this.onReset_(payload as string);\n } else if (cmd === CONTROL_ERROR) {\n error('Server Error: ' + payload);\n } else if (cmd === CONTROL_PONG) {\n this.log_('got pong on primary.');\n this.onPrimaryResponse_();\n this.sendPingOnPrimaryIfNecessary_();\n } else {\n error('Unknown control packet command: ' + cmd);\n }\n }\n }\n\n /**\n * @param handshake - The handshake data returned from the server\n */\n private onHandshake_(handshake: {\n ts: number;\n v: string;\n h: string;\n s: string;\n }): void {\n const timestamp = handshake.ts;\n const version = handshake.v;\n const host = handshake.h;\n this.sessionId = handshake.s;\n this.repoInfo_.host = host;\n // if we've already closed the connection, then don't bother trying to progress further\n if (this.state_ === RealtimeState.CONNECTING) {\n this.conn_.start();\n this.onConnectionEstablished_(this.conn_, timestamp);\n if (PROTOCOL_VERSION !== version) {\n warn('Protocol version mismatch detected');\n }\n // TODO: do we want to upgrade? when? maybe a delay?\n this.tryStartUpgrade_();\n }\n }\n\n private tryStartUpgrade_() {\n const conn = this.transportManager_.upgradeTransport();\n if (conn) {\n this.startUpgrade_(conn);\n }\n }\n\n private startUpgrade_(conn: TransportConstructor) {\n this.secondaryConn_ = new conn(\n this.nextTransportId_(),\n this.repoInfo_,\n this.applicationId_,\n this.appCheckToken_,\n this.authToken_,\n this.sessionId\n );\n // For certain transports (WebSockets), we need to send and receive several messages back and forth before we\n // can consider the transport healthy.\n this.secondaryResponsesRequired_ =\n conn['responsesRequiredToBeHealthy'] || 0;\n\n const onMessage = this.connReceiver_(this.secondaryConn_);\n const onDisconnect = this.disconnReceiver_(this.secondaryConn_);\n this.secondaryConn_.open(onMessage, onDisconnect);\n\n // If we haven't successfully upgraded after UPGRADE_TIMEOUT, give up and kill the secondary.\n setTimeoutNonBlocking(() => {\n if (this.secondaryConn_) {\n this.log_('Timed out trying to upgrade.');\n this.secondaryConn_.close();\n }\n }, Math.floor(UPGRADE_TIMEOUT));\n }\n\n private onReset_(host: string) {\n this.log_('Reset packet received. 
New host: ' + host);\n this.repoInfo_.host = host;\n // TODO: if we're already \"connected\", we need to trigger a disconnect at the next layer up.\n // We don't currently support resets after the connection has already been established\n if (this.state_ === RealtimeState.CONNECTED) {\n this.close();\n } else {\n // Close whatever connections we have open and start again.\n this.closeConnections_();\n this.start_();\n }\n }\n\n private onConnectionEstablished_(conn: Transport, timestamp: number) {\n this.log_('Realtime connection established.');\n this.conn_ = conn;\n this.state_ = RealtimeState.CONNECTED;\n\n if (this.onReady_) {\n this.onReady_(timestamp, this.sessionId);\n this.onReady_ = null;\n }\n\n // If after 5 seconds we haven't sent enough requests to the server to get the connection healthy,\n // send some pings.\n if (this.primaryResponsesRequired_ === 0) {\n this.log_('Primary connection is healthy.');\n this.isHealthy_ = true;\n } else {\n setTimeoutNonBlocking(() => {\n this.sendPingOnPrimaryIfNecessary_();\n }, Math.floor(DELAY_BEFORE_SENDING_EXTRA_REQUESTS));\n }\n }\n\n private sendPingOnPrimaryIfNecessary_() {\n // If the connection isn't considered healthy yet, we'll send a noop ping packet request.\n if (!this.isHealthy_ && this.state_ === RealtimeState.CONNECTED) {\n this.log_('sending ping on primary.');\n this.sendData_({ t: 'c', d: { t: PING, d: {} } });\n }\n }\n\n private onSecondaryConnectionLost_() {\n const conn = this.secondaryConn_;\n this.secondaryConn_ = null;\n if (this.tx_ === conn || this.rx_ === conn) {\n // we are relying on this connection already in some capacity. Therefore, a failure is real\n this.close();\n }\n }\n\n /**\n * @param everConnected - Whether or not the connection ever reached a server. Used to determine if\n * we should flush the host cache\n */\n private onConnectionLost_(everConnected: boolean) {\n this.conn_ = null;\n\n // NOTE: IF you're seeing a Firefox error for this line, I think it might be because it's getting\n // called on window close and RealtimeState.CONNECTING is no longer defined. Just a guess.\n if (!everConnected && this.state_ === RealtimeState.CONNECTING) {\n this.log_('Realtime connection failed.');\n // Since we failed to connect at all, clear any cached entry for this namespace in case the machine went away\n if (this.repoInfo_.isCacheableHost()) {\n PersistentStorage.remove('host:' + this.repoInfo_.host);\n // reset the internal host to what we would show the user, i.e. .firebaseio.com\n this.repoInfo_.internalHost = this.repoInfo_.host;\n }\n } else if (this.state_ === RealtimeState.CONNECTED) {\n this.log_('Realtime connection lost.');\n }\n\n this.close();\n }\n\n private onConnectionShutdown_(reason: string) {\n this.log_('Connection shutdown command received. 
Shutting down...');\n\n    if (this.onKill_) {\n      this.onKill_(reason);\n      this.onKill_ = null;\n    }\n\n    // We intentionally don't want to fire onDisconnect (kill is a different case),\n    // so clear the callback.\n    this.onDisconnect_ = null;\n\n    this.close();\n  }\n\n  private sendData_(data: object) {\n    if (this.state_ !== RealtimeState.CONNECTED) {\n      throw 'Connection is not connected';\n    } else {\n      this.tx_.send(data);\n    }\n  }\n\n  /**\n   * Cleans up this connection, calling the appropriate callbacks\n   */\n  close() {\n    if (this.state_ !== RealtimeState.DISCONNECTED) {\n      this.log_('Closing realtime connection.');\n      this.state_ = RealtimeState.DISCONNECTED;\n\n      this.closeConnections_();\n\n      if (this.onDisconnect_) {\n        this.onDisconnect_();\n        this.onDisconnect_ = null;\n      }\n    }\n  }\n\n  private closeConnections_() {\n    this.log_('Shutting down all connections');\n    if (this.conn_) {\n      this.conn_.close();\n      this.conn_ = null;\n    }\n\n    if (this.secondaryConn_) {\n      this.secondaryConn_.close();\n      this.secondaryConn_ = null;\n    }\n\n    if (this.healthyTimeout_) {\n      clearTimeout(this.healthyTimeout_);\n      this.healthyTimeout_ = null;\n    }\n  }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *   http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { QueryContext } from './view/EventRegistration';\n\n/**\n * Interface defining the set of actions that can be performed against the Firebase server\n * (basically corresponds to our wire protocol).\n *\n * @interface\n */\nexport abstract class ServerActions {\n  abstract listen(\n    query: QueryContext,\n    currentHashFn: () => string,\n    tag: number | null,\n    onComplete: (a: string, b: unknown) => void\n  ): void;\n\n  /**\n   * Remove a listen.\n   */\n  abstract unlisten(query: QueryContext, tag: number | null): void;\n\n  /**\n   * Get the server value satisfying this query.\n   */\n  abstract get(query: QueryContext): Promise<string>;\n\n  put(\n    pathString: string,\n    data: unknown,\n    onComplete?: (a: string, b: string) => void,\n    hash?: string\n  ) {}\n\n  merge(\n    pathString: string,\n    data: unknown,\n    onComplete: (a: string, b: string | null) => void,\n    hash?: string\n  ) {}\n\n  /**\n   * Refreshes the auth token for the current connection.\n   * @param token - The authentication token\n   */\n  refreshAuthToken(token: string) {}\n\n  /**\n   * Refreshes the app check token for the current connection.\n   * @param token The app check token\n   */\n  refreshAppCheckToken(token: string) {}\n\n  onDisconnectPut(\n    pathString: string,\n    data: unknown,\n    onComplete?: (a: string, b: string) => void\n  ) {}\n\n  onDisconnectMerge(\n    pathString: string,\n    data: unknown,\n    onComplete?: (a: string, b: string) => void\n  ) {}\n\n  onDisconnectCancel(\n    pathString: string,\n    onComplete?: (a: string, b: string) => void\n  ) {}\n\n  reportStats(stats: { [k: string]: unknown }) {}\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the 
License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { assert } from '@firebase/util';\n\n/**\n * Base class to be used if you want to emit events. Call the constructor with\n * the set of allowed event names.\n */\nexport abstract class EventEmitter {\n private listeners_: {\n [eventType: string]: Array<{\n callback(...args: unknown[]): void;\n context: unknown;\n }>;\n } = {};\n\n constructor(private allowedEvents_: string[]) {\n assert(\n Array.isArray(allowedEvents_) && allowedEvents_.length > 0,\n 'Requires a non-empty array'\n );\n }\n\n /**\n * To be overridden by derived classes in order to fire an initial event when\n * somebody subscribes for data.\n *\n * @returns {Array.<*>} Array of parameters to trigger initial event with.\n */\n abstract getInitialEvent(eventType: string): unknown[];\n\n /**\n * To be called by derived classes to trigger events.\n */\n protected trigger(eventType: string, ...varArgs: unknown[]) {\n if (Array.isArray(this.listeners_[eventType])) {\n // Clone the list, since callbacks could add/remove listeners.\n const listeners = [...this.listeners_[eventType]];\n\n for (let i = 0; i < listeners.length; i++) {\n listeners[i].callback.apply(listeners[i].context, varArgs);\n }\n }\n }\n\n on(eventType: string, callback: (a: unknown) => void, context: unknown) {\n this.validateEventType_(eventType);\n this.listeners_[eventType] = this.listeners_[eventType] || [];\n this.listeners_[eventType].push({ callback, context });\n\n const eventData = this.getInitialEvent(eventType);\n if (eventData) {\n callback.apply(context, eventData);\n }\n }\n\n off(eventType: string, callback: (a: unknown) => void, context: unknown) {\n this.validateEventType_(eventType);\n const listeners = this.listeners_[eventType] || [];\n for (let i = 0; i < listeners.length; i++) {\n if (\n listeners[i].callback === callback &&\n (!context || context === listeners[i].context)\n ) {\n listeners.splice(i, 1);\n return;\n }\n }\n }\n\n private validateEventType_(eventType: string) {\n assert(\n this.allowedEvents_.find(et => {\n return et === eventType;\n }),\n 'Unknown event: ' + eventType\n );\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { assert, isMobileCordova } from '@firebase/util';\n\nimport { EventEmitter } from './EventEmitter';\n\n/**\n * Monitors online state (as reported by window.online/offline events).\n *\n * The expectation is that this could have many false positives (thinks we are online\n * when we're not), but no false negatives. 
So we can safely use it to determine when\n * we definitely cannot reach the internet.\n */\nexport class OnlineMonitor extends EventEmitter {\n private online_ = true;\n\n static getInstance() {\n return new OnlineMonitor();\n }\n\n constructor() {\n super(['online']);\n\n // We've had repeated complaints that Cordova apps can get stuck \"offline\", e.g.\n // https://forum.ionicframework.com/t/firebase-connection-is-lost-and-never-come-back/43810\n // It would seem that the 'online' event does not always fire consistently. So we disable it\n // for Cordova.\n if (\n typeof window !== 'undefined' &&\n typeof window.addEventListener !== 'undefined' &&\n !isMobileCordova()\n ) {\n window.addEventListener(\n 'online',\n () => {\n if (!this.online_) {\n this.online_ = true;\n this.trigger('online', true);\n }\n },\n false\n );\n\n window.addEventListener(\n 'offline',\n () => {\n if (this.online_) {\n this.online_ = false;\n this.trigger('online', false);\n }\n },\n false\n );\n }\n }\n\n getInitialEvent(eventType: string): boolean[] {\n assert(eventType === 'online', 'Unknown event type: ' + eventType);\n return [this.online_];\n }\n\n currentlyOnline(): boolean {\n return this.online_;\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { stringLength } from '@firebase/util';\n\nimport { nameCompare } from './util';\n\n/** Maximum key depth. */\nconst MAX_PATH_DEPTH = 32;\n\n/** Maximum number of (UTF8) bytes in a Firebase path. */\nconst MAX_PATH_LENGTH_BYTES = 768;\n\n/**\n * An immutable object representing a parsed path. 
It's immutable so that you\n * can pass them around to other functions without worrying about them changing\n * it.\n */\n\nexport class Path {\n pieces_: string[];\n pieceNum_: number;\n\n /**\n * @param pathOrString - Path string to parse, or another path, or the raw\n * tokens array\n */\n constructor(pathOrString: string | string[], pieceNum?: number) {\n if (pieceNum === void 0) {\n this.pieces_ = (pathOrString as string).split('/');\n\n // Remove empty pieces.\n let copyTo = 0;\n for (let i = 0; i < this.pieces_.length; i++) {\n if (this.pieces_[i].length > 0) {\n this.pieces_[copyTo] = this.pieces_[i];\n copyTo++;\n }\n }\n this.pieces_.length = copyTo;\n\n this.pieceNum_ = 0;\n } else {\n this.pieces_ = pathOrString as string[];\n this.pieceNum_ = pieceNum;\n }\n }\n\n toString(): string {\n let pathString = '';\n for (let i = this.pieceNum_; i < this.pieces_.length; i++) {\n if (this.pieces_[i] !== '') {\n pathString += '/' + this.pieces_[i];\n }\n }\n\n return pathString || '/';\n }\n}\n\nexport function newEmptyPath(): Path {\n return new Path('');\n}\n\nexport function pathGetFront(path: Path): string | null {\n if (path.pieceNum_ >= path.pieces_.length) {\n return null;\n }\n\n return path.pieces_[path.pieceNum_];\n}\n\n/**\n * @returns The number of segments in this path\n */\nexport function pathGetLength(path: Path): number {\n return path.pieces_.length - path.pieceNum_;\n}\n\nexport function pathPopFront(path: Path): Path {\n let pieceNum = path.pieceNum_;\n if (pieceNum < path.pieces_.length) {\n pieceNum++;\n }\n return new Path(path.pieces_, pieceNum);\n}\n\nexport function pathGetBack(path: Path): string | null {\n if (path.pieceNum_ < path.pieces_.length) {\n return path.pieces_[path.pieces_.length - 1];\n }\n\n return null;\n}\n\nexport function pathToUrlEncodedString(path: Path): string {\n let pathString = '';\n for (let i = path.pieceNum_; i < path.pieces_.length; i++) {\n if (path.pieces_[i] !== '') {\n pathString += '/' + encodeURIComponent(String(path.pieces_[i]));\n }\n }\n\n return pathString || '/';\n}\n\n/**\n * Shallow copy of the parts of the path.\n *\n */\nexport function pathSlice(path: Path, begin: number = 0): string[] {\n return path.pieces_.slice(path.pieceNum_ + begin);\n}\n\nexport function pathParent(path: Path): Path | null {\n if (path.pieceNum_ >= path.pieces_.length) {\n return null;\n }\n\n const pieces = [];\n for (let i = path.pieceNum_; i < path.pieces_.length - 1; i++) {\n pieces.push(path.pieces_[i]);\n }\n\n return new Path(pieces, 0);\n}\n\nexport function pathChild(path: Path, childPathObj: string | Path): Path {\n const pieces = [];\n for (let i = path.pieceNum_; i < path.pieces_.length; i++) {\n pieces.push(path.pieces_[i]);\n }\n\n if (childPathObj instanceof Path) {\n for (let i = childPathObj.pieceNum_; i < childPathObj.pieces_.length; i++) {\n pieces.push(childPathObj.pieces_[i]);\n }\n } else {\n const childPieces = childPathObj.split('/');\n for (let i = 0; i < childPieces.length; i++) {\n if (childPieces[i].length > 0) {\n pieces.push(childPieces[i]);\n }\n }\n }\n\n return new Path(pieces, 0);\n}\n\n/**\n * @returns True if there are no segments in this path\n */\nexport function pathIsEmpty(path: Path): boolean {\n return path.pieceNum_ >= path.pieces_.length;\n}\n\n/**\n * @returns The path from outerPath to innerPath\n */\nexport function newRelativePath(outerPath: Path, innerPath: Path): Path {\n const outer = pathGetFront(outerPath),\n inner = pathGetFront(innerPath);\n if (outer === null) {\n return innerPath;\n } 
else if (outer === inner) {\n return newRelativePath(pathPopFront(outerPath), pathPopFront(innerPath));\n } else {\n throw new Error(\n 'INTERNAL ERROR: innerPath (' +\n innerPath +\n ') is not within ' +\n 'outerPath (' +\n outerPath +\n ')'\n );\n }\n}\n\n/**\n * @returns -1, 0, 1 if left is less, equal, or greater than the right.\n */\nexport function pathCompare(left: Path, right: Path): number {\n const leftKeys = pathSlice(left, 0);\n const rightKeys = pathSlice(right, 0);\n for (let i = 0; i < leftKeys.length && i < rightKeys.length; i++) {\n const cmp = nameCompare(leftKeys[i], rightKeys[i]);\n if (cmp !== 0) {\n return cmp;\n }\n }\n if (leftKeys.length === rightKeys.length) {\n return 0;\n }\n return leftKeys.length < rightKeys.length ? -1 : 1;\n}\n\n/**\n * @returns true if paths are the same.\n */\nexport function pathEquals(path: Path, other: Path): boolean {\n if (pathGetLength(path) !== pathGetLength(other)) {\n return false;\n }\n\n for (\n let i = path.pieceNum_, j = other.pieceNum_;\n i <= path.pieces_.length;\n i++, j++\n ) {\n if (path.pieces_[i] !== other.pieces_[j]) {\n return false;\n }\n }\n\n return true;\n}\n\n/**\n * @returns True if this path is a parent of (or the same as) other\n */\nexport function pathContains(path: Path, other: Path): boolean {\n let i = path.pieceNum_;\n let j = other.pieceNum_;\n if (pathGetLength(path) > pathGetLength(other)) {\n return false;\n }\n while (i < path.pieces_.length) {\n if (path.pieces_[i] !== other.pieces_[j]) {\n return false;\n }\n ++i;\n ++j;\n }\n return true;\n}\n\n/**\n * Dynamic (mutable) path used to count path lengths.\n *\n * This class is used to efficiently check paths for valid\n * length (in UTF8 bytes) and depth (used in path validation).\n *\n * Throws Error exception if path is ever invalid.\n *\n * The definition of a path always begins with '/'.\n */\nexport class ValidationPath {\n parts_: string[];\n /** Initialize to number of '/' chars needed in path. */\n byteLength_: number;\n\n /**\n * @param path - Initial Path.\n * @param errorPrefix_ - Prefix for any error messages.\n */\n constructor(path: Path, public errorPrefix_: string) {\n this.parts_ = pathSlice(path, 0);\n /** Initialize to number of '/' chars needed in path. 
*/\n this.byteLength_ = Math.max(1, this.parts_.length);\n\n for (let i = 0; i < this.parts_.length; i++) {\n this.byteLength_ += stringLength(this.parts_[i]);\n }\n validationPathCheckValid(this);\n }\n}\n\nexport function validationPathPush(\n validationPath: ValidationPath,\n child: string\n): void {\n // Count the needed '/'\n if (validationPath.parts_.length > 0) {\n validationPath.byteLength_ += 1;\n }\n validationPath.parts_.push(child);\n validationPath.byteLength_ += stringLength(child);\n validationPathCheckValid(validationPath);\n}\n\nexport function validationPathPop(validationPath: ValidationPath): void {\n const last = validationPath.parts_.pop();\n validationPath.byteLength_ -= stringLength(last);\n // Un-count the previous '/'\n if (validationPath.parts_.length > 0) {\n validationPath.byteLength_ -= 1;\n }\n}\n\nfunction validationPathCheckValid(validationPath: ValidationPath): void {\n if (validationPath.byteLength_ > MAX_PATH_LENGTH_BYTES) {\n throw new Error(\n validationPath.errorPrefix_ +\n 'has a key path longer than ' +\n MAX_PATH_LENGTH_BYTES +\n ' bytes (' +\n validationPath.byteLength_ +\n ').'\n );\n }\n if (validationPath.parts_.length > MAX_PATH_DEPTH) {\n throw new Error(\n validationPath.errorPrefix_ +\n 'path specified exceeds the maximum depth that can be written (' +\n MAX_PATH_DEPTH +\n ') or object contains a cycle ' +\n validationPathToErrorString(validationPath)\n );\n }\n}\n\n/**\n * String for use in error messages - uses '.' notation for path.\n */\nexport function validationPathToErrorString(\n validationPath: ValidationPath\n): string {\n if (validationPath.parts_.length === 0) {\n return '';\n }\n return \"in property '\" + validationPath.parts_.join('.') + \"'\";\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { assert } from '@firebase/util';\n\nimport { EventEmitter } from './EventEmitter';\n\ndeclare const document: Document;\n\nexport class VisibilityMonitor extends EventEmitter {\n private visible_: boolean;\n\n static getInstance() {\n return new VisibilityMonitor();\n }\n\n constructor() {\n super(['visible']);\n let hidden: string;\n let visibilityChange: string;\n if (\n typeof document !== 'undefined' &&\n typeof document.addEventListener !== 'undefined'\n ) {\n if (typeof document['hidden'] !== 'undefined') {\n // Opera 12.10 and Firefox 18 and later support\n visibilityChange = 'visibilitychange';\n hidden = 'hidden';\n } else if (typeof document['mozHidden'] !== 'undefined') {\n visibilityChange = 'mozvisibilitychange';\n hidden = 'mozHidden';\n } else if (typeof document['msHidden'] !== 'undefined') {\n visibilityChange = 'msvisibilitychange';\n hidden = 'msHidden';\n } else if (typeof document['webkitHidden'] !== 'undefined') {\n visibilityChange = 'webkitvisibilitychange';\n hidden = 'webkitHidden';\n }\n }\n\n // Initially, we always assume we are visible. 
This ensures that in browsers\n // without page visibility support or in cases where we are never visible\n // (e.g. chrome extension), we act as if we are visible, i.e. don't delay\n // reconnects\n this.visible_ = true;\n\n if (visibilityChange) {\n document.addEventListener(\n visibilityChange,\n () => {\n const visible = !document[hidden];\n if (visible !== this.visible_) {\n this.visible_ = visible;\n this.trigger('visible', visible);\n }\n },\n false\n );\n }\n }\n\n getInitialEvent(eventType: string): boolean[] {\n assert(eventType === 'visible', 'Unknown event type: ' + eventType);\n return [this.visible_];\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n assert,\n contains,\n Deferred,\n isEmpty,\n isMobileCordova,\n isNodeSdk,\n isReactNative,\n isValidFormat,\n safeGet,\n stringify,\n isAdmin\n} from '@firebase/util';\n\nimport { Connection } from '../realtime/Connection';\n\nimport { AppCheckTokenProvider } from './AppCheckTokenProvider';\nimport { AuthTokenProvider } from './AuthTokenProvider';\nimport { RepoInfo } from './RepoInfo';\nimport { ServerActions } from './ServerActions';\nimport { OnlineMonitor } from './util/OnlineMonitor';\nimport { Path } from './util/Path';\nimport { error, log, logWrapper, warn, ObjectToUniqueKey } from './util/util';\nimport { VisibilityMonitor } from './util/VisibilityMonitor';\nimport { SDK_VERSION } from './version';\nimport { QueryContext } from './view/EventRegistration';\n\nconst RECONNECT_MIN_DELAY = 1000;\nconst RECONNECT_MAX_DELAY_DEFAULT = 60 * 5 * 1000; // 5 minutes in milliseconds (Case: 1858)\nconst RECONNECT_MAX_DELAY_FOR_ADMINS = 30 * 1000; // 30 seconds for admin clients (likely to be a backend server)\nconst RECONNECT_DELAY_MULTIPLIER = 1.3;\nconst RECONNECT_DELAY_RESET_TIMEOUT = 30000; // Reset delay back to MIN_DELAY after being connected for 30sec.\nconst SERVER_KILL_INTERRUPT_REASON = 'server_kill';\n\n// If auth fails repeatedly, we'll assume something is wrong and log a warning / back off.\nconst INVALID_TOKEN_THRESHOLD = 3;\n\ninterface ListenSpec {\n onComplete(s: string, p?: unknown): void;\n\n hashFn(): string;\n\n query: QueryContext;\n tag: number | null;\n}\n\ninterface OnDisconnectRequest {\n pathString: string;\n action: string;\n data: unknown;\n onComplete?: (a: string, b: string) => void;\n}\n\ninterface OutstandingPut {\n action: string;\n request: object;\n queued?: boolean;\n onComplete: (a: string, b?: string) => void;\n}\n\ninterface OutstandingGet {\n request: object;\n onComplete: (response: { [k: string]: unknown }) => void;\n}\n\n/**\n * Firebase connection. 
Abstracts wire protocol and handles reconnecting.\n *\n * NOTE: All JSON objects sent to the realtime connection must have property names enclosed\n * in quotes to make sure the closure compiler does not minify them.\n */\nexport class PersistentConnection extends ServerActions {\n  // Used for diagnostic logging.\n  id = PersistentConnection.nextPersistentConnectionId_++;\n  private log_ = logWrapper('p:' + this.id + ':');\n\n  private interruptReasons_: { [reason: string]: boolean } = {};\n  private readonly listens: Map<\n    /* path */ string,\n    Map</* queryId */ string, ListenSpec>\n  > = new Map();\n  private outstandingPuts_: OutstandingPut[] = [];\n  private outstandingGets_: OutstandingGet[] = [];\n  private outstandingPutCount_ = 0;\n  private outstandingGetCount_ = 0;\n  private onDisconnectRequestQueue_: OnDisconnectRequest[] = [];\n  private connected_ = false;\n  private reconnectDelay_ = RECONNECT_MIN_DELAY;\n  private maxReconnectDelay_ = RECONNECT_MAX_DELAY_DEFAULT;\n  private securityDebugCallback_: ((a: object) => void) | null = null;\n  lastSessionId: string | null = null;\n\n  private establishConnectionTimer_: number | null = null;\n\n  private visible_: boolean = false;\n\n  // Before we get connected, we keep a queue of pending messages to send.\n  private requestCBHash_: { [k: number]: (a: unknown) => void } = {};\n  private requestNumber_ = 0;\n\n  private realtime_: {\n    sendRequest(a: object): void;\n    close(): void;\n  } | null = null;\n\n  private authToken_: string | null = null;\n  private appCheckToken_: string | null = null;\n  private forceTokenRefresh_ = false;\n  private invalidAuthTokenCount_ = 0;\n  private invalidAppCheckTokenCount_ = 0;\n\n  private firstConnection_ = true;\n  private lastConnectionAttemptTime_: number | null = null;\n  private lastConnectionEstablishedTime_: number | null = null;\n\n  private static nextPersistentConnectionId_ = 0;\n\n  /**\n   * Counter for number of connections created. 
Mainly used for tagging in the logs\n   */\n  private static nextConnectionId_ = 0;\n\n  /**\n   * @param repoInfo_ - Data about the namespace we are connecting to\n   * @param applicationId_ - The Firebase App ID for this project\n   * @param onDataUpdate_ - A callback for new data from the server\n   */\n  constructor(\n    private repoInfo_: RepoInfo,\n    private applicationId_: string,\n    private onDataUpdate_: (\n      a: string,\n      b: unknown,\n      c: boolean,\n      d: number | null\n    ) => void,\n    private onConnectStatus_: (a: boolean) => void,\n    private onServerInfoUpdate_: (a: unknown) => void,\n    private authTokenProvider_: AuthTokenProvider,\n    private appCheckTokenProvider_: AppCheckTokenProvider,\n    private authOverride_?: object | null\n  ) {\n    super();\n\n    if (authOverride_ && !isNodeSdk()) {\n      throw new Error(\n        'Auth override specified in options, but not supported on non Node.js platforms'\n      );\n    }\n\n    VisibilityMonitor.getInstance().on('visible', this.onVisible_, this);\n\n    if (repoInfo_.host.indexOf('fblocal') === -1) {\n      OnlineMonitor.getInstance().on('online', this.onOnline_, this);\n    }\n  }\n\n  protected sendRequest(\n    action: string,\n    body: unknown,\n    onResponse?: (a: unknown) => void\n  ) {\n    const curReqNum = ++this.requestNumber_;\n\n    const msg = { r: curReqNum, a: action, b: body };\n    this.log_(stringify(msg));\n    assert(\n      this.connected_,\n      \"sendRequest call when we're not connected not allowed.\"\n    );\n    this.realtime_.sendRequest(msg);\n    if (onResponse) {\n      this.requestCBHash_[curReqNum] = onResponse;\n    }\n  }\n\n  get(query: QueryContext): Promise<string> {\n    this.initConnection_();\n\n    const deferred = new Deferred<string>();\n    const request = {\n      p: query._path.toString(),\n      q: query._queryObject\n    };\n    const outstandingGet = {\n      action: 'g',\n      request,\n      onComplete: (message: { [k: string]: unknown }) => {\n        const payload = message['d'] as string;\n        if (message['s'] === 'ok') {\n          deferred.resolve(payload);\n        } else {\n          deferred.reject(payload);\n        }\n      }\n    };\n    this.outstandingGets_.push(outstandingGet);\n    this.outstandingGetCount_++;\n    const index = this.outstandingGets_.length - 1;\n\n    if (this.connected_) {\n      this.sendGet_(index);\n    }\n\n    return deferred.promise;\n  }\n\n  listen(\n    query: QueryContext,\n    currentHashFn: () => string,\n    tag: number | null,\n    onComplete: (a: string, b: unknown) => void\n  ) {\n    this.initConnection_();\n\n    const queryId = query._queryIdentifier;\n    const pathString = query._path.toString();\n    this.log_('Listen called for ' + pathString + ' ' + queryId);\n    if (!this.listens.has(pathString)) {\n      this.listens.set(pathString, new Map());\n    }\n    assert(\n      query._queryParams.isDefault() || !query._queryParams.loadsAllData(),\n      'listen() called for non-default but complete query'\n    );\n    assert(\n      !this.listens.get(pathString)!.has(queryId),\n      `listen() called twice for same path/queryId.`\n    );\n    const listenSpec: ListenSpec = {\n      onComplete,\n      hashFn: currentHashFn,\n      query,\n      tag\n    };\n    this.listens.get(pathString)!.set(queryId, listenSpec);\n\n    if (this.connected_) {\n      this.sendListen_(listenSpec);\n    }\n  }\n\n  private sendGet_(index: number) {\n    const get = this.outstandingGets_[index];\n    this.sendRequest('g', get.request, (message: { [k: string]: unknown }) => {\n      delete this.outstandingGets_[index];\n      this.outstandingGetCount_--;\n      if (this.outstandingGetCount_ === 0) {\n        this.outstandingGets_ = [];\n      }\n      if (get.onComplete) {\n        get.onComplete(message);\n      }\n    });\n  }\n\n  private sendListen_(listenSpec: ListenSpec) {\n    const query = listenSpec.query;\n    const pathString = 
query._path.toString();\n    const queryId = query._queryIdentifier;\n    this.log_('Listen on ' + pathString + ' for ' + queryId);\n    const req: { [k: string]: unknown } = { /*path*/ p: pathString };\n\n    const action = 'q';\n\n    // Only bother to send query if it's non-default.\n    if (listenSpec.tag) {\n      req['q'] = query._queryObject;\n      req['t'] = listenSpec.tag;\n    }\n\n    req[/*hash*/ 'h'] = listenSpec.hashFn();\n\n    this.sendRequest(action, req, (message: { [k: string]: unknown }) => {\n      const payload: unknown = message[/*data*/ 'd'];\n      const status = message[/*status*/ 's'] as string;\n\n      // print warnings in any case...\n      PersistentConnection.warnOnListenWarnings_(payload, query);\n\n      const currentListenSpec =\n        this.listens.get(pathString) &&\n        this.listens.get(pathString)!.get(queryId);\n      // only trigger actions if the listen hasn't been removed and re-added\n      if (currentListenSpec === listenSpec) {\n        this.log_('listen response', message);\n\n        if (status !== 'ok') {\n          this.removeListen_(pathString, queryId);\n        }\n\n        if (listenSpec.onComplete) {\n          listenSpec.onComplete(status, payload);\n        }\n      }\n    });\n  }\n\n  private static warnOnListenWarnings_(payload: unknown, query: QueryContext) {\n    if (payload && typeof payload === 'object' && contains(payload, 'w')) {\n      // eslint-disable-next-line @typescript-eslint/no-explicit-any\n      const warnings = safeGet(payload as any, 'w');\n      if (Array.isArray(warnings) && ~warnings.indexOf('no_index')) {\n        const indexSpec =\n          '".indexOn": "' + query._queryParams.getIndex().toString() + '"';\n        const indexPath = query._path.toString();\n        warn(\n          `Using an unspecified index. Your data will be downloaded and ` +\n            `filtered on the client. Consider adding ${indexSpec} at ` +\n            `${indexPath} to your security rules for better performance.`\n        );\n      }\n    }\n  }\n\n  refreshAuthToken(token: string) {\n    this.authToken_ = token;\n    this.log_('Auth token refreshed');\n    if (this.authToken_) {\n      this.tryAuth();\n    } else {\n      //If we're connected we want to let the server know to unauthenticate us. If we're not connected, simply delete\n      //the credential so we don't become authenticated next time we connect.\n      if (this.connected_) {\n        this.sendRequest('unauth', {}, () => {});\n      }\n    }\n\n    this.reduceReconnectDelayIfAdminCredential_(token);\n  }\n\n  private reduceReconnectDelayIfAdminCredential_(credential: string) {\n    // NOTE: This isn't intended to be bulletproof (a malicious developer can always just modify the client).\n    // Additionally, we don't bother resetting the max delay back to the default if auth fails / expires.\n    const isFirebaseSecret = credential && credential.length === 40;\n    if (isFirebaseSecret || isAdmin(credential)) {\n      this.log_(\n        'Admin auth credential detected.  Reducing max reconnect time.'\n      );\n      this.maxReconnectDelay_ = RECONNECT_MAX_DELAY_FOR_ADMINS;\n    }\n  }\n\n  refreshAppCheckToken(token: string | null) {\n    this.appCheckToken_ = token;\n    this.log_('App check token refreshed');\n    if (this.appCheckToken_) {\n      this.tryAppCheck();\n    } else {\n      //If we're connected we want to let the server know to unauthenticate us.\n      //If we're not connected, simply delete the credential so we don't become\n      // authenticated next time we connect.\n      if (this.connected_) {\n        this.sendRequest('unappeck', {}, () => {});\n      }\n    }\n  }\n\n  /**\n   * Attempts to authenticate with the given credentials. 
If the authentication attempt fails, it's triggered like\n   * an auth revoked (the connection is closed).\n   */\n  tryAuth() {\n    if (this.connected_ && this.authToken_) {\n      const token = this.authToken_;\n      const authMethod = isValidFormat(token) ? 'auth' : 'gauth';\n      const requestData: { [k: string]: unknown } = { cred: token };\n      if (this.authOverride_ === null) {\n        requestData['noauth'] = true;\n      } else if (typeof this.authOverride_ === 'object') {\n        requestData['authvar'] = this.authOverride_;\n      }\n      this.sendRequest(\n        authMethod,\n        requestData,\n        (res: { [k: string]: unknown }) => {\n          const status = res[/*status*/ 's'] as string;\n          const data = (res[/*data*/ 'd'] as string) || 'error';\n\n          if (this.authToken_ === token) {\n            if (status === 'ok') {\n              this.invalidAuthTokenCount_ = 0;\n            } else {\n              // Triggers reconnect and force refresh for auth token\n              this.onAuthRevoked_(status, data);\n            }\n          }\n        }\n      );\n    }\n  }\n\n  /**\n   * Attempts to authenticate with the given token. If the authentication\n   * attempt fails, it's triggered like the token was revoked (the connection is\n   * closed).\n   */\n  tryAppCheck() {\n    if (this.connected_ && this.appCheckToken_) {\n      this.sendRequest(\n        'appcheck',\n        { 'token': this.appCheckToken_ },\n        (res: { [k: string]: unknown }) => {\n          const status = res[/*status*/ 's'] as string;\n          const data = (res[/*data*/ 'd'] as string) || 'error';\n          if (status === 'ok') {\n            this.invalidAppCheckTokenCount_ = 0;\n          } else {\n            this.onAppCheckRevoked_(status, data);\n          }\n        }\n      );\n    }\n  }\n\n  /**\n   * @inheritDoc\n   */\n  unlisten(query: QueryContext, tag: number | null) {\n    const pathString = query._path.toString();\n    const queryId = query._queryIdentifier;\n\n    this.log_('Unlisten called for ' + pathString + ' ' + queryId);\n\n    assert(\n      query._queryParams.isDefault() || !query._queryParams.loadsAllData(),\n      'unlisten() called for non-default but complete query'\n    );\n    const listen = this.removeListen_(pathString, queryId);\n    if (listen && this.connected_) {\n      this.sendUnlisten_(pathString, queryId, query._queryObject, tag);\n    }\n  }\n\n  private sendUnlisten_(\n    pathString: string,\n    queryId: string,\n    queryObj: object,\n    tag: number | null\n  ) {\n    this.log_('Unlisten on ' + pathString + ' for ' + queryId);\n\n    const req: { [k: string]: unknown } = { /*path*/ p: pathString };\n    const action = 'n';\n    // Only bother sending queryId if it's non-default.\n    if (tag) {\n      req['q'] = queryObj;\n      req['t'] = tag;\n    }\n\n    this.sendRequest(action, req);\n  }\n\n  onDisconnectPut(\n    pathString: string,\n    data: unknown,\n    onComplete?: (a: string, b: string) => void\n  ) {\n    this.initConnection_();\n\n    if (this.connected_) {\n      this.sendOnDisconnect_('o', pathString, data, onComplete);\n    } else {\n      this.onDisconnectRequestQueue_.push({\n        pathString,\n        action: 'o',\n        data,\n        onComplete\n      });\n    }\n  }\n\n  onDisconnectMerge(\n    pathString: string,\n    data: unknown,\n    onComplete?: (a: string, b: string) => void\n  ) {\n    this.initConnection_();\n\n    if (this.connected_) {\n      this.sendOnDisconnect_('om', pathString, data, onComplete);\n    } else {\n      this.onDisconnectRequestQueue_.push({\n        pathString,\n        action: 'om',\n        data,\n        onComplete\n      });\n    }\n  }\n\n  onDisconnectCancel(\n    pathString: string,\n    onComplete?: (a: string, b: string) => void\n  ) {\n    this.initConnection_();\n\n    if (this.connected_) {\n      this.sendOnDisconnect_('oc', pathString, null, onComplete);\n    } else {\n      this.onDisconnectRequestQueue_.push({\n        pathString,\n        action: 'oc',\n        data: null,\n        onComplete\n      });\n    }\n  }\n\n  private sendOnDisconnect_(\n    action: 
string,\n pathString: string,\n data: unknown,\n onComplete: (a: string, b: string) => void\n ) {\n const request = { /*path*/ p: pathString, /*data*/ d: data };\n this.log_('onDisconnect ' + action, request);\n this.sendRequest(action, request, (response: { [k: string]: unknown }) => {\n if (onComplete) {\n setTimeout(() => {\n onComplete(\n response[/*status*/ 's'] as string,\n response[/* data */ 'd'] as string\n );\n }, Math.floor(0));\n }\n });\n }\n\n put(\n pathString: string,\n data: unknown,\n onComplete?: (a: string, b: string) => void,\n hash?: string\n ) {\n this.putInternal('p', pathString, data, onComplete, hash);\n }\n\n merge(\n pathString: string,\n data: unknown,\n onComplete: (a: string, b: string | null) => void,\n hash?: string\n ) {\n this.putInternal('m', pathString, data, onComplete, hash);\n }\n\n putInternal(\n action: string,\n pathString: string,\n data: unknown,\n onComplete: (a: string, b: string | null) => void,\n hash?: string\n ) {\n this.initConnection_();\n\n const request: { [k: string]: unknown } = {\n /*path*/ p: pathString,\n /*data*/ d: data\n };\n\n if (hash !== undefined) {\n request[/*hash*/ 'h'] = hash;\n }\n\n // TODO: Only keep track of the most recent put for a given path?\n this.outstandingPuts_.push({\n action,\n request,\n onComplete\n });\n\n this.outstandingPutCount_++;\n const index = this.outstandingPuts_.length - 1;\n\n if (this.connected_) {\n this.sendPut_(index);\n } else {\n this.log_('Buffering put: ' + pathString);\n }\n }\n\n private sendPut_(index: number) {\n const action = this.outstandingPuts_[index].action;\n const request = this.outstandingPuts_[index].request;\n const onComplete = this.outstandingPuts_[index].onComplete;\n this.outstandingPuts_[index].queued = this.connected_;\n\n this.sendRequest(action, request, (message: { [k: string]: unknown }) => {\n this.log_(action + ' response', message);\n\n delete this.outstandingPuts_[index];\n this.outstandingPutCount_--;\n\n // Clean up array occasionally.\n if (this.outstandingPutCount_ === 0) {\n this.outstandingPuts_ = [];\n }\n\n if (onComplete) {\n onComplete(\n message[/*status*/ 's'] as string,\n message[/* data */ 'd'] as string\n );\n }\n });\n }\n\n reportStats(stats: { [k: string]: unknown }) {\n // If we're not connected, we just drop the stats.\n if (this.connected_) {\n const request = { /*counters*/ c: stats };\n this.log_('reportStats', request);\n\n this.sendRequest(/*stats*/ 's', request, result => {\n const status = result[/*status*/ 's'];\n if (status !== 'ok') {\n const errorReason = result[/* data */ 'd'];\n this.log_('reportStats', 'Error sending stats: ' + errorReason);\n }\n });\n }\n }\n\n private onDataMessage_(message: { [k: string]: unknown }) {\n if ('r' in message) {\n // this is a response\n this.log_('from server: ' + stringify(message));\n const reqNum = message['r'] as string;\n const onResponse = this.requestCBHash_[reqNum];\n if (onResponse) {\n delete this.requestCBHash_[reqNum];\n onResponse(message[/*body*/ 'b']);\n }\n } else if ('error' in message) {\n throw 'A server-side error has occurred: ' + message['error'];\n } else if ('a' in message) {\n // a and b are action and body, respectively\n this.onDataPush_(message['a'] as string, message['b'] as {});\n }\n }\n\n private onDataPush_(action: string, body: { [k: string]: unknown }) {\n this.log_('handleServerMessage', action, body);\n if (action === 'd') {\n this.onDataUpdate_(\n body[/*path*/ 'p'] as string,\n body[/*data*/ 'd'],\n /*isMerge*/ false,\n body['t'] as number\n );\n } 
else if (action === 'm') {\n this.onDataUpdate_(\n body[/*path*/ 'p'] as string,\n body[/*data*/ 'd'],\n /*isMerge=*/ true,\n body['t'] as number\n );\n } else if (action === 'c') {\n this.onListenRevoked_(\n body[/*path*/ 'p'] as string,\n body[/*query*/ 'q'] as unknown[]\n );\n } else if (action === 'ac') {\n this.onAuthRevoked_(\n body[/*status code*/ 's'] as string,\n body[/* explanation */ 'd'] as string\n );\n } else if (action === 'apc') {\n this.onAppCheckRevoked_(\n body[/*status code*/ 's'] as string,\n body[/* explanation */ 'd'] as string\n );\n } else if (action === 'sd') {\n this.onSecurityDebugPacket_(body);\n } else {\n error(\n 'Unrecognized action received from server: ' +\n stringify(action) +\n '\\nAre you using the latest client?'\n );\n }\n }\n\n private onReady_(timestamp: number, sessionId: string) {\n this.log_('connection ready');\n this.connected_ = true;\n this.lastConnectionEstablishedTime_ = new Date().getTime();\n this.handleTimestamp_(timestamp);\n this.lastSessionId = sessionId;\n if (this.firstConnection_) {\n this.sendConnectStats_();\n }\n this.restoreState_();\n this.firstConnection_ = false;\n this.onConnectStatus_(true);\n }\n\n private scheduleConnect_(timeout: number) {\n assert(\n !this.realtime_,\n \"Scheduling a connect when we're already connected/ing?\"\n );\n\n if (this.establishConnectionTimer_) {\n clearTimeout(this.establishConnectionTimer_);\n }\n\n // NOTE: Even when timeout is 0, it's important to do a setTimeout to work around an infuriating \"Security Error\" in\n // Firefox when trying to write to our long-polling iframe in some scenarios (e.g. Forge or our unit tests).\n\n this.establishConnectionTimer_ = setTimeout(() => {\n this.establishConnectionTimer_ = null;\n this.establishConnection_();\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n }, Math.floor(timeout)) as any;\n }\n\n private initConnection_() {\n if (!this.realtime_ && this.firstConnection_) {\n this.scheduleConnect_(0);\n }\n }\n\n private onVisible_(visible: boolean) {\n // NOTE: Tabbing away and back to a window will defeat our reconnect backoff, but I think that's fine.\n if (\n visible &&\n !this.visible_ &&\n this.reconnectDelay_ === this.maxReconnectDelay_\n ) {\n this.log_('Window became visible. Reducing delay.');\n this.reconnectDelay_ = RECONNECT_MIN_DELAY;\n\n if (!this.realtime_) {\n this.scheduleConnect_(0);\n }\n }\n this.visible_ = visible;\n }\n\n private onOnline_(online: boolean) {\n if (online) {\n this.log_('Browser went online.');\n this.reconnectDelay_ = RECONNECT_MIN_DELAY;\n if (!this.realtime_) {\n this.scheduleConnect_(0);\n }\n } else {\n this.log_('Browser went offline. Killing connection.');\n if (this.realtime_) {\n this.realtime_.close();\n }\n }\n }\n\n private onRealtimeDisconnect_() {\n this.log_('data client disconnected');\n this.connected_ = false;\n this.realtime_ = null;\n\n // Since we don't know if our sent transactions succeeded or not, we need to cancel them.\n this.cancelSentTransactions_();\n\n // Clear out the pending requests.\n this.requestCBHash_ = {};\n\n if (this.shouldReconnect_()) {\n if (!this.visible_) {\n this.log_(\"Window isn't visible. 
Delaying reconnect.\");\n this.reconnectDelay_ = this.maxReconnectDelay_;\n this.lastConnectionAttemptTime_ = new Date().getTime();\n } else if (this.lastConnectionEstablishedTime_) {\n // If we've been connected long enough, reset reconnect delay to minimum.\n const timeSinceLastConnectSucceeded =\n new Date().getTime() - this.lastConnectionEstablishedTime_;\n if (timeSinceLastConnectSucceeded > RECONNECT_DELAY_RESET_TIMEOUT) {\n this.reconnectDelay_ = RECONNECT_MIN_DELAY;\n }\n this.lastConnectionEstablishedTime_ = null;\n }\n\n const timeSinceLastConnectAttempt =\n new Date().getTime() - this.lastConnectionAttemptTime_;\n let reconnectDelay = Math.max(\n 0,\n this.reconnectDelay_ - timeSinceLastConnectAttempt\n );\n reconnectDelay = Math.random() * reconnectDelay;\n\n this.log_('Trying to reconnect in ' + reconnectDelay + 'ms');\n this.scheduleConnect_(reconnectDelay);\n\n // Adjust reconnect delay for next time.\n this.reconnectDelay_ = Math.min(\n this.maxReconnectDelay_,\n this.reconnectDelay_ * RECONNECT_DELAY_MULTIPLIER\n );\n }\n this.onConnectStatus_(false);\n }\n\n private async establishConnection_() {\n if (this.shouldReconnect_()) {\n this.log_('Making a connection attempt');\n this.lastConnectionAttemptTime_ = new Date().getTime();\n this.lastConnectionEstablishedTime_ = null;\n const onDataMessage = this.onDataMessage_.bind(this);\n const onReady = this.onReady_.bind(this);\n const onDisconnect = this.onRealtimeDisconnect_.bind(this);\n const connId = this.id + ':' + PersistentConnection.nextConnectionId_++;\n const lastSessionId = this.lastSessionId;\n let canceled = false;\n let connection: Connection | null = null;\n const closeFn = function () {\n if (connection) {\n connection.close();\n } else {\n canceled = true;\n onDisconnect();\n }\n };\n const sendRequestFn = function (msg: object) {\n assert(\n connection,\n \"sendRequest call when we're not connected not allowed.\"\n );\n connection.sendRequest(msg);\n };\n\n this.realtime_ = {\n close: closeFn,\n sendRequest: sendRequestFn\n };\n\n const forceRefresh = this.forceTokenRefresh_;\n this.forceTokenRefresh_ = false;\n\n try {\n // First fetch auth and app check token, and establish connection after\n // fetching the token was successful\n const [authToken, appCheckToken] = await Promise.all([\n this.authTokenProvider_.getToken(forceRefresh),\n this.appCheckTokenProvider_.getToken(forceRefresh)\n ]);\n\n if (!canceled) {\n log('getToken() completed. 
Creating connection.');\n this.authToken_ = authToken && authToken.accessToken;\n this.appCheckToken_ = appCheckToken && appCheckToken.token;\n connection = new Connection(\n connId,\n this.repoInfo_,\n this.applicationId_,\n this.appCheckToken_,\n this.authToken_,\n onDataMessage,\n onReady,\n onDisconnect,\n /* onKill= */ reason => {\n warn(reason + ' (' + this.repoInfo_.toString() + ')');\n this.interrupt(SERVER_KILL_INTERRUPT_REASON);\n },\n lastSessionId\n );\n } else {\n log('getToken() completed but was canceled');\n }\n } catch (error) {\n this.log_('Failed to get token: ' + error);\n if (!canceled) {\n if (this.repoInfo_.nodeAdmin) {\n // This may be a critical error for the Admin Node.js SDK, so log a warning.\n // But getToken() may also just have temporarily failed, so we still want to\n // continue retrying.\n warn(error);\n }\n closeFn();\n }\n }\n }\n }\n\n interrupt(reason: string) {\n log('Interrupting connection for reason: ' + reason);\n this.interruptReasons_[reason] = true;\n if (this.realtime_) {\n this.realtime_.close();\n } else {\n if (this.establishConnectionTimer_) {\n clearTimeout(this.establishConnectionTimer_);\n this.establishConnectionTimer_ = null;\n }\n if (this.connected_) {\n this.onRealtimeDisconnect_();\n }\n }\n }\n\n resume(reason: string) {\n log('Resuming connection for reason: ' + reason);\n delete this.interruptReasons_[reason];\n if (isEmpty(this.interruptReasons_)) {\n this.reconnectDelay_ = RECONNECT_MIN_DELAY;\n if (!this.realtime_) {\n this.scheduleConnect_(0);\n }\n }\n }\n\n private handleTimestamp_(timestamp: number) {\n const delta = timestamp - new Date().getTime();\n this.onServerInfoUpdate_({ serverTimeOffset: delta });\n }\n\n private cancelSentTransactions_() {\n for (let i = 0; i < this.outstandingPuts_.length; i++) {\n const put = this.outstandingPuts_[i];\n if (put && /*hash*/ 'h' in put.request && put.queued) {\n if (put.onComplete) {\n put.onComplete('disconnect');\n }\n\n delete this.outstandingPuts_[i];\n this.outstandingPutCount_--;\n }\n }\n\n // Clean up array occasionally.\n if (this.outstandingPutCount_ === 0) {\n this.outstandingPuts_ = [];\n }\n }\n\n private onListenRevoked_(pathString: string, query?: unknown[]) {\n // Remove the listen and manufacture a \"permission_denied\" error for the failed listen.\n let queryId;\n if (!query) {\n queryId = 'default';\n } else {\n queryId = query.map(q => ObjectToUniqueKey(q)).join('$');\n }\n const listen = this.removeListen_(pathString, queryId);\n if (listen && listen.onComplete) {\n listen.onComplete('permission_denied');\n }\n }\n\n private removeListen_(pathString: string, queryId: string): ListenSpec {\n const normalizedPathString = new Path(pathString).toString(); // normalize path.\n let listen;\n if (this.listens.has(normalizedPathString)) {\n const map = this.listens.get(normalizedPathString)!;\n listen = map.get(queryId);\n map.delete(queryId);\n if (map.size === 0) {\n this.listens.delete(normalizedPathString);\n }\n } else {\n // all listens for this path has already been removed\n listen = undefined;\n }\n return listen;\n }\n\n private onAuthRevoked_(statusCode: string, explanation: string) {\n log('Auth token revoked: ' + statusCode + '/' + explanation);\n this.authToken_ = null;\n this.forceTokenRefresh_ = true;\n this.realtime_.close();\n if (statusCode === 'invalid_token' || statusCode === 'permission_denied') {\n // We'll wait a couple times before logging the warning / increasing the\n // retry period since oauth tokens will report as \"invalid\" if 
they're\n // just expired. Plus there may be transient issues that resolve themselves.\n this.invalidAuthTokenCount_++;\n if (this.invalidAuthTokenCount_ >= INVALID_TOKEN_THRESHOLD) {\n // Set a long reconnect delay because recovery is unlikely\n this.reconnectDelay_ = RECONNECT_MAX_DELAY_FOR_ADMINS;\n\n // Notify the auth token provider that the token is invalid, which will log\n // a warning\n this.authTokenProvider_.notifyForInvalidToken();\n }\n }\n }\n\n private onAppCheckRevoked_(statusCode: string, explanation: string) {\n log('App check token revoked: ' + statusCode + '/' + explanation);\n this.appCheckToken_ = null;\n this.forceTokenRefresh_ = true;\n // Note: We don't close the connection as the developer may not have\n // enforcement enabled. The backend closes connections with enforcements.\n if (statusCode === 'invalid_token' || statusCode === 'permission_denied') {\n // We'll wait a couple times before logging the warning / increasing the\n // retry period since oauth tokens will report as \"invalid\" if they're\n // just expired. Plus there may be transient issues that resolve themselves.\n this.invalidAppCheckTokenCount_++;\n if (this.invalidAppCheckTokenCount_ >= INVALID_TOKEN_THRESHOLD) {\n this.appCheckTokenProvider_.notifyForInvalidToken();\n }\n }\n }\n\n private onSecurityDebugPacket_(body: { [k: string]: unknown }) {\n if (this.securityDebugCallback_) {\n this.securityDebugCallback_(body);\n } else {\n if ('msg' in body) {\n console.log(\n 'FIREBASE: ' + (body['msg'] as string).replace('\\n', '\\nFIREBASE: ')\n );\n }\n }\n }\n\n private restoreState_() {\n //Re-authenticate ourselves if we have a credential stored.\n this.tryAuth();\n this.tryAppCheck();\n\n // Puts depend on having received the corresponding data update from the server before they complete, so we must\n // make sure to send listens before puts.\n for (const queries of this.listens.values()) {\n for (const listenSpec of queries.values()) {\n this.sendListen_(listenSpec);\n }\n }\n\n for (let i = 0; i < this.outstandingPuts_.length; i++) {\n if (this.outstandingPuts_[i]) {\n this.sendPut_(i);\n }\n }\n\n while (this.onDisconnectRequestQueue_.length) {\n const request = this.onDisconnectRequestQueue_.shift();\n this.sendOnDisconnect_(\n request.action,\n request.pathString,\n request.data,\n request.onComplete\n );\n }\n\n for (let i = 0; i < this.outstandingGets_.length; i++) {\n if (this.outstandingGets_[i]) {\n this.sendGet_(i);\n }\n }\n }\n\n /**\n * Sends client stats for first connection\n */\n private sendConnectStats_() {\n const stats: { [k: string]: number } = {};\n\n let clientName = 'js';\n if (isNodeSdk()) {\n if (this.repoInfo_.nodeAdmin) {\n clientName = 'admin_node';\n } else {\n clientName = 'node';\n }\n }\n\n stats['sdk.' + clientName + '.' 
+ SDK_VERSION.replace(/\\./g, '-')] = 1;\n\n if (isMobileCordova()) {\n stats['framework.cordova'] = 1;\n } else if (isReactNative()) {\n stats['framework.reactnative'] = 1;\n }\n this.reportStats(stats);\n }\n\n private shouldReconnect_(): boolean {\n const online = OnlineMonitor.getInstance().currentlyOnline();\n return isEmpty(this.interruptReasons_) && online;\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Path } from '../util/Path';\n\nimport { Index } from './indexes/Index';\n\n/**\n * Node is an interface defining the common functionality for nodes in\n * a DataSnapshot.\n *\n * @interface\n */\nexport interface Node {\n /**\n * Whether this node is a leaf node.\n * @returns Whether this is a leaf node.\n */\n isLeafNode(): boolean;\n\n /**\n * Gets the priority of the node.\n * @returns The priority of the node.\n */\n getPriority(): Node;\n\n /**\n * Returns a duplicate node with the new priority.\n * @param newPriorityNode - New priority to set for the node.\n * @returns Node with new priority.\n */\n updatePriority(newPriorityNode: Node): Node;\n\n /**\n * Returns the specified immediate child, or null if it doesn't exist.\n * @param childName - The name of the child to retrieve.\n * @returns The retrieved child, or an empty node.\n */\n getImmediateChild(childName: string): Node;\n\n /**\n * Returns a child by path, or null if it doesn't exist.\n * @param path - The path of the child to retrieve.\n * @returns The retrieved child or an empty node.\n */\n getChild(path: Path): Node;\n\n /**\n * Returns the name of the child immediately prior to the specified childNode, or null.\n * @param childName - The name of the child to find the predecessor of.\n * @param childNode - The node to find the predecessor of.\n * @param index - The index to use to determine the predecessor\n * @returns The name of the predecessor child, or null if childNode is the first child.\n */\n getPredecessorChildName(\n childName: string,\n childNode: Node,\n index: Index\n ): string | null;\n\n /**\n * Returns a duplicate node, with the specified immediate child updated.\n * Any value in the node will be removed.\n * @param childName - The name of the child to update.\n * @param newChildNode - The new child node\n * @returns The updated node.\n */\n updateImmediateChild(childName: string, newChildNode: Node): Node;\n\n /**\n * Returns a duplicate node, with the specified child updated. 
Any value will\n * be removed.\n * @param path - The path of the child to update.\n * @param newChildNode - The new child node, which may be an empty node\n * @returns The updated node.\n */\n updateChild(path: Path, newChildNode: Node): Node;\n\n /**\n * True if the immediate child specified exists\n */\n hasChild(childName: string): boolean;\n\n /**\n * @returns True if this node has no value or children.\n */\n isEmpty(): boolean;\n\n /**\n * @returns The number of children of this node.\n */\n numChildren(): number;\n\n /**\n * Calls action for each child.\n * @param action - Action to be called for\n * each child. It's passed the child name and the child node.\n * @returns The first truthy value return by action, or the last falsey one\n */\n forEachChild(index: Index, action: (a: string, b: Node) => void): unknown;\n\n /**\n * @param exportFormat - True for export format (also wire protocol format).\n * @returns Value of this node as JSON.\n */\n val(exportFormat?: boolean): unknown;\n\n /**\n * @returns hash representing the node contents.\n */\n hash(): string;\n\n /**\n * @param other - Another node\n * @returns -1 for less than, 0 for equal, 1 for greater than other\n */\n compareTo(other: Node): number;\n\n /**\n * @returns Whether or not this snapshot equals other\n */\n equals(other: Node): boolean;\n\n /**\n * @returns This node, with the specified index now available\n */\n withIndex(indexDefinition: Index): Node;\n\n isIndexed(indexDefinition: Index): boolean;\n}\n\nexport class NamedNode {\n constructor(public name: string, public node: Node) {}\n\n static Wrap(name: string, node: Node) {\n return new NamedNode(name, node);\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Comparator } from '../../util/SortedMap';\nimport { MIN_NAME } from '../../util/util';\nimport { Node, NamedNode } from '../Node';\n\nexport abstract class Index {\n abstract compare(a: NamedNode, b: NamedNode): number;\n\n abstract isDefinedOn(node: Node): boolean;\n\n /**\n * @returns A standalone comparison function for\n * this index\n */\n getCompare(): Comparator {\n return this.compare.bind(this);\n }\n\n /**\n * Given a before and after value for a node, determine if the indexed value has changed. 
Even if they are different,\n * it's possible that the changes are isolated to parts of the snapshot that are not indexed.\n *\n *\n * @returns True if the portion of the snapshot being indexed changed between oldNode and newNode\n */\n indexedValueChanged(oldNode: Node, newNode: Node): boolean {\n const oldWrapped = new NamedNode(MIN_NAME, oldNode);\n const newWrapped = new NamedNode(MIN_NAME, newNode);\n return this.compare(oldWrapped, newWrapped) !== 0;\n }\n\n /**\n * @returns a node wrapper that will sort equal to or less than\n * any other node wrapper, using this index\n */\n minPost(): NamedNode {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n return (NamedNode as any).MIN;\n }\n\n /**\n * @returns a node wrapper that will sort greater than or equal to\n * any other node wrapper, using this index\n */\n abstract maxPost(): NamedNode;\n\n abstract makePost(indexValue: unknown, name: string): NamedNode;\n\n /**\n * @returns String representation for inclusion in a query spec\n */\n abstract toString(): string;\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { assert, assertionError } from '@firebase/util';\n\nimport { nameCompare, MAX_NAME } from '../../util/util';\nimport { ChildrenNode } from '../ChildrenNode';\nimport { Node, NamedNode } from '../Node';\n\nimport { Index } from './Index';\n\nlet __EMPTY_NODE: ChildrenNode;\n\nexport class KeyIndex extends Index {\n static get __EMPTY_NODE() {\n return __EMPTY_NODE;\n }\n\n static set __EMPTY_NODE(val) {\n __EMPTY_NODE = val;\n }\n compare(a: NamedNode, b: NamedNode): number {\n return nameCompare(a.name, b.name);\n }\n isDefinedOn(node: Node): boolean {\n // We could probably return true here (since every node has a key), but it's never called\n // so just leaving unimplemented for now.\n throw assertionError('KeyIndex.isDefinedOn not expected to be called.');\n }\n indexedValueChanged(oldNode: Node, newNode: Node): boolean {\n return false; // The key for a node never changes.\n }\n minPost() {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n return (NamedNode as any).MIN;\n }\n maxPost(): NamedNode {\n // TODO: This should really be created once and cached in a static property, but\n // NamedNode isn't defined yet, so I can't use it in a static. 
Bleh.\n return new NamedNode(MAX_NAME, __EMPTY_NODE);\n }\n\n makePost(indexValue: string, name: string): NamedNode {\n assert(\n typeof indexValue === 'string',\n 'KeyIndex indexValue must always be a string.'\n );\n // We just use empty node, but it'll never be compared, since our comparator only looks at name.\n return new NamedNode(indexValue, __EMPTY_NODE);\n }\n\n /**\n * @returns String representation for inclusion in a query spec\n */\n toString(): string {\n return '.key';\n }\n}\n\nexport const KEY_INDEX = new KeyIndex();\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n/**\n * @fileoverview Implementation of an immutable SortedMap using a Left-leaning\n * Red-Black Tree, adapted from the implementation in Mugs\n * (http://mads379.github.com/mugs/) by Mads Hartmann Jensen\n * (mads379\\@gmail.com).\n *\n * Original paper on Left-leaning Red-Black Trees:\n * http://www.cs.princeton.edu/~rs/talks/LLRB/LLRB.pdf\n *\n * Invariant 1: No red node has a red child\n * Invariant 2: Every leaf path has the same number of black nodes\n * Invariant 3: Only the left child can be red (left leaning)\n */\n\n// TODO: There are some improvements I'd like to make to improve memory / perf:\n// * Create two prototypes, LLRedNode and LLBlackNode, instead of storing a\n// color property in every node.\n// TODO: It would also be good (and possibly necessary) to create a base\n// interface for LLRBNode and LLRBEmptyNode.\n\nexport type Comparator = (key1: K, key2: K) => number;\n\n/**\n * An iterator over an LLRBNode.\n */\nexport class SortedMapIterator {\n private nodeStack_: Array | LLRBEmptyNode> = [];\n\n /**\n * @param node - Node to iterate.\n * @param isReverse_ - Whether or not to iterate in reverse\n */\n constructor(\n node: LLRBNode | LLRBEmptyNode,\n startKey: K | null,\n comparator: Comparator,\n private isReverse_: boolean,\n private resultGenerator_: ((k: K, v: V) => T) | null = null\n ) {\n let cmp = 1;\n while (!node.isEmpty()) {\n node = node as LLRBNode;\n cmp = startKey ? comparator(node.key, startKey) : 1;\n // flip the comparison if we're going in reverse\n if (isReverse_) {\n cmp *= -1;\n }\n\n if (cmp < 0) {\n // This node is less than our start key. ignore it\n if (this.isReverse_) {\n node = node.left;\n } else {\n node = node.right;\n }\n } else if (cmp === 0) {\n // This node is exactly equal to our start key. 
Push it on the stack, but stop iterating;\n this.nodeStack_.push(node);\n break;\n } else {\n // This node is greater than our start key, add it to the stack and move to the next one\n this.nodeStack_.push(node);\n if (this.isReverse_) {\n node = node.right;\n } else {\n node = node.left;\n }\n }\n }\n }\n\n getNext(): T {\n if (this.nodeStack_.length === 0) {\n return null;\n }\n\n let node = this.nodeStack_.pop();\n let result: T;\n if (this.resultGenerator_) {\n result = this.resultGenerator_(node.key, node.value);\n } else {\n result = { key: node.key, value: node.value } as unknown as T;\n }\n\n if (this.isReverse_) {\n node = node.left;\n while (!node.isEmpty()) {\n this.nodeStack_.push(node);\n node = node.right;\n }\n } else {\n node = node.right;\n while (!node.isEmpty()) {\n this.nodeStack_.push(node);\n node = node.left;\n }\n }\n\n return result;\n }\n\n hasNext(): boolean {\n return this.nodeStack_.length > 0;\n }\n\n peek(): T {\n if (this.nodeStack_.length === 0) {\n return null;\n }\n\n const node = this.nodeStack_[this.nodeStack_.length - 1];\n if (this.resultGenerator_) {\n return this.resultGenerator_(node.key, node.value);\n } else {\n return { key: node.key, value: node.value } as unknown as T;\n }\n }\n}\n\n/**\n * Represents a node in a Left-leaning Red-Black tree.\n */\nexport class LLRBNode {\n color: boolean;\n left: LLRBNode | LLRBEmptyNode;\n right: LLRBNode | LLRBEmptyNode;\n\n /**\n * @param key - Key associated with this node.\n * @param value - Value associated with this node.\n * @param color - Whether this node is red.\n * @param left - Left child.\n * @param right - Right child.\n */\n constructor(\n public key: K,\n public value: V,\n color: boolean | null,\n left?: LLRBNode | LLRBEmptyNode | null,\n right?: LLRBNode | LLRBEmptyNode | null\n ) {\n this.color = color != null ? color : LLRBNode.RED;\n this.left =\n left != null ? left : (SortedMap.EMPTY_NODE as LLRBEmptyNode);\n this.right =\n right != null ? right : (SortedMap.EMPTY_NODE as LLRBEmptyNode);\n }\n\n static RED = true;\n static BLACK = false;\n\n /**\n * Returns a copy of the current node, optionally replacing pieces of it.\n *\n * @param key - New key for the node, or null.\n * @param value - New value for the node, or null.\n * @param color - New color for the node, or null.\n * @param left - New left child for the node, or null.\n * @param right - New right child for the node, or null.\n * @returns The node copy.\n */\n copy(\n key: K | null,\n value: V | null,\n color: boolean | null,\n left: LLRBNode | LLRBEmptyNode | null,\n right: LLRBNode | LLRBEmptyNode | null\n ): LLRBNode {\n return new LLRBNode(\n key != null ? key : this.key,\n value != null ? value : this.value,\n color != null ? color : this.color,\n left != null ? left : this.left,\n right != null ? right : this.right\n );\n }\n\n /**\n * @returns The total number of nodes in the tree.\n */\n count(): number {\n return this.left.count() + 1 + this.right.count();\n }\n\n /**\n * @returns True if the tree is empty.\n */\n isEmpty(): boolean {\n return false;\n }\n\n /**\n * Traverses the tree in key order and calls the specified action function\n * for each node.\n *\n * @param action - Callback function to be called for each\n * node. 
If it returns true, traversal is aborted.\n * @returns The first truthy value returned by action, or the last falsey\n * value returned by action\n */\n inorderTraversal(action: (k: K, v: V) => unknown): boolean {\n return (\n this.left.inorderTraversal(action) ||\n !!action(this.key, this.value) ||\n this.right.inorderTraversal(action)\n );\n }\n\n /**\n * Traverses the tree in reverse key order and calls the specified action function\n * for each node.\n *\n * @param action - Callback function to be called for each\n * node. If it returns true, traversal is aborted.\n * @returns True if traversal was aborted.\n */\n reverseTraversal(action: (k: K, v: V) => void): boolean {\n return (\n this.right.reverseTraversal(action) ||\n action(this.key, this.value) ||\n this.left.reverseTraversal(action)\n );\n }\n\n /**\n * @returns The minimum node in the tree.\n */\n private min_(): LLRBNode {\n if (this.left.isEmpty()) {\n return this;\n } else {\n return (this.left as LLRBNode).min_();\n }\n }\n\n /**\n * @returns The maximum key in the tree.\n */\n minKey(): K {\n return this.min_().key;\n }\n\n /**\n * @returns The maximum key in the tree.\n */\n maxKey(): K {\n if (this.right.isEmpty()) {\n return this.key;\n } else {\n return this.right.maxKey();\n }\n }\n\n /**\n * @param key - Key to insert.\n * @param value - Value to insert.\n * @param comparator - Comparator.\n * @returns New tree, with the key/value added.\n */\n insert(key: K, value: V, comparator: Comparator): LLRBNode {\n let n: LLRBNode = this;\n const cmp = comparator(key, n.key);\n if (cmp < 0) {\n n = n.copy(null, null, null, n.left.insert(key, value, comparator), null);\n } else if (cmp === 0) {\n n = n.copy(null, value, null, null, null);\n } else {\n n = n.copy(\n null,\n null,\n null,\n null,\n n.right.insert(key, value, comparator)\n );\n }\n return n.fixUp_();\n }\n\n /**\n * @returns New tree, with the minimum key removed.\n */\n private removeMin_(): LLRBNode | LLRBEmptyNode {\n if (this.left.isEmpty()) {\n return SortedMap.EMPTY_NODE as LLRBEmptyNode;\n }\n let n: LLRBNode = this;\n if (!n.left.isRed_() && !n.left.left.isRed_()) {\n n = n.moveRedLeft_();\n }\n n = n.copy(null, null, null, (n.left as LLRBNode).removeMin_(), null);\n return n.fixUp_();\n }\n\n /**\n * @param key - The key of the item to remove.\n * @param comparator - Comparator.\n * @returns New tree, with the specified item removed.\n */\n remove(\n key: K,\n comparator: Comparator\n ): LLRBNode | LLRBEmptyNode {\n let n, smallest;\n n = this;\n if (comparator(key, n.key) < 0) {\n if (!n.left.isEmpty() && !n.left.isRed_() && !n.left.left.isRed_()) {\n n = n.moveRedLeft_();\n }\n n = n.copy(null, null, null, n.left.remove(key, comparator), null);\n } else {\n if (n.left.isRed_()) {\n n = n.rotateRight_();\n }\n if (!n.right.isEmpty() && !n.right.isRed_() && !n.right.left.isRed_()) {\n n = n.moveRedRight_();\n }\n if (comparator(key, n.key) === 0) {\n if (n.right.isEmpty()) {\n return SortedMap.EMPTY_NODE as LLRBEmptyNode;\n } else {\n smallest = (n.right as LLRBNode).min_();\n n = n.copy(\n smallest.key,\n smallest.value,\n null,\n null,\n (n.right as LLRBNode).removeMin_()\n );\n }\n }\n n = n.copy(null, null, null, null, n.right.remove(key, comparator));\n }\n return n.fixUp_();\n }\n\n /**\n * @returns Whether this is a RED node.\n */\n isRed_(): boolean {\n return this.color;\n }\n\n /**\n * @returns New tree after performing any needed rotations.\n */\n private fixUp_(): LLRBNode {\n let n: LLRBNode = this;\n if (n.right.isRed_() && 
!n.left.isRed_()) {\n n = n.rotateLeft_();\n }\n if (n.left.isRed_() && n.left.left.isRed_()) {\n n = n.rotateRight_();\n }\n if (n.left.isRed_() && n.right.isRed_()) {\n n = n.colorFlip_();\n }\n return n;\n }\n\n /**\n * @returns New tree, after moveRedLeft.\n */\n private moveRedLeft_(): LLRBNode {\n let n = this.colorFlip_();\n if (n.right.left.isRed_()) {\n n = n.copy(\n null,\n null,\n null,\n null,\n (n.right as LLRBNode).rotateRight_()\n );\n n = n.rotateLeft_();\n n = n.colorFlip_();\n }\n return n;\n }\n\n /**\n * @returns New tree, after moveRedRight.\n */\n private moveRedRight_(): LLRBNode {\n let n = this.colorFlip_();\n if (n.left.left.isRed_()) {\n n = n.rotateRight_();\n n = n.colorFlip_();\n }\n return n;\n }\n\n /**\n * @returns New tree, after rotateLeft.\n */\n private rotateLeft_(): LLRBNode {\n const nl = this.copy(null, null, LLRBNode.RED, null, this.right.left);\n return this.right.copy(null, null, this.color, nl, null) as LLRBNode;\n }\n\n /**\n * @returns New tree, after rotateRight.\n */\n private rotateRight_(): LLRBNode {\n const nr = this.copy(null, null, LLRBNode.RED, this.left.right, null);\n return this.left.copy(null, null, this.color, null, nr) as LLRBNode;\n }\n\n /**\n * @returns Newt ree, after colorFlip.\n */\n private colorFlip_(): LLRBNode {\n const left = this.left.copy(null, null, !this.left.color, null, null);\n const right = this.right.copy(null, null, !this.right.color, null, null);\n return this.copy(null, null, !this.color, left, right);\n }\n\n /**\n * For testing.\n *\n * @returns True if all is well.\n */\n private checkMaxDepth_(): boolean {\n const blackDepth = this.check_();\n return Math.pow(2.0, blackDepth) <= this.count() + 1;\n }\n\n check_(): number {\n if (this.isRed_() && this.left.isRed_()) {\n throw new Error(\n 'Red node has red child(' + this.key + ',' + this.value + ')'\n );\n }\n if (this.right.isRed_()) {\n throw new Error(\n 'Right child of (' + this.key + ',' + this.value + ') is red'\n );\n }\n const blackDepth = this.left.check_();\n if (blackDepth !== this.right.check_()) {\n throw new Error('Black depths differ');\n } else {\n return blackDepth + (this.isRed_() ? 
0 : 1);\n }\n }\n}\n\n/**\n * Represents an empty node (a leaf node in the Red-Black Tree).\n */\nexport class LLRBEmptyNode {\n key: K;\n value: V;\n left: LLRBNode | LLRBEmptyNode;\n right: LLRBNode | LLRBEmptyNode;\n color: boolean;\n\n /**\n * Returns a copy of the current node.\n *\n * @returns The node copy.\n */\n copy(\n key: K | null,\n value: V | null,\n color: boolean | null,\n left: LLRBNode | LLRBEmptyNode | null,\n right: LLRBNode | LLRBEmptyNode | null\n ): LLRBEmptyNode {\n return this;\n }\n\n /**\n * Returns a copy of the tree, with the specified key/value added.\n *\n * @param key - Key to be added.\n * @param value - Value to be added.\n * @param comparator - Comparator.\n * @returns New tree, with item added.\n */\n insert(key: K, value: V, comparator: Comparator): LLRBNode {\n return new LLRBNode(key, value, null);\n }\n\n /**\n * Returns a copy of the tree, with the specified key removed.\n *\n * @param key - The key to remove.\n * @param comparator - Comparator.\n * @returns New tree, with item removed.\n */\n remove(key: K, comparator: Comparator): LLRBEmptyNode {\n return this;\n }\n\n /**\n * @returns The total number of nodes in the tree.\n */\n count(): number {\n return 0;\n }\n\n /**\n * @returns True if the tree is empty.\n */\n isEmpty(): boolean {\n return true;\n }\n\n /**\n * Traverses the tree in key order and calls the specified action function\n * for each node.\n *\n * @param action - Callback function to be called for each\n * node. If it returns true, traversal is aborted.\n * @returns True if traversal was aborted.\n */\n inorderTraversal(action: (k: K, v: V) => unknown): boolean {\n return false;\n }\n\n /**\n * Traverses the tree in reverse key order and calls the specified action function\n * for each node.\n *\n * @param action - Callback function to be called for each\n * node. 
If it returns true, traversal is aborted.\n * @returns True if traversal was aborted.\n */\n reverseTraversal(action: (k: K, v: V) => void): boolean {\n return false;\n }\n\n minKey(): null {\n return null;\n }\n\n maxKey(): null {\n return null;\n }\n\n check_(): number {\n return 0;\n }\n\n /**\n * @returns Whether this node is red.\n */\n isRed_() {\n return false;\n }\n}\n\n/**\n * An immutable sorted map implementation, based on a Left-leaning Red-Black\n * tree.\n */\nexport class SortedMap {\n /**\n * Always use the same empty node, to reduce memory.\n */\n static EMPTY_NODE = new LLRBEmptyNode();\n\n /**\n * @param comparator_ - Key comparator.\n * @param root_ - Optional root node for the map.\n */\n constructor(\n private comparator_: Comparator,\n private root_:\n | LLRBNode\n | LLRBEmptyNode = SortedMap.EMPTY_NODE as LLRBEmptyNode\n ) {}\n\n /**\n * Returns a copy of the map, with the specified key/value added or replaced.\n * (TODO: We should perhaps rename this method to 'put')\n *\n * @param key - Key to be added.\n * @param value - Value to be added.\n * @returns New map, with item added.\n */\n insert(key: K, value: V): SortedMap {\n return new SortedMap(\n this.comparator_,\n this.root_\n .insert(key, value, this.comparator_)\n .copy(null, null, LLRBNode.BLACK, null, null)\n );\n }\n\n /**\n * Returns a copy of the map, with the specified key removed.\n *\n * @param key - The key to remove.\n * @returns New map, with item removed.\n */\n remove(key: K): SortedMap {\n return new SortedMap(\n this.comparator_,\n this.root_\n .remove(key, this.comparator_)\n .copy(null, null, LLRBNode.BLACK, null, null)\n );\n }\n\n /**\n * Returns the value of the node with the given key, or null.\n *\n * @param key - The key to look up.\n * @returns The value of the node with the given key, or null if the\n * key doesn't exist.\n */\n get(key: K): V | null {\n let cmp;\n let node = this.root_;\n while (!node.isEmpty()) {\n cmp = this.comparator_(key, node.key);\n if (cmp === 0) {\n return node.value;\n } else if (cmp < 0) {\n node = node.left;\n } else if (cmp > 0) {\n node = node.right;\n }\n }\n return null;\n }\n\n /**\n * Returns the key of the item *before* the specified key, or null if key is the first item.\n * @param key - The key to find the predecessor of\n * @returns The predecessor key.\n */\n getPredecessorKey(key: K): K | null {\n let cmp,\n node = this.root_,\n rightParent = null;\n while (!node.isEmpty()) {\n cmp = this.comparator_(key, node.key);\n if (cmp === 0) {\n if (!node.left.isEmpty()) {\n node = node.left;\n while (!node.right.isEmpty()) {\n node = node.right;\n }\n return node.key;\n } else if (rightParent) {\n return rightParent.key;\n } else {\n return null; // first item.\n }\n } else if (cmp < 0) {\n node = node.left;\n } else if (cmp > 0) {\n rightParent = node;\n node = node.right;\n }\n }\n\n throw new Error(\n 'Attempted to find predecessor key for a nonexistent key. 
What gives?'\n );\n }\n\n /**\n * @returns True if the map is empty.\n */\n isEmpty(): boolean {\n return this.root_.isEmpty();\n }\n\n /**\n * @returns The total number of nodes in the map.\n */\n count(): number {\n return this.root_.count();\n }\n\n /**\n * @returns The minimum key in the map.\n */\n minKey(): K | null {\n return this.root_.minKey();\n }\n\n /**\n * @returns The maximum key in the map.\n */\n maxKey(): K | null {\n return this.root_.maxKey();\n }\n\n /**\n * Traverses the map in key order and calls the specified action function\n * for each key/value pair.\n *\n * @param action - Callback function to be called\n * for each key/value pair. If action returns true, traversal is aborted.\n * @returns The first truthy value returned by action, or the last falsey\n * value returned by action\n */\n inorderTraversal(action: (k: K, v: V) => unknown): boolean {\n return this.root_.inorderTraversal(action);\n }\n\n /**\n * Traverses the map in reverse key order and calls the specified action function\n * for each key/value pair.\n *\n * @param action - Callback function to be called\n * for each key/value pair. If action returns true, traversal is aborted.\n * @returns True if the traversal was aborted.\n */\n reverseTraversal(action: (k: K, v: V) => void): boolean {\n return this.root_.reverseTraversal(action);\n }\n\n /**\n * Returns an iterator over the SortedMap.\n * @returns The iterator.\n */\n getIterator(\n resultGenerator?: (k: K, v: V) => T\n ): SortedMapIterator {\n return new SortedMapIterator(\n this.root_,\n null,\n this.comparator_,\n false,\n resultGenerator\n );\n }\n\n getIteratorFrom(\n key: K,\n resultGenerator?: (k: K, v: V) => T\n ): SortedMapIterator {\n return new SortedMapIterator(\n this.root_,\n key,\n this.comparator_,\n false,\n resultGenerator\n );\n }\n\n getReverseIteratorFrom(\n key: K,\n resultGenerator?: (k: K, v: V) => T\n ): SortedMapIterator {\n return new SortedMapIterator(\n this.root_,\n key,\n this.comparator_,\n true,\n resultGenerator\n );\n }\n\n getReverseIterator(\n resultGenerator?: (k: K, v: V) => T\n ): SortedMapIterator {\n return new SortedMapIterator(\n this.root_,\n null,\n this.comparator_,\n true,\n resultGenerator\n );\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { nameCompare } from '../util/util';\n\nimport { NamedNode } from './Node';\n\nexport function NAME_ONLY_COMPARATOR(left: NamedNode, right: NamedNode) {\n return nameCompare(left.name, right.name);\n}\n\nexport function NAME_COMPARATOR(left: string, right: string) {\n return nameCompare(left, right);\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed 
under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { assert } from '@firebase/util';\n\nimport { Indexable } from '../util/misc';\nimport {\n Path,\n pathGetFront,\n pathGetLength,\n pathIsEmpty,\n pathPopFront\n} from '../util/Path';\nimport { doubleToIEEE754String, sha1 } from '../util/util';\n\nimport { ChildrenNodeConstructor } from './ChildrenNode';\nimport { Index } from './indexes/Index';\nimport { Node } from './Node';\nimport { priorityHashText, validatePriorityNode } from './snap';\n\nlet __childrenNodeConstructor: ChildrenNodeConstructor;\n\n/**\n * LeafNode is a class for storing leaf nodes in a DataSnapshot. It\n * implements Node and stores the value of the node (a string,\n * number, or boolean) accessible via getValue().\n */\nexport class LeafNode implements Node {\n static set __childrenNodeConstructor(val: ChildrenNodeConstructor) {\n __childrenNodeConstructor = val;\n }\n\n static get __childrenNodeConstructor() {\n return __childrenNodeConstructor;\n }\n\n /**\n * The sort order for comparing leaf nodes of different types. If two leaf nodes have\n * the same type, the comparison falls back to their value\n */\n static VALUE_TYPE_ORDER = ['object', 'boolean', 'number', 'string'];\n\n private lazyHash_: string | null = null;\n\n /**\n * @param value_ - The value to store in this leaf node. The object type is\n * possible in the event of a deferred value\n * @param priorityNode_ - The priority of this node.\n */\n constructor(\n private readonly value_: string | number | boolean | Indexable,\n private priorityNode_: Node = LeafNode.__childrenNodeConstructor.EMPTY_NODE\n ) {\n assert(\n this.value_ !== undefined && this.value_ !== null,\n \"LeafNode shouldn't be created with null/undefined value.\"\n );\n\n validatePriorityNode(this.priorityNode_);\n }\n\n /** @inheritDoc */\n isLeafNode(): boolean {\n return true;\n }\n\n /** @inheritDoc */\n getPriority(): Node {\n return this.priorityNode_;\n }\n\n /** @inheritDoc */\n updatePriority(newPriorityNode: Node): Node {\n return new LeafNode(this.value_, newPriorityNode);\n }\n\n /** @inheritDoc */\n getImmediateChild(childName: string): Node {\n // Hack to treat priority as a regular child\n if (childName === '.priority') {\n return this.priorityNode_;\n } else {\n return LeafNode.__childrenNodeConstructor.EMPTY_NODE;\n }\n }\n\n /** @inheritDoc */\n getChild(path: Path): Node {\n if (pathIsEmpty(path)) {\n return this;\n } else if (pathGetFront(path) === '.priority') {\n return this.priorityNode_;\n } else {\n return LeafNode.__childrenNodeConstructor.EMPTY_NODE;\n }\n }\n hasChild(): boolean {\n return false;\n }\n\n /** @inheritDoc */\n getPredecessorChildName(childName: string, childNode: Node): null {\n return null;\n }\n\n /** @inheritDoc */\n updateImmediateChild(childName: string, newChildNode: Node): Node {\n if (childName === '.priority') {\n return this.updatePriority(newChildNode);\n } else if (newChildNode.isEmpty() && childName !== '.priority') {\n return this;\n } else {\n return LeafNode.__childrenNodeConstructor.EMPTY_NODE.updateImmediateChild(\n childName,\n newChildNode\n ).updatePriority(this.priorityNode_);\n }\n }\n\n /** @inheritDoc */\n updateChild(path: Path, newChildNode: Node): Node {\n const front = pathGetFront(path);\n if (front === null) {\n return newChildNode;\n } else if 
(newChildNode.isEmpty() && front !== '.priority') {\n return this;\n } else {\n assert(\n front !== '.priority' || pathGetLength(path) === 1,\n '.priority must be the last token in a path'\n );\n\n return this.updateImmediateChild(\n front,\n LeafNode.__childrenNodeConstructor.EMPTY_NODE.updateChild(\n pathPopFront(path),\n newChildNode\n )\n );\n }\n }\n\n /** @inheritDoc */\n isEmpty(): boolean {\n return false;\n }\n\n /** @inheritDoc */\n numChildren(): number {\n return 0;\n }\n\n /** @inheritDoc */\n forEachChild(index: Index, action: (s: string, n: Node) => void): boolean {\n return false;\n }\n val(exportFormat?: boolean): {} {\n if (exportFormat && !this.getPriority().isEmpty()) {\n return {\n '.value': this.getValue(),\n '.priority': this.getPriority().val()\n };\n } else {\n return this.getValue();\n }\n }\n\n /** @inheritDoc */\n hash(): string {\n if (this.lazyHash_ === null) {\n let toHash = '';\n if (!this.priorityNode_.isEmpty()) {\n toHash +=\n 'priority:' +\n priorityHashText(this.priorityNode_.val() as number | string) +\n ':';\n }\n\n const type = typeof this.value_;\n toHash += type + ':';\n if (type === 'number') {\n toHash += doubleToIEEE754String(this.value_ as number);\n } else {\n toHash += this.value_;\n }\n this.lazyHash_ = sha1(toHash);\n }\n return this.lazyHash_;\n }\n\n /**\n * Returns the value of the leaf node.\n * @returns The value of the node.\n */\n getValue(): Indexable | string | number | boolean {\n return this.value_;\n }\n compareTo(other: Node): number {\n if (other === LeafNode.__childrenNodeConstructor.EMPTY_NODE) {\n return 1;\n } else if (other instanceof LeafNode.__childrenNodeConstructor) {\n return -1;\n } else {\n assert(other.isLeafNode(), 'Unknown node type');\n return this.compareToLeafNode_(other as LeafNode);\n }\n }\n\n /**\n * Comparison specifically for two leaf nodes\n */\n private compareToLeafNode_(otherLeaf: LeafNode): number {\n const otherLeafType = typeof otherLeaf.value_;\n const thisLeafType = typeof this.value_;\n const otherIndex = LeafNode.VALUE_TYPE_ORDER.indexOf(otherLeafType);\n const thisIndex = LeafNode.VALUE_TYPE_ORDER.indexOf(thisLeafType);\n assert(otherIndex >= 0, 'Unknown leaf type: ' + otherLeafType);\n assert(thisIndex >= 0, 'Unknown leaf type: ' + thisLeafType);\n if (otherIndex === thisIndex) {\n // Same type, compare values\n if (thisLeafType === 'object') {\n // Deferred value nodes are all equal, but we should also never get to this point...\n return 0;\n } else {\n // Note that this works because true > false, all others are number or string comparisons\n if (this.value_ < otherLeaf.value_) {\n return -1;\n } else if (this.value_ === otherLeaf.value_) {\n return 0;\n } else {\n return 1;\n }\n }\n } else {\n return thisIndex - otherIndex;\n }\n }\n withIndex(): Node {\n return this;\n }\n isIndexed(): boolean {\n return true;\n }\n equals(other: Node): boolean {\n if (other === this) {\n return true;\n } else if (other.isLeafNode()) {\n const otherLeaf = other as LeafNode;\n return (\n this.value_ === otherLeaf.value_ &&\n this.priorityNode_.equals(otherLeaf.priorityNode_)\n );\n } else {\n return false;\n }\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the 
License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { LLRBNode, SortedMap } from '../util/SortedMap';\n\nimport { NamedNode } from './Node';\n\nconst LOG_2 = Math.log(2);\n\nclass Base12Num {\n count: number;\n private current_: number;\n private bits_: number;\n\n constructor(length: number) {\n const logBase2 = (num: number) =>\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n parseInt((Math.log(num) / LOG_2) as any, 10);\n const bitMask = (bits: number) => parseInt(Array(bits + 1).join('1'), 2);\n this.count = logBase2(length + 1);\n this.current_ = this.count - 1;\n const mask = bitMask(this.count);\n this.bits_ = (length + 1) & mask;\n }\n\n nextBitIsOne(): boolean {\n //noinspection JSBitwiseOperatorUsage\n const result = !(this.bits_ & (0x1 << this.current_));\n this.current_--;\n return result;\n }\n}\n\n/**\n * Takes a list of child nodes and constructs a SortedSet using the given comparison\n * function\n *\n * Uses the algorithm described in the paper linked here:\n * http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.46.1458\n *\n * @param childList - Unsorted list of children\n * @param cmp - The comparison method to be used\n * @param keyFn - An optional function to extract K from a node wrapper, if K's\n * type is not NamedNode\n * @param mapSortFn - An optional override for comparator used by the generated sorted map\n */\nexport const buildChildSet = function (\n childList: NamedNode[],\n cmp: (a: NamedNode, b: NamedNode) => number,\n keyFn?: (a: NamedNode) => K,\n mapSortFn?: (a: K, b: K) => number\n): SortedMap {\n childList.sort(cmp);\n\n const buildBalancedTree = function (\n low: number,\n high: number\n ): LLRBNode | null {\n const length = high - low;\n let namedNode: NamedNode;\n let key: K;\n if (length === 0) {\n return null;\n } else if (length === 1) {\n namedNode = childList[low];\n key = keyFn ? keyFn(namedNode) : (namedNode as unknown as K);\n return new LLRBNode(\n key,\n namedNode.node as unknown as V,\n LLRBNode.BLACK,\n null,\n null\n );\n } else {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n const middle = parseInt((length / 2) as any, 10) + low;\n const left = buildBalancedTree(low, middle);\n const right = buildBalancedTree(middle + 1, high);\n namedNode = childList[middle];\n key = keyFn ? keyFn(namedNode) : (namedNode as unknown as K);\n return new LLRBNode(\n key,\n namedNode.node as unknown as V,\n LLRBNode.BLACK,\n left,\n right\n );\n }\n };\n\n const buildFrom12Array = function (base12: Base12Num): LLRBNode {\n let node: LLRBNode = null;\n let root = null;\n let index = childList.length;\n\n const buildPennant = function (chunkSize: number, color: boolean) {\n const low = index - chunkSize;\n const high = index;\n index -= chunkSize;\n const childTree = buildBalancedTree(low + 1, high);\n const namedNode = childList[low];\n const key: K = keyFn ? 
keyFn(namedNode) : (namedNode as unknown as K);\n attachPennant(\n new LLRBNode(\n key,\n namedNode.node as unknown as V,\n color,\n null,\n childTree\n )\n );\n };\n\n const attachPennant = function (pennant: LLRBNode) {\n if (node) {\n node.left = pennant;\n node = pennant;\n } else {\n root = pennant;\n node = pennant;\n }\n };\n\n for (let i = 0; i < base12.count; ++i) {\n const isOne = base12.nextBitIsOne();\n // The number of nodes taken in each slice is 2^(arr.length - (i + 1))\n const chunkSize = Math.pow(2, base12.count - (i + 1));\n if (isOne) {\n buildPennant(chunkSize, LLRBNode.BLACK);\n } else {\n // current == 2\n buildPennant(chunkSize, LLRBNode.BLACK);\n buildPennant(chunkSize, LLRBNode.RED);\n }\n }\n return root;\n };\n\n const base12 = new Base12Num(childList.length);\n const root = buildFrom12Array(base12);\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n return new SortedMap(mapSortFn || (cmp as any), root);\n};\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { assert, contains, map, safeGet } from '@firebase/util';\n\nimport { SortedMap } from '../util/SortedMap';\n\nimport { buildChildSet } from './childSet';\nimport { Index } from './indexes/Index';\nimport { KEY_INDEX } from './indexes/KeyIndex';\nimport { PRIORITY_INDEX } from './indexes/PriorityIndex';\nimport { NamedNode, Node } from './Node';\n\nlet _defaultIndexMap: IndexMap;\n\nconst fallbackObject = {};\n\nexport class IndexMap {\n /**\n * The default IndexMap for nodes without a priority\n */\n static get Default(): IndexMap {\n assert(\n fallbackObject && PRIORITY_INDEX,\n 'ChildrenNode.ts has not been loaded'\n );\n _defaultIndexMap =\n _defaultIndexMap ||\n new IndexMap(\n { '.priority': fallbackObject },\n { '.priority': PRIORITY_INDEX }\n );\n return _defaultIndexMap;\n }\n\n constructor(\n private indexes_: {\n [k: string]: SortedMap | /*FallbackType*/ object;\n },\n private indexSet_: { [k: string]: Index }\n ) {}\n\n get(indexKey: string): SortedMap | null {\n const sortedMap = safeGet(this.indexes_, indexKey);\n if (!sortedMap) {\n throw new Error('No index defined for ' + indexKey);\n }\n\n if (sortedMap instanceof SortedMap) {\n return sortedMap;\n } else {\n // The index exists, but it falls back to just name comparison. 
Return null so that the calling code uses the\n // regular child map\n return null;\n }\n }\n\n hasIndex(indexDefinition: Index): boolean {\n return contains(this.indexSet_, indexDefinition.toString());\n }\n\n addIndex(\n indexDefinition: Index,\n existingChildren: SortedMap\n ): IndexMap {\n assert(\n indexDefinition !== KEY_INDEX,\n \"KeyIndex always exists and isn't meant to be added to the IndexMap.\"\n );\n const childList = [];\n let sawIndexedValue = false;\n const iter = existingChildren.getIterator(NamedNode.Wrap);\n let next = iter.getNext();\n while (next) {\n sawIndexedValue =\n sawIndexedValue || indexDefinition.isDefinedOn(next.node);\n childList.push(next);\n next = iter.getNext();\n }\n let newIndex;\n if (sawIndexedValue) {\n newIndex = buildChildSet(childList, indexDefinition.getCompare());\n } else {\n newIndex = fallbackObject;\n }\n const indexName = indexDefinition.toString();\n const newIndexSet = { ...this.indexSet_ };\n newIndexSet[indexName] = indexDefinition;\n const newIndexes = { ...this.indexes_ };\n newIndexes[indexName] = newIndex;\n return new IndexMap(newIndexes, newIndexSet);\n }\n\n /**\n * Ensure that this node is properly tracked in any indexes that we're maintaining\n */\n addToIndexes(\n namedNode: NamedNode,\n existingChildren: SortedMap\n ): IndexMap {\n const newIndexes = map(\n this.indexes_,\n (indexedChildren: SortedMap, indexName: string) => {\n const index = safeGet(this.indexSet_, indexName);\n assert(index, 'Missing index implementation for ' + indexName);\n if (indexedChildren === fallbackObject) {\n // Check to see if we need to index everything\n if (index.isDefinedOn(namedNode.node)) {\n // We need to build this index\n const childList = [];\n const iter = existingChildren.getIterator(NamedNode.Wrap);\n let next = iter.getNext();\n while (next) {\n if (next.name !== namedNode.name) {\n childList.push(next);\n }\n next = iter.getNext();\n }\n childList.push(namedNode);\n return buildChildSet(childList, index.getCompare());\n } else {\n // No change, this remains a fallback\n return fallbackObject;\n }\n } else {\n const existingSnap = existingChildren.get(namedNode.name);\n let newChildren = indexedChildren;\n if (existingSnap) {\n newChildren = newChildren.remove(\n new NamedNode(namedNode.name, existingSnap)\n );\n }\n return newChildren.insert(namedNode, namedNode.node);\n }\n }\n );\n return new IndexMap(newIndexes, this.indexSet_);\n }\n\n /**\n * Create a new IndexMap instance with the given value removed\n */\n removeFromIndexes(\n namedNode: NamedNode,\n existingChildren: SortedMap\n ): IndexMap {\n const newIndexes = map(\n this.indexes_,\n (indexedChildren: SortedMap) => {\n if (indexedChildren === fallbackObject) {\n // This is the fallback. 
Just return it, nothing to do in this case\n return indexedChildren;\n } else {\n const existingSnap = existingChildren.get(namedNode.name);\n if (existingSnap) {\n return indexedChildren.remove(\n new NamedNode(namedNode.name, existingSnap)\n );\n } else {\n // No record of this child\n return indexedChildren;\n }\n }\n }\n );\n return new IndexMap(newIndexes, this.indexSet_);\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { assert } from '@firebase/util';\n\nimport { Path, pathGetFront, pathGetLength, pathPopFront } from '../util/Path';\nimport { SortedMap, SortedMapIterator } from '../util/SortedMap';\nimport { MAX_NAME, MIN_NAME, sha1 } from '../util/util';\n\nimport { NAME_COMPARATOR } from './comparators';\nimport { Index } from './indexes/Index';\nimport { KEY_INDEX, KeyIndex } from './indexes/KeyIndex';\nimport {\n PRIORITY_INDEX,\n setMaxNode as setPriorityMaxNode\n} from './indexes/PriorityIndex';\nimport { IndexMap } from './IndexMap';\nimport { LeafNode } from './LeafNode';\nimport { NamedNode, Node } from './Node';\nimport { priorityHashText, setMaxNode, validatePriorityNode } from './snap';\n\nexport interface ChildrenNodeConstructor {\n new (\n children_: SortedMap,\n priorityNode_: Node | null,\n indexMap_: IndexMap\n ): ChildrenNode;\n EMPTY_NODE: ChildrenNode;\n}\n\n// TODO: For memory savings, don't store priorityNode_ if it's empty.\n\nlet EMPTY_NODE: ChildrenNode;\n\n/**\n * ChildrenNode is a class for storing internal nodes in a DataSnapshot\n * (i.e. nodes with children). It implements Node and stores the\n * list of children in the children property, sorted by child name.\n */\nexport class ChildrenNode implements Node {\n private lazyHash_: string | null = null;\n\n static get EMPTY_NODE(): ChildrenNode {\n return (\n EMPTY_NODE ||\n (EMPTY_NODE = new ChildrenNode(\n new SortedMap(NAME_COMPARATOR),\n null,\n IndexMap.Default\n ))\n );\n }\n\n /**\n * @param children_ - List of children of this node..\n * @param priorityNode_ - The priority of this node (as a snapshot node).\n */\n constructor(\n private readonly children_: SortedMap,\n private readonly priorityNode_: Node | null,\n private indexMap_: IndexMap\n ) {\n /**\n * Note: The only reason we allow null priority is for EMPTY_NODE, since we can't use\n * EMPTY_NODE as the priority of EMPTY_NODE. 
We might want to consider making EMPTY_NODE its own\n * class instead of an empty ChildrenNode.\n */\n if (this.priorityNode_) {\n validatePriorityNode(this.priorityNode_);\n }\n\n if (this.children_.isEmpty()) {\n assert(\n !this.priorityNode_ || this.priorityNode_.isEmpty(),\n 'An empty node cannot have a priority'\n );\n }\n }\n\n /** @inheritDoc */\n isLeafNode(): boolean {\n return false;\n }\n\n /** @inheritDoc */\n getPriority(): Node {\n return this.priorityNode_ || EMPTY_NODE;\n }\n\n /** @inheritDoc */\n updatePriority(newPriorityNode: Node): Node {\n if (this.children_.isEmpty()) {\n // Don't allow priorities on empty nodes\n return this;\n } else {\n return new ChildrenNode(this.children_, newPriorityNode, this.indexMap_);\n }\n }\n\n /** @inheritDoc */\n getImmediateChild(childName: string): Node {\n // Hack to treat priority as a regular child\n if (childName === '.priority') {\n return this.getPriority();\n } else {\n const child = this.children_.get(childName);\n return child === null ? EMPTY_NODE : child;\n }\n }\n\n /** @inheritDoc */\n getChild(path: Path): Node {\n const front = pathGetFront(path);\n if (front === null) {\n return this;\n }\n\n return this.getImmediateChild(front).getChild(pathPopFront(path));\n }\n\n /** @inheritDoc */\n hasChild(childName: string): boolean {\n return this.children_.get(childName) !== null;\n }\n\n /** @inheritDoc */\n updateImmediateChild(childName: string, newChildNode: Node): Node {\n assert(newChildNode, 'We should always be passing snapshot nodes');\n if (childName === '.priority') {\n return this.updatePriority(newChildNode);\n } else {\n const namedNode = new NamedNode(childName, newChildNode);\n let newChildren, newIndexMap;\n if (newChildNode.isEmpty()) {\n newChildren = this.children_.remove(childName);\n newIndexMap = this.indexMap_.removeFromIndexes(\n namedNode,\n this.children_\n );\n } else {\n newChildren = this.children_.insert(childName, newChildNode);\n newIndexMap = this.indexMap_.addToIndexes(namedNode, this.children_);\n }\n\n const newPriority = newChildren.isEmpty()\n ? 
EMPTY_NODE\n : this.priorityNode_;\n return new ChildrenNode(newChildren, newPriority, newIndexMap);\n }\n }\n\n /** @inheritDoc */\n updateChild(path: Path, newChildNode: Node): Node {\n const front = pathGetFront(path);\n if (front === null) {\n return newChildNode;\n } else {\n assert(\n pathGetFront(path) !== '.priority' || pathGetLength(path) === 1,\n '.priority must be the last token in a path'\n );\n const newImmediateChild = this.getImmediateChild(front).updateChild(\n pathPopFront(path),\n newChildNode\n );\n return this.updateImmediateChild(front, newImmediateChild);\n }\n }\n\n /** @inheritDoc */\n isEmpty(): boolean {\n return this.children_.isEmpty();\n }\n\n /** @inheritDoc */\n numChildren(): number {\n return this.children_.count();\n }\n\n private static INTEGER_REGEXP_ = /^(0|[1-9]\\d*)$/;\n\n /** @inheritDoc */\n val(exportFormat?: boolean): object {\n if (this.isEmpty()) {\n return null;\n }\n\n const obj: { [k: string]: unknown } = {};\n let numKeys = 0,\n maxKey = 0,\n allIntegerKeys = true;\n this.forEachChild(PRIORITY_INDEX, (key: string, childNode: Node) => {\n obj[key] = childNode.val(exportFormat);\n\n numKeys++;\n if (allIntegerKeys && ChildrenNode.INTEGER_REGEXP_.test(key)) {\n maxKey = Math.max(maxKey, Number(key));\n } else {\n allIntegerKeys = false;\n }\n });\n\n if (!exportFormat && allIntegerKeys && maxKey < 2 * numKeys) {\n // convert to array.\n const array: unknown[] = [];\n // eslint-disable-next-line guard-for-in\n for (const key in obj) {\n array[key as unknown as number] = obj[key];\n }\n\n return array;\n } else {\n if (exportFormat && !this.getPriority().isEmpty()) {\n obj['.priority'] = this.getPriority().val();\n }\n return obj;\n }\n }\n\n /** @inheritDoc */\n hash(): string {\n if (this.lazyHash_ === null) {\n let toHash = '';\n if (!this.getPriority().isEmpty()) {\n toHash +=\n 'priority:' +\n priorityHashText(this.getPriority().val() as string | number) +\n ':';\n }\n\n this.forEachChild(PRIORITY_INDEX, (key, childNode) => {\n const childHash = childNode.hash();\n if (childHash !== '') {\n toHash += ':' + key + ':' + childHash;\n }\n });\n\n this.lazyHash_ = toHash === '' ? '' : sha1(toHash);\n }\n return this.lazyHash_;\n }\n\n /** @inheritDoc */\n getPredecessorChildName(\n childName: string,\n childNode: Node,\n index: Index\n ): string {\n const idx = this.resolveIndex_(index);\n if (idx) {\n const predecessor = idx.getPredecessorKey(\n new NamedNode(childName, childNode)\n );\n return predecessor ? 
predecessor.name : null;\n } else {\n return this.children_.getPredecessorKey(childName);\n }\n }\n\n getFirstChildName(indexDefinition: Index): string | null {\n const idx = this.resolveIndex_(indexDefinition);\n if (idx) {\n const minKey = idx.minKey();\n return minKey && minKey.name;\n } else {\n return this.children_.minKey();\n }\n }\n\n getFirstChild(indexDefinition: Index): NamedNode | null {\n const minKey = this.getFirstChildName(indexDefinition);\n if (minKey) {\n return new NamedNode(minKey, this.children_.get(minKey));\n } else {\n return null;\n }\n }\n\n /**\n * Given an index, return the key name of the largest value we have, according to that index\n */\n getLastChildName(indexDefinition: Index): string | null {\n const idx = this.resolveIndex_(indexDefinition);\n if (idx) {\n const maxKey = idx.maxKey();\n return maxKey && maxKey.name;\n } else {\n return this.children_.maxKey();\n }\n }\n\n getLastChild(indexDefinition: Index): NamedNode | null {\n const maxKey = this.getLastChildName(indexDefinition);\n if (maxKey) {\n return new NamedNode(maxKey, this.children_.get(maxKey));\n } else {\n return null;\n }\n }\n forEachChild(\n index: Index,\n action: (key: string, node: Node) => boolean | void\n ): boolean {\n const idx = this.resolveIndex_(index);\n if (idx) {\n return idx.inorderTraversal(wrappedNode => {\n return action(wrappedNode.name, wrappedNode.node);\n });\n } else {\n return this.children_.inorderTraversal(action);\n }\n }\n\n getIterator(\n indexDefinition: Index\n ): SortedMapIterator {\n return this.getIteratorFrom(indexDefinition.minPost(), indexDefinition);\n }\n\n getIteratorFrom(\n startPost: NamedNode,\n indexDefinition: Index\n ): SortedMapIterator {\n const idx = this.resolveIndex_(indexDefinition);\n if (idx) {\n return idx.getIteratorFrom(startPost, key => key);\n } else {\n const iterator = this.children_.getIteratorFrom(\n startPost.name,\n NamedNode.Wrap\n );\n let next = iterator.peek();\n while (next != null && indexDefinition.compare(next, startPost) < 0) {\n iterator.getNext();\n next = iterator.peek();\n }\n return iterator;\n }\n }\n\n getReverseIterator(\n indexDefinition: Index\n ): SortedMapIterator {\n return this.getReverseIteratorFrom(\n indexDefinition.maxPost(),\n indexDefinition\n );\n }\n\n getReverseIteratorFrom(\n endPost: NamedNode,\n indexDefinition: Index\n ): SortedMapIterator {\n const idx = this.resolveIndex_(indexDefinition);\n if (idx) {\n return idx.getReverseIteratorFrom(endPost, key => {\n return key;\n });\n } else {\n const iterator = this.children_.getReverseIteratorFrom(\n endPost.name,\n NamedNode.Wrap\n );\n let next = iterator.peek();\n while (next != null && indexDefinition.compare(next, endPost) > 0) {\n iterator.getNext();\n next = iterator.peek();\n }\n return iterator;\n }\n }\n compareTo(other: ChildrenNode): number {\n if (this.isEmpty()) {\n if (other.isEmpty()) {\n return 0;\n } else {\n return -1;\n }\n } else if (other.isLeafNode() || other.isEmpty()) {\n return 1;\n } else if (other === MAX_NODE) {\n return -1;\n } else {\n // Must be another node with children.\n return 0;\n }\n }\n withIndex(indexDefinition: Index): Node {\n if (\n indexDefinition === KEY_INDEX ||\n this.indexMap_.hasIndex(indexDefinition)\n ) {\n return this;\n } else {\n const newIndexMap = this.indexMap_.addIndex(\n indexDefinition,\n this.children_\n );\n return new ChildrenNode(this.children_, this.priorityNode_, newIndexMap);\n }\n }\n isIndexed(index: Index): boolean {\n return index === KEY_INDEX || 
this.indexMap_.hasIndex(index);\n }\n equals(other: Node): boolean {\n if (other === this) {\n return true;\n } else if (other.isLeafNode()) {\n return false;\n } else {\n const otherChildrenNode = other as ChildrenNode;\n if (!this.getPriority().equals(otherChildrenNode.getPriority())) {\n return false;\n } else if (\n this.children_.count() === otherChildrenNode.children_.count()\n ) {\n const thisIter = this.getIterator(PRIORITY_INDEX);\n const otherIter = otherChildrenNode.getIterator(PRIORITY_INDEX);\n let thisCurrent = thisIter.getNext();\n let otherCurrent = otherIter.getNext();\n while (thisCurrent && otherCurrent) {\n if (\n thisCurrent.name !== otherCurrent.name ||\n !thisCurrent.node.equals(otherCurrent.node)\n ) {\n return false;\n }\n thisCurrent = thisIter.getNext();\n otherCurrent = otherIter.getNext();\n }\n return thisCurrent === null && otherCurrent === null;\n } else {\n return false;\n }\n }\n }\n\n /**\n * Returns a SortedMap ordered by index, or null if the default (by-key) ordering can be used\n * instead.\n *\n */\n private resolveIndex_(\n indexDefinition: Index\n ): SortedMap | null {\n if (indexDefinition === KEY_INDEX) {\n return null;\n } else {\n return this.indexMap_.get(indexDefinition.toString());\n }\n }\n}\n\nexport class MaxNode extends ChildrenNode {\n constructor() {\n super(\n new SortedMap(NAME_COMPARATOR),\n ChildrenNode.EMPTY_NODE,\n IndexMap.Default\n );\n }\n\n compareTo(other: Node): number {\n if (other === this) {\n return 0;\n } else {\n return 1;\n }\n }\n\n equals(other: Node): boolean {\n // Not that we every compare it, but MAX_NODE is only ever equal to itself\n return other === this;\n }\n\n getPriority(): MaxNode {\n return this;\n }\n\n getImmediateChild(childName: string): ChildrenNode {\n return ChildrenNode.EMPTY_NODE;\n }\n\n isEmpty(): boolean {\n return false;\n }\n}\n\n/**\n * Marker that will sort higher than any other snapshot.\n */\nexport const MAX_NODE = new MaxNode();\n\n/**\n * Document NamedNode extensions\n */\ndeclare module './Node' {\n interface NamedNode {\n MIN: NamedNode;\n MAX: NamedNode;\n }\n}\n\nObject.defineProperties(NamedNode, {\n MIN: {\n value: new NamedNode(MIN_NAME, ChildrenNode.EMPTY_NODE)\n },\n MAX: {\n value: new NamedNode(MAX_NAME, MAX_NODE)\n }\n});\n\n/**\n * Reference Extensions\n */\nKeyIndex.__EMPTY_NODE = ChildrenNode.EMPTY_NODE;\nLeafNode.__childrenNodeConstructor = ChildrenNode;\nsetMaxNode(MAX_NODE);\nsetPriorityMaxNode(MAX_NODE);\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { contains, assert } from '@firebase/util';\n\nimport { Indexable } from '../util/misc';\nimport { SortedMap } from '../util/SortedMap';\nimport { each } from '../util/util';\n\nimport { ChildrenNode } from './ChildrenNode';\nimport { buildChildSet } from './childSet';\nimport { NAME_COMPARATOR, NAME_ONLY_COMPARATOR } from './comparators';\nimport { PRIORITY_INDEX, setNodeFromJSON } from './indexes/PriorityIndex';\nimport { IndexMap } 
from './IndexMap';\nimport { LeafNode } from './LeafNode';\nimport { NamedNode, Node } from './Node';\n\nconst USE_HINZE = true;\n\n/**\n * Constructs a snapshot node representing the passed JSON and returns it.\n * @param json - JSON to create a node for.\n * @param priority - Optional priority to use. This will be ignored if the\n * passed JSON contains a .priority property.\n */\nexport function nodeFromJSON(\n json: unknown | null,\n priority: unknown = null\n): Node {\n if (json === null) {\n return ChildrenNode.EMPTY_NODE;\n }\n\n if (typeof json === 'object' && '.priority' in json) {\n priority = json['.priority'];\n }\n\n assert(\n priority === null ||\n typeof priority === 'string' ||\n typeof priority === 'number' ||\n (typeof priority === 'object' && '.sv' in (priority as object)),\n 'Invalid priority type found: ' + typeof priority\n );\n\n if (typeof json === 'object' && '.value' in json && json['.value'] !== null) {\n json = json['.value'];\n }\n\n // Valid leaf nodes include non-objects or server-value wrapper objects\n if (typeof json !== 'object' || '.sv' in json) {\n const jsonLeaf = json as string | number | boolean | Indexable;\n return new LeafNode(jsonLeaf, nodeFromJSON(priority));\n }\n\n if (!(json instanceof Array) && USE_HINZE) {\n const children: NamedNode[] = [];\n let childrenHavePriority = false;\n const hinzeJsonObj = json;\n each(hinzeJsonObj, (key, child) => {\n if (key.substring(0, 1) !== '.') {\n // Ignore metadata nodes\n const childNode = nodeFromJSON(child);\n if (!childNode.isEmpty()) {\n childrenHavePriority =\n childrenHavePriority || !childNode.getPriority().isEmpty();\n children.push(new NamedNode(key, childNode));\n }\n }\n });\n\n if (children.length === 0) {\n return ChildrenNode.EMPTY_NODE;\n }\n\n const childSet = buildChildSet(\n children,\n NAME_ONLY_COMPARATOR,\n namedNode => namedNode.name,\n NAME_COMPARATOR\n ) as SortedMap;\n if (childrenHavePriority) {\n const sortedChildSet = buildChildSet(\n children,\n PRIORITY_INDEX.getCompare()\n );\n return new ChildrenNode(\n childSet,\n nodeFromJSON(priority),\n new IndexMap(\n { '.priority': sortedChildSet },\n { '.priority': PRIORITY_INDEX }\n )\n );\n } else {\n return new ChildrenNode(\n childSet,\n nodeFromJSON(priority),\n IndexMap.Default\n );\n }\n } else {\n let node: Node = ChildrenNode.EMPTY_NODE;\n each(json, (key: string, childData: unknown) => {\n if (contains(json as object, key)) {\n if (key.substring(0, 1) !== '.') {\n // ignore metadata nodes.\n const childNode = nodeFromJSON(childData);\n if (childNode.isLeafNode() || !childNode.isEmpty()) {\n node = node.updateImmediateChild(key, childNode);\n }\n }\n }\n });\n\n return node.updatePriority(nodeFromJSON(priority));\n }\n}\n\nsetNodeFromJSON(nodeFromJSON);\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { assert } from '@firebase/util';\n\nimport { Path, pathGetFront, pathIsEmpty, pathSlice } from '../../util/Path';\nimport { MAX_NAME, nameCompare } 
from '../../util/util';\nimport { ChildrenNode, MAX_NODE } from '../ChildrenNode';\nimport { NamedNode, Node } from '../Node';\nimport { nodeFromJSON } from '../nodeFromJSON';\n\nimport { Index } from './Index';\n\nexport class PathIndex extends Index {\n constructor(private indexPath_: Path) {\n super();\n\n assert(\n !pathIsEmpty(indexPath_) && pathGetFront(indexPath_) !== '.priority',\n \"Can't create PathIndex with empty path or .priority key\"\n );\n }\n\n protected extractChild(snap: Node): Node {\n return snap.getChild(this.indexPath_);\n }\n isDefinedOn(node: Node): boolean {\n return !node.getChild(this.indexPath_).isEmpty();\n }\n compare(a: NamedNode, b: NamedNode): number {\n const aChild = this.extractChild(a.node);\n const bChild = this.extractChild(b.node);\n const indexCmp = aChild.compareTo(bChild);\n if (indexCmp === 0) {\n return nameCompare(a.name, b.name);\n } else {\n return indexCmp;\n }\n }\n makePost(indexValue: object, name: string): NamedNode {\n const valueNode = nodeFromJSON(indexValue);\n const node = ChildrenNode.EMPTY_NODE.updateChild(\n this.indexPath_,\n valueNode\n );\n return new NamedNode(name, node);\n }\n maxPost(): NamedNode {\n const node = ChildrenNode.EMPTY_NODE.updateChild(this.indexPath_, MAX_NODE);\n return new NamedNode(MAX_NAME, node);\n }\n toString(): string {\n return pathSlice(this.indexPath_, 0).join('/');\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { nameCompare } from '../../util/util';\nimport { NamedNode, Node } from '../Node';\nimport { nodeFromJSON } from '../nodeFromJSON';\n\nimport { Index } from './Index';\n\nexport class ValueIndex extends Index {\n compare(a: NamedNode, b: NamedNode): number {\n const indexCmp = a.node.compareTo(b.node);\n if (indexCmp === 0) {\n return nameCompare(a.name, b.name);\n } else {\n return indexCmp;\n }\n }\n isDefinedOn(node: Node): boolean {\n return true;\n }\n indexedValueChanged(oldNode: Node, newNode: Node): boolean {\n return !oldNode.equals(newNode);\n }\n minPost(): NamedNode {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n return (NamedNode as any).MIN;\n }\n maxPost(): NamedNode {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n return (NamedNode as any).MAX;\n }\n\n makePost(indexValue: object, name: string): NamedNode {\n const valueNode = nodeFromJSON(indexValue);\n return new NamedNode(name, valueNode);\n }\n\n /**\n * @returns String representation for inclusion in a query spec\n */\n toString(): string {\n return '.value';\n }\n}\n\nexport const VALUE_INDEX = new ValueIndex();\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under 
the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Node } from '../snap/Node';\n\nexport const enum ChangeType {\n /** Event type for a child added */\n CHILD_ADDED = 'child_added',\n /** Event type for a child removed */\n CHILD_REMOVED = 'child_removed',\n /** Event type for a child changed */\n CHILD_CHANGED = 'child_changed',\n /** Event type for a child moved */\n CHILD_MOVED = 'child_moved',\n /** Event type for a value change */\n VALUE = 'value'\n}\n\nexport interface Change {\n /** @param type - The event type */\n type: ChangeType;\n /** @param snapshotNode - The data */\n snapshotNode: Node;\n /** @param childName - The name for this child, if it's a child even */\n childName?: string;\n /** @param oldSnap - Used for intermediate processing of child changed events */\n oldSnap?: Node;\n /** * @param prevName - The name for the previous child, if applicable */\n prevName?: string | null;\n}\n\nexport function changeValue(snapshotNode: Node): Change {\n return { type: ChangeType.VALUE, snapshotNode };\n}\n\nexport function changeChildAdded(\n childName: string,\n snapshotNode: Node\n): Change {\n return { type: ChangeType.CHILD_ADDED, snapshotNode, childName };\n}\n\nexport function changeChildRemoved(\n childName: string,\n snapshotNode: Node\n): Change {\n return { type: ChangeType.CHILD_REMOVED, snapshotNode, childName };\n}\n\nexport function changeChildChanged(\n childName: string,\n snapshotNode: Node,\n oldSnap: Node\n): Change {\n return {\n type: ChangeType.CHILD_CHANGED,\n snapshotNode,\n childName,\n oldSnap\n };\n}\n\nexport function changeChildMoved(\n childName: string,\n snapshotNode: Node\n): Change {\n return { type: ChangeType.CHILD_MOVED, snapshotNode, childName };\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { assert } from '@firebase/util';\n\nimport { ChildrenNode } from '../../snap/ChildrenNode';\nimport { Index } from '../../snap/indexes/Index';\nimport { PRIORITY_INDEX } from '../../snap/indexes/PriorityIndex';\nimport { Node } from '../../snap/Node';\nimport { Path } from '../../util/Path';\nimport {\n changeChildAdded,\n changeChildChanged,\n changeChildRemoved\n} from '../Change';\nimport { ChildChangeAccumulator } from '../ChildChangeAccumulator';\nimport { CompleteChildSource } from '../CompleteChildSource';\n\nimport { NodeFilter } from './NodeFilter';\n\n/**\n * Doesn't really filter nodes but applies an index to the node and keeps track of any changes\n */\nexport class IndexedFilter implements NodeFilter {\n constructor(private readonly index_: Index) {}\n\n updateChild(\n snap: Node,\n key: string,\n newChild: Node,\n affectedPath: Path,\n source: CompleteChildSource,\n optChangeAccumulator: ChildChangeAccumulator | null\n ): Node {\n assert(\n 
snap.isIndexed(this.index_),\n 'A node must be indexed if only a child is updated'\n );\n const oldChild = snap.getImmediateChild(key);\n // Check if anything actually changed.\n if (\n oldChild.getChild(affectedPath).equals(newChild.getChild(affectedPath))\n ) {\n // There's an edge case where a child can enter or leave the view because affectedPath was set to null.\n // In this case, affectedPath will appear null in both the old and new snapshots. So we need\n // to avoid treating these cases as \"nothing changed.\"\n if (oldChild.isEmpty() === newChild.isEmpty()) {\n // Nothing changed.\n\n // This assert should be valid, but it's expensive (can dominate perf testing) so don't actually do it.\n //assert(oldChild.equals(newChild), 'Old and new snapshots should be equal.');\n return snap;\n }\n }\n\n if (optChangeAccumulator != null) {\n if (newChild.isEmpty()) {\n if (snap.hasChild(key)) {\n optChangeAccumulator.trackChildChange(\n changeChildRemoved(key, oldChild)\n );\n } else {\n assert(\n snap.isLeafNode(),\n 'A child remove without an old child only makes sense on a leaf node'\n );\n }\n } else if (oldChild.isEmpty()) {\n optChangeAccumulator.trackChildChange(changeChildAdded(key, newChild));\n } else {\n optChangeAccumulator.trackChildChange(\n changeChildChanged(key, newChild, oldChild)\n );\n }\n }\n if (snap.isLeafNode() && newChild.isEmpty()) {\n return snap;\n } else {\n // Make sure the node is indexed\n return snap.updateImmediateChild(key, newChild).withIndex(this.index_);\n }\n }\n updateFullNode(\n oldSnap: Node,\n newSnap: Node,\n optChangeAccumulator: ChildChangeAccumulator | null\n ): Node {\n if (optChangeAccumulator != null) {\n if (!oldSnap.isLeafNode()) {\n oldSnap.forEachChild(PRIORITY_INDEX, (key, childNode) => {\n if (!newSnap.hasChild(key)) {\n optChangeAccumulator.trackChildChange(\n changeChildRemoved(key, childNode)\n );\n }\n });\n }\n if (!newSnap.isLeafNode()) {\n newSnap.forEachChild(PRIORITY_INDEX, (key, childNode) => {\n if (oldSnap.hasChild(key)) {\n const oldChild = oldSnap.getImmediateChild(key);\n if (!oldChild.equals(childNode)) {\n optChangeAccumulator.trackChildChange(\n changeChildChanged(key, childNode, oldChild)\n );\n }\n } else {\n optChangeAccumulator.trackChildChange(\n changeChildAdded(key, childNode)\n );\n }\n });\n }\n }\n return newSnap.withIndex(this.index_);\n }\n updatePriority(oldSnap: Node, newPriority: Node): Node {\n if (oldSnap.isEmpty()) {\n return ChildrenNode.EMPTY_NODE;\n } else {\n return oldSnap.updatePriority(newPriority);\n }\n }\n filtersNodes(): boolean {\n return false;\n }\n getIndexedFilter(): IndexedFilter {\n return this;\n }\n getIndex(): Index {\n return this.index_;\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { NamedNode, Node } from '../../../core/snap/Node';\nimport { ChildrenNode } from '../../snap/ChildrenNode';\nimport { Index } from '../../snap/indexes/Index';\nimport { PRIORITY_INDEX } from 
'../../snap/indexes/PriorityIndex';\nimport { Path } from '../../util/Path';\nimport { ChildChangeAccumulator } from '../ChildChangeAccumulator';\nimport { CompleteChildSource } from '../CompleteChildSource';\nimport { QueryParams } from '../QueryParams';\n\nimport { IndexedFilter } from './IndexedFilter';\nimport { NodeFilter } from './NodeFilter';\n\n/**\n * Filters nodes by range and uses an IndexFilter to track any changes after filtering the node\n */\nexport class RangedFilter implements NodeFilter {\n private indexedFilter_: IndexedFilter;\n\n private index_: Index;\n\n private startPost_: NamedNode;\n\n private endPost_: NamedNode;\n\n private startIsInclusive_: boolean;\n\n private endIsInclusive_: boolean;\n\n constructor(params: QueryParams) {\n this.indexedFilter_ = new IndexedFilter(params.getIndex());\n this.index_ = params.getIndex();\n this.startPost_ = RangedFilter.getStartPost_(params);\n this.endPost_ = RangedFilter.getEndPost_(params);\n this.startIsInclusive_ = !params.startAfterSet_;\n this.endIsInclusive_ = !params.endBeforeSet_;\n }\n\n getStartPost(): NamedNode {\n return this.startPost_;\n }\n\n getEndPost(): NamedNode {\n return this.endPost_;\n }\n\n matches(node: NamedNode): boolean {\n const isWithinStart = this.startIsInclusive_\n ? this.index_.compare(this.getStartPost(), node) <= 0\n : this.index_.compare(this.getStartPost(), node) < 0;\n const isWithinEnd = this.endIsInclusive_\n ? this.index_.compare(node, this.getEndPost()) <= 0\n : this.index_.compare(node, this.getEndPost()) < 0;\n return isWithinStart && isWithinEnd;\n }\n updateChild(\n snap: Node,\n key: string,\n newChild: Node,\n affectedPath: Path,\n source: CompleteChildSource,\n optChangeAccumulator: ChildChangeAccumulator | null\n ): Node {\n if (!this.matches(new NamedNode(key, newChild))) {\n newChild = ChildrenNode.EMPTY_NODE;\n }\n return this.indexedFilter_.updateChild(\n snap,\n key,\n newChild,\n affectedPath,\n source,\n optChangeAccumulator\n );\n }\n updateFullNode(\n oldSnap: Node,\n newSnap: Node,\n optChangeAccumulator: ChildChangeAccumulator | null\n ): Node {\n if (newSnap.isLeafNode()) {\n // Make sure we have a children node with the correct index, not a leaf node;\n newSnap = ChildrenNode.EMPTY_NODE;\n }\n let filtered = newSnap.withIndex(this.index_);\n // Don't support priorities on queries\n filtered = filtered.updatePriority(ChildrenNode.EMPTY_NODE);\n const self = this;\n newSnap.forEachChild(PRIORITY_INDEX, (key, childNode) => {\n if (!self.matches(new NamedNode(key, childNode))) {\n filtered = filtered.updateImmediateChild(key, ChildrenNode.EMPTY_NODE);\n }\n });\n return this.indexedFilter_.updateFullNode(\n oldSnap,\n filtered,\n optChangeAccumulator\n );\n }\n updatePriority(oldSnap: Node, newPriority: Node): Node {\n // Don't support priorities on queries\n return oldSnap;\n }\n filtersNodes(): boolean {\n return true;\n }\n getIndexedFilter(): IndexedFilter {\n return this.indexedFilter_;\n }\n getIndex(): Index {\n return this.index_;\n }\n\n private static getStartPost_(params: QueryParams): NamedNode {\n if (params.hasStart()) {\n const startName = params.getIndexStartName();\n return params.getIndex().makePost(params.getIndexStartValue(), startName);\n } else {\n return params.getIndex().minPost();\n }\n }\n\n private static getEndPost_(params: QueryParams): NamedNode {\n if (params.hasEnd()) {\n const endName = params.getIndexEndName();\n return params.getIndex().makePost(params.getIndexEndValue(), endName);\n } else {\n return params.getIndex().maxPost();\n 
}\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { assert } from '@firebase/util';\n\nimport { ChildrenNode } from '../../snap/ChildrenNode';\nimport { Index } from '../../snap/indexes/Index';\nimport { NamedNode, Node } from '../../snap/Node';\nimport { Path } from '../../util/Path';\nimport {\n changeChildAdded,\n changeChildChanged,\n changeChildRemoved\n} from '../Change';\nimport { ChildChangeAccumulator } from '../ChildChangeAccumulator';\nimport { CompleteChildSource } from '../CompleteChildSource';\nimport { QueryParams } from '../QueryParams';\n\nimport { IndexedFilter } from './IndexedFilter';\nimport { NodeFilter } from './NodeFilter';\nimport { RangedFilter } from './RangedFilter';\n\n/**\n * Applies a limit and a range to a node and uses RangedFilter to do the heavy lifting where possible\n */\nexport class LimitedFilter implements NodeFilter {\n private readonly rangedFilter_: RangedFilter;\n\n private readonly index_: Index;\n\n private readonly limit_: number;\n\n private readonly reverse_: boolean;\n\n private readonly startIsInclusive_: boolean;\n\n private readonly endIsInclusive_: boolean;\n\n constructor(params: QueryParams) {\n this.rangedFilter_ = new RangedFilter(params);\n this.index_ = params.getIndex();\n this.limit_ = params.getLimit();\n this.reverse_ = !params.isViewFromLeft();\n this.startIsInclusive_ = !params.startAfterSet_;\n this.endIsInclusive_ = !params.endBeforeSet_;\n }\n updateChild(\n snap: Node,\n key: string,\n newChild: Node,\n affectedPath: Path,\n source: CompleteChildSource,\n optChangeAccumulator: ChildChangeAccumulator | null\n ): Node {\n if (!this.rangedFilter_.matches(new NamedNode(key, newChild))) {\n newChild = ChildrenNode.EMPTY_NODE;\n }\n if (snap.getImmediateChild(key).equals(newChild)) {\n // No change\n return snap;\n } else if (snap.numChildren() < this.limit_) {\n return this.rangedFilter_\n .getIndexedFilter()\n .updateChild(\n snap,\n key,\n newChild,\n affectedPath,\n source,\n optChangeAccumulator\n );\n } else {\n return this.fullLimitUpdateChild_(\n snap,\n key,\n newChild,\n source,\n optChangeAccumulator\n );\n }\n }\n updateFullNode(\n oldSnap: Node,\n newSnap: Node,\n optChangeAccumulator: ChildChangeAccumulator | null\n ): Node {\n let filtered;\n if (newSnap.isLeafNode() || newSnap.isEmpty()) {\n // Make sure we have a children node with the correct index, not a leaf node;\n filtered = ChildrenNode.EMPTY_NODE.withIndex(this.index_);\n } else {\n if (\n this.limit_ * 2 < newSnap.numChildren() &&\n newSnap.isIndexed(this.index_)\n ) {\n // Easier to build up a snapshot, since what we're given has more than twice the elements we want\n filtered = ChildrenNode.EMPTY_NODE.withIndex(this.index_);\n // anchor to the startPost, endPost, or last element as appropriate\n let iterator;\n if (this.reverse_) {\n iterator = (newSnap as ChildrenNode).getReverseIteratorFrom(\n this.rangedFilter_.getEndPost(),\n this.index_\n );\n } else {\n 
iterator = (newSnap as ChildrenNode).getIteratorFrom(\n this.rangedFilter_.getStartPost(),\n this.index_\n );\n }\n let count = 0;\n while (iterator.hasNext() && count < this.limit_) {\n const next = iterator.getNext();\n if (!this.withinDirectionalStart(next)) {\n // if we have not reached the start, skip to the next element\n continue;\n } else if (!this.withinDirectionalEnd(next)) {\n // if we have reached the end, stop adding elements\n break;\n } else {\n filtered = filtered.updateImmediateChild(next.name, next.node);\n count++;\n }\n }\n } else {\n // The snap contains less than twice the limit. Faster to delete from the snap than build up a new one\n filtered = newSnap.withIndex(this.index_);\n // Don't support priorities on queries\n filtered = filtered.updatePriority(\n ChildrenNode.EMPTY_NODE\n ) as ChildrenNode;\n\n let iterator;\n if (this.reverse_) {\n iterator = filtered.getReverseIterator(this.index_);\n } else {\n iterator = filtered.getIterator(this.index_);\n }\n\n let count = 0;\n while (iterator.hasNext()) {\n const next = iterator.getNext();\n const inRange =\n count < this.limit_ &&\n this.withinDirectionalStart(next) &&\n this.withinDirectionalEnd(next);\n if (inRange) {\n count++;\n } else {\n filtered = filtered.updateImmediateChild(\n next.name,\n ChildrenNode.EMPTY_NODE\n );\n }\n }\n }\n }\n return this.rangedFilter_\n .getIndexedFilter()\n .updateFullNode(oldSnap, filtered, optChangeAccumulator);\n }\n updatePriority(oldSnap: Node, newPriority: Node): Node {\n // Don't support priorities on queries\n return oldSnap;\n }\n filtersNodes(): boolean {\n return true;\n }\n getIndexedFilter(): IndexedFilter {\n return this.rangedFilter_.getIndexedFilter();\n }\n getIndex(): Index {\n return this.index_;\n }\n\n private fullLimitUpdateChild_(\n snap: Node,\n childKey: string,\n childSnap: Node,\n source: CompleteChildSource,\n changeAccumulator: ChildChangeAccumulator | null\n ): Node {\n // TODO: rename all cache stuff etc to general snap terminology\n let cmp;\n if (this.reverse_) {\n const indexCmp = this.index_.getCompare();\n cmp = (a: NamedNode, b: NamedNode) => indexCmp(b, a);\n } else {\n cmp = this.index_.getCompare();\n }\n const oldEventCache = snap as ChildrenNode;\n assert(oldEventCache.numChildren() === this.limit_, '');\n const newChildNamedNode = new NamedNode(childKey, childSnap);\n const windowBoundary = this.reverse_\n ? oldEventCache.getFirstChild(this.index_)\n : (oldEventCache.getLastChild(this.index_) as NamedNode);\n const inRange = this.rangedFilter_.matches(newChildNamedNode);\n if (oldEventCache.hasChild(childKey)) {\n const oldChildSnap = oldEventCache.getImmediateChild(childKey);\n let nextChild = source.getChildAfterChild(\n this.index_,\n windowBoundary,\n this.reverse_\n );\n while (\n nextChild != null &&\n (nextChild.name === childKey || oldEventCache.hasChild(nextChild.name))\n ) {\n // There is a weird edge case where a node is updated as part of a merge in the write tree, but hasn't\n // been applied to the limited filter yet. Ignore this next child which will be updated later in\n // the limited filter...\n nextChild = source.getChildAfterChild(\n this.index_,\n nextChild,\n this.reverse_\n );\n }\n const compareNext =\n nextChild == null ? 
1 : cmp(nextChild, newChildNamedNode);\n const remainsInWindow =\n inRange && !childSnap.isEmpty() && compareNext >= 0;\n if (remainsInWindow) {\n if (changeAccumulator != null) {\n changeAccumulator.trackChildChange(\n changeChildChanged(childKey, childSnap, oldChildSnap)\n );\n }\n return oldEventCache.updateImmediateChild(childKey, childSnap);\n } else {\n if (changeAccumulator != null) {\n changeAccumulator.trackChildChange(\n changeChildRemoved(childKey, oldChildSnap)\n );\n }\n const newEventCache = oldEventCache.updateImmediateChild(\n childKey,\n ChildrenNode.EMPTY_NODE\n );\n const nextChildInRange =\n nextChild != null && this.rangedFilter_.matches(nextChild);\n if (nextChildInRange) {\n if (changeAccumulator != null) {\n changeAccumulator.trackChildChange(\n changeChildAdded(nextChild.name, nextChild.node)\n );\n }\n return newEventCache.updateImmediateChild(\n nextChild.name,\n nextChild.node\n );\n } else {\n return newEventCache;\n }\n }\n } else if (childSnap.isEmpty()) {\n // we're deleting a node, but it was not in the window, so ignore it\n return snap;\n } else if (inRange) {\n if (cmp(windowBoundary, newChildNamedNode) >= 0) {\n if (changeAccumulator != null) {\n changeAccumulator.trackChildChange(\n changeChildRemoved(windowBoundary.name, windowBoundary.node)\n );\n changeAccumulator.trackChildChange(\n changeChildAdded(childKey, childSnap)\n );\n }\n return oldEventCache\n .updateImmediateChild(childKey, childSnap)\n .updateImmediateChild(windowBoundary.name, ChildrenNode.EMPTY_NODE);\n } else {\n return snap;\n }\n } else {\n return snap;\n }\n }\n\n private withinDirectionalStart = (node: NamedNode) =>\n this.reverse_ ? this.withinEndPost(node) : this.withinStartPost(node);\n\n private withinDirectionalEnd = (node: NamedNode) =>\n this.reverse_ ? this.withinStartPost(node) : this.withinEndPost(node);\n\n private withinStartPost = (node: NamedNode) => {\n const compareRes = this.index_.compare(\n this.rangedFilter_.getStartPost(),\n node\n );\n return this.startIsInclusive_ ? compareRes <= 0 : compareRes < 0;\n };\n\n private withinEndPost = (node: NamedNode) => {\n const compareRes = this.index_.compare(\n node,\n this.rangedFilter_.getEndPost()\n );\n return this.endIsInclusive_ ? 
compareRes <= 0 : compareRes < 0;\n };\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { assert, stringify } from '@firebase/util';\n\nimport { Index } from '../snap/indexes/Index';\nimport { KEY_INDEX } from '../snap/indexes/KeyIndex';\nimport { PathIndex } from '../snap/indexes/PathIndex';\nimport { PRIORITY_INDEX, PriorityIndex } from '../snap/indexes/PriorityIndex';\nimport { VALUE_INDEX } from '../snap/indexes/ValueIndex';\nimport { MAX_NAME, MIN_NAME } from '../util/util';\n\nimport { IndexedFilter } from './filter/IndexedFilter';\nimport { LimitedFilter } from './filter/LimitedFilter';\nimport { NodeFilter } from './filter/NodeFilter';\nimport { RangedFilter } from './filter/RangedFilter';\n\n/**\n * Wire Protocol Constants\n */\nconst enum WIRE_PROTOCOL_CONSTANTS {\n INDEX_START_VALUE = 'sp',\n INDEX_START_NAME = 'sn',\n INDEX_START_IS_INCLUSIVE = 'sin',\n INDEX_END_VALUE = 'ep',\n INDEX_END_NAME = 'en',\n INDEX_END_IS_INCLUSIVE = 'ein',\n LIMIT = 'l',\n VIEW_FROM = 'vf',\n VIEW_FROM_LEFT = 'l',\n VIEW_FROM_RIGHT = 'r',\n INDEX = 'i'\n}\n\n/**\n * REST Query Constants\n */\nconst enum REST_QUERY_CONSTANTS {\n ORDER_BY = 'orderBy',\n PRIORITY_INDEX = '$priority',\n VALUE_INDEX = '$value',\n KEY_INDEX = '$key',\n START_AFTER = 'startAfter',\n START_AT = 'startAt',\n END_AT = 'endAt',\n END_BEFORE = 'endBefore',\n LIMIT_TO_FIRST = 'limitToFirst',\n LIMIT_TO_LAST = 'limitToLast'\n}\n\n/**\n * This class is an immutable-from-the-public-api struct containing a set of query parameters defining a\n * range to be returned for a particular location. It is assumed that validation of parameters is done at the\n * user-facing API level, so it is not done here.\n *\n * @internal\n */\nexport class QueryParams {\n limitSet_ = false;\n startSet_ = false;\n startNameSet_ = false;\n startAfterSet_ = false; // can only be true if startSet_ is true\n endSet_ = false;\n endNameSet_ = false;\n endBeforeSet_ = false; // can only be true if endSet_ is true\n limit_ = 0;\n viewFrom_ = '';\n indexStartValue_: unknown | null = null;\n indexStartName_ = '';\n indexEndValue_: unknown | null = null;\n indexEndName_ = '';\n index_: PriorityIndex = PRIORITY_INDEX;\n\n hasStart(): boolean {\n return this.startSet_;\n }\n\n /**\n * @returns True if it would return from left.\n */\n isViewFromLeft(): boolean {\n if (this.viewFrom_ === '') {\n // limit(), rather than limitToFirst or limitToLast was called.\n // This means that only one of startSet_ and endSet_ is true. Use them\n // to calculate which side of the view to anchor to. 
If neither is set,\n // anchor to the end.\n return this.startSet_;\n } else {\n return this.viewFrom_ === WIRE_PROTOCOL_CONSTANTS.VIEW_FROM_LEFT;\n }\n }\n\n /**\n * Only valid to call if hasStart() returns true\n */\n getIndexStartValue(): unknown {\n assert(this.startSet_, 'Only valid if start has been set');\n return this.indexStartValue_;\n }\n\n /**\n * Only valid to call if hasStart() returns true.\n * Returns the starting key name for the range defined by these query parameters\n */\n getIndexStartName(): string {\n assert(this.startSet_, 'Only valid if start has been set');\n if (this.startNameSet_) {\n return this.indexStartName_;\n } else {\n return MIN_NAME;\n }\n }\n\n hasEnd(): boolean {\n return this.endSet_;\n }\n\n /**\n * Only valid to call if hasEnd() returns true.\n */\n getIndexEndValue(): unknown {\n assert(this.endSet_, 'Only valid if end has been set');\n return this.indexEndValue_;\n }\n\n /**\n * Only valid to call if hasEnd() returns true.\n * Returns the end key name for the range defined by these query parameters\n */\n getIndexEndName(): string {\n assert(this.endSet_, 'Only valid if end has been set');\n if (this.endNameSet_) {\n return this.indexEndName_;\n } else {\n return MAX_NAME;\n }\n }\n\n hasLimit(): boolean {\n return this.limitSet_;\n }\n\n /**\n * @returns True if a limit has been set and it has been explicitly anchored\n */\n hasAnchoredLimit(): boolean {\n return this.limitSet_ && this.viewFrom_ !== '';\n }\n\n /**\n * Only valid to call if hasLimit() returns true\n */\n getLimit(): number {\n assert(this.limitSet_, 'Only valid if limit has been set');\n return this.limit_;\n }\n\n getIndex(): Index {\n return this.index_;\n }\n\n loadsAllData(): boolean {\n return !(this.startSet_ || this.endSet_ || this.limitSet_);\n }\n\n isDefault(): boolean {\n return this.loadsAllData() && this.index_ === PRIORITY_INDEX;\n }\n\n copy(): QueryParams {\n const copy = new QueryParams();\n copy.limitSet_ = this.limitSet_;\n copy.limit_ = this.limit_;\n copy.startSet_ = this.startSet_;\n copy.startAfterSet_ = this.startAfterSet_;\n copy.indexStartValue_ = this.indexStartValue_;\n copy.startNameSet_ = this.startNameSet_;\n copy.indexStartName_ = this.indexStartName_;\n copy.endSet_ = this.endSet_;\n copy.endBeforeSet_ = this.endBeforeSet_;\n copy.indexEndValue_ = this.indexEndValue_;\n copy.endNameSet_ = this.endNameSet_;\n copy.indexEndName_ = this.indexEndName_;\n copy.index_ = this.index_;\n copy.viewFrom_ = this.viewFrom_;\n return copy;\n }\n}\n\nexport function queryParamsGetNodeFilter(queryParams: QueryParams): NodeFilter {\n if (queryParams.loadsAllData()) {\n return new IndexedFilter(queryParams.getIndex());\n } else if (queryParams.hasLimit()) {\n return new LimitedFilter(queryParams);\n } else {\n return new RangedFilter(queryParams);\n }\n}\n\nexport function queryParamsLimit(\n queryParams: QueryParams,\n newLimit: number\n): QueryParams {\n const newParams = queryParams.copy();\n newParams.limitSet_ = true;\n newParams.limit_ = newLimit;\n newParams.viewFrom_ = '';\n return newParams;\n}\n\nexport function queryParamsLimitToFirst(\n queryParams: QueryParams,\n newLimit: number\n): QueryParams {\n const newParams = queryParams.copy();\n newParams.limitSet_ = true;\n newParams.limit_ = newLimit;\n newParams.viewFrom_ = WIRE_PROTOCOL_CONSTANTS.VIEW_FROM_LEFT;\n return newParams;\n}\n\nexport function queryParamsLimitToLast(\n queryParams: QueryParams,\n newLimit: number\n): QueryParams {\n const newParams = queryParams.copy();\n newParams.limitSet_ = 
true;\n newParams.limit_ = newLimit;\n newParams.viewFrom_ = WIRE_PROTOCOL_CONSTANTS.VIEW_FROM_RIGHT;\n return newParams;\n}\n\nexport function queryParamsStartAt(\n queryParams: QueryParams,\n indexValue: unknown,\n key?: string | null\n): QueryParams {\n const newParams = queryParams.copy();\n newParams.startSet_ = true;\n if (indexValue === undefined) {\n indexValue = null;\n }\n newParams.indexStartValue_ = indexValue;\n if (key != null) {\n newParams.startNameSet_ = true;\n newParams.indexStartName_ = key;\n } else {\n newParams.startNameSet_ = false;\n newParams.indexStartName_ = '';\n }\n return newParams;\n}\n\nexport function queryParamsStartAfter(\n queryParams: QueryParams,\n indexValue: unknown,\n key?: string | null\n): QueryParams {\n let params: QueryParams;\n if (queryParams.index_ === KEY_INDEX || !!key) {\n params = queryParamsStartAt(queryParams, indexValue, key);\n } else {\n params = queryParamsStartAt(queryParams, indexValue, MAX_NAME);\n }\n params.startAfterSet_ = true;\n return params;\n}\n\nexport function queryParamsEndAt(\n queryParams: QueryParams,\n indexValue: unknown,\n key?: string | null\n): QueryParams {\n const newParams = queryParams.copy();\n newParams.endSet_ = true;\n if (indexValue === undefined) {\n indexValue = null;\n }\n newParams.indexEndValue_ = indexValue;\n if (key !== undefined) {\n newParams.endNameSet_ = true;\n newParams.indexEndName_ = key;\n } else {\n newParams.endNameSet_ = false;\n newParams.indexEndName_ = '';\n }\n return newParams;\n}\n\nexport function queryParamsEndBefore(\n queryParams: QueryParams,\n indexValue: unknown,\n key?: string | null\n): QueryParams {\n let params: QueryParams;\n if (queryParams.index_ === KEY_INDEX || !!key) {\n params = queryParamsEndAt(queryParams, indexValue, key);\n } else {\n params = queryParamsEndAt(queryParams, indexValue, MIN_NAME);\n }\n params.endBeforeSet_ = true;\n return params;\n}\n\nexport function queryParamsOrderBy(\n queryParams: QueryParams,\n index: Index\n): QueryParams {\n const newParams = queryParams.copy();\n newParams.index_ = index;\n return newParams;\n}\n\n/**\n * Returns a set of REST query string parameters representing this query.\n *\n * @returns query string parameters\n */\nexport function queryParamsToRestQueryStringParameters(\n queryParams: QueryParams\n): Record {\n const qs: Record = {};\n\n if (queryParams.isDefault()) {\n return qs;\n }\n\n let orderBy;\n if (queryParams.index_ === PRIORITY_INDEX) {\n orderBy = REST_QUERY_CONSTANTS.PRIORITY_INDEX;\n } else if (queryParams.index_ === VALUE_INDEX) {\n orderBy = REST_QUERY_CONSTANTS.VALUE_INDEX;\n } else if (queryParams.index_ === KEY_INDEX) {\n orderBy = REST_QUERY_CONSTANTS.KEY_INDEX;\n } else {\n assert(queryParams.index_ instanceof PathIndex, 'Unrecognized index type!');\n orderBy = queryParams.index_.toString();\n }\n qs[REST_QUERY_CONSTANTS.ORDER_BY] = stringify(orderBy);\n\n if (queryParams.startSet_) {\n const startParam = queryParams.startAfterSet_\n ? REST_QUERY_CONSTANTS.START_AFTER\n : REST_QUERY_CONSTANTS.START_AT;\n qs[startParam] = stringify(queryParams.indexStartValue_);\n if (queryParams.startNameSet_) {\n qs[startParam] += ',' + stringify(queryParams.indexStartName_);\n }\n }\n\n if (queryParams.endSet_) {\n const endParam = queryParams.endBeforeSet_\n ? 
REST_QUERY_CONSTANTS.END_BEFORE\n : REST_QUERY_CONSTANTS.END_AT;\n qs[endParam] = stringify(queryParams.indexEndValue_);\n if (queryParams.endNameSet_) {\n qs[endParam] += ',' + stringify(queryParams.indexEndName_);\n }\n }\n\n if (queryParams.limitSet_) {\n if (queryParams.isViewFromLeft()) {\n qs[REST_QUERY_CONSTANTS.LIMIT_TO_FIRST] = queryParams.limit_;\n } else {\n qs[REST_QUERY_CONSTANTS.LIMIT_TO_LAST] = queryParams.limit_;\n }\n }\n\n return qs;\n}\n\nexport function queryParamsGetQueryObject(\n queryParams: QueryParams\n): Record {\n const obj: Record = {};\n if (queryParams.startSet_) {\n obj[WIRE_PROTOCOL_CONSTANTS.INDEX_START_VALUE] =\n queryParams.indexStartValue_;\n if (queryParams.startNameSet_) {\n obj[WIRE_PROTOCOL_CONSTANTS.INDEX_START_NAME] =\n queryParams.indexStartName_;\n }\n obj[WIRE_PROTOCOL_CONSTANTS.INDEX_START_IS_INCLUSIVE] =\n !queryParams.startAfterSet_;\n }\n if (queryParams.endSet_) {\n obj[WIRE_PROTOCOL_CONSTANTS.INDEX_END_VALUE] = queryParams.indexEndValue_;\n if (queryParams.endNameSet_) {\n obj[WIRE_PROTOCOL_CONSTANTS.INDEX_END_NAME] = queryParams.indexEndName_;\n }\n obj[WIRE_PROTOCOL_CONSTANTS.INDEX_END_IS_INCLUSIVE] =\n !queryParams.endBeforeSet_;\n }\n if (queryParams.limitSet_) {\n obj[WIRE_PROTOCOL_CONSTANTS.LIMIT] = queryParams.limit_;\n let viewFrom = queryParams.viewFrom_;\n if (viewFrom === '') {\n if (queryParams.isViewFromLeft()) {\n viewFrom = WIRE_PROTOCOL_CONSTANTS.VIEW_FROM_LEFT;\n } else {\n viewFrom = WIRE_PROTOCOL_CONSTANTS.VIEW_FROM_RIGHT;\n }\n }\n obj[WIRE_PROTOCOL_CONSTANTS.VIEW_FROM] = viewFrom;\n }\n // For now, priority index is the default, so we only specify if it's some other index\n if (queryParams.index_ !== PRIORITY_INDEX) {\n obj[WIRE_PROTOCOL_CONSTANTS.INDEX] = queryParams.index_.toString();\n }\n return obj;\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n assert,\n jsonEval,\n safeGet,\n querystring,\n Deferred\n} from '@firebase/util';\n\nimport { AppCheckTokenProvider } from './AppCheckTokenProvider';\nimport { AuthTokenProvider } from './AuthTokenProvider';\nimport { RepoInfo } from './RepoInfo';\nimport { ServerActions } from './ServerActions';\nimport { logWrapper, warn } from './util/util';\nimport { QueryContext } from './view/EventRegistration';\nimport { queryParamsToRestQueryStringParameters } from './view/QueryParams';\n\n/**\n * An implementation of ServerActions that communicates with the server via REST requests.\n * This is mostly useful for compatibility with crawlers, where we don't want to spin up a full\n * persistent connection (using WebSockets or long-polling)\n */\nexport class ReadonlyRestClient extends ServerActions {\n reportStats(stats: { [k: string]: unknown }): void {\n throw new Error('Method not implemented.');\n }\n\n /** @private {function(...[*])} */\n private log_: (...args: unknown[]) => void = logWrapper('p:rest:');\n\n /**\n * We don't actually need to track listens, except to prevent us calling an 
onComplete for a listen\n * that's been removed. :-/\n */\n private listens_: { [k: string]: object } = {};\n\n static getListenId_(query: QueryContext, tag?: number | null): string {\n if (tag !== undefined) {\n return 'tag$' + tag;\n } else {\n assert(\n query._queryParams.isDefault(),\n \"should have a tag if it's not a default query.\"\n );\n return query._path.toString();\n }\n }\n\n /**\n * @param repoInfo_ - Data about the namespace we are connecting to\n * @param onDataUpdate_ - A callback for new data from the server\n */\n constructor(\n private repoInfo_: RepoInfo,\n private onDataUpdate_: (\n a: string,\n b: unknown,\n c: boolean,\n d: number | null\n ) => void,\n private authTokenProvider_: AuthTokenProvider,\n private appCheckTokenProvider_: AppCheckTokenProvider\n ) {\n super();\n }\n\n /** @inheritDoc */\n listen(\n query: QueryContext,\n currentHashFn: () => string,\n tag: number | null,\n onComplete: (a: string, b: unknown) => void\n ) {\n const pathString = query._path.toString();\n this.log_('Listen called for ' + pathString + ' ' + query._queryIdentifier);\n\n // Mark this listener so we can tell if it's removed.\n const listenId = ReadonlyRestClient.getListenId_(query, tag);\n const thisListen = {};\n this.listens_[listenId] = thisListen;\n\n const queryStringParameters = queryParamsToRestQueryStringParameters(\n query._queryParams\n );\n\n this.restRequest_(\n pathString + '.json',\n queryStringParameters,\n (error, result) => {\n let data = result;\n\n if (error === 404) {\n data = null;\n error = null;\n }\n\n if (error === null) {\n this.onDataUpdate_(pathString, data, /*isMerge=*/ false, tag);\n }\n\n if (safeGet(this.listens_, listenId) === thisListen) {\n let status;\n if (!error) {\n status = 'ok';\n } else if (error === 401) {\n status = 'permission_denied';\n } else {\n status = 'rest_error:' + error;\n }\n\n onComplete(status, null);\n }\n }\n );\n }\n\n /** @inheritDoc */\n unlisten(query: QueryContext, tag: number | null) {\n const listenId = ReadonlyRestClient.getListenId_(query, tag);\n delete this.listens_[listenId];\n }\n\n get(query: QueryContext): Promise {\n const queryStringParameters = queryParamsToRestQueryStringParameters(\n query._queryParams\n );\n\n const pathString = query._path.toString();\n\n const deferred = new Deferred();\n\n this.restRequest_(\n pathString + '.json',\n queryStringParameters,\n (error, result) => {\n let data = result;\n\n if (error === 404) {\n data = null;\n error = null;\n }\n\n if (error === null) {\n this.onDataUpdate_(\n pathString,\n data,\n /*isMerge=*/ false,\n /*tag=*/ null\n );\n deferred.resolve(data as string);\n } else {\n deferred.reject(new Error(data as string));\n }\n }\n );\n return deferred.promise;\n }\n\n /** @inheritDoc */\n refreshAuthToken(token: string) {\n // no-op since we just always call getToken.\n }\n\n /**\n * Performs a REST request to the given path, with the provided query string parameters,\n * and any auth credentials we have.\n */\n private restRequest_(\n pathString: string,\n queryStringParameters: { [k: string]: string | number } = {},\n callback: ((a: number | null, b?: unknown) => void) | null\n ) {\n queryStringParameters['format'] = 'export';\n\n return Promise.all([\n this.authTokenProvider_.getToken(/*forceRefresh=*/ false),\n this.appCheckTokenProvider_.getToken(/*forceRefresh=*/ false)\n ]).then(([authToken, appCheckToken]) => {\n if (authToken && authToken.accessToken) {\n queryStringParameters['auth'] = authToken.accessToken;\n }\n if (appCheckToken && 
appCheckToken.token) {\n queryStringParameters['ac'] = appCheckToken.token;\n }\n\n const url =\n (this.repoInfo_.secure ? 'https://' : 'http://') +\n this.repoInfo_.host +\n pathString +\n '?' +\n 'ns=' +\n this.repoInfo_.namespace +\n querystring(queryStringParameters);\n\n this.log_('Sending REST request for ' + url);\n const xhr = new XMLHttpRequest();\n xhr.onreadystatechange = () => {\n if (callback && xhr.readyState === 4) {\n this.log_(\n 'REST Response for ' + url + ' received. status:',\n xhr.status,\n 'response:',\n xhr.responseText\n );\n let res = null;\n if (xhr.status >= 200 && xhr.status < 300) {\n try {\n res = jsonEval(xhr.responseText);\n } catch (e) {\n warn(\n 'Failed to parse JSON response for ' +\n url +\n ': ' +\n xhr.responseText\n );\n }\n callback(null, res);\n } else {\n // 401 and 404 are expected.\n if (xhr.status !== 401 && xhr.status !== 404) {\n warn(\n 'Got unsuccessful REST response for ' +\n url +\n ' Status: ' +\n xhr.status\n );\n }\n callback(xhr.status);\n }\n callback = null;\n }\n };\n\n xhr.open('GET', url, /*asynchronous=*/ true);\n xhr.send();\n });\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { ChildrenNode } from './snap/ChildrenNode';\nimport { Node } from './snap/Node';\nimport { Path } from './util/Path';\n\n/**\n * Mutable object which basically just stores a reference to the \"latest\" immutable snapshot.\n */\nexport class SnapshotHolder {\n private rootNode_: Node = ChildrenNode.EMPTY_NODE;\n\n getNode(path: Path): Node {\n return this.rootNode_.getChild(path);\n }\n\n updateSnapshot(path: Path, newSnapshotNode: Node) {\n this.rootNode_ = this.rootNode_.updateChild(path, newSnapshotNode);\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { PRIORITY_INDEX } from './snap/indexes/PriorityIndex';\nimport { Node } from './snap/Node';\nimport { Path, pathGetFront, pathIsEmpty, pathPopFront } from './util/Path';\n\n/**\n * Helper class to store a sparse set of snapshots.\n */\nexport interface SparseSnapshotTree {\n value: Node | null;\n readonly children: Map;\n}\n\nexport function newSparseSnapshotTree(): SparseSnapshotTree {\n return {\n value: null,\n children: new Map()\n };\n}\n\n/**\n * Gets the node stored at the given path if one exists.\n * Only seems to be used in tests.\n *\n * @param path - Path to look up snapshot for.\n * @returns The retrieved node, or null.\n 
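The read-only REST client above builds a URL from the host, path, namespace and query parameters (always forcing `format=export`), then maps response codes: 2xx bodies are parsed as JSON, 404 is treated as an empty location rather than an error, and 401 as a permission error. A minimal sketch of the same flow, assuming a global `fetch` (Node 18+ or a browser) instead of XMLHttpRequest; `restGet` and `toQueryString` are invented names, and the real client's query-string encoding and auth/App Check parameters are omitted here.

```ts
// Hypothetical helper: serialize extra query parameters as '&key=value' pairs.
function toQueryString(params: Record<string, string | number>): string {
  return Object.entries(params)
    .map(([k, v]) => '&' + encodeURIComponent(k) + '=' + encodeURIComponent(String(v)))
    .join('');
}

// Sketch of a read-only REST fetch: force the export format, build the URL from
// host, path and namespace, then translate status codes the way the client
// above does (2xx parsed as JSON, 404 treated as "no data", 401 as permission_denied).
async function restGet(
  host: string,
  namespace: string,
  pathString: string,
  params: Record<string, string | number> = {}
): Promise<{ status: string; data: unknown }> {
  params['format'] = 'export';
  const url = 'https://' + host + pathString + '.json?ns=' + namespace + toQueryString(params);

  const response = await fetch(url);
  if (response.status === 404) {
    return { status: 'ok', data: null }; // missing location, not an error
  }
  if (response.ok) {
    return { status: 'ok', data: await response.json() };
  }
  if (response.status === 401) {
    return { status: 'permission_denied', data: null };
  }
  return { status: 'rest_error:' + response.status, data: null };
}

// Example call (hypothetical host and namespace):
// restGet('example.firebaseio.com', 'example', '/users/alice', { orderBy: '"$key"' })
//   .then(r => console.log(r.status, r.data));
```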
*/\nexport function sparseSnapshotTreeFind(\n sparseSnapshotTree: SparseSnapshotTree,\n path: Path\n): Node | null {\n if (sparseSnapshotTree.value != null) {\n return sparseSnapshotTree.value.getChild(path);\n } else if (!pathIsEmpty(path) && sparseSnapshotTree.children.size > 0) {\n const childKey = pathGetFront(path);\n path = pathPopFront(path);\n if (sparseSnapshotTree.children.has(childKey)) {\n const childTree = sparseSnapshotTree.children.get(childKey);\n return sparseSnapshotTreeFind(childTree, path);\n } else {\n return null;\n }\n } else {\n return null;\n }\n}\n\n/**\n * Stores the given node at the specified path. If there is already a node\n * at a shallower path, it merges the new data into that snapshot node.\n *\n * @param path - Path to look up snapshot for.\n * @param data - The new data, or null.\n */\nexport function sparseSnapshotTreeRemember(\n sparseSnapshotTree: SparseSnapshotTree,\n path: Path,\n data: Node\n): void {\n if (pathIsEmpty(path)) {\n sparseSnapshotTree.value = data;\n sparseSnapshotTree.children.clear();\n } else if (sparseSnapshotTree.value !== null) {\n sparseSnapshotTree.value = sparseSnapshotTree.value.updateChild(path, data);\n } else {\n const childKey = pathGetFront(path);\n if (!sparseSnapshotTree.children.has(childKey)) {\n sparseSnapshotTree.children.set(childKey, newSparseSnapshotTree());\n }\n\n const child = sparseSnapshotTree.children.get(childKey);\n path = pathPopFront(path);\n sparseSnapshotTreeRemember(child, path, data);\n }\n}\n\n/**\n * Purge the data at path from the cache.\n *\n * @param path - Path to look up snapshot for.\n * @returns True if this node should now be removed.\n */\nexport function sparseSnapshotTreeForget(\n sparseSnapshotTree: SparseSnapshotTree,\n path: Path\n): boolean {\n if (pathIsEmpty(path)) {\n sparseSnapshotTree.value = null;\n sparseSnapshotTree.children.clear();\n return true;\n } else {\n if (sparseSnapshotTree.value !== null) {\n if (sparseSnapshotTree.value.isLeafNode()) {\n // We're trying to forget a node that doesn't exist\n return false;\n } else {\n const value = sparseSnapshotTree.value;\n sparseSnapshotTree.value = null;\n\n value.forEachChild(PRIORITY_INDEX, (key, tree) => {\n sparseSnapshotTreeRemember(sparseSnapshotTree, new Path(key), tree);\n });\n\n return sparseSnapshotTreeForget(sparseSnapshotTree, path);\n }\n } else if (sparseSnapshotTree.children.size > 0) {\n const childKey = pathGetFront(path);\n path = pathPopFront(path);\n if (sparseSnapshotTree.children.has(childKey)) {\n const safeToRemove = sparseSnapshotTreeForget(\n sparseSnapshotTree.children.get(childKey),\n path\n );\n if (safeToRemove) {\n sparseSnapshotTree.children.delete(childKey);\n }\n }\n\n return sparseSnapshotTree.children.size === 0;\n } else {\n return true;\n }\n }\n}\n\n/**\n * Recursively iterates through all of the stored tree and calls the\n * callback on each one.\n *\n * @param prefixPath - Path to look up node for.\n * @param func - The function to invoke for each tree.\n */\nexport function sparseSnapshotTreeForEachTree(\n sparseSnapshotTree: SparseSnapshotTree,\n prefixPath: Path,\n func: (a: Path, b: Node) => unknown\n): void {\n if (sparseSnapshotTree.value !== null) {\n func(prefixPath, sparseSnapshotTree.value);\n } else {\n sparseSnapshotTreeForEachChild(sparseSnapshotTree, (key, tree) => {\n const path = new Path(prefixPath.toString() + '/' + key);\n sparseSnapshotTreeForEachTree(tree, path, func);\n });\n }\n}\n\n/**\n * Iterates through each immediate child and triggers the callback.\n * 
Only seems to be used in tests.\n *\n * @param func - The function to invoke for each child.\n */\nexport function sparseSnapshotTreeForEachChild(\n sparseSnapshotTree: SparseSnapshotTree,\n func: (a: string, b: SparseSnapshotTree) => void\n): void {\n sparseSnapshotTree.children.forEach((tree, key) => {\n func(key, tree);\n });\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { each } from '../util/util';\n\nimport { StatsCollection } from './StatsCollection';\n\n/**\n * Returns the delta from the previous call to get stats.\n *\n * @param collection_ - The collection to \"listen\" to.\n */\nexport class StatsListener {\n private last_: { [k: string]: number } | null = null;\n\n constructor(private collection_: StatsCollection) {}\n\n get(): { [k: string]: number } {\n const newStats = this.collection_.get();\n\n const delta = { ...newStats };\n if (this.last_) {\n each(this.last_, (stat: string, value: number) => {\n delta[stat] = delta[stat] - value;\n });\n }\n this.last_ = newStats;\n\n return delta;\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { contains } from '@firebase/util';\n\nimport { ServerActions } from '../ServerActions';\nimport { setTimeoutNonBlocking, each } from '../util/util';\n\nimport { StatsCollection } from './StatsCollection';\nimport { StatsListener } from './StatsListener';\n\n// Assuming some apps may have a short amount of time on page, and a bulk of firebase operations probably\n// happen on page load, we try to report our first set of stats pretty quickly, but we wait at least 10\n// seconds to try to ensure the Firebase connection is established / settled.\nconst FIRST_STATS_MIN_TIME = 10 * 1000;\nconst FIRST_STATS_MAX_TIME = 30 * 1000;\n\n// We'll continue to report stats on average every 5 minutes.\nconst REPORT_STATS_INTERVAL = 5 * 60 * 1000;\n\nexport class StatsReporter {\n private statsListener_: StatsListener;\n statsToReport_: { [k: string]: boolean } = {};\n\n constructor(collection: StatsCollection, private server_: ServerActions) {\n this.statsListener_ = new StatsListener(collection);\n\n const timeout =\n FIRST_STATS_MIN_TIME +\n (FIRST_STATS_MAX_TIME - FIRST_STATS_MIN_TIME) * Math.random();\n setTimeoutNonBlocking(this.reportStats_.bind(this), Math.floor(timeout));\n }\n\n private reportStats_() {\n const stats = this.statsListener_.get();\n const reportedStats: typeof stats = 
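StatsListener above reports only how much each counter grew since the previous call. A self-contained sketch of that delta pattern; `DeltaTracker` and its `readCounters` callback are names invented for the example.

```ts
// Stand-alone version of the "delta since last call" pattern.
class DeltaTracker {
  private last: { [k: string]: number } | null = null;

  constructor(private readCounters: () => { [k: string]: number }) {}

  // Returns how much each counter changed since the previous call.
  get(): { [k: string]: number } {
    const current = this.readCounters();
    const delta: { [k: string]: number } = { ...current };
    if (this.last) {
      for (const [stat, previous] of Object.entries(this.last)) {
        delta[stat] = (delta[stat] ?? 0) - previous;
      }
    }
    this.last = current;
    return delta;
  }
}

// Example: the second call reports only what changed in between.
let sent = 0;
const tracker = new DeltaTracker(() => ({ messages_sent: sent }));
console.log(tracker.get()); // { messages_sent: 0 }
sent += 3;
console.log(tracker.get()); // { messages_sent: 3 }
```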
{};\n let haveStatsToReport = false;\n\n each(stats, (stat: string, value: number) => {\n if (value > 0 && contains(this.statsToReport_, stat)) {\n reportedStats[stat] = value;\n haveStatsToReport = true;\n }\n });\n\n if (haveStatsToReport) {\n this.server_.reportStats(reportedStats);\n }\n\n // queue our next run.\n setTimeoutNonBlocking(\n this.reportStats_.bind(this),\n Math.floor(Math.random() * 2 * REPORT_STATS_INTERVAL)\n );\n }\n}\n\nexport function statsReporterIncludeStat(\n reporter: StatsReporter,\n stat: string\n) {\n reporter.statsToReport_[stat] = true;\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { assert } from '@firebase/util';\n\nimport { ImmutableTree } from '../util/ImmutableTree';\nimport {\n newEmptyPath,\n Path,\n pathGetFront,\n pathIsEmpty,\n pathPopFront\n} from '../util/Path';\n\nimport { newOperationSourceUser, Operation, OperationType } from './Operation';\n\nexport class AckUserWrite implements Operation {\n /** @inheritDoc */\n type = OperationType.ACK_USER_WRITE;\n\n /** @inheritDoc */\n source = newOperationSourceUser();\n\n /**\n * @param affectedTree - A tree containing true for each affected path. Affected paths can't overlap.\n */\n constructor(\n /** @inheritDoc */ public path: Path,\n /** @inheritDoc */ public affectedTree: ImmutableTree,\n /** @inheritDoc */ public revert: boolean\n ) {}\n operationForChild(childName: string): AckUserWrite {\n if (!pathIsEmpty(this.path)) {\n assert(\n pathGetFront(this.path) === childName,\n 'operationForChild called for unrelated child.'\n );\n return new AckUserWrite(\n pathPopFront(this.path),\n this.affectedTree,\n this.revert\n );\n } else if (this.affectedTree.value != null) {\n assert(\n this.affectedTree.children.isEmpty(),\n 'affectedTree should not have overlapping affected paths.'\n );\n // All child locations are affected as well; just return same operation.\n return this;\n } else {\n const childTree = this.affectedTree.subtree(new Path(childName));\n return new AckUserWrite(newEmptyPath(), childTree, this.revert);\n }\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { newEmptyPath, Path, pathIsEmpty, pathPopFront } from '../util/Path';\n\nimport { Operation, OperationSource, OperationType } from './Operation';\n\nexport class ListenComplete implements Operation {\n /** @inheritDoc */\n type = OperationType.LISTEN_COMPLETE;\n\n 
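The reporter above randomizes its schedule: the first report fires at a uniform random point between the 10-second minimum and the 30-second maximum, and every later report is scheduled uniformly in [0, twice the 5-minute interval), which averages out to one report every 5 minutes. A small sketch of just that timing arithmetic, with the constant names shortened for the example:

```ts
const FIRST_MIN_MS = 10 * 1000;            // wait at least 10 s for the connection to settle
const FIRST_MAX_MS = 30 * 1000;
const AVERAGE_INTERVAL_MS = 5 * 60 * 1000; // target average reporting interval

// First report: uniform in [FIRST_MIN_MS, FIRST_MAX_MS).
function firstReportDelay(): number {
  return Math.floor(FIRST_MIN_MS + (FIRST_MAX_MS - FIRST_MIN_MS) * Math.random());
}

// Subsequent reports: uniform in [0, 2 * AVERAGE_INTERVAL_MS), i.e. 5 minutes on average.
function nextReportDelay(): number {
  return Math.floor(Math.random() * 2 * AVERAGE_INTERVAL_MS);
}

console.log(firstReportDelay(), nextReportDelay());
```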
constructor(public source: OperationSource, public path: Path) {}\n\n operationForChild(childName: string): ListenComplete {\n if (pathIsEmpty(this.path)) {\n return new ListenComplete(this.source, newEmptyPath());\n } else {\n return new ListenComplete(this.source, pathPopFront(this.path));\n }\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Node } from '../snap/Node';\nimport { newEmptyPath, Path, pathIsEmpty, pathPopFront } from '../util/Path';\n\nimport { Operation, OperationSource, OperationType } from './Operation';\n\nexport class Overwrite implements Operation {\n /** @inheritDoc */\n type = OperationType.OVERWRITE;\n\n constructor(\n public source: OperationSource,\n public path: Path,\n public snap: Node\n ) {}\n\n operationForChild(childName: string): Overwrite {\n if (pathIsEmpty(this.path)) {\n return new Overwrite(\n this.source,\n newEmptyPath(),\n this.snap.getImmediateChild(childName)\n );\n } else {\n return new Overwrite(this.source, pathPopFront(this.path), this.snap);\n }\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { assert } from '@firebase/util';\n\nimport { Node } from '../snap/Node';\nimport { ImmutableTree } from '../util/ImmutableTree';\nimport {\n newEmptyPath,\n Path,\n pathGetFront,\n pathIsEmpty,\n pathPopFront\n} from '../util/Path';\n\nimport { Operation, OperationSource, OperationType } from './Operation';\nimport { Overwrite } from './Overwrite';\n\nexport class Merge implements Operation {\n /** @inheritDoc */\n type = OperationType.MERGE;\n\n constructor(\n /** @inheritDoc */ public source: OperationSource,\n /** @inheritDoc */ public path: Path,\n /** @inheritDoc */ public children: ImmutableTree\n ) {}\n operationForChild(childName: string): Operation {\n if (pathIsEmpty(this.path)) {\n const childTree = this.children.subtree(new Path(childName));\n if (childTree.isEmpty()) {\n // This child is unaffected\n return null;\n } else if (childTree.value) {\n // We have a snapshot for the child in question. 
This becomes an overwrite of the child.\n return new Overwrite(this.source, newEmptyPath(), childTree.value);\n } else {\n // This is a merge at a deeper level\n return new Merge(this.source, newEmptyPath(), childTree);\n }\n } else {\n assert(\n pathGetFront(this.path) === childName,\n \"Can't get a merge for a child not on the path of the operation\"\n );\n return new Merge(this.source, pathPopFront(this.path), this.children);\n }\n }\n toString(): string {\n return (\n 'Operation(' +\n this.path +\n ': ' +\n this.source.toString() +\n ' merge: ' +\n this.children.toString() +\n ')'\n );\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Node } from '../snap/Node';\nimport { Path, pathGetFront, pathIsEmpty } from '../util/Path';\n\n/**\n * A cache node only stores complete children. Additionally it holds a flag whether the node can be considered fully\n * initialized in the sense that we know at one point in time this represented a valid state of the world, e.g.\n * initialized with data from the server, or a complete overwrite by the client. The filtered flag also tracks\n * whether a node potentially had children removed due to a filter.\n */\nexport class CacheNode {\n constructor(\n private node_: Node,\n private fullyInitialized_: boolean,\n private filtered_: boolean\n ) {}\n\n /**\n * Returns whether this node was fully initialized with either server data or a complete overwrite by the client\n */\n isFullyInitialized(): boolean {\n return this.fullyInitialized_;\n }\n\n /**\n * Returns whether this node is potentially missing children due to a filter applied to the node\n */\n isFiltered(): boolean {\n return this.filtered_;\n }\n\n isCompleteForPath(path: Path): boolean {\n if (pathIsEmpty(path)) {\n return this.isFullyInitialized() && !this.filtered_;\n }\n\n const childKey = pathGetFront(path);\n return this.isCompleteForChild(childKey);\n }\n\n isCompleteForChild(key: string): boolean {\n return (\n (this.isFullyInitialized() && !this.filtered_) || this.node_.hasChild(key)\n );\n }\n\n getNode(): Node {\n return this.node_;\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { assertionError } from '@firebase/util';\n\nimport { Index } from '../snap/indexes/Index';\nimport { NamedNode, Node } from '../snap/Node';\n\nimport { Change, ChangeType, changeChildMoved } from './Change';\nimport { Event } from 
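Overwrite, Merge, ListenComplete and AckUserWrite all follow the same descent pattern: `operationForChild` either pops the front segment off a non-empty path, or, once the path is empty, narrows the payload to the named child. A simplified sketch of that idea using string-array paths and plain JSON values; `SimpleOverwrite` and `overwriteForChild` are invented names, and the real operations work on the SDK's Path and Node types instead.

```ts
type JsonNode = null | string | number | boolean | { [key: string]: JsonNode };

// Simplified "overwrite" operation over array paths.
interface SimpleOverwrite {
  path: string[];   // where the new data applies, relative to the current node
  snap: JsonNode;   // the new data
}

function overwriteForChild(op: SimpleOverwrite, childName: string): SimpleOverwrite {
  if (op.path.length === 0) {
    // The write applies right here, so the child only sees its slice of the data.
    const child =
      op.snap !== null && typeof op.snap === 'object' ? op.snap[childName] ?? null : null;
    return { path: [], snap: child };
  }
  // Otherwise descend: drop the front segment of the path, keep the payload.
  return { path: op.path.slice(1), snap: op.snap };
}

// Example: a write at users/alice, viewed from the 'users' child, then from 'alice'.
const op: SimpleOverwrite = { path: ['users', 'alice'], snap: { name: 'Alice' } };
const atUsers = overwriteForChild(op, 'users');      // { path: ['alice'], snap: {...} }
const atAlice = overwriteForChild(atUsers, 'alice'); // { path: [], snap: { name: 'Alice' } }
console.log(atAlice.snap);
```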
'./Event';\nimport { EventRegistration, QueryContext } from './EventRegistration';\n\n/**\n * An EventGenerator is used to convert \"raw\" changes (Change) as computed by the\n * CacheDiffer into actual events (Event) that can be raised. See generateEventsForChanges()\n * for details.\n *\n */\nexport class EventGenerator {\n index_: Index;\n\n constructor(public query_: QueryContext) {\n this.index_ = this.query_._queryParams.getIndex();\n }\n}\n\n/**\n * Given a set of raw changes (no moved events and prevName not specified yet), and a set of\n * EventRegistrations that should be notified of these changes, generate the actual events to be raised.\n *\n * Notes:\n * - child_moved events will be synthesized at this time for any child_changed events that affect\n * our index.\n * - prevName will be calculated based on the index ordering.\n */\nexport function eventGeneratorGenerateEventsForChanges(\n eventGenerator: EventGenerator,\n changes: Change[],\n eventCache: Node,\n eventRegistrations: EventRegistration[]\n): Event[] {\n const events: Event[] = [];\n const moves: Change[] = [];\n\n changes.forEach(change => {\n if (\n change.type === ChangeType.CHILD_CHANGED &&\n eventGenerator.index_.indexedValueChanged(\n change.oldSnap as Node,\n change.snapshotNode\n )\n ) {\n moves.push(changeChildMoved(change.childName, change.snapshotNode));\n }\n });\n\n eventGeneratorGenerateEventsForType(\n eventGenerator,\n events,\n ChangeType.CHILD_REMOVED,\n changes,\n eventRegistrations,\n eventCache\n );\n eventGeneratorGenerateEventsForType(\n eventGenerator,\n events,\n ChangeType.CHILD_ADDED,\n changes,\n eventRegistrations,\n eventCache\n );\n eventGeneratorGenerateEventsForType(\n eventGenerator,\n events,\n ChangeType.CHILD_MOVED,\n moves,\n eventRegistrations,\n eventCache\n );\n eventGeneratorGenerateEventsForType(\n eventGenerator,\n events,\n ChangeType.CHILD_CHANGED,\n changes,\n eventRegistrations,\n eventCache\n );\n eventGeneratorGenerateEventsForType(\n eventGenerator,\n events,\n ChangeType.VALUE,\n changes,\n eventRegistrations,\n eventCache\n );\n\n return events;\n}\n\n/**\n * Given changes of a single change type, generate the corresponding events.\n */\nfunction eventGeneratorGenerateEventsForType(\n eventGenerator: EventGenerator,\n events: Event[],\n eventType: string,\n changes: Change[],\n registrations: EventRegistration[],\n eventCache: Node\n) {\n const filteredChanges = changes.filter(change => change.type === eventType);\n\n filteredChanges.sort((a, b) =>\n eventGeneratorCompareChanges(eventGenerator, a, b)\n );\n filteredChanges.forEach(change => {\n const materializedChange = eventGeneratorMaterializeSingleChange(\n eventGenerator,\n change,\n eventCache\n );\n registrations.forEach(registration => {\n if (registration.respondsTo(change.type)) {\n events.push(\n registration.createEvent(materializedChange, eventGenerator.query_)\n );\n }\n });\n });\n}\n\nfunction eventGeneratorMaterializeSingleChange(\n eventGenerator: EventGenerator,\n change: Change,\n eventCache: Node\n): Change {\n if (change.type === 'value' || change.type === 'child_removed') {\n return change;\n } else {\n change.prevName = eventCache.getPredecessorChildName(\n change.childName,\n change.snapshotNode,\n eventGenerator.index_\n );\n return change;\n }\n}\n\nfunction eventGeneratorCompareChanges(\n eventGenerator: EventGenerator,\n a: Change,\n b: Change\n) {\n if (a.childName == null || b.childName == null) {\n throw assertionError('Should only compare child_ events.');\n }\n const aWrapped = 
new NamedNode(a.childName, a.snapshotNode);\n const bWrapped = new NamedNode(b.childName, b.snapshotNode);\n return eventGenerator.index_.compare(aWrapped, bWrapped);\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Node } from '../snap/Node';\n\nimport { CacheNode } from './CacheNode';\n\n/**\n * Stores the data we have cached for a view.\n *\n * serverSnap is the cached server data, eventSnap is the cached event data (server data plus any local writes).\n */\nexport interface ViewCache {\n readonly eventCache: CacheNode;\n readonly serverCache: CacheNode;\n}\n\nexport function newViewCache(\n eventCache: CacheNode,\n serverCache: CacheNode\n): ViewCache {\n return { eventCache, serverCache };\n}\n\nexport function viewCacheUpdateEventSnap(\n viewCache: ViewCache,\n eventSnap: Node,\n complete: boolean,\n filtered: boolean\n): ViewCache {\n return newViewCache(\n new CacheNode(eventSnap, complete, filtered),\n viewCache.serverCache\n );\n}\n\nexport function viewCacheUpdateServerSnap(\n viewCache: ViewCache,\n serverSnap: Node,\n complete: boolean,\n filtered: boolean\n): ViewCache {\n return newViewCache(\n viewCache.eventCache,\n new CacheNode(serverSnap, complete, filtered)\n );\n}\n\nexport function viewCacheGetCompleteEventSnap(\n viewCache: ViewCache\n): Node | null {\n return viewCache.eventCache.isFullyInitialized()\n ? viewCache.eventCache.getNode()\n : null;\n}\n\nexport function viewCacheGetCompleteServerSnap(\n viewCache: ViewCache\n): Node | null {\n return viewCache.serverCache.isFullyInitialized()\n ? 
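The event generator above raises events in a fixed per-type order (child_removed, then child_added, child_moved, child_changed, and finally value), sorting the changes of each type with the active index's comparator before notifying registrations. A stripped-down sketch of that grouping and ordering step; the types and names below are simplified placeholders, not the SDK's Change or EventRegistration types.

```ts
type ChangeKind = 'child_removed' | 'child_added' | 'child_moved' | 'child_changed' | 'value';

interface SimpleChange {
  type: ChangeKind;
  childName?: string;
}

// Fixed order in which event types are raised.
const EVENT_ORDER: ChangeKind[] = [
  'child_removed',
  'child_added',
  'child_moved',
  'child_changed',
  'value'
];

function orderChanges(
  changes: SimpleChange[],
  compare: (a: SimpleChange, b: SimpleChange) => number
): SimpleChange[] {
  const ordered: SimpleChange[] = [];
  for (const kind of EVENT_ORDER) {
    const ofKind = changes.filter(c => c.type === kind);
    ofKind.sort(compare); // stand-in for the index comparator
    ordered.push(...ofKind);
  }
  return ordered;
}

// Example: compare by child name as a stand-in for the index ordering.
const byName = (a: SimpleChange, b: SimpleChange) =>
  (a.childName ?? '').localeCompare(b.childName ?? '');
console.log(
  orderChanges(
    [
      { type: 'child_added', childName: 'b' },
      { type: 'child_removed', childName: 'a' },
      { type: 'child_added', childName: 'a' }
    ],
    byName
  )
);
// -> child_removed:a, child_added:a, child_added:b
```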
viewCache.serverCache.getNode()\n : null;\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n newEmptyPath,\n Path,\n pathChild,\n pathGetFront,\n pathIsEmpty,\n pathPopFront\n} from './Path';\nimport { SortedMap } from './SortedMap';\nimport { each, stringCompare } from './util';\n\nlet emptyChildrenSingleton: SortedMap>;\n\n/**\n * Singleton empty children collection.\n *\n */\nconst EmptyChildren = (): SortedMap> => {\n if (!emptyChildrenSingleton) {\n emptyChildrenSingleton = new SortedMap>(\n stringCompare\n );\n }\n return emptyChildrenSingleton;\n};\n\n/**\n * A tree with immutable elements.\n */\nexport class ImmutableTree {\n static fromObject(obj: { [k: string]: T }): ImmutableTree {\n let tree: ImmutableTree = new ImmutableTree(null);\n each(obj, (childPath: string, childSnap: T) => {\n tree = tree.set(new Path(childPath), childSnap);\n });\n return tree;\n }\n\n constructor(\n public readonly value: T | null,\n public readonly children: SortedMap<\n string,\n ImmutableTree\n > = EmptyChildren()\n ) {}\n\n /**\n * True if the value is empty and there are no children\n */\n isEmpty(): boolean {\n return this.value === null && this.children.isEmpty();\n }\n\n /**\n * Given a path and predicate, return the first node and the path to that node\n * where the predicate returns true.\n *\n * TODO Do a perf test -- If we're creating a bunch of `{path: value:}`\n * objects on the way back out, it may be better to pass down a pathSoFar obj.\n *\n * @param relativePath - The remainder of the path\n * @param predicate - The predicate to satisfy to return a node\n */\n findRootMostMatchingPathAndValue(\n relativePath: Path,\n predicate: (a: T) => boolean\n ): { path: Path; value: T } | null {\n if (this.value != null && predicate(this.value)) {\n return { path: newEmptyPath(), value: this.value };\n } else {\n if (pathIsEmpty(relativePath)) {\n return null;\n } else {\n const front = pathGetFront(relativePath);\n const child = this.children.get(front);\n if (child !== null) {\n const childExistingPathAndValue =\n child.findRootMostMatchingPathAndValue(\n pathPopFront(relativePath),\n predicate\n );\n if (childExistingPathAndValue != null) {\n const fullPath = pathChild(\n new Path(front),\n childExistingPathAndValue.path\n );\n return { path: fullPath, value: childExistingPathAndValue.value };\n } else {\n return null;\n }\n } else {\n return null;\n }\n }\n }\n }\n\n /**\n * Find, if it exists, the shortest subpath of the given path that points a defined\n * value in the tree\n */\n findRootMostValueAndPath(\n relativePath: Path\n ): { path: Path; value: T } | null {\n return this.findRootMostMatchingPathAndValue(relativePath, () => true);\n }\n\n /**\n * @returns The subtree at the given path\n */\n subtree(relativePath: Path): ImmutableTree {\n if (pathIsEmpty(relativePath)) {\n return this;\n } else {\n const front = pathGetFront(relativePath);\n const childTree = this.children.get(front);\n if (childTree 
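ViewCache above is an immutable pair of caches: "updating" one side builds a new pair that reuses the other side unchanged, and a "complete" snapshot is only handed out when the corresponding cache is fully initialized. A tiny sketch of that pattern with a simplified cache type; all names here are invented for the example.

```ts
interface SimpleCache<T> {
  node: T;
  complete: boolean;
}

interface SimpleViewCache<T> {
  readonly eventCache: SimpleCache<T>;
  readonly serverCache: SimpleCache<T>;
}

// Replacing one side returns a new pair; the other side is shared by reference.
function updateEventCache<T>(view: SimpleViewCache<T>, node: T, complete: boolean): SimpleViewCache<T> {
  return { eventCache: { node, complete }, serverCache: view.serverCache };
}

// Only a fully initialized cache yields a "complete" snapshot.
function completeServerNode<T>(view: SimpleViewCache<T>): T | null {
  return view.serverCache.complete ? view.serverCache.node : null;
}

const view: SimpleViewCache<string> = {
  eventCache: { node: 'local', complete: false },
  serverCache: { node: 'server', complete: true }
};
console.log(updateEventCache(view, 'merged', true).serverCache === view.serverCache); // true
console.log(completeServerNode(view)); // 'server'
```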
!== null) {\n return childTree.subtree(pathPopFront(relativePath));\n } else {\n return new ImmutableTree(null);\n }\n }\n }\n\n /**\n * Sets a value at the specified path.\n *\n * @param relativePath - Path to set value at.\n * @param toSet - Value to set.\n * @returns Resulting tree.\n */\n set(relativePath: Path, toSet: T | null): ImmutableTree {\n if (pathIsEmpty(relativePath)) {\n return new ImmutableTree(toSet, this.children);\n } else {\n const front = pathGetFront(relativePath);\n const child = this.children.get(front) || new ImmutableTree(null);\n const newChild = child.set(pathPopFront(relativePath), toSet);\n const newChildren = this.children.insert(front, newChild);\n return new ImmutableTree(this.value, newChildren);\n }\n }\n\n /**\n * Removes the value at the specified path.\n *\n * @param relativePath - Path to value to remove.\n * @returns Resulting tree.\n */\n remove(relativePath: Path): ImmutableTree {\n if (pathIsEmpty(relativePath)) {\n if (this.children.isEmpty()) {\n return new ImmutableTree(null);\n } else {\n return new ImmutableTree(null, this.children);\n }\n } else {\n const front = pathGetFront(relativePath);\n const child = this.children.get(front);\n if (child) {\n const newChild = child.remove(pathPopFront(relativePath));\n let newChildren;\n if (newChild.isEmpty()) {\n newChildren = this.children.remove(front);\n } else {\n newChildren = this.children.insert(front, newChild);\n }\n if (this.value === null && newChildren.isEmpty()) {\n return new ImmutableTree(null);\n } else {\n return new ImmutableTree(this.value, newChildren);\n }\n } else {\n return this;\n }\n }\n }\n\n /**\n * Gets a value from the tree.\n *\n * @param relativePath - Path to get value for.\n * @returns Value at path, or null.\n */\n get(relativePath: Path): T | null {\n if (pathIsEmpty(relativePath)) {\n return this.value;\n } else {\n const front = pathGetFront(relativePath);\n const child = this.children.get(front);\n if (child) {\n return child.get(pathPopFront(relativePath));\n } else {\n return null;\n }\n }\n }\n\n /**\n * Replace the subtree at the specified path with the given new tree.\n *\n * @param relativePath - Path to replace subtree for.\n * @param newTree - New tree.\n * @returns Resulting tree.\n */\n setTree(relativePath: Path, newTree: ImmutableTree): ImmutableTree {\n if (pathIsEmpty(relativePath)) {\n return newTree;\n } else {\n const front = pathGetFront(relativePath);\n const child = this.children.get(front) || new ImmutableTree(null);\n const newChild = child.setTree(pathPopFront(relativePath), newTree);\n let newChildren;\n if (newChild.isEmpty()) {\n newChildren = this.children.remove(front);\n } else {\n newChildren = this.children.insert(front, newChild);\n }\n return new ImmutableTree(this.value, newChildren);\n }\n }\n\n /**\n * Performs a depth first fold on this tree. 
Transforms a tree into a single\n * value, given a function that operates on the path to a node, an optional\n * current value, and a map of child names to folded subtrees\n */\n fold(fn: (path: Path, value: T, children: { [k: string]: V }) => V): V {\n return this.fold_(newEmptyPath(), fn);\n }\n\n /**\n * Recursive helper for public-facing fold() method\n */\n private fold_(\n pathSoFar: Path,\n fn: (path: Path, value: T | null, children: { [k: string]: V }) => V\n ): V {\n const accum: { [k: string]: V } = {};\n this.children.inorderTraversal(\n (childKey: string, childTree: ImmutableTree) => {\n accum[childKey] = childTree.fold_(pathChild(pathSoFar, childKey), fn);\n }\n );\n return fn(pathSoFar, this.value, accum);\n }\n\n /**\n * Find the first matching value on the given path. Return the result of applying f to it.\n */\n findOnPath(path: Path, f: (path: Path, value: T) => V | null): V | null {\n return this.findOnPath_(path, newEmptyPath(), f);\n }\n\n private findOnPath_(\n pathToFollow: Path,\n pathSoFar: Path,\n f: (path: Path, value: T) => V | null\n ): V | null {\n const result = this.value ? f(pathSoFar, this.value) : false;\n if (result) {\n return result;\n } else {\n if (pathIsEmpty(pathToFollow)) {\n return null;\n } else {\n const front = pathGetFront(pathToFollow)!;\n const nextChild = this.children.get(front);\n if (nextChild) {\n return nextChild.findOnPath_(\n pathPopFront(pathToFollow),\n pathChild(pathSoFar, front),\n f\n );\n } else {\n return null;\n }\n }\n }\n }\n\n foreachOnPath(\n path: Path,\n f: (path: Path, value: T) => void\n ): ImmutableTree {\n return this.foreachOnPath_(path, newEmptyPath(), f);\n }\n\n private foreachOnPath_(\n pathToFollow: Path,\n currentRelativePath: Path,\n f: (path: Path, value: T) => void\n ): ImmutableTree {\n if (pathIsEmpty(pathToFollow)) {\n return this;\n } else {\n if (this.value) {\n f(currentRelativePath, this.value);\n }\n const front = pathGetFront(pathToFollow);\n const nextChild = this.children.get(front);\n if (nextChild) {\n return nextChild.foreachOnPath_(\n pathPopFront(pathToFollow),\n pathChild(currentRelativePath, front),\n f\n );\n } else {\n return new ImmutableTree(null);\n }\n }\n }\n\n /**\n * Calls the given function for each node in the tree that has a value.\n *\n * @param f - A function to be called with the path from the root of the tree to\n * a node, and the value at that node. 
Called in depth-first order.\n */\n foreach(f: (path: Path, value: T) => void) {\n this.foreach_(newEmptyPath(), f);\n }\n\n private foreach_(\n currentRelativePath: Path,\n f: (path: Path, value: T) => void\n ) {\n this.children.inorderTraversal((childName, childTree) => {\n childTree.foreach_(pathChild(currentRelativePath, childName), f);\n });\n if (this.value) {\n f(currentRelativePath, this.value);\n }\n }\n\n foreachChild(f: (name: string, value: T) => void) {\n this.children.inorderTraversal(\n (childName: string, childTree: ImmutableTree) => {\n if (childTree.value) {\n f(childName, childTree.value);\n }\n }\n );\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { assert } from '@firebase/util';\n\nimport { ChildrenNode } from './snap/ChildrenNode';\nimport { PRIORITY_INDEX } from './snap/indexes/PriorityIndex';\nimport { NamedNode, Node } from './snap/Node';\nimport { ImmutableTree } from './util/ImmutableTree';\nimport {\n newEmptyPath,\n newRelativePath,\n Path,\n pathChild,\n pathIsEmpty\n} from './util/Path';\nimport { each } from './util/util';\n\n/**\n * This class holds a collection of writes that can be applied to nodes in unison. It abstracts away the logic with\n * dealing with priority writes and multiple nested writes. At any given path there is only allowed to be one write\n * modifying that path. Any write to an existing path or shadowing an existing path will modify that existing write\n * to reflect the write added.\n */\nexport class CompoundWrite {\n constructor(public writeTree_: ImmutableTree) {}\n\n static empty(): CompoundWrite {\n return new CompoundWrite(new ImmutableTree(null));\n }\n}\n\nexport function compoundWriteAddWrite(\n compoundWrite: CompoundWrite,\n path: Path,\n node: Node\n): CompoundWrite {\n if (pathIsEmpty(path)) {\n return new CompoundWrite(new ImmutableTree(node));\n } else {\n const rootmost = compoundWrite.writeTree_.findRootMostValueAndPath(path);\n if (rootmost != null) {\n const rootMostPath = rootmost.path;\n let value = rootmost.value;\n const relativePath = newRelativePath(rootMostPath, path);\n value = value.updateChild(relativePath, node);\n return new CompoundWrite(\n compoundWrite.writeTree_.set(rootMostPath, value)\n );\n } else {\n const subtree = new ImmutableTree(node);\n const newWriteTree = compoundWrite.writeTree_.setTree(path, subtree);\n return new CompoundWrite(newWriteTree);\n }\n }\n}\n\nexport function compoundWriteAddWrites(\n compoundWrite: CompoundWrite,\n path: Path,\n updates: { [name: string]: Node }\n): CompoundWrite {\n let newWrite = compoundWrite;\n each(updates, (childKey: string, node: Node) => {\n newWrite = compoundWriteAddWrite(newWrite, pathChild(path, childKey), node);\n });\n return newWrite;\n}\n\n/**\n * Will remove a write at the given path and deeper paths. 
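ImmutableTree above is a persistent, structure-sharing tree keyed by path segments: `set` and `remove` return new trees and reuse untouched children. A minimal sketch of the same idea over plain `Map`s and string-array paths instead of the SDK's Path and SortedMap types; `MiniTree` is an invented name.

```ts
// A tiny persistent path-keyed tree: set/get never mutate, they return new nodes.
class MiniTree<T> {
  constructor(
    readonly value: T | null = null,
    readonly children: ReadonlyMap<string, MiniTree<T>> = new Map()
  ) {}

  set(path: string[], value: T | null): MiniTree<T> {
    if (path.length === 0) {
      return new MiniTree(value, this.children);
    }
    const [front, ...rest] = path;
    const child = this.children.get(front) ?? new MiniTree<T>();
    const newChildren = new Map(this.children); // copy only this level's child map
    newChildren.set(front, child.set(rest, value));
    return new MiniTree(this.value, newChildren);
  }

  get(path: string[]): T | null {
    if (path.length === 0) {
      return this.value;
    }
    const child = this.children.get(path[0]);
    return child ? child.get(path.slice(1)) : null;
  }
}

// Example: the original tree is untouched by later sets.
const empty = new MiniTree<number>();
const withScore = empty.set(['users', 'alice', 'score'], 10);
console.log(withScore.get(['users', 'alice', 'score'])); // 10
console.log(empty.get(['users', 'alice', 'score']));     // null
```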
This will not modify a write at a higher\n * location, which must be removed by calling this method with that path.\n *\n * @param compoundWrite - The CompoundWrite to remove.\n * @param path - The path at which a write and all deeper writes should be removed\n * @returns The new CompoundWrite with the removed path\n */\nexport function compoundWriteRemoveWrite(\n compoundWrite: CompoundWrite,\n path: Path\n): CompoundWrite {\n if (pathIsEmpty(path)) {\n return CompoundWrite.empty();\n } else {\n const newWriteTree = compoundWrite.writeTree_.setTree(\n path,\n new ImmutableTree(null)\n );\n return new CompoundWrite(newWriteTree);\n }\n}\n\n/**\n * Returns whether this CompoundWrite will fully overwrite a node at a given location and can therefore be\n * considered \"complete\".\n *\n * @param compoundWrite - The CompoundWrite to check.\n * @param path - The path to check for\n * @returns Whether there is a complete write at that path\n */\nexport function compoundWriteHasCompleteWrite(\n compoundWrite: CompoundWrite,\n path: Path\n): boolean {\n return compoundWriteGetCompleteNode(compoundWrite, path) != null;\n}\n\n/**\n * Returns a node for a path if and only if the node is a \"complete\" overwrite at that path. This will not aggregate\n * writes from deeper paths, but will return child nodes from a more shallow path.\n *\n * @param compoundWrite - The CompoundWrite to get the node from.\n * @param path - The path to get a complete write\n * @returns The node if complete at that path, or null otherwise.\n */\nexport function compoundWriteGetCompleteNode(\n compoundWrite: CompoundWrite,\n path: Path\n): Node | null {\n const rootmost = compoundWrite.writeTree_.findRootMostValueAndPath(path);\n if (rootmost != null) {\n return compoundWrite.writeTree_\n .get(rootmost.path)\n .getChild(newRelativePath(rootmost.path, path));\n } else {\n return null;\n }\n}\n\n/**\n * Returns all children that are guaranteed to be a complete overwrite.\n *\n * @param compoundWrite - The CompoundWrite to get children from.\n * @returns A list of all complete children.\n */\nexport function compoundWriteGetCompleteChildren(\n compoundWrite: CompoundWrite\n): NamedNode[] {\n const children: NamedNode[] = [];\n const node = compoundWrite.writeTree_.value;\n if (node != null) {\n // If it's a leaf node, it has no children; so nothing to do.\n if (!node.isLeafNode()) {\n (node as ChildrenNode).forEachChild(\n PRIORITY_INDEX,\n (childName, childNode) => {\n children.push(new NamedNode(childName, childNode));\n }\n );\n }\n } else {\n compoundWrite.writeTree_.children.inorderTraversal(\n (childName, childTree) => {\n if (childTree.value != null) {\n children.push(new NamedNode(childName, childTree.value));\n }\n }\n );\n }\n return children;\n}\n\nexport function compoundWriteChildCompoundWrite(\n compoundWrite: CompoundWrite,\n path: Path\n): CompoundWrite {\n if (pathIsEmpty(path)) {\n return compoundWrite;\n } else {\n const shadowingNode = compoundWriteGetCompleteNode(compoundWrite, path);\n if (shadowingNode != null) {\n return new CompoundWrite(new ImmutableTree(shadowingNode));\n } else {\n return new CompoundWrite(compoundWrite.writeTree_.subtree(path));\n }\n }\n}\n\n/**\n * Returns true if this CompoundWrite is empty and therefore does not modify any nodes.\n * @returns Whether this CompoundWrite is empty\n */\nexport function compoundWriteIsEmpty(compoundWrite: CompoundWrite): boolean {\n return compoundWrite.writeTree_.isEmpty();\n}\n\n/**\n * Applies this CompoundWrite to a node. 
The node is returned with all writes from this CompoundWrite applied to the\n * node\n * @param node - The node to apply this CompoundWrite to\n * @returns The node with all writes applied\n */\nexport function compoundWriteApply(\n compoundWrite: CompoundWrite,\n node: Node\n): Node {\n return applySubtreeWrite(newEmptyPath(), compoundWrite.writeTree_, node);\n}\n\nfunction applySubtreeWrite(\n relativePath: Path,\n writeTree: ImmutableTree,\n node: Node\n): Node {\n if (writeTree.value != null) {\n // Since there a write is always a leaf, we're done here\n return node.updateChild(relativePath, writeTree.value);\n } else {\n let priorityWrite = null;\n writeTree.children.inorderTraversal((childKey, childTree) => {\n if (childKey === '.priority') {\n // Apply priorities at the end so we don't update priorities for either empty nodes or forget\n // to apply priorities to empty nodes that are later filled\n assert(\n childTree.value !== null,\n 'Priority writes must always be leaf nodes'\n );\n priorityWrite = childTree.value;\n } else {\n node = applySubtreeWrite(\n pathChild(relativePath, childKey),\n childTree,\n node\n );\n }\n });\n // If there was a priority write, we only apply it if the node is not empty\n if (!node.getChild(relativePath).isEmpty() && priorityWrite !== null) {\n node = node.updateChild(\n pathChild(relativePath, '.priority'),\n priorityWrite\n );\n }\n return node;\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { assert, assertionError, safeGet } from '@firebase/util';\n\nimport {\n CompoundWrite,\n compoundWriteAddWrite,\n compoundWriteAddWrites,\n compoundWriteApply,\n compoundWriteChildCompoundWrite,\n compoundWriteGetCompleteChildren,\n compoundWriteGetCompleteNode,\n compoundWriteHasCompleteWrite,\n compoundWriteIsEmpty,\n compoundWriteRemoveWrite\n} from './CompoundWrite';\nimport { ChildrenNode } from './snap/ChildrenNode';\nimport { Index } from './snap/indexes/Index';\nimport { PRIORITY_INDEX } from './snap/indexes/PriorityIndex';\nimport { NamedNode, Node } from './snap/Node';\nimport {\n newEmptyPath,\n newRelativePath,\n Path,\n pathChild,\n pathContains,\n pathGetFront,\n pathIsEmpty,\n pathPopFront\n} from './util/Path';\nimport { each } from './util/util';\nimport { CacheNode } from './view/CacheNode';\n\n/**\n * Defines a single user-initiated write operation. May be the result of a set(), transaction(), or update() call. In\n * the case of a set() or transaction, snap wil be non-null. In the case of an update(), children will be non-null.\n */\nexport interface WriteRecord {\n writeId: number;\n path: Path;\n snap?: Node | null;\n children?: { [k: string]: Node } | null;\n visible: boolean;\n}\n\n/**\n * Create a new WriteTreeRef for the given path. 
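The central invariant of CompoundWrite is that a write at a path shadows everything beneath it, so a "complete" node exists for a path exactly when a write sits at that path or at an ancestor, and the value is then read out of that rootmost write. A simplified sketch of that rootmost-ancestor lookup over a flat map keyed by slash-joined paths; all names here are invented, and "no write" and "a write whose value is null" are deliberately conflated for brevity.

```ts
type JsonValue = null | string | number | boolean | { [key: string]: JsonValue };

// Writes keyed by slash-joined paths, e.g. 'users/alice' -> { name: 'Alice' }.
type WriteMap = Map<string, JsonValue>;

// Walk down into a written value along the remaining path segments.
function getChild(value: JsonValue, segments: string[]): JsonValue {
  let current = value;
  for (const segment of segments) {
    if (current === null || typeof current !== 'object') {
      return null;
    }
    current = current[segment] ?? null;
  }
  return current;
}

// Returns the value a path would have if a write at the path itself or at an
// ancestor fully overwrites it; otherwise null (no complete write).
function completeNodeAt(writes: WriteMap, path: string): JsonValue {
  const segments = path === '' ? [] : path.split('/');
  for (let i = segments.length; i >= 0; i--) {
    const ancestor = segments.slice(0, i).join('/');
    if (writes.has(ancestor)) {
      return getChild(writes.get(ancestor)!, segments.slice(i));
    }
  }
  return null;
}

// Example: a write at 'users' shadows 'users/alice/name'.
const writes: WriteMap = new Map();
writes.set('users', { alice: { name: 'Alice' } });
console.log(completeNodeAt(writes, 'users/alice/name')); // 'Alice'
console.log(completeNodeAt(writes, 'posts'));            // null
```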
For use with a new sync point at the given path.\n *\n */\nexport function writeTreeChildWrites(\n writeTree: WriteTree,\n path: Path\n): WriteTreeRef {\n return newWriteTreeRef(path, writeTree);\n}\n\n/**\n * Record a new overwrite from user code.\n *\n * @param visible - This is set to false by some transactions. It should be excluded from event caches\n */\nexport function writeTreeAddOverwrite(\n writeTree: WriteTree,\n path: Path,\n snap: Node,\n writeId: number,\n visible?: boolean\n) {\n assert(\n writeId > writeTree.lastWriteId,\n 'Stacking an older write on top of newer ones'\n );\n if (visible === undefined) {\n visible = true;\n }\n writeTree.allWrites.push({\n path,\n snap,\n writeId,\n visible\n });\n\n if (visible) {\n writeTree.visibleWrites = compoundWriteAddWrite(\n writeTree.visibleWrites,\n path,\n snap\n );\n }\n writeTree.lastWriteId = writeId;\n}\n\n/**\n * Record a new merge from user code.\n */\nexport function writeTreeAddMerge(\n writeTree: WriteTree,\n path: Path,\n changedChildren: { [k: string]: Node },\n writeId: number\n) {\n assert(\n writeId > writeTree.lastWriteId,\n 'Stacking an older merge on top of newer ones'\n );\n writeTree.allWrites.push({\n path,\n children: changedChildren,\n writeId,\n visible: true\n });\n\n writeTree.visibleWrites = compoundWriteAddWrites(\n writeTree.visibleWrites,\n path,\n changedChildren\n );\n writeTree.lastWriteId = writeId;\n}\n\nexport function writeTreeGetWrite(\n writeTree: WriteTree,\n writeId: number\n): WriteRecord | null {\n for (let i = 0; i < writeTree.allWrites.length; i++) {\n const record = writeTree.allWrites[i];\n if (record.writeId === writeId) {\n return record;\n }\n }\n return null;\n}\n\n/**\n * Remove a write (either an overwrite or merge) that has been successfully acknowledge by the server. Recalculates\n * the tree if necessary. We return true if it may have been visible, meaning views need to reevaluate.\n *\n * @returns true if the write may have been visible (meaning we'll need to reevaluate / raise\n * events as a result).\n */\nexport function writeTreeRemoveWrite(\n writeTree: WriteTree,\n writeId: number\n): boolean {\n // Note: disabling this check. 
It could be a transaction that preempted another transaction, and thus was applied\n // out of order.\n //const validClear = revert || this.allWrites_.length === 0 || writeId <= this.allWrites_[0].writeId;\n //assert(validClear, \"Either we don't have this write, or it's the first one in the queue\");\n\n const idx = writeTree.allWrites.findIndex(s => {\n return s.writeId === writeId;\n });\n assert(idx >= 0, 'removeWrite called with nonexistent writeId.');\n const writeToRemove = writeTree.allWrites[idx];\n writeTree.allWrites.splice(idx, 1);\n\n let removedWriteWasVisible = writeToRemove.visible;\n let removedWriteOverlapsWithOtherWrites = false;\n\n let i = writeTree.allWrites.length - 1;\n\n while (removedWriteWasVisible && i >= 0) {\n const currentWrite = writeTree.allWrites[i];\n if (currentWrite.visible) {\n if (\n i >= idx &&\n writeTreeRecordContainsPath_(currentWrite, writeToRemove.path)\n ) {\n // The removed write was completely shadowed by a subsequent write.\n removedWriteWasVisible = false;\n } else if (pathContains(writeToRemove.path, currentWrite.path)) {\n // Either we're covering some writes or they're covering part of us (depending on which came first).\n removedWriteOverlapsWithOtherWrites = true;\n }\n }\n i--;\n }\n\n if (!removedWriteWasVisible) {\n return false;\n } else if (removedWriteOverlapsWithOtherWrites) {\n // There's some shadowing going on. Just rebuild the visible writes from scratch.\n writeTreeResetTree_(writeTree);\n return true;\n } else {\n // There's no shadowing. We can safely just remove the write(s) from visibleWrites.\n if (writeToRemove.snap) {\n writeTree.visibleWrites = compoundWriteRemoveWrite(\n writeTree.visibleWrites,\n writeToRemove.path\n );\n } else {\n const children = writeToRemove.children;\n each(children, (childName: string) => {\n writeTree.visibleWrites = compoundWriteRemoveWrite(\n writeTree.visibleWrites,\n pathChild(writeToRemove.path, childName)\n );\n });\n }\n return true;\n }\n}\n\nfunction writeTreeRecordContainsPath_(\n writeRecord: WriteRecord,\n path: Path\n): boolean {\n if (writeRecord.snap) {\n return pathContains(writeRecord.path, path);\n } else {\n for (const childName in writeRecord.children) {\n if (\n writeRecord.children.hasOwnProperty(childName) &&\n pathContains(pathChild(writeRecord.path, childName), path)\n ) {\n return true;\n }\n }\n return false;\n }\n}\n\n/**\n * Re-layer the writes and merges into a tree so we can efficiently calculate event snapshots\n */\nfunction writeTreeResetTree_(writeTree: WriteTree) {\n writeTree.visibleWrites = writeTreeLayerTree_(\n writeTree.allWrites,\n writeTreeDefaultFilter_,\n newEmptyPath()\n );\n if (writeTree.allWrites.length > 0) {\n writeTree.lastWriteId =\n writeTree.allWrites[writeTree.allWrites.length - 1].writeId;\n } else {\n writeTree.lastWriteId = -1;\n }\n}\n\n/**\n * The default filter used when constructing the tree. Keep everything that's visible.\n */\nfunction writeTreeDefaultFilter_(write: WriteRecord) {\n return write.visible;\n}\n\n/**\n * Static method. 
Given an array of WriteRecords, a filter for which ones to include, and a path, construct the tree of\n * event data at that path.\n */\nfunction writeTreeLayerTree_(\n writes: WriteRecord[],\n filter: (w: WriteRecord) => boolean,\n treeRoot: Path\n): CompoundWrite {\n let compoundWrite = CompoundWrite.empty();\n for (let i = 0; i < writes.length; ++i) {\n const write = writes[i];\n // Theory, a later set will either:\n // a) abort a relevant transaction, so no need to worry about excluding it from calculating that transaction\n // b) not be relevant to a transaction (separate branch), so again will not affect the data for that transaction\n if (filter(write)) {\n const writePath = write.path;\n let relativePath: Path;\n if (write.snap) {\n if (pathContains(treeRoot, writePath)) {\n relativePath = newRelativePath(treeRoot, writePath);\n compoundWrite = compoundWriteAddWrite(\n compoundWrite,\n relativePath,\n write.snap\n );\n } else if (pathContains(writePath, treeRoot)) {\n relativePath = newRelativePath(writePath, treeRoot);\n compoundWrite = compoundWriteAddWrite(\n compoundWrite,\n newEmptyPath(),\n write.snap.getChild(relativePath)\n );\n } else {\n // There is no overlap between root path and write path, ignore write\n }\n } else if (write.children) {\n if (pathContains(treeRoot, writePath)) {\n relativePath = newRelativePath(treeRoot, writePath);\n compoundWrite = compoundWriteAddWrites(\n compoundWrite,\n relativePath,\n write.children\n );\n } else if (pathContains(writePath, treeRoot)) {\n relativePath = newRelativePath(writePath, treeRoot);\n if (pathIsEmpty(relativePath)) {\n compoundWrite = compoundWriteAddWrites(\n compoundWrite,\n newEmptyPath(),\n write.children\n );\n } else {\n const child = safeGet(write.children, pathGetFront(relativePath));\n if (child) {\n // There exists a child in this node that matches the root path\n const deepNode = child.getChild(pathPopFront(relativePath));\n compoundWrite = compoundWriteAddWrite(\n compoundWrite,\n newEmptyPath(),\n deepNode\n );\n }\n }\n } else {\n // There is no overlap between root path and write path, ignore write\n }\n } else {\n throw assertionError('WriteRecord should have .snap or .children');\n }\n }\n }\n return compoundWrite;\n}\n\n/**\n * Return a complete snapshot for the given path if there's visible write data at that path, else null.\n * No server data is considered.\n *\n */\nexport function writeTreeGetCompleteWriteData(\n writeTree: WriteTree,\n path: Path\n): Node | null {\n return compoundWriteGetCompleteNode(writeTree.visibleWrites, path);\n}\n\n/**\n * Given optional, underlying server data, and an optional set of constraints (exclude some sets, include hidden\n * writes), attempt to calculate a complete snapshot for the given path\n *\n * @param writeIdsToExclude - An optional set to be excluded\n * @param includeHiddenWrites - Defaults to false, whether or not to layer on writes with visible set to false\n */\nexport function writeTreeCalcCompleteEventCache(\n writeTree: WriteTree,\n treePath: Path,\n completeServerCache: Node | null,\n writeIdsToExclude?: number[],\n includeHiddenWrites?: boolean\n): Node | null {\n if (!writeIdsToExclude && !includeHiddenWrites) {\n const shadowingNode = compoundWriteGetCompleteNode(\n writeTree.visibleWrites,\n treePath\n );\n if (shadowingNode != null) {\n return shadowingNode;\n } else {\n const subMerge = compoundWriteChildCompoundWrite(\n writeTree.visibleWrites,\n treePath\n );\n if (compoundWriteIsEmpty(subMerge)) {\n return completeServerCache;\n } else if 
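Layering a write relative to a tree root uses two-sided containment, as in `writeTreeLayerTree_` above: if the root contains the write path, the write lands at the relative path below the root; if the write path contains the root, only the written snapshot's child at the inverse relative path is relevant; disjoint paths are ignored. A compact sketch of just that placement decision with array paths; the helper and type names are invented for the example.

```ts
// True if `ancestor` is a prefix of `path` (every path contains itself).
function pathContains(ancestor: string[], path: string[]): boolean {
  return ancestor.length <= path.length &&
    ancestor.every((segment, i) => segment === path[i]);
}

type Placement =
  | { kind: 'at-relative-path'; relativePath: string[] } // write sits at or below the root
  | { kind: 'child-of-snap'; relativePath: string[] }    // root sits below the write
  | { kind: 'ignore' };                                  // disjoint, irrelevant to this root

function placeWrite(treeRoot: string[], writePath: string[]): Placement {
  if (pathContains(treeRoot, writePath)) {
    return { kind: 'at-relative-path', relativePath: writePath.slice(treeRoot.length) };
  }
  if (pathContains(writePath, treeRoot)) {
    return { kind: 'child-of-snap', relativePath: treeRoot.slice(writePath.length) };
  }
  return { kind: 'ignore' };
}

console.log(placeWrite(['users'], ['users', 'alice'])); // at-relative-path ['alice']
console.log(placeWrite(['users', 'alice'], ['users'])); // child-of-snap ['alice']
console.log(placeWrite(['users'], ['posts']));          // ignore
```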
(\n completeServerCache == null &&\n !compoundWriteHasCompleteWrite(subMerge, newEmptyPath())\n ) {\n // We wouldn't have a complete snapshot, since there's no underlying data and no complete shadow\n return null;\n } else {\n const layeredCache = completeServerCache || ChildrenNode.EMPTY_NODE;\n return compoundWriteApply(subMerge, layeredCache);\n }\n }\n } else {\n const merge = compoundWriteChildCompoundWrite(\n writeTree.visibleWrites,\n treePath\n );\n if (!includeHiddenWrites && compoundWriteIsEmpty(merge)) {\n return completeServerCache;\n } else {\n // If the server cache is null, and we don't have a complete cache, we need to return null\n if (\n !includeHiddenWrites &&\n completeServerCache == null &&\n !compoundWriteHasCompleteWrite(merge, newEmptyPath())\n ) {\n return null;\n } else {\n const filter = function (write: WriteRecord) {\n return (\n (write.visible || includeHiddenWrites) &&\n (!writeIdsToExclude ||\n !~writeIdsToExclude.indexOf(write.writeId)) &&\n (pathContains(write.path, treePath) ||\n pathContains(treePath, write.path))\n );\n };\n const mergeAtPath = writeTreeLayerTree_(\n writeTree.allWrites,\n filter,\n treePath\n );\n const layeredCache = completeServerCache || ChildrenNode.EMPTY_NODE;\n return compoundWriteApply(mergeAtPath, layeredCache);\n }\n }\n }\n}\n\n/**\n * With optional, underlying server data, attempt to return a children node of children that we have complete data for.\n * Used when creating new views, to pre-fill their complete event children snapshot.\n */\nexport function writeTreeCalcCompleteEventChildren(\n writeTree: WriteTree,\n treePath: Path,\n completeServerChildren: ChildrenNode | null\n) {\n let completeChildren = ChildrenNode.EMPTY_NODE as Node;\n const topLevelSet = compoundWriteGetCompleteNode(\n writeTree.visibleWrites,\n treePath\n );\n if (topLevelSet) {\n if (!topLevelSet.isLeafNode()) {\n // we're shadowing everything. Return the children.\n topLevelSet.forEachChild(PRIORITY_INDEX, (childName, childSnap) => {\n completeChildren = completeChildren.updateImmediateChild(\n childName,\n childSnap\n );\n });\n }\n return completeChildren;\n } else if (completeServerChildren) {\n // Layer any children we have on top of this\n // We know we don't have a top-level set, so just enumerate existing children\n const merge = compoundWriteChildCompoundWrite(\n writeTree.visibleWrites,\n treePath\n );\n completeServerChildren.forEachChild(\n PRIORITY_INDEX,\n (childName, childNode) => {\n const node = compoundWriteApply(\n compoundWriteChildCompoundWrite(merge, new Path(childName)),\n childNode\n );\n completeChildren = completeChildren.updateImmediateChild(\n childName,\n node\n );\n }\n );\n // Add any complete children we have from the set\n compoundWriteGetCompleteChildren(merge).forEach(namedNode => {\n completeChildren = completeChildren.updateImmediateChild(\n namedNode.name,\n namedNode.node\n );\n });\n return completeChildren;\n } else {\n // We don't have anything to layer on top of. 
Layer on any children we have\n // Note that we can return an empty snap if we have a defined delete\n const merge = compoundWriteChildCompoundWrite(\n writeTree.visibleWrites,\n treePath\n );\n compoundWriteGetCompleteChildren(merge).forEach(namedNode => {\n completeChildren = completeChildren.updateImmediateChild(\n namedNode.name,\n namedNode.node\n );\n });\n return completeChildren;\n }\n}\n\n/**\n * Given that the underlying server data has updated, determine what, if anything, needs to be\n * applied to the event cache.\n *\n * Possibilities:\n *\n * 1. No writes are shadowing. Events should be raised, the snap to be applied comes from the server data\n *\n * 2. Some write is completely shadowing. No events to be raised\n *\n * 3. Is partially shadowed. Events\n *\n * Either existingEventSnap or existingServerSnap must exist\n */\nexport function writeTreeCalcEventCacheAfterServerOverwrite(\n writeTree: WriteTree,\n treePath: Path,\n childPath: Path,\n existingEventSnap: Node | null,\n existingServerSnap: Node | null\n): Node | null {\n assert(\n existingEventSnap || existingServerSnap,\n 'Either existingEventSnap or existingServerSnap must exist'\n );\n const path = pathChild(treePath, childPath);\n if (compoundWriteHasCompleteWrite(writeTree.visibleWrites, path)) {\n // At this point we can probably guarantee that we're in case 2, meaning no events\n // May need to check visibility while doing the findRootMostValueAndPath call\n return null;\n } else {\n // No complete shadowing. We're either partially shadowing or not shadowing at all.\n const childMerge = compoundWriteChildCompoundWrite(\n writeTree.visibleWrites,\n path\n );\n if (compoundWriteIsEmpty(childMerge)) {\n // We're not shadowing at all. Case 1\n return existingServerSnap.getChild(childPath);\n } else {\n // This could be more efficient if the serverNode + updates doesn't change the eventSnap\n // However this is tricky to find out, since user updates don't necessary change the server\n // snap, e.g. priority updates on empty nodes, or deep deletes. Another special case is if the server\n // adds nodes, but doesn't change any existing writes. It is therefore not enough to\n // only check if the updates change the serverNode.\n // Maybe check if the merge tree contains these special cases and only do a full overwrite in that case?\n return compoundWriteApply(\n childMerge,\n existingServerSnap.getChild(childPath)\n );\n }\n }\n}\n\n/**\n * Returns a complete child for a given server snap after applying all user writes or null if there is no\n * complete child for this ChildKey.\n */\nexport function writeTreeCalcCompleteChild(\n writeTree: WriteTree,\n treePath: Path,\n childKey: string,\n existingServerSnap: CacheNode\n): Node | null {\n const path = pathChild(treePath, childKey);\n const shadowingNode = compoundWriteGetCompleteNode(\n writeTree.visibleWrites,\n path\n );\n if (shadowingNode != null) {\n return shadowingNode;\n } else {\n if (existingServerSnap.isCompleteForChild(childKey)) {\n const childMerge = compoundWriteChildCompoundWrite(\n writeTree.visibleWrites,\n path\n );\n return compoundWriteApply(\n childMerge,\n existingServerSnap.getNode().getImmediateChild(childKey)\n );\n } else {\n return null;\n }\n }\n}\n\n/**\n * Returns a node if there is a complete overwrite for this path. 
More specifically, if there is a write at\n * a higher path, this will return the child of that write relative to the write and this path.\n * Returns null if there is no write at this path.\n */\nexport function writeTreeShadowingWrite(\n writeTree: WriteTree,\n path: Path\n): Node | null {\n return compoundWriteGetCompleteNode(writeTree.visibleWrites, path);\n}\n\n/**\n * This method is used when processing child remove events on a query. If we can, we pull in children that were outside\n * the window, but may now be in the window.\n */\nexport function writeTreeCalcIndexedSlice(\n writeTree: WriteTree,\n treePath: Path,\n completeServerData: Node | null,\n startPost: NamedNode,\n count: number,\n reverse: boolean,\n index: Index\n): NamedNode[] {\n let toIterate: Node;\n const merge = compoundWriteChildCompoundWrite(\n writeTree.visibleWrites,\n treePath\n );\n const shadowingNode = compoundWriteGetCompleteNode(merge, newEmptyPath());\n if (shadowingNode != null) {\n toIterate = shadowingNode;\n } else if (completeServerData != null) {\n toIterate = compoundWriteApply(merge, completeServerData);\n } else {\n // no children to iterate on\n return [];\n }\n toIterate = toIterate.withIndex(index);\n if (!toIterate.isEmpty() && !toIterate.isLeafNode()) {\n const nodes = [];\n const cmp = index.getCompare();\n const iter = reverse\n ? (toIterate as ChildrenNode).getReverseIteratorFrom(startPost, index)\n : (toIterate as ChildrenNode).getIteratorFrom(startPost, index);\n let next = iter.getNext();\n while (next && nodes.length < count) {\n if (cmp(next, startPost) !== 0) {\n nodes.push(next);\n }\n next = iter.getNext();\n }\n return nodes;\n } else {\n return [];\n }\n}\n\nexport function newWriteTree(): WriteTree {\n return {\n visibleWrites: CompoundWrite.empty(),\n allWrites: [],\n lastWriteId: -1\n };\n}\n\n/**\n * WriteTree tracks all pending user-initiated writes and has methods to calculate the result of merging them\n * with underlying server data (to create \"event cache\" data). Pending writes are added with addOverwrite()\n * and addMerge(), and removed with removeWrite().\n */\nexport interface WriteTree {\n /**\n * A tree tracking the result of applying all visible writes. This does not include transactions with\n * applyLocally=false or writes that are completely shadowed by other writes.\n */\n visibleWrites: CompoundWrite;\n\n /**\n * A list of all pending writes, regardless of visibility and shadowed-ness. Used to calculate arbitrary\n * sets of the changed data, such as hidden writes (from transactions) or changes with certain writes excluded (also\n * used by transactions).\n */\n allWrites: WriteRecord[];\n\n lastWriteId: number;\n}\n\n/**\n * If possible, returns a complete event cache, using the underlying server data if possible. In addition, can be used\n * to get a cache that includes hidden writes, and excludes arbitrary writes. 
Note that customizing the returned node\n * can lead to a more expensive calculation.\n *\n * @param writeIdsToExclude - Optional writes to exclude.\n * @param includeHiddenWrites - Defaults to false, whether or not to layer on writes with visible set to false\n */\nexport function writeTreeRefCalcCompleteEventCache(\n writeTreeRef: WriteTreeRef,\n completeServerCache: Node | null,\n writeIdsToExclude?: number[],\n includeHiddenWrites?: boolean\n): Node | null {\n return writeTreeCalcCompleteEventCache(\n writeTreeRef.writeTree,\n writeTreeRef.treePath,\n completeServerCache,\n writeIdsToExclude,\n includeHiddenWrites\n );\n}\n\n/**\n * If possible, returns a children node containing all of the complete children we have data for. The returned data is a\n * mix of the given server data and write data.\n *\n */\nexport function writeTreeRefCalcCompleteEventChildren(\n writeTreeRef: WriteTreeRef,\n completeServerChildren: ChildrenNode | null\n): ChildrenNode {\n return writeTreeCalcCompleteEventChildren(\n writeTreeRef.writeTree,\n writeTreeRef.treePath,\n completeServerChildren\n ) as ChildrenNode;\n}\n\n/**\n * Given that either the underlying server data has updated or the outstanding writes have updated, determine what,\n * if anything, needs to be applied to the event cache.\n *\n * Possibilities:\n *\n * 1. No writes are shadowing. Events should be raised, the snap to be applied comes from the server data\n *\n * 2. Some write is completely shadowing. No events to be raised\n *\n * 3. Is partially shadowed. Events should be raised\n *\n * Either existingEventSnap or existingServerSnap must exist, this is validated via an assert\n *\n *\n */\nexport function writeTreeRefCalcEventCacheAfterServerOverwrite(\n writeTreeRef: WriteTreeRef,\n path: Path,\n existingEventSnap: Node | null,\n existingServerSnap: Node | null\n): Node | null {\n return writeTreeCalcEventCacheAfterServerOverwrite(\n writeTreeRef.writeTree,\n writeTreeRef.treePath,\n path,\n existingEventSnap,\n existingServerSnap\n );\n}\n\n/**\n * Returns a node if there is a complete overwrite for this path. More specifically, if there is a write at\n * a higher path, this will return the child of that write relative to the write and this path.\n * Returns null if there is no write at this path.\n *\n */\nexport function writeTreeRefShadowingWrite(\n writeTreeRef: WriteTreeRef,\n path: Path\n): Node | null {\n return writeTreeShadowingWrite(\n writeTreeRef.writeTree,\n pathChild(writeTreeRef.treePath, path)\n );\n}\n\n/**\n * This method is used when processing child remove events on a query. 
If we can, we pull in children that were outside\n * the window, but may now be in the window\n */\nexport function writeTreeRefCalcIndexedSlice(\n writeTreeRef: WriteTreeRef,\n completeServerData: Node | null,\n startPost: NamedNode,\n count: number,\n reverse: boolean,\n index: Index\n): NamedNode[] {\n return writeTreeCalcIndexedSlice(\n writeTreeRef.writeTree,\n writeTreeRef.treePath,\n completeServerData,\n startPost,\n count,\n reverse,\n index\n );\n}\n\n/**\n * Returns a complete child for a given server snap after applying all user writes or null if there is no\n * complete child for this ChildKey.\n */\nexport function writeTreeRefCalcCompleteChild(\n writeTreeRef: WriteTreeRef,\n childKey: string,\n existingServerCache: CacheNode\n): Node | null {\n return writeTreeCalcCompleteChild(\n writeTreeRef.writeTree,\n writeTreeRef.treePath,\n childKey,\n existingServerCache\n );\n}\n\n/**\n * Return a WriteTreeRef for a child.\n */\nexport function writeTreeRefChild(\n writeTreeRef: WriteTreeRef,\n childName: string\n): WriteTreeRef {\n return newWriteTreeRef(\n pathChild(writeTreeRef.treePath, childName),\n writeTreeRef.writeTree\n );\n}\n\nexport function newWriteTreeRef(\n path: Path,\n writeTree: WriteTree\n): WriteTreeRef {\n return {\n treePath: path,\n writeTree\n };\n}\n\n/**\n * A WriteTreeRef wraps a WriteTree and a path, for convenient access to a particular subtree. All of the methods\n * just proxy to the underlying WriteTree.\n *\n */\nexport interface WriteTreeRef {\n /**\n * The path to this particular write tree ref. Used for calling methods on writeTree_ while exposing a simpler\n * interface to callers.\n */\n readonly treePath: Path;\n\n /**\n * * A reference to the actual tree of write data. All methods are pass-through to the tree, but with the appropriate\n * path prefixed.\n *\n * This lets us make cheap references to points in the tree for sync points without having to copy and maintain all of\n * the data.\n */\n readonly writeTree: WriteTree;\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { assert, assertionError } from '@firebase/util';\n\nimport {\n Change,\n ChangeType,\n changeChildAdded,\n changeChildChanged,\n changeChildRemoved\n} from './Change';\n\nexport class ChildChangeAccumulator {\n private readonly changeMap: Map = new Map();\n\n trackChildChange(change: Change) {\n const type = change.type;\n const childKey = change.childName!;\n assert(\n type === ChangeType.CHILD_ADDED ||\n type === ChangeType.CHILD_CHANGED ||\n type === ChangeType.CHILD_REMOVED,\n 'Only child changes supported for tracking'\n );\n assert(\n childKey !== '.priority',\n 'Only non-priority child changes can be tracked.'\n );\n const oldChange = this.changeMap.get(childKey);\n if (oldChange) {\n const oldType = oldChange.type;\n if (\n type === ChangeType.CHILD_ADDED &&\n oldType === ChangeType.CHILD_REMOVED\n ) {\n this.changeMap.set(\n childKey,\n changeChildChanged(\n childKey,\n 
change.snapshotNode,\n oldChange.snapshotNode\n )\n );\n } else if (\n type === ChangeType.CHILD_REMOVED &&\n oldType === ChangeType.CHILD_ADDED\n ) {\n this.changeMap.delete(childKey);\n } else if (\n type === ChangeType.CHILD_REMOVED &&\n oldType === ChangeType.CHILD_CHANGED\n ) {\n this.changeMap.set(\n childKey,\n changeChildRemoved(childKey, oldChange.oldSnap)\n );\n } else if (\n type === ChangeType.CHILD_CHANGED &&\n oldType === ChangeType.CHILD_ADDED\n ) {\n this.changeMap.set(\n childKey,\n changeChildAdded(childKey, change.snapshotNode)\n );\n } else if (\n type === ChangeType.CHILD_CHANGED &&\n oldType === ChangeType.CHILD_CHANGED\n ) {\n this.changeMap.set(\n childKey,\n changeChildChanged(childKey, change.snapshotNode, oldChange.oldSnap)\n );\n } else {\n throw assertionError(\n 'Illegal combination of changes: ' +\n change +\n ' occurred after ' +\n oldChange\n );\n }\n } else {\n this.changeMap.set(childKey, change);\n }\n }\n\n getChanges(): Change[] {\n return Array.from(this.changeMap.values());\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Index } from '../snap/indexes/Index';\nimport { NamedNode, Node } from '../snap/Node';\nimport {\n WriteTreeRef,\n writeTreeRefCalcCompleteChild,\n writeTreeRefCalcIndexedSlice\n} from '../WriteTree';\n\nimport { CacheNode } from './CacheNode';\nimport { ViewCache, viewCacheGetCompleteServerSnap } from './ViewCache';\n\n/**\n * Since updates to filtered nodes might require nodes to be pulled in from \"outside\" the node, this interface\n * can help to get complete children that can be pulled in.\n * A class implementing this interface takes potentially multiple sources (e.g. user writes, server data from\n * other views etc.) 
to try it's best to get a complete child that might be useful in pulling into the view.\n *\n * @interface\n */\nexport interface CompleteChildSource {\n getCompleteChild(childKey: string): Node | null;\n\n getChildAfterChild(\n index: Index,\n child: NamedNode,\n reverse: boolean\n ): NamedNode | null;\n}\n\n/**\n * An implementation of CompleteChildSource that never returns any additional children\n */\n// eslint-disable-next-line @typescript-eslint/naming-convention\nexport class NoCompleteChildSource_ implements CompleteChildSource {\n getCompleteChild(childKey?: string): Node | null {\n return null;\n }\n getChildAfterChild(\n index?: Index,\n child?: NamedNode,\n reverse?: boolean\n ): NamedNode | null {\n return null;\n }\n}\n\n/**\n * Singleton instance.\n */\nexport const NO_COMPLETE_CHILD_SOURCE = new NoCompleteChildSource_();\n\n/**\n * An implementation of CompleteChildSource that uses a WriteTree in addition to any other server data or\n * old event caches available to calculate complete children.\n */\nexport class WriteTreeCompleteChildSource implements CompleteChildSource {\n constructor(\n private writes_: WriteTreeRef,\n private viewCache_: ViewCache,\n private optCompleteServerCache_: Node | null = null\n ) {}\n getCompleteChild(childKey: string): Node | null {\n const node = this.viewCache_.eventCache;\n if (node.isCompleteForChild(childKey)) {\n return node.getNode().getImmediateChild(childKey);\n } else {\n const serverNode =\n this.optCompleteServerCache_ != null\n ? new CacheNode(this.optCompleteServerCache_, true, false)\n : this.viewCache_.serverCache;\n return writeTreeRefCalcCompleteChild(this.writes_, childKey, serverNode);\n }\n }\n getChildAfterChild(\n index: Index,\n child: NamedNode,\n reverse: boolean\n ): NamedNode | null {\n const completeServerData =\n this.optCompleteServerCache_ != null\n ? 
this.optCompleteServerCache_\n : viewCacheGetCompleteServerSnap(this.viewCache_);\n const nodes = writeTreeRefCalcIndexedSlice(\n this.writes_,\n completeServerData,\n child,\n 1,\n reverse,\n index\n );\n if (nodes.length === 0) {\n return null;\n } else {\n return nodes[0];\n }\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { assert, assertionError } from '@firebase/util';\n\nimport { AckUserWrite } from '../operation/AckUserWrite';\nimport { Merge } from '../operation/Merge';\nimport { Operation, OperationType } from '../operation/Operation';\nimport { Overwrite } from '../operation/Overwrite';\nimport { ChildrenNode } from '../snap/ChildrenNode';\nimport { KEY_INDEX } from '../snap/indexes/KeyIndex';\nimport { Node } from '../snap/Node';\nimport { ImmutableTree } from '../util/ImmutableTree';\nimport {\n newEmptyPath,\n Path,\n pathChild,\n pathGetBack,\n pathGetFront,\n pathGetLength,\n pathIsEmpty,\n pathParent,\n pathPopFront\n} from '../util/Path';\nimport {\n WriteTreeRef,\n writeTreeRefCalcCompleteChild,\n writeTreeRefCalcCompleteEventCache,\n writeTreeRefCalcCompleteEventChildren,\n writeTreeRefCalcEventCacheAfterServerOverwrite,\n writeTreeRefShadowingWrite\n} from '../WriteTree';\n\nimport { Change, changeValue } from './Change';\nimport { ChildChangeAccumulator } from './ChildChangeAccumulator';\nimport {\n CompleteChildSource,\n NO_COMPLETE_CHILD_SOURCE,\n WriteTreeCompleteChildSource\n} from './CompleteChildSource';\nimport { NodeFilter } from './filter/NodeFilter';\nimport {\n ViewCache,\n viewCacheGetCompleteEventSnap,\n viewCacheGetCompleteServerSnap,\n viewCacheUpdateEventSnap,\n viewCacheUpdateServerSnap\n} from './ViewCache';\n\nexport interface ProcessorResult {\n readonly viewCache: ViewCache;\n readonly changes: Change[];\n}\n\nexport interface ViewProcessor {\n readonly filter: NodeFilter;\n}\n\nexport function newViewProcessor(filter: NodeFilter): ViewProcessor {\n return { filter };\n}\n\nexport function viewProcessorAssertIndexed(\n viewProcessor: ViewProcessor,\n viewCache: ViewCache\n): void {\n assert(\n viewCache.eventCache.getNode().isIndexed(viewProcessor.filter.getIndex()),\n 'Event snap not indexed'\n );\n assert(\n viewCache.serverCache.getNode().isIndexed(viewProcessor.filter.getIndex()),\n 'Server snap not indexed'\n );\n}\n\nexport function viewProcessorApplyOperation(\n viewProcessor: ViewProcessor,\n oldViewCache: ViewCache,\n operation: Operation,\n writesCache: WriteTreeRef,\n completeCache: Node | null\n): ProcessorResult {\n const accumulator = new ChildChangeAccumulator();\n let newViewCache, filterServerNode;\n if (operation.type === OperationType.OVERWRITE) {\n const overwrite = operation as Overwrite;\n if (overwrite.source.fromUser) {\n newViewCache = viewProcessorApplyUserOverwrite(\n viewProcessor,\n oldViewCache,\n overwrite.path,\n overwrite.snap,\n writesCache,\n completeCache,\n accumulator\n );\n } else {\n assert(overwrite.source.fromServer, 
'Unknown source.');\n // We filter the node if it's a tagged update or the node has been previously filtered and the\n // update is not at the root in which case it is ok (and necessary) to mark the node unfiltered\n // again\n filterServerNode =\n overwrite.source.tagged ||\n (oldViewCache.serverCache.isFiltered() && !pathIsEmpty(overwrite.path));\n newViewCache = viewProcessorApplyServerOverwrite(\n viewProcessor,\n oldViewCache,\n overwrite.path,\n overwrite.snap,\n writesCache,\n completeCache,\n filterServerNode,\n accumulator\n );\n }\n } else if (operation.type === OperationType.MERGE) {\n const merge = operation as Merge;\n if (merge.source.fromUser) {\n newViewCache = viewProcessorApplyUserMerge(\n viewProcessor,\n oldViewCache,\n merge.path,\n merge.children,\n writesCache,\n completeCache,\n accumulator\n );\n } else {\n assert(merge.source.fromServer, 'Unknown source.');\n // We filter the node if it's a tagged update or the node has been previously filtered\n filterServerNode =\n merge.source.tagged || oldViewCache.serverCache.isFiltered();\n newViewCache = viewProcessorApplyServerMerge(\n viewProcessor,\n oldViewCache,\n merge.path,\n merge.children,\n writesCache,\n completeCache,\n filterServerNode,\n accumulator\n );\n }\n } else if (operation.type === OperationType.ACK_USER_WRITE) {\n const ackUserWrite = operation as AckUserWrite;\n if (!ackUserWrite.revert) {\n newViewCache = viewProcessorAckUserWrite(\n viewProcessor,\n oldViewCache,\n ackUserWrite.path,\n ackUserWrite.affectedTree,\n writesCache,\n completeCache,\n accumulator\n );\n } else {\n newViewCache = viewProcessorRevertUserWrite(\n viewProcessor,\n oldViewCache,\n ackUserWrite.path,\n writesCache,\n completeCache,\n accumulator\n );\n }\n } else if (operation.type === OperationType.LISTEN_COMPLETE) {\n newViewCache = viewProcessorListenComplete(\n viewProcessor,\n oldViewCache,\n operation.path,\n writesCache,\n accumulator\n );\n } else {\n throw assertionError('Unknown operation type: ' + operation.type);\n }\n const changes = accumulator.getChanges();\n viewProcessorMaybeAddValueEvent(oldViewCache, newViewCache, changes);\n return { viewCache: newViewCache, changes };\n}\n\nfunction viewProcessorMaybeAddValueEvent(\n oldViewCache: ViewCache,\n newViewCache: ViewCache,\n accumulator: Change[]\n): void {\n const eventSnap = newViewCache.eventCache;\n if (eventSnap.isFullyInitialized()) {\n const isLeafOrEmpty =\n eventSnap.getNode().isLeafNode() || eventSnap.getNode().isEmpty();\n const oldCompleteSnap = viewCacheGetCompleteEventSnap(oldViewCache);\n if (\n accumulator.length > 0 ||\n !oldViewCache.eventCache.isFullyInitialized() ||\n (isLeafOrEmpty && !eventSnap.getNode().equals(oldCompleteSnap)) ||\n !eventSnap.getNode().getPriority().equals(oldCompleteSnap.getPriority())\n ) {\n accumulator.push(\n changeValue(viewCacheGetCompleteEventSnap(newViewCache))\n );\n }\n }\n}\n\nfunction viewProcessorGenerateEventCacheAfterServerEvent(\n viewProcessor: ViewProcessor,\n viewCache: ViewCache,\n changePath: Path,\n writesCache: WriteTreeRef,\n source: CompleteChildSource,\n accumulator: ChildChangeAccumulator\n): ViewCache {\n const oldEventSnap = viewCache.eventCache;\n if (writeTreeRefShadowingWrite(writesCache, changePath) != null) {\n // we have a shadowing write, ignore changes\n return viewCache;\n } else {\n let newEventCache, serverNode;\n if (pathIsEmpty(changePath)) {\n // TODO: figure out how this plays with \"sliding ack windows\"\n assert(\n viewCache.serverCache.isFullyInitialized(),\n 'If change 
path is empty, we must have complete server data'\n );\n if (viewCache.serverCache.isFiltered()) {\n // We need to special case this, because we need to only apply writes to complete children, or\n // we might end up raising events for incomplete children. If the server data is filtered deep\n // writes cannot be guaranteed to be complete\n const serverCache = viewCacheGetCompleteServerSnap(viewCache);\n const completeChildren =\n serverCache instanceof ChildrenNode\n ? serverCache\n : ChildrenNode.EMPTY_NODE;\n const completeEventChildren = writeTreeRefCalcCompleteEventChildren(\n writesCache,\n completeChildren\n );\n newEventCache = viewProcessor.filter.updateFullNode(\n viewCache.eventCache.getNode(),\n completeEventChildren,\n accumulator\n );\n } else {\n const completeNode = writeTreeRefCalcCompleteEventCache(\n writesCache,\n viewCacheGetCompleteServerSnap(viewCache)\n );\n newEventCache = viewProcessor.filter.updateFullNode(\n viewCache.eventCache.getNode(),\n completeNode,\n accumulator\n );\n }\n } else {\n const childKey = pathGetFront(changePath);\n if (childKey === '.priority') {\n assert(\n pathGetLength(changePath) === 1,\n \"Can't have a priority with additional path components\"\n );\n const oldEventNode = oldEventSnap.getNode();\n serverNode = viewCache.serverCache.getNode();\n // we might have overwrites for this priority\n const updatedPriority = writeTreeRefCalcEventCacheAfterServerOverwrite(\n writesCache,\n changePath,\n oldEventNode,\n serverNode\n );\n if (updatedPriority != null) {\n newEventCache = viewProcessor.filter.updatePriority(\n oldEventNode,\n updatedPriority\n );\n } else {\n // priority didn't change, keep old node\n newEventCache = oldEventSnap.getNode();\n }\n } else {\n const childChangePath = pathPopFront(changePath);\n // update child\n let newEventChild;\n if (oldEventSnap.isCompleteForChild(childKey)) {\n serverNode = viewCache.serverCache.getNode();\n const eventChildUpdate =\n writeTreeRefCalcEventCacheAfterServerOverwrite(\n writesCache,\n changePath,\n oldEventSnap.getNode(),\n serverNode\n );\n if (eventChildUpdate != null) {\n newEventChild = oldEventSnap\n .getNode()\n .getImmediateChild(childKey)\n .updateChild(childChangePath, eventChildUpdate);\n } else {\n // Nothing changed, just keep the old child\n newEventChild = oldEventSnap.getNode().getImmediateChild(childKey);\n }\n } else {\n newEventChild = writeTreeRefCalcCompleteChild(\n writesCache,\n childKey,\n viewCache.serverCache\n );\n }\n if (newEventChild != null) {\n newEventCache = viewProcessor.filter.updateChild(\n oldEventSnap.getNode(),\n childKey,\n newEventChild,\n childChangePath,\n source,\n accumulator\n );\n } else {\n // no complete child available or no change\n newEventCache = oldEventSnap.getNode();\n }\n }\n }\n return viewCacheUpdateEventSnap(\n viewCache,\n newEventCache,\n oldEventSnap.isFullyInitialized() || pathIsEmpty(changePath),\n viewProcessor.filter.filtersNodes()\n );\n }\n}\n\nfunction viewProcessorApplyServerOverwrite(\n viewProcessor: ViewProcessor,\n oldViewCache: ViewCache,\n changePath: Path,\n changedSnap: Node,\n writesCache: WriteTreeRef,\n completeCache: Node | null,\n filterServerNode: boolean,\n accumulator: ChildChangeAccumulator\n): ViewCache {\n const oldServerSnap = oldViewCache.serverCache;\n let newServerCache;\n const serverFilter = filterServerNode\n ? 
viewProcessor.filter\n : viewProcessor.filter.getIndexedFilter();\n if (pathIsEmpty(changePath)) {\n newServerCache = serverFilter.updateFullNode(\n oldServerSnap.getNode(),\n changedSnap,\n null\n );\n } else if (serverFilter.filtersNodes() && !oldServerSnap.isFiltered()) {\n // we want to filter the server node, but we didn't filter the server node yet, so simulate a full update\n const newServerNode = oldServerSnap\n .getNode()\n .updateChild(changePath, changedSnap);\n newServerCache = serverFilter.updateFullNode(\n oldServerSnap.getNode(),\n newServerNode,\n null\n );\n } else {\n const childKey = pathGetFront(changePath);\n if (\n !oldServerSnap.isCompleteForPath(changePath) &&\n pathGetLength(changePath) > 1\n ) {\n // We don't update incomplete nodes with updates intended for other listeners\n return oldViewCache;\n }\n const childChangePath = pathPopFront(changePath);\n const childNode = oldServerSnap.getNode().getImmediateChild(childKey);\n const newChildNode = childNode.updateChild(childChangePath, changedSnap);\n if (childKey === '.priority') {\n newServerCache = serverFilter.updatePriority(\n oldServerSnap.getNode(),\n newChildNode\n );\n } else {\n newServerCache = serverFilter.updateChild(\n oldServerSnap.getNode(),\n childKey,\n newChildNode,\n childChangePath,\n NO_COMPLETE_CHILD_SOURCE,\n null\n );\n }\n }\n const newViewCache = viewCacheUpdateServerSnap(\n oldViewCache,\n newServerCache,\n oldServerSnap.isFullyInitialized() || pathIsEmpty(changePath),\n serverFilter.filtersNodes()\n );\n const source = new WriteTreeCompleteChildSource(\n writesCache,\n newViewCache,\n completeCache\n );\n return viewProcessorGenerateEventCacheAfterServerEvent(\n viewProcessor,\n newViewCache,\n changePath,\n writesCache,\n source,\n accumulator\n );\n}\n\nfunction viewProcessorApplyUserOverwrite(\n viewProcessor: ViewProcessor,\n oldViewCache: ViewCache,\n changePath: Path,\n changedSnap: Node,\n writesCache: WriteTreeRef,\n completeCache: Node | null,\n accumulator: ChildChangeAccumulator\n): ViewCache {\n const oldEventSnap = oldViewCache.eventCache;\n let newViewCache, newEventCache;\n const source = new WriteTreeCompleteChildSource(\n writesCache,\n oldViewCache,\n completeCache\n );\n if (pathIsEmpty(changePath)) {\n newEventCache = viewProcessor.filter.updateFullNode(\n oldViewCache.eventCache.getNode(),\n changedSnap,\n accumulator\n );\n newViewCache = viewCacheUpdateEventSnap(\n oldViewCache,\n newEventCache,\n true,\n viewProcessor.filter.filtersNodes()\n );\n } else {\n const childKey = pathGetFront(changePath);\n if (childKey === '.priority') {\n newEventCache = viewProcessor.filter.updatePriority(\n oldViewCache.eventCache.getNode(),\n changedSnap\n );\n newViewCache = viewCacheUpdateEventSnap(\n oldViewCache,\n newEventCache,\n oldEventSnap.isFullyInitialized(),\n oldEventSnap.isFiltered()\n );\n } else {\n const childChangePath = pathPopFront(changePath);\n const oldChild = oldEventSnap.getNode().getImmediateChild(childKey);\n let newChild;\n if (pathIsEmpty(childChangePath)) {\n // Child overwrite, we can replace the child\n newChild = changedSnap;\n } else {\n const childNode = source.getCompleteChild(childKey);\n if (childNode != null) {\n if (\n pathGetBack(childChangePath) === '.priority' &&\n childNode.getChild(pathParent(childChangePath)).isEmpty()\n ) {\n // This is a priority update on an empty node. 
If this node exists on the server, the\n // server will send down the priority in the update, so ignore for now\n newChild = childNode;\n } else {\n newChild = childNode.updateChild(childChangePath, changedSnap);\n }\n } else {\n // There is no complete child node available\n newChild = ChildrenNode.EMPTY_NODE;\n }\n }\n if (!oldChild.equals(newChild)) {\n const newEventSnap = viewProcessor.filter.updateChild(\n oldEventSnap.getNode(),\n childKey,\n newChild,\n childChangePath,\n source,\n accumulator\n );\n newViewCache = viewCacheUpdateEventSnap(\n oldViewCache,\n newEventSnap,\n oldEventSnap.isFullyInitialized(),\n viewProcessor.filter.filtersNodes()\n );\n } else {\n newViewCache = oldViewCache;\n }\n }\n }\n return newViewCache;\n}\n\nfunction viewProcessorCacheHasChild(\n viewCache: ViewCache,\n childKey: string\n): boolean {\n return viewCache.eventCache.isCompleteForChild(childKey);\n}\n\nfunction viewProcessorApplyUserMerge(\n viewProcessor: ViewProcessor,\n viewCache: ViewCache,\n path: Path,\n changedChildren: ImmutableTree,\n writesCache: WriteTreeRef,\n serverCache: Node | null,\n accumulator: ChildChangeAccumulator\n): ViewCache {\n // HACK: In the case of a limit query, there may be some changes that bump things out of the\n // window leaving room for new items. It's important we process these changes first, so we\n // iterate the changes twice, first processing any that affect items currently in view.\n // TODO: I consider an item \"in view\" if cacheHasChild is true, which checks both the server\n // and event snap. I'm not sure if this will result in edge cases when a child is in one but\n // not the other.\n let curViewCache = viewCache;\n changedChildren.foreach((relativePath, childNode) => {\n const writePath = pathChild(path, relativePath);\n if (viewProcessorCacheHasChild(viewCache, pathGetFront(writePath))) {\n curViewCache = viewProcessorApplyUserOverwrite(\n viewProcessor,\n curViewCache,\n writePath,\n childNode,\n writesCache,\n serverCache,\n accumulator\n );\n }\n });\n\n changedChildren.foreach((relativePath, childNode) => {\n const writePath = pathChild(path, relativePath);\n if (!viewProcessorCacheHasChild(viewCache, pathGetFront(writePath))) {\n curViewCache = viewProcessorApplyUserOverwrite(\n viewProcessor,\n curViewCache,\n writePath,\n childNode,\n writesCache,\n serverCache,\n accumulator\n );\n }\n });\n\n return curViewCache;\n}\n\nfunction viewProcessorApplyMerge(\n viewProcessor: ViewProcessor,\n node: Node,\n merge: ImmutableTree\n): Node {\n merge.foreach((relativePath, childNode) => {\n node = node.updateChild(relativePath, childNode);\n });\n return node;\n}\n\nfunction viewProcessorApplyServerMerge(\n viewProcessor: ViewProcessor,\n viewCache: ViewCache,\n path: Path,\n changedChildren: ImmutableTree,\n writesCache: WriteTreeRef,\n serverCache: Node | null,\n filterServerNode: boolean,\n accumulator: ChildChangeAccumulator\n): ViewCache {\n // If we don't have a cache yet, this merge was intended for a previously listen in the same location. Ignore it and\n // wait for the complete data update coming soon.\n if (\n viewCache.serverCache.getNode().isEmpty() &&\n !viewCache.serverCache.isFullyInitialized()\n ) {\n return viewCache;\n }\n\n // HACK: In the case of a limit query, there may be some changes that bump things out of the\n // window leaving room for new items. 
It's important we process these changes first, so we\n // iterate the changes twice, first processing any that affect items currently in view.\n // TODO: I consider an item \"in view\" if cacheHasChild is true, which checks both the server\n // and event snap. I'm not sure if this will result in edge cases when a child is in one but\n // not the other.\n let curViewCache = viewCache;\n let viewMergeTree: ImmutableTree;\n if (pathIsEmpty(path)) {\n viewMergeTree = changedChildren;\n } else {\n viewMergeTree = new ImmutableTree(null).setTree(\n path,\n changedChildren\n );\n }\n const serverNode = viewCache.serverCache.getNode();\n viewMergeTree.children.inorderTraversal((childKey, childTree) => {\n if (serverNode.hasChild(childKey)) {\n const serverChild = viewCache.serverCache\n .getNode()\n .getImmediateChild(childKey);\n const newChild = viewProcessorApplyMerge(\n viewProcessor,\n serverChild,\n childTree\n );\n curViewCache = viewProcessorApplyServerOverwrite(\n viewProcessor,\n curViewCache,\n new Path(childKey),\n newChild,\n writesCache,\n serverCache,\n filterServerNode,\n accumulator\n );\n }\n });\n viewMergeTree.children.inorderTraversal((childKey, childMergeTree) => {\n const isUnknownDeepMerge =\n !viewCache.serverCache.isCompleteForChild(childKey) &&\n childMergeTree.value === null;\n if (!serverNode.hasChild(childKey) && !isUnknownDeepMerge) {\n const serverChild = viewCache.serverCache\n .getNode()\n .getImmediateChild(childKey);\n const newChild = viewProcessorApplyMerge(\n viewProcessor,\n serverChild,\n childMergeTree\n );\n curViewCache = viewProcessorApplyServerOverwrite(\n viewProcessor,\n curViewCache,\n new Path(childKey),\n newChild,\n writesCache,\n serverCache,\n filterServerNode,\n accumulator\n );\n }\n });\n\n return curViewCache;\n}\n\nfunction viewProcessorAckUserWrite(\n viewProcessor: ViewProcessor,\n viewCache: ViewCache,\n ackPath: Path,\n affectedTree: ImmutableTree,\n writesCache: WriteTreeRef,\n completeCache: Node | null,\n accumulator: ChildChangeAccumulator\n): ViewCache {\n if (writeTreeRefShadowingWrite(writesCache, ackPath) != null) {\n return viewCache;\n }\n\n // Only filter server node if it is currently filtered\n const filterServerNode = viewCache.serverCache.isFiltered();\n\n // Essentially we'll just get our existing server cache for the affected paths and re-apply it as a server update\n // now that it won't be shadowed.\n const serverCache = viewCache.serverCache;\n if (affectedTree.value != null) {\n // This is an overwrite.\n if (\n (pathIsEmpty(ackPath) && serverCache.isFullyInitialized()) ||\n serverCache.isCompleteForPath(ackPath)\n ) {\n return viewProcessorApplyServerOverwrite(\n viewProcessor,\n viewCache,\n ackPath,\n serverCache.getNode().getChild(ackPath),\n writesCache,\n completeCache,\n filterServerNode,\n accumulator\n );\n } else if (pathIsEmpty(ackPath)) {\n // This is a goofy edge case where we are acking data at this location but don't have full data. 
We\n // should just re-apply whatever we have in our cache as a merge.\n let changedChildren = new ImmutableTree(null);\n serverCache.getNode().forEachChild(KEY_INDEX, (name, node) => {\n changedChildren = changedChildren.set(new Path(name), node);\n });\n return viewProcessorApplyServerMerge(\n viewProcessor,\n viewCache,\n ackPath,\n changedChildren,\n writesCache,\n completeCache,\n filterServerNode,\n accumulator\n );\n } else {\n return viewCache;\n }\n } else {\n // This is a merge.\n let changedChildren = new ImmutableTree(null);\n affectedTree.foreach((mergePath, value) => {\n const serverCachePath = pathChild(ackPath, mergePath);\n if (serverCache.isCompleteForPath(serverCachePath)) {\n changedChildren = changedChildren.set(\n mergePath,\n serverCache.getNode().getChild(serverCachePath)\n );\n }\n });\n return viewProcessorApplyServerMerge(\n viewProcessor,\n viewCache,\n ackPath,\n changedChildren,\n writesCache,\n completeCache,\n filterServerNode,\n accumulator\n );\n }\n}\n\nfunction viewProcessorListenComplete(\n viewProcessor: ViewProcessor,\n viewCache: ViewCache,\n path: Path,\n writesCache: WriteTreeRef,\n accumulator: ChildChangeAccumulator\n): ViewCache {\n const oldServerNode = viewCache.serverCache;\n const newViewCache = viewCacheUpdateServerSnap(\n viewCache,\n oldServerNode.getNode(),\n oldServerNode.isFullyInitialized() || pathIsEmpty(path),\n oldServerNode.isFiltered()\n );\n return viewProcessorGenerateEventCacheAfterServerEvent(\n viewProcessor,\n newViewCache,\n path,\n writesCache,\n NO_COMPLETE_CHILD_SOURCE,\n accumulator\n );\n}\n\nfunction viewProcessorRevertUserWrite(\n viewProcessor: ViewProcessor,\n viewCache: ViewCache,\n path: Path,\n writesCache: WriteTreeRef,\n completeServerCache: Node | null,\n accumulator: ChildChangeAccumulator\n): ViewCache {\n let complete;\n if (writeTreeRefShadowingWrite(writesCache, path) != null) {\n return viewCache;\n } else {\n const source = new WriteTreeCompleteChildSource(\n writesCache,\n viewCache,\n completeServerCache\n );\n const oldEventCache = viewCache.eventCache.getNode();\n let newEventCache;\n if (pathIsEmpty(path) || pathGetFront(path) === '.priority') {\n let newNode;\n if (viewCache.serverCache.isFullyInitialized()) {\n newNode = writeTreeRefCalcCompleteEventCache(\n writesCache,\n viewCacheGetCompleteServerSnap(viewCache)\n );\n } else {\n const serverChildren = viewCache.serverCache.getNode();\n assert(\n serverChildren instanceof ChildrenNode,\n 'serverChildren would be complete if leaf node'\n );\n newNode = writeTreeRefCalcCompleteEventChildren(\n writesCache,\n serverChildren as ChildrenNode\n );\n }\n newNode = newNode as Node;\n newEventCache = viewProcessor.filter.updateFullNode(\n oldEventCache,\n newNode,\n accumulator\n );\n } else {\n const childKey = pathGetFront(path);\n let newChild = writeTreeRefCalcCompleteChild(\n writesCache,\n childKey,\n viewCache.serverCache\n );\n if (\n newChild == null &&\n viewCache.serverCache.isCompleteForChild(childKey)\n ) {\n newChild = oldEventCache.getImmediateChild(childKey);\n }\n if (newChild != null) {\n newEventCache = viewProcessor.filter.updateChild(\n oldEventCache,\n childKey,\n newChild,\n pathPopFront(path),\n source,\n accumulator\n );\n } else if (viewCache.eventCache.getNode().hasChild(childKey)) {\n // No complete child available, delete the existing one, if any\n newEventCache = viewProcessor.filter.updateChild(\n oldEventCache,\n childKey,\n ChildrenNode.EMPTY_NODE,\n pathPopFront(path),\n source,\n accumulator\n );\n } else {\n 
newEventCache = oldEventCache;\n }\n if (\n newEventCache.isEmpty() &&\n viewCache.serverCache.isFullyInitialized()\n ) {\n // We might have reverted all child writes. Maybe the old event was a leaf node\n complete = writeTreeRefCalcCompleteEventCache(\n writesCache,\n viewCacheGetCompleteServerSnap(viewCache)\n );\n if (complete.isLeafNode()) {\n newEventCache = viewProcessor.filter.updateFullNode(\n newEventCache,\n complete,\n accumulator\n );\n }\n }\n }\n complete =\n viewCache.serverCache.isFullyInitialized() ||\n writeTreeRefShadowingWrite(writesCache, newEmptyPath()) != null;\n return viewCacheUpdateEventSnap(\n viewCache,\n newEventCache,\n complete,\n viewProcessor.filter.filtersNodes()\n );\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { assert } from '@firebase/util';\n\nimport { Operation, OperationType } from '../operation/Operation';\nimport { ChildrenNode } from '../snap/ChildrenNode';\nimport { PRIORITY_INDEX } from '../snap/indexes/PriorityIndex';\nimport { Node } from '../snap/Node';\nimport { Path, pathGetFront, pathIsEmpty } from '../util/Path';\nimport { WriteTreeRef } from '../WriteTree';\n\nimport { CacheNode } from './CacheNode';\nimport { Change, changeChildAdded, changeValue } from './Change';\nimport { CancelEvent, Event } from './Event';\nimport {\n EventGenerator,\n eventGeneratorGenerateEventsForChanges\n} from './EventGenerator';\nimport { EventRegistration, QueryContext } from './EventRegistration';\nimport { IndexedFilter } from './filter/IndexedFilter';\nimport { queryParamsGetNodeFilter } from './QueryParams';\nimport {\n newViewCache,\n ViewCache,\n viewCacheGetCompleteEventSnap,\n viewCacheGetCompleteServerSnap\n} from './ViewCache';\nimport {\n newViewProcessor,\n ViewProcessor,\n viewProcessorApplyOperation,\n viewProcessorAssertIndexed\n} from './ViewProcessor';\n\n/**\n * A view represents a specific location and query that has 1 or more event registrations.\n *\n * It does several things:\n * - Maintains the list of event registrations for this location/query.\n * - Maintains a cache of the data visible for this location/query.\n * - Applies new operations (via applyOperation), updates the cache, and based on the event\n * registrations returns the set of events to be raised.\n */\nexport class View {\n processor_: ViewProcessor;\n viewCache_: ViewCache;\n eventRegistrations_: EventRegistration[] = [];\n eventGenerator_: EventGenerator;\n\n constructor(private query_: QueryContext, initialViewCache: ViewCache) {\n const params = this.query_._queryParams;\n\n const indexFilter = new IndexedFilter(params.getIndex());\n const filter = queryParamsGetNodeFilter(params);\n\n this.processor_ = newViewProcessor(filter);\n\n const initialServerCache = initialViewCache.serverCache;\n const initialEventCache = initialViewCache.eventCache;\n\n // Don't filter server node with other filter than index, wait for tagged listen\n const serverSnap = indexFilter.updateFullNode(\n 
ChildrenNode.EMPTY_NODE,\n initialServerCache.getNode(),\n null\n );\n const eventSnap = filter.updateFullNode(\n ChildrenNode.EMPTY_NODE,\n initialEventCache.getNode(),\n null\n );\n const newServerCache = new CacheNode(\n serverSnap,\n initialServerCache.isFullyInitialized(),\n indexFilter.filtersNodes()\n );\n const newEventCache = new CacheNode(\n eventSnap,\n initialEventCache.isFullyInitialized(),\n filter.filtersNodes()\n );\n\n this.viewCache_ = newViewCache(newEventCache, newServerCache);\n this.eventGenerator_ = new EventGenerator(this.query_);\n }\n\n get query(): QueryContext {\n return this.query_;\n }\n}\n\nexport function viewGetServerCache(view: View): Node | null {\n return view.viewCache_.serverCache.getNode();\n}\n\nexport function viewGetCompleteNode(view: View): Node | null {\n return viewCacheGetCompleteEventSnap(view.viewCache_);\n}\n\nexport function viewGetCompleteServerCache(\n view: View,\n path: Path\n): Node | null {\n const cache = viewCacheGetCompleteServerSnap(view.viewCache_);\n if (cache) {\n // If this isn't a \"loadsAllData\" view, then cache isn't actually a complete cache and\n // we need to see if it contains the child we're interested in.\n if (\n view.query._queryParams.loadsAllData() ||\n (!pathIsEmpty(path) &&\n !cache.getImmediateChild(pathGetFront(path)).isEmpty())\n ) {\n return cache.getChild(path);\n }\n }\n return null;\n}\n\nexport function viewIsEmpty(view: View): boolean {\n return view.eventRegistrations_.length === 0;\n}\n\nexport function viewAddEventRegistration(\n view: View,\n eventRegistration: EventRegistration\n) {\n view.eventRegistrations_.push(eventRegistration);\n}\n\n/**\n * @param eventRegistration - If null, remove all callbacks.\n * @param cancelError - If a cancelError is provided, appropriate cancel events will be returned.\n * @returns Cancel events, if cancelError was provided.\n */\nexport function viewRemoveEventRegistration(\n view: View,\n eventRegistration: EventRegistration | null,\n cancelError?: Error\n): Event[] {\n const cancelEvents: CancelEvent[] = [];\n if (cancelError) {\n assert(\n eventRegistration == null,\n 'A cancel should cancel all event registrations.'\n );\n const path = view.query._path;\n view.eventRegistrations_.forEach(registration => {\n const maybeEvent = registration.createCancelEvent(cancelError, path);\n if (maybeEvent) {\n cancelEvents.push(maybeEvent);\n }\n });\n }\n\n if (eventRegistration) {\n let remaining = [];\n for (let i = 0; i < view.eventRegistrations_.length; ++i) {\n const existing = view.eventRegistrations_[i];\n if (!existing.matches(eventRegistration)) {\n remaining.push(existing);\n } else if (eventRegistration.hasAnyCallback()) {\n // We're removing just this one\n remaining = remaining.concat(view.eventRegistrations_.slice(i + 1));\n break;\n }\n }\n view.eventRegistrations_ = remaining;\n } else {\n view.eventRegistrations_ = [];\n }\n return cancelEvents;\n}\n\n/**\n * Applies the given Operation, updates our cache, and returns the appropriate events.\n */\nexport function viewApplyOperation(\n view: View,\n operation: Operation,\n writesCache: WriteTreeRef,\n completeServerCache: Node | null\n): Event[] {\n if (\n operation.type === OperationType.MERGE &&\n operation.source.queryId !== null\n ) {\n assert(\n viewCacheGetCompleteServerSnap(view.viewCache_),\n 'We should always have a full cache before handling merges'\n );\n assert(\n viewCacheGetCompleteEventSnap(view.viewCache_),\n 'Missing event cache, even though we have a server cache'\n );\n }\n\n const 
oldViewCache = view.viewCache_;\n const result = viewProcessorApplyOperation(\n view.processor_,\n oldViewCache,\n operation,\n writesCache,\n completeServerCache\n );\n viewProcessorAssertIndexed(view.processor_, result.viewCache);\n\n assert(\n result.viewCache.serverCache.isFullyInitialized() ||\n !oldViewCache.serverCache.isFullyInitialized(),\n 'Once a server snap is complete, it should never go back'\n );\n\n view.viewCache_ = result.viewCache;\n\n return viewGenerateEventsForChanges_(\n view,\n result.changes,\n result.viewCache.eventCache.getNode(),\n null\n );\n}\n\nexport function viewGetInitialEvents(\n view: View,\n registration: EventRegistration\n): Event[] {\n const eventSnap = view.viewCache_.eventCache;\n const initialChanges: Change[] = [];\n if (!eventSnap.getNode().isLeafNode()) {\n const eventNode = eventSnap.getNode() as ChildrenNode;\n eventNode.forEachChild(PRIORITY_INDEX, (key, childNode) => {\n initialChanges.push(changeChildAdded(key, childNode));\n });\n }\n if (eventSnap.isFullyInitialized()) {\n initialChanges.push(changeValue(eventSnap.getNode()));\n }\n return viewGenerateEventsForChanges_(\n view,\n initialChanges,\n eventSnap.getNode(),\n registration\n );\n}\n\nfunction viewGenerateEventsForChanges_(\n view: View,\n changes: Change[],\n eventCache: Node,\n eventRegistration?: EventRegistration\n): Event[] {\n const registrations = eventRegistration\n ? [eventRegistration]\n : view.eventRegistrations_;\n return eventGeneratorGenerateEventsForChanges(\n view.eventGenerator_,\n changes,\n eventCache,\n registrations\n );\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { assert } from '@firebase/util';\n\nimport { ChildrenNode } from '../snap/ChildrenNode';\nimport { PRIORITY_INDEX } from '../snap/indexes/PriorityIndex';\nimport { LeafNode } from '../snap/LeafNode';\nimport { Node } from '../snap/Node';\nimport { nodeFromJSON } from '../snap/nodeFromJSON';\nimport { SyncTree, syncTreeCalcCompleteEventCache } from '../SyncTree';\n\nimport { Indexable } from './misc';\nimport { Path, pathChild } from './Path';\n\n/* It's critical for performance that we do not calculate actual values from a SyncTree\n * unless and until the value is needed. 
Because we expose both a SyncTree and Node\n * version of deferred value resolution, we ned a wrapper class that will let us share\n * code.\n *\n * @see https://github.com/firebase/firebase-js-sdk/issues/2487\n */\ninterface ValueProvider {\n getImmediateChild(childName: string): ValueProvider;\n node(): Node;\n}\n\nclass ExistingValueProvider implements ValueProvider {\n constructor(readonly node_: Node) {}\n\n getImmediateChild(childName: string): ValueProvider {\n const child = this.node_.getImmediateChild(childName);\n return new ExistingValueProvider(child);\n }\n\n node(): Node {\n return this.node_;\n }\n}\n\nclass DeferredValueProvider implements ValueProvider {\n private syncTree_: SyncTree;\n private path_: Path;\n\n constructor(syncTree: SyncTree, path: Path) {\n this.syncTree_ = syncTree;\n this.path_ = path;\n }\n\n getImmediateChild(childName: string): ValueProvider {\n const childPath = pathChild(this.path_, childName);\n return new DeferredValueProvider(this.syncTree_, childPath);\n }\n\n node(): Node {\n return syncTreeCalcCompleteEventCache(this.syncTree_, this.path_);\n }\n}\n\n/**\n * Generate placeholders for deferred values.\n */\nexport const generateWithValues = function (\n values: {\n [k: string]: unknown;\n } | null\n): { [k: string]: unknown } {\n values = values || {};\n values['timestamp'] = values['timestamp'] || new Date().getTime();\n return values;\n};\n\n/**\n * Value to use when firing local events. When writing server values, fire\n * local events with an approximate value, otherwise return value as-is.\n */\nexport const resolveDeferredLeafValue = function (\n value: { [k: string]: unknown } | string | number | boolean,\n existingVal: ValueProvider,\n serverValues: { [k: string]: unknown }\n): string | number | boolean {\n if (!value || typeof value !== 'object') {\n return value as string | number | boolean;\n }\n assert('.sv' in value, 'Unexpected leaf node or priority contents');\n\n if (typeof value['.sv'] === 'string') {\n return resolveScalarDeferredValue(value['.sv'], existingVal, serverValues);\n } else if (typeof value['.sv'] === 'object') {\n return resolveComplexDeferredValue(value['.sv'], existingVal, serverValues);\n } else {\n assert(false, 'Unexpected server value: ' + JSON.stringify(value, null, 2));\n }\n};\n\nconst resolveScalarDeferredValue = function (\n op: string,\n existing: ValueProvider,\n serverValues: { [k: string]: unknown }\n): string | number | boolean {\n switch (op) {\n case 'timestamp':\n return serverValues['timestamp'] as string | number | boolean;\n default:\n assert(false, 'Unexpected server value: ' + op);\n }\n};\n\nconst resolveComplexDeferredValue = function (\n op: object,\n existing: ValueProvider,\n unused: { [k: string]: unknown }\n): string | number | boolean {\n if (!op.hasOwnProperty('increment')) {\n assert(false, 'Unexpected server value: ' + JSON.stringify(op, null, 2));\n }\n const delta = op['increment'];\n if (typeof delta !== 'number') {\n assert(false, 'Unexpected increment value: ' + delta);\n }\n\n const existingNode = existing.node();\n assert(\n existingNode !== null && typeof existingNode !== 'undefined',\n 'Expected ChildrenNode.EMPTY_NODE for nulls'\n );\n\n // Incrementing a non-number sets the value to the incremented amount\n if (!existingNode.isLeafNode()) {\n return delta;\n }\n\n const leaf = existingNode as LeafNode;\n const existingVal = leaf.getValue();\n if (typeof existingVal !== 'number') {\n return delta;\n }\n\n // No need to do over/underflow arithmetic here because JS only 
handles floats under the covers\n return existingVal + delta;\n};\n\n/**\n * Recursively replace all deferred values and priorities in the tree with the\n * specified generated replacement values.\n * @param path - path to which write is relative\n * @param node - new data written at path\n * @param syncTree - current data\n */\nexport const resolveDeferredValueTree = function (\n path: Path,\n node: Node,\n syncTree: SyncTree,\n serverValues: Indexable\n): Node {\n return resolveDeferredValue(\n node,\n new DeferredValueProvider(syncTree, path),\n serverValues\n );\n};\n\n/**\n * Recursively replace all deferred values and priorities in the node with the\n * specified generated replacement values. If there are no server values in the node,\n * it'll be returned as-is.\n */\nexport const resolveDeferredValueSnapshot = function (\n node: Node,\n existing: Node,\n serverValues: Indexable\n): Node {\n return resolveDeferredValue(\n node,\n new ExistingValueProvider(existing),\n serverValues\n );\n};\n\nfunction resolveDeferredValue(\n node: Node,\n existingVal: ValueProvider,\n serverValues: Indexable\n): Node {\n const rawPri = node.getPriority().val() as\n | Indexable\n | boolean\n | null\n | number\n | string;\n const priority = resolveDeferredLeafValue(\n rawPri,\n existingVal.getImmediateChild('.priority'),\n serverValues\n );\n let newNode: Node;\n\n if (node.isLeafNode()) {\n const leafNode = node as LeafNode;\n const value = resolveDeferredLeafValue(\n leafNode.getValue(),\n existingVal,\n serverValues\n );\n if (\n value !== leafNode.getValue() ||\n priority !== leafNode.getPriority().val()\n ) {\n return new LeafNode(value, nodeFromJSON(priority));\n } else {\n return node;\n }\n } else {\n const childrenNode = node as ChildrenNode;\n newNode = childrenNode;\n if (priority !== childrenNode.getPriority().val()) {\n newNode = newNode.updatePriority(new LeafNode(priority));\n }\n childrenNode.forEachChild(PRIORITY_INDEX, (childName, childNode) => {\n const newChildNode = resolveDeferredValue(\n childNode,\n existingVal.getImmediateChild(childName),\n serverValues\n );\n if (newChildNode !== childNode) {\n newNode = newNode.updateImmediateChild(childName, newChildNode);\n }\n });\n return newNode;\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { contains, safeGet } from '@firebase/util';\n\nimport { Path, pathGetFront, pathPopFront } from './Path';\nimport { each } from './util';\n\n/**\n * Node in a Tree.\n */\nexport interface TreeNode {\n // TODO: Consider making accessors that create children and value lazily or\n // separate Internal / Leaf 'types'.\n children: Record>;\n childCount: number;\n value?: T;\n}\n\n/**\n * A light-weight tree, traversable by path. 
Nodes can have both values and children.\n * Nodes are not enumerated (by forEachChild) unless they have a value or non-empty\n * children.\n */\nexport class Tree<T> {\n /**\n * @param name - Optional name of the node.\n * @param parent - Optional parent node.\n * @param node - Optional node to wrap.\n */\n constructor(\n readonly name: string = '',\n readonly parent: Tree<T> | null = null,\n public node: TreeNode<T> = { children: {}, childCount: 0 }\n ) {}\n}\n\n/**\n * Returns a sub-Tree for the given path.\n *\n * @param pathObj - Path to look up.\n * @returns Tree for path.\n */\nexport function treeSubTree<T>(tree: Tree<T>, pathObj: string | Path): Tree<T> {\n // TODO: Require pathObj to be Path?\n let path = pathObj instanceof Path ? pathObj : new Path(pathObj);\n let child = tree,\n next = pathGetFront(path);\n while (next !== null) {\n const childNode = safeGet(child.node.children, next) || {\n children: {},\n childCount: 0\n };\n child = new Tree(next, child, childNode);\n path = pathPopFront(path);\n next = pathGetFront(path);\n }\n\n return child;\n}\n\n/**\n * Returns the data associated with this tree node.\n *\n * @returns The data or null if no data exists.\n */\nexport function treeGetValue<T>(tree: Tree<T>): T | undefined {\n return tree.node.value;\n}\n\n/**\n * Sets data to this tree node.\n *\n * @param value - Value to set.\n */\nexport function treeSetValue<T>(tree: Tree<T>, value: T | undefined): void {\n tree.node.value = value;\n treeUpdateParents(tree);\n}\n\n/**\n * @returns Whether the tree has any children.\n */\nexport function treeHasChildren<T>(tree: Tree<T>): boolean {\n return tree.node.childCount > 0;\n}\n\n/**\n * @returns Whether the tree is empty (no value or children).\n */\nexport function treeIsEmpty<T>(tree: Tree<T>): boolean {\n return treeGetValue(tree) === undefined && !treeHasChildren(tree);\n}\n\n/**\n * Calls action for each child of this tree node.\n *\n * @param action - Action to be called for each child.\n */\nexport function treeForEachChild<T>(\n tree: Tree<T>,\n action: (tree: Tree<T>) => void\n): void {\n each(tree.node.children, (child: string, childTree: TreeNode<T>) => {\n action(new Tree(child, tree, childTree));\n });\n}\n\n/**\n * Does a depth-first traversal of this node's descendants, calling action for each one.\n *\n * @param action - Action to be called for each child.\n * @param includeSelf - Whether to call action on this node as well. Defaults to\n * false.\n * @param childrenFirst - Whether to call action on children before calling it on\n * parent.\n */\nexport function treeForEachDescendant<T>(\n tree: Tree<T>,\n action: (tree: Tree<T>) => void,\n includeSelf?: boolean,\n childrenFirst?: boolean\n): void {\n if (includeSelf && !childrenFirst) {\n action(tree);\n }\n\n treeForEachChild(tree, child => {\n treeForEachDescendant(child, action, true, childrenFirst);\n });\n\n if (includeSelf && childrenFirst) {\n action(tree);\n }\n}\n\n/**\n * Calls action on each ancestor node.\n *\n * @param action - Action to be called on each parent; return\n * true to abort.\n * @param includeSelf - Whether to call action on this node as well.\n * @returns true if the action callback returned true.\n */\nexport function treeForEachAncestor<T>(\n tree: Tree<T>,\n action: (tree: Tree<T>) => unknown,\n includeSelf?: boolean\n): boolean {\n let node = includeSelf ? tree : tree.parent;\n while (node !== null) {\n if (action(node)) {\n return true;\n }\n node = node.parent;\n }\n return false;\n}\n\n/**\n * Does a depth-first traversal of this node's descendants. 
When a descendant with a value\n * is found, action is called on it and traversal does not continue inside the node.\n * Action is *not* called on this node.\n *\n * @param action - Action to be called for each child.\n */\nexport function treeForEachImmediateDescendantWithValue<T>(\n tree: Tree<T>,\n action: (tree: Tree<T>) => void\n): void {\n treeForEachChild(tree, child => {\n if (treeGetValue(child) !== undefined) {\n action(child);\n } else {\n treeForEachImmediateDescendantWithValue(child, action);\n }\n });\n}\n\n/**\n * @returns The path of this tree node, as a Path.\n */\nexport function treeGetPath<T>(tree: Tree<T>) {\n return new Path(\n tree.parent === null\n ? tree.name\n : treeGetPath(tree.parent) + '/' + tree.name\n );\n}\n\n/**\n * Adds or removes this child from its parent based on whether it's empty or not.\n */\nfunction treeUpdateParents<T>(tree: Tree<T>) {\n if (tree.parent !== null) {\n treeUpdateChild(tree.parent, tree.name, tree);\n }\n}\n\n/**\n * Adds or removes the passed child to this tree node, depending on whether it's empty.\n *\n * @param childName - The name of the child to update.\n * @param child - The child to update.\n */\nfunction treeUpdateChild<T>(tree: Tree<T>, childName: string, child: Tree<T>) {\n const childEmpty = treeIsEmpty(child);\n const childExists = contains(tree.node.children, childName);\n if (childEmpty && childExists) {\n delete tree.node.children[childName];\n tree.node.childCount--;\n treeUpdateParents(tree);\n } else if (!childEmpty && !childExists) {\n tree.node.children[childName] = child.node;\n tree.node.childCount++;\n treeUpdateParents(tree);\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n contains,\n errorPrefix as errorPrefixFxn,\n safeGet,\n stringLength\n} from '@firebase/util';\n\nimport { RepoInfo } from '../RepoInfo';\n\nimport {\n Path,\n pathChild,\n pathCompare,\n pathContains,\n pathGetBack,\n pathGetFront,\n pathSlice,\n ValidationPath,\n validationPathPop,\n validationPathPush,\n validationPathToErrorString\n} from './Path';\nimport { each, isInvalidJSONNumber } from './util';\n\n/**\n * True for invalid Firebase keys\n */\nexport const INVALID_KEY_REGEX_ = /[\\[\\].#$\\/\\u0000-\\u001F\\u007F]/;\n\n/**\n * True for invalid Firebase paths.\n * Allows '/' in paths.\n */\nexport const INVALID_PATH_REGEX_ = /[\\[\\].#$\\u0000-\\u001F\\u007F]/;\n\n/**\n * Maximum number of characters to allow in leaf value\n */\nexport const MAX_LEAF_SIZE_ = 10 * 1024 * 1024;\n\nexport const isValidKey = function (key: unknown): boolean {\n return (\n typeof key === 'string' && key.length !== 0 && !INVALID_KEY_REGEX_.test(key)\n );\n};\n\nexport const isValidPathString = function (pathString: string): boolean {\n return (\n typeof pathString === 'string' &&\n pathString.length !== 0 &&\n !INVALID_PATH_REGEX_.test(pathString)\n );\n};\n\nexport const isValidRootPathString = function (pathString: string): boolean {\n if (pathString) {\n // Allow '/.info/' at the beginning.\n 
pathString = pathString.replace(/^\\/*\\.info(\\/|$)/, '/');\n }\n\n return isValidPathString(pathString);\n};\n\nexport const isValidPriority = function (priority: unknown): boolean {\n return (\n priority === null ||\n typeof priority === 'string' ||\n (typeof priority === 'number' && !isInvalidJSONNumber(priority)) ||\n (priority &&\n typeof priority === 'object' &&\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n contains(priority as any, '.sv'))\n );\n};\n\n/**\n * Pre-validate a datum passed as an argument to Firebase function.\n */\nexport const validateFirebaseDataArg = function (\n fnName: string,\n value: unknown,\n path: Path,\n optional: boolean\n) {\n if (optional && value === undefined) {\n return;\n }\n\n validateFirebaseData(errorPrefixFxn(fnName, 'value'), value, path);\n};\n\n/**\n * Validate a data object client-side before sending to server.\n */\nexport const validateFirebaseData = function (\n errorPrefix: string,\n data: unknown,\n path_: Path | ValidationPath\n) {\n const path =\n path_ instanceof Path ? new ValidationPath(path_, errorPrefix) : path_;\n\n if (data === undefined) {\n throw new Error(\n errorPrefix + 'contains undefined ' + validationPathToErrorString(path)\n );\n }\n if (typeof data === 'function') {\n throw new Error(\n errorPrefix +\n 'contains a function ' +\n validationPathToErrorString(path) +\n ' with contents = ' +\n data.toString()\n );\n }\n if (isInvalidJSONNumber(data)) {\n throw new Error(\n errorPrefix +\n 'contains ' +\n data.toString() +\n ' ' +\n validationPathToErrorString(path)\n );\n }\n\n // Check max leaf size, but try to avoid the utf8 conversion if we can.\n if (\n typeof data === 'string' &&\n data.length > MAX_LEAF_SIZE_ / 3 &&\n stringLength(data) > MAX_LEAF_SIZE_\n ) {\n throw new Error(\n errorPrefix +\n 'contains a string greater than ' +\n MAX_LEAF_SIZE_ +\n ' utf8 bytes ' +\n validationPathToErrorString(path) +\n \" ('\" +\n data.substring(0, 50) +\n \"...')\"\n );\n }\n\n // TODO = Perf = Consider combining the recursive validation of keys into NodeFromJSON\n // to save extra walking of large objects.\n if (data && typeof data === 'object') {\n let hasDotValue = false;\n let hasActualChild = false;\n each(data, (key: string, value: unknown) => {\n if (key === '.value') {\n hasDotValue = true;\n } else if (key !== '.priority' && key !== '.sv') {\n hasActualChild = true;\n if (!isValidKey(key)) {\n throw new Error(\n errorPrefix +\n ' contains an invalid key (' +\n key +\n ') ' +\n validationPathToErrorString(path) +\n '. 
Keys must be non-empty strings ' +\n 'and can\\'t contain \".\", \"#\", \"$\", \"/\", \"[\", or \"]\"'\n );\n }\n }\n\n validationPathPush(path, key);\n validateFirebaseData(errorPrefix, value, path);\n validationPathPop(path);\n });\n\n if (hasDotValue && hasActualChild) {\n throw new Error(\n errorPrefix +\n ' contains \".value\" child ' +\n validationPathToErrorString(path) +\n ' in addition to actual children.'\n );\n }\n }\n};\n\n/**\n * Pre-validate paths passed in the firebase function.\n */\nexport const validateFirebaseMergePaths = function (\n errorPrefix: string,\n mergePaths: Path[]\n) {\n let i, curPath: Path;\n for (i = 0; i < mergePaths.length; i++) {\n curPath = mergePaths[i];\n const keys = pathSlice(curPath);\n for (let j = 0; j < keys.length; j++) {\n if (keys[j] === '.priority' && j === keys.length - 1) {\n // .priority is OK\n } else if (!isValidKey(keys[j])) {\n throw new Error(\n errorPrefix +\n 'contains an invalid key (' +\n keys[j] +\n ') in path ' +\n curPath.toString() +\n '. Keys must be non-empty strings ' +\n 'and can\\'t contain \".\", \"#\", \"$\", \"/\", \"[\", or \"]\"'\n );\n }\n }\n }\n\n // Check that update keys are not descendants of each other.\n // We rely on the property that sorting guarantees that ancestors come\n // right before descendants.\n mergePaths.sort(pathCompare);\n let prevPath: Path | null = null;\n for (i = 0; i < mergePaths.length; i++) {\n curPath = mergePaths[i];\n if (prevPath !== null && pathContains(prevPath, curPath)) {\n throw new Error(\n errorPrefix +\n 'contains a path ' +\n prevPath.toString() +\n ' that is ancestor of another path ' +\n curPath.toString()\n );\n }\n prevPath = curPath;\n }\n};\n\n/**\n * pre-validate an object passed as an argument to firebase function (\n * must be an object - e.g. 
for firebase.update()).\n */\nexport const validateFirebaseMergeDataArg = function (\n fnName: string,\n data: unknown,\n path: Path,\n optional: boolean\n) {\n if (optional && data === undefined) {\n return;\n }\n\n const errorPrefix = errorPrefixFxn(fnName, 'values');\n\n if (!(data && typeof data === 'object') || Array.isArray(data)) {\n throw new Error(\n errorPrefix + ' must be an object containing the children to replace.'\n );\n }\n\n const mergePaths: Path[] = [];\n each(data, (key: string, value: unknown) => {\n const curPath = new Path(key);\n validateFirebaseData(errorPrefix, value, pathChild(path, curPath));\n if (pathGetBack(curPath) === '.priority') {\n if (!isValidPriority(value)) {\n throw new Error(\n errorPrefix +\n \"contains an invalid value for '\" +\n curPath.toString() +\n \"', which must be a valid \" +\n 'Firebase priority (a string, finite number, server value, or null).'\n );\n }\n }\n mergePaths.push(curPath);\n });\n validateFirebaseMergePaths(errorPrefix, mergePaths);\n};\n\nexport const validatePriority = function (\n fnName: string,\n priority: unknown,\n optional: boolean\n) {\n if (optional && priority === undefined) {\n return;\n }\n if (isInvalidJSONNumber(priority)) {\n throw new Error(\n errorPrefixFxn(fnName, 'priority') +\n 'is ' +\n priority.toString() +\n ', but must be a valid Firebase priority (a string, finite number, ' +\n 'server value, or null).'\n );\n }\n // Special case to allow importing data with a .sv.\n if (!isValidPriority(priority)) {\n throw new Error(\n errorPrefixFxn(fnName, 'priority') +\n 'must be a valid Firebase priority ' +\n '(a string, finite number, server value, or null).'\n );\n }\n};\n\nexport const validateKey = function (\n fnName: string,\n argumentName: string,\n key: string,\n optional: boolean\n) {\n if (optional && key === undefined) {\n return;\n }\n if (!isValidKey(key)) {\n throw new Error(\n errorPrefixFxn(fnName, argumentName) +\n 'was an invalid key = \"' +\n key +\n '\". Firebase keys must be non-empty strings and ' +\n 'can\\'t contain \".\", \"#\", \"$\", \"/\", \"[\", or \"]\").'\n );\n }\n};\n\n/**\n * @internal\n */\nexport const validatePathString = function (\n fnName: string,\n argumentName: string,\n pathString: string,\n optional: boolean\n) {\n if (optional && pathString === undefined) {\n return;\n }\n\n if (!isValidPathString(pathString)) {\n throw new Error(\n errorPrefixFxn(fnName, argumentName) +\n 'was an invalid path = \"' +\n pathString +\n '\". 
Paths must be non-empty strings and ' +\n 'can\\'t contain \".\", \"#\", \"$\", \"[\", or \"]\"'\n );\n }\n};\n\nexport const validateRootPathString = function (\n fnName: string,\n argumentName: string,\n pathString: string,\n optional: boolean\n) {\n if (pathString) {\n // Allow '/.info/' at the beginning.\n pathString = pathString.replace(/^\\/*\\.info(\\/|$)/, '/');\n }\n\n validatePathString(fnName, argumentName, pathString, optional);\n};\n\n/**\n * @internal\n */\nexport const validateWritablePath = function (fnName: string, path: Path) {\n if (pathGetFront(path) === '.info') {\n throw new Error(fnName + \" failed = Can't modify data under /.info/\");\n }\n};\n\nexport const validateUrl = function (\n fnName: string,\n parsedUrl: { repoInfo: RepoInfo; path: Path }\n) {\n // TODO = Validate server better.\n const pathString = parsedUrl.path.toString();\n if (\n !(typeof parsedUrl.repoInfo.host === 'string') ||\n parsedUrl.repoInfo.host.length === 0 ||\n (!isValidKey(parsedUrl.repoInfo.namespace) &&\n parsedUrl.repoInfo.host.split(':')[0] !== 'localhost') ||\n (pathString.length !== 0 && !isValidRootPathString(pathString))\n ) {\n throw new Error(\n errorPrefixFxn(fnName, 'url') +\n 'must be a valid firebase URL and ' +\n 'the path can\\'t contain \".\", \"#\", \"$\", \"[\", or \"]\".'\n );\n }\n};\n\nexport const validateString = function (\n fnName: string,\n argumentName: string,\n string: unknown,\n optional: boolean\n) {\n if (optional && string === undefined) {\n return;\n }\n if (!(typeof string === 'string')) {\n throw new Error(\n errorPrefixFxn(fnName, argumentName) + 'must be a valid string.'\n );\n }\n};\n\nexport const validateObject = function (\n fnName: string,\n argumentName: string,\n obj: unknown,\n optional: boolean\n) {\n if (optional && obj === undefined) {\n return;\n }\n if (!(obj && typeof obj === 'object') || obj === null) {\n throw new Error(\n errorPrefixFxn(fnName, argumentName) + 'must be a valid object.'\n );\n }\n};\n\nexport const validateObjectContainsKey = function (\n fnName: string,\n argumentName: string,\n obj: unknown,\n key: string,\n optional: boolean,\n optType?: string\n) {\n const objectContainsKey =\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n obj && typeof obj === 'object' && contains(obj as any, key);\n\n if (!objectContainsKey) {\n if (optional) {\n return;\n } else {\n throw new Error(\n errorPrefixFxn(fnName, argumentName) +\n 'must contain the key \"' +\n key +\n '\"'\n );\n }\n }\n\n if (optType) {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n const val = safeGet(obj as any, key);\n if (\n (optType === 'number' && !(typeof val === 'number')) ||\n (optType === 'string' && !(typeof val === 'string')) ||\n (optType === 'boolean' && !(typeof val === 'boolean')) ||\n (optType === 'function' && !(typeof val === 'function')) ||\n (optType === 'object' && !(typeof val === 'object') && val)\n ) {\n if (optional) {\n throw new Error(\n errorPrefixFxn(fnName, argumentName) +\n 'contains invalid value for key \"' +\n key +\n '\" (must be of type \"' +\n optType +\n '\")'\n );\n } else {\n throw new Error(\n errorPrefixFxn(fnName, argumentName) +\n 'must contain the key \"' +\n key +\n '\" with type \"' +\n optType +\n '\"'\n );\n }\n }\n }\n};\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * 
http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Path, pathContains, pathEquals } from '../util/Path';\nimport { exceptionGuard, log, logger } from '../util/util';\n\nimport { Event } from './Event';\n\n/**\n * The event queue serves a few purposes:\n * 1. It ensures we maintain event order in the face of event callbacks doing operations that result in more\n * events being queued.\n * 2. raiseQueuedEvents() handles being called reentrantly nicely. That is, if in the course of raising events,\n * raiseQueuedEvents() is called again, the \"inner\" call will pick up raising events where the \"outer\" call\n * left off, ensuring that the events are still raised synchronously and in order.\n * 3. You can use raiseEventsAtPath and raiseEventsForChangedPath to ensure only relevant previously-queued\n * events are raised synchronously.\n *\n * NOTE: This can all go away if/when we move to async events.\n *\n */\nexport class EventQueue {\n eventLists_: EventList[] = [];\n\n /**\n * Tracks recursion depth of raiseQueuedEvents_, for debugging purposes.\n */\n recursionDepth_ = 0;\n}\n\n/**\n * @param eventDataList - The new events to queue.\n */\nexport function eventQueueQueueEvents(\n eventQueue: EventQueue,\n eventDataList: Event[]\n) {\n // We group events by path, storing them in a single EventList, to make it easier to skip over them quickly.\n let currList: EventList | null = null;\n for (let i = 0; i < eventDataList.length; i++) {\n const data = eventDataList[i];\n const path = data.getPath();\n if (currList !== null && !pathEquals(path, currList.path)) {\n eventQueue.eventLists_.push(currList);\n currList = null;\n }\n\n if (currList === null) {\n currList = { events: [], path };\n }\n\n currList.events.push(data);\n }\n if (currList) {\n eventQueue.eventLists_.push(currList);\n }\n}\n\n/**\n * Queues the specified events and synchronously raises all events (including previously queued ones)\n * for the specified path.\n *\n * It is assumed that the new events are all for the specified path.\n *\n * @param path - The path to raise events for.\n * @param eventDataList - The new events to raise.\n */\nexport function eventQueueRaiseEventsAtPath(\n eventQueue: EventQueue,\n path: Path,\n eventDataList: Event[]\n) {\n eventQueueQueueEvents(eventQueue, eventDataList);\n eventQueueRaiseQueuedEventsMatchingPredicate(eventQueue, eventPath =>\n pathEquals(eventPath, path)\n );\n}\n\n/**\n * Queues the specified events and synchronously raises all events (including previously queued ones) for\n * locations related to the specified change path (i.e. 
all ancestors and descendants).\n *\n * It is assumed that the new events are all related (ancestor or descendant) to the specified path.\n *\n * @param changedPath - The path to raise events for.\n * @param eventDataList - The events to raise\n */\nexport function eventQueueRaiseEventsForChangedPath(\n eventQueue: EventQueue,\n changedPath: Path,\n eventDataList: Event[]\n) {\n eventQueueQueueEvents(eventQueue, eventDataList);\n eventQueueRaiseQueuedEventsMatchingPredicate(\n eventQueue,\n eventPath =>\n pathContains(eventPath, changedPath) ||\n pathContains(changedPath, eventPath)\n );\n}\n\nfunction eventQueueRaiseQueuedEventsMatchingPredicate(\n eventQueue: EventQueue,\n predicate: (path: Path) => boolean\n) {\n eventQueue.recursionDepth_++;\n\n let sentAll = true;\n for (let i = 0; i < eventQueue.eventLists_.length; i++) {\n const eventList = eventQueue.eventLists_[i];\n if (eventList) {\n const eventPath = eventList.path;\n if (predicate(eventPath)) {\n eventListRaise(eventQueue.eventLists_[i]);\n eventQueue.eventLists_[i] = null;\n } else {\n sentAll = false;\n }\n }\n }\n\n if (sentAll) {\n eventQueue.eventLists_ = [];\n }\n\n eventQueue.recursionDepth_--;\n}\n\ninterface EventList {\n events: Event[];\n path: Path;\n}\n\n/**\n * Iterates through the list and raises each event\n */\nfunction eventListRaise(eventList: EventList) {\n for (let i = 0; i < eventList.events.length; i++) {\n const eventData = eventList.events[i];\n if (eventData !== null) {\n eventList.events[i] = null;\n const eventFn = eventData.getEventRunner();\n if (logger) {\n log('event: ' + eventData.toString());\n }\n exceptionGuard(eventFn);\n }\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n assert,\n contains,\n isEmpty,\n map,\n safeGet,\n stringify\n} from '@firebase/util';\n\nimport { ValueEventRegistration } from '../api/Reference_impl';\n\nimport { AppCheckTokenProvider } from './AppCheckTokenProvider';\nimport { AuthTokenProvider } from './AuthTokenProvider';\nimport { PersistentConnection } from './PersistentConnection';\nimport { ReadonlyRestClient } from './ReadonlyRestClient';\nimport { RepoInfo } from './RepoInfo';\nimport { ServerActions } from './ServerActions';\nimport { ChildrenNode } from './snap/ChildrenNode';\nimport { Node } from './snap/Node';\nimport { nodeFromJSON } from './snap/nodeFromJSON';\nimport { SnapshotHolder } from './SnapshotHolder';\nimport {\n newSparseSnapshotTree,\n SparseSnapshotTree,\n sparseSnapshotTreeForEachTree,\n sparseSnapshotTreeForget,\n sparseSnapshotTreeRemember\n} from './SparseSnapshotTree';\nimport { StatsCollection } from './stats/StatsCollection';\nimport { StatsListener } from './stats/StatsListener';\nimport {\n statsManagerGetCollection,\n statsManagerGetOrCreateReporter\n} from './stats/StatsManager';\nimport { StatsReporter, statsReporterIncludeStat } from './stats/StatsReporter';\nimport {\n SyncTree,\n syncTreeAckUserWrite,\n syncTreeAddEventRegistration,\n 
syncTreeApplyServerMerge,\n syncTreeApplyServerOverwrite,\n syncTreeApplyTaggedQueryMerge,\n syncTreeApplyTaggedQueryOverwrite,\n syncTreeApplyUserMerge,\n syncTreeApplyUserOverwrite,\n syncTreeCalcCompleteEventCache,\n syncTreeGetServerValue,\n syncTreeRemoveEventRegistration,\n syncTreeTagForQuery\n} from './SyncTree';\nimport { Indexable } from './util/misc';\nimport {\n newEmptyPath,\n newRelativePath,\n Path,\n pathChild,\n pathGetFront,\n pathPopFront\n} from './util/Path';\nimport {\n generateWithValues,\n resolveDeferredValueSnapshot,\n resolveDeferredValueTree\n} from './util/ServerValues';\nimport {\n Tree,\n treeForEachAncestor,\n treeForEachChild,\n treeForEachDescendant,\n treeGetPath,\n treeGetValue,\n treeHasChildren,\n treeSetValue,\n treeSubTree\n} from './util/Tree';\nimport {\n beingCrawled,\n each,\n exceptionGuard,\n log,\n LUIDGenerator,\n warn\n} from './util/util';\nimport { isValidPriority, validateFirebaseData } from './util/validation';\nimport { Event } from './view/Event';\nimport {\n EventQueue,\n eventQueueQueueEvents,\n eventQueueRaiseEventsAtPath,\n eventQueueRaiseEventsForChangedPath\n} from './view/EventQueue';\nimport { EventRegistration, QueryContext } from './view/EventRegistration';\n\nconst INTERRUPT_REASON = 'repo_interrupt';\n\n/**\n * If a transaction does not succeed after 25 retries, we abort it. Among other\n * things this ensure that if there's ever a bug causing a mismatch between\n * client / server hashes for some data, we won't retry indefinitely.\n */\nconst MAX_TRANSACTION_RETRIES = 25;\n\nconst enum TransactionStatus {\n // We've run the transaction and updated transactionResultData_ with the result, but it isn't currently sent to the\n // server. A transaction will go from RUN -> SENT -> RUN if it comes back from the server as rejected due to\n // mismatched hash.\n RUN,\n\n // We've run the transaction and sent it to the server and it's currently outstanding (hasn't come back as accepted\n // or rejected yet).\n SENT,\n\n // Temporary state used to mark completed transactions (whether successful or aborted). The transaction will be\n // removed when we get a chance to prune completed ones.\n COMPLETED,\n\n // Used when an already-sent transaction needs to be aborted (e.g. 
due to a conflicting set() call that was made).\n // If it comes back as unsuccessful, we'll abort it.\n SENT_NEEDS_ABORT,\n\n // Temporary state used to mark transactions that need to be aborted.\n NEEDS_ABORT\n}\n\ninterface Transaction {\n path: Path;\n update: (a: unknown) => unknown;\n onComplete: (\n error: Error | null,\n committed: boolean,\n node: Node | null\n ) => void;\n status: TransactionStatus;\n order: number;\n applyLocally: boolean;\n retryCount: number;\n unwatcher: () => void;\n abortReason: string | null;\n currentWriteId: number;\n currentInputSnapshot: Node | null;\n currentOutputSnapshotRaw: Node | null;\n currentOutputSnapshotResolved: Node | null;\n}\n\n/**\n * A connection to a single data repository.\n */\nexport class Repo {\n /** Key for uniquely identifying this repo, used in RepoManager */\n readonly key: string;\n\n dataUpdateCount = 0;\n infoSyncTree_: SyncTree;\n serverSyncTree_: SyncTree;\n\n stats_: StatsCollection;\n statsListener_: StatsListener | null = null;\n eventQueue_ = new EventQueue();\n nextWriteId_ = 1;\n server_: ServerActions;\n statsReporter_: StatsReporter;\n infoData_: SnapshotHolder;\n interceptServerDataCallback_: ((a: string, b: unknown) => void) | null = null;\n\n /** A list of data pieces and paths to be set when this client disconnects. */\n onDisconnect_: SparseSnapshotTree = newSparseSnapshotTree();\n\n /** Stores queues of outstanding transactions for Firebase locations. */\n transactionQueueTree_ = new Tree();\n\n // TODO: This should be @private but it's used by test_access.js and internal.js\n persistentConnection_: PersistentConnection | null = null;\n\n constructor(\n public repoInfo_: RepoInfo,\n public forceRestClient_: boolean,\n public authTokenProvider_: AuthTokenProvider,\n public appCheckProvider_: AppCheckTokenProvider\n ) {\n // This key is intentionally not updated if RepoInfo is later changed or replaced\n this.key = this.repoInfo_.toURLString();\n }\n\n /**\n * @returns The URL corresponding to the root of this Firebase.\n */\n toString(): string {\n return (\n (this.repoInfo_.secure ? 
'https://' : 'http://') + this.repoInfo_.host\n );\n }\n}\n\nexport function repoStart(\n repo: Repo,\n appId: string,\n authOverride?: object\n): void {\n repo.stats_ = statsManagerGetCollection(repo.repoInfo_);\n\n if (repo.forceRestClient_ || beingCrawled()) {\n repo.server_ = new ReadonlyRestClient(\n repo.repoInfo_,\n (\n pathString: string,\n data: unknown,\n isMerge: boolean,\n tag: number | null\n ) => {\n repoOnDataUpdate(repo, pathString, data, isMerge, tag);\n },\n repo.authTokenProvider_,\n repo.appCheckProvider_\n );\n\n // Minor hack: Fire onConnect immediately, since there's no actual connection.\n setTimeout(() => repoOnConnectStatus(repo, /* connectStatus= */ true), 0);\n } else {\n // Validate authOverride\n if (typeof authOverride !== 'undefined' && authOverride !== null) {\n if (typeof authOverride !== 'object') {\n throw new Error(\n 'Only objects are supported for option databaseAuthVariableOverride'\n );\n }\n try {\n stringify(authOverride);\n } catch (e) {\n throw new Error('Invalid authOverride provided: ' + e);\n }\n }\n\n repo.persistentConnection_ = new PersistentConnection(\n repo.repoInfo_,\n appId,\n (\n pathString: string,\n data: unknown,\n isMerge: boolean,\n tag: number | null\n ) => {\n repoOnDataUpdate(repo, pathString, data, isMerge, tag);\n },\n (connectStatus: boolean) => {\n repoOnConnectStatus(repo, connectStatus);\n },\n (updates: object) => {\n repoOnServerInfoUpdate(repo, updates);\n },\n repo.authTokenProvider_,\n repo.appCheckProvider_,\n authOverride\n );\n\n repo.server_ = repo.persistentConnection_;\n }\n\n repo.authTokenProvider_.addTokenChangeListener(token => {\n repo.server_.refreshAuthToken(token);\n });\n\n repo.appCheckProvider_.addTokenChangeListener(result => {\n repo.server_.refreshAppCheckToken(result.token);\n });\n\n // In the case of multiple Repos for the same repoInfo (i.e. there are multiple Firebase.Contexts being used),\n // we only want to create one StatsReporter. As such, we'll report stats over the first Repo created.\n repo.statsReporter_ = statsManagerGetOrCreateReporter(\n repo.repoInfo_,\n () => new StatsReporter(repo.stats_, repo.server_)\n );\n\n // Used for .info.\n repo.infoData_ = new SnapshotHolder();\n repo.infoSyncTree_ = new SyncTree({\n startListening: (query, tag, currentHashFn, onComplete) => {\n let infoEvents: Event[] = [];\n const node = repo.infoData_.getNode(query._path);\n // This is possibly a hack, but we have different semantics for .info endpoints. 
We don't raise null events\n // on initial data...\n if (!node.isEmpty()) {\n infoEvents = syncTreeApplyServerOverwrite(\n repo.infoSyncTree_,\n query._path,\n node\n );\n setTimeout(() => {\n onComplete('ok');\n }, 0);\n }\n return infoEvents;\n },\n stopListening: () => {}\n });\n repoUpdateInfo(repo, 'connected', false);\n\n repo.serverSyncTree_ = new SyncTree({\n startListening: (query, tag, currentHashFn, onComplete) => {\n repo.server_.listen(query, currentHashFn, tag, (status, data) => {\n const events = onComplete(status, data);\n eventQueueRaiseEventsForChangedPath(\n repo.eventQueue_,\n query._path,\n events\n );\n });\n // No synchronous events for network-backed sync trees\n return [];\n },\n stopListening: (query, tag) => {\n repo.server_.unlisten(query, tag);\n }\n });\n}\n\n/**\n * @returns The time in milliseconds, taking the server offset into account if we have one.\n */\nexport function repoServerTime(repo: Repo): number {\n const offsetNode = repo.infoData_.getNode(new Path('.info/serverTimeOffset'));\n const offset = (offsetNode.val() as number) || 0;\n return new Date().getTime() + offset;\n}\n\n/**\n * Generate ServerValues using some variables from the repo object.\n */\nexport function repoGenerateServerValues(repo: Repo): Indexable {\n return generateWithValues({\n timestamp: repoServerTime(repo)\n });\n}\n\n/**\n * Called by realtime when we get new messages from the server.\n */\nfunction repoOnDataUpdate(\n repo: Repo,\n pathString: string,\n data: unknown,\n isMerge: boolean,\n tag: number | null\n): void {\n // For testing.\n repo.dataUpdateCount++;\n const path = new Path(pathString);\n data = repo.interceptServerDataCallback_\n ? repo.interceptServerDataCallback_(pathString, data)\n : data;\n let events = [];\n if (tag) {\n if (isMerge) {\n const taggedChildren = map(\n data as { [k: string]: unknown },\n (raw: unknown) => nodeFromJSON(raw)\n );\n events = syncTreeApplyTaggedQueryMerge(\n repo.serverSyncTree_,\n path,\n taggedChildren,\n tag\n );\n } else {\n const taggedSnap = nodeFromJSON(data);\n events = syncTreeApplyTaggedQueryOverwrite(\n repo.serverSyncTree_,\n path,\n taggedSnap,\n tag\n );\n }\n } else if (isMerge) {\n const changedChildren = map(\n data as { [k: string]: unknown },\n (raw: unknown) => nodeFromJSON(raw)\n );\n events = syncTreeApplyServerMerge(\n repo.serverSyncTree_,\n path,\n changedChildren\n );\n } else {\n const snap = nodeFromJSON(data);\n events = syncTreeApplyServerOverwrite(repo.serverSyncTree_, path, snap);\n }\n let affectedPath = path;\n if (events.length > 0) {\n // Since we have a listener outstanding for each transaction, receiving any events\n // is a proxy for some change having occurred.\n affectedPath = repoRerunTransactions(repo, path);\n }\n eventQueueRaiseEventsForChangedPath(repo.eventQueue_, affectedPath, events);\n}\n\n// TODO: This should be @private but it's used by test_access.js and internal.js\nexport function repoInterceptServerData(\n repo: Repo,\n callback: ((a: string, b: unknown) => unknown) | null\n): void {\n repo.interceptServerDataCallback_ = callback;\n}\n\nfunction repoOnConnectStatus(repo: Repo, connectStatus: boolean): void {\n repoUpdateInfo(repo, 'connected', connectStatus);\n if (connectStatus === false) {\n repoRunOnDisconnectEvents(repo);\n }\n}\n\nfunction repoOnServerInfoUpdate(repo: Repo, updates: object): void {\n each(updates, (key: string, value: unknown) => {\n repoUpdateInfo(repo, key, value);\n });\n}\n\nfunction repoUpdateInfo(repo: Repo, pathString: string, value: unknown): 
void {\n const path = new Path('/.info/' + pathString);\n const newNode = nodeFromJSON(value);\n repo.infoData_.updateSnapshot(path, newNode);\n const events = syncTreeApplyServerOverwrite(\n repo.infoSyncTree_,\n path,\n newNode\n );\n eventQueueRaiseEventsForChangedPath(repo.eventQueue_, path, events);\n}\n\nfunction repoGetNextWriteId(repo: Repo): number {\n return repo.nextWriteId_++;\n}\n\n/**\n * The purpose of `getValue` is to return the latest known value\n * satisfying `query`.\n *\n * This method will first check for in-memory cached values\n * belonging to active listeners. If they are found, such values\n * are considered to be the most up-to-date.\n *\n * If the client is not connected, this method will wait until the\n * repo has established a connection and then request the value for `query`.\n * If the client is not able to retrieve the query result for another reason,\n * it reports an error.\n *\n * @param query - The query to surface a value for.\n */\nexport function repoGetValue(\n repo: Repo,\n query: QueryContext,\n eventRegistration: ValueEventRegistration\n): Promise {\n // Only active queries are cached. There is no persisted cache.\n const cached = syncTreeGetServerValue(repo.serverSyncTree_, query);\n if (cached != null) {\n return Promise.resolve(cached);\n }\n return repo.server_.get(query).then(\n payload => {\n const node = nodeFromJSON(payload).withIndex(\n query._queryParams.getIndex()\n );\n /**\n * Below we simulate the actions of an `onlyOnce` `onValue()` event where:\n * Add an event registration,\n * Update data at the path,\n * Raise any events,\n * Cleanup the SyncTree\n */\n syncTreeAddEventRegistration(\n repo.serverSyncTree_,\n query,\n eventRegistration,\n true\n );\n let events: Event[];\n if (query._queryParams.loadsAllData()) {\n events = syncTreeApplyServerOverwrite(\n repo.serverSyncTree_,\n query._path,\n node\n );\n } else {\n const tag = syncTreeTagForQuery(repo.serverSyncTree_, query);\n events = syncTreeApplyTaggedQueryOverwrite(\n repo.serverSyncTree_,\n query._path,\n node,\n tag\n );\n }\n /*\n * We need to raise events in the scenario where `get()` is called at a parent path, and\n * while the `get()` is pending, `onValue` is called at a child location. While get() is waiting\n * for the data, `onValue` will register a new event. Then, get() will come back, and update the syncTree\n * and its corresponding serverCache, including the child location where `onValue` is called. 
Then,\n * `onValue` will receive the event from the server, but look at the syncTree and see that the data received\n * from the server is already at the SyncPoint, and so the `onValue` callback will never get fired.\n * Calling `eventQueueRaiseEventsForChangedPath()` is the correct way to propagate the events and\n * ensure the corresponding child events will get fired.\n */\n eventQueueRaiseEventsForChangedPath(\n repo.eventQueue_,\n query._path,\n events\n );\n syncTreeRemoveEventRegistration(\n repo.serverSyncTree_,\n query,\n eventRegistration,\n null,\n true\n );\n return node;\n },\n err => {\n repoLog(repo, 'get for query ' + stringify(query) + ' failed: ' + err);\n return Promise.reject(new Error(err as string));\n }\n );\n}\n\nexport function repoSetWithPriority(\n repo: Repo,\n path: Path,\n newVal: unknown,\n newPriority: number | string | null,\n onComplete: ((status: Error | null, errorReason?: string) => void) | null\n): void {\n repoLog(repo, 'set', {\n path: path.toString(),\n value: newVal,\n priority: newPriority\n });\n\n // TODO: Optimize this behavior to either (a) store flag to skip resolving where possible and / or\n // (b) store unresolved paths on JSON parse\n const serverValues = repoGenerateServerValues(repo);\n const newNodeUnresolved = nodeFromJSON(newVal, newPriority);\n const existing = syncTreeCalcCompleteEventCache(repo.serverSyncTree_, path);\n const newNode = resolveDeferredValueSnapshot(\n newNodeUnresolved,\n existing,\n serverValues\n );\n\n const writeId = repoGetNextWriteId(repo);\n const events = syncTreeApplyUserOverwrite(\n repo.serverSyncTree_,\n path,\n newNode,\n writeId,\n true\n );\n eventQueueQueueEvents(repo.eventQueue_, events);\n repo.server_.put(\n path.toString(),\n newNodeUnresolved.val(/*export=*/ true),\n (status, errorReason) => {\n const success = status === 'ok';\n if (!success) {\n warn('set at ' + path + ' failed: ' + status);\n }\n\n const clearEvents = syncTreeAckUserWrite(\n repo.serverSyncTree_,\n writeId,\n !success\n );\n eventQueueRaiseEventsForChangedPath(repo.eventQueue_, path, clearEvents);\n repoCallOnCompleteCallback(repo, onComplete, status, errorReason);\n }\n );\n const affectedPath = repoAbortTransactions(repo, path);\n repoRerunTransactions(repo, affectedPath);\n // We queued the events above, so just flush the queue here\n eventQueueRaiseEventsForChangedPath(repo.eventQueue_, affectedPath, []);\n}\n\nexport function repoUpdate(\n repo: Repo,\n path: Path,\n childrenToMerge: { [k: string]: unknown },\n onComplete: ((status: Error | null, errorReason?: string) => void) | null\n): void {\n repoLog(repo, 'update', { path: path.toString(), value: childrenToMerge });\n\n // Start with our existing data and merge each child into it.\n let empty = true;\n const serverValues = repoGenerateServerValues(repo);\n const changedChildren: { [k: string]: Node } = {};\n each(childrenToMerge, (changedKey: string, changedValue: unknown) => {\n empty = false;\n changedChildren[changedKey] = resolveDeferredValueTree(\n pathChild(path, changedKey),\n nodeFromJSON(changedValue),\n repo.serverSyncTree_,\n serverValues\n );\n });\n\n if (!empty) {\n const writeId = repoGetNextWriteId(repo);\n const events = syncTreeApplyUserMerge(\n repo.serverSyncTree_,\n path,\n changedChildren,\n writeId\n );\n eventQueueQueueEvents(repo.eventQueue_, events);\n repo.server_.merge(\n path.toString(),\n childrenToMerge,\n (status, errorReason) => {\n const success = status === 'ok';\n if (!success) {\n warn('update at ' + path + ' failed: ' + 
status);\n }\n\n const clearEvents = syncTreeAckUserWrite(\n repo.serverSyncTree_,\n writeId,\n !success\n );\n const affectedPath =\n clearEvents.length > 0 ? repoRerunTransactions(repo, path) : path;\n eventQueueRaiseEventsForChangedPath(\n repo.eventQueue_,\n affectedPath,\n clearEvents\n );\n repoCallOnCompleteCallback(repo, onComplete, status, errorReason);\n }\n );\n\n each(childrenToMerge, (changedPath: string) => {\n const affectedPath = repoAbortTransactions(\n repo,\n pathChild(path, changedPath)\n );\n repoRerunTransactions(repo, affectedPath);\n });\n\n // We queued the events above, so just flush the queue here\n eventQueueRaiseEventsForChangedPath(repo.eventQueue_, path, []);\n } else {\n log(\"update() called with empty data. Don't do anything.\");\n repoCallOnCompleteCallback(repo, onComplete, 'ok', undefined);\n }\n}\n\n/**\n * Applies all of the changes stored up in the onDisconnect_ tree.\n */\nfunction repoRunOnDisconnectEvents(repo: Repo): void {\n repoLog(repo, 'onDisconnectEvents');\n\n const serverValues = repoGenerateServerValues(repo);\n const resolvedOnDisconnectTree = newSparseSnapshotTree();\n sparseSnapshotTreeForEachTree(\n repo.onDisconnect_,\n newEmptyPath(),\n (path, node) => {\n const resolved = resolveDeferredValueTree(\n path,\n node,\n repo.serverSyncTree_,\n serverValues\n );\n sparseSnapshotTreeRemember(resolvedOnDisconnectTree, path, resolved);\n }\n );\n let events: Event[] = [];\n\n sparseSnapshotTreeForEachTree(\n resolvedOnDisconnectTree,\n newEmptyPath(),\n (path, snap) => {\n events = events.concat(\n syncTreeApplyServerOverwrite(repo.serverSyncTree_, path, snap)\n );\n const affectedPath = repoAbortTransactions(repo, path);\n repoRerunTransactions(repo, affectedPath);\n }\n );\n\n repo.onDisconnect_ = newSparseSnapshotTree();\n eventQueueRaiseEventsForChangedPath(repo.eventQueue_, newEmptyPath(), events);\n}\n\nexport function repoOnDisconnectCancel(\n repo: Repo,\n path: Path,\n onComplete: ((status: Error | null, errorReason?: string) => void) | null\n): void {\n repo.server_.onDisconnectCancel(path.toString(), (status, errorReason) => {\n if (status === 'ok') {\n sparseSnapshotTreeForget(repo.onDisconnect_, path);\n }\n repoCallOnCompleteCallback(repo, onComplete, status, errorReason);\n });\n}\n\nexport function repoOnDisconnectSet(\n repo: Repo,\n path: Path,\n value: unknown,\n onComplete: ((status: Error | null, errorReason?: string) => void) | null\n): void {\n const newNode = nodeFromJSON(value);\n repo.server_.onDisconnectPut(\n path.toString(),\n newNode.val(/*export=*/ true),\n (status, errorReason) => {\n if (status === 'ok') {\n sparseSnapshotTreeRemember(repo.onDisconnect_, path, newNode);\n }\n repoCallOnCompleteCallback(repo, onComplete, status, errorReason);\n }\n );\n}\n\nexport function repoOnDisconnectSetWithPriority(\n repo: Repo,\n path: Path,\n value: unknown,\n priority: unknown,\n onComplete: ((status: Error | null, errorReason?: string) => void) | null\n): void {\n const newNode = nodeFromJSON(value, priority);\n repo.server_.onDisconnectPut(\n path.toString(),\n newNode.val(/*export=*/ true),\n (status, errorReason) => {\n if (status === 'ok') {\n sparseSnapshotTreeRemember(repo.onDisconnect_, path, newNode);\n }\n repoCallOnCompleteCallback(repo, onComplete, status, errorReason);\n }\n );\n}\n\nexport function repoOnDisconnectUpdate(\n repo: Repo,\n path: Path,\n childrenToMerge: { [k: string]: unknown },\n onComplete: ((status: Error | null, errorReason?: string) => void) | null\n): void {\n if 
(isEmpty(childrenToMerge)) {\n log(\"onDisconnect().update() called with empty data. Don't do anything.\");\n repoCallOnCompleteCallback(repo, onComplete, 'ok', undefined);\n return;\n }\n\n repo.server_.onDisconnectMerge(\n path.toString(),\n childrenToMerge,\n (status, errorReason) => {\n if (status === 'ok') {\n each(childrenToMerge, (childName: string, childNode: unknown) => {\n const newChildNode = nodeFromJSON(childNode);\n sparseSnapshotTreeRemember(\n repo.onDisconnect_,\n pathChild(path, childName),\n newChildNode\n );\n });\n }\n repoCallOnCompleteCallback(repo, onComplete, status, errorReason);\n }\n );\n}\n\nexport function repoAddEventCallbackForQuery(\n repo: Repo,\n query: QueryContext,\n eventRegistration: EventRegistration\n): void {\n let events;\n if (pathGetFront(query._path) === '.info') {\n events = syncTreeAddEventRegistration(\n repo.infoSyncTree_,\n query,\n eventRegistration\n );\n } else {\n events = syncTreeAddEventRegistration(\n repo.serverSyncTree_,\n query,\n eventRegistration\n );\n }\n eventQueueRaiseEventsAtPath(repo.eventQueue_, query._path, events);\n}\n\nexport function repoRemoveEventCallbackForQuery(\n repo: Repo,\n query: QueryContext,\n eventRegistration: EventRegistration\n): void {\n // These are guaranteed not to raise events, since we're not passing in a cancelError. However, we can future-proof\n // a little bit by handling the return values anyways.\n let events;\n if (pathGetFront(query._path) === '.info') {\n events = syncTreeRemoveEventRegistration(\n repo.infoSyncTree_,\n query,\n eventRegistration\n );\n } else {\n events = syncTreeRemoveEventRegistration(\n repo.serverSyncTree_,\n query,\n eventRegistration\n );\n }\n eventQueueRaiseEventsAtPath(repo.eventQueue_, query._path, events);\n}\n\nexport function repoInterrupt(repo: Repo): void {\n if (repo.persistentConnection_) {\n repo.persistentConnection_.interrupt(INTERRUPT_REASON);\n }\n}\n\nexport function repoResume(repo: Repo): void {\n if (repo.persistentConnection_) {\n repo.persistentConnection_.resume(INTERRUPT_REASON);\n }\n}\n\nexport function repoStats(repo: Repo, showDelta: boolean = false): void {\n if (typeof console === 'undefined') {\n return;\n }\n\n let stats: { [k: string]: unknown };\n if (showDelta) {\n if (!repo.statsListener_) {\n repo.statsListener_ = new StatsListener(repo.stats_);\n }\n stats = repo.statsListener_.get();\n } else {\n stats = repo.stats_.get();\n }\n\n const longestName = Object.keys(stats).reduce(\n (previousValue, currentValue) =>\n Math.max(currentValue.length, previousValue),\n 0\n );\n\n each(stats, (stat: string, value: unknown) => {\n let paddedStat = stat;\n // pad stat names to be the same length (plus 2 extra spaces).\n for (let i = stat.length; i < longestName + 2; i++) {\n paddedStat += ' ';\n }\n console.log(paddedStat + value);\n });\n}\n\nexport function repoStatsIncrementCounter(repo: Repo, metric: string): void {\n repo.stats_.incrementCounter(metric);\n statsReporterIncludeStat(repo.statsReporter_, metric);\n}\n\nfunction repoLog(repo: Repo, ...varArgs: unknown[]): void {\n let prefix = '';\n if (repo.persistentConnection_) {\n prefix = repo.persistentConnection_.id + ':';\n }\n log(prefix, ...varArgs);\n}\n\nexport function repoCallOnCompleteCallback(\n repo: Repo,\n callback: ((status: Error | null, errorReason?: string) => void) | null,\n status: string,\n errorReason?: string | null\n): void {\n if (callback) {\n exceptionGuard(() => {\n if (status === 'ok') {\n callback(null);\n } else {\n const code = (status || 
'error').toUpperCase();\n let message = code;\n if (errorReason) {\n message += ': ' + errorReason;\n }\n\n const error = new Error(message);\n\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n (error as any).code = code;\n callback(error);\n }\n });\n }\n}\n\n/**\n * Creates a new transaction, adds it to the transactions we're tracking, and\n * sends it to the server if possible.\n *\n * @param path - Path at which to do transaction.\n * @param transactionUpdate - Update callback.\n * @param onComplete - Completion callback.\n * @param unwatcher - Function that will be called when the transaction no longer\n * need data updates for `path`.\n * @param applyLocally - Whether or not to make intermediate results visible\n */\nexport function repoStartTransaction(\n repo: Repo,\n path: Path,\n transactionUpdate: (a: unknown) => unknown,\n onComplete: ((error: Error, committed: boolean, node: Node) => void) | null,\n unwatcher: () => void,\n applyLocally: boolean\n): void {\n repoLog(repo, 'transaction on ' + path);\n\n // Initialize transaction.\n const transaction: Transaction = {\n path,\n update: transactionUpdate,\n onComplete,\n // One of TransactionStatus enums.\n status: null,\n // Used when combining transactions at different locations to figure out\n // which one goes first.\n order: LUIDGenerator(),\n // Whether to raise local events for this transaction.\n applyLocally,\n // Count of how many times we've retried the transaction.\n retryCount: 0,\n // Function to call to clean up our .on() listener.\n unwatcher,\n // Stores why a transaction was aborted.\n abortReason: null,\n currentWriteId: null,\n currentInputSnapshot: null,\n currentOutputSnapshotRaw: null,\n currentOutputSnapshotResolved: null\n };\n\n // Run transaction initially.\n const currentState = repoGetLatestState(repo, path, undefined);\n transaction.currentInputSnapshot = currentState;\n const newVal = transaction.update(currentState.val());\n if (newVal === undefined) {\n // Abort transaction.\n transaction.unwatcher();\n transaction.currentOutputSnapshotRaw = null;\n transaction.currentOutputSnapshotResolved = null;\n if (transaction.onComplete) {\n transaction.onComplete(null, false, transaction.currentInputSnapshot);\n }\n } else {\n validateFirebaseData(\n 'transaction failed: Data returned ',\n newVal,\n transaction.path\n );\n\n // Mark as run and add to our queue.\n transaction.status = TransactionStatus.RUN;\n const queueNode = treeSubTree(repo.transactionQueueTree_, path);\n const nodeQueue = treeGetValue(queueNode) || [];\n nodeQueue.push(transaction);\n\n treeSetValue(queueNode, nodeQueue);\n\n // Update visibleData and raise events\n // Note: We intentionally raise events after updating all of our\n // transaction state, since the user could start new transactions from the\n // event callbacks.\n let priorityForNode;\n if (\n typeof newVal === 'object' &&\n newVal !== null &&\n contains(newVal, '.priority')\n ) {\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n priorityForNode = safeGet(newVal as any, '.priority');\n assert(\n isValidPriority(priorityForNode),\n 'Invalid priority returned by transaction. 
' +\n 'Priority must be a valid string, finite number, server value, or null.'\n );\n } else {\n const currentNode =\n syncTreeCalcCompleteEventCache(repo.serverSyncTree_, path) ||\n ChildrenNode.EMPTY_NODE;\n priorityForNode = currentNode.getPriority().val();\n }\n\n const serverValues = repoGenerateServerValues(repo);\n const newNodeUnresolved = nodeFromJSON(newVal, priorityForNode);\n const newNode = resolveDeferredValueSnapshot(\n newNodeUnresolved,\n currentState,\n serverValues\n );\n transaction.currentOutputSnapshotRaw = newNodeUnresolved;\n transaction.currentOutputSnapshotResolved = newNode;\n transaction.currentWriteId = repoGetNextWriteId(repo);\n\n const events = syncTreeApplyUserOverwrite(\n repo.serverSyncTree_,\n path,\n newNode,\n transaction.currentWriteId,\n transaction.applyLocally\n );\n eventQueueRaiseEventsForChangedPath(repo.eventQueue_, path, events);\n\n repoSendReadyTransactions(repo, repo.transactionQueueTree_);\n }\n}\n\n/**\n * @param excludeSets - A specific set to exclude\n */\nfunction repoGetLatestState(\n repo: Repo,\n path: Path,\n excludeSets?: number[]\n): Node {\n return (\n syncTreeCalcCompleteEventCache(repo.serverSyncTree_, path, excludeSets) ||\n ChildrenNode.EMPTY_NODE\n );\n}\n\n/**\n * Sends any already-run transactions that aren't waiting for outstanding\n * transactions to complete.\n *\n * Externally it's called with no arguments, but it calls itself recursively\n * with a particular transactionQueueTree node to recurse through the tree.\n *\n * @param node - transactionQueueTree node to start at.\n */\nfunction repoSendReadyTransactions(\n repo: Repo,\n node: Tree = repo.transactionQueueTree_\n): void {\n // Before recursing, make sure any completed transactions are removed.\n if (!node) {\n repoPruneCompletedTransactionsBelowNode(repo, node);\n }\n\n if (treeGetValue(node)) {\n const queue = repoBuildTransactionQueue(repo, node);\n assert(queue.length > 0, 'Sending zero length transaction queue');\n\n const allRun = queue.every(\n (transaction: Transaction) => transaction.status === TransactionStatus.RUN\n );\n\n // If they're all run (and not sent), we can send them. 
Else, we must wait.\n if (allRun) {\n repoSendTransactionQueue(repo, treeGetPath(node), queue);\n }\n } else if (treeHasChildren(node)) {\n treeForEachChild(node, childNode => {\n repoSendReadyTransactions(repo, childNode);\n });\n }\n}\n\n/**\n * Given a list of run transactions, send them to the server and then handle\n * the result (success or failure).\n *\n * @param path - The location of the queue.\n * @param queue - Queue of transactions under the specified location.\n */\nfunction repoSendTransactionQueue(\n repo: Repo,\n path: Path,\n queue: Transaction[]\n): void {\n // Mark transactions as sent and increment retry count!\n const setsToIgnore = queue.map(txn => {\n return txn.currentWriteId;\n });\n const latestState = repoGetLatestState(repo, path, setsToIgnore);\n let snapToSend = latestState;\n const latestHash = latestState.hash();\n for (let i = 0; i < queue.length; i++) {\n const txn = queue[i];\n assert(\n txn.status === TransactionStatus.RUN,\n 'tryToSendTransactionQueue_: items in queue should all be run.'\n );\n txn.status = TransactionStatus.SENT;\n txn.retryCount++;\n const relativePath = newRelativePath(path, txn.path);\n // If we've gotten to this point, the output snapshot must be defined.\n snapToSend = snapToSend.updateChild(\n relativePath /** @type {!Node} */,\n txn.currentOutputSnapshotRaw\n );\n }\n\n const dataToSend = snapToSend.val(true);\n const pathToSend = path;\n\n // Send the put.\n repo.server_.put(\n pathToSend.toString(),\n dataToSend,\n (status: string) => {\n repoLog(repo, 'transaction put response', {\n path: pathToSend.toString(),\n status\n });\n\n let events: Event[] = [];\n if (status === 'ok') {\n // Queue up the callbacks and fire them after cleaning up all of our\n // transaction state, since the callback could trigger more\n // transactions or sets.\n const callbacks = [];\n for (let i = 0; i < queue.length; i++) {\n queue[i].status = TransactionStatus.COMPLETED;\n events = events.concat(\n syncTreeAckUserWrite(repo.serverSyncTree_, queue[i].currentWriteId)\n );\n if (queue[i].onComplete) {\n // We never unset the output snapshot, and given that this\n // transaction is complete, it should be set\n callbacks.push(() =>\n queue[i].onComplete(\n null,\n true,\n queue[i].currentOutputSnapshotResolved\n )\n );\n }\n queue[i].unwatcher();\n }\n\n // Now remove the completed transactions.\n repoPruneCompletedTransactionsBelowNode(\n repo,\n treeSubTree(repo.transactionQueueTree_, path)\n );\n // There may be pending transactions that we can now send.\n repoSendReadyTransactions(repo, repo.transactionQueueTree_);\n\n eventQueueRaiseEventsForChangedPath(repo.eventQueue_, path, events);\n\n // Finally, trigger onComplete callbacks.\n for (let i = 0; i < callbacks.length; i++) {\n exceptionGuard(callbacks[i]);\n }\n } else {\n // transactions are no longer sent. 
Update their status appropriately.\n if (status === 'datastale') {\n for (let i = 0; i < queue.length; i++) {\n if (queue[i].status === TransactionStatus.SENT_NEEDS_ABORT) {\n queue[i].status = TransactionStatus.NEEDS_ABORT;\n } else {\n queue[i].status = TransactionStatus.RUN;\n }\n }\n } else {\n warn(\n 'transaction at ' + pathToSend.toString() + ' failed: ' + status\n );\n for (let i = 0; i < queue.length; i++) {\n queue[i].status = TransactionStatus.NEEDS_ABORT;\n queue[i].abortReason = status;\n }\n }\n\n repoRerunTransactions(repo, path);\n }\n },\n latestHash\n );\n}\n\n/**\n * Finds all transactions dependent on the data at changedPath and reruns them.\n *\n * Should be called any time cached data changes.\n *\n * Return the highest path that was affected by rerunning transactions. This\n * is the path at which events need to be raised for.\n *\n * @param changedPath - The path in mergedData that changed.\n * @returns The rootmost path that was affected by rerunning transactions.\n */\nfunction repoRerunTransactions(repo: Repo, changedPath: Path): Path {\n const rootMostTransactionNode = repoGetAncestorTransactionNode(\n repo,\n changedPath\n );\n const path = treeGetPath(rootMostTransactionNode);\n\n const queue = repoBuildTransactionQueue(repo, rootMostTransactionNode);\n repoRerunTransactionQueue(repo, queue, path);\n\n return path;\n}\n\n/**\n * Does all the work of rerunning transactions (as well as cleans up aborted\n * transactions and whatnot).\n *\n * @param queue - The queue of transactions to run.\n * @param path - The path the queue is for.\n */\nfunction repoRerunTransactionQueue(\n repo: Repo,\n queue: Transaction[],\n path: Path\n): void {\n if (queue.length === 0) {\n return; // Nothing to do!\n }\n\n // Queue up the callbacks and fire them after cleaning up all of our\n // transaction state, since the callback could trigger more transactions or\n // sets.\n const callbacks = [];\n let events: Event[] = [];\n // Ignore all of the sets we're going to re-run.\n const txnsToRerun = queue.filter(q => {\n return q.status === TransactionStatus.RUN;\n });\n const setsToIgnore = txnsToRerun.map(q => {\n return q.currentWriteId;\n });\n for (let i = 0; i < queue.length; i++) {\n const transaction = queue[i];\n const relativePath = newRelativePath(path, transaction.path);\n let abortTransaction = false,\n abortReason;\n assert(\n relativePath !== null,\n 'rerunTransactionsUnderNode_: relativePath should not be null.'\n );\n\n if (transaction.status === TransactionStatus.NEEDS_ABORT) {\n abortTransaction = true;\n abortReason = transaction.abortReason;\n events = events.concat(\n syncTreeAckUserWrite(\n repo.serverSyncTree_,\n transaction.currentWriteId,\n true\n )\n );\n } else if (transaction.status === TransactionStatus.RUN) {\n if (transaction.retryCount >= MAX_TRANSACTION_RETRIES) {\n abortTransaction = true;\n abortReason = 'maxretry';\n events = events.concat(\n syncTreeAckUserWrite(\n repo.serverSyncTree_,\n transaction.currentWriteId,\n true\n )\n );\n } else {\n // This code reruns a transaction\n const currentNode = repoGetLatestState(\n repo,\n transaction.path,\n setsToIgnore\n );\n transaction.currentInputSnapshot = currentNode;\n const newData = queue[i].update(currentNode.val());\n if (newData !== undefined) {\n validateFirebaseData(\n 'transaction failed: Data returned ',\n newData,\n transaction.path\n );\n let newDataNode = nodeFromJSON(newData);\n const hasExplicitPriority =\n typeof newData === 'object' &&\n newData != null &&\n contains(newData, 
'.priority');\n if (!hasExplicitPriority) {\n // Keep the old priority if there wasn't a priority explicitly specified.\n newDataNode = newDataNode.updatePriority(currentNode.getPriority());\n }\n\n const oldWriteId = transaction.currentWriteId;\n const serverValues = repoGenerateServerValues(repo);\n const newNodeResolved = resolveDeferredValueSnapshot(\n newDataNode,\n currentNode,\n serverValues\n );\n\n transaction.currentOutputSnapshotRaw = newDataNode;\n transaction.currentOutputSnapshotResolved = newNodeResolved;\n transaction.currentWriteId = repoGetNextWriteId(repo);\n // Mutates setsToIgnore in place\n setsToIgnore.splice(setsToIgnore.indexOf(oldWriteId), 1);\n events = events.concat(\n syncTreeApplyUserOverwrite(\n repo.serverSyncTree_,\n transaction.path,\n newNodeResolved,\n transaction.currentWriteId,\n transaction.applyLocally\n )\n );\n events = events.concat(\n syncTreeAckUserWrite(repo.serverSyncTree_, oldWriteId, true)\n );\n } else {\n abortTransaction = true;\n abortReason = 'nodata';\n events = events.concat(\n syncTreeAckUserWrite(\n repo.serverSyncTree_,\n transaction.currentWriteId,\n true\n )\n );\n }\n }\n }\n eventQueueRaiseEventsForChangedPath(repo.eventQueue_, path, events);\n events = [];\n if (abortTransaction) {\n // Abort.\n queue[i].status = TransactionStatus.COMPLETED;\n\n // Removing a listener can trigger pruning which can muck with\n // mergedData/visibleData (as it prunes data). So defer the unwatcher\n // until we're done.\n (function (unwatcher) {\n setTimeout(unwatcher, Math.floor(0));\n })(queue[i].unwatcher);\n\n if (queue[i].onComplete) {\n if (abortReason === 'nodata') {\n callbacks.push(() =>\n queue[i].onComplete(null, false, queue[i].currentInputSnapshot)\n );\n } else {\n callbacks.push(() =>\n queue[i].onComplete(new Error(abortReason), false, null)\n );\n }\n }\n }\n }\n\n // Clean up completed transactions.\n repoPruneCompletedTransactionsBelowNode(repo, repo.transactionQueueTree_);\n\n // Now fire callbacks, now that we're in a good, known state.\n for (let i = 0; i < callbacks.length; i++) {\n exceptionGuard(callbacks[i]);\n }\n\n // Try to send the transaction result to the server.\n repoSendReadyTransactions(repo, repo.transactionQueueTree_);\n}\n\n/**\n * Returns the rootmost ancestor node of the specified path that has a pending\n * transaction on it, or just returns the node for the given path if there are\n * no pending transactions on any ancestor.\n *\n * @param path - The location to start at.\n * @returns The rootmost node with a transaction.\n */\nfunction repoGetAncestorTransactionNode(\n repo: Repo,\n path: Path\n): Tree {\n let front;\n\n // Start at the root and walk deeper into the tree towards path until we\n // find a node with pending transactions.\n let transactionNode = repo.transactionQueueTree_;\n front = pathGetFront(path);\n while (front !== null && treeGetValue(transactionNode) === undefined) {\n transactionNode = treeSubTree(transactionNode, front);\n path = pathPopFront(path);\n front = pathGetFront(path);\n }\n\n return transactionNode;\n}\n\n/**\n * Builds the queue of all transactions at or below the specified\n * transactionNode.\n *\n * @param transactionNode\n * @returns The generated queue.\n */\nfunction repoBuildTransactionQueue(\n repo: Repo,\n transactionNode: Tree\n): Transaction[] {\n // Walk any child transaction queues and aggregate them into a single queue.\n const transactionQueue: Transaction[] = [];\n repoAggregateTransactionQueuesForNode(\n repo,\n transactionNode,\n transactionQueue\n 
);\n\n // Sort them by the order the transactions were created.\n transactionQueue.sort((a, b) => a.order - b.order);\n\n return transactionQueue;\n}\n\nfunction repoAggregateTransactionQueuesForNode(\n repo: Repo,\n node: Tree,\n queue: Transaction[]\n): void {\n const nodeQueue = treeGetValue(node);\n if (nodeQueue) {\n for (let i = 0; i < nodeQueue.length; i++) {\n queue.push(nodeQueue[i]);\n }\n }\n\n treeForEachChild(node, child => {\n repoAggregateTransactionQueuesForNode(repo, child, queue);\n });\n}\n\n/**\n * Remove COMPLETED transactions at or below this node in the transactionQueueTree_.\n */\nfunction repoPruneCompletedTransactionsBelowNode(\n repo: Repo,\n node: Tree\n): void {\n const queue = treeGetValue(node);\n if (queue) {\n let to = 0;\n for (let from = 0; from < queue.length; from++) {\n if (queue[from].status !== TransactionStatus.COMPLETED) {\n queue[to] = queue[from];\n to++;\n }\n }\n queue.length = to;\n treeSetValue(node, queue.length > 0 ? queue : undefined);\n }\n\n treeForEachChild(node, childNode => {\n repoPruneCompletedTransactionsBelowNode(repo, childNode);\n });\n}\n\n/**\n * Aborts all transactions on ancestors or descendants of the specified path.\n * Called when doing a set() or update() since we consider them incompatible\n * with transactions.\n *\n * @param path - Path for which we want to abort related transactions.\n */\nfunction repoAbortTransactions(repo: Repo, path: Path): Path {\n const affectedPath = treeGetPath(repoGetAncestorTransactionNode(repo, path));\n\n const transactionNode = treeSubTree(repo.transactionQueueTree_, path);\n\n treeForEachAncestor(transactionNode, (node: Tree) => {\n repoAbortTransactionsOnNode(repo, node);\n });\n\n repoAbortTransactionsOnNode(repo, transactionNode);\n\n treeForEachDescendant(transactionNode, (node: Tree) => {\n repoAbortTransactionsOnNode(repo, node);\n });\n\n return affectedPath;\n}\n\n/**\n * Abort transactions stored in this transaction queue node.\n *\n * @param node - Node to abort transactions for.\n */\nfunction repoAbortTransactionsOnNode(\n repo: Repo,\n node: Tree\n): void {\n const queue = treeGetValue(node);\n if (queue) {\n // Queue up the callbacks and fire them after cleaning up all of our\n // transaction state, since the callback could trigger more transactions\n // or sets.\n const callbacks = [];\n\n // Go through queue. Any already-sent transactions must be marked for\n // abort, while the unsent ones can be immediately aborted and removed.\n let events: Event[] = [];\n let lastSent = -1;\n for (let i = 0; i < queue.length; i++) {\n if (queue[i].status === TransactionStatus.SENT_NEEDS_ABORT) {\n // Already marked. No action needed.\n } else if (queue[i].status === TransactionStatus.SENT) {\n assert(\n lastSent === i - 1,\n 'All SENT items should be at beginning of queue.'\n );\n lastSent = i;\n // Mark transaction for abort when it comes back.\n queue[i].status = TransactionStatus.SENT_NEEDS_ABORT;\n queue[i].abortReason = 'set';\n } else {\n assert(\n queue[i].status === TransactionStatus.RUN,\n 'Unexpected transaction status in abort'\n );\n // We can abort it immediately.\n queue[i].unwatcher();\n events = events.concat(\n syncTreeAckUserWrite(\n repo.serverSyncTree_,\n queue[i].currentWriteId,\n true\n )\n );\n if (queue[i].onComplete) {\n callbacks.push(\n queue[i].onComplete.bind(null, new Error('set'), false, null)\n );\n }\n }\n }\n if (lastSent === -1) {\n // We're not waiting for any sent transactions. 
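// Informational summary (derived from repoSendTransactionQueue,
// repoRerunTransactionQueue and repoAbortTransactionsOnNode above) of the
// transaction status transitions implemented in this module:
//   RUN  -> SENT               when a queue is flushed to the server
//   SENT -> COMPLETED          on an 'ok' response (writes acked, callbacks fired, queue pruned)
//   SENT -> RUN                on 'datastale' (the transaction is rerun locally)
//   SENT -> NEEDS_ABORT        on any other server error (abortReason = status)
//   SENT -> SENT_NEEDS_ABORT   when an overlapping set()/update() arrives while in flight
//   RUN  -> aborted            on 'maxretry', 'nodata', or an overlapping set(),
//                              with onComplete invoked accordingly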
We can clear the queue.\n treeSetValue(node, undefined);\n } else {\n // Remove the transactions we aborted.\n queue.length = lastSent + 1;\n }\n\n // Now fire the callbacks.\n eventQueueRaiseEventsForChangedPath(\n repo.eventQueue_,\n treeGetPath(node),\n events\n );\n for (let i = 0; i < callbacks.length; i++) {\n exceptionGuard(callbacks[i]);\n }\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { RepoInfo } from '../../RepoInfo';\nimport { Path } from '../Path';\nimport { warnIfPageIsSecure, warn, fatal } from '../util';\n\nfunction decodePath(pathString: string): string {\n let pathStringDecoded = '';\n const pieces = pathString.split('/');\n for (let i = 0; i < pieces.length; i++) {\n if (pieces[i].length > 0) {\n let piece = pieces[i];\n try {\n piece = decodeURIComponent(piece.replace(/\\+/g, ' '));\n } catch (e) {}\n pathStringDecoded += '/' + piece;\n }\n }\n return pathStringDecoded;\n}\n\n/**\n * @returns key value hash\n */\nfunction decodeQuery(queryString: string): { [key: string]: string } {\n const results = {};\n if (queryString.charAt(0) === '?') {\n queryString = queryString.substring(1);\n }\n for (const segment of queryString.split('&')) {\n if (segment.length === 0) {\n continue;\n }\n const kv = segment.split('=');\n if (kv.length === 2) {\n results[decodeURIComponent(kv[0])] = decodeURIComponent(kv[1]);\n } else {\n warn(`Invalid query segment '${segment}' in query '${queryString}'`);\n }\n }\n return results;\n}\n\nexport const parseRepoInfo = function (\n dataURL: string,\n nodeAdmin: boolean\n): { repoInfo: RepoInfo; path: Path } {\n const parsedUrl = parseDatabaseURL(dataURL),\n namespace = parsedUrl.namespace;\n\n if (parsedUrl.domain === 'firebase.com') {\n fatal(\n parsedUrl.host +\n ' is no longer supported. ' +\n 'Please use .firebaseio.com instead'\n );\n }\n\n // Catch common error of uninitialized namespace value.\n if (\n (!namespace || namespace === 'undefined') &&\n parsedUrl.domain !== 'localhost'\n ) {\n fatal(\n 'Cannot parse Firebase url. 
Please use https://.firebaseio.com'\n );\n }\n\n if (!parsedUrl.secure) {\n warnIfPageIsSecure();\n }\n\n const webSocketOnly = parsedUrl.scheme === 'ws' || parsedUrl.scheme === 'wss';\n\n return {\n repoInfo: new RepoInfo(\n parsedUrl.host,\n parsedUrl.secure,\n namespace,\n webSocketOnly,\n nodeAdmin,\n /*persistenceKey=*/ '',\n /*includeNamespaceInQueryParams=*/ namespace !== parsedUrl.subdomain\n ),\n path: new Path(parsedUrl.pathString)\n };\n};\n\nexport const parseDatabaseURL = function (dataURL: string): {\n host: string;\n port: number;\n domain: string;\n subdomain: string;\n secure: boolean;\n scheme: string;\n pathString: string;\n namespace: string;\n} {\n // Default to empty strings in the event of a malformed string.\n let host = '',\n domain = '',\n subdomain = '',\n pathString = '',\n namespace = '';\n\n // Always default to SSL, unless otherwise specified.\n let secure = true,\n scheme = 'https',\n port = 443;\n\n // Don't do any validation here. The caller is responsible for validating the result of parsing.\n if (typeof dataURL === 'string') {\n // Parse scheme.\n let colonInd = dataURL.indexOf('//');\n if (colonInd >= 0) {\n scheme = dataURL.substring(0, colonInd - 1);\n dataURL = dataURL.substring(colonInd + 2);\n }\n\n // Parse host, path, and query string.\n let slashInd = dataURL.indexOf('/');\n if (slashInd === -1) {\n slashInd = dataURL.length;\n }\n let questionMarkInd = dataURL.indexOf('?');\n if (questionMarkInd === -1) {\n questionMarkInd = dataURL.length;\n }\n host = dataURL.substring(0, Math.min(slashInd, questionMarkInd));\n if (slashInd < questionMarkInd) {\n // For pathString, questionMarkInd will always come after slashInd\n pathString = decodePath(dataURL.substring(slashInd, questionMarkInd));\n }\n const queryParams = decodeQuery(\n dataURL.substring(Math.min(dataURL.length, questionMarkInd))\n );\n\n // If we have a port, use scheme for determining if it's secure.\n colonInd = host.indexOf(':');\n if (colonInd >= 0) {\n secure = scheme === 'https' || scheme === 'wss';\n port = parseInt(host.substring(colonInd + 1), 10);\n } else {\n colonInd = host.length;\n }\n\n const hostWithoutPort = host.slice(0, colonInd);\n if (hostWithoutPort.toLowerCase() === 'localhost') {\n domain = 'localhost';\n } else if (hostWithoutPort.split('.').length <= 2) {\n domain = hostWithoutPort;\n } else {\n // Interpret the subdomain of a 3 or more component URL as the namespace name.\n const dotInd = host.indexOf('.');\n subdomain = host.substring(0, dotInd).toLowerCase();\n domain = host.substring(dotInd + 1);\n // Normalize namespaces to lowercase to share storage / connection.\n namespace = subdomain;\n }\n // Always treat the value of the `ns` as the namespace name if it is present.\n if ('ns' in queryParams) {\n namespace = queryParams['ns'];\n }\n }\n\n return {\n host,\n port,\n domain,\n subdomain,\n secure,\n scheme,\n pathString,\n namespace\n };\n};\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n 
*/\n\nimport { assert } from '@firebase/util';\n\nimport {\n tryParseInt,\n MAX_NAME,\n MIN_NAME,\n INTEGER_32_MIN,\n INTEGER_32_MAX\n} from '../util/util';\n\n// Modeled after base64 web-safe chars, but ordered by ASCII.\nconst PUSH_CHARS =\n '-0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ_abcdefghijklmnopqrstuvwxyz';\n\nconst MIN_PUSH_CHAR = '-';\n\nconst MAX_PUSH_CHAR = 'z';\n\nconst MAX_KEY_LEN = 786;\n\n/**\n * Fancy ID generator that creates 20-character string identifiers with the\n * following properties:\n *\n * 1. They're based on timestamp so that they sort *after* any existing ids.\n * 2. They contain 72-bits of random data after the timestamp so that IDs won't\n * collide with other clients' IDs.\n * 3. They sort *lexicographically* (so the timestamp is converted to characters\n * that will sort properly).\n * 4. They're monotonically increasing. Even if you generate more than one in\n * the same timestamp, the latter ones will sort after the former ones. We do\n * this by using the previous random bits but \"incrementing\" them by 1 (only\n * in the case of a timestamp collision).\n */\nexport const nextPushId = (function () {\n // Timestamp of last push, used to prevent local collisions if you push twice\n // in one ms.\n let lastPushTime = 0;\n\n // We generate 72-bits of randomness which get turned into 12 characters and\n // appended to the timestamp to prevent collisions with other clients. We\n // store the last characters we generated because in the event of a collision,\n // we'll use those same characters except \"incremented\" by one.\n const lastRandChars: number[] = [];\n\n return function (now: number) {\n const duplicateTime = now === lastPushTime;\n lastPushTime = now;\n\n let i;\n const timeStampChars = new Array(8);\n for (i = 7; i >= 0; i--) {\n timeStampChars[i] = PUSH_CHARS.charAt(now % 64);\n // NOTE: Can't use << here because javascript will convert to int and lose\n // the upper bits.\n now = Math.floor(now / 64);\n }\n assert(now === 0, 'Cannot push at time == 0');\n\n let id = timeStampChars.join('');\n\n if (!duplicateTime) {\n for (i = 0; i < 12; i++) {\n lastRandChars[i] = Math.floor(Math.random() * 64);\n }\n } else {\n // If the timestamp hasn't changed since last push, use the same random\n // number, except incremented by 1.\n for (i = 11; i >= 0 && lastRandChars[i] === 63; i--) {\n lastRandChars[i] = 0;\n }\n lastRandChars[i]++;\n }\n for (i = 0; i < 12; i++) {\n id += PUSH_CHARS.charAt(lastRandChars[i]);\n }\n assert(id.length === 20, 'nextPushId: Length should be 20.');\n\n return id;\n };\n})();\n\nexport const successor = function (key: string) {\n if (key === '' + INTEGER_32_MAX) {\n // See https://firebase.google.com/docs/database/web/lists-of-data#data-order\n return MIN_PUSH_CHAR;\n }\n const keyAsInt: number = tryParseInt(key);\n if (keyAsInt != null) {\n return '' + (keyAsInt + 1);\n }\n const next = new Array(key.length);\n\n for (let i = 0; i < next.length; i++) {\n next[i] = key.charAt(i);\n }\n\n if (next.length < MAX_KEY_LEN) {\n next.push(MIN_PUSH_CHAR);\n return next.join('');\n }\n\n let i = next.length - 1;\n\n while (i >= 0 && next[i] === MAX_PUSH_CHAR) {\n i--;\n }\n\n // `successor` was called on the largest possible key, so return the\n // MAX_NAME, which sorts larger than all keys.\n if (i === -1) {\n return MAX_NAME;\n }\n\n const source = next[i];\n const sourcePlusOne = PUSH_CHARS.charAt(PUSH_CHARS.indexOf(source) + 1);\n next[i] = sourcePlusOne;\n\n return next.slice(0, i + 1).join('');\n};\n\n// `key` is assumed to be 
non-empty.\nexport const predecessor = function (key: string) {\n if (key === '' + INTEGER_32_MIN) {\n return MIN_NAME;\n }\n const keyAsInt: number = tryParseInt(key);\n if (keyAsInt != null) {\n return '' + (keyAsInt - 1);\n }\n const next = new Array(key.length);\n for (let i = 0; i < next.length; i++) {\n next[i] = key.charAt(i);\n }\n // If `key` ends in `MIN_PUSH_CHAR`, the largest key lexicographically\n // smaller than `key`, is `key[0:key.length - 1]`. The next key smaller\n // than that, `predecessor(predecessor(key))`, is\n //\n // `key[0:key.length - 2] + (key[key.length - 1] - 1) + \\\n // { MAX_PUSH_CHAR repeated MAX_KEY_LEN - (key.length - 1) times }\n //\n // analogous to increment/decrement for base-10 integers.\n //\n // This works because lexigographic comparison works character-by-character,\n // using length as a tie-breaker if one key is a prefix of the other.\n if (next[next.length - 1] === MIN_PUSH_CHAR) {\n if (next.length === 1) {\n // See https://firebase.google.com/docs/database/web/lists-of-data#orderbykey\n return '' + INTEGER_32_MAX;\n }\n delete next[next.length - 1];\n return next.join('');\n }\n // Replace the last character with it's immediate predecessor, and\n // fill the suffix of the key with MAX_PUSH_CHAR. This is the\n // lexicographically largest possible key smaller than `key`.\n next[next.length - 1] = PUSH_CHARS.charAt(\n PUSH_CHARS.indexOf(next[next.length - 1]) - 1\n );\n return next.join('') + MAX_PUSH_CHAR.repeat(MAX_KEY_LEN - next.length);\n};\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { stringify } from '@firebase/util';\n\nimport { DataSnapshot as ExpDataSnapshot } from '../../api/Reference_impl';\nimport { Path } from '../util/Path';\n\nimport { EventRegistration } from './EventRegistration';\n\n/**\n * Encapsulates the data needed to raise an event\n * @interface\n */\nexport interface Event {\n getPath(): Path;\n\n getEventType(): string;\n\n getEventRunner(): () => void;\n\n toString(): string;\n}\n\n/**\n * One of the following strings: \"value\", \"child_added\", \"child_changed\",\n * \"child_removed\", or \"child_moved.\"\n */\nexport type EventType =\n | 'value'\n | 'child_added'\n | 'child_changed'\n | 'child_moved'\n | 'child_removed';\n\n/**\n * Encapsulates the data needed to raise an event\n */\nexport class DataEvent implements Event {\n /**\n * @param eventType - One of: value, child_added, child_changed, child_moved, child_removed\n * @param eventRegistration - The function to call to with the event data. 
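// A minimal sketch (assumption: executed from a module where the relative
// import below resolves, for example a sibling test file) illustrating the
// push id properties documented with nextPushId above: ids are 20 characters
// long and sort lexicographically after earlier ids, even when two ids are
// minted in the same millisecond.
import { nextPushId } from '../core/util/NextPushId';

const now = Date.now();
const first = nextPushId(now);
const second = nextPushId(now); // timestamp collision: random suffix is incremented
console.assert(first.length === 20 && second.length === 20);
console.assert(second > first, 'later ids sort after earlier ones');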
User provided\n * @param snapshot - The data backing the event\n * @param prevName - Optional, the name of the previous child for child_* events.\n */\n constructor(\n public eventType: EventType,\n public eventRegistration: EventRegistration,\n public snapshot: ExpDataSnapshot,\n public prevName?: string | null\n ) {}\n getPath(): Path {\n const ref = this.snapshot.ref;\n if (this.eventType === 'value') {\n return ref._path;\n } else {\n return ref.parent._path;\n }\n }\n getEventType(): string {\n return this.eventType;\n }\n getEventRunner(): () => void {\n return this.eventRegistration.getEventRunner(this);\n }\n toString(): string {\n return (\n this.getPath().toString() +\n ':' +\n this.eventType +\n ':' +\n stringify(this.snapshot.exportVal())\n );\n }\n}\n\nexport class CancelEvent implements Event {\n constructor(\n public eventRegistration: EventRegistration,\n public error: Error,\n public path: Path\n ) {}\n getPath(): Path {\n return this.path;\n }\n getEventType(): string {\n return 'cancel';\n }\n getEventRunner(): () => void {\n return this.eventRegistration.getEventRunner(this);\n }\n toString(): string {\n return this.path.toString() + ':cancel';\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { assert } from '@firebase/util';\n\nimport { DataSnapshot } from '../../api/Reference_impl';\nimport { Repo } from '../Repo';\nimport { Path } from '../util/Path';\n\nimport { Change } from './Change';\nimport { CancelEvent, Event } from './Event';\nimport { QueryParams } from './QueryParams';\n\n/**\n * A user callback. Callbacks issues from the Legacy SDK maintain references\n * to the original user-issued callbacks, which allows equality\n * comparison by reference even though this callbacks are wrapped before\n * they can be passed to the firebase@exp SDK.\n *\n * @internal\n */\nexport interface UserCallback {\n (dataSnapshot: DataSnapshot, previousChildName?: string | null): unknown;\n userCallback?: unknown;\n context?: object | null;\n}\n\n/**\n * A wrapper class that converts events from the database@exp SDK to the legacy\n * Database SDK. 
Events are not converted directly as event registration relies\n * on reference comparison of the original user callback (see `matches()`) and\n * relies on equality of the legacy SDK's `context` object.\n */\nexport class CallbackContext {\n constructor(\n private readonly snapshotCallback: UserCallback,\n private readonly cancelCallback?: (error: Error) => unknown\n ) {}\n\n onValue(\n expDataSnapshot: DataSnapshot,\n previousChildName?: string | null\n ): void {\n this.snapshotCallback.call(null, expDataSnapshot, previousChildName);\n }\n\n onCancel(error: Error): void {\n assert(\n this.hasCancelCallback,\n 'Raising a cancel event on a listener with no cancel callback'\n );\n return this.cancelCallback.call(null, error);\n }\n\n get hasCancelCallback(): boolean {\n return !!this.cancelCallback;\n }\n\n matches(other: CallbackContext): boolean {\n return (\n this.snapshotCallback === other.snapshotCallback ||\n (this.snapshotCallback.userCallback !== undefined &&\n this.snapshotCallback.userCallback ===\n other.snapshotCallback.userCallback &&\n this.snapshotCallback.context === other.snapshotCallback.context)\n );\n }\n}\n\nexport interface QueryContext {\n readonly _queryIdentifier: string;\n readonly _queryObject: object;\n readonly _repo: Repo;\n readonly _path: Path;\n readonly _queryParams: QueryParams;\n}\n\n/**\n * An EventRegistration is basically an event type ('value', 'child_added', etc.) and a callback\n * to be notified of that type of event.\n *\n * That said, it can also contain a cancel callback to be notified if the event is canceled. And\n * currently, this code is organized around the idea that you would register multiple child_ callbacks\n * together, as a single EventRegistration. Though currently we don't do that.\n */\nexport interface EventRegistration {\n /**\n * True if this container has a callback to trigger for this event type\n */\n respondsTo(eventType: string): boolean;\n\n createEvent(change: Change, query: QueryContext): Event;\n\n /**\n * Given event data, return a function to trigger the user's callback\n */\n getEventRunner(eventData: Event): () => void;\n\n createCancelEvent(error: Error, path: Path): CancelEvent | null;\n\n matches(other: EventRegistration): boolean;\n\n /**\n * False basically means this is a \"dummy\" callback container being used as a sentinel\n * to remove all callback containers of a particular type. (e.g. 
if the user does\n * ref.off('value') without specifying a specific callback).\n *\n * (TODO: Rework this, since it's hacky)\n *\n */\n hasAnyCallback(): boolean;\n}\n","/**\n * @license\n * Copyright 2021 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Deferred } from '@firebase/util';\n\nimport {\n Repo,\n repoOnDisconnectCancel,\n repoOnDisconnectSet,\n repoOnDisconnectSetWithPriority,\n repoOnDisconnectUpdate\n} from '../core/Repo';\nimport { Path } from '../core/util/Path';\nimport {\n validateFirebaseDataArg,\n validateFirebaseMergeDataArg,\n validatePriority,\n validateWritablePath\n} from '../core/util/validation';\n\n/**\n * The `onDisconnect` class allows you to write or clear data when your client\n * disconnects from the Database server. These updates occur whether your\n * client disconnects cleanly or not, so you can rely on them to clean up data\n * even if a connection is dropped or a client crashes.\n *\n * The `onDisconnect` class is most commonly used to manage presence in\n * applications where it is useful to detect how many clients are connected and\n * when other clients disconnect. See\n * {@link https://firebase.google.com/docs/database/web/offline-capabilities | Enabling Offline Capabilities in JavaScript}\n * for more information.\n *\n * To avoid problems when a connection is dropped before the requests can be\n * transferred to the Database server, these functions should be called before\n * writing any data.\n *\n * Note that `onDisconnect` operations are only triggered once. 
If you want an\n * operation to occur each time a disconnect occurs, you'll need to re-establish\n * the `onDisconnect` operations each time you reconnect.\n */\nexport class OnDisconnect {\n /** @hideconstructor */\n constructor(private _repo: Repo, private _path: Path) {}\n\n /**\n * Cancels all previously queued `onDisconnect()` set or update events for this\n * location and all children.\n *\n * If a write has been queued for this location via a `set()` or `update()` at a\n * parent location, the write at this location will be canceled, though writes\n * to sibling locations will still occur.\n *\n * @returns Resolves when synchronization to the server is complete.\n */\n cancel(): Promise {\n const deferred = new Deferred();\n repoOnDisconnectCancel(\n this._repo,\n this._path,\n deferred.wrapCallback(() => {})\n );\n return deferred.promise;\n }\n\n /**\n * Ensures the data at this location is deleted when the client is disconnected\n * (due to closing the browser, navigating to a new page, or network issues).\n *\n * @returns Resolves when synchronization to the server is complete.\n */\n remove(): Promise {\n validateWritablePath('OnDisconnect.remove', this._path);\n const deferred = new Deferred();\n repoOnDisconnectSet(\n this._repo,\n this._path,\n null,\n deferred.wrapCallback(() => {})\n );\n return deferred.promise;\n }\n\n /**\n * Ensures the data at this location is set to the specified value when the\n * client is disconnected (due to closing the browser, navigating to a new page,\n * or network issues).\n *\n * `set()` is especially useful for implementing \"presence\" systems, where a\n * value should be changed or cleared when a user disconnects so that they\n * appear \"offline\" to other users. See\n * {@link https://firebase.google.com/docs/database/web/offline-capabilities | Enabling Offline Capabilities in JavaScript}\n * for more information.\n *\n * Note that `onDisconnect` operations are only triggered once. 
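// A minimal presence sketch (assumptions: the modular 'firebase/database'
// entry point, an already-initialized default app, and a 'status/alice'
// location chosen purely for illustration) using the OnDisconnect.set() API
// documented here. The "offline" write is queued first so the server knows
// about it before the client marks itself online.
import { getDatabase, ref, onDisconnect, set } from 'firebase/database';

const db = getDatabase();
const statusRef = ref(db, 'status/alice');
onDisconnect(statusRef)
  .set('offline')
  .then(() => set(statusRef, 'online'));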
If you want an\n * operation to occur each time a disconnect occurs, you'll need to re-establish\n * the `onDisconnect` operations each time.\n *\n * @param value - The value to be written to this location on disconnect (can\n * be an object, array, string, number, boolean, or null).\n * @returns Resolves when synchronization to the Database is complete.\n */\n set(value: unknown): Promise {\n validateWritablePath('OnDisconnect.set', this._path);\n validateFirebaseDataArg('OnDisconnect.set', value, this._path, false);\n const deferred = new Deferred();\n repoOnDisconnectSet(\n this._repo,\n this._path,\n value,\n deferred.wrapCallback(() => {})\n );\n return deferred.promise;\n }\n\n /**\n * Ensures the data at this location is set to the specified value and priority\n * when the client is disconnected (due to closing the browser, navigating to a\n * new page, or network issues).\n *\n * @param value - The value to be written to this location on disconnect (can\n * be an object, array, string, number, boolean, or null).\n * @param priority - The priority to be written (string, number, or null).\n * @returns Resolves when synchronization to the Database is complete.\n */\n setWithPriority(\n value: unknown,\n priority: number | string | null\n ): Promise {\n validateWritablePath('OnDisconnect.setWithPriority', this._path);\n validateFirebaseDataArg(\n 'OnDisconnect.setWithPriority',\n value,\n this._path,\n false\n );\n validatePriority('OnDisconnect.setWithPriority', priority, false);\n\n const deferred = new Deferred();\n repoOnDisconnectSetWithPriority(\n this._repo,\n this._path,\n value,\n priority,\n deferred.wrapCallback(() => {})\n );\n return deferred.promise;\n }\n\n /**\n * Writes multiple values at this location when the client is disconnected (due\n * to closing the browser, navigating to a new page, or network issues).\n *\n * The `values` argument contains multiple property-value pairs that will be\n * written to the Database together. 
Each child property can either be a simple\n * property (for example, \"name\") or a relative path (for example, \"name/first\")\n * from the current location to the data to update.\n *\n * As opposed to the `set()` method, `update()` can be use to selectively update\n * only the referenced properties at the current location (instead of replacing\n * all the child properties at the current location).\n *\n * @param values - Object containing multiple values.\n * @returns Resolves when synchronization to the Database is complete.\n */\n update(values: object): Promise {\n validateWritablePath('OnDisconnect.update', this._path);\n validateFirebaseMergeDataArg(\n 'OnDisconnect.update',\n values,\n this._path,\n false\n );\n const deferred = new Deferred();\n repoOnDisconnectUpdate(\n this._repo,\n this._path,\n values as Record,\n deferred.wrapCallback(() => {})\n );\n return deferred.promise;\n }\n}\n","/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { assert, getModularInstance, Deferred } from '@firebase/util';\n\nimport {\n Repo,\n repoAddEventCallbackForQuery,\n repoGetValue,\n repoRemoveEventCallbackForQuery,\n repoServerTime,\n repoSetWithPriority,\n repoUpdate\n} from '../core/Repo';\nimport { ChildrenNode } from '../core/snap/ChildrenNode';\nimport { Index } from '../core/snap/indexes/Index';\nimport { KEY_INDEX } from '../core/snap/indexes/KeyIndex';\nimport { PathIndex } from '../core/snap/indexes/PathIndex';\nimport { PRIORITY_INDEX } from '../core/snap/indexes/PriorityIndex';\nimport { VALUE_INDEX } from '../core/snap/indexes/ValueIndex';\nimport { Node } from '../core/snap/Node';\nimport { syncPointSetReferenceConstructor } from '../core/SyncPoint';\nimport { syncTreeSetReferenceConstructor } from '../core/SyncTree';\nimport { parseRepoInfo } from '../core/util/libs/parser';\nimport { nextPushId } from '../core/util/NextPushId';\nimport {\n Path,\n pathEquals,\n pathGetBack,\n pathGetFront,\n pathChild,\n pathParent,\n pathToUrlEncodedString,\n pathIsEmpty\n} from '../core/util/Path';\nimport {\n fatal,\n MAX_NAME,\n MIN_NAME,\n ObjectToUniqueKey\n} from '../core/util/util';\nimport {\n isValidPriority,\n validateFirebaseDataArg,\n validateFirebaseMergeDataArg,\n validateKey,\n validatePathString,\n validatePriority,\n validateRootPathString,\n validateUrl,\n validateWritablePath\n} from '../core/util/validation';\nimport { Change } from '../core/view/Change';\nimport { CancelEvent, DataEvent, EventType } from '../core/view/Event';\nimport {\n CallbackContext,\n EventRegistration,\n QueryContext,\n UserCallback\n} from '../core/view/EventRegistration';\nimport {\n QueryParams,\n queryParamsEndAt,\n queryParamsEndBefore,\n queryParamsGetQueryObject,\n queryParamsLimitToFirst,\n queryParamsLimitToLast,\n queryParamsOrderBy,\n queryParamsStartAfter,\n queryParamsStartAt\n} from '../core/view/QueryParams';\n\nimport { Database } from './Database';\nimport { OnDisconnect } from './OnDisconnect';\nimport {\n 
ListenOptions,\n Query as Query,\n DatabaseReference,\n Unsubscribe,\n ThenableReference\n} from './Reference';\n\n/**\n * @internal\n */\nexport class QueryImpl implements Query, QueryContext {\n /**\n * @hideconstructor\n */\n constructor(\n readonly _repo: Repo,\n readonly _path: Path,\n readonly _queryParams: QueryParams,\n readonly _orderByCalled: boolean\n ) {}\n\n get key(): string | null {\n if (pathIsEmpty(this._path)) {\n return null;\n } else {\n return pathGetBack(this._path);\n }\n }\n\n get ref(): DatabaseReference {\n return new ReferenceImpl(this._repo, this._path);\n }\n\n get _queryIdentifier(): string {\n const obj = queryParamsGetQueryObject(this._queryParams);\n const id = ObjectToUniqueKey(obj);\n return id === '{}' ? 'default' : id;\n }\n\n /**\n * An object representation of the query parameters used by this Query.\n */\n get _queryObject(): object {\n return queryParamsGetQueryObject(this._queryParams);\n }\n\n isEqual(other: QueryImpl | null): boolean {\n other = getModularInstance(other);\n if (!(other instanceof QueryImpl)) {\n return false;\n }\n\n const sameRepo = this._repo === other._repo;\n const samePath = pathEquals(this._path, other._path);\n const sameQueryIdentifier =\n this._queryIdentifier === other._queryIdentifier;\n\n return sameRepo && samePath && sameQueryIdentifier;\n }\n\n toJSON(): string {\n return this.toString();\n }\n\n toString(): string {\n return this._repo.toString() + pathToUrlEncodedString(this._path);\n }\n}\n\n/**\n * Validates that no other order by call has been made\n */\nfunction validateNoPreviousOrderByCall(query: QueryImpl, fnName: string) {\n if (query._orderByCalled === true) {\n throw new Error(fnName + \": You can't combine multiple orderBy calls.\");\n }\n}\n\n/**\n * Validates start/end values for queries.\n */\nfunction validateQueryEndpoints(params: QueryParams) {\n let startNode = null;\n let endNode = null;\n if (params.hasStart()) {\n startNode = params.getIndexStartValue();\n }\n if (params.hasEnd()) {\n endNode = params.getIndexEndValue();\n }\n\n if (params.getIndex() === KEY_INDEX) {\n const tooManyArgsError =\n 'Query: When ordering by key, you may only pass one argument to ' +\n 'startAt(), endAt(), or equalTo().';\n const wrongArgTypeError =\n 'Query: When ordering by key, the argument passed to startAt(), startAfter(), ' +\n 'endAt(), endBefore(), or equalTo() must be a string.';\n if (params.hasStart()) {\n const startName = params.getIndexStartName();\n if (startName !== MIN_NAME) {\n throw new Error(tooManyArgsError);\n } else if (typeof startNode !== 'string') {\n throw new Error(wrongArgTypeError);\n }\n }\n if (params.hasEnd()) {\n const endName = params.getIndexEndName();\n if (endName !== MAX_NAME) {\n throw new Error(tooManyArgsError);\n } else if (typeof endNode !== 'string') {\n throw new Error(wrongArgTypeError);\n }\n }\n } else if (params.getIndex() === PRIORITY_INDEX) {\n if (\n (startNode != null && !isValidPriority(startNode)) ||\n (endNode != null && !isValidPriority(endNode))\n ) {\n throw new Error(\n 'Query: When ordering by priority, the first argument passed to startAt(), ' +\n 'startAfter() endAt(), endBefore(), or equalTo() must be a valid priority value ' +\n '(null, a number, or a string).'\n );\n }\n } else {\n assert(\n params.getIndex() instanceof PathIndex ||\n params.getIndex() === VALUE_INDEX,\n 'unknown index type.'\n );\n if (\n (startNode != null && typeof startNode === 'object') ||\n (endNode != null && typeof endNode === 'object')\n ) {\n throw new Error(\n 'Query: 
First argument passed to startAt(), startAfter(), endAt(), endBefore(), or ' +\n 'equalTo() cannot be an object.'\n );\n }\n }\n}\n\n/**\n * Validates that limit* has been called with the correct combination of parameters\n */\nfunction validateLimit(params: QueryParams) {\n if (\n params.hasStart() &&\n params.hasEnd() &&\n params.hasLimit() &&\n !params.hasAnchoredLimit()\n ) {\n throw new Error(\n \"Query: Can't combine startAt(), startAfter(), endAt(), endBefore(), and limit(). Use \" +\n 'limitToFirst() or limitToLast() instead.'\n );\n }\n}\n/**\n * @internal\n */\nexport class ReferenceImpl extends QueryImpl implements DatabaseReference {\n /** @hideconstructor */\n constructor(repo: Repo, path: Path) {\n super(repo, path, new QueryParams(), false);\n }\n\n get parent(): ReferenceImpl | null {\n const parentPath = pathParent(this._path);\n return parentPath === null\n ? null\n : new ReferenceImpl(this._repo, parentPath);\n }\n\n get root(): ReferenceImpl {\n let ref: ReferenceImpl = this;\n while (ref.parent !== null) {\n ref = ref.parent;\n }\n return ref;\n }\n}\n\n/**\n * A `DataSnapshot` contains data from a Database location.\n *\n * Any time you read data from the Database, you receive the data as a\n * `DataSnapshot`. A `DataSnapshot` is passed to the event callbacks you attach\n * with `on()` or `once()`. You can extract the contents of the snapshot as a\n * JavaScript object by calling the `val()` method. Alternatively, you can\n * traverse into the snapshot by calling `child()` to return child snapshots\n * (which you could then call `val()` on).\n *\n * A `DataSnapshot` is an efficiently generated, immutable copy of the data at\n * a Database location. It cannot be modified and will never change (to modify\n * data, you always call the `set()` method on a `Reference` directly).\n */\nexport class DataSnapshot {\n /**\n * @param _node - A SnapshotNode to wrap.\n * @param ref - The location this snapshot came from.\n * @param _index - The iteration order for this snapshot\n * @hideconstructor\n */\n constructor(\n readonly _node: Node,\n /**\n * The location of this DataSnapshot.\n */\n readonly ref: DatabaseReference,\n readonly _index: Index\n ) {}\n\n /**\n * Gets the priority value of the data in this `DataSnapshot`.\n *\n * Applications need not use priority but can order collections by\n * ordinary properties (see\n * {@link https://firebase.google.com/docs/database/web/lists-of-data#sorting_and_filtering_data |Sorting and filtering data}\n * ).\n */\n get priority(): string | number | null {\n // typecast here because we never return deferred values or internal priorities (MAX_PRIORITY)\n return this._node.getPriority().val() as string | number | null;\n }\n\n /**\n * The key (last part of the path) of the location of this `DataSnapshot`.\n *\n * The last token in a Database location is considered its key. For example,\n * \"ada\" is the key for the /users/ada/ node. Accessing the key on any\n * `DataSnapshot` will return the key for the location that generated it.\n * However, accessing the key on the root URL of a Database will return\n * `null`.\n */\n get key(): string | null {\n return this.ref.key;\n }\n\n /** Returns the number of child properties of this `DataSnapshot`. 
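// A minimal sketch (assumptions: the query helpers exported from
// 'firebase/database' and an initialized default app; 'users' is an
// illustrative path) of the ordering rule enforced by
// validateQueryEndpoints() above: when ordering by key, the bound passed to
// startAt()/startAfter()/endAt()/endBefore()/equalTo() must be a single string.
import { getDatabase, ref, query, orderByKey, startAt } from 'firebase/database';

const db = getDatabase();
// Valid: a string bound while ordering by key.
const byKey = query(ref(db, 'users'), orderByKey(), startAt('user_100'));
// Rejected by the validation above: a numeric bound while ordering by key.
// query(ref(db, 'users'), orderByKey(), startAt(42));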
*/\n get size(): number {\n return this._node.numChildren();\n }\n\n /**\n * Gets another `DataSnapshot` for the location at the specified relative path.\n *\n * Passing a relative path to the `child()` method of a DataSnapshot returns\n * another `DataSnapshot` for the location at the specified relative path. The\n * relative path can either be a simple child name (for example, \"ada\") or a\n * deeper, slash-separated path (for example, \"ada/name/first\"). If the child\n * location has no data, an empty `DataSnapshot` (that is, a `DataSnapshot`\n * whose value is `null`) is returned.\n *\n * @param path - A relative path to the location of child data.\n */\n child(path: string): DataSnapshot {\n const childPath = new Path(path);\n const childRef = child(this.ref, path);\n return new DataSnapshot(\n this._node.getChild(childPath),\n childRef,\n PRIORITY_INDEX\n );\n }\n /**\n * Returns true if this `DataSnapshot` contains any data. It is slightly more\n * efficient than using `snapshot.val() !== null`.\n */\n exists(): boolean {\n return !this._node.isEmpty();\n }\n\n /**\n * Exports the entire contents of the DataSnapshot as a JavaScript object.\n *\n * The `exportVal()` method is similar to `val()`, except priority information\n * is included (if available), making it suitable for backing up your data.\n *\n * @returns The DataSnapshot's contents as a JavaScript value (Object,\n * Array, string, number, boolean, or `null`).\n */\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n exportVal(): any {\n return this._node.val(true);\n }\n\n /**\n * Enumerates the top-level children in the `IteratedDataSnapshot`.\n *\n * Because of the way JavaScript objects work, the ordering of data in the\n * JavaScript object returned by `val()` is not guaranteed to match the\n * ordering on the server nor the ordering of `onChildAdded()` events. That is\n * where `forEach()` comes in handy. It guarantees the children of a\n * `DataSnapshot` will be iterated in their query order.\n *\n * If no explicit `orderBy*()` method is used, results are returned\n * ordered by key (unless priorities are used, in which case, results are\n * returned by priority).\n *\n * @param action - A function that will be called for each child DataSnapshot.\n * The callback can return true to cancel further enumeration.\n * @returns true if enumeration was canceled due to your callback returning\n * true.\n */\n forEach(action: (child: IteratedDataSnapshot) => boolean | void): boolean {\n if (this._node.isLeafNode()) {\n return false;\n }\n\n const childrenNode = this._node as ChildrenNode;\n // Sanitize the return value to a boolean. ChildrenNode.forEachChild has a weird return type...\n return !!childrenNode.forEachChild(this._index, (key, node) => {\n return action(\n new DataSnapshot(node, child(this.ref, key), PRIORITY_INDEX)\n );\n });\n }\n\n /**\n * Returns true if the specified child path has (non-null) data.\n *\n * @param path - A relative path to the location of a potential child.\n * @returns `true` if data exists at the specified child path; else\n * `false`.\n */\n hasChild(path: string): boolean {\n const childPath = new Path(path);\n return !this._node.getChild(childPath).isEmpty();\n }\n\n /**\n * Returns whether or not the `DataSnapshot` has any non-`null` child\n * properties.\n *\n * You can use `hasChildren()` to determine if a `DataSnapshot` has any\n * children. If it does, you can enumerate them using `forEach()`. 
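// A minimal sketch (assumptions: the 'firebase/database' entry point, an
// initialized default app, and a 'scores' list used for illustration) of the
// DataSnapshot accessors documented here: exists(), forEach(), key and val().
import { getDatabase, ref, get } from 'firebase/database';

async function logScores(): Promise<void> {
  const db = getDatabase();
  const snapshot = await get(ref(db, 'scores'));
  if (snapshot.exists()) {
    snapshot.forEach(childSnapshot => {
      // Children are visited in query order (by key here, since no orderBy*()
      // was applied).
      console.log(childSnapshot.key, childSnapshot.val());
    });
  }
}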
If it\n * doesn't, then either this snapshot contains a primitive value (which can be\n * retrieved with `val()`) or it is empty (in which case, `val()` will return\n * `null`).\n *\n * @returns true if this snapshot has any children; else false.\n */\n hasChildren(): boolean {\n if (this._node.isLeafNode()) {\n return false;\n } else {\n return !this._node.isEmpty();\n }\n }\n\n /**\n * Returns a JSON-serializable representation of this object.\n */\n toJSON(): object | null {\n return this.exportVal();\n }\n\n /**\n * Extracts a JavaScript value from a `DataSnapshot`.\n *\n * Depending on the data in a `DataSnapshot`, the `val()` method may return a\n * scalar type (string, number, or boolean), an array, or an object. It may\n * also return null, indicating that the `DataSnapshot` is empty (contains no\n * data).\n *\n * @returns The DataSnapshot's contents as a JavaScript value (Object,\n * Array, string, number, boolean, or `null`).\n */\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n val(): any {\n return this._node.val();\n }\n}\n\n/**\n * Represents a child snapshot of a `Reference` that is being iterated over. The key will never be undefined.\n */\nexport interface IteratedDataSnapshot extends DataSnapshot {\n key: string; // key of the location of this snapshot.\n}\n\n/**\n *\n * Returns a `Reference` representing the location in the Database\n * corresponding to the provided path. If no path is provided, the `Reference`\n * will point to the root of the Database.\n *\n * @param db - The database instance to obtain a reference for.\n * @param path - Optional path representing the location the returned\n * `Reference` will point. If not provided, the returned `Reference` will\n * point to the root of the Database.\n * @returns If a path is provided, a `Reference`\n * pointing to the provided path. Otherwise, a `Reference` pointing to the\n * root of the Database.\n */\nexport function ref(db: Database, path?: string): DatabaseReference {\n db = getModularInstance(db);\n db._checkNotDeleted('ref');\n return path !== undefined ? child(db._root, path) : db._root;\n}\n\n/**\n * Returns a `Reference` representing the location in the Database\n * corresponding to the provided Firebase URL.\n *\n * An exception is thrown if the URL is not a valid Firebase Database URL or it\n * has a different domain than the current `Database` instance.\n *\n * Note that all query parameters (`orderBy`, `limitToLast`, etc.) 
are ignored\n * and are not applied to the returned `Reference`.\n *\n * @param db - The database instance to obtain a reference for.\n * @param url - The Firebase URL at which the returned `Reference` will\n * point.\n * @returns A `Reference` pointing to the provided\n * Firebase URL.\n */\nexport function refFromURL(db: Database, url: string): DatabaseReference {\n db = getModularInstance(db);\n db._checkNotDeleted('refFromURL');\n const parsedURL = parseRepoInfo(url, db._repo.repoInfo_.nodeAdmin);\n validateUrl('refFromURL', parsedURL);\n\n const repoInfo = parsedURL.repoInfo;\n if (\n !db._repo.repoInfo_.isCustomHost() &&\n repoInfo.host !== db._repo.repoInfo_.host\n ) {\n fatal(\n 'refFromURL' +\n ': Host name does not match the current database: ' +\n '(found ' +\n repoInfo.host +\n ' but expected ' +\n db._repo.repoInfo_.host +\n ')'\n );\n }\n\n return ref(db, parsedURL.path.toString());\n}\n/**\n * Gets a `Reference` for the location at the specified relative path.\n *\n * The relative path can either be a simple child name (for example, \"ada\") or\n * a deeper slash-separated path (for example, \"ada/name/first\").\n *\n * @param parent - The parent location.\n * @param path - A relative path from this location to the desired child\n * location.\n * @returns The specified child location.\n */\nexport function child(\n parent: DatabaseReference,\n path: string\n): DatabaseReference {\n parent = getModularInstance(parent);\n if (pathGetFront(parent._path) === null) {\n validateRootPathString('child', 'path', path, false);\n } else {\n validatePathString('child', 'path', path, false);\n }\n return new ReferenceImpl(parent._repo, pathChild(parent._path, path));\n}\n\n/**\n * Returns an `OnDisconnect` object - see\n * {@link https://firebase.google.com/docs/database/web/offline-capabilities | Enabling Offline Capabilities in JavaScript}\n * for more information on how to use it.\n *\n * @param ref - The reference to add OnDisconnect triggers for.\n */\nexport function onDisconnect(ref: DatabaseReference): OnDisconnect {\n ref = getModularInstance(ref) as ReferenceImpl;\n return new OnDisconnect(ref._repo, ref._path);\n}\n\nexport interface ThenableReferenceImpl\n extends ReferenceImpl,\n Pick, 'then' | 'catch'> {}\n\n/**\n * Generates a new child location using a unique key and returns its\n * `Reference`.\n *\n * This is the most common pattern for adding data to a collection of items.\n *\n * If you provide a value to `push()`, the value is written to the\n * generated location. If you don't pass a value, nothing is written to the\n * database and the child remains empty (but you can use the `Reference`\n * elsewhere).\n *\n * The unique keys generated by `push()` are ordered by the current time, so the\n * resulting list of items is chronologically sorted. 
The keys are also\n * designed to be unguessable (they contain 72 random bits of entropy).\n *\n * See {@link https://firebase.google.com/docs/database/web/lists-of-data#append_to_a_list_of_data | Append to a list of data}.\n * See {@link https://firebase.googleblog.com/2015/02/the-2120-ways-to-ensure-unique_68.html | The 2^120 Ways to Ensure Unique Identifiers}.\n *\n * @param parent - The parent location.\n * @param value - Optional value to be written at the generated location.\n * @returns Combined `Promise` and `Reference`; resolves when write is complete,\n * but can be used immediately as the `Reference` to the child location.\n */\nexport function push(\n parent: DatabaseReference,\n value?: unknown\n): ThenableReference {\n parent = getModularInstance(parent);\n validateWritablePath('push', parent._path);\n validateFirebaseDataArg('push', value, parent._path, true);\n const now = repoServerTime(parent._repo);\n const name = nextPushId(now);\n\n // push() returns a ThennableReference whose promise is fulfilled with a\n // regular Reference. We use child() to create handles to two different\n // references. The first is turned into a ThennableReference below by adding\n // then() and catch() methods and is used as the return value of push(). The\n // second remains a regular Reference and is used as the fulfilled value of\n // the first ThennableReference.\n const thennablePushRef: Partial = child(\n parent,\n name\n ) as ReferenceImpl;\n const pushRef = child(parent, name) as ReferenceImpl;\n\n let promise: Promise;\n if (value != null) {\n promise = set(pushRef, value).then(() => pushRef);\n } else {\n promise = Promise.resolve(pushRef);\n }\n\n thennablePushRef.then = promise.then.bind(promise);\n thennablePushRef.catch = promise.then.bind(promise, undefined);\n return thennablePushRef as ThenableReferenceImpl;\n}\n\n/**\n * Removes the data at this Database location.\n *\n * Any data at child locations will also be deleted.\n *\n * The effect of the remove will be visible immediately and the corresponding\n * event 'value' will be triggered. Synchronization of the remove to the\n * Firebase servers will also be started, and the returned Promise will resolve\n * when complete. If provided, the onComplete callback will be called\n * asynchronously after synchronization has finished.\n *\n * @param ref - The location to remove.\n * @returns Resolves when remove on server is complete.\n */\nexport function remove(ref: DatabaseReference): Promise {\n validateWritablePath('remove', ref._path);\n return set(ref, null);\n}\n\n/**\n * Writes data to this Database location.\n *\n * This will overwrite any data at this location and all child locations.\n *\n * The effect of the write will be visible immediately, and the corresponding\n * events (\"value\", \"child_added\", etc.) will be triggered. Synchronization of\n * the data to the Firebase servers will also be started, and the returned\n * Promise will resolve when complete. 
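// A minimal sketch (assumptions: the 'firebase/database' entry point, an
// initialized default app, and a 'messages' list used for illustration) of
// the push() pattern documented above: the generated key is available
// synchronously, and the returned reference also acts as a Promise for the
// write itself.
import { getDatabase, ref, push } from 'firebase/database';

const db = getDatabase();
const newMessageRef = push(ref(db, 'messages'), { text: 'hello' });
console.log('new child key:', newMessageRef.key);
newMessageRef.then(() => console.log('write confirmed by the server'));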
If provided, the `onComplete` callback\n * will be called asynchronously after synchronization has finished.\n *\n * Passing `null` for the new value is equivalent to calling `remove()`; namely,\n * all data at this location and all child locations will be deleted.\n *\n * `set()` will remove any priority stored at this location, so if priority is\n * meant to be preserved, you need to use `setWithPriority()` instead.\n *\n * Note that modifying data with `set()` will cancel any pending transactions\n * at that location, so extreme care should be taken if mixing `set()` and\n * `transaction()` to modify the same data.\n *\n * A single `set()` will generate a single \"value\" event at the location where\n * the `set()` was performed.\n *\n * @param ref - The location to write to.\n * @param value - The value to be written (string, number, boolean, object,\n * array, or null).\n * @returns Resolves when write to server is complete.\n */\nexport function set(ref: DatabaseReference, value: unknown): Promise {\n ref = getModularInstance(ref);\n validateWritablePath('set', ref._path);\n validateFirebaseDataArg('set', value, ref._path, false);\n const deferred = new Deferred();\n repoSetWithPriority(\n ref._repo,\n ref._path,\n value,\n /*priority=*/ null,\n deferred.wrapCallback(() => {})\n );\n return deferred.promise;\n}\n\n/**\n * Sets a priority for the data at this Database location.\n *\n * Applications need not use priority but can order collections by\n * ordinary properties (see\n * {@link https://firebase.google.com/docs/database/web/lists-of-data#sorting_and_filtering_data | Sorting and filtering data}\n * ).\n *\n * @param ref - The location to write to.\n * @param priority - The priority to be written (string, number, or null).\n * @returns Resolves when write to server is complete.\n */\nexport function setPriority(\n ref: DatabaseReference,\n priority: string | number | null\n): Promise {\n ref = getModularInstance(ref);\n validateWritablePath('setPriority', ref._path);\n validatePriority('setPriority', priority, false);\n const deferred = new Deferred();\n repoSetWithPriority(\n ref._repo,\n pathChild(ref._path, '.priority'),\n priority,\n null,\n deferred.wrapCallback(() => {})\n );\n return deferred.promise;\n}\n\n/**\n * Writes data the Database location. 
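// A minimal sketch (assumptions: the 'firebase/database' entry point, an
// initialized default app, and the 'users/ada' location chosen for
// illustration) of set() as documented above: the write replaces all data at
// the location, and passing null behaves like remove().
import { getDatabase, ref, set } from 'firebase/database';

async function writeAda(): Promise<void> {
  const db = getDatabase();
  await set(ref(db, 'users/ada'), { first: 'Ada', last: 'Lovelace' });
  await set(ref(db, 'users/ada/temporary'), null); // equivalent to remove()
}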
Like `set()` but also specifies the\n * priority for that data.\n *\n * Applications need not use priority but can order collections by\n * ordinary properties (see\n * {@link https://firebase.google.com/docs/database/web/lists-of-data#sorting_and_filtering_data | Sorting and filtering data}\n * ).\n *\n * @param ref - The location to write to.\n * @param value - The value to be written (string, number, boolean, object,\n * array, or null).\n * @param priority - The priority to be written (string, number, or null).\n * @returns Resolves when write to server is complete.\n */\nexport function setWithPriority(\n ref: DatabaseReference,\n value: unknown,\n priority: string | number | null\n): Promise {\n validateWritablePath('setWithPriority', ref._path);\n validateFirebaseDataArg('setWithPriority', value, ref._path, false);\n validatePriority('setWithPriority', priority, false);\n if (ref.key === '.length' || ref.key === '.keys') {\n throw 'setWithPriority failed: ' + ref.key + ' is a read-only object.';\n }\n\n const deferred = new Deferred();\n repoSetWithPriority(\n ref._repo,\n ref._path,\n value,\n priority,\n deferred.wrapCallback(() => {})\n );\n return deferred.promise;\n}\n\n/**\n * Writes multiple values to the Database at once.\n *\n * The `values` argument contains multiple property-value pairs that will be\n * written to the Database together. Each child property can either be a simple\n * property (for example, \"name\") or a relative path (for example,\n * \"name/first\") from the current location to the data to update.\n *\n * As opposed to the `set()` method, `update()` can be use to selectively update\n * only the referenced properties at the current location (instead of replacing\n * all the child properties at the current location).\n *\n * The effect of the write will be visible immediately, and the corresponding\n * events ('value', 'child_added', etc.) will be triggered. Synchronization of\n * the data to the Firebase servers will also be started, and the returned\n * Promise will resolve when complete. 
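// A minimal sketch (assumptions: the 'firebase/database' entry point, an
// initialized default app, and the 'users/ada' location used for
// illustration) of the selective, multi-path update() described here: only
// the listed children are touched, and relative paths may be used as keys.
import { getDatabase, ref, update } from 'firebase/database';

const db = getDatabase();
update(ref(db, 'users/ada'), {
  nickname: 'ada', // updates users/ada/nickname
  'address/city': 'London' // updates users/ada/address/city, leaves siblings intact
}).then(() => console.log('update synchronized'));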
If provided, the `onComplete` callback\n * will be called asynchronously after synchronization has finished.\n *\n * A single `update()` will generate a single \"value\" event at the location\n * where the `update()` was performed, regardless of how many children were\n * modified.\n *\n * Note that modifying data with `update()` will cancel any pending\n * transactions at that location, so extreme care should be taken if mixing\n * `update()` and `transaction()` to modify the same data.\n *\n * Passing `null` to `update()` will remove the data at this location.\n *\n * See\n * {@link https://firebase.googleblog.com/2015/09/introducing-multi-location-updates-and_86.html | Introducing multi-location updates and more}.\n *\n * @param ref - The location to write to.\n * @param values - Object containing multiple values.\n * @returns Resolves when update on server is complete.\n */\nexport function update(ref: DatabaseReference, values: object): Promise {\n validateFirebaseMergeDataArg('update', values, ref._path, false);\n const deferred = new Deferred();\n repoUpdate(\n ref._repo,\n ref._path,\n values as Record,\n deferred.wrapCallback(() => {})\n );\n return deferred.promise;\n}\n\n/**\n * Gets the most up-to-date result for this query.\n *\n * @param query - The query to run.\n * @returns A `Promise` which resolves to the resulting DataSnapshot if a value is\n * available, or rejects if the client is unable to return a value (e.g., if the\n * server is unreachable and there is nothing cached).\n */\nexport function get(query: Query): Promise {\n query = getModularInstance(query) as QueryImpl;\n const callbackContext = new CallbackContext(() => {});\n const container = new ValueEventRegistration(callbackContext);\n return repoGetValue(query._repo, query, container).then(node => {\n return new DataSnapshot(\n node,\n new ReferenceImpl(query._repo, query._path),\n query._queryParams.getIndex()\n );\n });\n}\n/**\n * Represents registration for 'value' events.\n */\nexport class ValueEventRegistration implements EventRegistration {\n constructor(private callbackContext: CallbackContext) {}\n\n respondsTo(eventType: string): boolean {\n return eventType === 'value';\n }\n\n createEvent(change: Change, query: QueryContext): DataEvent {\n const index = query._queryParams.getIndex();\n return new DataEvent(\n 'value',\n this,\n new DataSnapshot(\n change.snapshotNode,\n new ReferenceImpl(query._repo, query._path),\n index\n )\n );\n }\n\n getEventRunner(eventData: CancelEvent | DataEvent): () => void {\n if (eventData.getEventType() === 'cancel') {\n return () =>\n this.callbackContext.onCancel((eventData as CancelEvent).error);\n } else {\n return () =>\n this.callbackContext.onValue((eventData as DataEvent).snapshot, null);\n }\n }\n\n createCancelEvent(error: Error, path: Path): CancelEvent | null {\n if (this.callbackContext.hasCancelCallback) {\n return new CancelEvent(this, error, path);\n } else {\n return null;\n }\n }\n\n matches(other: EventRegistration): boolean {\n if (!(other instanceof ValueEventRegistration)) {\n return false;\n } else if (!other.callbackContext || !this.callbackContext) {\n // If no callback specified, we consider it to match any callback.\n return true;\n } else {\n return other.callbackContext.matches(this.callbackContext);\n }\n }\n\n hasAnyCallback(): boolean {\n return this.callbackContext !== null;\n }\n}\n\n/**\n * Represents the registration of a child_x event.\n */\nexport class ChildEventRegistration implements EventRegistration {\n constructor(\n 
private eventType: string,\n private callbackContext: CallbackContext | null\n ) {}\n\n respondsTo(eventType: string): boolean {\n let eventToCheck =\n eventType === 'children_added' ? 'child_added' : eventType;\n eventToCheck =\n eventToCheck === 'children_removed' ? 'child_removed' : eventToCheck;\n return this.eventType === eventToCheck;\n }\n\n createCancelEvent(error: Error, path: Path): CancelEvent | null {\n if (this.callbackContext.hasCancelCallback) {\n return new CancelEvent(this, error, path);\n } else {\n return null;\n }\n }\n\n createEvent(change: Change, query: QueryContext): DataEvent {\n assert(change.childName != null, 'Child events should have a childName.');\n const childRef = child(\n new ReferenceImpl(query._repo, query._path),\n change.childName\n );\n const index = query._queryParams.getIndex();\n return new DataEvent(\n change.type as EventType,\n this,\n new DataSnapshot(change.snapshotNode, childRef, index),\n change.prevName\n );\n }\n\n getEventRunner(eventData: CancelEvent | DataEvent): () => void {\n if (eventData.getEventType() === 'cancel') {\n return () =>\n this.callbackContext.onCancel((eventData as CancelEvent).error);\n } else {\n return () =>\n this.callbackContext.onValue(\n (eventData as DataEvent).snapshot,\n (eventData as DataEvent).prevName\n );\n }\n }\n\n matches(other: EventRegistration): boolean {\n if (other instanceof ChildEventRegistration) {\n return (\n this.eventType === other.eventType &&\n (!this.callbackContext ||\n !other.callbackContext ||\n this.callbackContext.matches(other.callbackContext))\n );\n }\n\n return false;\n }\n\n hasAnyCallback(): boolean {\n return !!this.callbackContext;\n }\n}\n\nfunction addEventListener(\n query: Query,\n eventType: EventType,\n callback: UserCallback,\n cancelCallbackOrListenOptions?: ((error: Error) => unknown) | ListenOptions,\n options?: ListenOptions\n) {\n let cancelCallback: ((error: Error) => unknown) | undefined;\n if (typeof cancelCallbackOrListenOptions === 'object') {\n cancelCallback = undefined;\n options = cancelCallbackOrListenOptions;\n }\n if (typeof cancelCallbackOrListenOptions === 'function') {\n cancelCallback = cancelCallbackOrListenOptions;\n }\n\n if (options && options.onlyOnce) {\n const userCallback = callback;\n const onceCallback: UserCallback = (dataSnapshot, previousChildName) => {\n repoRemoveEventCallbackForQuery(query._repo, query, container);\n userCallback(dataSnapshot, previousChildName);\n };\n onceCallback.userCallback = callback.userCallback;\n onceCallback.context = callback.context;\n callback = onceCallback;\n }\n\n const callbackContext = new CallbackContext(\n callback,\n cancelCallback || undefined\n );\n const container =\n eventType === 'value'\n ? new ValueEventRegistration(callbackContext)\n : new ChildEventRegistration(eventType, callbackContext);\n repoAddEventCallbackForQuery(query._repo, query, container);\n return () => repoRemoveEventCallbackForQuery(query._repo, query, container);\n}\n\n/**\n * Listens for data changes at a particular location.\n *\n * This is the primary way to read data from a Database. Your callback\n * will be triggered for the initial data and again whenever the data changes.\n * Invoke the returned unsubscribe callback to stop receiving updates. See\n * {@link https://firebase.google.com/docs/database/web/retrieve-data | Retrieve Data on the Web}\n * for more details.\n *\n * An `onValue` event will trigger once with the initial data stored at this\n * location, and then trigger again each time the data changes. 
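// One-shot read sketch using get(), defined earlier in this file. The 'todos/123' path
// and the public 'firebase/database' entry point are assumptions for illustration.
import { getDatabase, ref, get } from 'firebase/database';

async function readOnce(): Promise<void> {
  const db = getDatabase();
  const snapshot = await get(ref(db, 'todos/123'));
  if (snapshot.exists()) {
    console.log('current value:', snapshot.val());
  } else {
    console.log('no data at this location');
  }
}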
The\n * `DataSnapshot` passed to the callback will be for the location at which\n * `on()` was called. It won't trigger until the entire contents has been\n * synchronized. If the location has no data, it will be triggered with an empty\n * `DataSnapshot` (`val()` will return `null`).\n *\n * @param query - The query to run.\n * @param callback - A callback that fires when the specified event occurs. The\n * callback will be passed a DataSnapshot.\n * @param cancelCallback - An optional callback that will be notified if your\n * event subscription is ever canceled because your client does not have\n * permission to read this data (or it had permission but has now lost it).\n * This callback will be passed an `Error` object indicating why the failure\n * occurred.\n * @returns A function that can be invoked to remove the listener.\n */\nexport function onValue(\n query: Query,\n callback: (snapshot: DataSnapshot) => unknown,\n cancelCallback?: (error: Error) => unknown\n): Unsubscribe;\n\n/**\n * Listens for data changes at a particular location.\n *\n * This is the primary way to read data from a Database. Your callback\n * will be triggered for the initial data and again whenever the data changes.\n * Invoke the returned unsubscribe callback to stop receiving updates. See\n * {@link https://firebase.google.com/docs/database/web/retrieve-data | Retrieve Data on the Web}\n * for more details.\n *\n * An `onValue` event will trigger once with the initial data stored at this\n * location, and then trigger again each time the data changes. The\n * `DataSnapshot` passed to the callback will be for the location at which\n * `on()` was called. It won't trigger until the entire contents has been\n * synchronized. If the location has no data, it will be triggered with an empty\n * `DataSnapshot` (`val()` will return `null`).\n *\n * @param query - The query to run.\n * @param callback - A callback that fires when the specified event occurs. The\n * callback will be passed a DataSnapshot.\n * @param options - An object that can be used to configure `onlyOnce`, which\n * then removes the listener after its first invocation.\n * @returns A function that can be invoked to remove the listener.\n */\nexport function onValue(\n query: Query,\n callback: (snapshot: DataSnapshot) => unknown,\n options: ListenOptions\n): Unsubscribe;\n\n/**\n * Listens for data changes at a particular location.\n *\n * This is the primary way to read data from a Database. Your callback\n * will be triggered for the initial data and again whenever the data changes.\n * Invoke the returned unsubscribe callback to stop receiving updates. See\n * {@link https://firebase.google.com/docs/database/web/retrieve-data | Retrieve Data on the Web}\n * for more details.\n *\n * An `onValue` event will trigger once with the initial data stored at this\n * location, and then trigger again each time the data changes. The\n * `DataSnapshot` passed to the callback will be for the location at which\n * `on()` was called. It won't trigger until the entire contents has been\n * synchronized. If the location has no data, it will be triggered with an empty\n * `DataSnapshot` (`val()` will return `null`).\n *\n * @param query - The query to run.\n * @param callback - A callback that fires when the specified event occurs. 
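// Listener sketch for the onValue() overloads documented here. The 'posts' path is a
// placeholder; the unsubscribe function and the { onlyOnce: true } option come from the
// documented API surface.
import { getDatabase, ref, onValue } from 'firebase/database';

const db = getDatabase();
const postsRef = ref(db, 'posts'); // hypothetical location

// Continuous listener: fires with the initial contents and on every subsequent change.
const unsubscribe = onValue(
  postsRef,
  snapshot => console.log('posts:', snapshot.val()),
  error => console.error('read cancelled:', error)
);

// Later, stop receiving updates.
unsubscribe();

// One-shot variant: the listener removes itself after its first invocation.
onValue(postsRef, snapshot => console.log(snapshot.val()), { onlyOnce: true });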
The\n * callback will be passed a DataSnapshot.\n * @param cancelCallback - An optional callback that will be notified if your\n * event subscription is ever canceled because your client does not have\n * permission to read this data (or it had permission but has now lost it).\n * This callback will be passed an `Error` object indicating why the failure\n * occurred.\n * @param options - An object that can be used to configure `onlyOnce`, which\n * then removes the listener after its first invocation.\n * @returns A function that can be invoked to remove the listener.\n */\nexport function onValue(\n query: Query,\n callback: (snapshot: DataSnapshot) => unknown,\n cancelCallback: (error: Error) => unknown,\n options: ListenOptions\n): Unsubscribe;\n\nexport function onValue(\n query: Query,\n callback: (snapshot: DataSnapshot) => unknown,\n cancelCallbackOrListenOptions?: ((error: Error) => unknown) | ListenOptions,\n options?: ListenOptions\n): Unsubscribe {\n return addEventListener(\n query,\n 'value',\n callback,\n cancelCallbackOrListenOptions,\n options\n );\n}\n\n/**\n * Listens for data changes at a particular location.\n *\n * This is the primary way to read data from a Database. Your callback\n * will be triggered for the initial data and again whenever the data changes.\n * Invoke the returned unsubscribe callback to stop receiving updates. See\n * {@link https://firebase.google.com/docs/database/web/retrieve-data | Retrieve Data on the Web}\n * for more details.\n *\n * An `onChildAdded` event will be triggered once for each initial child at this\n * location, and it will be triggered again every time a new child is added. The\n * `DataSnapshot` passed into the callback will reflect the data for the\n * relevant child. For ordering purposes, it is passed a second argument which\n * is a string containing the key of the previous sibling child by sort order,\n * or `null` if it is the first child.\n *\n * @param query - The query to run.\n * @param callback - A callback that fires when the specified event occurs.\n * The callback will be passed a DataSnapshot and a string containing the key of\n * the previous child, by sort order, or `null` if it is the first child.\n * @param cancelCallback - An optional callback that will be notified if your\n * event subscription is ever canceled because your client does not have\n * permission to read this data (or it had permission but has now lost it).\n * This callback will be passed an `Error` object indicating why the failure\n * occurred.\n * @returns A function that can be invoked to remove the listener.\n */\nexport function onChildAdded(\n query: Query,\n callback: (\n snapshot: DataSnapshot,\n previousChildName?: string | null\n ) => unknown,\n cancelCallback?: (error: Error) => unknown\n): Unsubscribe;\n\n/**\n * Listens for data changes at a particular location.\n *\n * This is the primary way to read data from a Database. Your callback\n * will be triggered for the initial data and again whenever the data changes.\n * Invoke the returned unsubscribe callback to stop receiving updates. See\n * {@link https://firebase.google.com/docs/database/web/retrieve-data | Retrieve Data on the Web}\n * for more details.\n *\n * An `onChildAdded` event will be triggered once for each initial child at this\n * location, and it will be triggered again every time a new child is added. The\n * `DataSnapshot` passed into the callback will reflect the data for the\n * relevant child. 
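// Sketch of onChildAdded() on a list location (hypothetical 'messages' path). The second
// callback argument is the key of the previous sibling in sort order, or null for the
// first child, as described above.
import { getDatabase, ref, onChildAdded } from 'firebase/database';

const db = getDatabase();
const stopAdded = onChildAdded(
  ref(db, 'messages'),
  (snapshot, previousChildName) => {
    console.log('new message', snapshot.key, 'after', previousChildName);
  }
);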
For ordering purposes, it is passed a second argument which\n * is a string containing the key of the previous sibling child by sort order,\n * or `null` if it is the first child.\n *\n * @param query - The query to run.\n * @param callback - A callback that fires when the specified event occurs.\n * The callback will be passed a DataSnapshot and a string containing the key of\n * the previous child, by sort order, or `null` if it is the first child.\n * @param options - An object that can be used to configure `onlyOnce`, which\n * then removes the listener after its first invocation.\n * @returns A function that can be invoked to remove the listener.\n */\nexport function onChildAdded(\n query: Query,\n callback: (\n snapshot: DataSnapshot,\n previousChildName: string | null\n ) => unknown,\n options: ListenOptions\n): Unsubscribe;\n\n/**\n * Listens for data changes at a particular location.\n *\n * This is the primary way to read data from a Database. Your callback\n * will be triggered for the initial data and again whenever the data changes.\n * Invoke the returned unsubscribe callback to stop receiving updates. See\n * {@link https://firebase.google.com/docs/database/web/retrieve-data | Retrieve Data on the Web}\n * for more details.\n *\n * An `onChildAdded` event will be triggered once for each initial child at this\n * location, and it will be triggered again every time a new child is added. The\n * `DataSnapshot` passed into the callback will reflect the data for the\n * relevant child. For ordering purposes, it is passed a second argument which\n * is a string containing the key of the previous sibling child by sort order,\n * or `null` if it is the first child.\n *\n * @param query - The query to run.\n * @param callback - A callback that fires when the specified event occurs.\n * The callback will be passed a DataSnapshot and a string containing the key of\n * the previous child, by sort order, or `null` if it is the first child.\n * @param cancelCallback - An optional callback that will be notified if your\n * event subscription is ever canceled because your client does not have\n * permission to read this data (or it had permission but has now lost it).\n * This callback will be passed an `Error` object indicating why the failure\n * occurred.\n * @param options - An object that can be used to configure `onlyOnce`, which\n * then removes the listener after its first invocation.\n * @returns A function that can be invoked to remove the listener.\n */\nexport function onChildAdded(\n query: Query,\n callback: (\n snapshot: DataSnapshot,\n previousChildName: string | null\n ) => unknown,\n cancelCallback: (error: Error) => unknown,\n options: ListenOptions\n): Unsubscribe;\n\nexport function onChildAdded(\n query: Query,\n callback: (\n snapshot: DataSnapshot,\n previousChildName: string | null\n ) => unknown,\n cancelCallbackOrListenOptions?: ((error: Error) => unknown) | ListenOptions,\n options?: ListenOptions\n): Unsubscribe {\n return addEventListener(\n query,\n 'child_added',\n callback,\n cancelCallbackOrListenOptions,\n options\n );\n}\n\n/**\n * Listens for data changes at a particular location.\n *\n * This is the primary way to read data from a Database. Your callback\n * will be triggered for the initial data and again whenever the data changes.\n * Invoke the returned unsubscribe callback to stop receiving updates. 
See\n * {@link https://firebase.google.com/docs/database/web/retrieve-data | Retrieve Data on the Web}\n * for more details.\n *\n * An `onChildChanged` event will be triggered when the data stored in a child\n * (or any of its descendants) changes. Note that a single `child_changed` event\n * may represent multiple changes to the child. The `DataSnapshot` passed to the\n * callback will contain the new child contents. For ordering purposes, the\n * callback is also passed a second argument which is a string containing the\n * key of the previous sibling child by sort order, or `null` if it is the first\n * child.\n *\n * @param query - The query to run.\n * @param callback - A callback that fires when the specified event occurs.\n * The callback will be passed a DataSnapshot and a string containing the key of\n * the previous child, by sort order, or `null` if it is the first child.\n * @param cancelCallback - An optional callback that will be notified if your\n * event subscription is ever canceled because your client does not have\n * permission to read this data (or it had permission but has now lost it).\n * This callback will be passed an `Error` object indicating why the failure\n * occurred.\n * @returns A function that can be invoked to remove the listener.\n */\nexport function onChildChanged(\n query: Query,\n callback: (\n snapshot: DataSnapshot,\n previousChildName: string | null\n ) => unknown,\n cancelCallback?: (error: Error) => unknown\n): Unsubscribe;\n\n/**\n * Listens for data changes at a particular location.\n *\n * This is the primary way to read data from a Database. Your callback\n * will be triggered for the initial data and again whenever the data changes.\n * Invoke the returned unsubscribe callback to stop receiving updates. See\n * {@link https://firebase.google.com/docs/database/web/retrieve-data | Retrieve Data on the Web}\n * for more details.\n *\n * An `onChildChanged` event will be triggered when the data stored in a child\n * (or any of its descendants) changes. Note that a single `child_changed` event\n * may represent multiple changes to the child. The `DataSnapshot` passed to the\n * callback will contain the new child contents. For ordering purposes, the\n * callback is also passed a second argument which is a string containing the\n * key of the previous sibling child by sort order, or `null` if it is the first\n * child.\n *\n * @param query - The query to run.\n * @param callback - A callback that fires when the specified event occurs.\n * The callback will be passed a DataSnapshot and a string containing the key of\n * the previous child, by sort order, or `null` if it is the first child.\n * @param options - An object that can be used to configure `onlyOnce`, which\n * then removes the listener after its first invocation.\n * @returns A function that can be invoked to remove the listener.\n */\nexport function onChildChanged(\n query: Query,\n callback: (\n snapshot: DataSnapshot,\n previousChildName: string | null\n ) => unknown,\n options: ListenOptions\n): Unsubscribe;\n\n/**\n * Listens for data changes at a particular location.\n *\n * This is the primary way to read data from a Database. Your callback\n * will be triggered for the initial data and again whenever the data changes.\n * Invoke the returned unsubscribe callback to stop receiving updates. 
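// Sketch of onChildChanged(): a single event may coalesce several writes to the same
// child, and the snapshot carries the child's new contents. The path is illustrative.
import { getDatabase, ref, onChildChanged } from 'firebase/database';

const db = getDatabase();
onChildChanged(ref(db, 'messages'), snapshot => {
  console.log('message', snapshot.key, 'is now', snapshot.val());
});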
See\n * {@link https://firebase.google.com/docs/database/web/retrieve-data | Retrieve Data on the Web}\n * for more details.\n *\n * An `onChildChanged` event will be triggered when the data stored in a child\n * (or any of its descendants) changes. Note that a single `child_changed` event\n * may represent multiple changes to the child. The `DataSnapshot` passed to the\n * callback will contain the new child contents. For ordering purposes, the\n * callback is also passed a second argument which is a string containing the\n * key of the previous sibling child by sort order, or `null` if it is the first\n * child.\n *\n * @param query - The query to run.\n * @param callback - A callback that fires when the specified event occurs.\n * The callback will be passed a DataSnapshot and a string containing the key of\n * the previous child, by sort order, or `null` if it is the first child.\n * @param cancelCallback - An optional callback that will be notified if your\n * event subscription is ever canceled because your client does not have\n * permission to read this data (or it had permission but has now lost it).\n * This callback will be passed an `Error` object indicating why the failure\n * occurred.\n * @param options - An object that can be used to configure `onlyOnce`, which\n * then removes the listener after its first invocation.\n * @returns A function that can be invoked to remove the listener.\n */\nexport function onChildChanged(\n query: Query,\n callback: (\n snapshot: DataSnapshot,\n previousChildName: string | null\n ) => unknown,\n cancelCallback: (error: Error) => unknown,\n options: ListenOptions\n): Unsubscribe;\n\nexport function onChildChanged(\n query: Query,\n callback: (\n snapshot: DataSnapshot,\n previousChildName: string | null\n ) => unknown,\n cancelCallbackOrListenOptions?: ((error: Error) => unknown) | ListenOptions,\n options?: ListenOptions\n): Unsubscribe {\n return addEventListener(\n query,\n 'child_changed',\n callback,\n cancelCallbackOrListenOptions,\n options\n );\n}\n\n/**\n * Listens for data changes at a particular location.\n *\n * This is the primary way to read data from a Database. Your callback\n * will be triggered for the initial data and again whenever the data changes.\n * Invoke the returned unsubscribe callback to stop receiving updates. See\n * {@link https://firebase.google.com/docs/database/web/retrieve-data | Retrieve Data on the Web}\n * for more details.\n *\n * An `onChildMoved` event will be triggered when a child's sort order changes\n * such that its position relative to its siblings changes. The `DataSnapshot`\n * passed to the callback will be for the data of the child that has moved. 
It\n * is also passed a second argument which is a string containing the key of the\n * previous sibling child by sort order, or `null` if it is the first child.\n *\n * @param query - The query to run.\n * @param callback - A callback that fires when the specified event occurs.\n * The callback will be passed a DataSnapshot and a string containing the key of\n * the previous child, by sort order, or `null` if it is the first child.\n * @param cancelCallback - An optional callback that will be notified if your\n * event subscription is ever canceled because your client does not have\n * permission to read this data (or it had permission but has now lost it).\n * This callback will be passed an `Error` object indicating why the failure\n * occurred.\n * @returns A function that can be invoked to remove the listener.\n */\nexport function onChildMoved(\n query: Query,\n callback: (\n snapshot: DataSnapshot,\n previousChildName: string | null\n ) => unknown,\n cancelCallback?: (error: Error) => unknown\n): Unsubscribe;\n\n/**\n * Listens for data changes at a particular location.\n *\n * This is the primary way to read data from a Database. Your callback\n * will be triggered for the initial data and again whenever the data changes.\n * Invoke the returned unsubscribe callback to stop receiving updates. See\n * {@link https://firebase.google.com/docs/database/web/retrieve-data | Retrieve Data on the Web}\n * for more details.\n *\n * An `onChildMoved` event will be triggered when a child's sort order changes\n * such that its position relative to its siblings changes. The `DataSnapshot`\n * passed to the callback will be for the data of the child that has moved. It\n * is also passed a second argument which is a string containing the key of the\n * previous sibling child by sort order, or `null` if it is the first child.\n *\n * @param query - The query to run.\n * @param callback - A callback that fires when the specified event occurs.\n * The callback will be passed a DataSnapshot and a string containing the key of\n * the previous child, by sort order, or `null` if it is the first child.\n * @param options - An object that can be used to configure `onlyOnce`, which\n * then removes the listener after its first invocation.\n * @returns A function that can be invoked to remove the listener.\n */\nexport function onChildMoved(\n query: Query,\n callback: (\n snapshot: DataSnapshot,\n previousChildName: string | null\n ) => unknown,\n options: ListenOptions\n): Unsubscribe;\n\n/**\n * Listens for data changes at a particular location.\n *\n * This is the primary way to read data from a Database. Your callback\n * will be triggered for the initial data and again whenever the data changes.\n * Invoke the returned unsubscribe callback to stop receiving updates. See\n * {@link https://firebase.google.com/docs/database/web/retrieve-data | Retrieve Data on the Web}\n * for more details.\n *\n * An `onChildMoved` event will be triggered when a child's sort order changes\n * such that its position relative to its siblings changes. The `DataSnapshot`\n * passed to the callback will be for the data of the child that has moved. 
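// Sketch of onChildMoved() on an ordered query, since moves only occur when a sort order
// is in effect. The 'players' path and 'score' child are assumptions; query() and
// orderByChild() are defined later in this file.
import { getDatabase, ref, query, orderByChild, onChildMoved } from 'firebase/database';

const db = getDatabase();
const byScore = query(ref(db, 'players'), orderByChild('score'));
onChildMoved(byScore, (snapshot, previousChildName) => {
  console.log(snapshot.key, 'moved; now directly after', previousChildName);
});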
It\n * is also passed a second argument which is a string containing the key of the\n * previous sibling child by sort order, or `null` if it is the first child.\n *\n * @param query - The query to run.\n * @param callback - A callback that fires when the specified event occurs.\n * The callback will be passed a DataSnapshot and a string containing the key of\n * the previous child, by sort order, or `null` if it is the first child.\n * @param cancelCallback - An optional callback that will be notified if your\n * event subscription is ever canceled because your client does not have\n * permission to read this data (or it had permission but has now lost it).\n * This callback will be passed an `Error` object indicating why the failure\n * occurred.\n * @param options - An object that can be used to configure `onlyOnce`, which\n * then removes the listener after its first invocation.\n * @returns A function that can be invoked to remove the listener.\n */\nexport function onChildMoved(\n query: Query,\n callback: (\n snapshot: DataSnapshot,\n previousChildName: string | null\n ) => unknown,\n cancelCallback: (error: Error) => unknown,\n options: ListenOptions\n): Unsubscribe;\n\nexport function onChildMoved(\n query: Query,\n callback: (\n snapshot: DataSnapshot,\n previousChildName: string | null\n ) => unknown,\n cancelCallbackOrListenOptions?: ((error: Error) => unknown) | ListenOptions,\n options?: ListenOptions\n): Unsubscribe {\n return addEventListener(\n query,\n 'child_moved',\n callback,\n cancelCallbackOrListenOptions,\n options\n );\n}\n\n/**\n * Listens for data changes at a particular location.\n *\n * This is the primary way to read data from a Database. Your callback\n * will be triggered for the initial data and again whenever the data changes.\n * Invoke the returned unsubscribe callback to stop receiving updates. See\n * {@link https://firebase.google.com/docs/database/web/retrieve-data | Retrieve Data on the Web}\n * for more details.\n *\n * An `onChildRemoved` event will be triggered once every time a child is\n * removed. The `DataSnapshot` passed into the callback will be the old data for\n * the child that was removed. A child will get removed when either:\n *\n * - a client explicitly calls `remove()` on that child or one of its ancestors\n * - a client calls `set(null)` on that child or one of its ancestors\n * - that child has all of its children removed\n * - there is a query in effect which now filters out the child (because it's\n * sort order changed or the max limit was hit)\n *\n * @param query - The query to run.\n * @param callback - A callback that fires when the specified event occurs.\n * The callback will be passed a DataSnapshot and a string containing the key of\n * the previous child, by sort order, or `null` if it is the first child.\n * @param cancelCallback - An optional callback that will be notified if your\n * event subscription is ever canceled because your client does not have\n * permission to read this data (or it had permission but has now lost it).\n * This callback will be passed an `Error` object indicating why the failure\n * occurred.\n * @returns A function that can be invoked to remove the listener.\n */\nexport function onChildRemoved(\n query: Query,\n callback: (snapshot: DataSnapshot) => unknown,\n cancelCallback?: (error: Error) => unknown\n): Unsubscribe;\n\n/**\n * Listens for data changes at a particular location.\n *\n * This is the primary way to read data from a Database. 
Your callback\n * will be triggered for the initial data and again whenever the data changes.\n * Invoke the returned unsubscribe callback to stop receiving updates. See\n * {@link https://firebase.google.com/docs/database/web/retrieve-data | Retrieve Data on the Web}\n * for more details.\n *\n * An `onChildRemoved` event will be triggered once every time a child is\n * removed. The `DataSnapshot` passed into the callback will be the old data for\n * the child that was removed. A child will get removed when either:\n *\n * - a client explicitly calls `remove()` on that child or one of its ancestors\n * - a client calls `set(null)` on that child or one of its ancestors\n * - that child has all of its children removed\n * - there is a query in effect which now filters out the child (because it's\n * sort order changed or the max limit was hit)\n *\n * @param query - The query to run.\n * @param callback - A callback that fires when the specified event occurs.\n * The callback will be passed a DataSnapshot and a string containing the key of\n * the previous child, by sort order, or `null` if it is the first child.\n * @param options - An object that can be used to configure `onlyOnce`, which\n * then removes the listener after its first invocation.\n * @returns A function that can be invoked to remove the listener.\n */\nexport function onChildRemoved(\n query: Query,\n callback: (snapshot: DataSnapshot) => unknown,\n options: ListenOptions\n): Unsubscribe;\n\n/**\n * Listens for data changes at a particular location.\n *\n * This is the primary way to read data from a Database. Your callback\n * will be triggered for the initial data and again whenever the data changes.\n * Invoke the returned unsubscribe callback to stop receiving updates. See\n * {@link https://firebase.google.com/docs/database/web/retrieve-data | Retrieve Data on the Web}\n * for more details.\n *\n * An `onChildRemoved` event will be triggered once every time a child is\n * removed. The `DataSnapshot` passed into the callback will be the old data for\n * the child that was removed. 
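// Sketch of onChildRemoved(); per the documentation above, the snapshot holds the data
// the child had just before removal. The 'messages' path is a placeholder.
import { getDatabase, ref, onChildRemoved } from 'firebase/database';

const db = getDatabase();
onChildRemoved(ref(db, 'messages'), snapshot => {
  console.log('removed message', snapshot.key, 'was', snapshot.val());
});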
A child will get removed when either:\n *\n * - a client explicitly calls `remove()` on that child or one of its ancestors\n * - a client calls `set(null)` on that child or one of its ancestors\n * - that child has all of its children removed\n * - there is a query in effect which now filters out the child (because it's\n * sort order changed or the max limit was hit)\n *\n * @param query - The query to run.\n * @param callback - A callback that fires when the specified event occurs.\n * The callback will be passed a DataSnapshot and a string containing the key of\n * the previous child, by sort order, or `null` if it is the first child.\n * @param cancelCallback - An optional callback that will be notified if your\n * event subscription is ever canceled because your client does not have\n * permission to read this data (or it had permission but has now lost it).\n * This callback will be passed an `Error` object indicating why the failure\n * occurred.\n * @param options - An object that can be used to configure `onlyOnce`, which\n * then removes the listener after its first invocation.\n * @returns A function that can be invoked to remove the listener.\n */\nexport function onChildRemoved(\n query: Query,\n callback: (snapshot: DataSnapshot) => unknown,\n cancelCallback: (error: Error) => unknown,\n options: ListenOptions\n): Unsubscribe;\n\nexport function onChildRemoved(\n query: Query,\n callback: (snapshot: DataSnapshot) => unknown,\n cancelCallbackOrListenOptions?: ((error: Error) => unknown) | ListenOptions,\n options?: ListenOptions\n): Unsubscribe {\n return addEventListener(\n query,\n 'child_removed',\n callback,\n cancelCallbackOrListenOptions,\n options\n );\n}\n\nexport { EventType };\n\n/**\n * Detaches a callback previously attached with the corresponding `on*()` (`onValue`, `onChildAdded`) listener.\n * Note: This is not the recommended way to remove a listener. Instead, please use the returned callback function from\n * the respective `on*` callbacks.\n *\n * Detach a callback previously attached with `on*()`. Calling `off()` on a parent listener\n * will not automatically remove listeners registered on child nodes, `off()`\n * must also be called on any child listeners to remove the callback.\n *\n * If a callback is not specified, all callbacks for the specified eventType\n * will be removed. Similarly, if no eventType is specified, all callbacks\n * for the `Reference` will be removed.\n *\n * Individual listeners can also be removed by invoking their unsubscribe\n * callbacks.\n *\n * @param query - The query that the listener was registered with.\n * @param eventType - One of the following strings: \"value\", \"child_added\",\n * \"child_changed\", \"child_removed\", or \"child_moved.\" If omitted, all callbacks\n * for the `Reference` will be removed.\n * @param callback - The callback function that was passed to `on()` or\n * `undefined` to remove all callbacks.\n */\nexport function off(\n query: Query,\n eventType?: EventType,\n callback?: (\n snapshot: DataSnapshot,\n previousChildName?: string | null\n ) => unknown\n): void {\n let container: EventRegistration | null = null;\n const expCallback = callback ? 
new CallbackContext(callback) : null;\n if (eventType === 'value') {\n container = new ValueEventRegistration(expCallback);\n } else if (eventType) {\n container = new ChildEventRegistration(eventType, expCallback);\n }\n repoRemoveEventCallbackForQuery(query._repo, query, container);\n}\n\n/** Describes the different query constraints available in this SDK. */\nexport type QueryConstraintType =\n | 'endAt'\n | 'endBefore'\n | 'startAt'\n | 'startAfter'\n | 'limitToFirst'\n | 'limitToLast'\n | 'orderByChild'\n | 'orderByKey'\n | 'orderByPriority'\n | 'orderByValue'\n | 'equalTo';\n\n/**\n * A `QueryConstraint` is used to narrow the set of documents returned by a\n * Database query. `QueryConstraint`s are created by invoking {@link endAt},\n * {@link endBefore}, {@link startAt}, {@link startAfter}, {@link\n * limitToFirst}, {@link limitToLast}, {@link orderByChild},\n * {@link orderByChild}, {@link orderByKey} , {@link orderByPriority} ,\n * {@link orderByValue} or {@link equalTo} and\n * can then be passed to {@link query} to create a new query instance that\n * also contains this `QueryConstraint`.\n */\nexport abstract class QueryConstraint {\n /** The type of this query constraints */\n abstract readonly type: QueryConstraintType;\n\n /**\n * Takes the provided `Query` and returns a copy of the `Query` with this\n * `QueryConstraint` applied.\n */\n abstract _apply(query: QueryImpl): QueryImpl;\n}\n\nclass QueryEndAtConstraint extends QueryConstraint {\n readonly type = 'endAt';\n\n constructor(\n private readonly _value: number | string | boolean | null,\n private readonly _key?: string\n ) {\n super();\n }\n\n _apply(query: QueryImpl): QueryImpl {\n validateFirebaseDataArg('endAt', this._value, query._path, true);\n const newParams = queryParamsEndAt(\n query._queryParams,\n this._value,\n this._key\n );\n validateLimit(newParams);\n validateQueryEndpoints(newParams);\n if (query._queryParams.hasEnd()) {\n throw new Error(\n 'endAt: Starting point was already set (by another call to endAt, ' +\n 'endBefore or equalTo).'\n );\n }\n return new QueryImpl(\n query._repo,\n query._path,\n newParams,\n query._orderByCalled\n );\n }\n}\n\n/**\n * Creates a `QueryConstraint` with the specified ending point.\n *\n * Using `startAt()`, `startAfter()`, `endBefore()`, `endAt()` and `equalTo()`\n * allows you to choose arbitrary starting and ending points for your queries.\n *\n * The ending point is inclusive, so children with exactly the specified value\n * will be included in the query. The optional key argument can be used to\n * further limit the range of the query. If it is specified, then children that\n * have exactly the specified value must also have a key name less than or equal\n * to the specified key.\n *\n * You can read more about `endAt()` in\n * {@link https://firebase.google.com/docs/database/web/lists-of-data#filtering_data | Filtering data}.\n *\n * @param value - The value to end at. The argument type depends on which\n * `orderBy*()` function was used in this query. Specify a value that matches\n * the `orderBy*()` type. When used in combination with `orderByKey()`, the\n * value must be a string.\n * @param key - The child key to end at, among the children with the previously\n * specified priority. 
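// Sketch contrasting off() (defined just above) with the preferred unsubscribe callback
// returned by the on*() functions. The 'status' path and listener are illustrative.
import { getDatabase, ref, onValue, off, DataSnapshot } from 'firebase/database';

const db = getDatabase();
const statusRef = ref(db, 'status'); // hypothetical location
const listener = (snapshot: DataSnapshot): void => console.log(snapshot.val());

// Preferred: keep the returned unsubscribe function and call it when done.
const stop = onValue(statusRef, listener);
stop();

// Also supported: detach a specific callback by event type; omitting the callback would
// remove every 'value' callback registered at this query.
onValue(statusRef, listener);
off(statusRef, 'value', listener);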
This argument is only allowed if ordering by child,\n * value, or priority.\n */\nexport function endAt(\n value: number | string | boolean | null,\n key?: string\n): QueryConstraint {\n validateKey('endAt', 'key', key, true);\n return new QueryEndAtConstraint(value, key);\n}\n\nclass QueryEndBeforeConstraint extends QueryConstraint {\n readonly type = 'endBefore';\n\n constructor(\n private readonly _value: number | string | boolean | null,\n private readonly _key?: string\n ) {\n super();\n }\n\n _apply(query: QueryImpl): QueryImpl {\n validateFirebaseDataArg('endBefore', this._value, query._path, false);\n const newParams = queryParamsEndBefore(\n query._queryParams,\n this._value,\n this._key\n );\n validateLimit(newParams);\n validateQueryEndpoints(newParams);\n if (query._queryParams.hasEnd()) {\n throw new Error(\n 'endBefore: Starting point was already set (by another call to endAt, ' +\n 'endBefore or equalTo).'\n );\n }\n return new QueryImpl(\n query._repo,\n query._path,\n newParams,\n query._orderByCalled\n );\n }\n}\n\n/**\n * Creates a `QueryConstraint` with the specified ending point (exclusive).\n *\n * Using `startAt()`, `startAfter()`, `endBefore()`, `endAt()` and `equalTo()`\n * allows you to choose arbitrary starting and ending points for your queries.\n *\n * The ending point is exclusive. If only a value is provided, children\n * with a value less than the specified value will be included in the query.\n * If a key is specified, then children must have a value less than or equal\n * to the specified value and a key name less than the specified key.\n *\n * @param value - The value to end before. The argument type depends on which\n * `orderBy*()` function was used in this query. Specify a value that matches\n * the `orderBy*()` type. When used in combination with `orderByKey()`, the\n * value must be a string.\n * @param key - The child key to end before, among the children with the\n * previously specified priority. This argument is only allowed if ordering by\n * child, value, or priority.\n */\nexport function endBefore(\n value: number | string | boolean | null,\n key?: string\n): QueryConstraint {\n validateKey('endBefore', 'key', key, true);\n return new QueryEndBeforeConstraint(value, key);\n}\n\nclass QueryStartAtConstraint extends QueryConstraint {\n readonly type = 'startAt';\n\n constructor(\n private readonly _value: number | string | boolean | null,\n private readonly _key?: string\n ) {\n super();\n }\n\n _apply(query: QueryImpl): QueryImpl {\n validateFirebaseDataArg('startAt', this._value, query._path, true);\n const newParams = queryParamsStartAt(\n query._queryParams,\n this._value,\n this._key\n );\n validateLimit(newParams);\n validateQueryEndpoints(newParams);\n if (query._queryParams.hasStart()) {\n throw new Error(\n 'startAt: Starting point was already set (by another call to startAt, ' +\n 'startBefore or equalTo).'\n );\n }\n return new QueryImpl(\n query._repo,\n query._path,\n newParams,\n query._orderByCalled\n );\n }\n}\n\n/**\n * Creates a `QueryConstraint` with the specified starting point.\n *\n * Using `startAt()`, `startAfter()`, `endBefore()`, `endAt()` and `equalTo()`\n * allows you to choose arbitrary starting and ending points for your queries.\n *\n * The starting point is inclusive, so children with exactly the specified value\n * will be included in the query. The optional key argument can be used to\n * further limit the range of the query. 
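// Range sketch using the inclusive endAt() and exclusive endBefore() constraints defined
// above, combined with query(), orderByChild(), and startAt() from this same module.
// The 'dinosaurs' collection and the numeric bounds are made up for the example.
import {
  getDatabase,
  ref,
  query,
  orderByChild,
  startAt,
  endAt,
  endBefore
} from 'firebase/database';

const db = getDatabase();
const dinosaurs = ref(db, 'dinosaurs'); // hypothetical collection

// Heights up to and including 2.5.
const upToMedium = query(dinosaurs, orderByChild('height'), endAt(2.5));

// Heights strictly below 2.5.
const shorterOnly = query(dinosaurs, orderByChild('height'), endBefore(2.5));

// Closed range [1, 2.5] by combining a start point with an end point.
const mediumBand = query(dinosaurs, orderByChild('height'), startAt(1), endAt(2.5));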
If it is specified, then children that\n * have exactly the specified value must also have a key name greater than or\n * equal to the specified key.\n *\n * You can read more about `startAt()` in\n * {@link https://firebase.google.com/docs/database/web/lists-of-data#filtering_data | Filtering data}.\n *\n * @param value - The value to start at. The argument type depends on which\n * `orderBy*()` function was used in this query. Specify a value that matches\n * the `orderBy*()` type. When used in combination with `orderByKey()`, the\n * value must be a string.\n * @param key - The child key to start at. This argument is only allowed if\n * ordering by child, value, or priority.\n */\nexport function startAt(\n value: number | string | boolean | null = null,\n key?: string\n): QueryConstraint {\n validateKey('startAt', 'key', key, true);\n return new QueryStartAtConstraint(value, key);\n}\n\nclass QueryStartAfterConstraint extends QueryConstraint {\n readonly type = 'startAfter';\n\n constructor(\n private readonly _value: number | string | boolean | null,\n private readonly _key?: string\n ) {\n super();\n }\n\n _apply(query: QueryImpl): QueryImpl {\n validateFirebaseDataArg('startAfter', this._value, query._path, false);\n const newParams = queryParamsStartAfter(\n query._queryParams,\n this._value,\n this._key\n );\n validateLimit(newParams);\n validateQueryEndpoints(newParams);\n if (query._queryParams.hasStart()) {\n throw new Error(\n 'startAfter: Starting point was already set (by another call to startAt, ' +\n 'startAfter, or equalTo).'\n );\n }\n return new QueryImpl(\n query._repo,\n query._path,\n newParams,\n query._orderByCalled\n );\n }\n}\n\n/**\n * Creates a `QueryConstraint` with the specified starting point (exclusive).\n *\n * Using `startAt()`, `startAfter()`, `endBefore()`, `endAt()` and `equalTo()`\n * allows you to choose arbitrary starting and ending points for your queries.\n *\n * The starting point is exclusive. If only a value is provided, children\n * with a value greater than the specified value will be included in the query.\n * If a key is specified, then children must have a value greater than or equal\n * to the specified value and a a key name greater than the specified key.\n *\n * @param value - The value to start after. The argument type depends on which\n * `orderBy*()` function was used in this query. Specify a value that matches\n * the `orderBy*()` type. When used in combination with `orderByKey()`, the\n * value must be a string.\n * @param key - The child key to start after. 
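// Keyset-pagination sketch with startAfter(): resume a key-ordered scan after the last
// key already seen. limitToFirst() is documented just below; the page size, path, and
// import path are assumptions.
import {
  getDatabase,
  ref,
  query,
  orderByKey,
  startAfter,
  limitToFirst,
  get
} from 'firebase/database';

async function nextPage(lastKeySeen: string): Promise<string[]> {
  const db = getDatabase();
  const page = query(
    ref(db, 'messages'),
    orderByKey(),
    startAfter(lastKeySeen),
    limitToFirst(25)
  );
  const snapshot = await get(page);
  const keys: string[] = [];
  snapshot.forEach(child => {
    if (child.key !== null) {
      keys.push(child.key);
    }
  });
  return keys;
}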
This argument is only allowed if\n * ordering by child, value, or priority.\n */\nexport function startAfter(\n value: number | string | boolean | null,\n key?: string\n): QueryConstraint {\n validateKey('startAfter', 'key', key, true);\n return new QueryStartAfterConstraint(value, key);\n}\n\nclass QueryLimitToFirstConstraint extends QueryConstraint {\n readonly type = 'limitToFirst';\n\n constructor(private readonly _limit: number) {\n super();\n }\n\n _apply(query: QueryImpl): QueryImpl {\n if (query._queryParams.hasLimit()) {\n throw new Error(\n 'limitToFirst: Limit was already set (by another call to limitToFirst ' +\n 'or limitToLast).'\n );\n }\n return new QueryImpl(\n query._repo,\n query._path,\n queryParamsLimitToFirst(query._queryParams, this._limit),\n query._orderByCalled\n );\n }\n}\n\n/**\n * Creates a new `QueryConstraint` that if limited to the first specific number\n * of children.\n *\n * The `limitToFirst()` method is used to set a maximum number of children to be\n * synced for a given callback. If we set a limit of 100, we will initially only\n * receive up to 100 `child_added` events. If we have fewer than 100 messages\n * stored in our Database, a `child_added` event will fire for each message.\n * However, if we have over 100 messages, we will only receive a `child_added`\n * event for the first 100 ordered messages. As items change, we will receive\n * `child_removed` events for each item that drops out of the active list so\n * that the total number stays at 100.\n *\n * You can read more about `limitToFirst()` in\n * {@link https://firebase.google.com/docs/database/web/lists-of-data#filtering_data | Filtering data}.\n *\n * @param limit - The maximum number of nodes to include in this query.\n */\nexport function limitToFirst(limit: number): QueryConstraint {\n if (typeof limit !== 'number' || Math.floor(limit) !== limit || limit <= 0) {\n throw new Error('limitToFirst: First argument must be a positive integer.');\n }\n return new QueryLimitToFirstConstraint(limit);\n}\n\nclass QueryLimitToLastConstraint extends QueryConstraint {\n readonly type = 'limitToLast';\n\n constructor(private readonly _limit: number) {\n super();\n }\n\n _apply(query: QueryImpl): QueryImpl {\n if (query._queryParams.hasLimit()) {\n throw new Error(\n 'limitToLast: Limit was already set (by another call to limitToFirst ' +\n 'or limitToLast).'\n );\n }\n return new QueryImpl(\n query._repo,\n query._path,\n queryParamsLimitToLast(query._queryParams, this._limit),\n query._orderByCalled\n );\n }\n}\n\n/**\n * Creates a new `QueryConstraint` that is limited to return only the last\n * specified number of children.\n *\n * The `limitToLast()` method is used to set a maximum number of children to be\n * synced for a given callback. If we set a limit of 100, we will initially only\n * receive up to 100 `child_added` events. If we have fewer than 100 messages\n * stored in our Database, a `child_added` event will fire for each message.\n * However, if we have over 100 messages, we will only receive a `child_added`\n * event for the last 100 ordered messages. 
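// Sketch of the two limit constraints defined above: keep either the first or the last N
// children (in sort order) in sync. The 'messages' path and the limit of 100 mirror the
// prose example; the import path is assumed.
import {
  getDatabase,
  ref,
  query,
  orderByKey,
  limitToFirst,
  limitToLast,
  onChildAdded
} from 'firebase/database';

const db = getDatabase();
const messages = ref(db, 'messages'); // hypothetical location

// Stays synced to the first 100 messages in key order.
const firstHundred = query(messages, orderByKey(), limitToFirst(100));

// Stays synced to the most recent 100 messages in key order.
const lastHundred = query(messages, orderByKey(), limitToLast(100));

onChildAdded(lastHundred, snapshot => console.log('recent message:', snapshot.val()));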
As items change, we will receive\n * `child_removed` events for each item that drops out of the active list so\n * that the total number stays at 100.\n *\n * You can read more about `limitToLast()` in\n * {@link https://firebase.google.com/docs/database/web/lists-of-data#filtering_data | Filtering data}.\n *\n * @param limit - The maximum number of nodes to include in this query.\n */\nexport function limitToLast(limit: number): QueryConstraint {\n if (typeof limit !== 'number' || Math.floor(limit) !== limit || limit <= 0) {\n throw new Error('limitToLast: First argument must be a positive integer.');\n }\n\n return new QueryLimitToLastConstraint(limit);\n}\n\nclass QueryOrderByChildConstraint extends QueryConstraint {\n readonly type = 'orderByChild';\n\n constructor(private readonly _path: string) {\n super();\n }\n\n _apply(query: QueryImpl): QueryImpl {\n validateNoPreviousOrderByCall(query, 'orderByChild');\n const parsedPath = new Path(this._path);\n if (pathIsEmpty(parsedPath)) {\n throw new Error(\n 'orderByChild: cannot pass in empty path. Use orderByValue() instead.'\n );\n }\n const index = new PathIndex(parsedPath);\n const newParams = queryParamsOrderBy(query._queryParams, index);\n validateQueryEndpoints(newParams);\n\n return new QueryImpl(\n query._repo,\n query._path,\n newParams,\n /*orderByCalled=*/ true\n );\n }\n}\n\n/**\n * Creates a new `QueryConstraint` that orders by the specified child key.\n *\n * Queries can only order by one key at a time. Calling `orderByChild()`\n * multiple times on the same query is an error.\n *\n * Firebase queries allow you to order your data by any child key on the fly.\n * However, if you know in advance what your indexes will be, you can define\n * them via the .indexOn rule in your Security Rules for better performance. See\n * the{@link https://firebase.google.com/docs/database/security/indexing-data}\n * rule for more information.\n *\n * You can read more about `orderByChild()` in\n * {@link https://firebase.google.com/docs/database/web/lists-of-data#sort_data | Sort data}.\n *\n * @param path - The path to order by.\n */\nexport function orderByChild(path: string): QueryConstraint {\n if (path === '$key') {\n throw new Error(\n 'orderByChild: \"$key\" is invalid. Use orderByKey() instead.'\n );\n } else if (path === '$priority') {\n throw new Error(\n 'orderByChild: \"$priority\" is invalid. Use orderByPriority() instead.'\n );\n } else if (path === '$value') {\n throw new Error(\n 'orderByChild: \"$value\" is invalid. 
Use orderByValue() instead.'\n );\n }\n validatePathString('orderByChild', 'path', path, false);\n return new QueryOrderByChildConstraint(path);\n}\n\nclass QueryOrderByKeyConstraint extends QueryConstraint {\n readonly type = 'orderByKey';\n\n _apply(query: QueryImpl): QueryImpl {\n validateNoPreviousOrderByCall(query, 'orderByKey');\n const newParams = queryParamsOrderBy(query._queryParams, KEY_INDEX);\n validateQueryEndpoints(newParams);\n return new QueryImpl(\n query._repo,\n query._path,\n newParams,\n /*orderByCalled=*/ true\n );\n }\n}\n\n/**\n * Creates a new `QueryConstraint` that orders by the key.\n *\n * Sorts the results of a query by their (ascending) key values.\n *\n * You can read more about `orderByKey()` in\n * {@link https://firebase.google.com/docs/database/web/lists-of-data#sort_data | Sort data}.\n */\nexport function orderByKey(): QueryConstraint {\n return new QueryOrderByKeyConstraint();\n}\n\nclass QueryOrderByPriorityConstraint extends QueryConstraint {\n readonly type = 'orderByPriority';\n\n _apply(query: QueryImpl): QueryImpl {\n validateNoPreviousOrderByCall(query, 'orderByPriority');\n const newParams = queryParamsOrderBy(query._queryParams, PRIORITY_INDEX);\n validateQueryEndpoints(newParams);\n return new QueryImpl(\n query._repo,\n query._path,\n newParams,\n /*orderByCalled=*/ true\n );\n }\n}\n\n/**\n * Creates a new `QueryConstraint` that orders by priority.\n *\n * Applications need not use priority but can order collections by\n * ordinary properties (see\n * {@link https://firebase.google.com/docs/database/web/lists-of-data#sort_data | Sort data}\n * for alternatives to priority.\n */\nexport function orderByPriority(): QueryConstraint {\n return new QueryOrderByPriorityConstraint();\n}\n\nclass QueryOrderByValueConstraint extends QueryConstraint {\n readonly type = 'orderByValue';\n\n _apply(query: QueryImpl): QueryImpl {\n validateNoPreviousOrderByCall(query, 'orderByValue');\n const newParams = queryParamsOrderBy(query._queryParams, VALUE_INDEX);\n validateQueryEndpoints(newParams);\n return new QueryImpl(\n query._repo,\n query._path,\n newParams,\n /*orderByCalled=*/ true\n );\n }\n}\n\n/**\n * Creates a new `QueryConstraint` that orders by value.\n *\n * If the children of a query are all scalar values (string, number, or\n * boolean), you can order the results by their (ascending) values.\n *\n * You can read more about `orderByValue()` in\n * {@link https://firebase.google.com/docs/database/web/lists-of-data#sort_data | Sort data}.\n */\nexport function orderByValue(): QueryConstraint {\n return new QueryOrderByValueConstraint();\n}\n\nclass QueryEqualToValueConstraint extends QueryConstraint {\n readonly type = 'equalTo';\n\n constructor(\n private readonly _value: number | string | boolean | null,\n private readonly _key?: string\n ) {\n super();\n }\n\n _apply(query: QueryImpl): QueryImpl {\n validateFirebaseDataArg('equalTo', this._value, query._path, false);\n if (query._queryParams.hasStart()) {\n throw new Error(\n 'equalTo: Starting point was already set (by another call to startAt/startAfter or ' +\n 'equalTo).'\n );\n }\n if (query._queryParams.hasEnd()) {\n throw new Error(\n 'equalTo: Ending point was already set (by another call to endAt/endBefore or ' +\n 'equalTo).'\n );\n }\n return new QueryEndAtConstraint(this._value, this._key)._apply(\n new QueryStartAtConstraint(this._value, this._key)._apply(query)\n );\n }\n}\n\n/**\n * Creates a `QueryConstraint` that includes children that match the specified\n * value.\n *\n * 
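// Sketch combining the ordering constraints above with equalTo() (documented here) via
// query(). Collection names and the matched value are examples only.
import {
  getDatabase,
  ref,
  query,
  orderByChild,
  orderByKey,
  orderByValue,
  equalTo,
  get
} from 'firebase/database';

async function demoOrdering(): Promise<void> {
  const db = getDatabase();
  const dinosaurs = ref(db, 'dinosaurs'); // hypothetical collection

  // Exact-match filter on a child property.
  const exactly25 = query(dinosaurs, orderByChild('height'), equalTo(25));
  console.log((await get(exactly25)).val());

  // Order the same collection by key, or a scalar-valued collection by value.
  const byKey = query(dinosaurs, orderByKey());
  const byValue = query(ref(db, 'scores'), orderByValue());
  console.log((await get(byKey)).val(), (await get(byValue)).val());
}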
Using `startAt()`, `startAfter()`, `endBefore()`, `endAt()` and `equalTo()`\n * allows you to choose arbitrary starting and ending points for your queries.\n *\n * The optional key argument can be used to further limit the range of the\n * query. If it is specified, then children that have exactly the specified\n * value must also have exactly the specified key as their key name. This can be\n * used to filter result sets with many matches for the same value.\n *\n * You can read more about `equalTo()` in\n * {@link https://firebase.google.com/docs/database/web/lists-of-data#filtering_data | Filtering data}.\n *\n * @param value - The value to match for. The argument type depends on which\n * `orderBy*()` function was used in this query. Specify a value that matches\n * the `orderBy*()` type. When used in combination with `orderByKey()`, the\n * value must be a string.\n * @param key - The child key to start at, among the children with the\n * previously specified priority. This argument is only allowed if ordering by\n * child, value, or priority.\n */\nexport function equalTo(\n value: number | string | boolean | null,\n key?: string\n): QueryConstraint {\n validateKey('equalTo', 'key', key, true);\n return new QueryEqualToValueConstraint(value, key);\n}\n\n/**\n * Creates a new immutable instance of `Query` that is extended to also include\n * additional query constraints.\n *\n * @param query - The Query instance to use as a base for the new constraints.\n * @param queryConstraints - The list of `QueryConstraint`s to apply.\n * @throws if any of the provided query constraints cannot be combined with the\n * existing or new constraints.\n */\nexport function query(\n query: Query,\n ...queryConstraints: QueryConstraint[]\n): Query {\n let queryImpl = getModularInstance(query) as QueryImpl;\n for (const constraint of queryConstraints) {\n queryImpl = constraint._apply(queryImpl);\n }\n return queryImpl;\n}\n\n/**\n * Define reference constructor in various modules\n *\n * We are doing this here to avoid several circular\n * dependency issues\n */\nsyncPointSetReferenceConstructor(ReferenceImpl);\nsyncTreeSetReferenceConstructor(ReferenceImpl);\n","/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n// eslint-disable-next-line import/no-extraneous-dependencies\nimport {\n _FirebaseService,\n _getProvider,\n FirebaseApp,\n getApp\n} from '@firebase/app';\nimport { AppCheckInternalComponentName } from '@firebase/app-check-interop-types';\nimport { FirebaseAuthInternalName } from '@firebase/auth-interop-types';\nimport { Provider } from '@firebase/component';\nimport {\n getModularInstance,\n createMockUserToken,\n EmulatorMockTokenOptions,\n getDefaultEmulatorHostnameAndPort\n} from '@firebase/util';\n\nimport { AppCheckTokenProvider } from '../core/AppCheckTokenProvider';\nimport {\n AuthTokenProvider,\n EmulatorTokenProvider,\n FirebaseAuthTokenProvider\n} from '../core/AuthTokenProvider';\nimport { Repo, repoInterrupt, 
repoResume, repoStart } from '../core/Repo';\nimport { RepoInfo } from '../core/RepoInfo';\nimport { parseRepoInfo } from '../core/util/libs/parser';\nimport { newEmptyPath, pathIsEmpty } from '../core/util/Path';\nimport {\n warn,\n fatal,\n log,\n enableLogging as enableLoggingImpl\n} from '../core/util/util';\nimport { validateUrl } from '../core/util/validation';\nimport { BrowserPollConnection } from '../realtime/BrowserPollConnection';\nimport { TransportManager } from '../realtime/TransportManager';\nimport { WebSocketConnection } from '../realtime/WebSocketConnection';\n\nimport { ReferenceImpl } from './Reference_impl';\n\nexport { EmulatorMockTokenOptions } from '@firebase/util';\n/**\n * This variable is also defined in the firebase Node.js Admin SDK. Before\n * modifying this definition, consult the definition in:\n *\n * https://github.com/firebase/firebase-admin-node\n *\n * and make sure the two are consistent.\n */\nconst FIREBASE_DATABASE_EMULATOR_HOST_VAR = 'FIREBASE_DATABASE_EMULATOR_HOST';\n\n/**\n * Creates and caches `Repo` instances.\n */\nconst repos: {\n [appName: string]: {\n [dbUrl: string]: Repo;\n };\n} = {};\n\n/**\n * If true, any new `Repo` will be created to use `ReadonlyRestClient` (for testing purposes).\n */\nlet useRestClient = false;\n\n/**\n * Update an existing `Repo` in place to point to a new host/port.\n */\nfunction repoManagerApplyEmulatorSettings(\n repo: Repo,\n host: string,\n port: number,\n tokenProvider?: AuthTokenProvider\n): void {\n repo.repoInfo_ = new RepoInfo(\n `${host}:${port}`,\n /* secure= */ false,\n repo.repoInfo_.namespace,\n repo.repoInfo_.webSocketOnly,\n repo.repoInfo_.nodeAdmin,\n repo.repoInfo_.persistenceKey,\n repo.repoInfo_.includeNamespaceInQueryParams,\n /*isUsingEmulator=*/ true\n );\n\n if (tokenProvider) {\n repo.authTokenProvider_ = tokenProvider;\n }\n}\n\n/**\n * This function should only ever be called to CREATE a new database instance.\n * @internal\n */\nexport function repoManagerDatabaseFromApp(\n app: FirebaseApp,\n authProvider: Provider,\n appCheckProvider?: Provider,\n url?: string,\n nodeAdmin?: boolean\n): Database {\n let dbUrl: string | undefined = url || app.options.databaseURL;\n if (dbUrl === undefined) {\n if (!app.options.projectId) {\n fatal(\n \"Can't determine Firebase Database URL. Be sure to include \" +\n ' a Project ID when calling firebase.initializeApp().'\n );\n }\n\n log('Using default host for project ', app.options.projectId);\n dbUrl = `${app.options.projectId}-default-rtdb.firebaseio.com`;\n }\n\n let parsedUrl = parseRepoInfo(dbUrl, nodeAdmin);\n let repoInfo = parsedUrl.repoInfo;\n\n let isEmulator: boolean;\n\n let dbEmulatorHost: string | undefined = undefined;\n if (typeof process !== 'undefined' && process.env) {\n dbEmulatorHost = process.env[FIREBASE_DATABASE_EMULATOR_HOST_VAR];\n }\n\n if (dbEmulatorHost) {\n isEmulator = true;\n dbUrl = `http://${dbEmulatorHost}?ns=${repoInfo.namespace}`;\n parsedUrl = parseRepoInfo(dbUrl, nodeAdmin);\n repoInfo = parsedUrl.repoInfo;\n } else {\n isEmulator = !parsedUrl.repoInfo.secure;\n }\n\n const authTokenProvider =\n nodeAdmin && isEmulator\n ? 
new EmulatorTokenProvider(EmulatorTokenProvider.OWNER)\n : new FirebaseAuthTokenProvider(app.name, app.options, authProvider);\n\n validateUrl('Invalid Firebase Database URL', parsedUrl);\n if (!pathIsEmpty(parsedUrl.path)) {\n fatal(\n 'Database URL must point to the root of a Firebase Database ' +\n '(not including a child path).'\n );\n }\n\n const repo = repoManagerCreateRepo(\n repoInfo,\n app,\n authTokenProvider,\n new AppCheckTokenProvider(app.name, appCheckProvider)\n );\n return new Database(repo, app);\n}\n\n/**\n * Remove the repo and make sure it is disconnected.\n *\n */\nfunction repoManagerDeleteRepo(repo: Repo, appName: string): void {\n const appRepos = repos[appName];\n // This should never happen...\n if (!appRepos || appRepos[repo.key] !== repo) {\n fatal(`Database ${appName}(${repo.repoInfo_}) has already been deleted.`);\n }\n repoInterrupt(repo);\n delete appRepos[repo.key];\n}\n\n/**\n * Ensures a repo doesn't already exist and then creates one using the\n * provided app.\n *\n * @param repoInfo - The metadata about the Repo\n * @returns The Repo object for the specified server / repoName.\n */\nfunction repoManagerCreateRepo(\n repoInfo: RepoInfo,\n app: FirebaseApp,\n authTokenProvider: AuthTokenProvider,\n appCheckProvider: AppCheckTokenProvider\n): Repo {\n let appRepos = repos[app.name];\n\n if (!appRepos) {\n appRepos = {};\n repos[app.name] = appRepos;\n }\n\n let repo = appRepos[repoInfo.toURLString()];\n if (repo) {\n fatal(\n 'Database initialized multiple times. Please make sure the format of the database URL matches with each database() call.'\n );\n }\n repo = new Repo(repoInfo, useRestClient, authTokenProvider, appCheckProvider);\n appRepos[repoInfo.toURLString()] = repo;\n\n return repo;\n}\n\n/**\n * Forces us to use ReadonlyRestClient instead of PersistentConnection for new Repos.\n */\nexport function repoManagerForceRestClient(forceRestClient: boolean): void {\n useRestClient = forceRestClient;\n}\n\n/**\n * Class representing a Firebase Realtime Database.\n */\nexport class Database implements _FirebaseService {\n /** Represents a `Database` instance. */\n readonly 'type' = 'database';\n\n /** Track if the instance has been used (root or repo accessed) */\n _instanceStarted: boolean = false;\n\n /** Backing state for root_ */\n private _rootInternal?: ReferenceImpl;\n\n /** @hideconstructor */\n constructor(\n public _repoInternal: Repo,\n /** The {@link @firebase/app#FirebaseApp} associated with this Realtime Database instance. */\n readonly app: FirebaseApp\n ) {}\n\n get _repo(): Repo {\n if (!this._instanceStarted) {\n repoStart(\n this._repoInternal,\n this.app.options.appId,\n this.app.options['databaseAuthVariableOverride']\n );\n this._instanceStarted = true;\n }\n return this._repoInternal;\n }\n\n get _root(): ReferenceImpl {\n if (!this._rootInternal) {\n this._rootInternal = new ReferenceImpl(this._repo, newEmptyPath());\n }\n return this._rootInternal;\n }\n\n _delete(): Promise {\n if (this._rootInternal !== null) {\n repoManagerDeleteRepo(this._repo, this.app.name);\n this._repoInternal = null;\n this._rootInternal = null;\n }\n return Promise.resolve();\n }\n\n _checkNotDeleted(apiName: string) {\n if (this._rootInternal === null) {\n fatal('Cannot call ' + apiName + ' on a deleted database.');\n }\n }\n}\n\nfunction checkTransportInit() {\n if (TransportManager.IS_TRANSPORT_INITIALIZED) {\n warn(\n 'Transport has already been initialized. 
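// Usage sketch of the URL fallbacks implemented above: with no databaseURL the
// SDK derives '<projectId>-default-rtdb.firebaseio.com', and in Node a
// FIREBASE_DATABASE_EMULATOR_HOST variable set before initialization redirects
// that same path to the emulator. Project id and host:port are placeholders.
import { initializeApp } from 'firebase/app';
import { getDatabase, ref, get } from 'firebase/database';

// Node only; must be set before the Database instance is created.
process.env.FIREBASE_DATABASE_EMULATOR_HOST = '127.0.0.1:9000';

const app = initializeApp({ projectId: 'example-project' });
const db = getDatabase(app); // namespace is derived from the project's default URL

get(ref(db, 'healthcheck')).then(snapshot => {
  console.log('emulator reachable, value present:', snapshot.exists());
});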
Please call this function before calling ref or setting up a listener'\n );\n }\n}\n\n/**\n * Force the use of websockets instead of longPolling.\n */\nexport function forceWebSockets() {\n checkTransportInit();\n BrowserPollConnection.forceDisallow();\n}\n\n/**\n * Force the use of longPolling instead of websockets. This will be ignored if websocket protocol is used in databaseURL.\n */\nexport function forceLongPolling() {\n checkTransportInit();\n WebSocketConnection.forceDisallow();\n BrowserPollConnection.forceAllow();\n}\n\n/**\n * Returns the instance of the Realtime Database SDK that is associated\n * with the provided {@link @firebase/app#FirebaseApp}. Initializes a new instance with\n * with default settings if no instance exists or if the existing instance uses\n * a custom database URL.\n *\n * @param app - The {@link @firebase/app#FirebaseApp} instance that the returned Realtime\n * Database instance is associated with.\n * @param url - The URL of the Realtime Database instance to connect to. If not\n * provided, the SDK connects to the default instance of the Firebase App.\n * @returns The `Database` instance of the provided app.\n */\nexport function getDatabase(\n app: FirebaseApp = getApp(),\n url?: string\n): Database {\n const db = _getProvider(app, 'database').getImmediate({\n identifier: url\n }) as Database;\n if (!db._instanceStarted) {\n const emulator = getDefaultEmulatorHostnameAndPort('database');\n if (emulator) {\n connectDatabaseEmulator(db, ...emulator);\n }\n }\n return db;\n}\n\n/**\n * Modify the provided instance to communicate with the Realtime Database\n * emulator.\n *\n *

Note: This method must be called before performing any other operation.\n *\n * @param db - The instance to modify.\n * @param host - The emulator host (ex: localhost)\n * @param port - The emulator port (ex: 8080)\n * @param options.mockUserToken - the mock auth token to use for unit testing Security Rules\n */\nexport function connectDatabaseEmulator(\n db: Database,\n host: string,\n port: number,\n options: {\n mockUserToken?: EmulatorMockTokenOptions | string;\n } = {}\n): void {\n db = getModularInstance(db);\n db._checkNotDeleted('useEmulator');\n if (db._instanceStarted) {\n fatal(\n 'Cannot call useEmulator() after instance has already been initialized.'\n );\n }\n\n const repo = db._repoInternal;\n let tokenProvider: EmulatorTokenProvider | undefined = undefined;\n if (repo.repoInfo_.nodeAdmin) {\n if (options.mockUserToken) {\n fatal(\n 'mockUserToken is not supported by the Admin SDK. For client access with mock users, please use the \"firebase\" package instead of \"firebase-admin\".'\n );\n }\n tokenProvider = new EmulatorTokenProvider(EmulatorTokenProvider.OWNER);\n } else if (options.mockUserToken) {\n const token =\n typeof options.mockUserToken === 'string'\n ? options.mockUserToken\n : createMockUserToken(options.mockUserToken, db.app.options.projectId);\n tokenProvider = new EmulatorTokenProvider(token);\n }\n\n // Modify the repo to apply emulator settings\n repoManagerApplyEmulatorSettings(repo, host, port, tokenProvider);\n}\n\n/**\n * Disconnects from the server (all Database operations will be completed\n * offline).\n *\n * The client automatically maintains a persistent connection to the Database\n * server, which will remain active indefinitely and reconnect when\n * disconnected. However, the `goOffline()` and `goOnline()` methods may be used\n * to control the client connection in cases where a persistent connection is\n * undesirable.\n *\n * While offline, the client will no longer receive data updates from the\n * Database. However, all Database operations performed locally will continue to\n * immediately fire events, allowing your application to continue behaving\n * normally. Additionally, each operation performed locally will automatically\n * be queued and retried upon reconnection to the Database server.\n *\n * To reconnect to the Database and begin receiving remote events, see\n * `goOnline()`.\n *\n * @param db - The instance to disconnect.\n */\nexport function goOffline(db: Database): void {\n db = getModularInstance(db);\n db._checkNotDeleted('goOffline');\n repoInterrupt(db._repo);\n}\n\n/**\n * Reconnects to the server and synchronizes the offline Database state\n * with the server state.\n *\n * This method should be used after disabling the active connection with\n * `goOffline()`. 
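// Usage sketch: pointing an instance at the emulator before its first
// operation, then toggling the connection as described above. Host, port and
// the mock user claims are placeholder values for local testing.
import { initializeApp } from 'firebase/app';
import {
  getDatabase,
  connectDatabaseEmulator,
  ref,
  set,
  goOffline,
  goOnline
} from 'firebase/database';

async function emulatorDemo(): Promise<void> {
  const app = initializeApp({ projectId: 'demo-project' });
  const db = getDatabase(app);

  // Must run before the first read or write on `db`; the mock token lets
  // Security Rules see an authenticated user during unit tests.
  connectDatabaseEmulator(db, 'localhost', 9000, {
    mockUserToken: { sub: 'test-user' }
  });

  await set(ref(db, 'users/test-user/lastSeen'), Date.now());

  // While offline, writes are queued locally (local events still fire)...
  goOffline(db);
  const pending = set(ref(db, 'users/test-user/status'), 'online');

  // ...and are sent to the emulator once the connection resumes.
  goOnline(db);
  await pending;
}

emulatorDemo().catch(console.error);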
Once reconnected, the client will transmit the proper data\n * and fire the appropriate events so that your client \"catches up\"\n * automatically.\n *\n * @param db - The instance to reconnect.\n */\nexport function goOnline(db: Database): void {\n db = getModularInstance(db);\n db._checkNotDeleted('goOnline');\n repoResume(db._repo);\n}\n\n/**\n * Logs debugging information to the console.\n *\n * @param enabled - Enables logging if `true`, disables logging if `false`.\n * @param persistent - Remembers the logging state between page refreshes if\n * `true`.\n */\nexport function enableLogging(enabled: boolean, persistent?: boolean);\n\n/**\n * Logs debugging information to the console.\n *\n * @param logger - A custom logger function to control how things get logged.\n */\nexport function enableLogging(logger: (message: string) => unknown);\n\nexport function enableLogging(\n logger: boolean | ((message: string) => unknown),\n persistent?: boolean\n): void {\n enableLoggingImpl(logger, persistent);\n}\n","/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nconst SERVER_TIMESTAMP = {\n '.sv': 'timestamp'\n};\n\n/**\n * Returns a placeholder value for auto-populating the current timestamp (time\n * since the Unix epoch, in milliseconds) as determined by the Firebase\n * servers.\n */\nexport function serverTimestamp(): object {\n return SERVER_TIMESTAMP;\n}\n\n/**\n * Returns a placeholder value that can be used to atomically increment the\n * current database value by the provided delta.\n *\n * @param delta - the amount to modify the current value atomically.\n * @returns A placeholder value for modifying data atomically server-side.\n */\nexport function increment(delta: number): object {\n return {\n '.sv': {\n 'increment': delta\n }\n };\n}\n","/**\n * @license\n * Copyright 2020 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { getModularInstance, Deferred } from '@firebase/util';\n\nimport { repoStartTransaction } from '../core/Repo';\nimport { PRIORITY_INDEX } from '../core/snap/indexes/PriorityIndex';\nimport { Node } from '../core/snap/Node';\nimport { validateWritablePath } from '../core/util/validation';\n\nimport { DatabaseReference } from './Reference';\nimport { DataSnapshot, onValue, ReferenceImpl } from './Reference_impl';\n\n/** An options object to configure transactions. 
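// Usage sketch: writing the two server-value placeholders defined above in a
// single update. Assumes an already-initialized default app; the 'posts' path
// and field names are illustrative.
import { getDatabase, ref, update, serverTimestamp, increment } from 'firebase/database';

const db = getDatabase();

// The server fills in the timestamp and bumps the counter atomically,
// independent of whatever value is cached locally.
update(ref(db, 'posts/post-1'), {
  updatedAt: serverTimestamp(),
  likeCount: increment(1)
});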
*/\nexport interface TransactionOptions {\n /**\n * By default, events are raised each time the transaction update function\n * runs. So if it is run multiple times, you may see intermediate states. You\n * can set this to false to suppress these intermediate states and instead\n * wait until the transaction has completed before events are raised.\n */\n readonly applyLocally?: boolean;\n}\n\n/**\n * A type for the resolve value of {@link runTransaction}.\n */\nexport class TransactionResult {\n /** @hideconstructor */\n constructor(\n /** Whether the transaction was successfully committed. */\n readonly committed: boolean,\n /** The resulting data snapshot. */\n readonly snapshot: DataSnapshot\n ) {}\n\n /** Returns a JSON-serializable representation of this object. */\n toJSON(): object {\n return { committed: this.committed, snapshot: this.snapshot.toJSON() };\n }\n}\n\n/**\n * Atomically modifies the data at this location.\n *\n * Atomically modify the data at this location. Unlike a normal `set()`, which\n * just overwrites the data regardless of its previous value, `runTransaction()` is\n * used to modify the existing value to a new value, ensuring there are no\n * conflicts with other clients writing to the same location at the same time.\n *\n * To accomplish this, you pass `runTransaction()` an update function which is\n * used to transform the current value into a new value. If another client\n * writes to the location before your new value is successfully written, your\n * update function will be called again with the new current value, and the\n * write will be retried. This will happen repeatedly until your write succeeds\n * without conflict or you abort the transaction by not returning a value from\n * your update function.\n *\n * Note: Modifying data with `set()` will cancel any pending transactions at\n * that location, so extreme care should be taken if mixing `set()` and\n * `runTransaction()` to update the same data.\n *\n * Note: When using transactions with Security and Firebase Rules in place, be\n * aware that a client needs `.read` access in addition to `.write` access in\n * order to perform a transaction. This is because the client-side nature of\n * transactions requires the client to read the data in order to transactionally\n * update it.\n *\n * @param ref - The location to atomically modify.\n * @param transactionUpdate - A developer-supplied function which will be passed\n * the current data stored at this location (as a JavaScript object). The\n * function should return the new value it would like written (as a JavaScript\n * object). If `undefined` is returned (i.e. you return with no arguments) the\n * transaction will be aborted and the data at this location will not be\n * modified.\n * @param options - An options object to configure transactions.\n * @returns A `Promise` that can optionally be used instead of the `onComplete`\n * callback to handle success and failure.\n */\nexport function runTransaction(\n ref: DatabaseReference,\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n transactionUpdate: (currentData: any) => unknown,\n options?: TransactionOptions\n): Promise {\n ref = getModularInstance(ref);\n\n validateWritablePath('Reference.transaction', ref._path);\n\n if (ref.key === '.length' || ref.key === '.keys') {\n throw (\n 'Reference.transaction failed: ' + ref.key + ' is a read-only object.'\n );\n }\n\n const applyLocally = options?.applyLocally ?? 
true;\n const deferred = new Deferred();\n\n const promiseComplete = (\n error: Error | null,\n committed: boolean,\n node: Node | null\n ) => {\n let dataSnapshot: DataSnapshot | null = null;\n if (error) {\n deferred.reject(error);\n } else {\n dataSnapshot = new DataSnapshot(\n node,\n new ReferenceImpl(ref._repo, ref._path),\n PRIORITY_INDEX\n );\n deferred.resolve(new TransactionResult(committed, dataSnapshot));\n }\n };\n\n // Add a watch to make sure we get server updates.\n const unwatcher = onValue(ref, () => {});\n\n repoStartTransaction(\n ref._repo,\n ref._path,\n transactionUpdate,\n promiseComplete,\n unwatcher,\n applyLocally\n );\n\n return deferred.promise;\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { PersistentConnection } from '../core/PersistentConnection';\nimport { RepoInfo } from '../core/RepoInfo';\nimport { Connection } from '../realtime/Connection';\n\nimport { repoManagerForceRestClient } from './Database';\n\nexport const DataConnection = PersistentConnection;\n\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\n(PersistentConnection.prototype as any).simpleListen = function (\n pathString: string,\n onComplete: (a: unknown) => void\n) {\n this.sendRequest('q', { p: pathString }, onComplete);\n};\n\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\n(PersistentConnection.prototype as any).echo = function (\n data: unknown,\n onEcho: (a: unknown) => void\n) {\n this.sendRequest('echo', { d: data }, onEcho);\n};\n\n// RealTimeConnection properties that we use in tests.\nexport const RealTimeConnection = Connection;\n\n/**\n * @internal\n */\nexport const hijackHash = function (newHash: () => string) {\n const oldPut = PersistentConnection.prototype.put;\n PersistentConnection.prototype.put = function (\n pathString,\n data,\n onComplete,\n hash\n ) {\n if (hash !== undefined) {\n hash = newHash();\n }\n oldPut.call(this, pathString, data, onComplete, hash);\n };\n return function () {\n PersistentConnection.prototype.put = oldPut;\n };\n};\n\nexport const ConnectionTarget = RepoInfo;\n\n/**\n * Forces the RepoManager to create Repos that use ReadonlyRestClient instead of PersistentConnection.\n * @internal\n */\nexport const forceRestClient = function (forceRestClient: boolean) {\n repoManagerForceRestClient(forceRestClient);\n};\n","/**\n * @license\n * Copyright 2021 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport 
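// Usage sketch: a retrying update function, the applyLocally option and the
// TransactionResult resolved above. Assumes an already-initialized default
// app; the counter path and the abort threshold are illustrative.
import { getDatabase, ref, runTransaction } from 'firebase/database';

const db = getDatabase();
const visitsRef = ref(db, 'rooms/general/visits');

runTransaction(
  visitsRef,
  (current: number | null) => {
    if (current !== null && current >= 1000) {
      // Returning undefined aborts the transaction; `committed` becomes false.
      return undefined;
    }
    return (current ?? 0) + 1;
  },
  { applyLocally: false } // suppress intermediate local events
).then(result => {
  console.log('committed:', result.committed, 'value:', result.snapshot.val());
});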
{ Logger } from '@firebase/logger';\n\nconst logClient = new Logger('@firebase/database-compat');\n\nexport const warn = function (msg: string) {\n const message = 'FIREBASE WARNING: ' + msg;\n logClient.warn(message);\n};\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { OnDisconnect as ModularOnDisconnect } from '@firebase/database';\nimport { validateArgCount, validateCallback, Compat } from '@firebase/util';\n\nimport { warn } from '../util/util';\nexport class OnDisconnect implements Compat {\n constructor(readonly _delegate: ModularOnDisconnect) {}\n\n cancel(onComplete?: (a: Error | null) => void): Promise {\n validateArgCount('OnDisconnect.cancel', 0, 1, arguments.length);\n validateCallback('OnDisconnect.cancel', 'onComplete', onComplete, true);\n const result = this._delegate.cancel();\n if (onComplete) {\n result.then(\n () => onComplete(null),\n error => onComplete(error)\n );\n }\n return result;\n }\n\n remove(onComplete?: (a: Error | null) => void): Promise {\n validateArgCount('OnDisconnect.remove', 0, 1, arguments.length);\n validateCallback('OnDisconnect.remove', 'onComplete', onComplete, true);\n const result = this._delegate.remove();\n if (onComplete) {\n result.then(\n () => onComplete(null),\n error => onComplete(error)\n );\n }\n return result;\n }\n\n set(value: unknown, onComplete?: (a: Error | null) => void): Promise {\n validateArgCount('OnDisconnect.set', 1, 2, arguments.length);\n validateCallback('OnDisconnect.set', 'onComplete', onComplete, true);\n const result = this._delegate.set(value);\n if (onComplete) {\n result.then(\n () => onComplete(null),\n error => onComplete(error)\n );\n }\n return result;\n }\n\n setWithPriority(\n value: unknown,\n priority: number | string | null,\n onComplete?: (a: Error | null) => void\n ): Promise {\n validateArgCount('OnDisconnect.setWithPriority', 2, 3, arguments.length);\n validateCallback(\n 'OnDisconnect.setWithPriority',\n 'onComplete',\n onComplete,\n true\n );\n const result = this._delegate.setWithPriority(value, priority);\n if (onComplete) {\n result.then(\n () => onComplete(null),\n error => onComplete(error)\n );\n }\n return result;\n }\n\n update(\n objectToMerge: Record,\n onComplete?: (a: Error | null) => void\n ): Promise {\n validateArgCount('OnDisconnect.update', 1, 2, arguments.length);\n if (Array.isArray(objectToMerge)) {\n const newObjectToMerge: { [k: string]: unknown } = {};\n for (let i = 0; i < objectToMerge.length; ++i) {\n newObjectToMerge['' + i] = objectToMerge[i];\n }\n objectToMerge = newObjectToMerge;\n warn(\n 'Passing an Array to firebase.database.onDisconnect().update() is deprecated. 
Use set() if you want to overwrite the ' +\n 'existing data, or an Object with integer keys if you really do want to only update some of the children.'\n );\n }\n validateCallback('OnDisconnect.update', 'onComplete', onComplete, true);\n const result = this._delegate.update(objectToMerge);\n if (onComplete) {\n result.then(\n () => onComplete(null),\n error => onComplete(error)\n );\n }\n return result;\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { validateArgCount } from '@firebase/util';\n\nimport { DataSnapshot } from './Reference';\n\nexport class TransactionResult {\n /**\n * A type for the resolve value of Firebase.transaction.\n */\n constructor(public committed: boolean, public snapshot: DataSnapshot) {}\n\n // Do not create public documentation. This is intended to make JSON serialization work but is otherwise unnecessary\n // for end-users\n toJSON(): object {\n validateArgCount('TransactionResult.toJSON', 0, 1, arguments.length);\n return { committed: this.committed, snapshot: this.snapshot.toJSON() };\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n OnDisconnect as ModularOnDisconnect,\n off,\n onChildAdded,\n onChildChanged,\n onChildMoved,\n onChildRemoved,\n onValue,\n EventType,\n limitToFirst,\n query,\n limitToLast,\n orderByChild,\n orderByKey,\n orderByValue,\n orderByPriority,\n startAt,\n startAfter,\n endAt,\n endBefore,\n equalTo,\n get,\n set,\n update,\n setWithPriority,\n remove,\n setPriority,\n push,\n runTransaction,\n child,\n DataSnapshot as ModularDataSnapshot,\n Query as ExpQuery,\n DatabaseReference as ModularReference,\n _QueryImpl,\n _ReferenceImpl,\n _validatePathString,\n _validateWritablePath,\n _UserCallback,\n _QueryParams\n} from '@firebase/database';\nimport {\n Compat,\n Deferred,\n errorPrefix,\n validateArgCount,\n validateCallback,\n validateContextObject\n} from '@firebase/util';\n\nimport { warn } from '../util/util';\nimport { validateBoolean, validateEventType } from '../util/validation';\n\nimport { Database } from './Database';\nimport { OnDisconnect } from './onDisconnect';\nimport { TransactionResult } from './TransactionResult';\n\n/**\n * Class representing a firebase data snapshot. It wraps a SnapshotNode and\n * surfaces the public methods (val, forEach, etc.) 
we want to expose.\n */\nexport class DataSnapshot implements Compat {\n constructor(\n readonly _database: Database,\n readonly _delegate: ModularDataSnapshot\n ) {}\n\n /**\n * Retrieves the snapshot contents as JSON. Returns null if the snapshot is\n * empty.\n *\n * @returns JSON representation of the DataSnapshot contents, or null if empty.\n */\n val(): unknown {\n validateArgCount('DataSnapshot.val', 0, 0, arguments.length);\n return this._delegate.val();\n }\n\n /**\n * Returns the snapshot contents as JSON, including priorities of node. Suitable for exporting\n * the entire node contents.\n * @returns JSON representation of the DataSnapshot contents, or null if empty.\n */\n exportVal(): unknown {\n validateArgCount('DataSnapshot.exportVal', 0, 0, arguments.length);\n return this._delegate.exportVal();\n }\n\n // Do not create public documentation. This is intended to make JSON serialization work but is otherwise unnecessary\n // for end-users\n toJSON(): unknown {\n // Optional spacer argument is unnecessary because we're depending on recursion rather than stringifying the content\n validateArgCount('DataSnapshot.toJSON', 0, 1, arguments.length);\n return this._delegate.toJSON();\n }\n\n /**\n * Returns whether the snapshot contains a non-null value.\n *\n * @returns Whether the snapshot contains a non-null value, or is empty.\n */\n exists(): boolean {\n validateArgCount('DataSnapshot.exists', 0, 0, arguments.length);\n return this._delegate.exists();\n }\n\n /**\n * Returns a DataSnapshot of the specified child node's contents.\n *\n * @param path - Path to a child.\n * @returns DataSnapshot for child node.\n */\n child(path: string): DataSnapshot {\n validateArgCount('DataSnapshot.child', 0, 1, arguments.length);\n // Ensure the childPath is a string (can be a number)\n path = String(path);\n _validatePathString('DataSnapshot.child', 'path', path, false);\n return new DataSnapshot(this._database, this._delegate.child(path));\n }\n\n /**\n * Returns whether the snapshot contains a child at the specified path.\n *\n * @param path - Path to a child.\n * @returns Whether the child exists.\n */\n hasChild(path: string): boolean {\n validateArgCount('DataSnapshot.hasChild', 1, 1, arguments.length);\n _validatePathString('DataSnapshot.hasChild', 'path', path, false);\n return this._delegate.hasChild(path);\n }\n\n /**\n * Returns the priority of the object, or null if no priority was set.\n *\n * @returns The priority.\n */\n getPriority(): string | number | null {\n validateArgCount('DataSnapshot.getPriority', 0, 0, arguments.length);\n return this._delegate.priority;\n }\n\n /**\n * Iterates through child nodes and calls the specified action for each one.\n *\n * @param action - Callback function to be called\n * for each child.\n * @returns True if forEach was canceled by action returning true for\n * one of the child nodes.\n */\n forEach(action: (snapshot: IteratedDataSnapshot) => boolean | void): boolean {\n validateArgCount('DataSnapshot.forEach', 1, 1, arguments.length);\n validateCallback('DataSnapshot.forEach', 'action', action, false);\n return this._delegate.forEach(expDataSnapshot =>\n action(new DataSnapshot(this._database, expDataSnapshot))\n );\n }\n\n /**\n * Returns whether this DataSnapshot has children.\n * @returns True if the DataSnapshot contains 1 or more child nodes.\n */\n hasChildren(): boolean {\n validateArgCount('DataSnapshot.hasChildren', 0, 0, arguments.length);\n return this._delegate.hasChildren();\n }\n\n get key() {\n return this._delegate.key;\n 
}\n\n /**\n * Returns the number of children for this DataSnapshot.\n * @returns The number of children that this DataSnapshot contains.\n */\n numChildren(): number {\n validateArgCount('DataSnapshot.numChildren', 0, 0, arguments.length);\n return this._delegate.size;\n }\n\n /**\n * @returns The Firebase reference for the location this snapshot's data came\n * from.\n */\n getRef(): Reference {\n validateArgCount('DataSnapshot.ref', 0, 0, arguments.length);\n return new Reference(this._database, this._delegate.ref);\n }\n\n get ref(): Reference {\n return this.getRef();\n }\n}\n\n/**\n * Represents a child snapshot of a `Reference` that is being iterated over. The key will never be undefined.\n */\nexport interface IteratedDataSnapshot extends DataSnapshot {\n key: string; // key of the location of this snapshot.\n}\n\nexport interface SnapshotCallback {\n (dataSnapshot: DataSnapshot, previousChildName?: string | null): unknown;\n}\n\n/**\n * A Query represents a filter to be applied to a firebase location. This object purely represents the\n * query expression (and exposes our public API to build the query). The actual query logic is in ViewBase.js.\n *\n * Since every Firebase reference is a query, Firebase inherits from this object.\n */\nexport class Query implements Compat {\n constructor(readonly database: Database, readonly _delegate: ExpQuery) {}\n\n on(\n eventType: string,\n callback: SnapshotCallback,\n cancelCallbackOrContext?: ((a: Error) => unknown) | object | null,\n context?: object | null\n ): SnapshotCallback {\n validateArgCount('Query.on', 2, 4, arguments.length);\n validateCallback('Query.on', 'callback', callback, false);\n\n const ret = Query.getCancelAndContextArgs_(\n 'Query.on',\n cancelCallbackOrContext,\n context\n );\n const valueCallback = (expSnapshot, previousChildName?) 
=> {\n callback.call(\n ret.context,\n new DataSnapshot(this.database, expSnapshot),\n previousChildName\n );\n };\n valueCallback.userCallback = callback;\n valueCallback.context = ret.context;\n const cancelCallback = ret.cancel?.bind(ret.context);\n\n switch (eventType) {\n case 'value':\n onValue(this._delegate, valueCallback, cancelCallback);\n return callback;\n case 'child_added':\n onChildAdded(this._delegate, valueCallback, cancelCallback);\n return callback;\n case 'child_removed':\n onChildRemoved(this._delegate, valueCallback, cancelCallback);\n return callback;\n case 'child_changed':\n onChildChanged(this._delegate, valueCallback, cancelCallback);\n return callback;\n case 'child_moved':\n onChildMoved(this._delegate, valueCallback, cancelCallback);\n return callback;\n default:\n throw new Error(\n errorPrefix('Query.on', 'eventType') +\n 'must be a valid event type = \"value\", \"child_added\", \"child_removed\", ' +\n '\"child_changed\", or \"child_moved\".'\n );\n }\n }\n\n off(\n eventType?: string,\n callback?: SnapshotCallback,\n context?: object | null\n ): void {\n validateArgCount('Query.off', 0, 3, arguments.length);\n validateEventType('Query.off', eventType, true);\n validateCallback('Query.off', 'callback', callback, true);\n validateContextObject('Query.off', 'context', context, true);\n if (callback) {\n const valueCallback: _UserCallback = () => {};\n valueCallback.userCallback = callback;\n valueCallback.context = context;\n off(this._delegate, eventType as EventType, valueCallback);\n } else {\n off(this._delegate, eventType as EventType | undefined);\n }\n }\n\n /**\n * Get the server-value for this query, or return a cached value if not connected.\n */\n get(): Promise {\n return get(this._delegate).then(expSnapshot => {\n return new DataSnapshot(this.database, expSnapshot);\n });\n }\n\n /**\n * Attaches a listener, waits for the first event, and then removes the listener\n */\n once(\n eventType: string,\n callback?: SnapshotCallback,\n failureCallbackOrContext?: ((a: Error) => void) | object | null,\n context?: object | null\n ): Promise {\n validateArgCount('Query.once', 1, 4, arguments.length);\n validateCallback('Query.once', 'callback', callback, true);\n\n const ret = Query.getCancelAndContextArgs_(\n 'Query.once',\n failureCallbackOrContext,\n context\n );\n const deferred = new Deferred();\n const valueCallback: _UserCallback = (expSnapshot, previousChildName?) 
=> {\n const result = new DataSnapshot(this.database, expSnapshot);\n if (callback) {\n callback.call(ret.context, result, previousChildName);\n }\n deferred.resolve(result);\n };\n valueCallback.userCallback = callback;\n valueCallback.context = ret.context;\n const cancelCallback = (error: Error) => {\n if (ret.cancel) {\n ret.cancel.call(ret.context, error);\n }\n deferred.reject(error);\n };\n\n switch (eventType) {\n case 'value':\n onValue(this._delegate, valueCallback, cancelCallback, {\n onlyOnce: true\n });\n break;\n case 'child_added':\n onChildAdded(this._delegate, valueCallback, cancelCallback, {\n onlyOnce: true\n });\n break;\n case 'child_removed':\n onChildRemoved(this._delegate, valueCallback, cancelCallback, {\n onlyOnce: true\n });\n break;\n case 'child_changed':\n onChildChanged(this._delegate, valueCallback, cancelCallback, {\n onlyOnce: true\n });\n break;\n case 'child_moved':\n onChildMoved(this._delegate, valueCallback, cancelCallback, {\n onlyOnce: true\n });\n break;\n default:\n throw new Error(\n errorPrefix('Query.once', 'eventType') +\n 'must be a valid event type = \"value\", \"child_added\", \"child_removed\", ' +\n '\"child_changed\", or \"child_moved\".'\n );\n }\n\n return deferred.promise;\n }\n\n /**\n * Set a limit and anchor it to the start of the window.\n */\n limitToFirst(limit: number): Query {\n validateArgCount('Query.limitToFirst', 1, 1, arguments.length);\n return new Query(this.database, query(this._delegate, limitToFirst(limit)));\n }\n\n /**\n * Set a limit and anchor it to the end of the window.\n */\n limitToLast(limit: number): Query {\n validateArgCount('Query.limitToLast', 1, 1, arguments.length);\n return new Query(this.database, query(this._delegate, limitToLast(limit)));\n }\n\n /**\n * Given a child path, return a new query ordered by the specified grandchild path.\n */\n orderByChild(path: string): Query {\n validateArgCount('Query.orderByChild', 1, 1, arguments.length);\n return new Query(this.database, query(this._delegate, orderByChild(path)));\n }\n\n /**\n * Return a new query ordered by the KeyIndex\n */\n orderByKey(): Query {\n validateArgCount('Query.orderByKey', 0, 0, arguments.length);\n return new Query(this.database, query(this._delegate, orderByKey()));\n }\n\n /**\n * Return a new query ordered by the PriorityIndex\n */\n orderByPriority(): Query {\n validateArgCount('Query.orderByPriority', 0, 0, arguments.length);\n return new Query(this.database, query(this._delegate, orderByPriority()));\n }\n\n /**\n * Return a new query ordered by the ValueIndex\n */\n orderByValue(): Query {\n validateArgCount('Query.orderByValue', 0, 0, arguments.length);\n return new Query(this.database, query(this._delegate, orderByValue()));\n }\n\n startAt(\n value: number | string | boolean | null = null,\n name?: string | null\n ): Query {\n validateArgCount('Query.startAt', 0, 2, arguments.length);\n return new Query(\n this.database,\n query(this._delegate, startAt(value, name))\n );\n }\n\n startAfter(\n value: number | string | boolean | null = null,\n name?: string | null\n ): Query {\n validateArgCount('Query.startAfter', 0, 2, arguments.length);\n return new Query(\n this.database,\n query(this._delegate, startAfter(value, name))\n );\n }\n\n endAt(\n value: number | string | boolean | null = null,\n name?: string | null\n ): Query {\n validateArgCount('Query.endAt', 0, 2, arguments.length);\n return new Query(this.database, query(this._delegate, endAt(value, name)));\n }\n\n endBefore(\n value: number | string | boolean | 
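// Usage sketch for the compat (v8-style) Query wrapper above: attaching a
// listener, a one-shot read, and detaching via the returned callback. Assumes
// the 'firebase/compat/*' entry points; the URL and paths are placeholders.
import firebase from 'firebase/compat/app';
import 'firebase/compat/database';

firebase.initializeApp({
  databaseURL: 'https://example-project-default-rtdb.firebaseio.com'
});

const topScores = firebase
  .database()
  .ref('scores')
  .orderByChild('points')
  .limitToLast(3);

// `on` returns the callback so it can be handed back to `off` later.
const listener = topScores.on('value', snapshot => {
  snapshot.forEach(child => {
    console.log(child.key, child.child('points').val());
    return false; // keep iterating
  });
});

// One-shot read: `once` resolves with a compat DataSnapshot.
topScores.once('value').then(snapshot => {
  console.log('has data:', snapshot.exists(), 'children:', snapshot.numChildren());
});

// Detach only the listener registered above on this exact query.
topScores.off('value', listener);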
null = null,\n name?: string | null\n ): Query {\n validateArgCount('Query.endBefore', 0, 2, arguments.length);\n return new Query(\n this.database,\n query(this._delegate, endBefore(value, name))\n );\n }\n\n /**\n * Load the selection of children with exactly the specified value, and, optionally,\n * the specified name.\n */\n equalTo(value: number | string | boolean | null, name?: string) {\n validateArgCount('Query.equalTo', 1, 2, arguments.length);\n return new Query(\n this.database,\n query(this._delegate, equalTo(value, name))\n );\n }\n\n /**\n * @returns URL for this location.\n */\n toString(): string {\n validateArgCount('Query.toString', 0, 0, arguments.length);\n return this._delegate.toString();\n }\n\n // Do not create public documentation. This is intended to make JSON serialization work but is otherwise unnecessary\n // for end-users.\n toJSON() {\n // An optional spacer argument is unnecessary for a string.\n validateArgCount('Query.toJSON', 0, 1, arguments.length);\n return this._delegate.toJSON();\n }\n\n /**\n * Return true if this query and the provided query are equivalent; otherwise, return false.\n */\n isEqual(other: Query): boolean {\n validateArgCount('Query.isEqual', 1, 1, arguments.length);\n if (!(other instanceof Query)) {\n const error =\n 'Query.isEqual failed: First argument must be an instance of firebase.database.Query.';\n throw new Error(error);\n }\n return this._delegate.isEqual(other._delegate);\n }\n\n /**\n * Helper used by .on and .once to extract the context and or cancel arguments.\n * @param fnName - The function name (on or once)\n *\n */\n private static getCancelAndContextArgs_(\n fnName: string,\n cancelOrContext?: ((a: Error) => void) | object | null,\n context?: object | null\n ): { cancel: ((a: Error) => void) | undefined; context: object | undefined } {\n const ret: {\n cancel: ((a: Error) => void) | null;\n context: object | null;\n } = { cancel: undefined, context: undefined };\n if (cancelOrContext && context) {\n ret.cancel = cancelOrContext as (a: Error) => void;\n validateCallback(fnName, 'cancel', ret.cancel, true);\n\n ret.context = context;\n validateContextObject(fnName, 'context', ret.context, true);\n } else if (cancelOrContext) {\n // we have either a cancel callback or a context.\n if (typeof cancelOrContext === 'object' && cancelOrContext !== null) {\n // it's a context!\n ret.context = cancelOrContext;\n } else if (typeof cancelOrContext === 'function') {\n ret.cancel = cancelOrContext as (a: Error) => void;\n } else {\n throw new Error(\n errorPrefix(fnName, 'cancelOrContext') +\n ' must either be a cancel callback or a context object.'\n );\n }\n }\n return ret;\n }\n\n get ref(): Reference {\n return new Reference(\n this.database,\n new _ReferenceImpl(this._delegate._repo, this._delegate._path)\n );\n }\n}\n\nexport class Reference extends Query implements Compat {\n then: Promise['then'];\n catch: Promise['catch'];\n\n /**\n * Call options:\n * new Reference(Repo, Path) or\n * new Reference(url: string, string|RepoManager)\n *\n * Externally - this is the firebase.database.Reference type.\n */\n constructor(\n readonly database: Database,\n readonly _delegate: ModularReference\n ) {\n super(\n database,\n new _QueryImpl(\n _delegate._repo,\n _delegate._path,\n new _QueryParams(),\n false\n )\n );\n }\n\n /** @returns {?string} */\n getKey(): string | null {\n validateArgCount('Reference.key', 0, 0, arguments.length);\n return this._delegate.key;\n }\n\n child(pathString: string): Reference {\n 
validateArgCount('Reference.child', 1, 1, arguments.length);\n if (typeof pathString === 'number') {\n pathString = String(pathString);\n }\n return new Reference(this.database, child(this._delegate, pathString));\n }\n\n /** @returns {?Reference} */\n getParent(): Reference | null {\n validateArgCount('Reference.parent', 0, 0, arguments.length);\n const parent = this._delegate.parent;\n return parent ? new Reference(this.database, parent) : null;\n }\n\n /** @returns {!Reference} */\n getRoot(): Reference {\n validateArgCount('Reference.root', 0, 0, arguments.length);\n return new Reference(this.database, this._delegate.root);\n }\n\n set(\n newVal: unknown,\n onComplete?: (error: Error | null) => void\n ): Promise {\n validateArgCount('Reference.set', 1, 2, arguments.length);\n validateCallback('Reference.set', 'onComplete', onComplete, true);\n const result = set(this._delegate, newVal);\n if (onComplete) {\n result.then(\n () => onComplete(null),\n error => onComplete(error)\n );\n }\n return result;\n }\n\n update(\n values: object,\n onComplete?: (a: Error | null) => void\n ): Promise {\n validateArgCount('Reference.update', 1, 2, arguments.length);\n\n if (Array.isArray(values)) {\n const newObjectToMerge: { [k: string]: unknown } = {};\n for (let i = 0; i < values.length; ++i) {\n newObjectToMerge['' + i] = values[i];\n }\n values = newObjectToMerge;\n warn(\n 'Passing an Array to Firebase.update() is deprecated. ' +\n 'Use set() if you want to overwrite the existing data, or ' +\n 'an Object with integer keys if you really do want to ' +\n 'only update some of the children.'\n );\n }\n _validateWritablePath('Reference.update', this._delegate._path);\n validateCallback('Reference.update', 'onComplete', onComplete, true);\n\n const result = update(this._delegate, values);\n if (onComplete) {\n result.then(\n () => onComplete(null),\n error => onComplete(error)\n );\n }\n return result;\n }\n\n setWithPriority(\n newVal: unknown,\n newPriority: string | number | null,\n onComplete?: (a: Error | null) => void\n ): Promise {\n validateArgCount('Reference.setWithPriority', 2, 3, arguments.length);\n validateCallback(\n 'Reference.setWithPriority',\n 'onComplete',\n onComplete,\n true\n );\n\n const result = setWithPriority(this._delegate, newVal, newPriority);\n if (onComplete) {\n result.then(\n () => onComplete(null),\n error => onComplete(error)\n );\n }\n return result;\n }\n\n remove(onComplete?: (a: Error | null) => void): Promise {\n validateArgCount('Reference.remove', 0, 1, arguments.length);\n validateCallback('Reference.remove', 'onComplete', onComplete, true);\n\n const result = remove(this._delegate);\n if (onComplete) {\n result.then(\n () => onComplete(null),\n error => onComplete(error)\n );\n }\n return result;\n }\n\n transaction(\n transactionUpdate: (currentData: unknown) => unknown,\n onComplete?: (\n error: Error | null,\n committed: boolean,\n dataSnapshot: DataSnapshot | null\n ) => void,\n applyLocally?: boolean\n ): Promise {\n validateArgCount('Reference.transaction', 1, 3, arguments.length);\n validateCallback(\n 'Reference.transaction',\n 'transactionUpdate',\n transactionUpdate,\n false\n );\n validateCallback('Reference.transaction', 'onComplete', onComplete, true);\n validateBoolean(\n 'Reference.transaction',\n 'applyLocally',\n applyLocally,\n true\n );\n\n const result = runTransaction(this._delegate, transactionUpdate, {\n applyLocally\n }).then(\n transactionResult =>\n new TransactionResult(\n transactionResult.committed,\n new 
DataSnapshot(this.database, transactionResult.snapshot)\n )\n );\n if (onComplete) {\n result.then(\n transactionResult =>\n onComplete(\n null,\n transactionResult.committed,\n transactionResult.snapshot\n ),\n error => onComplete(error, false, null)\n );\n }\n return result;\n }\n\n setPriority(\n priority: string | number | null,\n onComplete?: (a: Error | null) => void\n ): Promise {\n validateArgCount('Reference.setPriority', 1, 2, arguments.length);\n validateCallback('Reference.setPriority', 'onComplete', onComplete, true);\n\n const result = setPriority(this._delegate, priority);\n if (onComplete) {\n result.then(\n () => onComplete(null),\n error => onComplete(error)\n );\n }\n return result;\n }\n\n push(value?: unknown, onComplete?: (a: Error | null) => void): Reference {\n validateArgCount('Reference.push', 0, 2, arguments.length);\n validateCallback('Reference.push', 'onComplete', onComplete, true);\n\n const expPromise = push(this._delegate, value);\n const promise = expPromise.then(\n expRef => new Reference(this.database, expRef)\n );\n\n if (onComplete) {\n promise.then(\n () => onComplete(null),\n error => onComplete(error)\n );\n }\n\n const result = new Reference(this.database, expPromise);\n result.then = promise.then.bind(promise);\n result.catch = promise.catch.bind(promise, undefined);\n return result;\n }\n\n onDisconnect(): OnDisconnect {\n _validateWritablePath('Reference.onDisconnect', this._delegate._path);\n return new OnDisconnect(\n new ModularOnDisconnect(this._delegate._repo, this._delegate._path)\n );\n }\n\n get key(): string | null {\n return this.getKey();\n }\n\n get parent(): Reference | null {\n return this.getParent();\n }\n\n get root(): Reference {\n return this.getRoot();\n }\n}\n","/**\n * @license\n * Copyright 2021 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { errorPrefix as errorPrefixFxn } from '@firebase/util';\n\nexport const validateBoolean = function (\n fnName: string,\n argumentName: string,\n bool: unknown,\n optional: boolean\n) {\n if (optional && bool === undefined) {\n return;\n }\n if (typeof bool !== 'boolean') {\n throw new Error(\n errorPrefixFxn(fnName, argumentName) + 'must be a boolean.'\n );\n }\n};\n\nexport const validateEventType = function (\n fnName: string,\n eventType: string,\n optional: boolean\n) {\n if (optional && eventType === undefined) {\n return;\n }\n\n switch (eventType) {\n case 'value':\n case 'child_added':\n case 'child_removed':\n case 'child_changed':\n case 'child_moved':\n break;\n default:\n throw new Error(\n errorPrefixFxn(fnName, 'eventType') +\n 'must be a valid event type = \"value\", \"child_added\", \"child_removed\", ' +\n '\"child_changed\", or \"child_moved\".'\n );\n }\n};\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * 
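// Usage sketch for the compat Reference wrapper above: promise- and
// callback-style completion, push keys, a callback transaction and an
// onDisconnect hook. Assumes an app already initialized with a databaseURL;
// paths and payloads are illustrative.
import firebase from 'firebase/compat/app';
import 'firebase/compat/database';

const userRef = firebase.database().ref('users/alice');

// Either consume the returned promise or pass the optional onComplete callback.
userRef.set({ name: 'Alice', score: 0 }, error => {
  if (error) {
    console.error('set failed', error);
  }
});

// push() returns a reference whose key is available synchronously; the
// returned object is also thenable for write completion.
const messageRef = firebase.database().ref('messages').push({ text: 'hi' });
console.log('new key:', messageRef.key);

// Callback-style transaction over a counter child.
userRef.child('score').transaction(
  current => (current || 0) + 10,
  (error, committed, snapshot) => {
    console.log('committed:', committed, 'score:', snapshot && snapshot.val());
  }
);

// Queue a write to run server-side if this client disconnects uncleanly.
userRef.child('online').onDisconnect().set(false);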
http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n// eslint-disable-next-line import/no-extraneous-dependencies\n\nimport { FirebaseApp } from '@firebase/app-types';\nimport { FirebaseService } from '@firebase/app-types/private';\nimport {\n forceLongPolling,\n forceWebSockets,\n goOnline,\n connectDatabaseEmulator,\n goOffline,\n ref,\n refFromURL,\n increment,\n serverTimestamp,\n Database as ModularDatabase\n} from '@firebase/database';\nimport {\n validateArgCount,\n Compat,\n EmulatorMockTokenOptions\n} from '@firebase/util';\n\nimport { Reference } from './Reference';\n\n/**\n * Class representing a firebase database.\n */\nexport class Database implements FirebaseService, Compat {\n static readonly ServerValue = {\n TIMESTAMP: serverTimestamp(),\n increment: (delta: number) => increment(delta)\n };\n\n /**\n * The constructor should not be called by users of our public API.\n */\n constructor(readonly _delegate: ModularDatabase, readonly app: FirebaseApp) {}\n\n INTERNAL = {\n delete: () => this._delegate._delete(),\n forceWebSockets,\n forceLongPolling\n };\n\n /**\n * Modify this instance to communicate with the Realtime Database emulator.\n *\n *

Note: This method must be called before performing any other operation.\n *\n * @param host - the emulator host (ex: localhost)\n * @param port - the emulator port (ex: 8080)\n * @param options.mockUserToken - the mock auth token to use for unit testing Security Rules\n */\n useEmulator(\n host: string,\n port: number,\n options: {\n mockUserToken?: EmulatorMockTokenOptions;\n } = {}\n ): void {\n connectDatabaseEmulator(this._delegate, host, port, options);\n }\n\n /**\n * Returns a reference to the root or to the path specified in the provided\n * argument.\n *\n * @param path - The relative string path or an existing Reference to a database\n * location.\n * @throws If a Reference is provided, throws if it does not belong to the\n * same project.\n * @returns Firebase reference.\n */\n ref(path?: string): Reference;\n ref(path?: Reference): Reference;\n ref(path?: string | Reference): Reference {\n validateArgCount('database.ref', 0, 1, arguments.length);\n if (path instanceof Reference) {\n const childRef = refFromURL(this._delegate, path.toString());\n return new Reference(this, childRef);\n } else {\n const childRef = ref(this._delegate, path);\n return new Reference(this, childRef);\n }\n }\n\n /**\n * Returns a reference to the root or the path specified in url.\n * We throw a exception if the url is not in the same domain as the\n * current repo.\n * @returns Firebase reference.\n */\n refFromURL(url: string): Reference {\n const apiName = 'database.refFromURL';\n validateArgCount(apiName, 1, 1, arguments.length);\n const childRef = refFromURL(this._delegate, url);\n return new Reference(this, childRef);\n }\n\n // Make individual repo go offline.\n goOffline(): void {\n validateArgCount('database.goOffline', 0, 0, arguments.length);\n return goOffline(this._delegate);\n }\n\n goOnline(): void {\n validateArgCount('database.goOnline', 0, 0, arguments.length);\n return goOnline(this._delegate);\n }\n}\n","/**\n * @license\n * Copyright 2021 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n// eslint-disable-next-line import/no-extraneous-dependencies\nimport firebase, { FirebaseNamespace } from '@firebase/app-compat';\nimport { _FirebaseNamespace } from '@firebase/app-types/private';\nimport { Component, ComponentType } from '@firebase/component';\nimport { enableLogging } from '@firebase/database';\nimport * as types from '@firebase/database-types';\n\nimport { name, version } from '../package.json';\nimport { Database } from '../src/api/Database';\nimport * as INTERNAL from '../src/api/internal';\nimport { DataSnapshot, Query, Reference } from '../src/api/Reference';\n\nconst ServerValue = Database.ServerValue;\n\nexport function registerDatabase(instance: FirebaseNamespace) {\n // Register the Database Service with the 'firebase' namespace.\n (instance as unknown as _FirebaseNamespace).INTERNAL.registerComponent(\n new Component(\n 'database-compat',\n (container, { instanceIdentifier: url }) => {\n /* Dependencies */\n // getImmediate for 
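// Usage sketch for the compat Database wrapper above: root and URL-based
// references, the ServerValue helpers and manual connection control. The
// databaseURL is a placeholder and must match this instance for refFromURL.
import firebase from 'firebase/compat/app';
import 'firebase/compat/database';

firebase.initializeApp({
  databaseURL: 'https://example-project-default-rtdb.firebaseio.com'
});

const db = firebase.database();

const root = db.ref();       // root of the database
const logs = db.ref('logs'); // child location by relative path

// refFromURL() throws if the URL points at a different domain than this repo.
const sameLogs = db.refFromURL(
  'https://example-project-default-rtdb.firebaseio.com/logs'
);
console.log(root.toString(), sameLogs.key);

logs.push({
  at: firebase.database.ServerValue.TIMESTAMP,
  count: firebase.database.ServerValue.increment(1)
});

// Take just this instance offline and bring it back.
db.goOffline();
db.goOnline();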
FirebaseApp will always succeed\n const app = container.getProvider('app-compat').getImmediate();\n const databaseExp = container\n .getProvider('database')\n .getImmediate({ identifier: url });\n return new Database(databaseExp, app);\n },\n ComponentType.PUBLIC\n )\n .setServiceProps(\n // firebase.database namespace properties\n {\n Reference,\n Query,\n Database,\n DataSnapshot,\n enableLogging,\n INTERNAL,\n ServerValue\n }\n )\n .setMultipleInstances(true)\n );\n\n instance.registerVersion(name, version);\n}\n\nregisterDatabase(firebase);\n\ndeclare module '@firebase/app-compat' {\n interface FirebaseNamespace {\n database?: {\n (app?: FirebaseApp): types.FirebaseDatabase;\n enableLogging: typeof types.enableLogging;\n ServerValue: types.ServerValue;\n Database: typeof types.FirebaseDatabase;\n };\n }\n interface FirebaseApp {\n database?(databaseURL?: string): types.FirebaseDatabase;\n }\n}\n","/**\n * @license\n * Copyright 2017 Google LLC\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport {\n AppCheckInternalComponentName,\n FirebaseAppCheckInternal\n} from '@firebase/app-check-interop-types';\nimport { FirebaseApp } from '@firebase/app-types';\nimport {\n FirebaseAuthInternal,\n FirebaseAuthInternalName\n} from '@firebase/auth-interop-types';\nimport {\n Component,\n ComponentContainer,\n ComponentType,\n Provider\n} from '@firebase/component';\nimport {\n _repoManagerDatabaseFromApp,\n _setSDKVersion\n} from '@firebase/database';\nimport * as types from '@firebase/database-types';\n\nimport { Database } from './Database';\n\n/**\n * Used by console to create a database based on the app,\n * passed database URL and a custom auth implementation.\n *\n * @param app - A valid FirebaseApp-like object\n * @param url - A valid Firebase databaseURL\n * @param version - custom version e.g. 
firebase-admin version\n * @param customAuthImpl - custom auth implementation\n */\nexport function initStandalone({\n app,\n url,\n version,\n customAuthImpl,\n customAppCheckImpl,\n namespace,\n nodeAdmin = false\n}: {\n app: FirebaseApp;\n url: string;\n version: string;\n customAuthImpl: FirebaseAuthInternal;\n customAppCheckImpl?: FirebaseAppCheckInternal;\n namespace: T;\n nodeAdmin?: boolean;\n}): {\n instance: types.Database;\n namespace: T;\n} {\n _setSDKVersion(version);\n\n const container = new ComponentContainer('database-standalone');\n /**\n * ComponentContainer('database-standalone') is just a placeholder that doesn't perform\n * any actual function.\n */\n const authProvider = new Provider(\n 'auth-internal',\n container\n );\n authProvider.setComponent(\n new Component('auth-internal', () => customAuthImpl, ComponentType.PRIVATE)\n );\n\n let appCheckProvider: Provider = undefined;\n if (customAppCheckImpl) {\n appCheckProvider = new Provider(\n 'app-check-internal',\n container\n );\n appCheckProvider.setComponent(\n new Component(\n 'app-check-internal',\n () => customAppCheckImpl,\n ComponentType.PRIVATE\n )\n );\n }\n\n return {\n instance: new Database(\n _repoManagerDatabaseFromApp(\n app,\n authProvider,\n appCheckProvider,\n url,\n nodeAdmin\n ),\n app\n ) as types.Database,\n namespace\n };\n}\n","/**\n * @license\n * Copyright The Closure Library Authors.\n * SPDX-License-Identifier: Apache-2.0\n */\n\n/**\n * @fileoverview Bootstrap for the Google JS Library (Closure).\n *\n * In uncompiled mode base.js will attempt to load Closure's deps file, unless\n * the global CLOSURE_NO_DEPS is set to true. This allows projects\n * to include their own deps file(s) from different locations.\n *\n * Avoid including base.js more than once. This is strictly discouraged and not\n * supported. goog.require(...) won't work properly in that case.\n *\n * @suppress {deprecated} Users cannot remove deprecated uses here.\n * @provideGoog\n */\n\n\n/**\n * @define {boolean} Overridden to true by the compiler.\n */\nvar COMPILED = false;\n\n\n/**\n * Base namespace for the Closure library. Checks to see goog is already\n * defined in the current scope before assigning to prevent clobbering if\n * base.js is loaded more than once.\n *\n * @const\n */\nvar goog = goog || {};\n\n/**\n * Reference to the global object.\n * https://www.ecma-international.org/ecma-262/9.0/index.html#sec-global-object\n *\n * More info on this implementation here:\n * https://docs.google.com/document/d/1NAeW4Wk7I7FV0Y2tcUFvQdGMc89k2vdgSXInw8_nvCI/edit\n *\n * @const\n * @suppress {undefinedVars} self won't be referenced unless `this` is falsy.\n * @type {!Global}\n */\ngoog.global =\n // Check `this` first for backwards compatibility.\n // Valid unless running as an ES module or in a function wrapper called\n // without setting `this` properly.\n // Note that base.js can't usefully be imported as an ES module, but it may\n // be compiled into bundles that are loadable as ES modules.\n this ||\n // https://developer.mozilla.org/en-US/docs/Web/API/Window/self\n // For in-page browser environments and workers.\n self;\n\n\n/**\n * A hook for overriding the define values in uncompiled mode.\n *\n * In uncompiled mode, `CLOSURE_UNCOMPILED_DEFINES` may be defined before\n * loading base.js. If a key is defined in `CLOSURE_UNCOMPILED_DEFINES`,\n * `goog.define` will use the value instead of the default value. 
This\n * allows flags to be overwritten without compilation (this is normally\n * accomplished with the compiler's \"define\" flag).\n *\n * Example:\n *
<pre>\n *   var CLOSURE_UNCOMPILED_DEFINES = {'goog.DEBUG': false};\n * </pre>
\n *\n * @type {Object|undefined}\n */\ngoog.global.CLOSURE_UNCOMPILED_DEFINES;\n\n\n/**\n * A hook for overriding the define values in uncompiled or compiled mode,\n * like CLOSURE_UNCOMPILED_DEFINES but effective in compiled code. In\n * uncompiled code CLOSURE_UNCOMPILED_DEFINES takes precedence.\n *\n * Also unlike CLOSURE_UNCOMPILED_DEFINES the values must be number, boolean or\n * string literals or the compiler will emit an error.\n *\n * While any @define value may be set, only those set with goog.define will be\n * effective for uncompiled code.\n *\n * Example:\n *
<pre>\n *   var CLOSURE_DEFINES = {'goog.DEBUG': false} ;\n * </pre>
\n *\n * Currently the Closure Compiler will only recognize very simple definitions of\n * this value when looking for values to apply to compiled code and ignore all\n * other references. Specifically, it looks the value defined at the variable\n * declaration, as with the example above.\n *\n * TODO(user): Improve the recognized definitions.\n *\n * @type {!Object|null|undefined}\n */\ngoog.global.CLOSURE_DEFINES;\n\n\n/**\n * Builds an object structure for the provided namespace path, ensuring that\n * names that already exist are not overwritten. For example:\n * \"a.b.c\" -> a = {};a.b={};a.b.c={};\n * Used by goog.provide and goog.exportSymbol.\n * @param {string} name The name of the object that this file defines.\n * @param {*=} object The object to expose at the end of the path.\n * @param {boolean=} overwriteImplicit If object is set and a previous call\n * implicitly constructed the namespace given by name, this parameter\n * controls whether object should overwrite the implicitly constructed\n * namespace or be merged into it. Defaults to false.\n * @param {?Object=} objectToExportTo The object to add the path to; if this\n * field is not specified, its value defaults to `goog.global`.\n * @private\n */\ngoog.exportPath_ = function(name, object, overwriteImplicit, objectToExportTo) {\n var parts = name.split('.');\n var cur = objectToExportTo || goog.global;\n\n // Internet Explorer exhibits strange behavior when throwing errors from\n // methods externed in this manner. See the testExportSymbolExceptions in\n // base_test.html for an example.\n if (!(parts[0] in cur) && typeof cur.execScript != 'undefined') {\n cur.execScript('var ' + parts[0]);\n }\n\n for (var part; parts.length && (part = parts.shift());) {\n if (!parts.length && object !== undefined) {\n if (!overwriteImplicit && goog.isObject(object) &&\n goog.isObject(cur[part])) {\n // Merge properties on object (the input parameter) with the existing\n // implicitly defined namespace, so as to not clobber previously\n // defined child namespaces.\n for (var prop in object) {\n if (object.hasOwnProperty(prop)) {\n cur[part][prop] = object[prop];\n }\n }\n } else {\n // Either there is no existing implicit namespace, or overwriteImplicit\n // is set to true, so directly assign object (the input parameter) to\n // the namespace.\n cur[part] = object;\n }\n } else if (cur[part] && cur[part] !== Object.prototype[part]) {\n cur = cur[part];\n } else {\n cur = cur[part] = {};\n }\n }\n};\n\n\n/**\n * Defines a named value. In uncompiled mode, the value is retrieved from\n * CLOSURE_DEFINES or CLOSURE_UNCOMPILED_DEFINES if the object is defined and\n * has the property specified, and otherwise used the defined defaultValue.\n * When compiled the default can be overridden using the compiler options or the\n * value set in the CLOSURE_DEFINES object. Returns the defined value so that it\n * can be used safely in modules. 
Note that the value type MUST be either\n * boolean, number, or string.\n *\n * @param {string} name The distinguished name to provide.\n * @param {T} defaultValue\n * @return {T} The defined value.\n * @template T\n */\ngoog.define = function(name, defaultValue) {\n var value = defaultValue;\n if (!COMPILED) {\n var uncompiledDefines = goog.global.CLOSURE_UNCOMPILED_DEFINES;\n var defines = goog.global.CLOSURE_DEFINES;\n if (uncompiledDefines &&\n // Anti DOM-clobbering runtime check (b/37736576).\n /** @type {?} */ (uncompiledDefines).nodeType === undefined &&\n Object.prototype.hasOwnProperty.call(uncompiledDefines, name)) {\n value = uncompiledDefines[name];\n } else if (\n defines &&\n // Anti DOM-clobbering runtime check (b/37736576).\n /** @type {?} */ (defines).nodeType === undefined &&\n Object.prototype.hasOwnProperty.call(defines, name)) {\n value = defines[name];\n }\n }\n return value;\n};\n\n\n/**\n * @define {number} Integer year indicating the set of browser features that are\n * guaranteed to be present. This is defined to include exactly features that\n * work correctly on all \"modern\" browsers that are stable on January 1 of the\n * specified year. For example,\n * ```js\n * if (goog.FEATURESET_YEAR >= 2019) {\n * // use APIs known to be available on all major stable browsers Jan 1, 2019\n * } else {\n * // polyfill for older browsers\n * }\n * ```\n * This is intended to be the primary define for removing\n * unnecessary browser compatibility code (such as ponyfills and workarounds),\n * and should inform the default value for most other defines:\n * ```js\n * const ASSUME_NATIVE_PROMISE =\n * goog.define('ASSUME_NATIVE_PROMISE', goog.FEATURESET_YEAR >= 2016);\n * ```\n *\n * The default assumption is that IE9 is the lowest supported browser, which was\n * first available Jan 1, 2012.\n *\n * TODO(user): Reference more thorough documentation when it's available.\n */\ngoog.FEATURESET_YEAR = goog.define('goog.FEATURESET_YEAR', 2012);\n\n\n/**\n * @define {boolean} DEBUG is provided as a convenience so that debugging code\n * that should not be included in a production. It can be easily stripped\n * by specifying --define goog.DEBUG=false to the Closure Compiler aka\n * JSCompiler. For example, most toString() methods should be declared inside an\n * \"if (goog.DEBUG)\" conditional because they are generally used for debugging\n * purposes and it is difficult for the JSCompiler to statically determine\n * whether they are used.\n */\ngoog.DEBUG = goog.define('goog.DEBUG', true);\n\n\n/**\n * @define {string} LOCALE defines the locale being used for compilation. It is\n * used to select locale specific data to be compiled in js binary. BUILD rule\n * can specify this value by \"--define goog.LOCALE=\" as a compiler\n * option.\n *\n * Take into account that the locale code format is important. You should use\n * the canonical Unicode format with hyphen as a delimiter. Language must be\n * lowercase, Language Script - Capitalized, Region - UPPERCASE.\n * There are few examples: pt-BR, en, en-US, sr-Latin-BO, zh-Hans-CN.\n *\n * See more info about locale codes here:\n * http://www.unicode.org/reports/tr35/#Unicode_Language_and_Locale_Identifiers\n *\n * For language codes you should use values defined by ISO 693-1. See it here\n * http://www.w3.org/WAI/ER/IG/ert/iso639.htm. There is only one exception from\n * this rule: the Hebrew language. 
For legacy reasons the old code (iw) should\n * be used instead of the new code (he).\n *\n */\ngoog.LOCALE = goog.define('goog.LOCALE', 'en'); // default to en\n\n\n/**\n * @define {boolean} Whether this code is running on trusted sites.\n *\n * On untrusted sites, several native functions can be defined or overridden by\n * external libraries like Prototype, Datejs, and JQuery and setting this flag\n * to false forces closure to use its own implementations when possible.\n *\n * If your JavaScript can be loaded by a third party site and you are wary about\n * relying on non-standard implementations, specify\n * \"--define goog.TRUSTED_SITE=false\" to the compiler.\n */\ngoog.TRUSTED_SITE = goog.define('goog.TRUSTED_SITE', true);\n\n\n/**\n * @define {boolean} Whether code that calls {@link goog.setTestOnly} should\n * be disallowed in the compilation unit.\n */\ngoog.DISALLOW_TEST_ONLY_CODE =\n goog.define('goog.DISALLOW_TEST_ONLY_CODE', COMPILED && !goog.DEBUG);\n\n\n/**\n * @define {boolean} Whether to use a Chrome app CSP-compliant method for\n * loading scripts via goog.require. @see appendScriptSrcNode_.\n */\ngoog.ENABLE_CHROME_APP_SAFE_SCRIPT_LOADING =\n goog.define('goog.ENABLE_CHROME_APP_SAFE_SCRIPT_LOADING', false);\n\n\n/**\n * Defines a namespace in Closure.\n *\n * A namespace may only be defined once in a codebase. It may be defined using\n * goog.provide() or goog.module().\n *\n * The presence of one or more goog.provide() calls in a file indicates\n * that the file defines the given objects/namespaces.\n * Provided symbols must not be null or undefined.\n *\n * In addition, goog.provide() creates the object stubs for a namespace\n * (for example, goog.provide(\"goog.foo.bar\") will create the object\n * goog.foo.bar if it does not already exist).\n *\n * Build tools also scan for provide/require/module statements\n * to discern dependencies, build dependency files (see deps.js), etc.\n *\n * @see goog.require\n * @see goog.module\n * @param {string} name Namespace provided by this file in the form\n * \"goog.package.part\".\n * deprecated Use goog.module (see b/159289405)\n */\ngoog.provide = function(name) {\n if (goog.isInModuleLoader_()) {\n throw new Error('goog.provide cannot be used within a module.');\n }\n if (!COMPILED) {\n // Ensure that the same namespace isn't provided twice.\n // A goog.module/goog.provide maps a goog.require to a specific file\n if (goog.isProvided_(name)) {\n throw new Error('Namespace \"' + name + '\" already declared.');\n }\n }\n\n goog.constructNamespace_(name);\n};\n\n\n/**\n * @param {string} name Namespace provided by this file in the form\n * \"goog.package.part\".\n * @param {?Object=} object The object to embed in the namespace.\n * @param {boolean=} overwriteImplicit If object is set and a previous call\n * implicitly constructed the namespace given by name, this parameter\n * controls whether opt_obj should overwrite the implicitly constructed\n * namespace or be merged into it. 
Defaults to false.\n * @private\n */\ngoog.constructNamespace_ = function(name, object, overwriteImplicit) {\n if (!COMPILED) {\n delete goog.implicitNamespaces_[name];\n\n var namespace = name;\n while ((namespace = namespace.substring(0, namespace.lastIndexOf('.')))) {\n if (goog.getObjectByName(namespace)) {\n break;\n }\n goog.implicitNamespaces_[namespace] = true;\n }\n }\n\n goog.exportPath_(name, object, overwriteImplicit);\n};\n\n\n/**\n * According to the CSP3 spec a nonce must be a valid base64 string.\n * @see https://www.w3.org/TR/CSP3/#grammardef-base64-value\n * @private @const\n */\ngoog.NONCE_PATTERN_ = /^[\\w+/_-]+[=]{0,2}$/;\n\n\n/**\n * Returns CSP nonce, if set for any script tag.\n * @param {?Window=} opt_window The window context used to retrieve the nonce.\n * Defaults to global context.\n * @return {string} CSP nonce or empty string if no nonce is present.\n * @private\n */\ngoog.getScriptNonce_ = function(opt_window) {\n var doc = (opt_window || goog.global).document;\n var script = doc.querySelector && doc.querySelector('script[nonce]');\n if (script) {\n // Try to get the nonce from the IDL property first, because browsers that\n // implement additional nonce protection features (currently only Chrome) to\n // prevent nonce stealing via CSS do not expose the nonce via attributes.\n // See https://github.com/whatwg/html/issues/2369\n var nonce = script['nonce'] || script.getAttribute('nonce');\n if (nonce && goog.NONCE_PATTERN_.test(nonce)) {\n return nonce;\n }\n }\n return '';\n};\n\n\n/**\n * Module identifier validation regexp.\n * Note: This is a conservative check, it is very possible to be more lenient,\n * the primary exclusion here is \"/\" and \"\\\" and a leading \".\", these\n * restrictions are intended to leave the door open for using goog.require\n * with relative file paths rather than module identifiers.\n * @private\n */\ngoog.VALID_MODULE_RE_ = /^[a-zA-Z_$][a-zA-Z0-9._$]*$/;\n\n\n/**\n * Defines a module in Closure.\n *\n * Marks that this file must be loaded as a module and claims the namespace.\n *\n * A namespace may only be defined once in a codebase. It may be defined using\n * goog.provide() or goog.module().\n *\n * goog.module() has three requirements:\n * - goog.module may not be used in the same file as goog.provide.\n * - goog.module must be the first statement in the file.\n * - only one goog.module is allowed per file.\n *\n * When a goog.module annotated file is loaded, it is enclosed in\n * a strict function closure. This means that:\n * - any variables declared in a goog.module file are private to the file\n * (not global), though the compiler is expected to inline the module.\n * - The code must obey all the rules of \"strict\" JavaScript.\n * - the file will be marked as \"use strict\"\n *\n * NOTE: unlike goog.provide, goog.module does not declare any symbols by\n * itself. If declared symbols are desired, use\n * goog.module.declareLegacyNamespace().\n *\n *\n * See the public goog.module proposal: http://goo.gl/Va1hin\n *\n * @param {string} name Namespace provided by this file in the form\n * \"goog.package.part\", is expected but not required.\n * @return {void}\n */\ngoog.module = function(name) {\n if (typeof name !== 'string' || !name ||\n name.search(goog.VALID_MODULE_RE_) == -1) {\n throw new Error('Invalid module identifier');\n }\n if (!goog.isInGoogModuleLoader_()) {\n throw new Error(\n 'Module ' + name + ' has been loaded incorrectly. Note, ' +\n 'modules cannot be loaded as normal scripts. 
They require some kind of ' +\n 'pre-processing step. You\\'re likely trying to load a module via a ' +\n 'script tag or as a part of a concatenated bundle without rewriting the ' +\n 'module. For more info see: ' +\n 'https://github.com/google/closure-library/wiki/goog.module:-an-ES6-module-like-alternative-to-goog.provide.');\n }\n if (goog.moduleLoaderState_.moduleName) {\n throw new Error('goog.module may only be called once per module.');\n }\n\n // Store the module name for the loader.\n goog.moduleLoaderState_.moduleName = name;\n if (!COMPILED) {\n // Ensure that the same namespace isn't provided twice.\n // A goog.module/goog.provide maps a goog.require to a specific file\n if (goog.isProvided_(name)) {\n throw new Error('Namespace \"' + name + '\" already declared.');\n }\n delete goog.implicitNamespaces_[name];\n }\n};\n\n\n/**\n * @param {string} name The module identifier.\n * @return {?} The module exports for an already loaded module or null.\n *\n * Note: This is not an alternative to goog.require, it does not\n * indicate a hard dependency, instead it is used to indicate\n * an optional dependency or to access the exports of a module\n * that has already been loaded.\n * @suppress {missingProvide}\n */\ngoog.module.get = function(name) {\n return goog.module.getInternal_(name);\n};\n\n\n/**\n * @param {string} name The module identifier.\n * @return {?} The module exports for an already loaded module or null.\n * @private\n */\ngoog.module.getInternal_ = function(name) {\n if (!COMPILED) {\n if (name in goog.loadedModules_) {\n return goog.loadedModules_[name].exports;\n } else if (!goog.implicitNamespaces_[name]) {\n var ns = goog.getObjectByName(name);\n return ns != null ? ns : null;\n }\n }\n return null;\n};\n\n/**\n * Types of modules the debug loader can load.\n * @enum {string}\n */\ngoog.ModuleType = {\n ES6: 'es6',\n GOOG: 'goog'\n};\n\n\n/**\n * @private {?{\n * moduleName: (string|undefined),\n * declareLegacyNamespace:boolean,\n * type: ?goog.ModuleType\n * }}\n */\ngoog.moduleLoaderState_ = null;\n\n\n/**\n * @private\n * @return {boolean} Whether a goog.module or an es6 module is currently being\n * initialized.\n */\ngoog.isInModuleLoader_ = function() {\n return goog.isInGoogModuleLoader_() || goog.isInEs6ModuleLoader_();\n};\n\n\n/**\n * @private\n * @return {boolean} Whether a goog.module is currently being initialized.\n */\ngoog.isInGoogModuleLoader_ = function() {\n return !!goog.moduleLoaderState_ &&\n goog.moduleLoaderState_.type == goog.ModuleType.GOOG;\n};\n\n\n/**\n * @private\n * @return {boolean} Whether an es6 module is currently being initialized.\n */\ngoog.isInEs6ModuleLoader_ = function() {\n var inLoader = !!goog.moduleLoaderState_ &&\n goog.moduleLoaderState_.type == goog.ModuleType.ES6;\n\n if (inLoader) {\n return true;\n }\n\n var jscomp = goog.global['$jscomp'];\n\n if (jscomp) {\n // jscomp may not have getCurrentModulePath if this is a compiled bundle\n // that has some of the runtime, but not all of it. This can happen if\n // optimizations are turned on so the unused runtime is removed but renaming\n // and Closure pass are off (so $jscomp is still named $jscomp and the\n // goog.provide/require calls still exist).\n if (typeof jscomp.getCurrentModulePath != 'function') {\n return false;\n }\n\n // Bundled ES6 module.\n return !!jscomp.getCurrentModulePath();\n }\n\n return false;\n};\n\n\n/**\n * Provide the module's exports as a globally accessible object under the\n * module's declared name. 
This is intended to ease migration to goog.module\n * for files that have existing usages.\n * @suppress {missingProvide}\n */\ngoog.module.declareLegacyNamespace = function() {\n if (!COMPILED && !goog.isInGoogModuleLoader_()) {\n throw new Error(\n 'goog.module.declareLegacyNamespace must be called from ' +\n 'within a goog.module');\n }\n if (!COMPILED && !goog.moduleLoaderState_.moduleName) {\n throw new Error(\n 'goog.module must be called prior to ' +\n 'goog.module.declareLegacyNamespace.');\n }\n goog.moduleLoaderState_.declareLegacyNamespace = true;\n};\n\n\n/**\n * Associates an ES6 module with a Closure module ID so that is available via\n * goog.require. The associated ID acts like a goog.module ID - it does not\n * create any global names, it is merely available via goog.require /\n * goog.module.get / goog.forwardDeclare / goog.requireType. goog.require and\n * goog.module.get will return the entire module as if it was import *'d. This\n * allows Closure files to reference ES6 modules for the sake of migration.\n *\n * @param {string} namespace\n * @suppress {missingProvide}\n */\ngoog.declareModuleId = function(namespace) {\n if (!COMPILED) {\n if (!goog.isInEs6ModuleLoader_()) {\n throw new Error(\n 'goog.declareModuleId may only be called from ' +\n 'within an ES6 module');\n }\n if (goog.moduleLoaderState_ && goog.moduleLoaderState_.moduleName) {\n throw new Error(\n 'goog.declareModuleId may only be called once per module.');\n }\n if (namespace in goog.loadedModules_) {\n throw new Error(\n 'Module with namespace \"' + namespace + '\" already exists.');\n }\n }\n if (goog.moduleLoaderState_) {\n // Not bundled - debug loading.\n goog.moduleLoaderState_.moduleName = namespace;\n } else {\n // Bundled - not debug loading, no module loader state.\n var jscomp = goog.global['$jscomp'];\n if (!jscomp || typeof jscomp.getCurrentModulePath != 'function') {\n throw new Error(\n 'Module with namespace \"' + namespace +\n '\" has been loaded incorrectly.');\n }\n var exports = jscomp.require(jscomp.getCurrentModulePath());\n goog.loadedModules_[namespace] = {\n exports: exports,\n type: goog.ModuleType.ES6,\n moduleId: namespace\n };\n }\n};\n\n\n/**\n * Marks that the current file should only be used for testing, and never for\n * live code in production.\n *\n * In the case of unit tests, the message may optionally be an exact namespace\n * for the test (e.g. 'goog.stringTest'). The linter will then ignore the extra\n * provide (if not explicitly defined in the code).\n *\n * @param {string=} opt_message Optional message to add to the error that's\n * raised when used in production code.\n */\ngoog.setTestOnly = function(opt_message) {\n if (goog.DISALLOW_TEST_ONLY_CODE) {\n opt_message = opt_message || '';\n throw new Error(\n 'Importing test-only code into non-debug environment' +\n (opt_message ? ': ' + opt_message : '.'));\n }\n};\n\n\n/**\n * Forward declares a symbol. This is an indication to the compiler that the\n * symbol may be used in the source yet is not required and may not be provided\n * in compilation.\n *\n * The most common usage of forward declaration is code that takes a type as a\n * function parameter but does not need to require it. By forward declaring\n * instead of requiring, no hard dependency is made, and (if not required\n * elsewhere) the namespace may never be required and thus, not be pulled\n * into the JavaScript binary. 
If it is required elsewhere, it will be type\n * checked as normal.\n *\n * Before using goog.forwardDeclare, please read the documentation at\n * https://github.com/google/closure-compiler/wiki/Bad-Type-Annotation to\n * understand the options and tradeoffs when working with forward declarations.\n *\n * @param {string} name The namespace to forward declare in the form of\n * \"goog.package.part\".\n * @deprecated See go/noforwarddeclaration, Use `goog.requireType` instead.\n */\ngoog.forwardDeclare = function(name) {};\n\n\n/**\n * Forward declare type information. Used to assign types to goog.global\n * referenced object that would otherwise result in unknown type references\n * and thus block property disambiguation.\n */\ngoog.forwardDeclare('Document');\ngoog.forwardDeclare('HTMLScriptElement');\ngoog.forwardDeclare('XMLHttpRequest');\n\n\nif (!COMPILED) {\n /**\n * Check if the given name has been goog.provided. This will return false for\n * names that are available only as implicit namespaces.\n * @param {string} name name of the object to look for.\n * @return {boolean} Whether the name has been provided.\n * @private\n */\n goog.isProvided_ = function(name) {\n return (name in goog.loadedModules_) ||\n (!goog.implicitNamespaces_[name] && goog.getObjectByName(name) != null);\n };\n\n /**\n * Namespaces implicitly defined by goog.provide. For example,\n * goog.provide('goog.events.Event') implicitly declares that 'goog' and\n * 'goog.events' must be namespaces.\n *\n * @type {!Object}\n * @private\n */\n goog.implicitNamespaces_ = {'goog.module': true};\n\n // NOTE: We add goog.module as an implicit namespace as goog.module is defined\n // here and because the existing module package has not been moved yet out of\n // the goog.module namespace. This satisifies both the debug loader and\n // ahead-of-time dependency management.\n}\n\n\n/**\n * Returns an object based on its fully qualified external name. The object\n * is not found if null or undefined. If you are using a compilation pass that\n * renames property names beware that using this function will not find renamed\n * properties.\n *\n * @param {string} name The fully qualified name.\n * @param {Object=} opt_obj The object within which to look; default is\n * |goog.global|.\n * @return {?} The value (object or primitive) or, if not found, null.\n */\ngoog.getObjectByName = function(name, opt_obj) {\n var parts = name.split('.');\n var cur = opt_obj || goog.global;\n for (var i = 0; i < parts.length; i++) {\n cur = cur[parts[i]];\n if (cur == null) {\n return null;\n }\n }\n return cur;\n};\n\n\n/**\n * Adds a dependency from a file to the files it requires.\n * @param {string} relPath The path to the js file.\n * @param {!Array} provides An array of strings with\n * the names of the objects this file provides.\n * @param {!Array} requires An array of strings with\n * the names of the objects this file requires.\n * @param {boolean|!Object=} opt_loadFlags Parameters indicating\n * how the file must be loaded. The boolean 'true' is equivalent\n * to {'module': 'goog'} for backwards-compatibility. Valid properties\n * and values include {'module': 'goog'} and {'lang': 'es6'}.\n */\ngoog.addDependency = function(relPath, provides, requires, opt_loadFlags) {\n if (!COMPILED && goog.DEPENDENCIES_ENABLED) {\n goog.debugLoader_.addDependency(relPath, provides, requires, opt_loadFlags);\n }\n};\n\n\n// NOTE(nnaze): The debug DOM loader was included in base.js as an original way\n// to do \"debug-mode\" development. 
The dependency system can sometimes be\n// confusing, as can the debug DOM loader's asynchronous nature.\n//\n// With the DOM loader, a call to goog.require() is not blocking -- the script\n// will not load until some point after the current script. If a namespace is\n// needed at runtime, it needs to be defined in a previous script, or loaded via\n// require() with its registered dependencies.\n//\n// User-defined namespaces may need their own deps file. For a reference on\n// creating a deps file, see:\n// Externally: https://developers.google.com/closure/library/docs/depswriter\n//\n// Because of legacy clients, the DOM loader can't be easily removed from\n// base.js. Work was done to make it disableable or replaceable for\n// different environments (DOM-less JavaScript interpreters like Rhino or V8,\n// for example). See bootstrap/ for more information.\n\n\n/**\n * @define {boolean} Whether to enable the debug loader.\n *\n * If enabled, a call to goog.require() will attempt to load the namespace by\n * appending a script tag to the DOM (if the namespace has been registered).\n *\n * If disabled, goog.require() will simply assert that the namespace has been\n * provided (and depend on the fact that some outside tool correctly ordered\n * the script).\n */\ngoog.ENABLE_DEBUG_LOADER = goog.define('goog.ENABLE_DEBUG_LOADER', false);\n\n\n/**\n * @param {string} msg\n * @private\n */\ngoog.logToConsole_ = function(msg) {\n if (goog.global.console) {\n goog.global.console['error'](msg);\n }\n};\n\n\n/**\n * Implements a system for the dynamic resolution of dependencies that works in\n * parallel with the BUILD system.\n *\n * Note that all calls to goog.require will be stripped by the compiler.\n *\n * @see goog.provide\n * @param {string} namespace Namespace (as was given in goog.provide,\n * goog.module, or goog.declareModuleId) in the form\n * \"goog.package.part\".\n * @return {?} If called within a goog.module or ES6 module file, the associated\n * namespace or module otherwise null.\n */\ngoog.require = function(namespace) {\n if (!COMPILED) {\n // Might need to lazy load on old IE.\n if (goog.ENABLE_DEBUG_LOADER) {\n goog.debugLoader_.requested(namespace);\n }\n\n // If the object already exists we do not need to do anything.\n if (goog.isProvided_(namespace)) {\n if (goog.isInModuleLoader_()) {\n return goog.module.getInternal_(namespace);\n }\n } else if (goog.ENABLE_DEBUG_LOADER) {\n var moduleLoaderState = goog.moduleLoaderState_;\n goog.moduleLoaderState_ = null;\n try {\n goog.debugLoader_.load_(namespace);\n } finally {\n goog.moduleLoaderState_ = moduleLoaderState;\n }\n }\n\n return null;\n }\n};\n\n\n/**\n * Requires a symbol for its type information. This is an indication to the\n * compiler that the symbol may appear in type annotations, yet it is not\n * referenced at runtime.\n *\n * When called within a goog.module or ES6 module file, the return value may be\n * assigned to or destructured into a variable, but it may not be otherwise used\n * in code outside of a type annotation.\n *\n * Note that all calls to goog.requireType will be stripped by the compiler.\n *\n * @param {string} namespace Namespace (as was given in goog.provide,\n * goog.module, or goog.declareModuleId) in the form\n * \"goog.package.part\".\n * @return {?}\n */\ngoog.requireType = function(namespace) {\n // Return an empty object so that single-level destructuring of the return\n // value doesn't crash at runtime when using the debug loader. 
Multi-level\n // destructuring isn't supported.\n return {};\n};\n\n\n/**\n * Path for included scripts.\n * @type {string}\n */\ngoog.basePath = '';\n\n\n/**\n * A hook for overriding the base path.\n * @type {string|undefined}\n */\ngoog.global.CLOSURE_BASE_PATH;\n\n\n/**\n * Whether to attempt to load Closure's deps file. By default, when uncompiled,\n * deps files will attempt to be loaded.\n * @type {boolean|undefined}\n */\ngoog.global.CLOSURE_NO_DEPS;\n\n\n/**\n * A function to import a single script. This is meant to be overridden when\n * Closure is being run in non-HTML contexts, such as web workers. It's defined\n * in the global scope so that it can be set before base.js is loaded, which\n * allows deps.js to be imported properly.\n *\n * The first parameter the script source, which is a relative URI. The second,\n * optional parameter is the script contents, in the event the script needed\n * transformation. It should return true if the script was imported, false\n * otherwise.\n * @type {(function(string, string=): boolean)|undefined}\n */\ngoog.global.CLOSURE_IMPORT_SCRIPT;\n\n\n/**\n * When defining a class Foo with an abstract method bar(), you can do:\n * Foo.prototype.bar = goog.abstractMethod\n *\n * Now if a subclass of Foo fails to override bar(), an error will be thrown\n * when bar() is invoked.\n *\n * @type {!Function}\n * @throws {Error} when invoked to indicate the method should be overridden.\n * @deprecated Use \"@abstract\" annotation instead of goog.abstractMethod in new\n * code. See\n * https://github.com/google/closure-compiler/wiki/@abstract-classes-and-methods\n */\ngoog.abstractMethod = function() {\n throw new Error('unimplemented abstract method');\n};\n\n\n/**\n * Adds a `getInstance` static method that always returns the same\n * instance object.\n * @param {!Function} ctor The constructor for the class to add the static\n * method to.\n * @suppress {missingProperties} 'instance_' isn't a property on 'Function'\n * but we don't have a better type to use here.\n */\ngoog.addSingletonGetter = function(ctor) {\n // instance_ is immediately set to prevent issues with sealed constructors\n // such as are encountered when a constructor is returned as the export object\n // of a goog.module in unoptimized code.\n // Delcare type to avoid conformance violations that ctor.instance_ is unknown\n /** @type {undefined|!Object} @suppress {underscore} */\n ctor.instance_ = undefined;\n ctor.getInstance = function() {\n if (ctor.instance_) {\n return ctor.instance_;\n }\n if (goog.DEBUG) {\n // NOTE: JSCompiler can't optimize away Array#push.\n goog.instantiatedSingletons_[goog.instantiatedSingletons_.length] = ctor;\n }\n // Cast to avoid conformance violations that ctor.instance_ is unknown\n return /** @type {!Object|undefined} */ (ctor.instance_) = new ctor;\n };\n};\n\n\n/**\n * All singleton classes that have been instantiated, for testing. Don't read\n * it directly, use the `goog.testing.singleton` module. The compiler\n * removes this variable if unused.\n * @type {!Array}\n * @private\n */\ngoog.instantiatedSingletons_ = [];\n\n\n/**\n * @define {boolean} Whether to load goog.modules using `eval` when using\n * the debug loader. 
This provides a better debugging experience as the\n * source is unmodified and can be edited using Chrome Workspaces or similar.\n * However in some environments the use of `eval` is banned\n * so we provide an alternative.\n */\ngoog.LOAD_MODULE_USING_EVAL = goog.define('goog.LOAD_MODULE_USING_EVAL', true);\n\n\n/**\n * @define {boolean} Whether the exports of goog.modules should be sealed when\n * possible.\n */\ngoog.SEAL_MODULE_EXPORTS = goog.define('goog.SEAL_MODULE_EXPORTS', goog.DEBUG);\n\n\n/**\n * The registry of initialized modules:\n * The module identifier or path to module exports map.\n * @private @const {!Object}\n */\ngoog.loadedModules_ = {};\n\n\n/**\n * True if the debug loader enabled and used.\n * @const {boolean}\n */\ngoog.DEPENDENCIES_ENABLED = !COMPILED && goog.ENABLE_DEBUG_LOADER;\n\n\n/**\n * @define {string} How to decide whether to transpile. Valid values\n * are 'always', 'never', and 'detect'. The default ('detect') is to\n * use feature detection to determine which language levels need\n * transpilation.\n */\n// NOTE(sdh): we could expand this to accept a language level to bypass\n// detection: e.g. goog.TRANSPILE == 'es5' would transpile ES6 files but\n// would leave ES3 and ES5 files alone.\ngoog.TRANSPILE = goog.define('goog.TRANSPILE', 'detect');\n\n/**\n * @define {boolean} If true assume that ES modules have already been\n * transpiled by the jscompiler (in the same way that transpile.js would\n * transpile them - to jscomp modules). Useful only for servers that wish to use\n * the debug loader and transpile server side. Thus this is only respected if\n * goog.TRANSPILE is \"never\".\n */\ngoog.ASSUME_ES_MODULES_TRANSPILED =\n goog.define('goog.ASSUME_ES_MODULES_TRANSPILED', false);\n\n\n/**\n * @define {string} Trusted Types policy name. 
If non-empty then Closure will\n * use Trusted Types.\n */\ngoog.TRUSTED_TYPES_POLICY_NAME =\n goog.define('goog.TRUSTED_TYPES_POLICY_NAME', 'goog');\n\n\n/**\n * @package {?boolean}\n * Visible for testing.\n */\ngoog.hasBadLetScoping = null;\n\n\n/**\n * @param {function(?):?|string} moduleDef The module definition.\n */\ngoog.loadModule = function(moduleDef) {\n // NOTE: we allow function definitions to be either in the from\n // of a string to eval (which keeps the original source intact) or\n // in a eval forbidden environment (CSP) we allow a function definition\n // which in its body must call `goog.module`, and return the exports\n // of the module.\n var previousState = goog.moduleLoaderState_;\n try {\n goog.moduleLoaderState_ = {\n moduleName: '',\n declareLegacyNamespace: false,\n type: goog.ModuleType.GOOG\n };\n var origExports = {};\n var exports = origExports;\n if (typeof moduleDef === 'function') {\n exports = moduleDef.call(undefined, exports);\n } else if (typeof moduleDef === 'string') {\n exports = goog.loadModuleFromSource_.call(undefined, exports, moduleDef);\n } else {\n throw new Error('Invalid module definition');\n }\n\n var moduleName = goog.moduleLoaderState_.moduleName;\n if (typeof moduleName === 'string' && moduleName) {\n // Don't seal legacy namespaces as they may be used as a parent of\n // another namespace\n if (goog.moduleLoaderState_.declareLegacyNamespace) {\n // Whether exports was overwritten via default export assignment.\n // This is important for legacy namespaces as it dictates whether\n // previously a previously loaded implicit namespace should be clobbered\n // or not.\n var isDefaultExport = origExports !== exports;\n goog.constructNamespace_(moduleName, exports, isDefaultExport);\n } else if (\n goog.SEAL_MODULE_EXPORTS && Object.seal &&\n typeof exports == 'object' && exports != null) {\n Object.seal(exports);\n }\n\n var data = {\n exports: exports,\n type: goog.ModuleType.GOOG,\n moduleId: goog.moduleLoaderState_.moduleName\n };\n goog.loadedModules_[moduleName] = data;\n } else {\n throw new Error('Invalid module name \\\"' + moduleName + '\\\"');\n }\n } finally {\n goog.moduleLoaderState_ = previousState;\n }\n};\n\n\n/**\n * @private @const\n */\ngoog.loadModuleFromSource_ =\n /** @type {function(!Object, string):?} */ (function(exports) {\n // NOTE: we avoid declaring parameters or local variables here to avoid\n // masking globals or leaking values into the module definition.\n 'use strict';\n eval(goog.CLOSURE_EVAL_PREFILTER_.createScript(arguments[1]));\n return exports;\n });\n\n\n/**\n * Normalize a file path by removing redundant \"..\" and extraneous \".\" file\n * path components.\n * @param {string} path\n * @return {string}\n * @private\n */\ngoog.normalizePath_ = function(path) {\n var components = path.split('/');\n var i = 0;\n while (i < components.length) {\n if (components[i] == '.') {\n components.splice(i, 1);\n } else if (\n i && components[i] == '..' && components[i - 1] &&\n components[i - 1] != '..') {\n components.splice(--i, 2);\n } else {\n i++;\n }\n }\n return components.join('/');\n};\n\n\n/**\n * Provides a hook for loading a file when using Closure's goog.require() API\n * with goog.modules. In particular this hook is provided to support Node.js.\n *\n * @type {(function(string):string)|undefined}\n */\ngoog.global.CLOSURE_LOAD_FILE_SYNC;\n\n\n/**\n * Loads file by synchronous XHR. 
Should not be used in production environments.\n * @param {string} src Source URL.\n * @return {?string} File contents, or null if load failed.\n * @private\n */\ngoog.loadFileSync_ = function(src) {\n if (goog.global.CLOSURE_LOAD_FILE_SYNC) {\n return goog.global.CLOSURE_LOAD_FILE_SYNC(src);\n } else {\n try {\n /** @type {XMLHttpRequest} */\n var xhr = new goog.global['XMLHttpRequest']();\n xhr.open('get', src, false);\n xhr.send();\n // NOTE: Successful http: requests have a status of 200, but successful\n // file: requests may have a status of zero. Any other status, or a\n // thrown exception (particularly in case of file: requests) indicates\n // some sort of error, which we treat as a missing or unavailable file.\n return xhr.status == 0 || xhr.status == 200 ? xhr.responseText : null;\n } catch (err) {\n // No need to rethrow or log, since errors should show up on their own.\n return null;\n }\n }\n};\n\n//==============================================================================\n// Language Enhancements\n//==============================================================================\n\n\n/**\n * This is a \"fixed\" version of the typeof operator. It differs from the typeof\n * operator in such a way that null returns 'null' and arrays return 'array'.\n * @param {?} value The value to get the type of.\n * @return {string} The name of the type.\n */\ngoog.typeOf = function(value) {\n var s = typeof value;\n\n if (s != 'object') {\n return s;\n }\n\n if (!value) {\n return 'null';\n }\n\n if (Array.isArray(value)) {\n return 'array';\n }\n return s;\n};\n\n\n/**\n * Returns true if the object looks like an array. To qualify as array like\n * the value needs to be either a NodeList or an object with a Number length\n * property. Note that for this function neither strings nor functions are\n * considered \"array-like\".\n *\n * @param {?} val Variable to test.\n * @return {boolean} Whether variable is an array.\n */\ngoog.isArrayLike = function(val) {\n var type = goog.typeOf(val);\n // We do not use goog.isObject here in order to exclude function values.\n return type == 'array' || type == 'object' && typeof val.length == 'number';\n};\n\n\n/**\n * Returns true if the object looks like a Date. To qualify as Date-like the\n * value needs to be an object and have a getFullYear() function.\n * @param {?} val Variable to test.\n * @return {boolean} Whether variable is a like a Date.\n */\ngoog.isDateLike = function(val) {\n return goog.isObject(val) && typeof val.getFullYear == 'function';\n};\n\n\n/**\n * Returns true if the specified value is an object. This includes arrays and\n * functions.\n * @param {?} val Variable to test.\n * @return {boolean} Whether variable is an object.\n */\ngoog.isObject = function(val) {\n var type = typeof val;\n return type == 'object' && val != null || type == 'function';\n // return Object(val) === val also works, but is slower, especially if val is\n // not an object.\n};\n\n\n/**\n * Gets a unique ID for an object. This mutates the object so that further calls\n * with the same object as a parameter returns the same value. The unique ID is\n * guaranteed to be unique across the current session amongst objects that are\n * passed into `getUid`. There is no guarantee that the ID is unique or\n * consistent across sessions. 
It is unsafe to generate unique ID for function\n * prototypes.\n *\n * @param {Object} obj The object to get the unique ID for.\n * @return {number} The unique ID for the object.\n */\ngoog.getUid = function(obj) {\n // TODO(arv): Make the type stricter, do not accept null.\n return Object.prototype.hasOwnProperty.call(obj, goog.UID_PROPERTY_) &&\n obj[goog.UID_PROPERTY_] ||\n (obj[goog.UID_PROPERTY_] = ++goog.uidCounter_);\n};\n\n\n/**\n * Whether the given object is already assigned a unique ID.\n *\n * This does not modify the object.\n *\n * @param {!Object} obj The object to check.\n * @return {boolean} Whether there is an assigned unique id for the object.\n */\ngoog.hasUid = function(obj) {\n return !!obj[goog.UID_PROPERTY_];\n};\n\n\n/**\n * Removes the unique ID from an object. This is useful if the object was\n * previously mutated using `goog.getUid` in which case the mutation is\n * undone.\n * @param {Object} obj The object to remove the unique ID field from.\n */\ngoog.removeUid = function(obj) {\n // TODO(arv): Make the type stricter, do not accept null.\n\n // In IE, DOM nodes are not instances of Object and throw an exception if we\n // try to delete. Instead we try to use removeAttribute.\n if (obj !== null && 'removeAttribute' in obj) {\n obj.removeAttribute(goog.UID_PROPERTY_);\n }\n\n try {\n delete obj[goog.UID_PROPERTY_];\n } catch (ex) {\n }\n};\n\n\n/**\n * Name for unique ID property. Initialized in a way to help avoid collisions\n * with other closure JavaScript on the same page.\n * @type {string}\n * @private\n */\ngoog.UID_PROPERTY_ = 'closure_uid_' + ((Math.random() * 1e9) >>> 0);\n\n\n/**\n * Counter for UID.\n * @type {number}\n * @private\n */\ngoog.uidCounter_ = 0;\n\n\n/**\n * Clones a value. The input may be an Object, Array, or basic type. Objects and\n * arrays will be cloned recursively.\n *\n * WARNINGS:\n * goog.cloneObject does not detect reference loops. Objects that\n * refer to themselves will cause infinite recursion.\n *\n * goog.cloneObject is unaware of unique identifiers, and copies\n * UIDs created by getUid into cloned results.\n *\n * @param {*} obj The value to clone.\n * @return {*} A clone of the input value.\n * @deprecated goog.cloneObject is unsafe. Prefer the goog.object methods.\n */\ngoog.cloneObject = function(obj) {\n var type = goog.typeOf(obj);\n if (type == 'object' || type == 'array') {\n if (typeof obj.clone === 'function') {\n return obj.clone();\n }\n if (typeof Map !== 'undefined' && obj instanceof Map) {\n return new Map(obj);\n } else if (typeof Set !== 'undefined' && obj instanceof Set) {\n return new Set(obj);\n }\n var clone = type == 'array' ? 
[] : {};\n for (var key in obj) {\n clone[key] = goog.cloneObject(obj[key]);\n }\n return clone;\n }\n\n return obj;\n};\n\n\n/**\n * A native implementation of goog.bind.\n * @param {?function(this:T, ...)} fn A function to partially apply.\n * @param {T} selfObj Specifies the object which this should point to when the\n * function is run.\n * @param {...*} var_args Additional arguments that are partially applied to the\n * function.\n * @return {!Function} A partially-applied form of the function goog.bind() was\n * invoked as a method of.\n * @template T\n * @private\n */\ngoog.bindNative_ = function(fn, selfObj, var_args) {\n return /** @type {!Function} */ (fn.call.apply(fn.bind, arguments));\n};\n\n\n/**\n * A pure-JS implementation of goog.bind.\n * @param {?function(this:T, ...)} fn A function to partially apply.\n * @param {T} selfObj Specifies the object which this should point to when the\n * function is run.\n * @param {...*} var_args Additional arguments that are partially applied to the\n * function.\n * @return {!Function} A partially-applied form of the function goog.bind() was\n * invoked as a method of.\n * @template T\n * @private\n */\ngoog.bindJs_ = function(fn, selfObj, var_args) {\n if (!fn) {\n throw new Error();\n }\n\n if (arguments.length > 2) {\n var boundArgs = Array.prototype.slice.call(arguments, 2);\n return function() {\n // Prepend the bound arguments to the current arguments.\n var newArgs = Array.prototype.slice.call(arguments);\n Array.prototype.unshift.apply(newArgs, boundArgs);\n return fn.apply(selfObj, newArgs);\n };\n\n } else {\n return function() {\n return fn.apply(selfObj, arguments);\n };\n }\n};\n\n\n/**\n * Partially applies this function to a particular 'this object' and zero or\n * more arguments. The result is a new function with some arguments of the first\n * function pre-filled and the value of this 'pre-specified'.\n *\n * Remaining arguments specified at call-time are appended to the pre-specified\n * ones.\n *\n * Also see: {@link #partial}.\n *\n * Usage:\n *
<pre>var barMethBound = goog.bind(myFunction, myObj, 'arg1', 'arg2');\n * barMethBound('arg3', 'arg4');</pre>
\n *\n * @param {?function(this:T, ...)} fn A function to partially apply.\n * @param {T} selfObj Specifies the object which this should point to when the\n * function is run.\n * @param {...*} var_args Additional arguments that are partially applied to the\n * function.\n * @return {!Function} A partially-applied form of the function goog.bind() was\n * invoked as a method of.\n * @template T\n * @suppress {deprecated} See above.\n * @deprecated use `=> {}` or Function.prototype.bind instead.\n */\ngoog.bind = function(fn, selfObj, var_args) {\n // TODO(nicksantos): narrow the type signature.\n if (Function.prototype.bind &&\n // NOTE(nicksantos): Somebody pulled base.js into the default Chrome\n // extension environment. This means that for Chrome extensions, they get\n // the implementation of Function.prototype.bind that calls goog.bind\n // instead of the native one. Even worse, we don't want to introduce a\n // circular dependency between goog.bind and Function.prototype.bind, so\n // we have to hack this to make sure it works correctly.\n Function.prototype.bind.toString().indexOf('native code') != -1) {\n goog.bind = goog.bindNative_;\n } else {\n goog.bind = goog.bindJs_;\n }\n return goog.bind.apply(null, arguments);\n};\n\n\n/**\n * Like goog.bind(), except that a 'this object' is not required. Useful when\n * the target function is already bound.\n *\n * Usage:\n * var g = goog.partial(f, arg1, arg2);\n * g(arg3, arg4);\n *\n * @param {Function} fn A function to partially apply.\n * @param {...*} var_args Additional arguments that are partially applied to fn.\n * @return {!Function} A partially-applied form of the function goog.partial()\n * was invoked as a method of.\n */\ngoog.partial = function(fn, var_args) {\n var args = Array.prototype.slice.call(arguments, 1);\n return function() {\n // Clone the array (with slice()) and append additional arguments\n // to the existing arguments.\n var newArgs = args.slice();\n newArgs.push.apply(newArgs, arguments);\n return fn.apply(/** @type {?} */ (this), newArgs);\n };\n};\n\n\n/**\n * @return {number} An integer value representing the number of milliseconds\n * between midnight, January 1, 1970 and the current time.\n * @deprecated Use Date.now\n */\ngoog.now = function() {\n return Date.now();\n};\n\n\n/**\n * Evals JavaScript in the global scope.\n *\n * Throws an exception if neither execScript or eval is defined.\n * @param {string|!TrustedScript} script JavaScript string.\n */\ngoog.globalEval = function(script) {\n (0, eval)(script);\n};\n\n\n/**\n * Optional map of CSS class names to obfuscated names used with\n * goog.getCssName().\n * @private {!Object|undefined}\n * @see goog.setCssNameMapping\n */\ngoog.cssNameMapping_;\n\n\n/**\n * Optional obfuscation style for CSS class names. Should be set to either\n * 'BY_WHOLE' or 'BY_PART' if defined.\n * @type {string|undefined}\n * @private\n * @see goog.setCssNameMapping\n */\ngoog.cssNameMappingStyle_;\n\n\n\n/**\n * A hook for modifying the default behavior goog.getCssName. 
The function\n * if present, will receive the standard output of the goog.getCssName as\n * its input.\n *\n * @type {(function(string):string)|undefined}\n */\ngoog.global.CLOSURE_CSS_NAME_MAP_FN;\n\n\n/**\n * Handles strings that are intended to be used as CSS class names.\n *\n * This function works in tandem with @see goog.setCssNameMapping.\n *\n * Without any mapping set, the arguments are simple joined with a hyphen and\n * passed through unaltered.\n *\n * When there is a mapping, there are two possible styles in which these\n * mappings are used. In the BY_PART style, each part (i.e. in between hyphens)\n * of the passed in css name is rewritten according to the map. In the BY_WHOLE\n * style, the full css name is looked up in the map directly. If a rewrite is\n * not specified by the map, the compiler will output a warning.\n *\n * When the mapping is passed to the compiler, it will replace calls to\n * goog.getCssName with the strings from the mapping, e.g.\n * var x = goog.getCssName('foo');\n * var y = goog.getCssName(this.baseClass, 'active');\n * becomes:\n * var x = 'foo';\n * var y = this.baseClass + '-active';\n *\n * If one argument is passed it will be processed, if two are passed only the\n * modifier will be processed, as it is assumed the first argument was generated\n * as a result of calling goog.getCssName.\n *\n * @param {string} className The class name.\n * @param {string=} opt_modifier A modifier to be appended to the class name.\n * @return {string} The class name or the concatenation of the class name and\n * the modifier.\n */\ngoog.getCssName = function(className, opt_modifier) {\n // String() is used for compatibility with compiled soy where the passed\n // className can be non-string objects.\n if (String(className).charAt(0) == '.') {\n throw new Error(\n 'className passed in goog.getCssName must not start with \".\".' +\n ' You passed: ' + className);\n }\n\n var getMapping = function(cssName) {\n return goog.cssNameMapping_[cssName] || cssName;\n };\n\n var renameByParts = function(cssName) {\n // Remap all the parts individually.\n var parts = cssName.split('-');\n var mapped = [];\n for (var i = 0; i < parts.length; i++) {\n mapped.push(getMapping(parts[i]));\n }\n return mapped.join('-');\n };\n\n var rename;\n if (goog.cssNameMapping_) {\n rename =\n goog.cssNameMappingStyle_ == 'BY_WHOLE' ? getMapping : renameByParts;\n } else {\n rename = function(a) {\n return a;\n };\n }\n\n var result =\n opt_modifier ? className + '-' + rename(opt_modifier) : rename(className);\n\n // The special CLOSURE_CSS_NAME_MAP_FN allows users to specify further\n // processing of the class name.\n if (goog.global.CLOSURE_CSS_NAME_MAP_FN) {\n return goog.global.CLOSURE_CSS_NAME_MAP_FN(result);\n }\n\n return result;\n};\n\n\n/**\n * Sets the map to check when returning a value from goog.getCssName(). Example:\n *
<pre>\n * goog.setCssNameMapping({\n *   \"goog\": \"a\",\n *   \"disabled\": \"b\",\n * });\n *\n * var x = goog.getCssName('goog');\n * // The following evaluates to: \"a a-b\".\n * goog.getCssName('goog') + ' ' + goog.getCssName(x, 'disabled')\n * </pre>
\n * When declared as a map of string literals to string literals, the JSCompiler\n * will replace all calls to goog.getCssName() using the supplied map if the\n * --process_closure_primitives flag is set.\n *\n * @param {!Object} mapping A map of strings to strings where keys are possible\n * arguments to goog.getCssName() and values are the corresponding values\n * that should be returned.\n * @param {string=} opt_style The style of css name mapping. There are two valid\n * options: 'BY_PART', and 'BY_WHOLE'.\n * @see goog.getCssName for a description.\n */\ngoog.setCssNameMapping = function(mapping, opt_style) {\n goog.cssNameMapping_ = mapping;\n goog.cssNameMappingStyle_ = opt_style;\n};\n\n\n/**\n * To use CSS renaming in compiled mode, one of the input files should have a\n * call to goog.setCssNameMapping() with an object literal that the JSCompiler\n * can extract and use to replace all calls to goog.getCssName(). In uncompiled\n * mode, JavaScript code should be loaded before this base.js file that declares\n * a global variable, CLOSURE_CSS_NAME_MAPPING, which is used below. This is\n * to ensure that the mapping is loaded before any calls to goog.getCssName()\n * are made in uncompiled mode.\n *\n * A hook for overriding the CSS name mapping.\n * @type {!Object|undefined}\n */\ngoog.global.CLOSURE_CSS_NAME_MAPPING;\n\n\nif (!COMPILED && goog.global.CLOSURE_CSS_NAME_MAPPING) {\n // This does not call goog.setCssNameMapping() because the JSCompiler\n // requires that goog.setCssNameMapping() be called with an object literal.\n goog.cssNameMapping_ = goog.global.CLOSURE_CSS_NAME_MAPPING;\n}\n\n/**\n * Options bag type for `goog.getMsg()` third argument.\n *\n * It is important to note that these options need to be known at compile time,\n * so they must always be provided to `goog.getMsg()` as an actual object\n * literal in the function call. 
Otherwise, closure-compiler will report an\n * error.\n * @record\n */\ngoog.GetMsgOptions = function() {};\n\n/**\n * If `true`, escape '<' in the message string to '<'.\n *\n * Used by Closure Templates where the generated code size and performance is\n * critical which is why {@link goog.html.SafeHtmlFormatter} is not used.\n * The value must be literal `true` or `false`.\n * @type {boolean|undefined}\n */\ngoog.GetMsgOptions.prototype.html;\n\n/**\n * If `true`, unescape common html entities: >, <, ', " and\n * &.\n *\n * Used for messages not in HTML context, such as with the `textContent`\n * property.\n * The value must be literal `true` or `false`.\n * @type {boolean|undefined}\n */\ngoog.GetMsgOptions.prototype.unescapeHtmlEntities;\n\n/**\n * Associates placeholder names with strings showing how their values are\n * obtained.\n *\n * This field is intended for use in automatically generated JS code.\n * Human-written code should use meaningful placeholder names instead.\n *\n * closure-compiler uses this as the contents of the `` tag in the\n * XMB file it generates or defaults to `-` for historical reasons.\n *\n * Must be an object literal.\n * Ignored at runtime.\n * Keys are placeholder names.\n * Values are string literals indicating how the value is obtained.\n * Typically this is a snippet of source code.\n * @type {!Object|undefined}\n */\ngoog.GetMsgOptions.prototype.original_code;\n\n/**\n * Associates placeholder names with example values.\n *\n * closure-compiler uses this as the contents of the `` tag in the\n * XMB file it generates or defaults to `-` for historical reasons.\n *\n * Must be an object literal.\n * Ignored at runtime.\n * Keys are placeholder names.\n * Values are string literals containing example placeholder values.\n * (e.g. \"George McFly\" for a name placeholder)\n * @type {!Object|undefined}\n */\ngoog.GetMsgOptions.prototype.example;\n\n/**\n * Gets a localized message.\n *\n * This function is a compiler primitive. If you give the compiler a localized\n * message bundle, it will replace the string at compile-time with a localized\n * version, and expand goog.getMsg call to a concatenated string.\n *\n * Messages must be initialized in the form:\n * \n * var MSG_NAME = goog.getMsg('Hello {$placeholder}', {'placeholder': 'world'});\n * \n *\n * This function produces a string which should be treated as plain text. Use\n * {@link goog.html.SafeHtmlFormatter} in conjunction with goog.getMsg to\n * produce SafeHtml.\n *\n * @param {string} str Translatable string, places holders in the form {$foo}.\n * @param {!Object=} opt_values Maps place holder name to value.\n * @param {!goog.GetMsgOptions=} opt_options see `goog.GetMsgOptions`\n * @return {string} message with placeholders filled.\n */\ngoog.getMsg = function(str, opt_values, opt_options) {\n if (opt_options && opt_options.html) {\n // Note that '&' is not replaced because the translation can contain HTML\n // entities.\n str = str.replace(/')\n .replace(/'/g, '\\'')\n .replace(/"/g, '\"')\n .replace(/&/g, '&');\n }\n if (opt_values) {\n str = str.replace(/\\{\\$([^}]+)}/g, function(match, key) {\n return (opt_values != null && key in opt_values) ? opt_values[key] :\n match;\n });\n }\n return str;\n};\n\n\n/**\n * Gets a localized message. If the message does not have a translation, gives a\n * fallback message.\n *\n * This is useful when introducing a new message that has not yet been\n * translated into all languages.\n *\n * This function is a compiler primitive. 
Must be used in the form:\n * var x = goog.getMsgWithFallback(MSG_A, MSG_B);\n * where MSG_A and MSG_B were initialized with goog.getMsg.\n *\n * @param {string} a The preferred message.\n * @param {string} b The fallback message.\n * @return {string} The best translated message.\n */\ngoog.getMsgWithFallback = function(a, b) {\n return a;\n};\n\n\n/**\n * Exposes an unobfuscated global namespace path for the given object.\n * Note that fields of the exported object *will* be obfuscated, unless they are\n * exported in turn via this function or goog.exportProperty.\n *\n * Also handy for making public items that are defined in anonymous closures.\n *\n * ex. goog.exportSymbol('public.path.Foo', Foo);\n *\n * ex. goog.exportSymbol('public.path.Foo.staticFunction', Foo.staticFunction);\n * public.path.Foo.staticFunction();\n *\n * ex. goog.exportSymbol('public.path.Foo.prototype.myMethod',\n * Foo.prototype.myMethod);\n * new public.path.Foo().myMethod();\n *\n * @param {string} publicPath Unobfuscated name to export.\n * @param {*} object Object the name should point to.\n * @param {?Object=} objectToExportTo The object to add the path to; default\n * is goog.global.\n */\ngoog.exportSymbol = function(publicPath, object, objectToExportTo) {\n goog.exportPath_(\n publicPath, object, /* overwriteImplicit= */ true, objectToExportTo);\n};\n\n\n/**\n * Exports a property unobfuscated into the object's namespace.\n * ex. goog.exportProperty(Foo, 'staticFunction', Foo.staticFunction);\n * ex. goog.exportProperty(Foo.prototype, 'myMethod', Foo.prototype.myMethod);\n * @param {Object} object Object whose static property is being exported.\n * @param {string} publicName Unobfuscated name to export.\n * @param {*} symbol Object the name should point to.\n */\ngoog.exportProperty = function(object, publicName, symbol) {\n object[publicName] = symbol;\n};\n\n\n/**\n * Inherit the prototype methods from one constructor into another.\n *\n * Usage:\n *
\n * function ParentClass(a, b) { }\n * ParentClass.prototype.foo = function(a) { };\n *\n * function ChildClass(a, b, c) {\n *   ChildClass.base(this, 'constructor', a, b);\n * }\n * goog.inherits(ChildClass, ParentClass);\n *\n * var child = new ChildClass('a', 'b', 'see');\n * child.foo(); // This works.\n * 
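A minimal additional sketch, relying only on the ChildClass.base helper\n * documented below (the method name 'foo' comes from the example above):\n *\n * ChildClass.prototype.foo = function(a) {\n *   // Forward to ParentClass.prototype.foo via the generated base helper.\n *   return ChildClass.base(this, 'foo', a);\n * };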
\n *\n * @param {!Function} childCtor Child class.\n * @param {!Function} parentCtor Parent class.\n * @suppress {strictMissingProperties} superClass_ and base is not defined on\n * Function.\n * @deprecated Use ECMAScript class syntax instead.\n */\ngoog.inherits = function(childCtor, parentCtor) {\n /** @constructor */\n function tempCtor() {}\n tempCtor.prototype = parentCtor.prototype;\n childCtor.superClass_ = parentCtor.prototype;\n childCtor.prototype = new tempCtor();\n /** @override */\n childCtor.prototype.constructor = childCtor;\n\n /**\n * Calls superclass constructor/method.\n *\n * This function is only available if you use goog.inherits to\n * express inheritance relationships between classes.\n *\n * NOTE: This is a replacement for goog.base and for superClass_\n * property defined in childCtor.\n *\n * @param {!Object} me Should always be \"this\".\n * @param {string} methodName The method name to call. Calling\n * superclass constructor can be done with the special string\n * 'constructor'.\n * @param {...*} var_args The arguments to pass to superclass\n * method/constructor.\n * @return {*} The return value of the superclass method/constructor.\n */\n childCtor.base = function(me, methodName, var_args) {\n // Copying using loop to avoid deop due to passing arguments object to\n // function. This is faster in many JS engines as of late 2014.\n var args = new Array(arguments.length - 2);\n for (var i = 2; i < arguments.length; i++) {\n args[i - 2] = arguments[i];\n }\n return parentCtor.prototype[methodName].apply(me, args);\n };\n};\n\n\n/**\n * Allow for aliasing within scope functions. This function exists for\n * uncompiled code - in compiled code the calls will be inlined and the aliases\n * applied. In uncompiled code the function is simply run since the aliases as\n * written are valid JavaScript.\n *\n *\n * @param {function()} fn Function to call. This function can contain aliases\n * to namespaces (e.g. \"var dom = goog.dom\") or classes\n * (e.g. \"var Timer = goog.Timer\").\n * @deprecated Use goog.module instead.\n */\ngoog.scope = function(fn) {\n if (goog.isInModuleLoader_()) {\n throw new Error('goog.scope is not supported within a module.');\n }\n fn.call(goog.global);\n};\n\n\n/*\n * To support uncompiled, strict mode bundles that use eval to divide source\n * like so:\n * eval('someSource;//# sourceUrl sourcefile.js');\n * We need to export the globally defined symbols \"goog\" and \"COMPILED\".\n * Exporting \"goog\" breaks the compiler optimizations, so we required that\n * be defined externally.\n * NOTE: We don't use goog.exportSymbol here because we don't want to trigger\n * extern generation when that compiler option is enabled.\n */\nif (!COMPILED) {\n goog.global['COMPILED'] = COMPILED;\n}\n\n\n//==============================================================================\n// goog.defineClass implementation\n//==============================================================================\n\n\n/**\n * Creates a restricted form of a Closure \"class\":\n * - from the compiler's perspective, the instance returned from the\n * constructor is sealed (no new properties may be added). This enables\n * better checks.\n * - the compiler will rewrite this definition to a form that is optimal\n * for type checking and optimization (initially this will be a more\n * traditional form).\n *\n * @param {Function} superClass The superclass, Object or null.\n * @param {goog.defineClass.ClassDescriptor} def\n * An object literal describing\n * the class. 
It may have the following properties:\n * \"constructor\": the constructor function\n * \"statics\": an object literal containing methods to add to the constructor\n * as \"static\" methods or a function that will receive the constructor\n * function as its only parameter to which static properties can\n * be added.\n * all other properties are added to the prototype.\n * @return {!Function} The class constructor.\n * @deprecated Use ECMAScript class syntax instead.\n */\ngoog.defineClass = function(superClass, def) {\n // TODO(johnlenz): consider making the superClass an optional parameter.\n var constructor = def.constructor;\n var statics = def.statics;\n // Wrap the constructor prior to setting up the prototype and static methods.\n if (!constructor || constructor == Object.prototype.constructor) {\n constructor = function() {\n throw new Error(\n 'cannot instantiate an interface (no constructor defined).');\n };\n }\n\n var cls = goog.defineClass.createSealingConstructor_(constructor, superClass);\n if (superClass) {\n goog.inherits(cls, superClass);\n }\n\n // Remove all the properties that should not be copied to the prototype.\n delete def.constructor;\n delete def.statics;\n\n goog.defineClass.applyProperties_(cls.prototype, def);\n if (statics != null) {\n if (statics instanceof Function) {\n statics(cls);\n } else {\n goog.defineClass.applyProperties_(cls, statics);\n }\n }\n\n return cls;\n};\n\n\n/**\n * @typedef {{\n * constructor: (!Function|undefined),\n * statics: (Object|undefined|function(Function):void)\n * }}\n */\ngoog.defineClass.ClassDescriptor;\n\n\n/**\n * @define {boolean} Whether the instances returned by goog.defineClass should\n * be sealed when possible.\n *\n * When sealing is disabled the constructor function will not be wrapped by\n * goog.defineClass, making it incompatible with ES6 class methods.\n */\ngoog.defineClass.SEAL_CLASS_INSTANCES =\n goog.define('goog.defineClass.SEAL_CLASS_INSTANCES', goog.DEBUG);\n\n\n/**\n * If goog.defineClass.SEAL_CLASS_INSTANCES is enabled and Object.seal is\n * defined, this function will wrap the constructor in a function that seals the\n * results of the provided constructor function.\n *\n * @param {!Function} ctr The constructor whose results may be sealed.\n * @param {Function} superClass The superclass constructor.\n * @return {!Function} The replacement constructor.\n * @private\n */\ngoog.defineClass.createSealingConstructor_ = function(ctr, superClass) {\n if (!goog.defineClass.SEAL_CLASS_INSTANCES) {\n // Do not wrap the constructor when sealing is disabled. 
Angular code\n // depends on this for injection to work properly.\n return ctr;\n }\n\n // NOTE: The sealing behavior has been removed\n\n /**\n * @this {Object}\n * @return {?}\n */\n var wrappedCtr = function() {\n // Don't seal an instance of a subclass when it calls the constructor of\n // its super class as there is most likely still setup to do.\n var instance = ctr.apply(this, arguments) || this;\n instance[goog.UID_PROPERTY_] = instance[goog.UID_PROPERTY_];\n\n return instance;\n };\n\n return wrappedCtr;\n};\n\n\n\n// TODO(johnlenz): share these values with the goog.object\n/**\n * The names of the fields that are defined on Object.prototype.\n * @type {!Array}\n * @private\n * @const\n */\ngoog.defineClass.OBJECT_PROTOTYPE_FIELDS_ = [\n 'constructor', 'hasOwnProperty', 'isPrototypeOf', 'propertyIsEnumerable',\n 'toLocaleString', 'toString', 'valueOf'\n];\n\n\n// TODO(johnlenz): share this function with the goog.object\n/**\n * @param {!Object} target The object to add properties to.\n * @param {!Object} source The object to copy properties from.\n * @private\n */\ngoog.defineClass.applyProperties_ = function(target, source) {\n // TODO(johnlenz): update this to support ES5 getters/setters\n\n var key;\n for (key in source) {\n if (Object.prototype.hasOwnProperty.call(source, key)) {\n target[key] = source[key];\n }\n }\n\n // For IE the for-in-loop does not contain any properties that are not\n // enumerable on the prototype object (for example isPrototypeOf from\n // Object.prototype) and it will also not include 'replace' on objects that\n // extend String and change 'replace' (not that it is common for anyone to\n // extend anything except Object).\n for (var i = 0; i < goog.defineClass.OBJECT_PROTOTYPE_FIELDS_.length; i++) {\n key = goog.defineClass.OBJECT_PROTOTYPE_FIELDS_[i];\n if (Object.prototype.hasOwnProperty.call(source, key)) {\n target[key] = source[key];\n }\n }\n};\n\n/**\n * Returns the parameter.\n * @param {string} s\n * @return {string}\n * @private\n */\ngoog.identity_ = function(s) {\n return s;\n};\n\n\n/**\n * Creates Trusted Types policy if Trusted Types are supported by the browser.\n * The policy just blesses any string as a Trusted Type. It is not visibility\n * restricted because anyone can also call trustedTypes.createPolicy directly.\n * However, the allowed names should be restricted by a HTTP header and the\n * reference to the created policy should be visibility restricted.\n * @param {string} name\n * @return {?TrustedTypePolicy}\n */\ngoog.createTrustedTypesPolicy = function(name) {\n var policy = null;\n var policyFactory = goog.global.trustedTypes;\n if (!policyFactory || !policyFactory.createPolicy) {\n return policy;\n }\n // trustedTypes.createPolicy throws if called with a name that is already\n // registered, even in report-only mode. Until the API changes, catch the\n // error not to break the applications functionally. In such case, the code\n // will fall back to using regular Safe Types.\n // TODO(koto): Remove catching once createPolicy API stops throwing.\n try {\n policy = policyFactory.createPolicy(name, {\n createHTML: goog.identity_,\n createScript: goog.identity_,\n createScriptURL: goog.identity_\n });\n } catch (e) {\n goog.logToConsole_(e.message);\n }\n return policy;\n};\n\n// There's a bug in the compiler where without collapse properties the\n// Closure namespace defines do not guard code correctly. 
To help reduce code\n// size also check for !COMPILED even though it redundant until this is fixed.\nif (!COMPILED && goog.DEPENDENCIES_ENABLED) {\n\n\n /**\n * Tries to detect whether the current browser is Edge, based on the user\n * agent. This matches only pre-Chromium Edge.\n * @see https://docs.microsoft.com/en-us/microsoft-edge/web-platform/user-agent-string\n * @return {boolean} True if the current browser is Edge.\n * @private\n */\n goog.isEdge_ = function() {\n var userAgent = goog.global.navigator && goog.global.navigator.userAgent ?\n goog.global.navigator.userAgent :\n '';\n var edgeRe = /Edge\\/(\\d+)(\\.\\d)*/i;\n return !!userAgent.match(edgeRe);\n };\n\n\n /**\n * Tries to detect whether is in the context of an HTML document.\n * @return {boolean} True if it looks like HTML document.\n * @private\n */\n goog.inHtmlDocument_ = function() {\n /** @type {!Document} */\n var doc = goog.global.document;\n return doc != null && 'write' in doc; // XULDocument misses write.\n };\n\n\n /**\n * We'd like to check for if the document readyState is 'loading'; however\n * there are bugs on IE 10 and below where the readyState being anything other\n * than 'complete' is not reliable.\n * @return {boolean}\n * @private\n */\n goog.isDocumentLoading_ = function() {\n // attachEvent is available on IE 6 thru 10 only, and thus can be used to\n // detect those browsers.\n /** @type {!HTMLDocument} */\n var doc = goog.global.document;\n return doc.attachEvent ? doc.readyState != 'complete' :\n doc.readyState == 'loading';\n };\n\n\n /**\n * Tries to detect the base path of base.js script that bootstraps Closure.\n * @private\n */\n goog.findBasePath_ = function() {\n if (goog.global.CLOSURE_BASE_PATH != undefined &&\n // Anti DOM-clobbering runtime check (b/37736576).\n typeof goog.global.CLOSURE_BASE_PATH === 'string') {\n goog.basePath = goog.global.CLOSURE_BASE_PATH;\n return;\n } else if (!goog.inHtmlDocument_()) {\n return;\n }\n /** @type {!Document} */\n var doc = goog.global.document;\n // If we have a currentScript available, use it exclusively.\n var currentScript = doc.currentScript;\n if (currentScript) {\n var scripts = [currentScript];\n } else {\n var scripts = doc.getElementsByTagName('SCRIPT');\n }\n // Search backwards since the current script is in almost all cases the one\n // that has base.js.\n for (var i = scripts.length - 1; i >= 0; --i) {\n var script = /** @type {!HTMLScriptElement} */ (scripts[i]);\n var src = script.src;\n var qmark = src.lastIndexOf('?');\n var l = qmark == -1 ? 
\n /**\n * Rewrites closing script tags in input to avoid ending an enclosing script\n * tag.\n *\n * @param {string} str\n * @return {string}\n * @private\n */\n goog.protectScriptTag_ = function(str) {\n return str.replace(/<\\/(SCRIPT)/ig, '\\\\x3c/$1');\n };\n\n\n /**\n * A debug loader is responsible for downloading and executing javascript\n * files in an unbundled, uncompiled environment.\n *\n * This can be customized via the setDependencyFactory method, or by\n * CLOSURE_IMPORT_SCRIPT/CLOSURE_LOAD_FILE_SYNC.\n *\n * @struct @constructor @final @private\n */\n goog.DebugLoader_ = function() {\n /** @private @const {!Object} */\n this.dependencies_ = {};\n /** @private @const {!Object} */\n this.idToPath_ = {};\n /** @private @const {!Object} */\n this.written_ = {};\n /** @private @const {!Array} */\n this.loadingDeps_ = [];\n /** @private {!Array} */\n this.depsToLoad_ = [];\n /** @private {boolean} */\n this.paused_ = false;\n /** @private {!goog.DependencyFactory} */\n this.factory_ = new goog.DependencyFactory();\n /** @private @const {!Object} */\n this.deferredCallbacks_ = {};\n /** @private @const {!Array} */\n this.deferredQueue_ = [];\n };\n\n /**\n * @param {!Array} namespaces\n * @param {function(): undefined} callback Function to call once all the\n * namespaces have loaded.\n */\n goog.DebugLoader_.prototype.bootstrap = function(namespaces, callback) {\n var cb = callback;\n function resolve() {\n if (cb) {\n goog.global.setTimeout(cb, 0);\n cb = null;\n }\n }\n\n if (!namespaces.length) {\n resolve();\n return;\n }\n\n var deps = [];\n for (var i = 0; i < namespaces.length; i++) {\n var path = this.getPathFromDeps_(namespaces[i]);\n if (!path) {\n throw new Error('Unrecognized namespace: ' + namespaces[i]);\n }\n deps.push(this.dependencies_[path]);\n }\n\n var require = goog.require;\n var loaded = 0;\n for (var i = 0; i < namespaces.length; i++) {\n require(namespaces[i]);\n deps[i].onLoad(function() {\n if (++loaded == namespaces.length) {\n resolve();\n }\n });\n }\n };\n\n\n /**\n * Loads the Closure Dependency file.\n *\n * Exposed as a public function so CLOSURE_NO_DEPS can be set to false, base\n * loaded, setDependencyFactory called, and then this called. i.e. 
allows\n * custom loading of the deps file.\n */\n goog.DebugLoader_.prototype.loadClosureDeps = function() {\n // Circumvent addDependency, which would try to transpile deps.js if\n // transpile is set to always.\n var relPath = 'deps.js';\n this.depsToLoad_.push(this.factory_.createDependency(\n goog.normalizePath_(goog.basePath + relPath), relPath, [], [], {}));\n this.loadDeps_();\n };\n\n\n /**\n * Notifies the debug loader when a dependency has been requested.\n *\n * @param {string} absPathOrId Path of the dependency or goog id.\n * @param {boolean=} opt_force\n */\n goog.DebugLoader_.prototype.requested = function(absPathOrId, opt_force) {\n var path = this.getPathFromDeps_(absPathOrId);\n if (path &&\n (opt_force || this.areDepsLoaded_(this.dependencies_[path].requires))) {\n var callback = this.deferredCallbacks_[path];\n if (callback) {\n delete this.deferredCallbacks_[path];\n callback();\n }\n }\n };\n\n\n /**\n * Sets the dependency factory, which can be used to create custom\n * goog.Dependency implementations to control how dependencies are loaded.\n *\n * @param {!goog.DependencyFactory} factory\n */\n goog.DebugLoader_.prototype.setDependencyFactory = function(factory) {\n this.factory_ = factory;\n };\n\n\n /**\n * Travserses the dependency graph and queues the given dependency, and all of\n * its transitive dependencies, for loading and then starts loading if not\n * paused.\n *\n * @param {string} namespace\n * @private\n */\n goog.DebugLoader_.prototype.load_ = function(namespace) {\n if (!this.getPathFromDeps_(namespace)) {\n var errorMessage = 'goog.require could not find: ' + namespace;\n goog.logToConsole_(errorMessage);\n } else {\n var loader = this;\n\n var deps = [];\n\n /** @param {string} namespace */\n var visit = function(namespace) {\n var path = loader.getPathFromDeps_(namespace);\n\n if (!path) {\n throw new Error('Bad dependency path or symbol: ' + namespace);\n }\n\n if (loader.written_[path]) {\n return;\n }\n\n loader.written_[path] = true;\n\n var dep = loader.dependencies_[path];\n for (var i = 0; i < dep.requires.length; i++) {\n if (!goog.isProvided_(dep.requires[i])) {\n visit(dep.requires[i]);\n }\n }\n\n deps.push(dep);\n };\n\n visit(namespace);\n\n var wasLoading = !!this.depsToLoad_.length;\n this.depsToLoad_ = this.depsToLoad_.concat(deps);\n\n if (!this.paused_ && !wasLoading) {\n this.loadDeps_();\n }\n }\n };\n\n\n /**\n * Loads any queued dependencies until they are all loaded or paused.\n *\n * @private\n */\n goog.DebugLoader_.prototype.loadDeps_ = function() {\n var loader = this;\n var paused = this.paused_;\n\n while (this.depsToLoad_.length && !paused) {\n (function() {\n var loadCallDone = false;\n var dep = loader.depsToLoad_.shift();\n\n var loaded = false;\n loader.loading_(dep);\n\n var controller = {\n pause: function() {\n if (loadCallDone) {\n throw new Error('Cannot call pause after the call to load.');\n } else {\n paused = true;\n }\n },\n resume: function() {\n if (loadCallDone) {\n loader.resume_();\n } else {\n // Some dep called pause and then resume in the same load call.\n // Just keep running this same loop.\n paused = false;\n }\n },\n loaded: function() {\n if (loaded) {\n throw new Error('Double call to loaded.');\n }\n\n loaded = true;\n loader.loaded_(dep);\n },\n pending: function() {\n // Defensive copy.\n var pending = [];\n for (var i = 0; i < loader.loadingDeps_.length; i++) {\n pending.push(loader.loadingDeps_[i]);\n }\n return pending;\n },\n /**\n * @param {goog.ModuleType} type\n */\n 
setModuleState: function(type) {\n goog.moduleLoaderState_ = {\n type: type,\n moduleName: '',\n declareLegacyNamespace: false\n };\n },\n /** @type {function(string, string, string=)} */\n registerEs6ModuleExports: function(\n path, exports, opt_closureNamespace) {\n if (opt_closureNamespace) {\n goog.loadedModules_[opt_closureNamespace] = {\n exports: exports,\n type: goog.ModuleType.ES6,\n moduleId: opt_closureNamespace || ''\n };\n }\n },\n /** @type {function(string, ?)} */\n registerGoogModuleExports: function(moduleId, exports) {\n goog.loadedModules_[moduleId] = {\n exports: exports,\n type: goog.ModuleType.GOOG,\n moduleId: moduleId\n };\n },\n clearModuleState: function() {\n goog.moduleLoaderState_ = null;\n },\n defer: function(callback) {\n if (loadCallDone) {\n throw new Error(\n 'Cannot register with defer after the call to load.');\n }\n loader.defer_(dep, callback);\n },\n areDepsLoaded: function() {\n return loader.areDepsLoaded_(dep.requires);\n }\n };\n\n try {\n dep.load(controller);\n } finally {\n loadCallDone = true;\n }\n })();\n }\n\n if (paused) {\n this.pause_();\n }\n };\n\n\n /** @private */\n goog.DebugLoader_.prototype.pause_ = function() {\n this.paused_ = true;\n };\n\n\n /** @private */\n goog.DebugLoader_.prototype.resume_ = function() {\n if (this.paused_) {\n this.paused_ = false;\n this.loadDeps_();\n }\n };\n\n\n /**\n * Marks the given dependency as loading (load has been called but it has not\n * yet marked itself as finished). Useful for dependencies that want to know\n * what else is loading. Example: goog.modules cannot eval if there are\n * loading dependencies.\n *\n * @param {!goog.Dependency} dep\n * @private\n */\n goog.DebugLoader_.prototype.loading_ = function(dep) {\n this.loadingDeps_.push(dep);\n };\n\n\n /**\n * Marks the given dependency as having finished loading and being available\n * for require.\n *\n * @param {!goog.Dependency} dep\n * @private\n */\n goog.DebugLoader_.prototype.loaded_ = function(dep) {\n for (var i = 0; i < this.loadingDeps_.length; i++) {\n if (this.loadingDeps_[i] == dep) {\n this.loadingDeps_.splice(i, 1);\n break;\n }\n }\n\n for (var i = 0; i < this.deferredQueue_.length; i++) {\n if (this.deferredQueue_[i] == dep.path) {\n this.deferredQueue_.splice(i, 1);\n break;\n }\n }\n\n if (this.loadingDeps_.length == this.deferredQueue_.length &&\n !this.depsToLoad_.length) {\n // Something has asked to load these, but they may not be directly\n // required again later, so load them now that we know we're done loading\n // everything else. e.g. 
a goog module entry point.\n while (this.deferredQueue_.length) {\n this.requested(this.deferredQueue_.shift(), true);\n }\n }\n\n dep.loaded();\n };\n\n\n /**\n * @param {!Array} pathsOrIds\n * @return {boolean}\n * @private\n */\n goog.DebugLoader_.prototype.areDepsLoaded_ = function(pathsOrIds) {\n for (var i = 0; i < pathsOrIds.length; i++) {\n var path = this.getPathFromDeps_(pathsOrIds[i]);\n if (!path ||\n (!(path in this.deferredCallbacks_) &&\n !goog.isProvided_(pathsOrIds[i]))) {\n return false;\n }\n }\n\n return true;\n };\n\n\n /**\n * @param {string} absPathOrId\n * @return {?string}\n * @private\n */\n goog.DebugLoader_.prototype.getPathFromDeps_ = function(absPathOrId) {\n if (absPathOrId in this.idToPath_) {\n return this.idToPath_[absPathOrId];\n } else if (absPathOrId in this.dependencies_) {\n return absPathOrId;\n } else {\n return null;\n }\n };\n\n\n /**\n * @param {!goog.Dependency} dependency\n * @param {!Function} callback\n * @private\n */\n goog.DebugLoader_.prototype.defer_ = function(dependency, callback) {\n this.deferredCallbacks_[dependency.path] = callback;\n this.deferredQueue_.push(dependency.path);\n };\n\n\n /**\n * Interface for goog.Dependency implementations to have some control over\n * loading of dependencies.\n *\n * @record\n */\n goog.LoadController = function() {};\n\n\n /**\n * Tells the controller to halt loading of more dependencies.\n */\n goog.LoadController.prototype.pause = function() {};\n\n\n /**\n * Tells the controller to resume loading of more dependencies if paused.\n */\n goog.LoadController.prototype.resume = function() {};\n\n\n /**\n * Tells the controller that this dependency has finished loading.\n *\n * This causes this to be removed from pending() and any load callbacks to\n * fire.\n */\n goog.LoadController.prototype.loaded = function() {};\n\n\n /**\n * List of dependencies on which load has been called but which have not\n * called loaded on their controller. This includes the current dependency.\n *\n * @return {!Array}\n */\n goog.LoadController.prototype.pending = function() {};\n\n\n /**\n * Registers an object as an ES6 module's exports so that goog.modules may\n * require it by path.\n *\n * @param {string} path Full path of the module.\n * @param {?} exports\n * @param {string=} opt_closureNamespace Closure namespace to associate with\n * this module.\n */\n goog.LoadController.prototype.registerEs6ModuleExports = function(\n path, exports, opt_closureNamespace) {};\n\n\n /**\n * Sets the current module state.\n *\n * @param {goog.ModuleType} type Type of module.\n */\n goog.LoadController.prototype.setModuleState = function(type) {};\n\n\n /**\n * Clears the current module state.\n */\n goog.LoadController.prototype.clearModuleState = function() {};\n\n\n /**\n * Registers a callback to call once the dependency is actually requested\n * via goog.require + all of the immediate dependencies have been loaded or\n * all other files have been loaded. 
Allows for lazy loading until\n * require'd without pausing dependency loading, which is needed on old IE.\n *\n * @param {!Function} callback\n */\n goog.LoadController.prototype.defer = function(callback) {};\n\n\n /**\n * @return {boolean}\n */\n goog.LoadController.prototype.areDepsLoaded = function() {};\n\n\n /**\n * Basic super class for all dependencies Closure Library can load.\n *\n * This default implementation is designed to load untranspiled, non-module\n * scripts in a web broswer.\n *\n * For goog.modules see {@see goog.GoogModuleDependency}.\n * For untranspiled ES6 modules {@see goog.Es6ModuleDependency}.\n *\n * @param {string} path Absolute path of this script.\n * @param {string} relativePath Path of this script relative to goog.basePath.\n * @param {!Array} provides goog.provided or goog.module symbols\n * in this file.\n * @param {!Array} requires goog symbols or relative paths to Closure\n * this depends on.\n * @param {!Object} loadFlags\n * @struct @constructor\n */\n goog.Dependency = function(\n path, relativePath, provides, requires, loadFlags) {\n /** @const */\n this.path = path;\n /** @const */\n this.relativePath = relativePath;\n /** @const */\n this.provides = provides;\n /** @const */\n this.requires = requires;\n /** @const */\n this.loadFlags = loadFlags;\n /** @private {boolean} */\n this.loaded_ = false;\n /** @private {!Array} */\n this.loadCallbacks_ = [];\n };\n\n\n /**\n * @return {string} The pathname part of this dependency's path if it is a\n * URI.\n */\n goog.Dependency.prototype.getPathName = function() {\n var pathName = this.path;\n var protocolIndex = pathName.indexOf('://');\n if (protocolIndex >= 0) {\n pathName = pathName.substring(protocolIndex + 3);\n var slashIndex = pathName.indexOf('/');\n if (slashIndex >= 0) {\n pathName = pathName.substring(slashIndex + 1);\n }\n }\n return pathName;\n };\n\n\n /**\n * @param {function()} callback Callback to fire as soon as this has loaded.\n * @final\n */\n goog.Dependency.prototype.onLoad = function(callback) {\n if (this.loaded_) {\n callback();\n } else {\n this.loadCallbacks_.push(callback);\n }\n };\n\n\n /**\n * Marks this dependency as loaded and fires any callbacks registered with\n * onLoad.\n * @final\n */\n goog.Dependency.prototype.loaded = function() {\n this.loaded_ = true;\n var callbacks = this.loadCallbacks_;\n this.loadCallbacks_ = [];\n for (var i = 0; i < callbacks.length; i++) {\n callbacks[i]();\n }\n };\n\n\n /**\n * Whether or not document.written / appended script tags should be deferred.\n *\n * @private {boolean}\n */\n goog.Dependency.defer_ = false;\n\n\n /**\n * Map of script ready / state change callbacks. 
Old IE cannot handle putting\n * these properties on goog.global.\n *\n * @private @const {!Object}\n */\n goog.Dependency.callbackMap_ = {};\n\n\n /**\n * @param {function(...?):?} callback\n * @return {string}\n * @private\n */\n goog.Dependency.registerCallback_ = function(callback) {\n var key = Math.random().toString(32);\n goog.Dependency.callbackMap_[key] = callback;\n return key;\n };\n\n\n /**\n * @param {string} key\n * @private\n */\n goog.Dependency.unregisterCallback_ = function(key) {\n delete goog.Dependency.callbackMap_[key];\n };\n\n\n /**\n * @param {string} key\n * @param {...?} var_args\n * @private\n * @suppress {unusedPrivateMembers}\n */\n goog.Dependency.callback_ = function(key, var_args) {\n if (key in goog.Dependency.callbackMap_) {\n var callback = goog.Dependency.callbackMap_[key];\n var args = [];\n for (var i = 1; i < arguments.length; i++) {\n args.push(arguments[i]);\n }\n callback.apply(undefined, args);\n } else {\n var errorMessage = 'Callback key ' + key +\n ' does not exist (was base.js loaded more than once?).';\n throw Error(errorMessage);\n }\n };\n\n\n /**\n * Starts loading this dependency. This dependency can pause loading if it\n * needs to and resume it later via the controller interface.\n *\n * When this is loaded it should call controller.loaded(). Note that this will\n * end up calling the loaded method of this dependency; there is no need to\n * call it explicitly.\n *\n * @param {!goog.LoadController} controller\n */\n goog.Dependency.prototype.load = function(controller) {\n if (goog.global.CLOSURE_IMPORT_SCRIPT) {\n if (goog.global.CLOSURE_IMPORT_SCRIPT(this.path)) {\n controller.loaded();\n } else {\n controller.pause();\n }\n return;\n }\n\n if (!goog.inHtmlDocument_()) {\n goog.logToConsole_(\n 'Cannot use default debug loader outside of HTML documents.');\n if (this.relativePath == 'deps.js') {\n // Some old code is relying on base.js auto loading deps.js failing with\n // no error before later setting CLOSURE_IMPORT_SCRIPT.\n // CLOSURE_IMPORT_SCRIPT should be set *before* base.js is loaded, or\n // CLOSURE_NO_DEPS set to true.\n goog.logToConsole_(\n 'Consider setting CLOSURE_IMPORT_SCRIPT before loading base.js, ' +\n 'or setting CLOSURE_NO_DEPS to true.');\n controller.loaded();\n } else {\n controller.pause();\n }\n return;\n }\n\n /** @type {!HTMLDocument} */\n var doc = goog.global.document;\n\n // If the user tries to require a new symbol after document load,\n // something has gone terribly wrong. Doing a document.write would\n // wipe out the page. This does not apply to the CSP-compliant method\n // of writing script tags.\n if (doc.readyState == 'complete' &&\n !goog.ENABLE_CHROME_APP_SAFE_SCRIPT_LOADING) {\n // Certain test frameworks load base.js multiple times, which tries\n // to write deps.js each time. 
If that happens, just fail silently.\n // These frameworks wipe the page between each load of base.js, so this\n // is OK.\n var isDeps = /\\bdeps.js$/.test(this.path);\n if (isDeps) {\n controller.loaded();\n return;\n } else {\n throw Error('Cannot write \"' + this.path + '\" after document load');\n }\n }\n\n var nonce = goog.getScriptNonce_();\n if (!goog.ENABLE_CHROME_APP_SAFE_SCRIPT_LOADING &&\n goog.isDocumentLoading_()) {\n var key;\n var callback = function(script) {\n if (script.readyState && script.readyState != 'complete') {\n script.onload = callback;\n return;\n }\n goog.Dependency.unregisterCallback_(key);\n controller.loaded();\n };\n key = goog.Dependency.registerCallback_(callback);\n\n var defer = goog.Dependency.defer_ ? ' defer' : '';\n var nonceAttr = nonce ? ' nonce=\"' + nonce + '\"' : '';\n var script = '