From eca9bdd0508e3d0304dfb0e9c4f4ea3bd157d40b Mon Sep 17 00:00:00 2001 From: Josep Sayol Date: Sat, 8 Jul 2017 11:20:15 +0200 Subject: [PATCH] refactor(database): Add types to several classes and utility methods * refactor(database): Several classes and utility methods * WIP: fix tests with recent type changes * refactor(database): Type casting format for TSX support * refactor(database): Add 'noImplicitAny' support Adds support for the 'noImplicitAny' Typescript compiler option to the database implementation. --- src/database.ts | 2 +- src/database/api/DataSnapshot.ts | 6 +- src/database/api/Database.ts | 136 ++-- src/database/api/Query.ts | 28 +- src/database/api/Reference.ts | 28 +- src/database/api/TransactionResult.ts | 8 +- src/database/api/internal.ts | 15 +- src/database/api/onDisconnect.ts | 22 +- src/database/api/test_access.ts | 23 +- src/database/core/AuthTokenProvider.ts | 29 +- src/database/core/CompoundWrite.ts | 66 +- src/database/core/PersistentConnection.ts | 550 ++++++++-------- src/database/core/ReadonlyRestClient.ts | 37 +- src/database/core/Repo.ts | 114 ++-- src/database/core/RepoInfo.ts | 82 +-- src/database/core/RepoManager.ts | 31 +- src/database/core/Repo_transaction.ts | 334 +++++----- src/database/core/ServerActions.ts | 21 +- src/database/core/SnapshotHolder.ts | 13 +- src/database/core/SparseSnapshotTree.ts | 73 +-- src/database/core/SyncPoint.ts | 146 +++-- src/database/core/SyncTree.ts | 456 +++++++------- src/database/core/WriteTree.ts | 271 ++++---- src/database/core/operation/AckUserWrite.ts | 2 +- src/database/core/operation/Merge.ts | 3 +- src/database/core/operation/Operation.ts | 2 +- src/database/core/snap/ChildrenNode.ts | 333 +++++----- src/database/core/snap/IndexMap.ts | 91 ++- src/database/core/snap/LeafNode.ts | 123 ++-- src/database/core/snap/Node.ts | 6 +- src/database/core/snap/childSet.ts | 98 +-- src/database/core/snap/comparators.ts | 9 +- src/database/core/snap/indexes/Index.ts | 23 +- src/database/core/snap/indexes/KeyIndex.ts | 34 +- src/database/core/snap/indexes/PathIndex.ts | 54 +- .../core/snap/indexes/PriorityIndex.ts | 53 +- src/database/core/snap/indexes/ValueIndex.ts | 38 +- src/database/core/snap/nodeFromJSON.ts | 67 +- src/database/core/snap/snap.ts | 12 +- src/database/core/stats/StatsCollection.ts | 12 +- src/database/core/stats/StatsListener.ts | 13 +- src/database/core/stats/StatsManager.ts | 27 +- src/database/core/stats/StatsReporter.ts | 25 +- .../core/storage/DOMStorageWrapper.ts | 42 +- src/database/core/storage/MemoryStorage.ts | 20 +- src/database/core/storage/storage.ts | 6 +- src/database/core/util/CountedSet.ts | 46 +- src/database/core/util/EventEmitter.ts | 45 +- src/database/core/util/ImmutableTree.ts | 157 +++-- src/database/core/util/NextPushId.ts | 17 +- src/database/core/util/OnlineMonitor.ts | 28 +- src/database/core/util/Path.ts | 137 ++-- src/database/core/util/ServerValues.ts | 50 +- src/database/core/util/SortedMap.ts | 359 +++++------ src/database/core/util/Tree.ts | 112 ++-- src/database/core/util/VisibilityMonitor.ts | 24 +- src/database/core/util/libs/parser.ts | 66 +- src/database/core/util/util.ts | 289 +++++---- src/database/core/util/validation.ts | 254 ++++---- .../core/view/ChildChangeAccumulator.ts | 22 +- src/database/core/view/CompleteChildSource.ts | 8 +- src/database/core/view/Event.ts | 2 +- src/database/core/view/EventGenerator.ts | 17 +- src/database/core/view/EventRegistration.ts | 26 +- src/database/core/view/QueryParams.ts | 193 +++--- src/database/core/view/View.ts | 173 +++-- 
src/database/core/view/ViewCache.ts | 74 +-- src/database/core/view/ViewProcessor.ts | 449 +++++++------ .../core/view/filter/LimitedFilter.ts | 158 +++-- src/database/core/view/filter/RangedFilter.ts | 96 +-- .../realtime/BrowserPollConnection.ts | 225 +++---- src/database/realtime/Connection.ts | 237 ++++--- src/database/realtime/Constants.ts | 21 +- src/database/realtime/Transport.ts | 31 +- src/database/realtime/TransportManager.ts | 27 +- src/database/realtime/WebSocketConnection.ts | 127 ++-- .../realtime/polling/PacketReceiver.ts | 17 +- src/utils/nodePatches.ts | 2 +- .../database/browser/crawler_support.test.ts | 8 +- tests/database/compound_write.test.ts | 204 +++--- tests/database/database.test.ts | 36 +- tests/database/datasnapshot.test.ts | 50 +- tests/database/helpers/EventAccumulator.ts | 4 +- tests/database/helpers/events.ts | 71 +-- tests/database/helpers/util.ts | 99 ++- tests/database/info.test.ts | 45 +- tests/database/node.test.ts | 113 ++-- tests/database/order.test.ts | 149 ++--- tests/database/order_by.test.ts | 130 ++-- tests/database/path.test.ts | 8 +- tests/database/promise.test.ts | 60 +- tests/database/query.test.ts | 594 +++++++++--------- tests/database/repoinfo.test.ts | 24 +- tests/database/sortedmap.test.ts | 180 +++--- tests/database/sparsesnapshottree.test.ts | 42 +- tests/database/transaction.test.ts | 225 +++---- 96 files changed, 4495 insertions(+), 4620 deletions(-) diff --git a/src/database.ts b/src/database.ts index f56ccb48258..24705b42143 100644 --- a/src/database.ts +++ b/src/database.ts @@ -25,7 +25,7 @@ import * as INTERNAL from './database/api/internal'; import * as TEST_ACCESS from './database/api/test_access'; import { isNodeSdk } from "./utils/environment"; -export function registerDatabase(instance) { +export function registerDatabase(instance: FirebaseNamespace) { // Register the Database Service with the 'firebase' namespace. const namespace = instance.INTERNAL.registerService( 'database', diff --git a/src/database/api/DataSnapshot.ts b/src/database/api/DataSnapshot.ts index 04ecdf3aa95..9d5c4d5cf26 100644 --- a/src/database/api/DataSnapshot.ts +++ b/src/database/api/DataSnapshot.ts @@ -117,7 +117,7 @@ export class DataSnapshot { validateArgCount('DataSnapshot.getPriority', 0, 0, arguments.length); // typecast here because we never return deferred values or internal priorities (MAX_PRIORITY) - return /**@type {string|number|null} */ (this.node_.getPriority().val()); + return (this.node_.getPriority().val() as string | number | null); } /** @@ -128,14 +128,14 @@ export class DataSnapshot { * @return {boolean} True if forEach was canceled by action returning true for * one of the child nodes. */ - forEach(action: (d: DataSnapshot) => any): boolean { + forEach(action: (d: DataSnapshot) => void): boolean { validateArgCount('DataSnapshot.forEach', 1, 1, arguments.length); validateCallback('DataSnapshot.forEach', 1, action, false); if (this.node_.isLeafNode()) return false; - const childrenNode = /**@type {ChildrenNode} */ (this.node_); + const childrenNode = (this.node_ as ChildrenNode); // Sanitize the return value to a boolean. ChildrenNode.forEachChild has a weird return type... 
return !!childrenNode.forEachChild(this.index_, (key, node) => { return action(new DataSnapshot(node, this.ref_.child(key), PRIORITY_INDEX)); diff --git a/src/database/api/Database.ts b/src/database/api/Database.ts index 96af0efa6d1..0e6a4a0dd74 100644 --- a/src/database/api/Database.ts +++ b/src/database/api/Database.ts @@ -1,59 +1,56 @@ /** -* Copyright 2017 Google Inc. -* -* Licensed under the Apache License, Version 2.0 (the "License"); -* you may not use this file except in compliance with the License. -* You may obtain a copy of the License at -* -* http://www.apache.org/licenses/LICENSE-2.0 -* -* Unless required by applicable law or agreed to in writing, software -* distributed under the License is distributed on an "AS IS" BASIS, -* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -* See the License for the specific language governing permissions and -* limitations under the License. -*/ - -import { fatal } from "../core/util/util"; -import { parseRepoInfo } from "../core/util/libs/parser"; -import { Path } from "../core/util/Path"; -import { PromiseImpl } from "../../utils/promise"; -import { Reference } from "./Reference"; -import { Repo } from "../core/Repo"; -import { RepoManager } from "../core/RepoManager"; -import { validateArgCount } from "../../utils/validation"; -import { FirebaseApp } from "../../app/firebase_app"; -import { validateUrl } from "../core/util/validation"; + * Copyright 2017 Google Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { fatal } from '../core/util/util'; +import { parseRepoInfo } from '../core/util/libs/parser'; +import { Path } from '../core/util/Path'; +import { PromiseImpl } from '../../utils/promise'; +import { Reference } from './Reference'; +import { Repo } from '../core/Repo'; +import { RepoManager } from '../core/RepoManager'; +import { validateArgCount } from '../../utils/validation'; +import { validateUrl } from '../core/util/validation'; +import { FirebaseApp, FirebaseService } from '../../app/firebase_app'; +import { RepoInfo } from '../core/RepoInfo'; /** * Class representing a firebase database. - * @implements {firebase.Service} + * @implements {FirebaseService} */ -export class Database { - repo_: Repo; - root_: Reference; - INTERNAL; - - static ServerValue = { +export class Database implements FirebaseService { + INTERNAL: DatabaseInternals; + private root_: Reference; + + static readonly ServerValue = { 'TIMESTAMP': { - '.sv' : 'timestamp' + '.sv': 'timestamp' } - } + }; /** * The constructor should not be called by users of our public API. 
- * @param {!Repo} repo + * @param {!Repo} repo_ */ - constructor(repo) { - if (!(repo instanceof Repo)) { - fatal("Don't call new Database() directly - please use firebase.database()."); + constructor(private repo_: Repo) { + if (!(repo_ instanceof Repo)) { + fatal('Don\'t call new Database() directly - please use firebase.database().'); } - /** @type {Repo} */ - this.repo_ = repo; - - /** @type {Firebase} */ - this.root_ = new Reference(repo, Path.Empty); + /** @type {Reference} */ + this.root_ = new Reference(repo_, Path.Empty); this.INTERNAL = new DatabaseInternals(this); } @@ -65,7 +62,7 @@ export class Database { /** * Returns a reference to the root or the path specified in opt_pathString. * @param {string=} pathString - * @return {!Firebase} Firebase reference. + * @return {!Reference} Firebase reference. */ ref(pathString?: string): Reference { this.checkDeleted_('ref'); @@ -79,20 +76,21 @@ export class Database { * We throw a exception if the url is not in the same domain as the * current repo. * @param {string} url - * @return {!Firebase} Firebase reference. + * @return {!Reference} Firebase reference. */ - refFromURL(url) { + refFromURL(url: string): Reference { /** @const {string} */ - var apiName = 'database.refFromURL'; + const apiName = 'database.refFromURL'; this.checkDeleted_(apiName); validateArgCount(apiName, 1, 1, arguments.length); - var parsedURL = parseRepoInfo(url); + const parsedURL = parseRepoInfo(url); validateUrl(apiName, 1, parsedURL); - var repoInfo = parsedURL.repoInfo; - if (repoInfo.host !== this.repo_.repoInfo_.host) { - fatal(apiName + ": Host name does not match the current database: " + - "(found " + repoInfo.host + " but expected " + this.repo_.repoInfo_.host + ")"); + const repoInfo = parsedURL.repoInfo; + if (repoInfo.host !== ((this.repo_ as any).repoInfo_ as RepoInfo).host) { + fatal(apiName + ': Host name does not match the current database: ' + + '(found ' + repoInfo.host + ' but expected ' + + ((this.repo_ as any).repoInfo_ as RepoInfo).host + ')'); } return this.ref(parsedURL.path.toString()); @@ -101,9 +99,9 @@ export class Database { /** * @param {string} apiName */ - private checkDeleted_(apiName) { + private checkDeleted_(apiName: string) { if (this.repo_ === null) { - fatal("Cannot call " + apiName + " on a deleted database."); + fatal('Cannot call ' + apiName + ' on a deleted database.'); } } @@ -114,36 +112,28 @@ export class Database { this.repo_.interrupt(); } - goOnline () { + goOnline() { validateArgCount('database.goOnline', 0, 0, arguments.length); this.checkDeleted_('goOnline'); this.repo_.resume(); } -}; - -Object.defineProperty(Repo.prototype, 'database', { - get() { - return this.__database || (this.__database = new Database(this)); - } -}); +} -class DatabaseInternals { - database +export class DatabaseInternals { /** @param {!Database} database */ - constructor(database) { - this.database = database; + constructor(public database: Database) { } - /** @return {firebase.Promise} */ - delete() { - this.database.checkDeleted_('delete'); - RepoManager.getInstance().deleteRepo(/** @type {!Repo} */ (this.database.repo_)); + /** @return {Promise} */ + delete(): Promise { + (this.database as any).checkDeleted_('delete'); + RepoManager.getInstance().deleteRepo((this.database as any).repo_ as Repo); - this.database.repo_ = null; - this.database.root_ = null; + (this.database as any).repo_ = null; + (this.database as any).root_ = null; this.database.INTERNAL = null; this.database = null; return PromiseImpl.resolve(); } -}; +} diff --git 
a/src/database/api/Query.ts b/src/database/api/Query.ts index 61bead6a87d..a7d76ec6ab1 100644 --- a/src/database/api/Query.ts +++ b/src/database/api/Query.ts @@ -29,7 +29,7 @@ import { validateKey, } from '../core/util/validation'; import { errorPrefix, validateArgCount, validateCallback, validateContextObject } from '../../utils/validation'; -import { ValueEventRegistration, ChildEventRegistration } from '../core/view/EventRegistration'; +import { ValueEventRegistration, ChildEventRegistration, EventRegistration } from '../core/view/EventRegistration'; import { Deferred, attachDummyErrorHandler } from '../../utils/promise'; import { Repo } from '../core/Repo'; import { QueryParams } from '../core/view/QueryParams'; @@ -153,7 +153,7 @@ export class Query { // This is a slight hack. We cannot goog.require('fb.api.Firebase'), since Firebase requires fb.api.Query. // However, we will always export 'Firebase' to the global namespace, so it's guaranteed to exist by the time this // method gets called. - return (new Query.__referenceConstructor(this.repo, this.path)); + return (new Query.__referenceConstructor(this.repo, this.path) as Reference); } /** @@ -174,7 +174,7 @@ export class Query { if (eventType === 'value') { this.onValueEvent(callback, ret.cancel, ret.context); } else { - const callbacks = {}; + const callbacks: { [k: string]: typeof callback } = {}; callbacks[eventType] = callback; this.onChildEvent(callbacks, ret.cancel, ret.context); } @@ -187,7 +187,7 @@ export class Query { * @param {?Object} context * @protected */ - onValueEvent(callback: (a: DataSnapshot) => any, cancelCallback: ((a: Error) => any) | null, context: Object | null) { + protected onValueEvent(callback: (a: DataSnapshot) => void, cancelCallback: ((a: Error) => void) | null, context: Object | null) { const container = new ValueEventRegistration(callback, cancelCallback || null, context || null); this.repo.addEventCallbackForQuery(this, container); } @@ -196,6 +196,7 @@ export class Query { * @param {!Object.} callbacks * @param {?function(Error)} cancelCallback * @param {?Object} context + * @protected */ onChildEvent(callbacks: { [k: string]: SnapshotCallback }, cancelCallback: ((a: Error) => any) | null, context: Object | null) { @@ -214,10 +215,10 @@ export class Query { validateCallback('Query.off', 2, callback, true); validateContextObject('Query.off', 3, context, true); - let container = null; - let callbacks = null; + let container: EventRegistration | null = null; + let callbacks: { [k: string]: typeof callback } | null = null; if (eventType === 'value') { - const valueCallback = /** @type {function(!DataSnapshot)} */ (callback) || null; + const valueCallback = callback || null; container = new ValueEventRegistration(valueCallback, null, context || null); } else if (eventType) { if (callback) { @@ -239,7 +240,8 @@ export class Query { */ once(eventType: string, userCallback?: SnapshotCallback, - cancelOrContext?, context?: Object): Promise { + cancelOrContext?: ((a: Error) => void) | Object, + context?: Object): Promise { validateArgCount('Query.once', 1, 4, arguments.length); validateEventType('Query.once', 1, eventType, false); validateCallback('Query.once', 2, userCallback, true); @@ -254,7 +256,7 @@ export class Query { const deferred = new Deferred(); attachDummyErrorHandler(deferred.promise); - const onceCallback = (snapshot) => { + const onceCallback = (snapshot: DataSnapshot) => { // NOTE: Even though we unsubscribe, we may get called multiple times if a single action (e.g. 
set() with JSON) // triggers multiple events (e.g. child_added or child_changed). if (firstCall) { @@ -508,11 +510,11 @@ export class Query { * @return {{cancel: ?function(Error), context: ?Object}} * @private */ - private static getCancelAndContextArgs_(fnName: string, cancelOrContext?: ((a: Error) => any) | Object, - context?: Object): { cancel: ((a: Error) => any) | null, context: Object | null } { - const ret = {cancel: null, context: null}; + private static getCancelAndContextArgs_(fnName: string, cancelOrContext?: ((a: Error) => void) | Object, + context?: Object): { cancel: ((a: Error) => void) | null, context: Object | null } { + const ret: { cancel: ((a: Error) => void) | null, context: Object | null } = {cancel: null, context: null}; if (cancelOrContext && context) { - ret.cancel = /** @type {function(Error)} */ (cancelOrContext); + ret.cancel = (cancelOrContext as (a: Error) => void); validateCallback(fnName, 3, ret.cancel, true); ret.context = context; diff --git a/src/database/api/Reference.ts b/src/database/api/Reference.ts index 4836fe5beb5..d1e2f890648 100644 --- a/src/database/api/Reference.ts +++ b/src/database/api/Reference.ts @@ -40,9 +40,13 @@ import { SyncPoint } from '../core/SyncPoint'; import { Database } from './Database'; import { DataSnapshot } from './DataSnapshot'; +export interface ReferenceConstructor { + new(repo: Repo, path: Path): Reference; +} + export class Reference extends Query { - public then; - public catch; + public then: (a?: any) => Promise; + public catch: (a?: Error) => Promise; /** * Call options: @@ -104,7 +108,7 @@ export class Reference extends Query { getRoot(): Reference { validateArgCount('Reference.root', 0, 0, arguments.length); - let ref = this; + let ref = (this as any); while (ref.getParent() !== null) { ref = ref.getParent(); } @@ -121,7 +125,7 @@ export class Reference extends Query { * @param {function(?Error)=} onComplete * @return {!Promise} */ - set(newVal: any, onComplete?: (a: Error | null) => any): Promise { + set(newVal: any, onComplete?: (a: Error | null) => void): Promise { validateArgCount('Reference.set', 1, 2, arguments.length); validateWritablePath('Reference.set', this.path); validateFirebaseDataArg('Reference.set', 1, newVal, this.path, false); @@ -137,12 +141,12 @@ export class Reference extends Query { * @param {function(?Error)=} onComplete * @return {!Promise} */ - update(objectToMerge: Object, onComplete?: (a: Error | null) => any): Promise { + update(objectToMerge: Object, onComplete?: (a: Error | null) => void): Promise { validateArgCount('Reference.update', 1, 2, arguments.length); validateWritablePath('Reference.update', this.path); if (Array.isArray(objectToMerge)) { - const newObjectToMerge = {}; + const newObjectToMerge: { [k: string]: any } = {}; for (let i = 0; i < objectToMerge.length; ++i) { newObjectToMerge['' + i] = objectToMerge[i]; } @@ -167,7 +171,7 @@ export class Reference extends Query { * @return {!Promise} */ setWithPriority(newVal: any, newPriority: string | number | null, - onComplete?: (a: Error | null) => any): Promise { + onComplete?: (a: Error | null) => void): Promise { validateArgCount('Reference.setWithPriority', 2, 3, arguments.length); validateWritablePath('Reference.setWithPriority', this.path); validateFirebaseDataArg('Reference.setWithPriority', 1, newVal, this.path, false); @@ -186,7 +190,7 @@ export class Reference extends Query { * @param {function(?Error)=} onComplete * @return {!Promise} */ - remove(onComplete?: (a: Error | null) => any): Promise { + remove(onComplete?: 
(a: Error | null) => void): Promise { validateArgCount('Reference.remove', 0, 1, arguments.length); validateWritablePath('Reference.remove', this.path); validateCallback('Reference.remove', 1, onComplete, true); @@ -201,7 +205,7 @@ export class Reference extends Query { * @return {!Promise} */ transaction(transactionUpdate: (a: any) => any, - onComplete?: (a: Error | null, b: boolean, c: DataSnapshot | null) => any, + onComplete?: (a: Error | null, b: boolean, c: DataSnapshot | null) => void, applyLocally?: boolean): Promise { validateArgCount('Reference.transaction', 1, 3, arguments.length); validateWritablePath('Reference.transaction', this.path); @@ -222,7 +226,7 @@ export class Reference extends Query { attachDummyErrorHandler(deferred.promise); } - const promiseComplete = function (error, committed, snapshot) { + const promiseComplete = function (error: Error, committed: boolean, snapshot: DataSnapshot) { if (error) { deferred.reject(error); } else { @@ -242,7 +246,7 @@ export class Reference extends Query { * @param {function(?Error)=} onComplete * @return {!Promise} */ - setPriority(priority: string | number | null, onComplete?: (a: Error | null) => any): Promise { + setPriority(priority: string | number | null, onComplete?: (a: Error | null) => void): Promise { validateArgCount('Reference.setPriority', 1, 2, arguments.length); validateWritablePath('Reference.setPriority', this.path); validatePriority('Reference.setPriority', 1, priority, false); @@ -258,7 +262,7 @@ export class Reference extends Query { * @param {function(?Error)=} onComplete * @return {!Reference} */ - push(value?: any, onComplete?: (a: Error | null) => any): Reference { + push(value?: any, onComplete?: (a: Error | null) => void): Reference { validateArgCount('Reference.push', 0, 2, arguments.length); validateWritablePath('Reference.push', this.path); validateFirebaseDataArg('Reference.push', 1, value, this.path, true); diff --git a/src/database/api/TransactionResult.ts b/src/database/api/TransactionResult.ts index 4c18341ec4d..147c4789977 100644 --- a/src/database/api/TransactionResult.ts +++ b/src/database/api/TransactionResult.ts @@ -14,13 +14,17 @@ * limitations under the License. */ +import { DataSnapshot } from './DataSnapshot'; + export class TransactionResult { /** * A type for the resolve value of Firebase.transaction. * @constructor * @dict * @param {boolean} committed - * @param {fb.api.DataSnapshot} snapshot + * @param {DataSnapshot} snapshot */ - constructor(public committed, public snapshot) {} + constructor(public committed: boolean, public snapshot: DataSnapshot) { + + } } \ No newline at end of file diff --git a/src/database/api/internal.ts b/src/database/api/internal.ts index af5675e44e1..90041e70d0c 100644 --- a/src/database/api/internal.ts +++ b/src/database/api/internal.ts @@ -16,6 +16,7 @@ import { WebSocketConnection } from "../realtime/WebSocketConnection"; import { BrowserPollConnection } from "../realtime/BrowserPollConnection"; +import { Reference } from './Reference'; /** * INTERNAL methods for internal-use only (tests, etc.). 
@@ -35,26 +36,26 @@ export const forceWebSockets = function() { }; /* Used by App Manager */ -export const isWebSocketsAvailable = function() { +export const isWebSocketsAvailable = function(): boolean { return WebSocketConnection['isAvailable'](); }; -export const setSecurityDebugCallback = function(ref, callback) { - ref.repo.persistentConnection_.securityDebugCallback_ = callback; +export const setSecurityDebugCallback = function(ref: Reference, callback: (a: Object) => void) { + (ref.repo.persistentConnection_ as any).securityDebugCallback_ = callback; }; -export const stats = function(ref, showDelta) { +export const stats = function(ref: Reference, showDelta?: boolean) { ref.repo.stats(showDelta); }; -export const statsIncrementCounter = function(ref, metric) { +export const statsIncrementCounter = function(ref: Reference, metric: string) { ref.repo.statsIncrementCounter(metric); }; -export const dataUpdateCount = function(ref) { +export const dataUpdateCount = function(ref: Reference): number { return ref.repo.dataUpdateCount; }; -export const interceptServerData = function(ref, callback) { +export const interceptServerData = function(ref: Reference, callback: ((a: string, b: any) => void) | null) { return ref.repo.interceptServerData_(callback); }; diff --git a/src/database/api/onDisconnect.ts b/src/database/api/onDisconnect.ts index 530664879e2..4456513b989 100644 --- a/src/database/api/onDisconnect.ts +++ b/src/database/api/onDisconnect.ts @@ -14,18 +14,18 @@ * limitations under the License. */ -import { +import { validateArgCount, validateCallback -} from "../../utils/validation"; +} from '../../utils/validation'; import { validateWritablePath, validateFirebaseDataArg, validatePriority, validateFirebaseMergeDataArg, -} from "../core/util/validation"; -import { warn } from "../core/util/util"; -import { Deferred } from "../../utils/promise"; +} from '../core/util/validation'; +import { warn } from '../core/util/util'; +import { Deferred } from '../../utils/promise'; import { Repo } from '../core/Repo'; import { Path } from '../core/util/Path'; @@ -45,7 +45,7 @@ export class OnDisconnect { * @param {function(?Error)=} onComplete * @return {!firebase.Promise} */ - cancel(onComplete?) { + cancel(onComplete?: (a: Error | null) => void): Promise { validateArgCount('OnDisconnect.cancel', 0, 1, arguments.length); validateCallback('OnDisconnect.cancel', 1, onComplete, true); const deferred = new Deferred(); @@ -57,7 +57,7 @@ export class OnDisconnect { * @param {function(?Error)=} onComplete * @return {!firebase.Promise} */ - remove(onComplete?) { + remove(onComplete?: (a: Error | null) => void): Promise { validateArgCount('OnDisconnect.remove', 0, 1, arguments.length); validateWritablePath('OnDisconnect.remove', this.path_); validateCallback('OnDisconnect.remove', 1, onComplete, true); @@ -71,7 +71,7 @@ export class OnDisconnect { * @param {function(?Error)=} onComplete * @return {!firebase.Promise} */ - set(value, onComplete?) { + set(value: any, onComplete?: (a: Error | null) => void): Promise { validateArgCount('OnDisconnect.set', 1, 2, arguments.length); validateWritablePath('OnDisconnect.set', this.path_); validateFirebaseDataArg('OnDisconnect.set', 1, value, this.path_, false); @@ -87,7 +87,7 @@ export class OnDisconnect { * @param {function(?Error)=} onComplete * @return {!firebase.Promise} */ - setWithPriority(value, priority, onComplete?) 
{ + setWithPriority(value: any, priority: number | string | null, onComplete?: (a: Error | null) => void): Promise { validateArgCount('OnDisconnect.setWithPriority', 2, 3, arguments.length); validateWritablePath('OnDisconnect.setWithPriority', this.path_); validateFirebaseDataArg('OnDisconnect.setWithPriority', @@ -105,11 +105,11 @@ export class OnDisconnect { * @param {function(?Error)=} onComplete * @return {!firebase.Promise} */ - update(objectToMerge, onComplete?) { + update(objectToMerge: object, onComplete?: (a: Error | null) => void): Promise { validateArgCount('OnDisconnect.update', 1, 2, arguments.length); validateWritablePath('OnDisconnect.update', this.path_); if (Array.isArray(objectToMerge)) { - const newObjectToMerge = {}; + const newObjectToMerge: { [k: string]: any } = {}; for (let i = 0; i < objectToMerge.length; ++i) { newObjectToMerge['' + i] = objectToMerge[i]; } diff --git a/src/database/api/test_access.ts b/src/database/api/test_access.ts index 30f6de2e7ff..1bc16cf43bc 100644 --- a/src/database/api/test_access.ts +++ b/src/database/api/test_access.ts @@ -18,6 +18,7 @@ import { RepoInfo } from "../core/RepoInfo"; import { PersistentConnection } from "../core/PersistentConnection"; import { RepoManager } from "../core/RepoManager"; import { Connection } from "../realtime/Connection"; +import { Query } from './Query'; export const DataConnection = PersistentConnection; @@ -25,7 +26,7 @@ export const DataConnection = PersistentConnection; * @param {!string} pathString * @param {function(*)} onComplete */ -(PersistentConnection.prototype as any).simpleListen = function(pathString, onComplete) { +(PersistentConnection.prototype as any).simpleListen = function(pathString: string, onComplete: (a: any) => void) { this.sendRequest('q', {'p': pathString}, onComplete); }; @@ -33,7 +34,7 @@ export const DataConnection = PersistentConnection; * @param {*} data * @param {function(*)} onEcho */ -(PersistentConnection.prototype as any).echo = function(data, onEcho) { +(PersistentConnection.prototype as any).echo = function(data: any, onEcho: (a: any) => void) { this.sendRequest('echo', {'d': data}, onEcho); }; @@ -44,8 +45,8 @@ export const RealTimeConnection = Connection; * @param {function(): string} newHash * @return {function()} */ -export const hijackHash = function(newHash) { - var oldPut = PersistentConnection.prototype.put; +export const hijackHash = function(newHash: () => string) { + const oldPut = PersistentConnection.prototype.put; PersistentConnection.prototype.put = function(pathString, data, opt_onComplete, opt_hash) { if (opt_hash !== undefined) { opt_hash = newHash(); @@ -58,24 +59,24 @@ export const hijackHash = function(newHash) { }; /** - * @type {function(new:fb.core.RepoInfo, !string, boolean, !string, boolean): undefined} + * @type {function(new:RepoInfo, !string, boolean, !string, boolean): undefined} */ export const ConnectionTarget = RepoInfo; /** - * @param {!fb.api.Query} query + * @param {!Query} query * @return {!string} */ -export const queryIdentifier = function(query) { +export const queryIdentifier = function(query: Query) { return query.queryIdentifier(); }; /** - * @param {!fb.api.Query} firebaseRef + * @param {!Query} firebaseRef * @return {!Object} */ -export const listens = function(firebaseRef) { - return firebaseRef.repo.persistentConnection_.listens_; +export const listens = function(firebaseRef: Query) { + return (firebaseRef.repo.persistentConnection_ as any).listens_; }; /** @@ -83,6 +84,6 @@ export const listens = function(firebaseRef) { * 
* @param {boolean} forceRestClient */ -export const forceRestClient = function(forceRestClient) { +export const forceRestClient = function(forceRestClient: boolean) { RepoManager.getInstance().forceRestClient(forceRestClient); }; diff --git a/src/database/core/AuthTokenProvider.ts b/src/database/core/AuthTokenProvider.ts index b45c9526dec..0cbfd6fb397 100644 --- a/src/database/core/AuthTokenProvider.ts +++ b/src/database/core/AuthTokenProvider.ts @@ -14,32 +14,29 @@ * limitations under the License. */ -import { log, warn } from "./util/util"; +import { log, warn } from './util/util'; +import { FirebaseApp, FirebaseAuthTokenData } from '../../app/firebase_app'; /** * Abstraction around FirebaseApp's token fetching capabilities. */ export class AuthTokenProvider { - private app_; - /** - * @param {!firebase.app.App} app + * @param {!FirebaseApp} app_ */ - constructor(app) { - /** @private {!firebase.app.App} */ - this.app_ = app; + constructor(private app_: FirebaseApp) { } /** * @param {boolean} forceRefresh - * @return {!Promise} + * @return {!Promise} */ - getToken(forceRefresh) { + getToken(forceRefresh: boolean): Promise { return this.app_['INTERNAL']['getToken'](forceRefresh) .then( null, // .catch - function(error) { + function (error) { // TODO: Need to figure out all the cases this is raised and whether // this makes sense. if (error && error.code === 'auth/token-not-initialized') { @@ -51,18 +48,18 @@ export class AuthTokenProvider { }); } - addTokenChangeListener(listener) { + addTokenChangeListener(listener: (token: string | null) => void) { // TODO: We might want to wrap the listener and call it with no args to // avoid a leaky abstraction, but that makes removing the listener harder. this.app_['INTERNAL']['addAuthTokenListener'](listener); } - removeTokenChangeListener(listener) { + removeTokenChangeListener(listener: (token: string | null) => void) { this.app_['INTERNAL']['removeAuthTokenListener'](listener); } notifyForInvalidToken() { - var errorMessage = 'Provided authentication credentials for the app named "' + + let errorMessage = 'Provided authentication credentials for the app named "' + this.app_.name + '" are invalid. This usually indicates your app was not ' + 'initialized correctly. '; if ('credential' in this.app_.options) { @@ -75,9 +72,9 @@ export class AuthTokenProvider { 'project.'; } else { errorMessage += 'Make sure the "apiKey" and "databaseURL" properties provided to ' + - 'initializeApp() match the values provided for your app at ' + - 'https://console.firebase.google.com/.'; + 'initializeApp() match the values provided for your app at ' + + 'https://console.firebase.google.com/.'; } warn(errorMessage); } -}; +} diff --git a/src/database/core/CompoundWrite.ts b/src/database/core/CompoundWrite.ts index a8a84dd38e4..adad6a10574 100644 --- a/src/database/core/CompoundWrite.ts +++ b/src/database/core/CompoundWrite.ts @@ -20,6 +20,7 @@ import { forEach } from "../../utils/obj"; import { Node, NamedNode } from "./snap/Node"; import { PRIORITY_INDEX } from "./snap/indexes/PriorityIndex"; import { assert } from "../../utils/assert"; +import { ChildrenNode } from './snap/ChildrenNode'; /** * This class holds a collection of writes that can be applied to nodes in unison. 
It abstracts away the logic with @@ -31,12 +32,12 @@ import { assert } from "../../utils/assert"; * @param {!ImmutableTree.} writeTree */ export class CompoundWrite { - constructor(private writeTree_: ImmutableTree) {}; + constructor(private writeTree_: ImmutableTree) {}; /** * @type {!CompoundWrite} */ static Empty = new CompoundWrite(new ImmutableTree(null)); - + /** * @param {!Path} path * @param {!Node} node @@ -46,20 +47,20 @@ export class CompoundWrite { if (path.isEmpty()) { return new CompoundWrite(new ImmutableTree(node)); } else { - var rootmost = this.writeTree_.findRootMostValueAndPath(path); + const rootmost = this.writeTree_.findRootMostValueAndPath(path); if (rootmost != null) { - var rootMostPath = rootmost.path - var value = rootmost.value; - var relativePath = Path.relativePath(rootMostPath, path); + const rootMostPath = rootmost.path; + let value = rootmost.value; + const relativePath = Path.relativePath(rootMostPath, path); value = value.updateChild(relativePath, node); return new CompoundWrite(this.writeTree_.set(rootMostPath, value)); } else { - var subtree = new ImmutableTree(node); - var newWriteTree = this.writeTree_.setTree(path, subtree); + const subtree = new ImmutableTree(node); + const newWriteTree = this.writeTree_.setTree(path, subtree); return new CompoundWrite(newWriteTree); } } - }; + } /** * @param {!Path} path @@ -67,12 +68,12 @@ export class CompoundWrite { * @return {!CompoundWrite} */ addWrites(path: Path, updates: { [name: string]: Node }): CompoundWrite { - var newWrite = this; - forEach(updates, function(childKey, node) { + let newWrite = this as CompoundWrite; + forEach(updates, function(childKey: string, node: Node) { newWrite = newWrite.addWrite(path.child(childKey), node); }); return newWrite; - }; + } /** * Will remove a write at the given path and deeper paths. This will not modify a write at a higher @@ -85,10 +86,10 @@ export class CompoundWrite { if (path.isEmpty()) { return CompoundWrite.Empty; } else { - var newWriteTree = this.writeTree_.setTree(path, ImmutableTree.Empty); + const newWriteTree = this.writeTree_.setTree(path, ImmutableTree.Empty); return new CompoundWrite(newWriteTree); } - }; + } /** * Returns whether this CompoundWrite will fully overwrite a node at a given location and can therefore be @@ -99,7 +100,7 @@ export class CompoundWrite { */ hasCompleteWrite(path: Path): boolean { return this.getCompleteNode(path) != null; - }; + } /** * Returns a node for a path if and only if the node is a "complete" overwrite at that path. This will not aggregate @@ -108,14 +109,14 @@ export class CompoundWrite { * @param {!Path} path The path to get a complete write * @return {?Node} The node if complete at that path, or null otherwise. */ - getCompleteNode(path: Path): Node { - var rootmost = this.writeTree_.findRootMostValueAndPath(path); + getCompleteNode(path: Path): Node | null { + const rootmost = this.writeTree_.findRootMostValueAndPath(path); if (rootmost != null) { return this.writeTree_.get(rootmost.path).getChild(Path.relativePath(rootmost.path, path)); } else { return null; } - }; + } /** * Returns all children that are guaranteed to be a complete overwrite. @@ -123,13 +124,12 @@ export class CompoundWrite { * @return {!Array.} A list of all complete children. */ getCompleteChildren(): Array { - var children = []; - var node = this.writeTree_.value; + const children: NamedNode[] = []; + let node = this.writeTree_.value; if (node != null) { // If it's a leaf node, it has no children; so nothing to do. 
if (!node.isLeafNode()) { - node = /** @type {!ChildrenNode} */ (node); - node.forEachChild(PRIORITY_INDEX, function(childName, childNode) { + (node as ChildrenNode).forEachChild(PRIORITY_INDEX, function(childName, childNode) { children.push(new NamedNode(childName, childNode)); }); } @@ -141,32 +141,32 @@ export class CompoundWrite { }); } return children; - }; + } /** * @param {!Path} path * @return {!CompoundWrite} */ - childCompoundWrite(path: Path) { + childCompoundWrite(path: Path): CompoundWrite { if (path.isEmpty()) { return this; } else { - var shadowingNode = this.getCompleteNode(path); + const shadowingNode = this.getCompleteNode(path); if (shadowingNode != null) { return new CompoundWrite(new ImmutableTree(shadowingNode)); } else { return new CompoundWrite(this.writeTree_.subtree(path)); } } - }; + } /** * Returns true if this CompoundWrite is empty and therefore does not modify any nodes. * @return {boolean} Whether this CompoundWrite is empty */ - isEmpty() { + isEmpty(): boolean { return this.writeTree_.isEmpty(); - }; + } /** * Applies this CompoundWrite to a node. The node is returned with all writes from this CompoundWrite applied to the @@ -174,9 +174,9 @@ export class CompoundWrite { * @param {!Node} node The node to apply this CompoundWrite to * @return {!Node} The node with all writes applied */ - apply(node: Node) { + apply(node: Node): Node { return CompoundWrite.applySubtreeWrite_(Path.Empty, this.writeTree_, node); - }; + } /** * @param {!Path} relativePath @@ -185,12 +185,12 @@ export class CompoundWrite { * @return {!Node} * @private */ - static applySubtreeWrite_ = function(relativePath: Path, writeTree: ImmutableTree, node: Node) { + private static applySubtreeWrite_ = function(relativePath: Path, writeTree: ImmutableTree, node: Node): Node { if (writeTree.value != null) { // Since there a write is always a leaf, we're done here return node.updateChild(relativePath, writeTree.value); } else { - var priorityWrite = null; + let priorityWrite = null; writeTree.children.inorderTraversal(function(childKey, childTree) { if (childKey === '.priority') { // Apply priorities at the end so we don't update priorities for either empty nodes or forget @@ -207,6 +207,6 @@ export class CompoundWrite { } return node; } - }; + } } diff --git a/src/database/core/PersistentConnection.ts b/src/database/core/PersistentConnection.ts index cd4c3fffa80..3a8e0cfd926 100644 --- a/src/database/core/PersistentConnection.ts +++ b/src/database/core/PersistentConnection.ts @@ -14,32 +14,59 @@ * limitations under the License. 
*/ -import firebase from "../../app"; -import { forEach, contains, isEmpty, getCount, safeGet } from "../../utils/obj"; -import { stringify } from "../../utils/json"; +import firebase from '../../app'; +import { forEach, contains, isEmpty, getCount, safeGet } from '../../utils/obj'; +import { stringify } from '../../utils/json'; import { assert } from '../../utils/assert'; -import { error, log, logWrapper, warn, ObjectToUniqueKey } from "./util/util"; -import { Path } from "./util/Path"; -import { VisibilityMonitor } from "./util/VisibilityMonitor"; -import { OnlineMonitor } from "./util/OnlineMonitor"; -import { isAdmin, isValidFormat } from "../../utils/jwt"; -import { Connection } from "../realtime/Connection"; -import { CONSTANTS } from "../../utils/constants"; -import { +import { error, log, logWrapper, warn, ObjectToUniqueKey } from './util/util'; +import { Path } from './util/Path'; +import { VisibilityMonitor } from './util/VisibilityMonitor'; +import { OnlineMonitor } from './util/OnlineMonitor'; +import { isAdmin, isValidFormat } from '../../utils/jwt'; +import { Connection } from '../realtime/Connection'; +import { CONSTANTS } from '../../utils/constants'; +import { isMobileCordova, isReactNative, isNodeSdk -} from "../../utils/environment"; - -var RECONNECT_MIN_DELAY = 1000; -var RECONNECT_MAX_DELAY_DEFAULT = 60 * 5 * 1000; // 5 minutes in milliseconds (Case: 1858) -var RECONNECT_MAX_DELAY_FOR_ADMINS = 30 * 1000; // 30 seconds for admin clients (likely to be a backend server) -var RECONNECT_DELAY_MULTIPLIER = 1.3; -var RECONNECT_DELAY_RESET_TIMEOUT = 30000; // Reset delay back to MIN_DELAY after being connected for 30sec. -var SERVER_KILL_INTERRUPT_REASON = "server_kill"; +} from '../../utils/environment'; +import { ServerActions } from './ServerActions'; +import { AuthTokenProvider } from './AuthTokenProvider'; +import { RepoInfo } from './RepoInfo'; +import { Query } from '../api/Query'; + +const RECONNECT_MIN_DELAY = 1000; +const RECONNECT_MAX_DELAY_DEFAULT = 60 * 5 * 1000; // 5 minutes in milliseconds (Case: 1858) +const RECONNECT_MAX_DELAY_FOR_ADMINS = 30 * 1000; // 30 seconds for admin clients (likely to be a backend server) +const RECONNECT_DELAY_MULTIPLIER = 1.3; +const RECONNECT_DELAY_RESET_TIMEOUT = 30000; // Reset delay back to MIN_DELAY after being connected for 30sec. +const SERVER_KILL_INTERRUPT_REASON = 'server_kill'; // If auth fails repeatedly, we'll assume something is wrong and log a warning / back off. -var INVALID_AUTH_TOKEN_THRESHOLD = 3; +const INVALID_AUTH_TOKEN_THRESHOLD = 3; + +interface ListenSpec { + onComplete(s: string, p?: any): void; + + hashFn(): string; + + query: Query; + tag: number | null; +} + +interface OnDisconnectRequest { + pathString: string; + action: string; + data: any; + onComplete?: (a: string, b: string) => void; +} + +interface OutstandingPut { + action: string; + request: Object; + queued?: boolean; + onComplete: (a: string, b?: string) => void; +} /** * Firebase connection. Abstracts wire protocol and handles reconnecting. @@ -47,118 +74,86 @@ var INVALID_AUTH_TOKEN_THRESHOLD = 3; * NOTE: All JSON objects sent to the realtime connection must have property names enclosed * in quotes to make sure the closure compiler does not minify them. */ -export class PersistentConnection { +export class PersistentConnection extends ServerActions { // Used for diagnostic logging. 
- id; - log_; + id = PersistentConnection.nextPersistentConnectionId_++; + private log_ = logWrapper('p:' + this.id + ':'); + /** @private {Object} */ - interruptReasons_; - listens_; - outstandingPuts_; - outstandingPutCount_; - onDisconnectRequestQueue_; - connected_; - reconnectDelay_; - maxReconnectDelay_; - onDataUpdate_; - onConnectStatus_; - onServerInfoUpdate_; - repoInfo_; - securityDebugCallback_; - lastSessionId; + private interruptReasons_: { [reason: string]: boolean } = {}; + private listens_: { [path: string]: { [queryId: string]: ListenSpec } } = {}; + private outstandingPuts_: OutstandingPut[] = []; + private outstandingPutCount_ = 0; + private onDisconnectRequestQueue_: OnDisconnectRequest[] = []; + private connected_ = false; + private reconnectDelay_ = RECONNECT_MIN_DELAY; + private maxReconnectDelay_ = RECONNECT_MAX_DELAY_DEFAULT; + private securityDebugCallback_: ((a: Object) => void) | null = null; + lastSessionId: string | null = null; + + /** @private {number|null} */ + private establishConnectionTimer_: number | null = null; + + /** @private {boolean} */ + private visible_: boolean = false; + + // Before we get connected, we keep a queue of pending messages to send. + private requestCBHash_: { [k: number]: (a: any) => void } = {}; + private requestNumber_ = 0; + /** @private {?{ * sendRequest(Object), * close() * }} */ - private realtime_; + private realtime_: { sendRequest(a: Object): void, close(): void } | null = null; + /** @private {string|null} */ - authToken_; - authTokenProvider_; - forceTokenRefresh_; - invalidAuthTokenCount_; - /** @private {Object|null|undefined} */ - private authOverride_; - /** @private {number|null} */ - private establishConnectionTimer_; - /** @private {boolean} */ - private visible_; + private authToken_: string | null = null; + private forceTokenRefresh_ = false; + private invalidAuthTokenCount_ = 0; - // Before we get connected, we keep a queue of pending messages to send. - requestCBHash_; - requestNumber_; + private firstConnection_ = true; + private lastConnectionAttemptTime_: number | null = null; + private lastConnectionEstablishedTime_: number | null = null; - firstConnection_; - lastConnectionAttemptTime_; - lastConnectionEstablishedTime_; /** * @private */ - static nextPersistentConnectionId_ = 0; + private static nextPersistentConnectionId_ = 0; /** * Counter for number of connections created. Mainly used for tagging in the logs * @type {number} * @private */ - static nextConnectionId_ = 0; + private static nextConnectionId_ = 0; + /** * @implements {ServerActions} - * @param {!RepoInfo} repoInfo Data about the namespace we are connecting to - * @param {function(string, *, boolean, ?number)} onDataUpdate A callback for new data from the server + * @param {!RepoInfo} repoInfo_ Data about the namespace we are connecting to + * @param {function(string, *, boolean, ?number)} onDataUpdate_ A callback for new data from the server + * @param onConnectStatus_ + * @param onServerInfoUpdate_ + * @param authTokenProvider_ + * @param authOverride_ */ - constructor(repoInfo, onDataUpdate, onConnectStatus, - onServerInfoUpdate, authTokenProvider, authOverride) { - // Used for diagnostic logging. 
- this.id = PersistentConnection.nextPersistentConnectionId_++; - this.log_ = logWrapper('p:' + this.id + ':'); - /** @private {Object} */ - this.interruptReasons_ = { }; - this.listens_ = {}; - this.outstandingPuts_ = []; - this.outstandingPutCount_ = 0; - this.onDisconnectRequestQueue_ = []; - this.connected_ = false; - this.reconnectDelay_ = RECONNECT_MIN_DELAY; - this.maxReconnectDelay_ = RECONNECT_MAX_DELAY_DEFAULT; - this.onDataUpdate_ = onDataUpdate; - this.onConnectStatus_ = onConnectStatus; - this.onServerInfoUpdate_ = onServerInfoUpdate; - this.repoInfo_ = repoInfo; - this.securityDebugCallback_ = null; - this.lastSessionId = null; - /** @private {?{ - * sendRequest(Object), - * close() - * }} */ - this.realtime_ = null; - /** @private {string|null} */ - this.authToken_ = null; - this.authTokenProvider_ = authTokenProvider; - this.forceTokenRefresh_ = false; - this.invalidAuthTokenCount_ = 0; - if (authOverride && !isNodeSdk()) { + constructor(private repoInfo_: RepoInfo, + private onDataUpdate_: (a: string, b: any, c: boolean, d: number | null) => void, + private onConnectStatus_: (a: boolean) => void, + private onServerInfoUpdate_: (a: any) => void, + private authTokenProvider_: AuthTokenProvider, + private authOverride_?: Object | null) { + super(); + + if (authOverride_ && !isNodeSdk()) { throw new Error('Auth override specified in options, but not supported on non Node.js platforms'); } - /** private {Object|null|undefined} */ - this.authOverride_ = authOverride; - /** @private {number|null} */ - this.establishConnectionTimer_ = null; - /** @private {boolean} */ - this.visible_ = false; - - // Before we get connected, we keep a queue of pending messages to send. - this.requestCBHash_ = {}; - this.requestNumber_ = 0; - - this.firstConnection_ = true; - this.lastConnectionAttemptTime_ = null; - this.lastConnectionEstablishedTime_ = null; this.scheduleConnect_(0); VisibilityMonitor.getInstance().on('visible', this.onVisible_, this); - if (repoInfo.host.indexOf('fblocal') === -1) { + if (repoInfo_.host.indexOf('fblocal') === -1) { OnlineMonitor.getInstance().on('online', this.onOnline_, this); } } @@ -169,12 +164,12 @@ export class PersistentConnection { * @param {function(*)=} onResponse * @protected */ - sendRequest(action, body, onResponse?) 
{ - var curReqNum = ++this.requestNumber_; + protected sendRequest(action: string, body: any, onResponse?: (a: any) => void) { + const curReqNum = ++this.requestNumber_; - var msg = {'r': curReqNum, 'a': action, 'b': body}; + const msg = {'r': curReqNum, 'a': action, 'b': body}; this.log_(stringify(msg)); - assert(this.connected_, "sendRequest call when we're not connected not allowed."); + assert(this.connected_, 'sendRequest call when we\'re not connected not allowed.'); this.realtime_.sendRequest(msg); if (onResponse) { this.requestCBHash_[curReqNum] = onResponse; @@ -184,15 +179,15 @@ export class PersistentConnection { /** * @inheritDoc */ - listen(query, currentHashFn, tag, onComplete) { - var queryId = query.queryIdentifier(); - var pathString = query.path.toString(); + listen(query: Query, currentHashFn: () => string, tag: number | null, onComplete: (a: string, b: any) => void) { + const queryId = query.queryIdentifier(); + const pathString = query.path.toString(); this.log_('Listen called for ' + pathString + ' ' + queryId); this.listens_[pathString] = this.listens_[pathString] || {}; assert(query.getQueryParams().isDefault() || !query.getQueryParams().loadsAllData(), - 'listen() called for non-default but complete query'); + 'listen() called for non-default but complete query'); assert(!this.listens_[pathString][queryId], 'listen() called twice for same path/queryId.'); - var listenSpec = { + const listenSpec: ListenSpec = { onComplete: onComplete, hashFn: currentHashFn, query: query, @@ -212,15 +207,14 @@ export class PersistentConnection { * tag: ?number}} listenSpec * @private */ - sendListen_(listenSpec) { - var query = listenSpec.query; - var pathString = query.path.toString(); - var queryId = query.queryIdentifier(); - var self = this; + private sendListen_(listenSpec: ListenSpec) { + const query = listenSpec.query; + const pathString = query.path.toString(); + const queryId = query.queryIdentifier(); this.log_('Listen on ' + pathString + ' for ' + queryId); - var req = {/*path*/ 'p': pathString}; + const req: { [k: string]: any } = {/*path*/ 'p': pathString}; - var action = 'q'; + const action = 'q'; // Only bother to send query if it's non-default. if (listenSpec.tag) { @@ -230,20 +224,20 @@ export class PersistentConnection { req[/*hash*/'h'] = listenSpec.hashFn(); - this.sendRequest(action, req, function(message) { - var payload = message[/*data*/ 'd']; - var status = message[/*status*/ 's']; + this.sendRequest(action, req, (message: { [k: string]: any }) => { + const payload: any = message[/*data*/ 'd']; + const status: string = message[/*status*/ 's']; // print warnings in any case... 
- self.warnOnListenWarnings_(payload, query); + PersistentConnection.warnOnListenWarnings_(payload, query); - var currentListenSpec = self.listens_[pathString] && self.listens_[pathString][queryId]; + const currentListenSpec = this.listens_[pathString] && this.listens_[pathString][queryId]; // only trigger actions if the listen hasn't been removed and readded if (currentListenSpec === listenSpec) { - self.log_('listen response', message); + this.log_('listen response', message); if (status !== 'ok') { - self.removeListen_(pathString, queryId); + this.removeListen_(pathString, queryId); } if (listenSpec.onComplete) { @@ -258,14 +252,14 @@ export class PersistentConnection { * @param {!Query} query * @private */ - warnOnListenWarnings_(payload, query) { + private static warnOnListenWarnings_(payload: any, query: Query) { if (payload && typeof payload === 'object' && contains(payload, 'w')) { - var warnings = safeGet(payload, 'w'); + const warnings = safeGet(payload, 'w'); if (Array.isArray(warnings) && ~warnings.indexOf('no_index')) { - var indexSpec = '".indexOn": "' + query.getQueryParams().getIndex().toString() + '"'; - var indexPath = query.path.toString(); + const indexSpec = '".indexOn": "' + query.getQueryParams().getIndex().toString() + '"'; + const indexPath = query.path.toString(); warn('Using an unspecified index. Consider adding ' + indexSpec + ' at ' + indexPath + - ' to your security rules for better performance'); + ' to your security rules for better performance'); } } } @@ -273,7 +267,7 @@ export class PersistentConnection { /** * @inheritDoc */ - refreshAuthToken(token) { + refreshAuthToken(token: string) { this.authToken_ = token; this.log_('Auth token refreshed'); if (this.authToken_) { @@ -282,7 +276,7 @@ export class PersistentConnection { //If we're connected we want to let the server know to unauthenticate us. If we're not connected, simply delete //the credential so we dont become authenticated next time we connect. if (this.connected_) { - this.sendRequest('unauth', {}, function() { }); + this.sendRequest('unauth', {}, () => { }); } } @@ -293,10 +287,10 @@ export class PersistentConnection { * @param {!string} credential * @private */ - reduceReconnectDelayIfAdminCredential_(credential) { + private reduceReconnectDelayIfAdminCredential_(credential: string) { // NOTE: This isn't intended to be bulletproof (a malicious developer can always just modify the client). // Additionally, we don't bother resetting the max delay back to the default if auth fails / expires. - var isFirebaseSecret = credential && credential.length === 40; + const isFirebaseSecret = credential && credential.length === 40; if (isFirebaseSecret || isAdmin(credential)) { this.log_('Admin auth credential detected. Reducing max reconnect time.'); this.maxReconnectDelay_ = RECONNECT_MAX_DELAY_FOR_ADMINS; @@ -308,26 +302,25 @@ export class PersistentConnection { * a auth revoked (the connection is closed). */ tryAuth() { - var self = this; if (this.connected_ && this.authToken_) { - var token = this.authToken_; - var authMethod = isValidFormat(token) ? 'auth' : 'gauth'; - var requestData = {'cred': token}; + const token = this.authToken_; + const authMethod = isValidFormat(token) ? 
'auth' : 'gauth'; + const requestData: { [k: string]: any } = {'cred': token}; if (this.authOverride_ === null) { requestData['noauth'] = true; } else if (typeof this.authOverride_ === 'object') { requestData['authvar'] = this.authOverride_; } - this.sendRequest(authMethod, requestData, function(res) { - var status = res[/*status*/ 's']; - var data = res[/*data*/ 'd'] || 'error'; + this.sendRequest(authMethod, requestData, (res: { [k: string]: any }) => { + const status: string = res[/*status*/ 's']; + const data: string = res[/*data*/ 'd'] || 'error'; - if (self.authToken_ === token) { + if (this.authToken_ === token) { if (status === 'ok') { - self.invalidAuthTokenCount_ = 0; + this.invalidAuthTokenCount_ = 0; } else { // Triggers reconnect and force refresh for auth token - self.onAuthRevoked_(status, data); + this.onAuthRevoked_(status, data); } } }); @@ -337,27 +330,26 @@ export class PersistentConnection { /** * @inheritDoc */ - unlisten(query, tag) { - var pathString = query.path.toString(); - var queryId = query.queryIdentifier(); + unlisten(query: Query, tag: number | null) { + const pathString = query.path.toString(); + const queryId = query.queryIdentifier(); - this.log_("Unlisten called for " + pathString + " " + queryId); + this.log_('Unlisten called for ' + pathString + ' ' + queryId); assert(query.getQueryParams().isDefault() || !query.getQueryParams().loadsAllData(), - 'unlisten() called for non-default but complete query'); - var listen = this.removeListen_(pathString, queryId); + 'unlisten() called for non-default but complete query'); + const listen = this.removeListen_(pathString, queryId); if (listen && this.connected_) { this.sendUnlisten_(pathString, queryId, query.queryObject(), tag); } } - sendUnlisten_(pathString, queryId, queryObj, tag) { + private sendUnlisten_(pathString: string, queryId: string, queryObj: Object, tag: number | null) { this.log_('Unlisten on ' + pathString + ' for ' + queryId); - var self = this; - var req = {/*path*/ 'p': pathString}; - var action = 'n'; - // Only bother send queryId if it's non-default. + const req: { [k: string]: any } = {/*path*/ 'p': pathString}; + const action = 'n'; + // Only bother sending queryId if it's non-default. 
if (tag) { req['q'] = queryObj; req['t'] = tag; @@ -369,15 +361,15 @@ export class PersistentConnection { /** * @inheritDoc */ - onDisconnectPut(pathString, data, opt_onComplete) { + onDisconnectPut(pathString: string, data: any, onComplete?: (a: string, b: string) => void) { if (this.connected_) { - this.sendOnDisconnect_('o', pathString, data, opt_onComplete); + this.sendOnDisconnect_('o', pathString, data, onComplete); } else { this.onDisconnectRequestQueue_.push({ - pathString: pathString, + pathString, action: 'o', - data: data, - onComplete: opt_onComplete + data, + onComplete }); } } @@ -385,15 +377,15 @@ export class PersistentConnection { /** * @inheritDoc */ - onDisconnectMerge(pathString, data, opt_onComplete) { + onDisconnectMerge(pathString: string, data: any, onComplete?: (a: string, b: string) => void) { if (this.connected_) { - this.sendOnDisconnect_('om', pathString, data, opt_onComplete); + this.sendOnDisconnect_('om', pathString, data, onComplete); } else { this.onDisconnectRequestQueue_.push({ - pathString: pathString, + pathString, action: 'om', - data: data, - onComplete: opt_onComplete + data, + onComplete }); } } @@ -401,27 +393,26 @@ export class PersistentConnection { /** * @inheritDoc */ - onDisconnectCancel(pathString, opt_onComplete) { + onDisconnectCancel(pathString: string, onComplete?: (a: string, b: string) => void) { if (this.connected_) { - this.sendOnDisconnect_('oc', pathString, null, opt_onComplete); + this.sendOnDisconnect_('oc', pathString, null, onComplete); } else { this.onDisconnectRequestQueue_.push({ - pathString: pathString, + pathString, action: 'oc', data: null, - onComplete: opt_onComplete + onComplete }); } } - sendOnDisconnect_(action, pathString, data, opt_onComplete) { - var self = this; - var request = {/*path*/ 'p': pathString, /*data*/ 'd': data}; - self.log_('onDisconnect ' + action, request); - this.sendRequest(action, request, function(response) { - if (opt_onComplete) { - setTimeout(function() { - opt_onComplete(response[/*status*/ 's'], response[/* data */'d']); + private sendOnDisconnect_(action: string, pathString: string, data: any, onComplete: (a: string, b: string) => void) { + const request = {/*path*/ 'p': pathString, /*data*/ 'd': data}; + this.log_('onDisconnect ' + action, request); + this.sendRequest(action, request, (response: { [k: string]: any }) => { + if (onComplete) { + setTimeout(function () { + onComplete(response[/*status*/ 's'], response[/* data */'d']); }, Math.floor(0)); } }); @@ -430,32 +421,33 @@ export class PersistentConnection { /** * @inheritDoc */ - put(pathString, data, opt_onComplete, opt_hash) { - this.putInternal('p', pathString, data, opt_onComplete, opt_hash); + put(pathString: string, data: any, onComplete?: (a: string, b: string) => void, hash?: string) { + this.putInternal('p', pathString, data, onComplete, hash); } /** * @inheritDoc */ - merge(pathString, data, onComplete, opt_hash) { - this.putInternal('m', pathString, data, onComplete, opt_hash); + merge(pathString: string, data: any, onComplete: (a: string, b: string | null) => void, hash?: string) { + this.putInternal('m', pathString, data, onComplete, hash); } - putInternal(action, pathString, data, opt_onComplete, opt_hash) { - var request = {/*path*/ 'p': pathString, /*data*/ 'd': data }; + putInternal(action: string, pathString: string, data: any, + onComplete: (a: string, b: string | null) => void, hash?: string) { + const request: { [k: string]: any } = {/*path*/ 'p': pathString, /*data*/ 'd': data}; - if (opt_hash !== undefined) 
- request[/*hash*/ 'h'] = opt_hash; + if (hash !== undefined) + request[/*hash*/ 'h'] = hash; // TODO: Only keep track of the most recent put for a given path? this.outstandingPuts_.push({ - action: action, - request: request, - onComplete: opt_onComplete + action, + request, + onComplete }); this.outstandingPutCount_++; - var index = this.outstandingPuts_.length - 1; + const index = this.outstandingPuts_.length - 1; if (this.connected_) { this.sendPut_(index); @@ -464,22 +456,21 @@ export class PersistentConnection { } } - sendPut_(index) { - var self = this; - var action = this.outstandingPuts_[index].action; - var request = this.outstandingPuts_[index].request; - var onComplete = this.outstandingPuts_[index].onComplete; + private sendPut_(index: number) { + const action = this.outstandingPuts_[index].action; + const request = this.outstandingPuts_[index].request; + const onComplete = this.outstandingPuts_[index].onComplete; this.outstandingPuts_[index].queued = this.connected_; - this.sendRequest(action, request, function(message) { - self.log_(action + ' response', message); + this.sendRequest(action, request, (message: { [k: string]: any }) => { + this.log_(action + ' response', message); - delete self.outstandingPuts_[index]; - self.outstandingPutCount_--; + delete this.outstandingPuts_[index]; + this.outstandingPutCount_--; // Clean up array occasionally. - if (self.outstandingPutCount_ === 0) { - self.outstandingPuts_ = []; + if (this.outstandingPutCount_ === 0) { + this.outstandingPuts_ = []; } if (onComplete) @@ -490,16 +481,16 @@ export class PersistentConnection { /** * @inheritDoc */ - reportStats(stats) { + reportStats(stats: { [k: string]: any }) { // If we're not connected, we just drop the stats. if (this.connected_) { - var request = { /*counters*/ 'c': stats }; + const request = {/*counters*/ 'c': stats}; this.log_('reportStats', request); - this.sendRequest(/*stats*/ 's', request, function(result) { - var status = result[/*status*/ 's']; + this.sendRequest(/*stats*/ 's', request, (result) => { + const status = result[/*status*/ 's']; if (status !== 'ok') { - var errorReason = result[/* data */ 'd']; + const errorReason = result[/* data */ 'd']; this.log_('reportStats', 'Error sending stats: ' + errorReason); } }); @@ -510,12 +501,12 @@ export class PersistentConnection { * @param {*} message * @private */ - onDataMessage_(message) { + private onDataMessage_(message: { [k: string]: any }) { if ('r' in message) { // this is a response this.log_('from server: ' + stringify(message)); - var reqNum = message['r']; - var onResponse = this.requestCBHash_[reqNum]; + const reqNum = message['r']; + const onResponse = this.requestCBHash_[reqNum]; if (onResponse) { delete this.requestCBHash_[reqNum]; onResponse(message[/*body*/ 'b']); @@ -528,7 +519,7 @@ export class PersistentConnection { } } - onDataPush_(action, body) { + private onDataPush_(action: string, body: { [k: string]: any }) { this.log_('handleServerMessage', action, body); if (action === 'd') this.onDataUpdate_(body[/*path*/ 'p'], body[/*data*/ 'd'], /*isMerge*/false, body['t']); @@ -545,7 +536,7 @@ export class PersistentConnection { '\nAre you using the latest client?'); } - onReady_(timestamp, sessionId) { + private onReady_(timestamp: number, sessionId: string) { this.log_('connection ready'); this.connected_ = true; this.lastConnectionEstablishedTime_ = new Date().getTime(); @@ -559,8 +550,8 @@ export class PersistentConnection { this.onConnectStatus_(true); } - scheduleConnect_(timeout) { - 
assert(!this.realtime_, "Scheduling a connect when we're already connected/ing?"); + private scheduleConnect_(timeout: number) { + assert(!this.realtime_, 'Scheduling a connect when we\'re already connected/ing?'); if (this.establishConnectionTimer_) { clearTimeout(this.establishConnectionTimer_); @@ -569,18 +560,17 @@ export class PersistentConnection { // NOTE: Even when timeout is 0, it's important to do a setTimeout to work around an infuriating "Security Error" in // Firefox when trying to write to our long-polling iframe in some scenarios (e.g. Forge or our unit tests). - var self = this; - this.establishConnectionTimer_ = setTimeout(function() { - self.establishConnectionTimer_ = null; - self.establishConnection_(); - }, Math.floor(timeout)); + this.establishConnectionTimer_ = setTimeout(() => { + this.establishConnectionTimer_ = null; + this.establishConnection_(); + }, Math.floor(timeout)) as any; } /** * @param {boolean} visible * @private */ - onVisible_(visible) { + private onVisible_(visible: boolean) { // NOTE: Tabbing away and back to a window will defeat our reconnect backoff, but I think that's fine. if (visible && !this.visible_ && this.reconnectDelay_ === this.maxReconnectDelay_) { this.log_('Window became visible. Reducing delay.'); @@ -593,7 +583,7 @@ export class PersistentConnection { this.visible_ = visible; } - onOnline_(online) { + private onOnline_(online: boolean) { if (online) { this.log_('Browser went online.'); this.reconnectDelay_ = RECONNECT_MIN_DELAY; @@ -601,14 +591,14 @@ export class PersistentConnection { this.scheduleConnect_(0); } } else { - this.log_("Browser went offline. Killing connection."); + this.log_('Browser went offline. Killing connection.'); if (this.realtime_) { this.realtime_.close(); } } } - onRealtimeDisconnect_() { + private onRealtimeDisconnect_() { this.log_('data client disconnected'); this.connected_ = false; this.realtime_ = null; @@ -621,19 +611,19 @@ export class PersistentConnection { if (this.shouldReconnect_()) { if (!this.visible_) { - this.log_("Window isn't visible. Delaying reconnect."); + this.log_('Window isn\'t visible. Delaying reconnect.'); this.reconnectDelay_ = this.maxReconnectDelay_; this.lastConnectionAttemptTime_ = new Date().getTime(); } else if (this.lastConnectionEstablishedTime_) { // If we've been connected long enough, reset reconnect delay to minimum. 
- var timeSinceLastConnectSucceeded = new Date().getTime() - this.lastConnectionEstablishedTime_; + const timeSinceLastConnectSucceeded = new Date().getTime() - this.lastConnectionEstablishedTime_; if (timeSinceLastConnectSucceeded > RECONNECT_DELAY_RESET_TIMEOUT) this.reconnectDelay_ = RECONNECT_MIN_DELAY; this.lastConnectionEstablishedTime_ = null; } - var timeSinceLastConnectAttempt = new Date().getTime() - this.lastConnectionAttemptTime_; - var reconnectDelay = Math.max(0, this.reconnectDelay_ - timeSinceLastConnectAttempt); + const timeSinceLastConnectAttempt = new Date().getTime() - this.lastConnectionAttemptTime_; + let reconnectDelay = Math.max(0, this.reconnectDelay_ - timeSinceLastConnectAttempt); reconnectDelay = Math.random() * reconnectDelay; this.log_('Trying to reconnect in ' + reconnectDelay + 'ms'); @@ -645,20 +635,20 @@ export class PersistentConnection { this.onConnectStatus_(false); } - establishConnection_() { + private establishConnection_() { if (this.shouldReconnect_()) { this.log_('Making a connection attempt'); this.lastConnectionAttemptTime_ = new Date().getTime(); this.lastConnectionEstablishedTime_ = null; - var onDataMessage = this.onDataMessage_.bind(this); - var onReady = this.onReady_.bind(this); - var onDisconnect = this.onRealtimeDisconnect_.bind(this); - var connId = this.id + ':' + PersistentConnection.nextConnectionId_++; - var self = this; - var lastSessionId = this.lastSessionId; - var canceled = false; - var connection = null; - var closeFn = function() { + const onDataMessage = this.onDataMessage_.bind(this); + const onReady = this.onReady_.bind(this); + const onDisconnect = this.onRealtimeDisconnect_.bind(this); + const connId = this.id + ':' + PersistentConnection.nextConnectionId_++; + const self = this; + const lastSessionId = this.lastSessionId; + let canceled = false; + let connection: Connection | null = null; + const closeFn = function () { if (connection) { connection.close(); } else { @@ -666,8 +656,8 @@ export class PersistentConnection { onDisconnect(); } }; - var sendRequestFn = function(msg) { - assert(connection, "sendRequest call when we're not connected not allowed."); + const sendRequestFn = function (msg: Object) { + assert(connection, 'sendRequest call when we\'re not connected not allowed.'); connection.sendRequest(msg); }; @@ -676,11 +666,11 @@ export class PersistentConnection { sendRequest: sendRequestFn }; - var forceRefresh = this.forceTokenRefresh_; + const forceRefresh = this.forceTokenRefresh_; this.forceTokenRefresh_ = false; // First fetch auth token, and establish connection after fetching the token was successful - this.authTokenProvider_.getToken(forceRefresh).then(function(result) { + this.authTokenProvider_.getToken(forceRefresh).then(function (result) { if (!canceled) { log('getToken() completed. 
Creating connection.'); self.authToken_ = result && result.accessToken; @@ -695,7 +685,7 @@ export class PersistentConnection { } else { log('getToken() completed but was canceled'); } - }).then(null, function(error) { + }).then(null, function (error) { self.log_('Failed to get token: ' + error); if (!canceled) { if (CONSTANTS.NODE_ADMIN) { @@ -713,7 +703,7 @@ export class PersistentConnection { /** * @param {string} reason */ - interrupt(reason) { + interrupt(reason: string) { log('Interrupting connection for reason: ' + reason); this.interruptReasons_[reason] = true; if (this.realtime_) { @@ -732,7 +722,7 @@ export class PersistentConnection { /** * @param {string} reason */ - resume(reason) { + resume(reason: string) { log('Resuming connection for reason: ' + reason); delete this.interruptReasons_[reason]; if (isEmpty(this.interruptReasons_)) { @@ -743,22 +733,14 @@ export class PersistentConnection { } } - /** - * @param reason - * @return {boolean} - */ - isInterrupted(reason) { - return this.interruptReasons_[reason] || false; - } - - handleTimestamp_(timestamp) { - var delta = timestamp - new Date().getTime(); + private handleTimestamp_(timestamp: number) { + const delta = timestamp - new Date().getTime(); this.onServerInfoUpdate_({'serverTimeOffset': delta}); } - cancelSentTransactions_() { - for (var i = 0; i < this.outstandingPuts_.length; i++) { - var put = this.outstandingPuts_[i]; + private cancelSentTransactions_() { + for (let i = 0; i < this.outstandingPuts_.length; i++) { + const put = this.outstandingPuts_[i]; if (put && /*hash*/'h' in put.request && put.queued) { if (put.onComplete) put.onComplete('disconnect'); @@ -774,19 +756,19 @@ export class PersistentConnection { } /** - * @param {!string} pathString - * @param {Array.<*>=} opt_query - * @private - */ - onListenRevoked_(pathString, opt_query) { + * @param {!string} pathString + * @param {Array.<*>=} query + * @private + */ + private onListenRevoked_(pathString: string, query?: any[]) { // Remove the listen and manufacture a "permission_denied" error for the failed listen. - var queryId; - if (!opt_query) { + let queryId; + if (!query) { queryId = 'default'; } else { - queryId = opt_query.map(function(q) { return ObjectToUniqueKey(q); }).join('$'); + queryId = query.map(q => ObjectToUniqueKey(q)).join('$'); } - var listen = this.removeListen_(pathString, queryId); + const listen = this.removeListen_(pathString, queryId); if (listen && listen.onComplete) listen.onComplete('permission_denied'); } @@ -797,9 +779,9 @@ export class PersistentConnection { * @return {{queries:Array., onComplete:function(string)}} * @private */ - removeListen_(pathString, queryId) { - var normalizedPathString = new Path(pathString).toString(); // normalize path. - var listen; + private removeListen_(pathString: string, queryId: string): ListenSpec { + const normalizedPathString = new Path(pathString).toString(); // normalize path. 
+ let listen; if (this.listens_[normalizedPathString] !== undefined) { listen = this.listens_[normalizedPathString][queryId]; delete this.listens_[normalizedPathString][queryId]; @@ -813,7 +795,7 @@ export class PersistentConnection { return listen; } - onAuthRevoked_(statusCode, explanation) { + private onAuthRevoked_(statusCode: string, explanation: string) { log('Auth token revoked: ' + statusCode + '/' + explanation); this.authToken_ = null; this.forceTokenRefresh_ = true; @@ -834,7 +816,7 @@ export class PersistentConnection { } } - onSecurityDebugPacket_(body) { + private onSecurityDebugPacket_(body: { [k: string]: any }) { if (this.securityDebugCallback_) { this.securityDebugCallback_(body); } else { @@ -844,26 +826,25 @@ export class PersistentConnection { } } - restoreState_() { + private restoreState_() { //Re-authenticate ourselves if we have a credential stored. this.tryAuth(); // Puts depend on having received the corresponding data update from the server before they complete, so we must // make sure to send listens before puts. - var self = this; - forEach(this.listens_, function(pathString, queries) { - forEach(queries, function(key, listenSpec) { - self.sendListen_(listenSpec); + forEach(this.listens_, (pathString: string, queries: Object) => { + forEach(queries, (key: string, listenSpec: ListenSpec) => { + this.sendListen_(listenSpec); }); }); - for (var i = 0; i < this.outstandingPuts_.length; i++) { + for (let i = 0; i < this.outstandingPuts_.length; i++) { if (this.outstandingPuts_[i]) this.sendPut_(i); } while (this.onDisconnectRequestQueue_.length) { - var request = this.onDisconnectRequestQueue_.shift(); + const request = this.onDisconnectRequestQueue_.shift(); this.sendOnDisconnect_(request.action, request.pathString, request.data, request.onComplete); } } @@ -872,10 +853,10 @@ export class PersistentConnection { * Sends client stats for first connection * @private */ - sendConnectStats_() { - var stats = {}; + private sendConnectStats_() { + const stats: { [k: string]: number } = {}; - var clientName = 'js'; + let clientName = 'js'; if (CONSTANTS.NODE_ADMIN) { clientName = 'admin_node'; } else if (CONSTANTS.NODE_CLIENT) { @@ -897,8 +878,9 @@ export class PersistentConnection { * @return {boolean} * @private */ - shouldReconnect_() { - var online = OnlineMonitor.getInstance().currentlyOnline(); + private shouldReconnect_(): boolean { + const online = OnlineMonitor.getInstance().currentlyOnline(); return isEmpty(this.interruptReasons_) && online; } -}; // end PersistentConnection +} + diff --git a/src/database/core/ReadonlyRestClient.ts b/src/database/core/ReadonlyRestClient.ts index 847e4db24ae..70205919b8d 100644 --- a/src/database/core/ReadonlyRestClient.ts +++ b/src/database/core/ReadonlyRestClient.ts @@ -29,9 +29,15 @@ import { Query } from '../api/Query'; * This is mostly useful for compatibility with crawlers, where we don't want to spin up a full * persistent connection (using WebSockets or long-polling) */ -export class ReadonlyRestClient implements ServerActions { +export class ReadonlyRestClient extends ServerActions { + reportStats(stats: { + [k: string]: any; + }): void { + throw new Error('Method not implemented.'); + } + /** @private {function(...[*])} */ - private log_: (...args: any[]) => any = logWrapper('p:rest:'); + private log_: (...args: any[]) => void = logWrapper('p:rest:'); /** * We don't actually need to track listens, except to prevent us calling an onComplete for a listen @@ -63,12 +69,13 @@ export class ReadonlyRestClient implements 
ServerActions { * @implements {ServerActions} */ constructor(private repoInfo_: RepoInfo, - private onDataUpdate_: (a: string, b: any, c: boolean, d: number | null) => any, + private onDataUpdate_: (a: string, b: any, c: boolean, d: number | null) => void, private authTokenProvider_: AuthTokenProvider) { + super(); } /** @inheritDoc */ - listen(query: Query, currentHashFn: () => string, tag: number | null, onComplete: (a: string, b: any) => any) { + listen(query: Query, currentHashFn: () => string, tag: number | null, onComplete: (a: string, b: any) => void) { const pathString = query.path.toString(); this.log_('Listen called for ' + pathString + ' ' + query.queryIdentifier()); @@ -117,24 +124,6 @@ export class ReadonlyRestClient implements ServerActions { // no-op since we just always call getToken. } - /** @inheritDoc */ - onDisconnectPut(pathString: string, data: any, onComplete?: (a: string, b: string) => any) { } - - /** @inheritDoc */ - onDisconnectMerge(pathString: string, data: any, onComplete?: (a: string, b: string) => any) { } - - /** @inheritDoc */ - onDisconnectCancel(pathString: string, onComplete?: (a: string, b: string) => any) { } - - /** @inheritDoc */ - put(pathString: string, data: any, onComplete?: (a: string, b: string) => any, hash?: string) { } - - /** @inheritDoc */ - merge(pathString: string, data: any, onComplete: (a: string, b: string | null) => any, hash?: string) { } - - /** @inheritDoc */ - reportStats(stats: { [k: string]: any }) { } - /** * Performs a REST request to the given path, with the provided query string parameters, * and any auth credentials we have. @@ -144,8 +133,8 @@ export class ReadonlyRestClient implements ServerActions { * @param {?function(?number, *=)} callback * @private */ - private restRequest_(pathString: string, queryStringParameters: {[k: string]: any} = {}, - callback: ((a: number | null, b?: any) => any) | null) { + private restRequest_(pathString: string, queryStringParameters: { [k: string]: any } = {}, + callback: ((a: number | null, b?: any) => void) | null) { queryStringParameters['format'] = 'export'; this.authTokenProvider_.getToken(/*forceRefresh=*/false).then((authTokenData) => { diff --git a/src/database/core/Repo.ts b/src/database/core/Repo.ts index e9457f6f792..982018369dd 100644 --- a/src/database/core/Repo.ts +++ b/src/database/core/Repo.ts @@ -40,6 +40,9 @@ import { Database } from '../api/Database'; import { ServerActions } from './ServerActions'; import { Query } from '../api/Query'; import { EventRegistration } from './view/EventRegistration'; +import { StatsCollection } from './stats/StatsCollection'; +import { Event } from './view/Event'; +import { Node } from './snap/Node'; const INTERRUPT_REASON = 'repo_interrupt'; @@ -47,25 +50,25 @@ const INTERRUPT_REASON = 'repo_interrupt'; * A connection to a single data repository. 
*/ export class Repo { - /** @type {!Database} */ - database: Database; - infoSyncTree_: SyncTree; - dataUpdateCount; - serverSyncTree_: SyncTree; - - public repoInfo_; - private stats_; - private statsListener_; - private eventQueue_; - private nextWriteId_; + dataUpdateCount = 0; + private infoSyncTree_: SyncTree; + private serverSyncTree_: SyncTree; + + private stats_: StatsCollection; + private statsListener_: StatsListener | null = null; + private eventQueue_ = new EventQueue(); + private nextWriteId_ = 1; private server_: ServerActions; - private statsReporter_; - private transactions_init_; - private infoData_; - private onDisconnect_; - private abortTransactions_; - private rerunTransactions_; - private interceptServerDataCallback_; + private statsReporter_: StatsReporter; + private transactions_init_: () => void; + private infoData_: SnapshotHolder; + private abortTransactions_: (path: Path) => Path; + private rerunTransactions_: (changedPath: Path) => Path; + private interceptServerDataCallback_: ((a: string, b: any) => void) | null = null; + private __database: Database; + + // A list of data pieces and paths to be set when this client disconnects. + private onDisconnect_ = new SparseSnapshotTree(); /** * TODO: This should be @private but it's used by test_access.js and internal.js @@ -74,20 +77,15 @@ export class Repo { persistentConnection_: PersistentConnection | null = null; /** - * @param {!RepoInfo} repoInfo + * @param {!RepoInfo} repoInfo_ * @param {boolean} forceRestClient * @param {!FirebaseApp} app */ - constructor(repoInfo: RepoInfo, forceRestClient: boolean, public app: FirebaseApp) { + constructor(private repoInfo_: RepoInfo, forceRestClient: boolean, public app: FirebaseApp) { /** @type {!AuthTokenProvider} */ const authTokenProvider = new AuthTokenProvider(app); - this.repoInfo_ = repoInfo; - this.stats_ = StatsManager.getCollection(repoInfo); - /** @type {StatsListener} */ - this.statsListener_ = null; - this.eventQueue_ = new EventQueue(); - this.nextWriteId_ = 1; + this.stats_ = StatsManager.getCollection(repoInfo_); if (forceRestClient || beingCrawled()) { this.server_ = new ReadonlyRestClient(this.repoInfo_, @@ -126,7 +124,7 @@ export class Repo { // In the case of multiple Repos for the same repoInfo (i.e. there are multiple Firebase.Contexts being used), // we only want to create one StatsReporter. As such, we'll report stats over the first Repo created. - this.statsReporter_ = StatsManager.getOrCreateReporter(repoInfo, + this.statsReporter_ = StatsManager.getOrCreateReporter(repoInfo_, () => new StatsReporter(this.stats_, this.server_)); this.transactions_init_(); @@ -135,7 +133,7 @@ export class Repo { this.infoData_ = new SnapshotHolder(); this.infoSyncTree_ = new SyncTree({ startListening: (query, tag, currentHashFn, onComplete) => { - let infoEvents = []; + let infoEvents: Event[] = []; const node = this.infoData_.getNode(query.path); // This is possibly a hack, but we have different semantics for .info endpoints. We don't raise null events // on initial data... @@ -151,13 +149,6 @@ export class Repo { }); this.updateInfo_('connected', false); - // A list of data pieces and paths to be set when this client disconnects. 
- this.onDisconnect_ = new SparseSnapshotTree(); - - this.dataUpdateCount = 0; - - this.interceptServerDataCallback_ = null; - this.serverSyncTree_ = new SyncTree({ startListening: (query, tag, currentHashFn, onComplete) => { this.server_.listen(query, currentHashFn, tag, (status, data) => { @@ -192,7 +183,7 @@ export class Repo { */ serverTime(): number { const offsetNode = this.infoData_.getNode(new Path('.info/serverTimeOffset')); - const offset = /** @type {number} */ (offsetNode.val()) || 0; + const offset = (offsetNode.val() as number) || 0; return new Date().getTime() + offset; } @@ -223,14 +214,14 @@ export class Repo { let events = []; if (tag) { if (isMerge) { - const taggedChildren = map(/**@type {!Object.} */ (data), (raw) => nodeFromJSON(raw)); + const taggedChildren = map(data as { [k: string]: any }, (raw: any) => nodeFromJSON(raw)); events = this.serverSyncTree_.applyTaggedQueryMerge(path, taggedChildren, tag); } else { const taggedSnap = nodeFromJSON(data); events = this.serverSyncTree_.applyTaggedQueryOverwrite(path, taggedSnap, tag); } } else if (isMerge) { - const changedChildren = map(/**@type {!Object.} */ (data), (raw) => nodeFromJSON(raw)); + const changedChildren = map(data as { [k: string]: any }, (raw: any) => nodeFromJSON(raw)); events = this.serverSyncTree_.applyServerMerge(path, changedChildren); } else { const snap = nodeFromJSON(data); @@ -246,10 +237,11 @@ export class Repo { } /** + * TODO: This should be @private but it's used by test_access.js and internal.js * @param {?function(!string, *):*} callback * @private */ - private interceptServerData_(callback: (a: string, b: any) => any) { + interceptServerData_(callback: ((a: string, b: any) => any) | null) { this.interceptServerDataCallback_ = callback; } @@ -302,9 +294,9 @@ export class Repo { * @param {number|string|null} newPriority * @param {?function(?Error, *=)} onComplete */ - setWithPriority(path: Path, newVal: any, + setWithPriority(path: Path, newVal: any, newPriority: number | string | null, - onComplete: ((status: Error | null, errorReason?: string) => any) | null) { + onComplete: ((status: Error | null, errorReason?: string) => void) | null) { this.log_('set', {path: path.toString(), value: newVal, priority: newPriority}); // TODO: Optimize this behavior to either (a) store flag to skip resolving where possible and / or @@ -337,15 +329,15 @@ export class Repo { * @param {!Object} childrenToMerge * @param {?function(?Error, *=)} onComplete */ - update(path: Path, childrenToMerge: Object, - onComplete: ((status: Error | null, errorReason?: string) => any) | null) { + update(path: Path, childrenToMerge: { [k: string]: any }, + onComplete: ((status: Error | null, errorReason?: string) => void) | null) { this.log_('update', {path: path.toString(), value: childrenToMerge}); // Start with our existing data and merge each child into it. 
let empty = true; const serverValues = this.generateServerValues(); - const changedChildren = {}; - forEach(childrenToMerge, function (changedKey, changedValue) { + const changedChildren: { [k: string]: Node } = {}; + forEach(childrenToMerge, (changedKey: string, changedValue: any) => { empty = false; const newNodeUnresolved = nodeFromJSON(changedValue); changedChildren[changedKey] = resolveDeferredValueSnapshot(newNodeUnresolved, serverValues); @@ -367,7 +359,7 @@ export class Repo { this.callOnCompleteCallback(onComplete, status, errorReason); }); - forEach(childrenToMerge, (changedPath, changedValue) => { + forEach(childrenToMerge, (changedPath: string) => { const affectedPath = this.abortTransactions_(path.child(changedPath)); this.rerunTransactions_(affectedPath); }); @@ -389,7 +381,7 @@ export class Repo { const serverValues = this.generateServerValues(); const resolvedOnDisconnectTree = resolveDeferredValueTree(this.onDisconnect_, serverValues); - let events = []; + let events: Event[] = []; resolvedOnDisconnectTree.forEachTree(Path.Empty, (path, snap) => { events = events.concat(this.serverSyncTree_.applyServerOverwrite(path, snap)); @@ -405,7 +397,7 @@ export class Repo { * @param {!Path} path * @param {?function(?Error, *=)} onComplete */ - onDisconnectCancel(path: Path, onComplete: ((status: Error | null, errorReason?: string) => any) | null) { + onDisconnectCancel(path: Path, onComplete: ((status: Error | null, errorReason?: string) => void) | null) { this.server_.onDisconnectCancel(path.toString(), (status, errorReason) => { if (status === 'ok') { this.onDisconnect_.forget(path); @@ -419,7 +411,7 @@ export class Repo { * @param {*} value * @param {?function(?Error, *=)} onComplete */ - onDisconnectSet(path: Path, value: any, onComplete: ((status: Error | null, errorReason?: string) => any) | null) { + onDisconnectSet(path: Path, value: any, onComplete: ((status: Error | null, errorReason?: string) => void) | null) { const newNode = nodeFromJSON(value); this.server_.onDisconnectPut(path.toString(), newNode.val(/*export=*/true), (status, errorReason) => { if (status === 'ok') { @@ -435,7 +427,7 @@ export class Repo { * @param {*} priority * @param {?function(?Error, *=)} onComplete */ - onDisconnectSetWithPriority(path, value, priority, onComplete: ((status: Error | null, errorReason?: string) => any) | null) { + onDisconnectSetWithPriority(path: Path, value: any, priority: any, onComplete: ((status: Error | null, errorReason?: string) => void) | null) { const newNode = nodeFromJSON(value, priority); this.server_.onDisconnectPut(path.toString(), newNode.val(/*export=*/true), (status, errorReason) => { if (status === 'ok') { @@ -450,8 +442,8 @@ export class Repo { * @param {*} childrenToMerge * @param {?function(?Error, *=)} onComplete */ - onDisconnectUpdate(path, childrenToMerge, - onComplete: ((status: Error | null, errorReason?: string) => any) | null) { + onDisconnectUpdate(path: Path, childrenToMerge: { [k: string]: any }, + onComplete: ((status: Error | null, errorReason?: string) => void) | null) { if (isEmpty(childrenToMerge)) { log('onDisconnect().update() called with empty data. 
Don\'t do anything.'); this.callOnCompleteCallback(onComplete, 'ok'); @@ -515,7 +507,7 @@ export class Repo { if (typeof console === 'undefined') return; - let stats; + let stats: { [k: string]: any }; if (showDelta) { if (!this.statsListener_) this.statsListener_ = new StatsListener(this.stats_); @@ -525,11 +517,9 @@ export class Repo { } const longestName = Object.keys(stats).reduce( - function (previousValue, currentValue, index, array) { - return Math.max(currentValue.length, previousValue); - }, 0); + (previousValue, currentValue) => Math.max(currentValue.length, previousValue), 0); - forEach(stats, (stat, value) => { + forEach(stats, (stat: string, value: any) => { // pad stat names to be the same length (plus 2 extra spaces). for (let i = stat.length; i < longestName + 2; i++) stat += ' '; @@ -537,7 +527,7 @@ export class Repo { }); } - statsIncrementCounter(metric) { + statsIncrementCounter(metric: string) { this.stats_.incrementCounter(metric); this.statsReporter_.includeStat(metric); } @@ -551,7 +541,7 @@ export class Repo { if (this.persistentConnection_) { prefix = this.persistentConnection_.id + ':'; } - log(prefix, var_args); + log(prefix, ...var_args); } /** @@ -559,7 +549,7 @@ export class Repo { * @param {!string} status * @param {?string=} errorReason */ - callOnCompleteCallback(callback: ((status: Error | null, errorReason?: string) => any) | null, + callOnCompleteCallback(callback: ((status: Error | null, errorReason?: string) => void) | null, status: string, errorReason?: string | null) { if (callback) { exceptionGuard(function () { @@ -578,5 +568,9 @@ export class Repo { }); } } + + get database(): Database { + return this.__database || (this.__database = new Database(this)); + } } diff --git a/src/database/core/RepoInfo.ts b/src/database/core/RepoInfo.ts index ab621009fe0..20acdd25c32 100644 --- a/src/database/core/RepoInfo.ts +++ b/src/database/core/RepoInfo.ts @@ -14,61 +14,60 @@ * limitations under the License. */ -import { assert } from "../../utils/assert"; -import { forEach } from "../../utils/obj"; +import { assert } from '../../utils/assert'; +import { forEach } from '../../utils/obj'; import { PersistentStorage } from './storage/storage'; -import { CONSTANTS } from "../realtime/Constants"; +import { LONG_POLLING, WEBSOCKET } from '../realtime/Constants'; + + /** * A class that holds metadata about a Repo object - * @param {string} host Hostname portion of the url for the repo - * @param {boolean} secure Whether or not this repo is accessed over ssl - * @param {string} namespace The namespace represented by the repo - * @param {boolean} webSocketOnly Whether to prefer websockets over all other transports (used by Nest). - * @param {string=} persistenceKey Override the default session persistence storage key + * * @constructor */ export class RepoInfo { - host; - domain; - secure; - namespace; - webSocketOnly; - persistenceKey; - internalHost; - - constructor(host, secure, namespace, webSocketOnly, persistenceKey?) { + host: string; + domain: string; + internalHost: string; + + /** + * @param {string} host Hostname portion of the url for the repo + * @param {boolean} secure Whether or not this repo is accessed over ssl + * @param {string} namespace The namespace represented by the repo + * @param {boolean} webSocketOnly Whether to prefer websockets over all other transports (used by Nest). 
+ * @param {string=} persistenceKey Override the default session persistence storage key + */ + constructor(host: string,public secure: boolean, public namespace: string, + public webSocketOnly: boolean, public persistenceKey: string = '') { this.host = host.toLowerCase(); this.domain = this.host.substr(this.host.indexOf('.') + 1); - this.secure = secure; - this.namespace = namespace; - this.webSocketOnly = webSocketOnly; - this.persistenceKey = persistenceKey || ''; this.internalHost = PersistentStorage.get('host:' + host) || this.host; } - needsQueryParam() { + + needsQueryParam(): boolean { return this.host !== this.internalHost; - }; + } - isCacheableHost() { + isCacheableHost(): boolean { return this.internalHost.substr(0, 2) === 's-'; - }; + } isDemoHost() { return this.domain === 'firebaseio-demo.com'; - }; + } isCustomHost() { return this.domain !== 'firebaseio.com' && this.domain !== 'firebaseio-demo.com'; - }; + } - updateHost(newHost) { + updateHost(newHost: string) { if (newHost !== this.internalHost) { this.internalHost = newHost; if (this.isCacheableHost()) { PersistentStorage.set('host:' + this.host, this.internalHost); } } - }; + } /** * Returns the websocket URL for this repo @@ -76,13 +75,14 @@ export class RepoInfo { * @param {Object} params list * @return {string} The URL for this repo */ - connectionURL(type, params) { + connectionURL(type: string, params: { [k: string]: string }): string { assert(typeof type === 'string', 'typeof type must == string'); assert(typeof params === 'object', 'typeof params must == object'); - var connURL; - if (type === CONSTANTS.WEBSOCKET) { + + let connURL: string; + if (type === WEBSOCKET) { connURL = (this.secure ? 'wss://' : 'ws://') + this.internalHost + '/.ws?'; - } else if (type === CONSTANTS.LONG_POLLING) { + } else if (type === LONG_POLLING) { connURL = (this.secure ? 'https://' : 'http://') + this.internalHost + '/.lp?'; } else { throw new Error('Unknown connection type: ' + type); @@ -91,26 +91,26 @@ export class RepoInfo { params['ns'] = this.namespace; } - var pairs = []; + const pairs: string[] = []; - forEach(params, (key, value) => { + forEach(params, (key: string, value: string) => { pairs.push(key + '=' + value); }); return connURL + pairs.join('&'); - }; + } /** @return {string} */ - toString() { - var str = this.toURLString(); + toString(): string { + let str = this.toURLString(); if (this.persistenceKey) { str += '<' + this.persistenceKey + '>'; } return str; - }; + } /** @return {string} */ - toURLString() { + toURLString(): string { return (this.secure ? 'https://' : 'http://') + this.host; - }; + } } diff --git a/src/database/core/RepoManager.ts b/src/database/core/RepoManager.ts index a5c291045c7..e1fbd2e7950 100644 --- a/src/database/core/RepoManager.ts +++ b/src/database/core/RepoManager.ts @@ -22,11 +22,12 @@ import { parseRepoInfo } from "./util/libs/parser"; import { validateUrl } from "./util/validation"; import "./Repo_transaction"; import { Database } from '../api/Database'; +import { RepoInfo } from './RepoInfo'; /** @const {string} */ -var DATABASE_URL_OPTION = 'databaseURL'; +const DATABASE_URL_OPTION = 'databaseURL'; -let _staticInstance; +let _staticInstance: RepoManager; /** * Creates and caches Repo instances. 
@@ -45,7 +46,7 @@ export class RepoManager { */ private useRestClient_: boolean = false; - static getInstance() { + static getInstance(): RepoManager { if (!_staticInstance) { _staticInstance = new RepoManager(); } @@ -54,13 +55,13 @@ export class RepoManager { // TODO(koss): Remove these functions unless used in tests? interrupt() { - for (var repo in this.repos_) { + for (const repo in this.repos_) { this.repos_[repo].interrupt(); } } resume() { - for (var repo in this.repos_) { + for (const repo in this.repos_) { this.repos_[repo].resume(); } } @@ -68,19 +69,19 @@ export class RepoManager { /** * This function should only ever be called to CREATE a new database instance. * - * @param {!App} app + * @param {!FirebaseApp} app * @return {!Database} */ databaseFromApp(app: FirebaseApp): Database { - var dbUrl: string = app.options[DATABASE_URL_OPTION]; + const dbUrl: string = app.options[DATABASE_URL_OPTION]; if (dbUrl === undefined) { fatal("Can't determine Firebase Database URL. Be sure to include " + DATABASE_URL_OPTION + " option when calling firebase.intializeApp()."); } - var parsedUrl = parseRepoInfo(dbUrl); - var repoInfo = parsedUrl.repoInfo; + const parsedUrl = parseRepoInfo(dbUrl); + const repoInfo = parsedUrl.repoInfo; validateUrl('Invalid Firebase Database URL', 1, parsedUrl); if (!parsedUrl.path.isEmpty()) { @@ -88,7 +89,7 @@ export class RepoManager { "(not including a child path)."); } - var repo = this.createRepo(repoInfo, app); + const repo = this.createRepo(repoInfo, app); return repo.database; } @@ -98,7 +99,7 @@ export class RepoManager { * * @param {!Repo} repo */ - deleteRepo(repo) { + deleteRepo(repo: Repo) { // This should never happen... if (safeGet(this.repos_, repo.app.name) !== repo) { @@ -116,8 +117,8 @@ export class RepoManager { * @param {!FirebaseApp} app * @return {!Repo} The Repo object for the specified server / repoName. */ - createRepo(repoInfo, app: FirebaseApp): Repo { - var repo = safeGet(this.repos_, app.name); + createRepo(repoInfo: RepoInfo, app: FirebaseApp): Repo { + let repo = safeGet(this.repos_, app.name); if (repo) { fatal('FIREBASE INTERNAL ERROR: Database initialized multiple times.'); } @@ -131,7 +132,7 @@ export class RepoManager { * Forces us to use ReadonlyRestClient instead of PersistentConnection for new Repos. * @param {boolean} forceRestClient */ - forceRestClient(forceRestClient) { + forceRestClient(forceRestClient: boolean) { this.useRestClient_ = forceRestClient; } -}; // end RepoManager \ No newline at end of file +} \ No newline at end of file diff --git a/src/database/core/Repo_transaction.ts b/src/database/core/Repo_transaction.ts index 4cb71fc29c7..757e4a2b65b 100644 --- a/src/database/core/Repo_transaction.ts +++ b/src/database/core/Repo_transaction.ts @@ -14,24 +14,25 @@ * limitations under the License. 
*/ -import { assert } from "../../utils/assert"; -import { Reference } from "../api/Reference"; -import { DataSnapshot } from "../api/DataSnapshot"; -import { Path } from "./util/Path"; -import { Tree } from "./util/Tree"; -import { PRIORITY_INDEX } from "./snap/indexes/PriorityIndex"; -import { Node } from "./snap/Node"; -import { +import { assert } from '../../utils/assert'; +import { Reference } from '../api/Reference'; +import { DataSnapshot } from '../api/DataSnapshot'; +import { Path } from './util/Path'; +import { Tree } from './util/Tree'; +import { PRIORITY_INDEX } from './snap/indexes/PriorityIndex'; +import { Node } from './snap/Node'; +import { LUIDGenerator, warn, exceptionGuard, -} from "./util/util"; -import { resolveDeferredValueSnapshot } from "./util/ServerValues"; -import { isValidPriority, validateFirebaseData } from "./util/validation"; -import { contains, safeGet } from "../../utils/obj"; -import { nodeFromJSON } from "./snap/nodeFromJSON"; -import { ChildrenNode } from "./snap/ChildrenNode"; -import { Repo } from "./Repo"; +} from './util/util'; +import { resolveDeferredValueSnapshot } from './util/ServerValues'; +import { isValidPriority, validateFirebaseData } from './util/validation'; +import { contains, safeGet } from '../../utils/obj'; +import { nodeFromJSON } from './snap/nodeFromJSON'; +import { ChildrenNode } from './snap/ChildrenNode'; +import { Repo } from './Repo'; +import { Event } from './view/Event'; // TODO: This is pretty messy. Ideally, a lot of this would move into FirebaseData, or a transaction-specific // component used by FirebaseData, but it has ties to user callbacks (transaction update and onComplete) as well @@ -41,27 +42,27 @@ import { Repo } from "./Repo"; /** * @enum {number} */ -export const TransactionStatus = { +export enum TransactionStatus { // We've run the transaction and updated transactionResultData_ with the result, but it isn't currently sent to the // server. A transaction will go from RUN -> SENT -> RUN if it comes back from the server as rejected due to // mismatched hash. - RUN: 1, + RUN, - // We've run the transaction and sent it to the server and it's currently outstanding (hasn't come back as accepted - // or rejected yet). - SENT: 2, + // We've run the transaction and sent it to the server and it's currently outstanding (hasn't come back as accepted + // or rejected yet). + SENT, - // Temporary state used to mark completed transactions (whether successful or aborted). The transaction will be - // removed when we get a chance to prune completed ones. - COMPLETED: 3, + // Temporary state used to mark completed transactions (whether successful or aborted). The transaction will be + // removed when we get a chance to prune completed ones. + COMPLETED, - // Used when an already-sent transaction needs to be aborted (e.g. due to a conflicting set() call that was made). - // If it comes back as unsuccessful, we'll abort it. - SENT_NEEDS_ABORT: 4, + // Used when an already-sent transaction needs to be aborted (e.g. due to a conflicting set() call that was made). + // If it comes back as unsuccessful, we'll abort it. + SENT_NEEDS_ABORT, - // Temporary state used to mark transactions that need to be aborted. - NEEDS_ABORT: 5 -}; + // Temporary state used to mark transactions that need to be aborted. + NEEDS_ABORT +} /** * If a transaction does not succeed after 25 retries, we abort it. 
Among other things this ensure that if there's @@ -84,49 +85,50 @@ export const TransactionStatus = { * unwatcher: function(), * abortReason: ?string, * currentWriteId: !number, - * currentHash: ?string, * currentInputSnapshot: ?Node, * currentOutputSnapshotRaw: ?Node, * currentOutputSnapshotResolved: ?Node * }} */ +type Transaction = { + path: Path; + update: (a: any) => any; + onComplete: (a: Error | null, b: boolean, c: DataSnapshot | null) => void; + status: TransactionStatus; + order: number; + applyLocally: boolean; + retryCount: number; + unwatcher: () => void; + abortReason: string | null; + currentWriteId: number; + currentInputSnapshot: Node | null; + currentOutputSnapshotRaw: Node | null; + currentOutputSnapshotResolved: Node | null; +} /** * Setup the transaction data structures * @private */ -(Repo.prototype as any).transactions_init_ = function() { +(Repo.prototype as any).transactions_init_ = function () { /** * Stores queues of outstanding transactions for Firebase locations. * * @type {!Tree.>} * @private */ - this.transactionQueueTree_ = new Tree(); + this.transactionQueueTree_ = new Tree(); }; declare module './Repo' { interface Repo { - startTransaction(path: Path, transactionUpdate, onComplete, applyLocally): void + startTransaction(path: Path, + transactionUpdate: (a: any) => void, + onComplete: ((a: Error, b: boolean, c: DataSnapshot) => void) | null, + applyLocally: boolean): void } } -type Transaction = { - path: Path, - update: Function, - onComplete: Function, - status: number, - order: number, - applyLocally: boolean, - retryCount: number, - unwatcher: Function, - abortReason: any, - currentWriteId: any, - currentInputSnapshot: any, - currentOutputSnapshotRaw: any, - currentOutputSnapshotResolved: any -} - /** * Creates a new transaction, adds it to the transactions we're tracking, and sends it to the server if possible. * @@ -135,23 +137,23 @@ type Transaction = { * @param {?function(?Error, boolean, ?DataSnapshot)} onComplete Completion callback. * @param {boolean} applyLocally Whether or not to make intermediate results visible */ -(Repo.prototype as any).startTransaction = function(path: Path, - transactionUpdate: () => any, - onComplete: (Error, boolean, DataSnapshot) => any, - applyLocally: boolean) { +Repo.prototype.startTransaction = function (path: Path, + transactionUpdate: (a: any) => any, + onComplete: ((a: Error, b: boolean, c: DataSnapshot) => void) | null, + applyLocally: boolean) { this.log_('transaction on ' + path); // Add a watch to make sure we get server updates. - var valueCallback = function() { }; - var watchRef = new Reference(this, path); + const valueCallback = function () { }; + const watchRef = new Reference(this, path); watchRef.on('value', valueCallback); - var unwatcher = function() { watchRef.off('value', valueCallback); }; + const unwatcher = function () { watchRef.off('value', valueCallback); }; // Initialize transaction. - var transaction: Transaction = { - path: path, + const transaction: Transaction = { + path, update: transactionUpdate, - onComplete: onComplete, + onComplete, // One of TransactionStatus enums. status: null, @@ -166,7 +168,7 @@ type Transaction = { retryCount: 0, // Function to call to clean up our .on() listener. - unwatcher: unwatcher, + unwatcher, // Stores why a transaction was aborted. abortReason: null, @@ -182,9 +184,9 @@ type Transaction = { // Run transaction initially. 
- var currentState = this.getLatestState_(path); + const currentState = this.getLatestState_(path); transaction.currentInputSnapshot = currentState; - var newVal = transaction.update(currentState.val()); + const newVal = transaction.update(currentState.val()); if (newVal === undefined) { // Abort transaction. transaction.unwatcher(); @@ -192,7 +194,7 @@ type Transaction = { transaction.currentOutputSnapshotResolved = null; if (transaction.onComplete) { // We just set the input snapshot, so this cast should be safe - var snapshot = new DataSnapshot(transaction.currentInputSnapshot, new Reference(this, transaction.path), PRIORITY_INDEX); + const snapshot = new DataSnapshot(transaction.currentInputSnapshot, new Reference(this, transaction.path), PRIORITY_INDEX); transaction.onComplete(null, false, snapshot); } } else { @@ -200,8 +202,8 @@ type Transaction = { // Mark as run and add to our queue. transaction.status = TransactionStatus.RUN; - var queueNode = this.transactionQueueTree_.subTree(path); - var nodeQueue = queueNode.getValue() || []; + const queueNode = this.transactionQueueTree_.subTree(path); + const nodeQueue = queueNode.getValue() || []; nodeQueue.push(transaction); queueNode.setValue(nodeQueue); @@ -209,25 +211,25 @@ type Transaction = { // Update visibleData and raise events // Note: We intentionally raise events after updating all of our transaction state, since the user could // start new transactions from the event callbacks. - var priorityForNode; + let priorityForNode; if (typeof newVal === 'object' && newVal !== null && contains(newVal, '.priority')) { priorityForNode = safeGet(newVal, '.priority'); assert(isValidPriority(priorityForNode), 'Invalid priority returned by transaction. ' + 'Priority must be a valid string, finite number, server value, or null.'); } else { - var currentNode = this.serverSyncTree_.calcCompleteEventCache(path) || ChildrenNode.EMPTY_NODE; + const currentNode = this.serverSyncTree_.calcCompleteEventCache(path) || ChildrenNode.EMPTY_NODE; priorityForNode = currentNode.getPriority().val(); } priorityForNode = /** @type {null|number|string} */ (priorityForNode); - var serverValues = this.generateServerValues(); - var newNodeUnresolved = nodeFromJSON(newVal, priorityForNode); - var newNode = resolveDeferredValueSnapshot(newNodeUnresolved, serverValues); + const serverValues = this.generateServerValues(); + const newNodeUnresolved = nodeFromJSON(newVal, priorityForNode); + const newNode = resolveDeferredValueSnapshot(newNodeUnresolved, serverValues); transaction.currentOutputSnapshotRaw = newNodeUnresolved; transaction.currentOutputSnapshotResolved = newNode; transaction.currentWriteId = this.getNextWriteId_(); - var events = this.serverSyncTree_.applyUserOverwrite(path, newNode, transaction.currentWriteId, transaction.applyLocally); + const events = this.serverSyncTree_.applyUserOverwrite(path, newNode, transaction.currentWriteId, transaction.applyLocally); this.eventQueue_.raiseEventsForChangedPath(path, events); this.sendReadyTransactions_(); @@ -240,7 +242,7 @@ type Transaction = { * @return {Node} * @private */ -(Repo.prototype as any).getLatestState_ = function(path: Path, excludeSets: [number]): Node { +(Repo.prototype as any).getLatestState_ = function (path: Path, excludeSets?: number[]): Node { return this.serverSyncTree_.calcCompleteEventCache(path, excludeSets) || ChildrenNode.EMPTY_NODE; }; @@ -252,33 +254,28 @@ type Transaction = { * Externally it's called with no arguments, but it calls itself recursively with a particular * 
transactionQueueTree node to recurse through the tree. * - * @param {Tree.>=} opt_node transactionQueueTree node to start at. + * @param {Tree.>=} node transactionQueueTree node to start at. * @private */ -(Repo.prototype as any).sendReadyTransactions_ = function(node?) { - var node = /** @type {!Tree.>} */ (node || this.transactionQueueTree_); - +(Repo.prototype as any).sendReadyTransactions_ = function (node: Tree = this.transactionQueueTree_) { // Before recursing, make sure any completed transactions are removed. if (!node) { this.pruneCompletedTransactionsBelowNode_(node); } if (node.getValue() !== null) { - var queue = this.buildTransactionQueue_(node); + const queue = this.buildTransactionQueue_(node); assert(queue.length > 0, 'Sending zero length transaction queue'); - var allRun = queue.every(function(transaction) { - return transaction.status === TransactionStatus.RUN; - }); + const allRun = queue.every((transaction: Transaction) => transaction.status === TransactionStatus.RUN); // If they're all run (and not sent), we can send them. Else, we must wait. if (allRun) { this.sendTransactionQueue_(node.path(), queue); } } else if (node.hasChildren()) { - var self = this; - node.forEachChild(function(childNode) { - self.sendReadyTransactions_(childNode); + node.forEachChild((childNode) => { + this.sendReadyTransactions_(childNode); }); } }; @@ -291,64 +288,63 @@ type Transaction = { * @param {!Array.} queue Queue of transactions under the specified location. * @private */ -(Repo.prototype as any).sendTransactionQueue_ = function(path: Path, queue: Array) { +(Repo.prototype as any).sendTransactionQueue_ = function (path: Path, queue: Array) { // Mark transactions as sent and increment retry count! - var setsToIgnore = queue.map(function(txn) { return txn.currentWriteId; }); - var latestState = this.getLatestState_(path, setsToIgnore); - var snapToSend = latestState; - var latestHash = latestState.hash(); - for (var i = 0; i < queue.length; i++) { - var txn = queue[i]; + const setsToIgnore = queue.map(function (txn) { return txn.currentWriteId; }); + const latestState = this.getLatestState_(path, setsToIgnore); + let snapToSend = latestState; + const latestHash = latestState.hash(); + for (let i = 0; i < queue.length; i++) { + const txn = queue[i]; assert(txn.status === TransactionStatus.RUN, 'tryToSendTransactionQueue_: items in queue should all be run.'); txn.status = TransactionStatus.SENT; txn.retryCount++; - var relativePath = Path.relativePath(path, txn.path); + const relativePath = Path.relativePath(path, txn.path); // If we've gotten to this point, the output snapshot must be defined. snapToSend = snapToSend.updateChild(relativePath, /**@type {!Node} */ (txn.currentOutputSnapshotRaw)); } - var dataToSend = snapToSend.val(true); - var pathToSend = path; + const dataToSend = snapToSend.val(true); + const pathToSend = path; // Send the put. - var self = this; - this.server_.put(pathToSend.toString(), dataToSend, function(status) { - self.log_('transaction put response', {path: pathToSend.toString(), status: status}); + this.server_.put(pathToSend.toString(), dataToSend, (status: string) => { + this.log_('transaction put response', {path: pathToSend.toString(), status}); - var events = []; + let events: Event[] = []; if (status === 'ok') { // Queue up the callbacks and fire them after cleaning up all of our transaction state, since // the callback could trigger more transactions or sets. 
- var callbacks = []; - for (i = 0; i < queue.length; i++) { + const callbacks = []; + for (let i = 0; i < queue.length; i++) { queue[i].status = TransactionStatus.COMPLETED; - events = events.concat(self.serverSyncTree_.ackUserWrite(queue[i].currentWriteId)); + events = events.concat(this.serverSyncTree_.ackUserWrite(queue[i].currentWriteId)); if (queue[i].onComplete) { // We never unset the output snapshot, and given that this transaction is complete, it should be set - var node = /** @type {!Node} */ (queue[i].currentOutputSnapshotResolved); - var ref = new Reference(self, queue[i].path); - var snapshot = new DataSnapshot(node, ref, PRIORITY_INDEX); + const node = queue[i].currentOutputSnapshotResolved as Node; + const ref = new Reference(this, queue[i].path); + const snapshot = new DataSnapshot(node, ref, PRIORITY_INDEX); callbacks.push(queue[i].onComplete.bind(null, null, true, snapshot)); } queue[i].unwatcher(); } // Now remove the completed transactions. - self.pruneCompletedTransactionsBelowNode_(self.transactionQueueTree_.subTree(path)); + this.pruneCompletedTransactionsBelowNode_(this.transactionQueueTree_.subTree(path)); // There may be pending transactions that we can now send. - self.sendReadyTransactions_(); + this.sendReadyTransactions_(); - self.eventQueue_.raiseEventsForChangedPath(path, events); + this.eventQueue_.raiseEventsForChangedPath(path, events); // Finally, trigger onComplete callbacks. - for (i = 0; i < callbacks.length; i++) { + for (let i = 0; i < callbacks.length; i++) { exceptionGuard(callbacks[i]); } } else { // transactions are no longer sent. Update their status appropriately. if (status === 'datastale') { - for (i = 0; i < queue.length; i++) { + for (let i = 0; i < queue.length; i++) { if (queue[i].status === TransactionStatus.SENT_NEEDS_ABORT) queue[i].status = TransactionStatus.NEEDS_ABORT; else @@ -356,13 +352,13 @@ type Transaction = { } } else { warn('transaction at ' + pathToSend.toString() + ' failed: ' + status); - for (i = 0; i < queue.length; i++) { + for (let i = 0; i < queue.length; i++) { queue[i].status = TransactionStatus.NEEDS_ABORT; queue[i].abortReason = status; } } - self.rerunTransactions_(path); + this.rerunTransactions_(path); } }, latestHash); }; @@ -379,11 +375,11 @@ type Transaction = { * @return {!Path} The rootmost path that was affected by rerunning transactions. * @private */ -(Repo.prototype as any).rerunTransactions_ = function(changedPath: Path) { - var rootMostTransactionNode = this.getAncestorTransactionNode_(changedPath); - var path = rootMostTransactionNode.path(); +(Repo.prototype as any).rerunTransactions_ = function (changedPath: Path): Path { + const rootMostTransactionNode = this.getAncestorTransactionNode_(changedPath); + const path = rootMostTransactionNode.path(); - var queue = this.buildTransactionQueue_(rootMostTransactionNode); + const queue = this.buildTransactionQueue_(rootMostTransactionNode); this.rerunTransactionQueue_(queue, path); return path; @@ -397,22 +393,22 @@ type Transaction = { * @param {!Path} path The path the queue is for. * @private */ -(Repo.prototype as any).rerunTransactionQueue_ = function(queue: Array, path: Path) { +(Repo.prototype as any).rerunTransactionQueue_ = function (queue: Array, path: Path) { if (queue.length === 0) { return; // Nothing to do! } // Queue up the callbacks and fire them after cleaning up all of our transaction state, since // the callback could trigger more transactions or sets. 
- var callbacks = []; - var events = []; + const callbacks = []; + let events: Event[] = []; // Ignore all of the sets we're going to re-run. - var txnsToRerun = queue.filter(function(q) { return q.status === TransactionStatus.RUN; }); - var setsToIgnore = txnsToRerun.map(function(q) { return q.currentWriteId; }); - for (var i = 0; i < queue.length; i++) { - var transaction = queue[i]; - var relativePath = Path.relativePath(path, transaction.path); - var abortTransaction = false, abortReason; + const txnsToRerun = queue.filter(function (q) { return q.status === TransactionStatus.RUN; }); + const setsToIgnore = txnsToRerun.map(function (q) { return q.currentWriteId; }); + for (let i = 0; i < queue.length; i++) { + const transaction = queue[i]; + const relativePath = Path.relativePath(path, transaction.path); + let abortTransaction = false, abortReason; assert(relativePath !== null, 'rerunTransactionsUnderNode_: relativePath should not be null.'); if (transaction.status === TransactionStatus.NEEDS_ABORT) { @@ -426,22 +422,22 @@ type Transaction = { events = events.concat(this.serverSyncTree_.ackUserWrite(transaction.currentWriteId, true)); } else { // This code reruns a transaction - var currentNode = this.getLatestState_(transaction.path, setsToIgnore); + const currentNode = this.getLatestState_(transaction.path, setsToIgnore); transaction.currentInputSnapshot = currentNode; - var newData = queue[i].update(currentNode.val()); + const newData = queue[i].update(currentNode.val()); if (newData !== undefined) { validateFirebaseData('transaction failed: Data returned ', newData, transaction.path); - var newDataNode = nodeFromJSON(newData); - var hasExplicitPriority = (typeof newData === 'object' && newData != null && + let newDataNode = nodeFromJSON(newData); + const hasExplicitPriority = (typeof newData === 'object' && newData != null && contains(newData, '.priority')); if (!hasExplicitPriority) { // Keep the old priority if there wasn't a priority explicitly specified. newDataNode = newDataNode.updatePriority(currentNode.getPriority()); } - var oldWriteId = transaction.currentWriteId; - var serverValues = this.generateServerValues(); - var newNodeResolved = resolveDeferredValueSnapshot(newDataNode, serverValues); + const oldWriteId = transaction.currentWriteId; + const serverValues = this.generateServerValues(); + const newNodeResolved = resolveDeferredValueSnapshot(newDataNode, serverValues); transaction.currentOutputSnapshotRaw = newDataNode; transaction.currentOutputSnapshotResolved = newNodeResolved; @@ -468,16 +464,16 @@ type Transaction = { // Removing a listener can trigger pruning which can muck with mergedData/visibleData (as it prunes data). // So defer the unwatcher until we're done. 
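For context on the rerun path above: queue[i].update(...) is the user-supplied transaction function, and a return value of undefined is what produces the 'nodata' abort handled below, after which onComplete fires with committed === false and the last input snapshot. A minimal sketch of such an update function; the name and the increment logic are illustrative only.

// Hypothetical transaction update function, as invoked via queue[i].update(currentNode.val()).
const increment = (currentValue: any) => {
  if (currentValue === null) {
    return undefined; // nothing to update: aborts with reason 'nodata'
  }
  return currentValue + 1; // returned data is validated and resolved before resending
};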
- (function(unwatcher) { + (function (unwatcher) { setTimeout(unwatcher, Math.floor(0)); })(queue[i].unwatcher); if (queue[i].onComplete) { if (abortReason === 'nodata') { - var ref = new Reference(this, queue[i].path); + const ref = new Reference(this, queue[i].path); // We set this field immediately, so it's safe to cast to an actual snapshot - var lastInput = /** @type {!Node} */ (queue[i].currentInputSnapshot); - var snapshot = new DataSnapshot(lastInput, ref, PRIORITY_INDEX); + const lastInput = /** @type {!Node} */ (queue[i].currentInputSnapshot); + const snapshot = new DataSnapshot(lastInput, ref, PRIORITY_INDEX); callbacks.push(queue[i].onComplete.bind(null, null, false, snapshot)); } else { callbacks.push(queue[i].onComplete.bind(null, new Error(abortReason), false, null)); @@ -490,7 +486,7 @@ type Transaction = { this.pruneCompletedTransactionsBelowNode_(this.transactionQueueTree_); // Now fire callbacks, now that we're in a good, known state. - for (i = 0; i < callbacks.length; i++) { + for (let i = 0; i < callbacks.length; i++) { exceptionGuard(callbacks[i]); } @@ -507,11 +503,11 @@ type Transaction = { * @return {!Tree.>} The rootmost node with a transaction. * @private */ -(Repo.prototype as any).getAncestorTransactionNode_ = function(path: Path): Tree { - var front; +(Repo.prototype as any).getAncestorTransactionNode_ = function (path: Path): Tree { + let front; // Start at the root and walk deeper into the tree towards path until we find a node with pending transactions. - var transactionNode = this.transactionQueueTree_; + let transactionNode = this.transactionQueueTree_; while ((front = path.getFront()) !== null && transactionNode.getValue() === null) { transactionNode = transactionNode.subTree(front); path = path.popFront(); @@ -528,13 +524,13 @@ type Transaction = { * @return {Array.} The generated queue. * @private */ -(Repo.prototype as any).buildTransactionQueue_ = function(transactionNode: Tree): Array { +(Repo.prototype as any).buildTransactionQueue_ = function (transactionNode: Tree): Array { // Walk any child transaction queues and aggregate them into a single queue. - var transactionQueue = []; + const transactionQueue: Transaction[] = []; this.aggregateTransactionQueuesForNode_(transactionNode, transactionQueue); // Sort them by the order the transactions were created. 
- transactionQueue.sort(function(a, b) { return a.order - b.order; }); + transactionQueue.sort(function (a, b) { return a.order - b.order; }); return transactionQueue; }; @@ -544,17 +540,17 @@ type Transaction = { * @param {Array.} queue * @private */ -(Repo.prototype as any).aggregateTransactionQueuesForNode_ = function(node: Tree, queue: Array) { - var nodeQueue = node.getValue(); +(Repo.prototype as any).aggregateTransactionQueuesForNode_ = function (node: Tree, + queue: Array) { + const nodeQueue = node.getValue(); if (nodeQueue !== null) { - for (var i = 0; i < nodeQueue.length; i++) { + for (let i = 0; i < nodeQueue.length; i++) { queue.push(nodeQueue[i]); } } - var self = this; - node.forEachChild(function(child) { - self.aggregateTransactionQueuesForNode_(child, queue); + node.forEachChild((child) => { + this.aggregateTransactionQueuesForNode_(child, queue); }); }; @@ -565,11 +561,11 @@ type Transaction = { * @param {!Tree.>} node * @private */ -(Repo.prototype as any).pruneCompletedTransactionsBelowNode_ = function(node: Tree) { - var queue = node.getValue(); +(Repo.prototype as any).pruneCompletedTransactionsBelowNode_ = function (node: Tree) { + const queue = node.getValue(); if (queue) { - var to = 0; - for (var from = 0; from < queue.length; from++) { + let to = 0; + for (let from = 0; from < queue.length; from++) { if (queue[from].status !== TransactionStatus.COMPLETED) { queue[to] = queue[from]; to++; @@ -579,9 +575,8 @@ type Transaction = { node.setValue(queue.length > 0 ? queue : null); } - var self = this; - node.forEachChild(function(childNode) { - self.pruneCompletedTransactionsBelowNode_(childNode); + node.forEachChild((childNode) => { + this.pruneCompletedTransactionsBelowNode_(childNode); }); }; @@ -594,20 +589,19 @@ type Transaction = { * @return {!Path} * @private */ -(Repo.prototype as any).abortTransactions_ = function(path: Path) { - var affectedPath = this.getAncestorTransactionNode_(path).path(); +(Repo.prototype as any).abortTransactions_ = function (path: Path): Path { + const affectedPath = this.getAncestorTransactionNode_(path).path(); - var transactionNode = this.transactionQueueTree_.subTree(path); - var self = this; + const transactionNode = this.transactionQueueTree_.subTree(path); - transactionNode.forEachAncestor(function(node) { - self.abortTransactionsOnNode_(node); + transactionNode.forEachAncestor((node: Tree) => { + this.abortTransactionsOnNode_(node); }); this.abortTransactionsOnNode_(transactionNode); - transactionNode.forEachDescendant(function(node) { - self.abortTransactionsOnNode_(node); + transactionNode.forEachDescendant((node: Tree) => { + this.abortTransactionsOnNode_(node); }); return affectedPath; @@ -620,19 +614,19 @@ type Transaction = { * @param {!Tree.>} node Node to abort transactions for. * @private */ -(Repo.prototype as any).abortTransactionsOnNode_ = function(node: Tree) { - var queue = node.getValue(); +(Repo.prototype as any).abortTransactionsOnNode_ = function (node: Tree) { + const queue = node.getValue(); if (queue !== null) { // Queue up the callbacks and fire them after cleaning up all of our transaction state, since // the callback could trigger more transactions or sets. - var callbacks = []; + const callbacks = []; // Go through queue. Any already-sent transactions must be marked for abort, while the unsent ones // can be immediately aborted and removed. 
- var events = []; - var lastSent = -1; - for (var i = 0; i < queue.length; i++) { + let events: Event[] = []; + let lastSent = -1; + for (let i = 0; i < queue.length; i++) { if (queue[i].status === TransactionStatus.SENT_NEEDS_ABORT) { // Already marked. No action needed. } else if (queue[i].status === TransactionStatus.SENT) { @@ -648,7 +642,7 @@ type Transaction = { queue[i].unwatcher(); events = events.concat(this.serverSyncTree_.ackUserWrite(queue[i].currentWriteId, true)); if (queue[i].onComplete) { - var snapshot = null; + const snapshot: DataSnapshot | null = null; callbacks.push(queue[i].onComplete.bind(null, new Error('set'), false, snapshot)); } } @@ -663,7 +657,7 @@ type Transaction = { // Now fire the callbacks. this.eventQueue_.raiseEventsForChangedPath(node.path(), events); - for (i = 0; i < callbacks.length; i++) { + for (let i = 0; i < callbacks.length; i++) { exceptionGuard(callbacks[i]); } } diff --git a/src/database/core/ServerActions.ts b/src/database/core/ServerActions.ts index 849cb3c01db..58dc438616b 100644 --- a/src/database/core/ServerActions.ts +++ b/src/database/core/ServerActions.ts @@ -22,7 +22,7 @@ import { Query } from '../api/Query'; * * @interface */ -export interface ServerActions { +export abstract class ServerActions { /** * @param {!Query} query @@ -30,7 +30,8 @@ export interface ServerActions { * @param {?number} tag * @param {function(string, *)} onComplete */ - listen(query: Query, currentHashFn: () => string, tag: number | null, onComplete: (a: string, b: any) => any); + abstract listen(query: Query, currentHashFn: () => string, tag: number | null, + onComplete: (a: string, b: any) => void): void; /** * Remove a listen. @@ -38,7 +39,7 @@ export interface ServerActions { * @param {!Query} query * @param {?number} tag */ - unlisten(query: Query, tag: number | null); + abstract unlisten(query: Query, tag: number | null): void; /** * @param {string} pathString @@ -46,7 +47,7 @@ export interface ServerActions { * @param {function(string, string)=} onComplete * @param {string=} hash */ - put(pathString: string, data: any, onComplete?: (a: string, b: string) => any, hash?: string); + put(pathString: string, data: any, onComplete?: (a: string, b: string) => void, hash?: string) { } /** * @param {string} pathString @@ -54,37 +55,37 @@ export interface ServerActions { * @param {function(string, ?string)} onComplete * @param {string=} hash */ - merge(pathString: string, data: any, onComplete: (a: string, b: string | null) => any, hash?: string); + merge(pathString: string, data: any, onComplete: (a: string, b: string | null) => void, hash?: string) { } /** * Refreshes the auth token for the current connection. 
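ServerActions is now an abstract class rather than an interface, so a transport that only supports a subset of operations can extend it and inherit the empty bodies for everything else, while listen and unlisten stay abstract. A hypothetical subclass sketch; the class name and its behavior are made up, and the import paths merely assume the sketch sits next to ServerActions.ts.

// Hypothetical: only the abstract members must be implemented; put/merge/refreshAuthToken,
// the onDisconnect* methods and reportStats fall back to the inherited no-op bodies.
import { ServerActions } from './ServerActions';
import { Query } from '../api/Query';

class NoopServerActions extends ServerActions {
  listen(query: Query, currentHashFn: () => string, tag: number | null,
         onComplete: (a: string, b: any) => void): void {
    onComplete('ok', null); // pretend the listen completed immediately
  }
  unlisten(query: Query, tag: number | null): void {}
}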
* @param {string} token The authentication token */ - refreshAuthToken(token: string); + refreshAuthToken(token: string) { } /** * @param {string} pathString * @param {*} data * @param {function(string, string)=} onComplete */ - onDisconnectPut(pathString: string, data: any, onComplete?: (a: string, b: string) => any); + onDisconnectPut(pathString: string, data: any, onComplete?: (a: string, b: string) => void) { } /** * @param {string} pathString * @param {*} data * @param {function(string, string)=} onComplete */ - onDisconnectMerge(pathString: string, data: any, onComplete?: (a: string, b: string) => any); + onDisconnectMerge(pathString: string, data: any, onComplete?: (a: string, b: string) => void) { } /** * @param {string} pathString * @param {function(string, string)=} onComplete */ - onDisconnectCancel(pathString: string, onComplete?: (a: string, b: string) => any); + onDisconnectCancel(pathString: string, onComplete?: (a: string, b: string) => void) { } /** * @param {Object.} stats */ - reportStats(stats: { [k: string]: any }); + reportStats(stats: { [k: string]: any }) { } } diff --git a/src/database/core/SnapshotHolder.ts b/src/database/core/SnapshotHolder.ts index 4ea8cf96556..58d94f2e2b5 100644 --- a/src/database/core/SnapshotHolder.ts +++ b/src/database/core/SnapshotHolder.ts @@ -15,6 +15,8 @@ */ import { ChildrenNode } from "./snap/ChildrenNode"; +import { Path } from './util/Path'; +import { Node } from './snap/Node'; /** * Mutable object which basically just stores a reference to the "latest" immutable snapshot. @@ -22,14 +24,13 @@ import { ChildrenNode } from "./snap/ChildrenNode"; * @constructor */ export class SnapshotHolder { - private rootNode_; - constructor() { - this.rootNode_ = ChildrenNode.EMPTY_NODE; - } - getNode(path) { + private rootNode_: Node = ChildrenNode.EMPTY_NODE; + + getNode(path: Path): Node { return this.rootNode_.getChild(path); } - updateSnapshot(path, newSnapshotNode) { + + updateSnapshot(path: Path, newSnapshotNode: Node) { this.rootNode_ = this.rootNode_.updateChild(path, newSnapshotNode); } } diff --git a/src/database/core/SparseSnapshotTree.ts b/src/database/core/SparseSnapshotTree.ts index d043f5caf1d..83b383339bd 100644 --- a/src/database/core/SparseSnapshotTree.ts +++ b/src/database/core/SparseSnapshotTree.ts @@ -17,6 +17,7 @@ import { Path } from "./util/Path"; import { PRIORITY_INDEX } from "./snap/indexes/PriorityIndex"; import { CountedSet } from "./util/CountedSet"; +import { Node } from './snap/Node'; /** * Helper class to store a sparse set of snapshots. @@ -24,35 +25,32 @@ import { CountedSet } from "./util/CountedSet"; * @constructor */ export class SparseSnapshotTree { - value_; - children_; - constructor() { - /** - * @private - * @type {Node} - */ - this.value_ = null; + /** + * @private + * @type {Node} + */ + private value_: Node | null = null; + + /** + * @private + * @type {CountedSet} + */ + private children_: CountedSet | null = null; - /** - * @private - * @type {CountedSet} - */ - this.children_ = null; - }; /** * Gets the node stored at the given path if one exists. * * @param {!Path} path Path to look up snapshot for. * @return {?Node} The retrieved node, or null. 
*/ - find(path) { + find(path: Path): Node | null { if (this.value_ != null) { return this.value_.getChild(path); } else if (!path.isEmpty() && this.children_ != null) { - var childKey = path.getFront(); + const childKey = path.getFront(); path = path.popFront(); if (this.children_.contains(childKey)) { - var childTree = this.children_.get(childKey); + const childTree = this.children_.get(childKey) as SparseSnapshotTree; return childTree.find(path); } else { return null; @@ -60,8 +58,7 @@ export class SparseSnapshotTree { } else { return null; } - }; - + } /** * Stores the given node at the specified path. If there is already a node @@ -70,7 +67,7 @@ export class SparseSnapshotTree { * @param {!Path} path Path to look up snapshot for. * @param {!Node} data The new data, or null. */ - remember(path, data) { + remember(path: Path, data: Node) { if (path.isEmpty()) { this.value_ = data; this.children_ = null; @@ -78,20 +75,19 @@ export class SparseSnapshotTree { this.value_ = this.value_.updateChild(path, data); } else { if (this.children_ == null) { - this.children_ = new CountedSet(); + this.children_ = new CountedSet(); } - var childKey = path.getFront(); + const childKey = path.getFront(); if (!this.children_.contains(childKey)) { this.children_.add(childKey, new SparseSnapshotTree()); } - var child = this.children_.get(childKey); + const child = this.children_.get(childKey) as SparseSnapshotTree; path = path.popFront(); child.remember(path, data); } - }; - + } /** * Purge the data at path from the cache. @@ -99,7 +95,7 @@ export class SparseSnapshotTree { * @param {!Path} path Path to look up snapshot for. * @return {boolean} True if this node should now be removed. */ - forget(path) { + forget(path: Path): boolean { if (path.isEmpty()) { this.value_ = null; this.children_ = null; @@ -110,10 +106,10 @@ export class SparseSnapshotTree { // We're trying to forget a node that doesn't exist return false; } else { - var value = this.value_; + const value = this.value_; this.value_ = null; - var self = this; + const self = this; value.forEachChild(PRIORITY_INDEX, function(key, tree) { self.remember(new Path(key), tree); }); @@ -121,10 +117,10 @@ export class SparseSnapshotTree { return this.forget(path); } } else if (this.children_ !== null) { - var childKey = path.getFront(); + const childKey = path.getFront(); path = path.popFront(); if (this.children_.contains(childKey)) { - var safeToRemove = this.children_.get(childKey).forget(path); + const safeToRemove = (this.children_.get(childKey) as SparseSnapshotTree).forget(path); if (safeToRemove) { this.children_.remove(childKey); } @@ -141,7 +137,7 @@ export class SparseSnapshotTree { return true; } } - }; + } /** * Recursively iterates through all of the stored tree and calls the @@ -150,28 +146,27 @@ export class SparseSnapshotTree { * @param {!Path} prefixPath Path to look up node for. * @param {!Function} func The function to invoke for each tree. */ - forEachTree(prefixPath, func) { + forEachTree(prefixPath: Path, func: (a: Path, b: Node) => any) { if (this.value_ !== null) { func(prefixPath, this.value_); } else { - this.forEachChild(function(key, tree) { - var path = new Path(prefixPath.toString() + '/' + key); + this.forEachChild((key, tree) => { + const path = new Path(prefixPath.toString() + '/' + key); tree.forEachTree(path, func); }); } - }; - + } /** * Iterates through each immediate child and triggers the callback. * * @param {!Function} func The function to invoke for each child. 
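Taken together, remember/find/forget store whole Nodes at sparse paths and serve reads from the deepest remembered ancestor. A usage sketch with made-up paths and values, assuming it sits alongside SparseSnapshotTree.ts in core/:

import { SparseSnapshotTree } from './SparseSnapshotTree';
import { Path } from './util/Path';
import { nodeFromJSON } from './snap/nodeFromJSON';

const tree = new SparseSnapshotTree();
tree.remember(new Path('users/alice'), nodeFromJSON({ online: true }));

tree.find(new Path('users/alice/online')); // the Node for `true`, via the remembered ancestor
tree.find(new Path('users/bob'));          // null, nothing remembered there

tree.forget(new Path('users/alice'));      // true: this node should now be removed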
*/ - forEachChild(func) { + forEachChild(func: (a: string, b: SparseSnapshotTree) => void) { if (this.children_ !== null) { - this.children_.each(function(key, tree) { + this.children_.each((key, tree) => { func(key, tree); }); } - }; + } } diff --git a/src/database/core/SyncPoint.ts b/src/database/core/SyncPoint.ts index e7c2c4cf5d3..1798b0bd82a 100644 --- a/src/database/core/SyncPoint.ts +++ b/src/database/core/SyncPoint.ts @@ -14,14 +14,22 @@ * limitations under the License. */ -import { CacheNode } from "./view/CacheNode"; -import { ChildrenNode } from "./snap/ChildrenNode"; -import { assert } from "../../utils/assert"; -import { isEmpty, forEach, findValue, safeGet } from "../../utils/obj"; -import { ViewCache } from "./view/ViewCache"; -import { View } from "./view/View"; - -let __referenceConstructor; +import { CacheNode } from './view/CacheNode'; +import { ChildrenNode } from './snap/ChildrenNode'; +import { assert } from '../../utils/assert'; +import { isEmpty, forEach, findValue, safeGet } from '../../utils/obj'; +import { ViewCache } from './view/ViewCache'; +import { View } from './view/View'; +import { Operation } from './operation/Operation'; +import { WriteTreeRef } from './WriteTree'; +import { Query } from '../api/Query'; +import { EventRegistration } from './view/EventRegistration'; +import { Node } from './snap/Node'; +import { Path } from './util/Path'; +import { Event } from './view/Event'; +import { Reference, ReferenceConstructor } from '../api/Reference'; + +let __referenceConstructor: ReferenceConstructor; /** * SyncPoint represents a single location in a SyncTree with 1 or more event registrations, meaning we need to @@ -34,34 +42,33 @@ let __referenceConstructor; * applyUserOverwrite, etc.) */ export class SyncPoint { - static set __referenceConstructor(val) { - assert(!__referenceConstructor, '__referenceConstructor has already been defined'); + static set __referenceConstructor(val: ReferenceConstructor) { + assert(!__referenceConstructor, '__referenceConstructor has already been defined'); __referenceConstructor = val; } + static get __referenceConstructor() { assert(__referenceConstructor, 'Reference.ts has not been loaded'); return __referenceConstructor; } - views_: object; - constructor() { - /** - * The Views being tracked at this location in the tree, stored as a map where the key is a - * queryId and the value is the View for that query. - * - * NOTE: This list will be quite small (usually 1, but perhaps 2 or 3; any more is an odd use case). - * - * @type {!Object.} - * @private - */ - this.views_ = { }; - }; + /** + * The Views being tracked at this location in the tree, stored as a map where the key is a + * queryId and the value is the View for that query. + * + * NOTE: This list will be quite small (usually 1, but perhaps 2 or 3; any more is an odd use case). 
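The static __referenceConstructor accessor above lets SyncPoint create Reference objects without importing the Reference class directly, which would otherwise form a circular import. Presumably Reference.ts performs a one-time injection along these lines when it loads; this is an inference from the asserts ('Reference.ts has not been loaded'), not something shown in this hunk.

// Assumed injection, performed once by Reference.ts (hypothetical):
SyncPoint.__referenceConstructor = Reference;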
+ * + * @type {!Object.} + * @private + */ + private views_: { [k: string]: View } = {}; + /** * @return {boolean} */ - isEmpty() { + isEmpty(): boolean { return isEmpty(this.views_); - }; + } /** * @@ -70,22 +77,23 @@ export class SyncPoint { * @param {?Node} optCompleteServerCache * @return {!Array.} */ - applyOperation(operation, writesCache, optCompleteServerCache) { - var queryId = operation.source.queryId; + applyOperation(operation: Operation, writesCache: WriteTreeRef, + optCompleteServerCache: Node | null): Event[] { + const queryId = operation.source.queryId; if (queryId !== null) { - var view = safeGet(this.views_, queryId); + const view = safeGet(this.views_, queryId); assert(view != null, 'SyncTree gave us an op for an invalid query.'); return view.applyOperation(operation, writesCache, optCompleteServerCache); } else { - var events = []; + let events: Event[] = []; - forEach(this.views_, function(key, view) { + forEach(this.views_, function (key: string, view: View) { events = events.concat(view.applyOperation(operation, writesCache, optCompleteServerCache)); }); return events; } - }; + } /** * Add an event callback for the specified query. @@ -97,13 +105,14 @@ export class SyncPoint { * @param {boolean} serverCacheComplete * @return {!Array.} Events to raise. */ - addEventRegistration(query, eventRegistration, writesCache, serverCache, serverCacheComplete) { - var queryId = query.queryIdentifier(); - var view = safeGet(this.views_, queryId); + addEventRegistration(query: Query, eventRegistration: EventRegistration, writesCache: WriteTreeRef, + serverCache: Node | null, serverCacheComplete: boolean): Event[] { + const queryId = query.queryIdentifier(); + let view = safeGet(this.views_, queryId); if (!view) { // TODO: make writesCache take flag for complete server node - var eventCache = writesCache.calcCompleteEventCache(serverCacheComplete ? serverCache : null); - var eventCacheComplete = false; + let eventCache = writesCache.calcCompleteEventCache(serverCacheComplete ? serverCache : null); + let eventCacheComplete = false; if (eventCache) { eventCacheComplete = true; } else if (serverCache instanceof ChildrenNode) { @@ -113,9 +122,9 @@ export class SyncPoint { eventCache = ChildrenNode.EMPTY_NODE; eventCacheComplete = false; } - var viewCache = new ViewCache( - new CacheNode(/** @type {!Node} */ (eventCache), eventCacheComplete, false), - new CacheNode(/** @type {!Node} */ (serverCache), serverCacheComplete, false) + const viewCache = new ViewCache( + new CacheNode(/** @type {!Node} */ (eventCache), eventCacheComplete, false), + new CacheNode(/** @type {!Node} */ (serverCache), serverCacheComplete, false) ); view = new View(query, viewCache); this.views_[queryId] = view; @@ -124,7 +133,7 @@ export class SyncPoint { // This is guaranteed to exist now, we just created anything that was missing view.addEventRegistration(eventRegistration); return view.getInitialEvents(eventRegistration); - }; + } /** * Remove event callback(s). Return cancelEvents if a cancelError is specified. @@ -137,15 +146,16 @@ export class SyncPoint { * @param {Error=} cancelError If a cancelError is provided, appropriate cancel events will be returned. 
* @return {{removed:!Array., events:!Array.}} removed queries and any cancel events */ - removeEventRegistration(query, eventRegistration, cancelError) { - var queryId = query.queryIdentifier(); - var removed = []; - var cancelEvents = []; - var hadCompleteView = this.hasCompleteView(); + removeEventRegistration(query: Query, eventRegistration: EventRegistration | null, + cancelError?: Error): { removed: Query[], events: Event[] } { + const queryId = query.queryIdentifier(); + const removed: Query[] = []; + let cancelEvents: Event[] = []; + const hadCompleteView = this.hasCompleteView(); if (queryId === 'default') { // When you do ref.off(...), we search all views for the registration to remove. - var self = this; - forEach(this.views_, function(viewQueryId, view) { + const self = this; + forEach(this.views_, function (viewQueryId: string, view: View) { cancelEvents = cancelEvents.concat(view.removeEventRegistration(eventRegistration, cancelError)); if (view.isEmpty()) { delete self.views_[viewQueryId]; @@ -158,7 +168,7 @@ export class SyncPoint { }); } else { // remove the callback from the specific view. - var view = safeGet(this.views_, queryId); + const view = safeGet(this.views_, queryId); if (view) { cancelEvents = cancelEvents.concat(view.removeEventRegistration(eventRegistration, cancelError)); if (view.isEmpty()) { @@ -178,68 +188,66 @@ export class SyncPoint { } return {removed: removed, events: cancelEvents}; - }; + } /** * @return {!Array.} */ - getQueryViews() { + getQueryViews(): View[] { const values = Object.keys(this.views_) .map(key => this.views_[key]); - return values.filter(function(view) { + return values.filter(function (view) { return !view.getQuery().getQueryParams().loadsAllData(); }); - }; + } /** * * @param {!Path} path The path to the desired complete snapshot * @return {?Node} A complete cache, if it exists */ - getCompleteServerCache(path) { - var serverCache = null; - forEach(this.views_, (key, view) => { + getCompleteServerCache(path: Path): Node | null { + let serverCache: Node | null = null; + forEach(this.views_, (key: string, view: View) => { serverCache = serverCache || view.getCompleteServerCache(path); }); return serverCache; - }; + } /** * @param {!Query} query * @return {?View} */ - viewForQuery(query) { - var params = query.getQueryParams(); + viewForQuery(query: Query): View | null { + const params = query.getQueryParams(); if (params.loadsAllData()) { return this.getCompleteView(); } else { - var queryId = query.queryIdentifier(); + const queryId = query.queryIdentifier(); return safeGet(this.views_, queryId); } - }; + } /** * @param {!Query} query * @return {boolean} */ - viewExistsForQuery(query) { + viewExistsForQuery(query: Query): boolean { return this.viewForQuery(query) != null; - }; + } /** * @return {boolean} */ - hasCompleteView() { + hasCompleteView(): boolean { return this.getCompleteView() != null; - }; + } /** * @return {?View} */ - getCompleteView() { - var completeView = findValue(this.views_, function(view) { - return view.getQuery().getQueryParams().loadsAllData(); - }); + getCompleteView(): View | null { + const completeView = findValue(this.views_, (view: View) => view.getQuery().getQueryParams().loadsAllData()); return completeView || null; - }; + } } diff --git a/src/database/core/SyncTree.ts b/src/database/core/SyncTree.ts index 428af442922..d933ba431f7 100644 --- a/src/database/core/SyncTree.ts +++ b/src/database/core/SyncTree.ts @@ -15,19 +15,23 @@ */ import { assert } from '../../utils/assert'; -import { 
errorForServerCode } from "./util/util"; -import { AckUserWrite } from "./operation/AckUserWrite"; -import { ChildrenNode } from "./snap/ChildrenNode"; -import { forEach, safeGet } from "../../utils/obj"; -import { ImmutableTree } from "./util/ImmutableTree"; -import { ListenComplete } from "./operation/ListenComplete"; -import { Merge } from "./operation/Merge"; -import { OperationSource } from "./operation/Operation"; -import { Overwrite } from "./operation/Overwrite"; -import { Path } from "./util/Path"; -import { SyncPoint } from "./SyncPoint"; -import { WriteTree } from "./WriteTree"; -import { Query } from "../api/Query"; +import { errorForServerCode } from './util/util'; +import { AckUserWrite } from './operation/AckUserWrite'; +import { ChildrenNode } from './snap/ChildrenNode'; +import { forEach, safeGet } from '../../utils/obj'; +import { ImmutableTree } from './util/ImmutableTree'; +import { ListenComplete } from './operation/ListenComplete'; +import { Merge } from './operation/Merge'; +import { Operation, OperationSource } from './operation/Operation'; +import { Overwrite } from './operation/Overwrite'; +import { Path } from './util/Path'; +import { SyncPoint } from './SyncPoint'; +import { WriteTree, WriteTreeRef } from './WriteTree'; +import { Query } from '../api/Query'; +import { Node } from './snap/Node'; +import { Event } from './view/Event'; +import { EventRegistration } from './view/EventRegistration'; +import { View } from './view/View'; /** * @typedef {{ @@ -41,6 +45,14 @@ import { Query } from "../api/Query"; * stopListening: function(!Query, ?number) * }} */ +export interface ListenProvider { + startListening(query: Query, + tag: number | null, + hashFn: () => string, + onComplete: (a: string, b?: any) => Event[]): Event[]; + + stopListening(a: Query, b: number | null): void; +} /** * SyncTree is the central class for managing event callback registration, data caching, views @@ -63,8 +75,6 @@ import { Query } from "../api/Query"; * events are returned to the caller rather than raised synchronously. * * @constructor - * @param {!ListenProvider} listenProvider Used by SyncTree to start / stop listening - * to server data. */ export class SyncTree { /** @@ -72,25 +82,24 @@ export class SyncTree { * @type {!ImmutableTree.} * @private */ - syncPointTree_; + private syncPointTree_: ImmutableTree = ImmutableTree.Empty; /** * A tree of all pending user writes (user-initiated set()'s, transaction()'s, update()'s, etc.). * @type {!WriteTree} * @private */ - pendingWriteTree_; - tagToQueryMap_; - queryToTagMap_; - listenProvider_; - - constructor(listenProvider) { - this.syncPointTree_ = ImmutableTree.Empty; - this.pendingWriteTree_ = new WriteTree(); - this.tagToQueryMap_ = {}; - this.queryToTagMap_ = {}; - this.listenProvider_ = listenProvider; - }; + private pendingWriteTree_ = new WriteTree(); + + private tagToQueryMap_: { [k: string]: string } = {}; + private queryToTagMap_: { [k: string]: number } = {}; + + /** + * @param {!ListenProvider} listenProvider_ Used by SyncTree to start / stop listening + * to server data. + */ + constructor(private listenProvider_: ListenProvider) { + } /** * Apply the data changes for a user-generated set() or transaction() call. @@ -101,7 +110,7 @@ export class SyncTree { * @param {boolean=} visible * @return {!Array.} Events to raise. */ - applyUserOverwrite(path, newData, writeId, visible) { + applyUserOverwrite(path: Path, newData: Node, writeId: number, visible?: boolean): Event[] { // Record pending write. 
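The new ListenProvider interface is the only contract SyncTree needs from its transport. Below is a minimal, inert implementation sketch, hypothetical and in the spirit of an in-memory test double; the parameter names are mine, the signatures follow the interface above.

import { SyncTree, ListenProvider } from './SyncTree';
import { Query } from '../api/Query';
import { Event } from './view/Event';

const inertListenProvider: ListenProvider = {
  startListening(query: Query, tag: number | null, hashFn: () => string,
                 onComplete: (status: string, data?: any) => Event[]): Event[] {
    return []; // a real provider would register the listen and call onComplete later
  },
  stopListening(query: Query, tag: number | null): void {}
};

const syncTree = new SyncTree(inertListenProvider);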
this.pendingWriteTree_.addOverwrite(path, newData, writeId, visible); @@ -109,9 +118,9 @@ export class SyncTree { return []; } else { return this.applyOperationToSyncPoints_( - new Overwrite(OperationSource.User, path, newData)); + new Overwrite(OperationSource.User, path, newData)); } - }; + } /** * Apply the data from a user-generated update() call @@ -121,15 +130,15 @@ export class SyncTree { * @param {!number} writeId * @return {!Array.} Events to raise. */ - applyUserMerge(path, changedChildren, writeId) { + applyUserMerge(path: Path, changedChildren: { [k: string]: Node }, writeId: number): Event[] { // Record pending merge. this.pendingWriteTree_.addMerge(path, changedChildren, writeId); - var changeTree = ImmutableTree.fromObject(changedChildren); + const changeTree = ImmutableTree.fromObject(changedChildren); return this.applyOperationToSyncPoints_( - new Merge(OperationSource.User, path, changeTree)); - }; + new Merge(OperationSource.User, path, changeTree)); + } /** * Acknowledge a pending user write that was previously registered with applyUserOverwrite() or applyUserMerge(). @@ -138,25 +147,23 @@ export class SyncTree { * @param {boolean=} revert True if the given write failed and needs to be reverted * @return {!Array.} Events to raise. */ - ackUserWrite(writeId, revert) { - revert = revert || false; - - var write = this.pendingWriteTree_.getWrite(writeId); - var needToReevaluate = this.pendingWriteTree_.removeWrite(writeId); + ackUserWrite(writeId: number, revert: boolean = false) { + const write = this.pendingWriteTree_.getWrite(writeId); + const needToReevaluate = this.pendingWriteTree_.removeWrite(writeId); if (!needToReevaluate) { return []; } else { - var affectedTree = ImmutableTree.Empty; + let affectedTree = ImmutableTree.Empty; if (write.snap != null) { // overwrite affectedTree = affectedTree.set(Path.Empty, true); } else { - forEach(write.children, function(pathString, node) { + forEach(write.children, function (pathString: string, node: Node) { affectedTree = affectedTree.set(new Path(pathString), node); }); } return this.applyOperationToSyncPoints_(new AckUserWrite(write.path, affectedTree, revert)); } - }; + } /** * Apply new server data for the specified path.. @@ -165,10 +172,10 @@ export class SyncTree { * @param {!Node} newData * @return {!Array.} Events to raise. */ - applyServerOverwrite(path, newData) { + applyServerOverwrite(path: Path, newData: Node): Event[] { return this.applyOperationToSyncPoints_( - new Overwrite(OperationSource.Server, path, newData)); - }; + new Overwrite(OperationSource.Server, path, newData)); + } /** * Apply new server data to be merged in at the specified path. @@ -177,12 +184,12 @@ export class SyncTree { * @param {!Object.} changedChildren * @return {!Array.} Events to raise. */ - applyServerMerge(path, changedChildren) { - var changeTree = ImmutableTree.fromObject(changedChildren); + applyServerMerge(path: Path, changedChildren: { [k: string]: Node }): Event[] { + const changeTree = ImmutableTree.fromObject(changedChildren); return this.applyOperationToSyncPoints_( - new Merge(OperationSource.Server, path, changeTree)); - }; + new Merge(OperationSource.Server, path, changeTree)); + } /** * Apply a listen complete for a query @@ -190,10 +197,10 @@ export class SyncTree { * @param {!Path} path * @return {!Array.} Events to raise. 
*/ - applyListenComplete(path) { + applyListenComplete(path: Path): Event[] { return this.applyOperationToSyncPoints_( - new ListenComplete(OperationSource.Server, path)); - }; + new ListenComplete(OperationSource.Server, path)); + } /** * Apply new server data for the specified tagged query. @@ -203,20 +210,20 @@ export class SyncTree { * @param {!number} tag * @return {!Array.} Events to raise. */ - applyTaggedQueryOverwrite(path, snap, tag) { - var queryKey = this.queryKeyForTag_(tag); + applyTaggedQueryOverwrite(path: Path, snap: Node, tag: number): Event[] { + const queryKey = this.queryKeyForTag_(tag); if (queryKey != null) { - var r = this.parseQueryKey_(queryKey); - var queryPath = r.path, queryId = r.queryId; - var relativePath = Path.relativePath(queryPath, path); - var op = new Overwrite(OperationSource.forServerTaggedQuery(queryId), - relativePath, snap); - return this.applyTaggedOperation_(queryPath, queryId, op); + const r = SyncTree.parseQueryKey_(queryKey); + const queryPath = r.path, queryId = r.queryId; + const relativePath = Path.relativePath(queryPath, path); + const op = new Overwrite(OperationSource.forServerTaggedQuery(queryId), + relativePath, snap); + return this.applyTaggedOperation_(queryPath, op); } else { // Query must have been removed already return []; } - }; + } /** * Apply server data to be merged in for the specified tagged query. @@ -226,21 +233,21 @@ export class SyncTree { * @param {!number} tag * @return {!Array.} Events to raise. */ - applyTaggedQueryMerge(path, changedChildren, tag) { - var queryKey = this.queryKeyForTag_(tag); + applyTaggedQueryMerge(path: Path, changedChildren: { [k: string]: Node }, tag: number): Event[] { + const queryKey = this.queryKeyForTag_(tag); if (queryKey) { - var r = this.parseQueryKey_(queryKey); - var queryPath = r.path, queryId = r.queryId; - var relativePath = Path.relativePath(queryPath, path); - var changeTree = ImmutableTree.fromObject(changedChildren); - var op = new Merge(OperationSource.forServerTaggedQuery(queryId), - relativePath, changeTree); - return this.applyTaggedOperation_(queryPath, queryId, op); + const r = SyncTree.parseQueryKey_(queryKey); + const queryPath = r.path, queryId = r.queryId; + const relativePath = Path.relativePath(queryPath, path); + const changeTree = ImmutableTree.fromObject(changedChildren); + const op = new Merge(OperationSource.forServerTaggedQuery(queryId), + relativePath, changeTree); + return this.applyTaggedOperation_(queryPath, op); } else { // We've already removed the query. No big deal, ignore the update return []; } - }; + } /** * Apply a listen complete for a tagged query @@ -249,20 +256,20 @@ export class SyncTree { * @param {!number} tag * @return {!Array.} Events to raise. 
*/ - applyTaggedListenComplete(path, tag) { - var queryKey = this.queryKeyForTag_(tag); + applyTaggedListenComplete(path: Path, tag: number): Event[] { + const queryKey = this.queryKeyForTag_(tag); if (queryKey) { - var r = this.parseQueryKey_(queryKey); - var queryPath = r.path, queryId = r.queryId; - var relativePath = Path.relativePath(queryPath, path); - var op = new ListenComplete(OperationSource.forServerTaggedQuery(queryId), - relativePath); - return this.applyTaggedOperation_(queryPath, queryId, op); + const r = SyncTree.parseQueryKey_(queryKey); + const queryPath = r.path, queryId = r.queryId; + const relativePath = Path.relativePath(queryPath, path); + const op = new ListenComplete(OperationSource.forServerTaggedQuery(queryId), + relativePath); + return this.applyTaggedOperation_(queryPath, op); } else { // We've already removed the query. No big deal, ignore the update return []; } - }; + } /** * Add an event callback for the specified query. @@ -271,19 +278,19 @@ export class SyncTree { * @param {!EventRegistration} eventRegistration * @return {!Array.} Events to raise. */ - addEventRegistration(query, eventRegistration) { - var path = query.path; + addEventRegistration(query: Query, eventRegistration: EventRegistration): Event[] { + const path = query.path; - var serverCache = null; - var foundAncestorDefaultView = false; + let serverCache: Node | null = null; + let foundAncestorDefaultView = false; // Any covering writes will necessarily be at the root, so really all we need to find is the server cache. // Consider optimizing this once there's a better understanding of what actual behavior will be. - this.syncPointTree_.foreachOnPath(path, function(pathToSyncPoint, sp) { - var relativePath = Path.relativePath(pathToSyncPoint, path); + this.syncPointTree_.foreachOnPath(path, function (pathToSyncPoint, sp) { + const relativePath = Path.relativePath(pathToSyncPoint, path); serverCache = serverCache || sp.getCompleteServerCache(relativePath); foundAncestorDefaultView = foundAncestorDefaultView || sp.hasCompleteView(); }); - var syncPoint = this.syncPointTree_.get(path); + let syncPoint = this.syncPointTree_.get(path); if (!syncPoint) { syncPoint = new SyncPoint(); this.syncPointTree_ = this.syncPointTree_.set(path, syncPoint); @@ -292,40 +299,40 @@ export class SyncTree { serverCache = serverCache || syncPoint.getCompleteServerCache(Path.Empty); } - var serverCacheComplete; + let serverCacheComplete; if (serverCache != null) { serverCacheComplete = true; } else { serverCacheComplete = false; serverCache = ChildrenNode.EMPTY_NODE; - var subtree = this.syncPointTree_.subtree(path); - subtree.foreachChild(function(childName, childSyncPoint) { - var completeCache = childSyncPoint.getCompleteServerCache(Path.Empty); + const subtree = this.syncPointTree_.subtree(path); + subtree.foreachChild(function (childName, childSyncPoint) { + const completeCache = childSyncPoint.getCompleteServerCache(Path.Empty); if (completeCache) { serverCache = serverCache.updateImmediateChild(childName, completeCache); } }); } - var viewAlreadyExists = syncPoint.viewExistsForQuery(query); + const viewAlreadyExists = syncPoint.viewExistsForQuery(query); if (!viewAlreadyExists && !query.getQueryParams().loadsAllData()) { // We need to track a tag for this query - var queryKey = this.makeQueryKey_(query); + const queryKey = SyncTree.makeQueryKey_(query); assert(!(queryKey in this.queryToTagMap_), 'View does not exist, but we have a tag'); - var tag = SyncTree.getNextQueryTag_(); + const tag = 
SyncTree.getNextQueryTag_(); this.queryToTagMap_[queryKey] = tag; // Coerce to string to avoid sparse arrays. this.tagToQueryMap_['_' + tag] = queryKey; } - var writesCache = this.pendingWriteTree_.childWrites(path); - var events = syncPoint.addEventRegistration(query, eventRegistration, writesCache, serverCache, serverCacheComplete); + const writesCache = this.pendingWriteTree_.childWrites(path); + let events = syncPoint.addEventRegistration(query, eventRegistration, writesCache, serverCache, serverCacheComplete); if (!viewAlreadyExists && !foundAncestorDefaultView) { - var view = /** @type !View */ (syncPoint.viewForQuery(query)); + const view = /** @type !View */ (syncPoint.viewForQuery(query)); events = events.concat(this.setupListener_(query, view)); } return events; - }; + } /** * Remove event callback(s). @@ -338,11 +345,12 @@ export class SyncTree { * @param {Error=} cancelError If a cancelError is provided, appropriate cancel events will be returned. * @return {!Array.} Cancel events, if cancelError was provided. */ - removeEventRegistration(query, eventRegistration, cancelError?) { + removeEventRegistration(query: Query, eventRegistration: EventRegistration | null, + cancelError?: Error): Event[] { // Find the syncPoint first. Then deal with whether or not it has matching listeners - var path = query.path; - var maybeSyncPoint = this.syncPointTree_.get(path); - var cancelEvents = []; + const path = query.path; + const maybeSyncPoint = this.syncPointTree_.get(path); + let cancelEvents: Event[] = []; // A removal on a default query affects all queries at that location. A removal on an indexed query, even one without // other query constraints, does *not* affect all queries at that location. So this check must be for 'default', and // not loadsAllData(). @@ -350,11 +358,11 @@ export class SyncTree { /** * @type {{removed: !Array., events: !Array.}} */ - var removedAndEvents = maybeSyncPoint.removeEventRegistration(query, eventRegistration, cancelError); + const removedAndEvents = maybeSyncPoint.removeEventRegistration(query, eventRegistration, cancelError); if (maybeSyncPoint.isEmpty()) { this.syncPointTree_ = this.syncPointTree_.remove(path); } - var removed = removedAndEvents.removed; + const removed = removedAndEvents.removed; cancelEvents = removedAndEvents.events; // We may have just removed one of many listeners and can short-circuit this whole process // We may also not have removed a default listener, in which case all of the descendant listeners should already be @@ -362,27 +370,27 @@ export class SyncTree { // // Since indexed queries can shadow if they don't have other query constraints, check for loadsAllData(), instead of // queryId === 'default' - var removingDefault = -1 !== removed.findIndex(function(query) { + const removingDefault = -1 !== removed.findIndex(function (query) { return query.getQueryParams().loadsAllData(); }); - var covered = this.syncPointTree_.findOnPath(path, function(relativePath, parentSyncPoint) { + const covered = this.syncPointTree_.findOnPath(path, function (relativePath, parentSyncPoint) { return parentSyncPoint.hasCompleteView(); }); if (removingDefault && !covered) { - var subtree = this.syncPointTree_.subtree(path); + const subtree = this.syncPointTree_.subtree(path); // There are potentially child listeners. 
Determine what if any listens we need to send before executing the // removal if (!subtree.isEmpty()) { // We need to fold over our subtree and collect the listeners to send - var newViews = this.collectDistinctViewsForSubTree_(subtree); + const newViews = this.collectDistinctViewsForSubTree_(subtree); // Ok, we've collected all the listens we need. Set them up. - for (var i = 0; i < newViews.length; ++i) { - var view = newViews[i], newQuery = view.getQuery(); - var listener = this.createListenerForView_(view); - this.listenProvider_.startListening(this.queryForListening_(newQuery), this.tagForQuery_(newQuery), - listener.hashFn, listener.onComplete); + for (let i = 0; i < newViews.length; ++i) { + const view = newViews[i], newQuery = view.getQuery(); + const listener = this.createListenerForView_(view); + this.listenProvider_.startListening(SyncTree.queryForListening_(newQuery), this.tagForQuery_(newQuery), + listener.hashFn, listener.onComplete); } } else { // There's nothing below us, so nothing we need to start listening on @@ -396,14 +404,12 @@ export class SyncTree { // default. Otherwise, we need to iterate through and cancel each individual query if (removingDefault) { // We don't tag default listeners - var defaultTag = null; - this.listenProvider_.stopListening(this.queryForListening_(query), defaultTag); + const defaultTag: number | null = null; + this.listenProvider_.stopListening(SyncTree.queryForListening_(query), defaultTag); } else { - var self = this; - removed.forEach(function(queryToRemove) { - var queryIdToRemove = queryToRemove.queryIdentifier(); - var tagToRemove = self.queryToTagMap_[self.makeQueryKey_(queryToRemove)]; - self.listenProvider_.stopListening(self.queryForListening_(queryToRemove), tagToRemove); + removed.forEach((queryToRemove: Query) => { + const tagToRemove = this.queryToTagMap_[SyncTree.makeQueryKey_(queryToRemove)]; + this.listenProvider_.stopListening(SyncTree.queryForListening_(queryToRemove), tagToRemove); }); } } @@ -413,7 +419,7 @@ export class SyncTree { // No-op, this listener must've been already removed } return cancelEvents; - }; + } /** * Returns a complete cache, if we have one, of the data at a particular path. 
The location must have a listener above @@ -424,18 +430,18 @@ export class SyncTree { * @param {Array.=} writeIdsToExclude A specific set to be excluded * @return {?Node} */ - calcCompleteEventCache(path, writeIdsToExclude) { - var includeHiddenSets = true; - var writeTree = this.pendingWriteTree_; - var serverCache = this.syncPointTree_.findOnPath(path, function(pathSoFar, syncPoint) { - var relativePath = Path.relativePath(pathSoFar, path); - var serverCache = syncPoint.getCompleteServerCache(relativePath); + calcCompleteEventCache(path: Path, writeIdsToExclude?: number[]): Node | null { + const includeHiddenSets = true; + const writeTree = this.pendingWriteTree_; + const serverCache = this.syncPointTree_.findOnPath(path, function (pathSoFar, syncPoint) { + const relativePath = Path.relativePath(pathSoFar, path); + const serverCache = syncPoint.getCompleteServerCache(relativePath); if (serverCache) { return serverCache; } }); return writeTree.calcCompleteEventCache(path, serverCache, writeIdsToExclude, includeHiddenSets); - }; + } /** * This collapses multiple unfiltered views into a single view, since we only need a single @@ -445,41 +451,41 @@ export class SyncTree { * @return {!Array.} * @private */ - collectDistinctViewsForSubTree_(subtree) { - return subtree.fold(function(relativePath, maybeChildSyncPoint, childMap) { + private collectDistinctViewsForSubTree_(subtree: ImmutableTree): View[] { + return subtree.fold((relativePath, maybeChildSyncPoint, childMap) => { if (maybeChildSyncPoint && maybeChildSyncPoint.hasCompleteView()) { - var completeView = maybeChildSyncPoint.getCompleteView(); + const completeView = maybeChildSyncPoint.getCompleteView(); return [completeView]; } else { // No complete view here, flatten any deeper listens into an array - var views = []; + let views: View[] = []; if (maybeChildSyncPoint) { views = maybeChildSyncPoint.getQueryViews(); } - forEach(childMap, function(key, childViews) { + forEach(childMap, function (key: string, childViews: View[]) { views = views.concat(childViews); }); return views; } }); - }; + } /** * @param {!Array.} queries * @private */ - removeTags_(queries) { - for (var j = 0; j < queries.length; ++j) { - var removedQuery = queries[j]; + private removeTags_(queries: Query[]) { + for (let j = 0; j < queries.length; ++j) { + const removedQuery = queries[j]; if (!removedQuery.getQueryParams().loadsAllData()) { // We should have a tag for this - var removedQueryKey = this.makeQueryKey_(removedQuery); - var removedQueryTag = this.queryToTagMap_[removedQueryKey]; + const removedQueryKey = SyncTree.makeQueryKey_(removedQuery); + const removedQueryTag = this.queryToTagMap_[removedQueryKey]; delete this.queryToTagMap_[removedQueryKey]; delete this.tagToQueryMap_['_' + removedQueryTag]; } } - }; + } /** @@ -488,7 +494,7 @@ export class SyncTree { * @return {!Query} The normalized query * @private */ - queryForListening_(query: Query) { + private static queryForListening_(query: Query): Query { if (query.getQueryParams().loadsAllData() && !query.getQueryParams().isDefault()) { // We treat queries that load all data as default queries // Cast is necessary because ref() technically returns Firebase which is actually fb.api.Firebase which inherits @@ -497,7 +503,7 @@ export class SyncTree { } else { return query; } - }; + } /** @@ -508,47 +514,45 @@ export class SyncTree { * @return {!Array.} This method can return events to support synchronous data sources * @private */ - setupListener_(query, view) { - var path = query.path; - var tag = 
this.tagForQuery_(query); - var listener = this.createListenerForView_(view); + private setupListener_(query: Query, view: View): Event[] { + const path = query.path; + const tag = this.tagForQuery_(query); + const listener = this.createListenerForView_(view); - var events = this.listenProvider_.startListening(this.queryForListening_(query), tag, listener.hashFn, - listener.onComplete); + const events = this.listenProvider_.startListening(SyncTree.queryForListening_(query), tag, listener.hashFn, + listener.onComplete); - var subtree = this.syncPointTree_.subtree(path); + const subtree = this.syncPointTree_.subtree(path); // The root of this subtree has our query. We're here because we definitely need to send a listen for that, but we // may need to shadow other listens as well. if (tag) { - assert(!subtree.value.hasCompleteView(), "If we're adding a query, it shouldn't be shadowed"); + assert(!subtree.value.hasCompleteView(), 'If we\'re adding a query, it shouldn\'t be shadowed'); } else { // Shadow everything at or below this location, this is a default listener. - var queriesToStop = subtree.fold(function(relativePath, maybeChildSyncPoint, childMap) { + const queriesToStop = subtree.fold(function (relativePath, maybeChildSyncPoint, childMap) { if (!relativePath.isEmpty() && maybeChildSyncPoint && maybeChildSyncPoint.hasCompleteView()) { return [maybeChildSyncPoint.getCompleteView().getQuery()]; } else { // No default listener here, flatten any deeper queries into an array - var queries = []; + let queries: Query[] = []; if (maybeChildSyncPoint) { queries = queries.concat( - maybeChildSyncPoint.getQueryViews().map(function(view) { - return view.getQuery(); - }) + maybeChildSyncPoint.getQueryViews().map(view=> view.getQuery()) ); } - forEach(childMap, function(key, childQueries) { + forEach(childMap, function (key: string, childQueries: Query[]) { queries = queries.concat(childQueries); }); return queries; } }); - for (var i = 0; i < queriesToStop.length; ++i) { - var queryToStop = queriesToStop[i]; - this.listenProvider_.stopListening(this.queryForListening_(queryToStop), this.tagForQuery_(queryToStop)); + for (let i = 0; i < queriesToStop.length; ++i) { + const queryToStop = queriesToStop[i]; + this.listenProvider_.stopListening(SyncTree.queryForListening_(queryToStop), this.tagForQuery_(queryToStop)); } } return events; - }; + } /** * @@ -556,32 +560,31 @@ export class SyncTree { * @return {{hashFn: function(), onComplete: function(!string, *)}} * @private */ - createListenerForView_(view) { - var self = this; - var query = view.getQuery(); - var tag = this.tagForQuery_(query); + private createListenerForView_(view: View): { hashFn(): string, onComplete(a: string, b?: any): Event[] } { + const query = view.getQuery(); + const tag = this.tagForQuery_(query); return { - hashFn: function() { - var cache = view.getServerCache() || ChildrenNode.EMPTY_NODE; + hashFn: () => { + const cache = view.getServerCache() || ChildrenNode.EMPTY_NODE; return cache.hash(); }, - onComplete: function(status, data) { + onComplete: (status: string): Event[] => { if (status === 'ok') { if (tag) { - return self.applyTaggedListenComplete(query.path, tag); + return this.applyTaggedListenComplete(query.path, tag); } else { - return self.applyListenComplete(query.path); + return this.applyListenComplete(query.path); } } else { // If a listen failed, kill all of the listeners here, not just the one that triggered the error. 
// Note that this may need to be scoped to just this listener if we change permissions on filtered children - var error = errorForServerCode(status, query); - return self.removeEventRegistration(query, /*eventRegistration*/null, error); + const error = errorForServerCode(status, query); + return this.removeEventRegistration(query, /*eventRegistration*/null, error); } } }; - }; + } /** * Given a query, computes a "queryKey" suitable for use in our queryToTagMap_. @@ -589,9 +592,9 @@ export class SyncTree { * @param {!Query} query * @return {string} */ - makeQueryKey_(query) { + private static makeQueryKey_(query: Query): string { return query.path.toString() + '$' + query.queryIdentifier(); - }; + } /** * Given a queryKey (created by makeQueryKey), parse it back into a path and queryId. @@ -599,14 +602,14 @@ export class SyncTree { * @param {!string} queryKey * @return {{queryId: !string, path: !Path}} */ - parseQueryKey_(queryKey) { - var splitIndex = queryKey.indexOf('$'); + private static parseQueryKey_(queryKey: string): { queryId: string, path: Path } { + const splitIndex = queryKey.indexOf('$'); assert(splitIndex !== -1 && splitIndex < queryKey.length - 1, 'Bad queryKey.'); return { queryId: queryKey.substr(splitIndex + 1), path: new Path(queryKey.substr(0, splitIndex)) }; - }; + } /** * Return the query associated with the given tag, if we have one @@ -614,9 +617,9 @@ export class SyncTree { * @return {?string} * @private */ - queryKeyForTag_(tag) { + private queryKeyForTag_(tag: number): string | null { return this.tagToQueryMap_['_' + tag]; - }; + } /** * Return the tag associated with the given query. @@ -624,41 +627,40 @@ export class SyncTree { * @return {?number} * @private */ - tagForQuery_(query) { - var queryKey = this.makeQueryKey_(query); + private tagForQuery_(query: Query): number | null { + const queryKey = SyncTree.makeQueryKey_(query); return safeGet(this.queryToTagMap_, queryKey); - }; + } /** * Static tracker for next query tag. * @type {number} * @private */ - static nextQueryTag_ = 1; + private static nextQueryTag_ = 1; /** * Static accessor for query tags. * @return {number} * @private */ - static getNextQueryTag_ = function() { + private static getNextQueryTag_(): number { return SyncTree.nextQueryTag_++; - }; + } /** * A helper method to apply tagged operations * * @param {!Path} queryPath - * @param {!string} queryId * @param {!Operation} operation * @return {!Array.} * @private */ - applyTaggedOperation_(queryPath, queryId, operation) { - var syncPoint = this.syncPointTree_.get(queryPath); - assert(syncPoint, "Missing sync point for query tag that we're tracking"); - var writesCache = this.pendingWriteTree_.childWrites(queryPath); - return syncPoint.applyOperation(operation, writesCache, /*serverCache=*/null); + private applyTaggedOperation_(queryPath: Path, operation: Operation): Event[] { + const syncPoint = this.syncPointTree_.get(queryPath); + assert(syncPoint, 'Missing sync point for query tag that we\'re tracking'); + const writesCache = this.pendingWriteTree_.childWrites(queryPath); + return syncPoint.applyOperation(operation, writesCache, /*serverCache=*/null); } /** @@ -667,22 +669,22 @@ export class SyncTree { * NOTES: * - Descendant SyncPoints will be visited first (since we raise events depth-first). - * - We call applyOperation() on each SyncPoint passing three things: - * 1. A version of the Operation that has been made relative to the SyncPoint location. - * 2. A WriteTreeRef of any writes we have cached at the SyncPoint location. - * 3. 
A snapshot Node with cached server data, if we have it. - - * - We concatenate all of the events returned by each SyncPoint and return the result. - * - * @param {!Operation} operation - * @return {!Array.} - * @private - */ - applyOperationToSyncPoints_(operation) { + * - We call applyOperation() on each SyncPoint passing three things: + * 1. A version of the Operation that has been made relative to the SyncPoint location. + * 2. A WriteTreeRef of any writes we have cached at the SyncPoint location. + * 3. A snapshot Node with cached server data, if we have it. + + * - We concatenate all of the events returned by each SyncPoint and return the result. + * + * @param {!Operation} operation + * @return {!Array.} + * @private + */ + private applyOperationToSyncPoints_(operation: Operation): Event[] { return this.applyOperationHelper_(operation, this.syncPointTree_, /*serverCache=*/ null, - this.pendingWriteTree_.childWrites(Path.Empty)); + this.pendingWriteTree_.childWrites(Path.Empty)); - }; + } /** * Recursive helper for applyOperationToSyncPoints_ @@ -694,27 +696,28 @@ export class SyncTree { * @param {!WriteTreeRef} writesCache * @return {!Array.} */ - applyOperationHelper_(operation, syncPointTree, serverCache, writesCache) { + private applyOperationHelper_(operation: Operation, syncPointTree: ImmutableTree, + serverCache: Node | null, writesCache: WriteTreeRef): Event[] { if (operation.path.isEmpty()) { return this.applyOperationDescendantsHelper_(operation, syncPointTree, serverCache, writesCache); } else { - var syncPoint = syncPointTree.get(Path.Empty); + const syncPoint = syncPointTree.get(Path.Empty); // If we don't have cached server data, see if we can get it from this SyncPoint. if (serverCache == null && syncPoint != null) { serverCache = syncPoint.getCompleteServerCache(Path.Empty); } - var events = []; - var childName = operation.path.getFront(); - var childOperation = operation.operationForChild(childName); - var childTree = syncPointTree.children.get(childName); + let events: Event[] = []; + const childName = operation.path.getFront(); + const childOperation = operation.operationForChild(childName); + const childTree = syncPointTree.children.get(childName); if (childTree && childOperation) { - var childServerCache = serverCache ? serverCache.getImmediateChild(childName) : null; - var childWritesCache = writesCache.child(childName); + const childServerCache = serverCache ? serverCache.getImmediateChild(childName) : null; + const childWritesCache = writesCache.child(childName); events = events.concat( - this.applyOperationHelper_(childOperation, childTree, childServerCache, childWritesCache)); + this.applyOperationHelper_(childOperation, childTree, childServerCache, childWritesCache)); } if (syncPoint) { @@ -723,7 +726,7 @@ export class SyncTree { return events; } - }; + } /** * Recursive helper for applyOperationToSyncPoints_ @@ -735,24 +738,23 @@ export class SyncTree { * @param {!WriteTreeRef} writesCache * @return {!Array.} */ - applyOperationDescendantsHelper_(operation, syncPointTree, - serverCache, writesCache) { - var syncPoint = syncPointTree.get(Path.Empty); + private applyOperationDescendantsHelper_(operation: Operation, syncPointTree: ImmutableTree, + serverCache: Node | null, writesCache: WriteTreeRef): Event[] { + const syncPoint = syncPointTree.get(Path.Empty); // If we don't have cached server data, see if we can get it from this SyncPoint. 
if (serverCache == null && syncPoint != null) { serverCache = syncPoint.getCompleteServerCache(Path.Empty); } - var events = []; - var self = this; - syncPointTree.children.inorderTraversal(function(childName, childTree) { - var childServerCache = serverCache ? serverCache.getImmediateChild(childName) : null; - var childWritesCache = writesCache.child(childName); - var childOperation = operation.operationForChild(childName); + let events: Event[] = []; + syncPointTree.children.inorderTraversal((childName, childTree) => { + const childServerCache = serverCache ? serverCache.getImmediateChild(childName) : null; + const childWritesCache = writesCache.child(childName); + const childOperation = operation.operationForChild(childName); if (childOperation) { events = events.concat( - self.applyOperationDescendantsHelper_(childOperation, childTree, childServerCache, childWritesCache)); + this.applyOperationDescendantsHelper_(childOperation, childTree, childServerCache, childWritesCache)); } }); @@ -761,5 +763,5 @@ export class SyncTree { } return events; - }; + } } diff --git a/src/database/core/WriteTree.ts b/src/database/core/WriteTree.ts index 386fe52c486..8462a8a6ebf 100644 --- a/src/database/core/WriteTree.ts +++ b/src/database/core/WriteTree.ts @@ -14,25 +14,27 @@ * limitations under the License. */ -import { findKey, forEach, safeGet } from "../../utils/obj"; -import { assert, assertionError } from "../../utils/assert"; -import { Path } from "./util/Path"; -import { CompoundWrite } from "./CompoundWrite"; -import { PRIORITY_INDEX } from "./snap/indexes/PriorityIndex"; -import { ChildrenNode } from "./snap/ChildrenNode"; +import { findKey, forEach, safeGet } from '../../utils/obj'; +import { assert, assertionError } from '../../utils/assert'; +import { Path } from './util/Path'; +import { CompoundWrite } from './CompoundWrite'; +import { PRIORITY_INDEX } from './snap/indexes/PriorityIndex'; +import { ChildrenNode } from './snap/ChildrenNode'; +import { NamedNode, Node } from './snap/Node'; +import { CacheNode } from './view/CacheNode'; +import { Index } from './snap/indexes/Index'; /** * Defines a single user-initiated write operation. May be the result of a set(), transaction(), or update() call. In * the case of a set() or transaction, snap will be non-null. In the case of an update(), children will be non-null. - * - * @typedef {{ - * writeId: number, - * path: !Path, - * snap: ?Node, - * children: ?Object., - * visible: boolean - * }} */ +export interface WriteRecord { + writeId: number; + path: Path; + snap?: Node | null; + children?: { [k: string]: Node } | null; + visible: boolean; +} /** * WriteTree tracks all pending user-initiated writes and has methods to calculate the result of merging them @@ -49,7 +51,7 @@ export class WriteTree { * @type {!CompoundWrite} * @private */ - visibleWrites_; + private visibleWrites_: CompoundWrite = CompoundWrite.Empty; /** * A list of all pending writes, regardless of visibility and shadowed-ness. Used to calculate arbitrary @@ -59,23 +61,19 @@ * @type {!Array.} * @private */ - allWrites_; - lastWriteId_; - - constructor() { - this.visibleWrites_ = CompoundWrite.Empty; - this.allWrites_ = []; - this.lastWriteId_ = -1; - }; + private allWrites_: WriteRecord[] = []; + + private lastWriteId_ = -1; + /** * Create a new WriteTreeRef for the given path. For use with a new sync point at the given path.
* * @param {!Path} path * @return {!WriteTreeRef} */ - childWrites(path): WriteTreeRef { + childWrites(path: Path): WriteTreeRef { return new WriteTreeRef(path, this); - }; + } /** * Record a new overwrite from user code. @@ -85,7 +83,7 @@ export class WriteTree { * @param {!number} writeId * @param {boolean=} visible This is set to false by some transactions. It should be excluded from event caches */ - addOverwrite(path, snap, writeId, visible) { + addOverwrite(path: Path, snap: Node, writeId: number, visible?: boolean) { assert(writeId > this.lastWriteId_, 'Stacking an older write on top of newer ones'); if (visible === undefined) { visible = true; @@ -96,7 +94,7 @@ export class WriteTree { this.visibleWrites_ = this.visibleWrites_.addWrite(path, snap); } this.lastWriteId_ = writeId; - }; + } /** * Record a new merge from user code. @@ -105,28 +103,28 @@ export class WriteTree { * @param {!Object.} changedChildren * @param {!number} writeId */ - addMerge(path, changedChildren, writeId) { + addMerge(path: Path, changedChildren: { [k: string]: Node }, writeId: number) { assert(writeId > this.lastWriteId_, 'Stacking an older merge on top of newer ones'); this.allWrites_.push({path: path, children: changedChildren, writeId: writeId, visible: true}); this.visibleWrites_ = this.visibleWrites_.addWrites(path, changedChildren); this.lastWriteId_ = writeId; - }; + } /** * @param {!number} writeId * @return {?WriteRecord} */ - getWrite(writeId) { - for (var i = 0; i < this.allWrites_.length; i++) { - var record = this.allWrites_[i]; + getWrite(writeId: number): WriteRecord | null { + for (let i = 0; i < this.allWrites_.length; i++) { + const record = this.allWrites_[i]; if (record.writeId === writeId) { return record; } } return null; - }; + } /** @@ -137,24 +135,24 @@ export class WriteTree { * @return {boolean} true if the write may have been visible (meaning we'll need to reevaluate / raise * events as a result). */ - removeWrite(writeId) { + removeWrite(writeId: number): boolean { // Note: disabling this check. It could be a transaction that preempted another transaction, and thus was applied // out of order. - //var validClear = revert || this.allWrites_.length === 0 || writeId <= this.allWrites_[0].writeId; + //const validClear = revert || this.allWrites_.length === 0 || writeId <= this.allWrites_[0].writeId; //assert(validClear, "Either we don't have this write, or it's the first one in the queue"); - var idx = this.allWrites_.findIndex(function(s) { return s.writeId === writeId; }); + const idx = this.allWrites_.findIndex(function (s) { return s.writeId === writeId; }); assert(idx >= 0, 'removeWrite called with nonexistent writeId.'); - var writeToRemove = this.allWrites_[idx]; + const writeToRemove = this.allWrites_[idx]; this.allWrites_.splice(idx, 1); - var removedWriteWasVisible = writeToRemove.visible; - var removedWriteOverlapsWithOtherWrites = false; + let removedWriteWasVisible = writeToRemove.visible; + let removedWriteOverlapsWithOtherWrites = false; - var i = this.allWrites_.length - 1; + let i = this.allWrites_.length - 1; while (removedWriteWasVisible && i >= 0) { - var currentWrite = this.allWrites_[i]; + const currentWrite = this.allWrites_[i]; if (currentWrite.visible) { if (i >= idx && this.recordContainsPath_(currentWrite, writeToRemove.path)) { // The removed write was completely shadowed by a subsequent write. 
@@ -178,15 +176,14 @@ export class WriteTree { if (writeToRemove.snap) { this.visibleWrites_ = this.visibleWrites_.removeWrite(writeToRemove.path); } else { - var children = writeToRemove.children; - var self = this; - forEach(children, function(childName, childSnap) { - self.visibleWrites_ = self.visibleWrites_.removeWrite(writeToRemove.path.child(childName)); + const children = writeToRemove.children; + forEach(children, (childName: string) => { + this.visibleWrites_ = this.visibleWrites_.removeWrite(writeToRemove.path.child(childName)); }); } return true; } - }; + } /** * Return a complete snapshot for the given path if there's visible write data at that path, else null. @@ -195,9 +192,9 @@ export class WriteTree { * @param {!Path} path * @return {?Node} */ - getCompleteWriteData(path) { + getCompleteWriteData(path: Path): Node | null { return this.visibleWrites_.getCompleteNode(path); - }; + } /** * Given optional, underlying server data, and an optional set of constraints (exclude some sets, include hidden @@ -209,26 +206,26 @@ export class WriteTree { * @param {boolean=} includeHiddenWrites Defaults to false, whether or not to layer on writes with visible set to false * @return {?Node} */ - calcCompleteEventCache(treePath, completeServerCache, writeIdsToExclude, - includeHiddenWrites) { + calcCompleteEventCache(treePath: Path, completeServerCache: Node | null, writeIdsToExclude?: number[], + includeHiddenWrites?: boolean): Node | null { if (!writeIdsToExclude && !includeHiddenWrites) { - var shadowingNode = this.visibleWrites_.getCompleteNode(treePath); + const shadowingNode = this.visibleWrites_.getCompleteNode(treePath); if (shadowingNode != null) { return shadowingNode; } else { - var subMerge = this.visibleWrites_.childCompoundWrite(treePath); + const subMerge = this.visibleWrites_.childCompoundWrite(treePath); if (subMerge.isEmpty()) { return completeServerCache; } else if (completeServerCache == null && !subMerge.hasCompleteWrite(Path.Empty)) { // We wouldn't have a complete snapshot, since there's no underlying data and no complete shadow return null; } else { - var layeredCache = completeServerCache || ChildrenNode.EMPTY_NODE; + const layeredCache = completeServerCache || ChildrenNode.EMPTY_NODE; return subMerge.apply(layeredCache); } } } else { - var merge = this.visibleWrites_.childCompoundWrite(treePath); + const merge = this.visibleWrites_.childCompoundWrite(treePath); if (!includeHiddenWrites && merge.isEmpty()) { return completeServerCache; } else { @@ -236,18 +233,18 @@ export class WriteTree { if (!includeHiddenWrites && completeServerCache == null && !merge.hasCompleteWrite(Path.Empty)) { return null; } else { - var filter = function(write) { + const filter = function (write: WriteRecord) { return (write.visible || includeHiddenWrites) && - (!writeIdsToExclude || !~writeIdsToExclude.indexOf(write.writeId)) && - (write.path.contains(treePath) || treePath.contains(write.path)); + (!writeIdsToExclude || !~writeIdsToExclude.indexOf(write.writeId)) && + (write.path.contains(treePath) || treePath.contains(write.path)); }; - var mergeAtPath = WriteTree.layerTree_(this.allWrites_, filter, treePath); - layeredCache = completeServerCache || ChildrenNode.EMPTY_NODE; + const mergeAtPath = WriteTree.layerTree_(this.allWrites_, filter, treePath); + const layeredCache = completeServerCache || ChildrenNode.EMPTY_NODE; return mergeAtPath.apply(layeredCache); } } } - }; + } /** * With optional, underlying server data, attempt to return a children node of children that we have complete 
data for. @@ -257,13 +254,13 @@ export class WriteTree { * @param {?ChildrenNode} completeServerChildren * @return {!ChildrenNode} */ - calcCompleteEventChildren(treePath, completeServerChildren) { - var completeChildren = ChildrenNode.EMPTY_NODE; - var topLevelSet = this.visibleWrites_.getCompleteNode(treePath); + calcCompleteEventChildren(treePath: Path, completeServerChildren: ChildrenNode | null) { + let completeChildren = ChildrenNode.EMPTY_NODE as Node; + const topLevelSet = this.visibleWrites_.getCompleteNode(treePath); if (topLevelSet) { if (!topLevelSet.isLeafNode()) { // we're shadowing everything. Return the children. - topLevelSet.forEachChild(PRIORITY_INDEX, function(childName, childSnap) { + topLevelSet.forEachChild(PRIORITY_INDEX, function (childName, childSnap) { completeChildren = completeChildren.updateImmediateChild(childName, childSnap); }); } @@ -271,26 +268,26 @@ export class WriteTree { } else if (completeServerChildren) { // Layer any children we have on top of this // We know we don't have a top-level set, so just enumerate existing children - var merge = this.visibleWrites_.childCompoundWrite(treePath); - completeServerChildren.forEachChild(PRIORITY_INDEX, function(childName, childNode) { - var node = merge.childCompoundWrite(new Path(childName)).apply(childNode); + const merge = this.visibleWrites_.childCompoundWrite(treePath); + completeServerChildren.forEachChild(PRIORITY_INDEX, function (childName, childNode) { + const node = merge.childCompoundWrite(new Path(childName)).apply(childNode); completeChildren = completeChildren.updateImmediateChild(childName, node); }); // Add any complete children we have from the set - merge.getCompleteChildren().forEach(function(namedNode) { + merge.getCompleteChildren().forEach(function (namedNode) { completeChildren = completeChildren.updateImmediateChild(namedNode.name, namedNode.node); }); return completeChildren; } else { // We don't have anything to layer on top of. Layer on any children we have // Note that we can return an empty snap if we have a defined delete - merge = this.visibleWrites_.childCompoundWrite(treePath); - merge.getCompleteChildren().forEach(function(namedNode) { + const merge = this.visibleWrites_.childCompoundWrite(treePath); + merge.getCompleteChildren().forEach(function (namedNode) { completeChildren = completeChildren.updateImmediateChild(namedNode.name, namedNode.node); }); return completeChildren; } - }; + } /** * Given that the underlying server data has updated, determine what, if anything, needs to be @@ -312,18 +309,18 @@ export class WriteTree { * @param {?Node} existingServerSnap * @return {?Node} */ - calcEventCacheAfterServerOverwrite(treePath, childPath, existingEventSnap, - existingServerSnap) { + calcEventCacheAfterServerOverwrite(treePath: Path, childPath: Path, existingEventSnap: Node | null, + existingServerSnap: Node | null): Node | null { assert(existingEventSnap || existingServerSnap, 'Either existingEventSnap or existingServerSnap must exist'); - var path = treePath.child(childPath); + const path = treePath.child(childPath); if (this.visibleWrites_.hasCompleteWrite(path)) { // At this point we can probably guarantee that we're in case 2, meaning no events // May need to check visibility while doing the findRootMostValueAndPath call return null; } else { // No complete shadowing. We're either partially shadowing or not shadowing at all. 
- var childMerge = this.visibleWrites_.childCompoundWrite(path); + const childMerge = this.visibleWrites_.childCompoundWrite(path); if (childMerge.isEmpty()) { // We're not shadowing at all. Case 1 return existingServerSnap.getChild(childPath); @@ -337,7 +334,7 @@ export class WriteTree { return childMerge.apply(existingServerSnap.getChild(childPath)); } } - }; + } /** * Returns a complete child for a given server snap after applying all user writes or null if there is no @@ -348,20 +345,20 @@ export class WriteTree { * @param {!CacheNode} existingServerSnap * @return {?Node} */ - calcCompleteChild(treePath, childKey, existingServerSnap) { - var path = treePath.child(childKey); - var shadowingNode = this.visibleWrites_.getCompleteNode(path); + calcCompleteChild(treePath: Path, childKey: string, existingServerSnap: CacheNode): Node | null { + const path = treePath.child(childKey); + const shadowingNode = this.visibleWrites_.getCompleteNode(path); if (shadowingNode != null) { return shadowingNode; } else { if (existingServerSnap.isCompleteForChild(childKey)) { - var childMerge = this.visibleWrites_.childCompoundWrite(path); + const childMerge = this.visibleWrites_.childCompoundWrite(path); return childMerge.apply(existingServerSnap.getNode().getImmediateChild(childKey)); } else { return null; } } - }; + } /** * Returns a node if there is a complete overwrite for this path. More specifically, if there is a write at @@ -371,9 +368,9 @@ export class WriteTree { * @param {!Path} path * @return {?Node} */ - shadowingWrite(path) { + shadowingWrite(path: Path): Node | null { return this.visibleWrites_.getCompleteNode(path); - }; + } /** * This method is used when processing child remove events on a query. If we can, we pull in children that were outside @@ -387,11 +384,11 @@ export class WriteTree { * @param {!Index} index * @return {!Array.} */ - calcIndexedSlice(treePath, completeServerData, startPost, count, reverse, - index) { - var toIterate; - var merge = this.visibleWrites_.childCompoundWrite(treePath); - var shadowingNode = merge.getCompleteNode(Path.Empty); + calcIndexedSlice(treePath: Path, completeServerData: Node | null, startPost: NamedNode, count: number, + reverse: boolean, index: Index): NamedNode[] { + let toIterate: Node; + const merge = this.visibleWrites_.childCompoundWrite(treePath); + const shadowingNode = merge.getCompleteNode(Path.Empty); if (shadowingNode != null) { toIterate = shadowingNode; } else if (completeServerData != null) { @@ -402,11 +399,11 @@ export class WriteTree { } toIterate = toIterate.withIndex(index); if (!toIterate.isEmpty() && !toIterate.isLeafNode()) { - var nodes = []; - var cmp = index.getCompare(); - var iter = reverse ? toIterate.getReverseIteratorFrom(startPost, index) : - toIterate.getIteratorFrom(startPost, index); - var next = iter.getNext(); + const nodes = []; + const cmp = index.getCompare(); + const iter = reverse ? 
(toIterate as ChildrenNode).getReverseIteratorFrom(startPost, index) : + (toIterate as ChildrenNode).getIteratorFrom(startPost, index); + let next = iter.getNext(); while (next && nodes.length < count) { if (cmp(next, startPost) !== 0) { nodes.push(next); @@ -417,7 +414,7 @@ export class WriteTree { } else { return []; } - }; + } /** * @param {!WriteRecord} writeRecord @@ -425,30 +422,30 @@ export class WriteTree { * @return {boolean} * @private */ - recordContainsPath_(writeRecord, path) { + private recordContainsPath_(writeRecord: WriteRecord, path: Path): boolean { if (writeRecord.snap) { return writeRecord.path.contains(path); } else { // findKey can return undefined, so use !! to coerce to boolean - return !!findKey(writeRecord.children, function(childSnap, childName) { + return !!findKey(writeRecord.children, function (childSnap: Node, childName: string) { return writeRecord.path.child(childName).contains(path); }); } - }; + } /** * Re-layer the writes and merges into a tree so we can efficiently calculate event snapshots * @private */ - resetTree_() { + private resetTree_() { this.visibleWrites_ = WriteTree.layerTree_(this.allWrites_, WriteTree.DefaultFilter_, - Path.Empty); + Path.Empty); if (this.allWrites_.length > 0) { this.lastWriteId_ = this.allWrites_[this.allWrites_.length - 1].writeId; } else { this.lastWriteId_ = -1; } - }; + } /** * The default filter used when constructing the tree. Keep everything that's visible. @@ -456,9 +453,10 @@ export class WriteTree { * @param {!WriteRecord} write * @return {boolean} * @private - * @const */ - static DefaultFilter_ = function(write) { return write.visible; }; + private static DefaultFilter_(write: WriteRecord) { + return write.visible; + } /** * Static method. Given an array of WriteRecords, a filter for which ones to include, and a path, construct the tree of @@ -470,16 +468,16 @@ export class WriteTree { * @return {!CompoundWrite} * @private */ - static layerTree_ = function(writes, filter, treeRoot) { - var compoundWrite = CompoundWrite.Empty; - for (var i = 0; i < writes.length; ++i) { - var write = writes[i]; + private static layerTree_(writes: WriteRecord[], filter: (w: WriteRecord) => boolean, treeRoot: Path): CompoundWrite { + let compoundWrite = CompoundWrite.Empty; + for (let i = 0; i < writes.length; ++i) { + const write = writes[i]; // Theory, a later set will either: // a) abort a relevant transaction, so no need to worry about excluding it from calculating that transaction // b) not be relevant to a transaction (separate branch), so again will not affect the data for that transaction if (filter(write)) { - var writePath = write.path; - var relativePath; + const writePath = write.path; + let relativePath; if (write.snap) { if (treeRoot.contains(writePath)) { relativePath = Path.relativePath(treeRoot, writePath); @@ -499,10 +497,10 @@ export class WriteTree { if (relativePath.isEmpty()) { compoundWrite = compoundWrite.addWrites(Path.Empty, write.children); } else { - var child = safeGet(write.children, relativePath.getFront()); + const child = safeGet(write.children, relativePath.getFront()); if (child) { // There exists a child in this node that matches the root path - var deepNode = child.getChild(relativePath.popFront()); + const deepNode = child.getChild(relativePath.popFront()); compoundWrite = compoundWrite.addWrite(Path.Empty, deepNode); } } @@ -515,15 +513,13 @@ export class WriteTree { } } return compoundWrite; - }; + } } /** * A WriteTreeRef wraps a WriteTree and a path, for convenient access to a particular 
subtree. All of the methods * just proxy to the underlying WriteTree. * - * @param {!Path} path - * @param {!WriteTree} writeTree * @constructor */ export class WriteTreeRef { @@ -535,7 +531,7 @@ export class WriteTreeRef { * @private * @const */ - treePath_; + private readonly treePath_: Path; /** * * A reference to the actual tree of write data. All methods are pass-through to the tree, but with the appropriate @@ -548,12 +544,17 @@ export class WriteTreeRef { * @private * @const */ - writeTree_; + private readonly writeTree_: WriteTree; - constructor(path, writeTree) { + /** + * @param {!Path} path + * @param {!WriteTree} writeTree + */ + constructor(path: Path, writeTree: WriteTree) { this.treePath_ = path; this.writeTree_ = writeTree; - }; + } + /** * If possible, returns a complete event cache, using the underlying server data if possible. In addition, can be used * to get a cache that includes hidden writes, and excludes arbitrary writes. Note that customizing the returned node @@ -564,11 +565,11 @@ export class WriteTreeRef { * @param {boolean=} includeHiddenWrites Defaults to false, whether or not to layer on writes with visible set to false * @return {?Node} */ - calcCompleteEventCache(completeServerCache, writeIdsToExclude, - includeHiddenWrites) { + calcCompleteEventCache(completeServerCache: Node | null, writeIdsToExclude?: number[], + includeHiddenWrites?: boolean): Node | null { return this.writeTree_.calcCompleteEventCache(this.treePath_, completeServerCache, writeIdsToExclude, - includeHiddenWrites); - }; + includeHiddenWrites); + } /** * If possible, returns a children node containing all of the complete children we have data for. The returned data is a @@ -577,9 +578,9 @@ export class WriteTreeRef { * @param {?ChildrenNode} completeServerChildren * @return {!ChildrenNode} */ - calcCompleteEventChildren(completeServerChildren) { - return this.writeTree_.calcCompleteEventChildren(this.treePath_, completeServerChildren); - }; + calcCompleteEventChildren(completeServerChildren: ChildrenNode | null): ChildrenNode { + return this.writeTree_.calcCompleteEventChildren(this.treePath_, completeServerChildren) as ChildrenNode; + } /** * Given that either the underlying server data has updated or the outstanding writes have updated, determine what, @@ -600,9 +601,10 @@ export class WriteTreeRef { * @param {?Node} existingServerSnap * @return {?Node} */ - calcEventCacheAfterServerOverwrite(path, existingEventSnap, existingServerSnap) { + calcEventCacheAfterServerOverwrite(path: Path, existingEventSnap: Node | null, + existingServerSnap: Node | null): Node | null { return this.writeTree_.calcEventCacheAfterServerOverwrite(this.treePath_, path, existingEventSnap, existingServerSnap); - }; + } /** * Returns a node if there is a complete overwrite for this path. More specifically, if there is a write at @@ -612,9 +614,9 @@ export class WriteTreeRef { * @param {!Path} path * @return {?Node} */ - shadowingWrite(path) { + shadowingWrite(path: Path): Node | null { return this.writeTree_.shadowingWrite(this.treePath_.child(path)); - }; + } /** * This method is used when processing child remove events on a query. 
If we can, we pull in children that were outside @@ -627,9 +629,10 @@ export class WriteTreeRef { * @param {!Index} index * @return {!Array.} */ - calcIndexedSlice(completeServerData, startPost, count, reverse, index) { + calcIndexedSlice(completeServerData: Node | null, startPost: NamedNode, count: number, + reverse: boolean, index: Index): NamedNode[] { return this.writeTree_.calcIndexedSlice(this.treePath_, completeServerData, startPost, count, reverse, index); - }; + } /** * Returns a complete child for a given server snap after applying all user writes or null if there is no @@ -639,9 +642,9 @@ export class WriteTreeRef { * @param {!CacheNode} existingServerCache * @return {?Node} */ - calcCompleteChild(childKey, existingServerCache) { + calcCompleteChild(childKey: string, existingServerCache: CacheNode): Node | null { return this.writeTree_.calcCompleteChild(this.treePath_, childKey, existingServerCache); - }; + } /** * Return a WriteTreeRef for a child. @@ -649,7 +652,7 @@ export class WriteTreeRef { * @param {string} childName * @return {!WriteTreeRef} */ - child(childName) { + child(childName: string): WriteTreeRef { return new WriteTreeRef(this.treePath_.child(childName), this.writeTree_); - }; + } } diff --git a/src/database/core/operation/AckUserWrite.ts b/src/database/core/operation/AckUserWrite.ts index f9945334c2c..29e553fb82b 100644 --- a/src/database/core/operation/AckUserWrite.ts +++ b/src/database/core/operation/AckUserWrite.ts @@ -33,7 +33,7 @@ export class AckUserWrite implements Operation { * @param {!boolean} revert */ constructor(/**@inheritDoc */ public path: Path, - /**@inheritDoc */ public affectedTree: ImmutableTree, + /**@inheritDoc */ public affectedTree: ImmutableTree, /**@inheritDoc */ public revert: boolean) { } diff --git a/src/database/core/operation/Merge.ts b/src/database/core/operation/Merge.ts index 720de95aa88..60aa5aac5e4 100644 --- a/src/database/core/operation/Merge.ts +++ b/src/database/core/operation/Merge.ts @@ -19,6 +19,7 @@ import { Overwrite } from "./Overwrite"; import { Path } from "../util/Path"; import { assert } from "../../../utils/assert"; import { ImmutableTree } from '../util/ImmutableTree'; +import { Node } from '../snap/Node'; /** * @param {!OperationSource} source @@ -33,7 +34,7 @@ export class Merge implements Operation { constructor(/**@inheritDoc */ public source: OperationSource, /**@inheritDoc */ public path: Path, - /**@inheritDoc */ public children: ImmutableTree) { + /**@inheritDoc */ public children: ImmutableTree) { } /** diff --git a/src/database/core/operation/Operation.ts b/src/database/core/operation/Operation.ts index 82d306f9372..16b55f26eee 100644 --- a/src/database/core/operation/Operation.ts +++ b/src/database/core/operation/Operation.ts @@ -84,7 +84,7 @@ export class OperationSource { * @param {string} queryId * @return {!OperationSource} */ - static forServerTaggedQuery = function(queryId) { + static forServerTaggedQuery = function(queryId: string): OperationSource { return new OperationSource(false, /*fromServer=*/true, queryId, /*tagged=*/true); }; } \ No newline at end of file diff --git a/src/database/core/snap/ChildrenNode.ts b/src/database/core/snap/ChildrenNode.ts index 93b473b519d..22f1ecccec6 100644 --- a/src/database/core/snap/ChildrenNode.ts +++ b/src/database/core/snap/ChildrenNode.ts @@ -15,28 +15,34 @@ */ import { assert } from "../../../utils/assert"; -import { +import { sha1, MAX_NAME, MIN_NAME -} from "../util/util"; -import { SortedMap } from "../util/SortedMap"; -import { Node, NamedNode } 
from "./Node"; -import { - validatePriorityNode, +} from '../util/util'; +import { SortedMap, SortedMapIterator } from '../util/SortedMap'; +import { Node, NamedNode } from './Node'; +import { + validatePriorityNode, priorityHashText, setMaxNode -} from "./snap"; -import { PRIORITY_INDEX, setMaxNode as setPriorityMaxNode } from "./indexes/PriorityIndex"; -import { KEY_INDEX, KeyIndex } from "./indexes/KeyIndex"; -import { IndexMap } from "./IndexMap"; -import { LeafNode } from "./LeafNode"; -import { NAME_COMPARATOR } from "./comparators"; -import "./indexes/Index"; +} from './snap'; +import { PRIORITY_INDEX, setMaxNode as setPriorityMaxNode } from './indexes/PriorityIndex'; +import { KEY_INDEX, KeyIndex } from './indexes/KeyIndex'; +import { IndexMap } from './IndexMap'; +import { LeafNode } from './LeafNode'; +import { NAME_COMPARATOR } from './comparators'; +import { Index } from './indexes/Index'; +import { Path } from '../util/Path'; + +export interface ChildrenNodeConstructor { + new(children_: SortedMap, priorityNode_: Node | null, indexMap_: IndexMap): ChildrenNode; + EMPTY_NODE: ChildrenNode; +} // TODO: For memory savings, don't store priorityNode_ if it's empty. -let EMPTY_NODE; +let EMPTY_NODE: ChildrenNode; /** * ChildrenNode is a class for storing internal nodes in a DataSnapshot @@ -45,115 +51,92 @@ let EMPTY_NODE; * * @constructor * @implements {Node} - * @param {!SortedMap.} children List of children - * of this node.. - * @param {?Node} priorityNode The priority of this node (as a snapshot node). - * @param {!IndexMap} indexMap */ export class ChildrenNode implements Node { - children_; - priorityNode_; - indexMap_; - lazyHash_; - - static get EMPTY_NODE() { - return EMPTY_NODE || (EMPTY_NODE = new ChildrenNode(new SortedMap(NAME_COMPARATOR), null, IndexMap.Default)); + private lazyHash_: string | null = null; + + static get EMPTY_NODE(): ChildrenNode { + return EMPTY_NODE || (EMPTY_NODE = new ChildrenNode(new SortedMap(NAME_COMPARATOR), null, IndexMap.Default)); } - constructor(children, priorityNode, indexMap) { - /** - * @private - * @const - * @type {!SortedMap.} - */ - this.children_ = children; + /** + * + * @param {!SortedMap.} children_ List of children + * of this node.. + * @param {?Node} priorityNode_ The priority of this node (as a snapshot node). + * @param {!IndexMap} indexMap_ + */ + constructor(private readonly children_: SortedMap, + private readonly priorityNode_: Node | null, + private indexMap_: IndexMap) { /** - * Note: The only reason we allow null priority is to for EMPTY_NODE, since we can't use + * Note: The only reason we allow null priority is for EMPTY_NODE, since we can't use * EMPTY_NODE as the priority of EMPTY_NODE. We might want to consider making EMPTY_NODE its own * class instead of an empty ChildrenNode. 
- * - * @private - * @const - * @type {?Node} */ - this.priorityNode_ = priorityNode; if (this.priorityNode_) { validatePriorityNode(this.priorityNode_); } - if (children.isEmpty()) { + if (this.children_.isEmpty()) { assert(!this.priorityNode_ || this.priorityNode_.isEmpty(), 'An empty node cannot have a priority'); } - - /** - * - * @type {!IndexMap} - * @private - */ - this.indexMap_ = indexMap; - - /** - * - * @type {?string} - * @private - */ - this.lazyHash_ = null; - }; + } /** @inheritDoc */ - isLeafNode() { + isLeafNode(): boolean { return false; - }; + } /** @inheritDoc */ - getPriority() { + getPriority(): Node { return this.priorityNode_ || EMPTY_NODE; - }; + } /** @inheritDoc */ - updatePriority(newPriorityNode) { + updatePriority(newPriorityNode: Node): Node { if (this.children_.isEmpty()) { // Don't allow priorities on empty nodes return this; } else { return new ChildrenNode(this.children_, newPriorityNode, this.indexMap_); } - }; + } /** @inheritDoc */ - getImmediateChild(childName) { + getImmediateChild(childName: string): Node { // Hack to treat priority as a regular child if (childName === '.priority') { return this.getPriority(); } else { - var child = this.children_.get(childName); + const child = this.children_.get(childName); return child === null ? EMPTY_NODE : child; } - }; + } /** @inheritDoc */ - getChild(path) { - var front = path.getFront(); + getChild(path: Path): Node { + const front = path.getFront(); if (front === null) return this; return this.getImmediateChild(front).getChild(path.popFront()); - }; + } /** @inheritDoc */ - hasChild(childName) { + hasChild(childName: string): boolean { return this.children_.get(childName) !== null; - }; + } /** @inheritDoc */ - updateImmediateChild(childName, newChildNode) { + updateImmediateChild(childName: string, newChildNode: Node): Node { assert(newChildNode, 'We should always be passing snapshot nodes'); if (childName === '.priority') { return this.updatePriority(newChildNode); } else { - var namedNode = new NamedNode(childName, newChildNode); - var newChildren, newIndexMap, newPriority; + const namedNode = new NamedNode(childName, newChildNode); + let newChildren, newIndexMap, newPriority; if (newChildNode.isEmpty()) { newChildren = this.children_.remove(childName); newIndexMap = this.indexMap_.removeFromIndexes(namedNode, this.children_ @@ -166,47 +149,46 @@ export class ChildrenNode implements Node { newPriority = newChildren.isEmpty() ? EMPTY_NODE : this.priorityNode_; return new ChildrenNode(newChildren, newPriority, newIndexMap); } - }; + } /** @inheritDoc */ - updateChild(path, newChildNode) { - var front = path.getFront(); + updateChild(path: Path, newChildNode: Node): Node { + const front = path.getFront(); if (front === null) { return newChildNode; } else { assert(path.getFront() !== '.priority' || path.getLength() === 1, - '.priority must be the last token in a path'); - var newImmediateChild = this.getImmediateChild(front). 
- updateChild(path.popFront(), newChildNode); + '.priority must be the last token in a path'); + const newImmediateChild = this.getImmediateChild(front).updateChild(path.popFront(), newChildNode); return this.updateImmediateChild(front, newImmediateChild); } - }; + } /** @inheritDoc */ - isEmpty() { + isEmpty(): boolean { return this.children_.isEmpty(); - }; + } /** @inheritDoc */ - numChildren() { + numChildren(): number { return this.children_.count(); - }; + } /** * @private * @type {RegExp} */ - static INTEGER_REGEXP_ = /^(0|[1-9]\d*)$/; + private static INTEGER_REGEXP_ = /^(0|[1-9]\d*)$/; /** @inheritDoc */ - val(opt_exportFormat) { + val(exportFormat?: boolean): object { if (this.isEmpty()) return null; - var obj = { }; - var numKeys = 0, maxKey = 0, allIntegerKeys = true; - this.forEachChild(PRIORITY_INDEX, function(key, childNode) { - obj[key] = childNode.val(opt_exportFormat); + const obj: { [k: string]: Object } = {}; + let numKeys = 0, maxKey = 0, allIntegerKeys = true; + this.forEachChild(PRIORITY_INDEX, function (key: string, childNode: Node) { + obj[key] = childNode.val(exportFormat); numKeys++; if (allIntegerKeys && ChildrenNode.INTEGER_REGEXP_.test(key)) { @@ -216,32 +198,32 @@ export class ChildrenNode implements Node { } }); - if (!opt_exportFormat && allIntegerKeys && maxKey < 2 * numKeys) { + if (!exportFormat && allIntegerKeys && maxKey < 2 * numKeys) { // convert to array. - var array = []; - for (var key in obj) - array[key] = obj[key]; + const array: Object[] = []; + for (let key in obj) + array[key as any as number] = obj[key]; return array; } else { - if (opt_exportFormat && !this.getPriority().isEmpty()) { + if (exportFormat && !this.getPriority().isEmpty()) { obj['.priority'] = this.getPriority().val(); } return obj; } - }; + } /** @inheritDoc */ - hash() { + hash(): string { if (this.lazyHash_ === null) { - var toHash = ''; + let toHash = ''; if (!this.getPriority().isEmpty()) toHash += 'priority:' + priorityHashText( - /**@type {(!string|!number)} */ (this.getPriority().val())) + ':'; + (this.getPriority().val() as string | number)) + ':'; - this.forEachChild(PRIORITY_INDEX, function(key, childNode) { - var childHash = childNode.hash(); + this.forEachChild(PRIORITY_INDEX, function (key, childNode) { + const childHash = childNode.hash(); if (childHash !== '') toHash += ':' + key + ':' + childHash; }); @@ -249,151 +231,152 @@ export class ChildrenNode implements Node { this.lazyHash_ = (toHash === '') ? '' : sha1(toHash); } return this.lazyHash_; - }; + } /** @inheritDoc */ - getPredecessorChildName(childName, childNode, index) { - var idx = this.resolveIndex_(index); + getPredecessorChildName(childName: string, childNode: Node, index: Index): string { + const idx = this.resolveIndex_(index); if (idx) { - var predecessor = idx.getPredecessorKey(new NamedNode(childName, childNode)); + const predecessor = idx.getPredecessorKey(new NamedNode(childName, childNode)); return predecessor ? 
predecessor.name : null; } else { return this.children_.getPredecessorKey(childName); } - }; + } /** - * @param {!fb.core.snap.Index} indexDefinition + * @param {!Index} indexDefinition * @return {?string} */ - getFirstChildName(indexDefinition) { - var idx = this.resolveIndex_(indexDefinition); + getFirstChildName(indexDefinition: Index): string | null { + const idx = this.resolveIndex_(indexDefinition); if (idx) { - var minKey = idx.minKey(); + const minKey = idx.minKey(); return minKey && minKey.name; } else { return this.children_.minKey(); } - }; + } /** - * @param {!fb.core.snap.Index} indexDefinition + * @param {!Index} indexDefinition * @return {?NamedNode} */ - getFirstChild(indexDefinition) { - var minKey = this.getFirstChildName(indexDefinition); + getFirstChild(indexDefinition: Index): NamedNode | null { + const minKey = this.getFirstChildName(indexDefinition); if (minKey) { return new NamedNode(minKey, this.children_.get(minKey)); } else { return null; } - }; + } /** * Given an index, return the key name of the largest value we have, according to that index - * @param {!fb.core.snap.Index} indexDefinition + * @param {!Index} indexDefinition * @return {?string} */ - getLastChildName(indexDefinition) { - var idx = this.resolveIndex_(indexDefinition); + getLastChildName(indexDefinition: Index): string | null { + const idx = this.resolveIndex_(indexDefinition); if (idx) { - var maxKey = idx.maxKey(); + const maxKey = idx.maxKey(); return maxKey && maxKey.name; } else { return this.children_.maxKey(); } - }; + } /** - * @param {!fb.core.snap.Index} indexDefinition + * @param {!Index} indexDefinition * @return {?NamedNode} */ - getLastChild(indexDefinition) { - var maxKey = this.getLastChildName(indexDefinition); + getLastChild(indexDefinition: Index): NamedNode | null { + const maxKey = this.getLastChildName(indexDefinition); if (maxKey) { return new NamedNode(maxKey, this.children_.get(maxKey)); } else { return null; } - }; + } /** * @inheritDoc */ - forEachChild(index, action) { - var idx = this.resolveIndex_(index); + forEachChild(index: Index, action: (key: string, node: Node) => void): any { + const idx = this.resolveIndex_(index); if (idx) { - return idx.inorderTraversal(function(wrappedNode) { + return idx.inorderTraversal(function (wrappedNode) { return action(wrappedNode.name, wrappedNode.node); }); } else { return this.children_.inorderTraversal(action); } - }; + } /** - * @param {!fb.core.snap.Index} indexDefinition + * @param {!Index} indexDefinition * @return {SortedMapIterator} */ - getIterator(indexDefinition) { + getIterator(indexDefinition: Index): SortedMapIterator { return this.getIteratorFrom(indexDefinition.minPost(), indexDefinition); - }; + } /** * * @param {!NamedNode} startPost - * @param {!fb.core.snap.Index} indexDefinition + * @param {!Index} indexDefinition * @return {!SortedMapIterator} */ - getIteratorFrom(startPost, indexDefinition) { - var idx = this.resolveIndex_(indexDefinition); + getIteratorFrom(startPost: NamedNode, indexDefinition: Index): SortedMapIterator { + const idx = this.resolveIndex_(indexDefinition); if (idx) { - return idx.getIteratorFrom(startPost, function(key) { return key; }); + return idx.getIteratorFrom(startPost, (key) => key); } else { - var iterator = this.children_.getIteratorFrom(startPost.name, NamedNode.Wrap); - var next = iterator.peek(); + const iterator = this.children_.getIteratorFrom(startPost.name, NamedNode.Wrap); + let next = iterator.peek(); while (next != null && indexDefinition.compare(next, startPost) < 0) { 
iterator.getNext(); next = iterator.peek(); } return iterator; } - }; + } /** - * @param {!fb.core.snap.Index} indexDefinition + * @param {!Index} indexDefinition * @return {!SortedMapIterator} */ - getReverseIterator(indexDefinition) { + getReverseIterator(indexDefinition: Index): SortedMapIterator { return this.getReverseIteratorFrom(indexDefinition.maxPost(), indexDefinition); - }; + } /** * @param {!NamedNode} endPost - * @param {!fb.core.snap.Index} indexDefinition + * @param {!Index} indexDefinition * @return {!SortedMapIterator} */ - getReverseIteratorFrom(endPost, indexDefinition) { - var idx = this.resolveIndex_(indexDefinition); + getReverseIteratorFrom(endPost: NamedNode, + indexDefinition: Index): SortedMapIterator { + const idx = this.resolveIndex_(indexDefinition); if (idx) { - return idx.getReverseIteratorFrom(endPost, function(key) { return key; }); + return idx.getReverseIteratorFrom(endPost, function (key) { return key; }); } else { - var iterator = this.children_.getReverseIteratorFrom(endPost.name, NamedNode.Wrap); - var next = iterator.peek(); + const iterator = this.children_.getReverseIteratorFrom(endPost.name, NamedNode.Wrap); + let next = iterator.peek(); while (next != null && indexDefinition.compare(next, endPost) > 0) { iterator.getNext(); next = iterator.peek(); } return iterator; } - }; + } /** * @inheritDoc */ - compareTo(other) { + compareTo(other: ChildrenNode): number { if (this.isEmpty()) { if (other.isEmpty()) { return 0; @@ -408,45 +391,45 @@ export class ChildrenNode implements Node { // Must be another node with children. return 0; } - }; + } /** * @inheritDoc */ - withIndex(indexDefinition) { + withIndex(indexDefinition: Index): Node { if (indexDefinition === KEY_INDEX || this.indexMap_.hasIndex(indexDefinition)) { return this; } else { - var newIndexMap = this.indexMap_.addIndex(indexDefinition, this.children_); + const newIndexMap = this.indexMap_.addIndex(indexDefinition, this.children_); return new ChildrenNode(this.children_, this.priorityNode_, newIndexMap); } - }; + } /** * @inheritDoc */ - isIndexed(index) { + isIndexed(index: Index): boolean { return index === KEY_INDEX || this.indexMap_.hasIndex(index); - }; + } /** * @inheritDoc */ - equals(other) { + equals(other: Node): boolean { if (other === this) { return true; } else if (other.isLeafNode()) { return false; } else { - var otherChildrenNode = /** @type {!ChildrenNode} */ (other); + const otherChildrenNode = other as ChildrenNode; if (!this.getPriority().equals(otherChildrenNode.getPriority())) { return false; } else if (this.children_.count() === otherChildrenNode.children_.count()) { - var thisIter = this.getIterator(PRIORITY_INDEX); - var otherIter = otherChildrenNode.getIterator(PRIORITY_INDEX); - var thisCurrent = thisIter.getNext(); - var otherCurrent = otherIter.getNext(); + const thisIter = this.getIterator(PRIORITY_INDEX); + const otherIter = otherChildrenNode.getIterator(PRIORITY_INDEX); + let thisCurrent = thisIter.getNext(); + let otherCurrent = otherIter.getNext(); while (thisCurrent && otherCurrent) { if (thisCurrent.name !== otherCurrent.name || !thisCurrent.node.equals(otherCurrent.node)) { return false; @@ -459,7 +442,7 @@ export class ChildrenNode implements Node { return false; } } - }; + } /** @@ -467,16 +450,16 @@ export class ChildrenNode implements Node { * instead. 
* * @private - * @param {!fb.core.snap.Index} indexDefinition + * @param {!Index} indexDefinition * @return {?SortedMap.} */ - resolveIndex_(indexDefinition) { + private resolveIndex_(indexDefinition: Index): SortedMap | null { if (indexDefinition === KEY_INDEX) { return null; } else { return this.indexMap_.get(indexDefinition.toString()); } - }; + } } @@ -485,39 +468,39 @@ export class ChildrenNode implements Node { * @extends {ChildrenNode} * @private */ -export class MaxNode extends ChildrenNode { +export class MaxNode extends ChildrenNode { constructor() { - super(new SortedMap(NAME_COMPARATOR), ChildrenNode.EMPTY_NODE, IndexMap.Default); + super(new SortedMap(NAME_COMPARATOR), ChildrenNode.EMPTY_NODE, IndexMap.Default); } - compareTo(other) { + compareTo(other: Node): number { if (other === this) { return 0; } else { return 1; } - }; + } - equals(other) { + equals(other: Node): boolean { // Not that we ever compare it, but MAX_NODE is only ever equal to itself return other === this; - }; + } - getPriority() { + getPriority(): MaxNode { return this; - }; + } - getImmediateChild(childName) { + getImmediateChild(childName: string): ChildrenNode { return ChildrenNode.EMPTY_NODE; - }; + } - isEmpty() { + isEmpty(): boolean { return false; - }; + } } /** diff --git a/src/database/core/snap/IndexMap.ts b/src/database/core/snap/IndexMap.ts index 019fcac6e14..0b8ce7bfc49 100644 --- a/src/database/core/snap/IndexMap.ts +++ b/src/database/core/snap/IndexMap.ts @@ -14,13 +14,16 @@ * limitations under the License. */ -import { assert } from "../../../utils/assert"; -import { buildChildSet } from "./childSet"; -import { contains, clone, map, safeGet } from "../../../utils/obj"; -import { NamedNode } from "./Node"; -import { PRIORITY_INDEX } from "./indexes/PriorityIndex"; -import { KEY_INDEX } from "./indexes/KeyIndex"; -let _defaultIndexMap; +import { assert } from '../../../utils/assert'; +import { buildChildSet } from './childSet'; +import { contains, clone, map, safeGet } from '../../../utils/obj'; +import { NamedNode, Node } from './Node'; +import { PRIORITY_INDEX } from './indexes/PriorityIndex'; +import { KEY_INDEX } from './indexes/KeyIndex'; +import { SortedMap } from '../util/SortedMap'; +import { Index } from './indexes/Index'; + +let _defaultIndexMap: IndexMap; const fallbackObject = {}; @@ -31,34 +34,31 @@ const fallbackObject = {}; * @constructor */ export class IndexMap { - indexes_; - indexSet_; - /** * The default IndexMap for nodes without a priority * @type {!IndexMap} * @const */ - static get Default() { + static get Default(): IndexMap { assert(fallbackObject && PRIORITY_INDEX, 'ChildrenNode.ts has not been loaded'); _defaultIndexMap = _defaultIndexMap || new IndexMap( - { '.priority': fallbackObject }, - { '.priority': PRIORITY_INDEX } + {'.priority': fallbackObject}, + {'.priority': PRIORITY_INDEX} ); return _defaultIndexMap; } - constructor(indexes, indexSet) { - this.indexes_ = indexes; - this.indexSet_ = indexSet; + constructor(private indexes_: { [k: string]: SortedMap | /*FallbackType*/object }, + private indexSet_: { [k: string]: Index }) { } + /** * * @param {!string} indexKey * @return {?SortedMap.} */ - get(indexKey) { - var sortedMap = safeGet(this.indexes_, indexKey); + get(indexKey: string): SortedMap | null { + const sortedMap = safeGet(this.indexes_, indexKey); if (!sortedMap) throw new Error('No index defined for ' + indexKey); if (sortedMap === fallbackObject) { @@ -68,46 +68,46 @@ export class IndexMap { } else { return sortedMap; } - }; + } /** * @param
{!Index} indexDefinition * @return {boolean} */ - hasIndex(indexDefinition) { + hasIndex(indexDefinition: Index): boolean { return contains(this.indexSet_, indexDefinition.toString()); - }; + } /** * @param {!Index} indexDefinition * @param {!SortedMap.} existingChildren * @return {!IndexMap} */ - addIndex(indexDefinition, existingChildren) { + addIndex(indexDefinition: Index, existingChildren: SortedMap): IndexMap { assert(indexDefinition !== KEY_INDEX, - "KeyIndex always exists and isn't meant to be added to the IndexMap."); - var childList = []; - var sawIndexedValue = false; - var iter = existingChildren.getIterator(NamedNode.Wrap); - var next = iter.getNext(); + 'KeyIndex always exists and isn\'t meant to be added to the IndexMap.'); + const childList = []; + let sawIndexedValue = false; + const iter = existingChildren.getIterator(NamedNode.Wrap); + let next = iter.getNext(); while (next) { sawIndexedValue = sawIndexedValue || indexDefinition.isDefinedOn(next.node); childList.push(next); next = iter.getNext(); } - var newIndex; + let newIndex; if (sawIndexedValue) { newIndex = buildChildSet(childList, indexDefinition.getCompare()); } else { newIndex = fallbackObject; } - var indexName = indexDefinition.toString(); - var newIndexSet = clone(this.indexSet_); + const indexName = indexDefinition.toString(); + const newIndexSet = clone(this.indexSet_); newIndexSet[indexName] = indexDefinition; - var newIndexes = clone(this.indexes_); + const newIndexes = clone(this.indexes_); newIndexes[indexName] = newIndex; return new IndexMap(newIndexes, newIndexSet); - }; + } /** @@ -116,18 +116,17 @@ export class IndexMap { * @param {!SortedMap.} existingChildren * @return {!IndexMap} */ - addToIndexes(namedNode, existingChildren) { - var self = this; - var newIndexes = map(this.indexes_, function(indexedChildren, indexName) { - var index = safeGet(self.indexSet_, indexName); + addToIndexes(namedNode: NamedNode, existingChildren: SortedMap): IndexMap { + const newIndexes = map(this.indexes_, (indexedChildren: SortedMap, indexName: string) => { + const index = safeGet(this.indexSet_, indexName); assert(index, 'Missing index implementation for ' + indexName); if (indexedChildren === fallbackObject) { // Check to see if we need to index everything if (index.isDefinedOn(namedNode.node)) { // We need to build this index - var childList = []; - var iter = existingChildren.getIterator(NamedNode.Wrap); - var next = iter.getNext(); + const childList = []; + const iter = existingChildren.getIterator(NamedNode.Wrap); + let next = iter.getNext(); while (next) { if (next.name != namedNode.name) { childList.push(next); @@ -141,8 +140,8 @@ export class IndexMap { return fallbackObject; } } else { - var existingSnap = existingChildren.get(namedNode.name); - var newChildren = indexedChildren; + const existingSnap = existingChildren.get(namedNode.name); + let newChildren = indexedChildren; if (existingSnap) { newChildren = newChildren.remove(new NamedNode(namedNode.name, existingSnap)); } @@ -150,7 +149,7 @@ export class IndexMap { } }); return new IndexMap(newIndexes, this.indexSet_); - }; + } /** * Create a new IndexMap instance with the given value removed @@ -158,13 +157,13 @@ export class IndexMap { * @param {!SortedMap.} existingChildren * @return {!IndexMap} */ - removeFromIndexes(namedNode, existingChildren) { - var newIndexes = map(this.indexes_, function(indexedChildren) { + removeFromIndexes(namedNode: NamedNode, existingChildren: SortedMap): IndexMap { + const newIndexes = map(this.indexes_, function 
(indexedChildren: SortedMap) { if (indexedChildren === fallbackObject) { // This is the fallback. Just return it, nothing to do in this case return indexedChildren; } else { - var existingSnap = existingChildren.get(namedNode.name); + const existingSnap = existingChildren.get(namedNode.name); if (existingSnap) { return indexedChildren.remove(new NamedNode(namedNode.name, existingSnap)); } else { @@ -174,5 +173,5 @@ export class IndexMap { } }); return new IndexMap(newIndexes, this.indexSet_); - }; + } } diff --git a/src/database/core/snap/LeafNode.ts b/src/database/core/snap/LeafNode.ts index 9e6e29b0035..0f8e6aa9c0c 100644 --- a/src/database/core/snap/LeafNode.ts +++ b/src/database/core/snap/LeafNode.ts @@ -15,17 +15,20 @@ */ import { assert } from '../../../utils/assert' -import { +import { doubleToIEEE754String, sha1 -} from "../util/util"; +} from '../util/util'; import { priorityHashText, validatePriorityNode -} from "./snap"; -import { Node } from "./Node"; +} from './snap'; +import { Node } from './Node'; +import { Path } from '../util/Path'; +import { Index } from './indexes/Index'; +import { ChildrenNodeConstructor } from './ChildrenNode'; -let __childrenNodeConstructor; +let __childrenNodeConstructor: ChildrenNodeConstructor; /** * LeafNode is a class for storing leaf nodes in a DataSnapshot. It @@ -33,12 +36,14 @@ let __childrenNodeConstructor; * number, or boolean) accessible via getValue(). */ export class LeafNode implements Node { - static set __childrenNodeConstructor(val) { + static set __childrenNodeConstructor(val: ChildrenNodeConstructor) { __childrenNodeConstructor = val; } + static get __childrenNodeConstructor() { return __childrenNodeConstructor; } + /** * The sort order for comparing leaf nodes of different types. If two leaf nodes have * the same type, the comparison falls back to their value @@ -47,53 +52,39 @@ export class LeafNode implements Node { */ static VALUE_TYPE_ORDER = ['object', 'boolean', 'number', 'string']; - value_; - priorityNode_; - lazyHash_; + private lazyHash_: string | null = null; + /** * @implements {Node} - * @param {!(string|number|boolean|Object)} value The value to store in this leaf node. + * @param {!(string|number|boolean|Object)} value_ The value to store in this leaf node. * The object type is possible in the event of a deferred value - * @param {!Node=} opt_priorityNode The priority of this node. + * @param {!Node=} priorityNode_ The priority of this node. */ - constructor(value, opt_priorityNode?) 
{ - /** - * @private - * @const - * @type {!(string|number|boolean|Object)} - */ - this.value_ = value; + constructor(private readonly value_: string | number | boolean | object, + private priorityNode_: Node = LeafNode.__childrenNodeConstructor.EMPTY_NODE) { assert(this.value_ !== undefined && this.value_ !== null, - "LeafNode shouldn't be created with null/undefined value."); + 'LeafNode shouldn\'t be created with null/undefined value.'); - /** - * @private - * @const - * @type {!Node} - */ - this.priorityNode_ = opt_priorityNode || LeafNode.__childrenNodeConstructor.EMPTY_NODE; validatePriorityNode(this.priorityNode_); - - this.lazyHash_ = null; } /** @inheritDoc */ - isLeafNode() { - return true; + isLeafNode(): boolean { + return true; } /** @inheritDoc */ - getPriority() { + getPriority(): Node { return this.priorityNode_; } /** @inheritDoc */ - updatePriority(newPriorityNode) { + updatePriority(newPriorityNode: Node): Node { return new LeafNode(this.value_, newPriorityNode); } /** @inheritDoc */ - getImmediateChild(childName) { + getImmediateChild(childName: string): Node { // Hack to treat priority as a regular child if (childName === '.priority') { return this.priorityNode_; @@ -103,7 +94,7 @@ export class LeafNode implements Node { } /** @inheritDoc */ - getChild(path) { + getChild(path: Path): Node { if (path.isEmpty()) { return this; } else if (path.getFront() === '.priority') { @@ -116,107 +107,107 @@ export class LeafNode implements Node { /** * @inheritDoc */ - hasChild() { + hasChild(): boolean { return false; } /** @inheritDoc */ - getPredecessorChildName(childName, childNode) { + getPredecessorChildName(childName: String, childNode: Node): null { return null; } /** @inheritDoc */ - updateImmediateChild(childName, newChildNode) { + updateImmediateChild(childName: string, newChildNode: Node): Node { if (childName === '.priority') { return this.updatePriority(newChildNode); } else if (newChildNode.isEmpty() && childName !== '.priority') { return this; } else { return LeafNode.__childrenNodeConstructor.EMPTY_NODE - .updateImmediateChild(childName, newChildNode) - .updatePriority(this.priorityNode_); + .updateImmediateChild(childName, newChildNode) + .updatePriority(this.priorityNode_); } } /** @inheritDoc */ - updateChild(path, newChildNode) { - var front = path.getFront(); + updateChild(path: Path, newChildNode: Node): Node { + const front = path.getFront(); if (front === null) { return newChildNode; } else if (newChildNode.isEmpty() && front !== '.priority') { return this; } else { assert(front !== '.priority' || path.getLength() === 1, - '.priority must be the last token in a path'); + '.priority must be the last token in a path'); return this.updateImmediateChild(front, LeafNode.__childrenNodeConstructor.EMPTY_NODE.updateChild(path.popFront(), newChildNode)); } } /** @inheritDoc */ - isEmpty() { + isEmpty(): boolean { return false; } /** @inheritDoc */ - numChildren() { + numChildren(): number { return 0; } /** @inheritDoc */ - forEachChild(index, action) { + forEachChild(index: Index, action: (s: string, n: Node) => void): any { return false; } /** * @inheritDoc */ - val(opt_exportFormat) { - if (opt_exportFormat && !this.getPriority().isEmpty()) - return { '.value': this.getValue(), '.priority' : this.getPriority().val() }; + val(exportFormat?: boolean): Object { + if (exportFormat && !this.getPriority().isEmpty()) + return {'.value': this.getValue(), '.priority': this.getPriority().val()}; else return this.getValue(); } /** @inheritDoc */ - hash() { + hash(): string { 
if (this.lazyHash_ === null) { - var toHash = ''; + let toHash = ''; if (!this.priorityNode_.isEmpty()) toHash += 'priority:' + priorityHashText( - /** @type {(number|string)} */ (this.priorityNode_.val())) + ':'; + (this.priorityNode_.val() as number|string)) + ':'; - var type = typeof this.value_; + const type = typeof this.value_; toHash += type + ':'; if (type === 'number') { - toHash += doubleToIEEE754String(/** @type {number} */ (this.value_)); + toHash += doubleToIEEE754String(this.value_ as number); } else { toHash += this.value_; } this.lazyHash_ = sha1(toHash); } - return /**@type {!string} */ (this.lazyHash_); + return this.lazyHash_; } /** * Returns the value of the leaf node. * @return {Object|string|number|boolean} The value of the node. */ - getValue() { + getValue(): object | string | number | boolean { return this.value_; } /** * @inheritDoc */ - compareTo(other) { + compareTo(other: Node): number { if (other === LeafNode.__childrenNodeConstructor.EMPTY_NODE) { return 1; } else if (other instanceof LeafNode.__childrenNodeConstructor) { return -1; } else { assert(other.isLeafNode(), 'Unknown node type'); - return this.compareToLeafNode_(/** @type {!LeafNode} */ (other)); + return this.compareToLeafNode_(other as LeafNode); } } @@ -226,11 +217,11 @@ export class LeafNode implements Node { * @return {!number} * @private */ - compareToLeafNode_(otherLeaf) { - var otherLeafType = typeof otherLeaf.value_; - var thisLeafType = typeof this.value_; - var otherIndex = LeafNode.VALUE_TYPE_ORDER.indexOf(otherLeafType); - var thisIndex = LeafNode.VALUE_TYPE_ORDER.indexOf(thisLeafType); + private compareToLeafNode_(otherLeaf: LeafNode): number { + const otherLeafType = typeof otherLeaf.value_; + const thisLeafType = typeof this.value_; + const otherIndex = LeafNode.VALUE_TYPE_ORDER.indexOf(otherLeafType); + const thisIndex = LeafNode.VALUE_TYPE_ORDER.indexOf(thisLeafType); assert(otherIndex >= 0, 'Unknown leaf type: ' + otherLeafType); assert(thisIndex >= 0, 'Unknown leaf type: ' + thisLeafType); if (otherIndex === thisIndex) { @@ -256,21 +247,21 @@ export class LeafNode implements Node { /** * @inheritDoc */ - withIndex() { + withIndex(): Node { return this; } /** * @inheritDoc */ - isIndexed() { + isIndexed(): boolean { return true; } /** * @inheritDoc */ - equals(other) { + equals(other: Node): boolean { /** * @inheritDoc */ @@ -278,10 +269,10 @@ export class LeafNode implements Node { return true; } else if (other.isLeafNode()) { - var otherLeaf = /** @type {!LeafNode} */ (other); + const otherLeaf = other as LeafNode; return this.value_ === otherLeaf.value_ && this.priorityNode_.equals(otherLeaf.priorityNode_); } else { return false; } } -}; // end LeafNode \ No newline at end of file +} \ No newline at end of file diff --git a/src/database/core/snap/Node.ts b/src/database/core/snap/Node.ts index 58585e47e47..c8483b63278 100644 --- a/src/database/core/snap/Node.ts +++ b/src/database/core/snap/Node.ts @@ -69,7 +69,7 @@ export interface Node { * @param {!Index} index The index to use to determine the predecessor * @return {?string} The name of the predecessor child, or null if childNode is the first child. */ - getPredecessorChildName(childName: String, childNode: Node, index: Index): string; + getPredecessorChildName(childName: String, childNode: Node, index: Index): string | null; /** * Returns a duplicate node, with the specified immediate child updated. @@ -116,10 +116,10 @@ export interface Node { * each child. It's passed the child name and the child node. 
   * @return {*} The first truthy value return by action, or the last falsey one
    */
-  forEachChild(index: Index, action: (string, node) => any): any;
+  forEachChild(index: Index, action: (a: string, b: Node) => void): any;
 
   /**
-   * @param {boolean=} opt_exportFormat True for export format (also wire protocol format).
+   * @param {boolean=} exportFormat True for export format (also wire protocol format).
    * @return {*} Value of this node as JSON.
    */
   val(exportFormat?: boolean): Object;
diff --git a/src/database/core/snap/childSet.ts b/src/database/core/snap/childSet.ts
index a2cdb248261..5d6e3f61947 100644
--- a/src/database/core/snap/childSet.ts
+++ b/src/database/core/snap/childSet.ts
@@ -14,42 +14,41 @@
  * limitations under the License.
  */
 
-import { LLRBNode } from "../util/SortedMap";
-import { SortedMap } from "../util/SortedMap";
+import { LLRBNode } from '../util/SortedMap';
+import { SortedMap } from '../util/SortedMap';
+import { NamedNode } from './Node';
 
 const LOG_2 = Math.log(2);
 
 /**
- * @param {number} length
  * @constructor
  */
 class Base12Num {
-  count;
-  current_;
-  bits_;
-
-  constructor(length) {
-    var logBase2 = function(num) {
-      return parseInt((Math.log(num) / LOG_2 as any), 10);
-    };
-    var bitMask = function(bits) {
-      return parseInt(Array(bits + 1).join('1'), 2);
-    };
+  count: number;
+  private current_: number;
+  private bits_: number;
+
+  /**
+   * @param {number} length
+   */
+  constructor(length: number) {
+    const logBase2 = (num: number) => parseInt((Math.log(num) / LOG_2 as any), 10);
+    const bitMask = (bits: number) => parseInt(Array(bits + 1).join('1'), 2);
     this.count = logBase2(length + 1);
     this.current_ = this.count - 1;
-    var mask = bitMask(this.count);
+    const mask = bitMask(this.count);
     this.bits_ = (length + 1) & mask;
   }
 
   /**
    * @return {boolean}
    */
-  nextBitIsOne() {
+  nextBitIsOne(): boolean {
     //noinspection JSBitwiseOperatorUsage
-    var result = !(this.bits_ & (0x1 << this.current_));
+    const result = !(this.bits_ & (0x1 << this.current_));
    this.current_--;
    return result;
-  };
+  }
 }
 
 /**
@@ -67,43 +66,48 @@ class Base12Num {
  * @param {(function(K, K):number)=} mapSortFn An optional override for comparator used by the generated sorted map
  * @return {SortedMap.<K, V>}
  */
-export const buildChildSet = function(childList, cmp, keyFn?, mapSortFn?) {
+export const buildChildSet = function<K, V>(childList: NamedNode[],
+                                            cmp: (a: NamedNode, b: NamedNode) => number,
+                                            keyFn?: (a: NamedNode) => K,
+                                            mapSortFn?: (a: K, b: K) => number): SortedMap<K, V> {
   childList.sort(cmp);
 
-  var buildBalancedTree = function(low, high) {
-    var length = high - low;
+  const buildBalancedTree = function(low: number, high: number): LLRBNode<K, V> | null {
+    const length = high - low;
+    let namedNode: NamedNode;
+    let key: K;
     if (length == 0) {
       return null;
     } else if (length == 1) {
-      var namedNode = childList[low];
-      var key = keyFn ? keyFn(namedNode) : namedNode;
-      return new LLRBNode(key, namedNode.node, LLRBNode.BLACK, null, null);
+      namedNode = childList[low];
+      key = keyFn ? keyFn(namedNode) : namedNode as any as K;
+      return new LLRBNode(key, namedNode.node as any as V, LLRBNode.BLACK, null, null);
     } else {
-      var middle = parseInt((length / 2 as any), 10) + low;
-      var left = buildBalancedTree(low, middle);
-      var right = buildBalancedTree(middle + 1, high);
+      const middle = parseInt((length / 2 as any), 10) + low;
+      const left = buildBalancedTree(low, middle);
+      const right = buildBalancedTree(middle + 1, high);
       namedNode = childList[middle];
-      key = keyFn ?
keyFn(namedNode) : namedNode; - return new LLRBNode(key, namedNode.node, LLRBNode.BLACK, left, right); + key = keyFn ? keyFn(namedNode) : namedNode as any as K; + return new LLRBNode(key, namedNode.node as any as V, LLRBNode.BLACK, left, right); } }; - var buildFrom12Array = function(base12) { - var node = null; - var root = null; - var index = childList.length; + const buildFrom12Array = function (base12: Base12Num): LLRBNode { + let node: LLRBNode = null; + let root = null; + let index = childList.length; - var buildPennant = function(chunkSize, color) { - var low = index - chunkSize; - var high = index; + const buildPennant = function (chunkSize: number, color: boolean) { + const low = index - chunkSize; + const high = index; index -= chunkSize; - var childTree = buildBalancedTree(low + 1, high); - var namedNode = childList[low]; - var key = keyFn ? keyFn(namedNode) : namedNode; - attachPennant(new LLRBNode(key, namedNode.node, color, null, childTree)); + const childTree = buildBalancedTree(low + 1, high); + const namedNode = childList[low]; + const key: K = keyFn ? keyFn(namedNode) : namedNode as any as K; + attachPennant(new LLRBNode(key, namedNode.node as any as V, color, null, childTree)); }; - var attachPennant = function(pennant) { + const attachPennant = function (pennant: LLRBNode) { if (node) { node.left = pennant; node = pennant; @@ -113,10 +117,10 @@ export const buildChildSet = function(childList, cmp, keyFn?, mapSortFn?) { } }; - for (var i = 0; i < base12.count; ++i) { - var isOne = base12.nextBitIsOne(); + for (let i = 0; i < base12.count; ++i) { + const isOne = base12.nextBitIsOne(); // The number of nodes taken in each slice is 2^(arr.length - (i + 1)) - var chunkSize = Math.pow(2, base12.count - (i + 1)); + const chunkSize = Math.pow(2, base12.count - (i + 1)); if (isOne) { buildPennant(chunkSize, LLRBNode.BLACK); } else { @@ -128,8 +132,8 @@ export const buildChildSet = function(childList, cmp, keyFn?, mapSortFn?) 
{ return root; }; - var base12 = new Base12Num(childList.length); - var root = buildFrom12Array(base12); + const base12 = new Base12Num(childList.length); + const root = buildFrom12Array(base12); - return new SortedMap(mapSortFn || cmp, root); + return new SortedMap(mapSortFn || (cmp as any), root); }; \ No newline at end of file diff --git a/src/database/core/snap/comparators.ts b/src/database/core/snap/comparators.ts index 3cc039d54fb..d1a4914c0c2 100644 --- a/src/database/core/snap/comparators.ts +++ b/src/database/core/snap/comparators.ts @@ -15,11 +15,12 @@ */ import { nameCompare } from "../util/util"; +import { NamedNode } from './Node'; -export function NAME_ONLY_COMPARATOR(left, right) { +export function NAME_ONLY_COMPARATOR(left: NamedNode, right: NamedNode) { return nameCompare(left.name, right.name); -}; +} -export function NAME_COMPARATOR(left, right) { +export function NAME_COMPARATOR(left: string, right: string) { return nameCompare(left, right); -}; +} diff --git a/src/database/core/snap/indexes/Index.ts b/src/database/core/snap/indexes/Index.ts index 9aaa64c8e43..e962c181a0b 100644 --- a/src/database/core/snap/indexes/Index.ts +++ b/src/database/core/snap/indexes/Index.ts @@ -16,6 +16,7 @@ import { Node, NamedNode } from "../Node"; import { MIN_NAME, MAX_NAME } from "../../util/util"; +import { Comparator } from '../../util/SortedMap'; /** * @@ -40,9 +41,11 @@ export abstract class Index { * @return {function(!NamedNode, !NamedNode):number} A standalone comparison function for * this index */ - getCompare() { + getCompare(): Comparator { return this.compare.bind(this); - }; + } + + /** * Given a before and after value for a node, determine if the indexed value has changed. Even if they are different, * it's possible that the changes are isolated to parts of the snapshot that are not indexed. 
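// [Editor's illustration - not part of the patch] indexedValueChanged() reuses the index's
// own compare() by wrapping both snapshots in a NamedNode carrying the same minimal name,
// so only the indexed portion of the data can make the comparison non-zero. Below is a
// self-contained sketch of that trick with plain objects standing in for NamedNode/Node;
// the type and function names are illustrative assumptions.
interface FakeNamedNode { name: string; priority: number; }

const byPriority = (a: FakeNamedNode, b: FakeNamedNode): number =>
  (a.priority - b.priority) || a.name.localeCompare(b.name);

function priorityChanged(oldPriority: number, newPriority: number): boolean {
  // Same name on both wrappers, like NamedNode(MIN_NAME, node) in indexedValueChanged(),
  // so the name tie-breaker can never report a difference on its own.
  return byPriority({ name: '', priority: oldPriority },
                    { name: '', priority: newPriority }) !== 0;
}
// priorityChanged(1, 1) === false; priorityChanged(1, 2) === true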
@@ -51,20 +54,20 @@ export abstract class Index { * @param {!Node} newNode * @return {boolean} True if the portion of the snapshot being indexed changed between oldNode and newNode */ - indexedValueChanged(oldNode, newNode) { - var oldWrapped = new NamedNode(MIN_NAME, oldNode); - var newWrapped = new NamedNode(MIN_NAME, newNode); + indexedValueChanged(oldNode: Node, newNode: Node): boolean { + const oldWrapped = new NamedNode(MIN_NAME, oldNode); + const newWrapped = new NamedNode(MIN_NAME, newNode); return this.compare(oldWrapped, newWrapped) !== 0; - }; + } /** * @return {!NamedNode} a node wrapper that will sort equal to or less than * any other node wrapper, using this index */ - minPost() { + minPost(): NamedNode { return (NamedNode as any).MIN; - }; + } /** @@ -79,11 +82,11 @@ export abstract class Index { * @param {string} name * @return {!NamedNode} */ - abstract makePost(indexValue: object, name: string): NamedNode; + abstract makePost(indexValue: any, name: string): NamedNode; /** * @return {!string} String representation for inclusion in a query spec */ abstract toString(): string; -}; +} diff --git a/src/database/core/snap/indexes/KeyIndex.ts b/src/database/core/snap/indexes/KeyIndex.ts index 35dc19954f9..adaa82cdbf5 100644 --- a/src/database/core/snap/indexes/KeyIndex.ts +++ b/src/database/core/snap/indexes/KeyIndex.ts @@ -20,25 +20,23 @@ import { nameCompare, MAX_NAME } from "../../util/util"; import { assert, assertionError } from "../../../../utils/assert"; import { ChildrenNode } from "../ChildrenNode"; -let __EMPTY_NODE; +let __EMPTY_NODE: ChildrenNode; export class KeyIndex extends Index { static get __EMPTY_NODE() { return __EMPTY_NODE; } + static set __EMPTY_NODE(val) { __EMPTY_NODE = val; } - constructor() { - super(); - } + /** * @inheritDoc */ - compare(a, b) { + compare(a: NamedNode, b: NamedNode): number { return nameCompare(a.name, b.name); - }; - + } /** * @inheritDoc @@ -47,15 +45,15 @@ export class KeyIndex extends Index { // We could probably return true here (since every node has a key), but it's never called // so just leaving unimplemented for now. throw assertionError('KeyIndex.isDefinedOn not expected to be called.'); - }; + } /** * @inheritDoc */ - indexedValueChanged(oldNode, newNode) { + indexedValueChanged(oldNode: Node, newNode: Node): boolean { return false; // The key for a node never changes. - }; + } /** @@ -63,17 +61,17 @@ export class KeyIndex extends Index { */ minPost() { return (NamedNode as any).MIN; - }; + } /** * @inheritDoc */ - maxPost() { + maxPost(): NamedNode { // TODO: This should really be created once and cached in a static property, but // NamedNode isn't defined yet, so I can't use it in a static. Bleh. return new NamedNode(MAX_NAME, __EMPTY_NODE); - }; + } /** @@ -81,19 +79,19 @@ export class KeyIndex extends Index { * @param {string} name * @return {!NamedNode} */ - makePost(indexValue, name) { + makePost(indexValue: string, name: string): NamedNode { assert(typeof indexValue === 'string', 'KeyIndex indexValue must always be a string.'); // We just use empty node, but it'll never be compared, since our comparator only looks at name. 
return new NamedNode(indexValue, __EMPTY_NODE); - }; + } /** * @return {!string} String representation for inclusion in a query spec */ - toString() { + toString(): string { return '.key'; - }; -}; + } +} export const KEY_INDEX = new KeyIndex(); \ No newline at end of file diff --git a/src/database/core/snap/indexes/PathIndex.ts b/src/database/core/snap/indexes/PathIndex.ts index 461781c2f6a..a2081f74881 100644 --- a/src/database/core/snap/indexes/PathIndex.ts +++ b/src/database/core/snap/indexes/PathIndex.ts @@ -18,8 +18,9 @@ import { assert } from "../../../../utils/assert"; import { nameCompare, MAX_NAME } from "../../util/util"; import { Index } from "./Index"; import { ChildrenNode, MAX_NODE } from "../ChildrenNode"; -import { NamedNode } from "../Node"; +import { NamedNode, Node } from '../Node'; import { nodeFromJSON } from "../nodeFromJSON"; +import { Path } from '../../util/Path'; /** * @param {!Path} indexPath @@ -27,76 +28,69 @@ import { nodeFromJSON } from "../nodeFromJSON"; * @extends {Index} */ export class PathIndex extends Index { - indexPath_; - - constructor(indexPath) { + constructor(private indexPath_: Path) { super(); - assert(!indexPath.isEmpty() && indexPath.getFront() !== '.priority', + assert(!indexPath_.isEmpty() && indexPath_.getFront() !== '.priority', 'Can\'t create PathIndex with empty path or .priority key'); - /** - * - * @type {!Path} - * @private - */ - this.indexPath_ = indexPath; - }; + } + /** * @param {!Node} snap * @return {!Node} * @protected */ - extractChild(snap) { + protected extractChild(snap: Node): Node { return snap.getChild(this.indexPath_); - }; + } /** * @inheritDoc */ - isDefinedOn(node) { + isDefinedOn(node: Node): boolean { return !node.getChild(this.indexPath_).isEmpty(); - }; + } /** * @inheritDoc */ - compare(a, b) { - var aChild = this.extractChild(a.node); - var bChild = this.extractChild(b.node); - var indexCmp = aChild.compareTo(bChild); + compare(a: NamedNode, b: NamedNode): number { + const aChild = this.extractChild(a.node); + const bChild = this.extractChild(b.node); + const indexCmp = aChild.compareTo(bChild); if (indexCmp === 0) { return nameCompare(a.name, b.name); } else { return indexCmp; } - }; + } /** * @inheritDoc */ - makePost(indexValue, name) { - var valueNode = nodeFromJSON(indexValue); - var node = ChildrenNode.EMPTY_NODE.updateChild(this.indexPath_, valueNode); + makePost(indexValue: object, name: string): NamedNode { + const valueNode = nodeFromJSON(indexValue); + const node = ChildrenNode.EMPTY_NODE.updateChild(this.indexPath_, valueNode); return new NamedNode(name, node); - }; + } /** * @inheritDoc */ - maxPost() { - var node = ChildrenNode.EMPTY_NODE.updateChild(this.indexPath_, MAX_NODE); + maxPost(): NamedNode { + const node = ChildrenNode.EMPTY_NODE.updateChild(this.indexPath_, MAX_NODE); return new NamedNode(MAX_NAME, node); - }; + } /** * @inheritDoc */ - toString() { + toString(): string { return this.indexPath_.slice().join('/'); - }; + } } \ No newline at end of file diff --git a/src/database/core/snap/indexes/PriorityIndex.ts b/src/database/core/snap/indexes/PriorityIndex.ts index b9a9400bcb9..f54e923e7e8 100644 --- a/src/database/core/snap/indexes/PriorityIndex.ts +++ b/src/database/core/snap/indexes/PriorityIndex.ts @@ -16,17 +16,17 @@ import { Index } from './Index'; import { nameCompare, MAX_NAME } from "../../util/util"; -import { NamedNode } from "../Node"; +import { NamedNode, Node } from '../Node'; import { LeafNode } from "../LeafNode"; -let nodeFromJSON; -let MAX_NODE; +let nodeFromJSON: 
(a: any) => Node; +let MAX_NODE: Node; -export function setNodeFromJSON(val) { +export function setNodeFromJSON(val: (a: any) => Node) { nodeFromJSON = val; } -export function setMaxNode(val) { +export function setMaxNode(val: Node) { MAX_NODE = val; } @@ -37,56 +37,51 @@ export function setMaxNode(val) { * @private */ export class PriorityIndex extends Index { - - constructor() { - super(); - } - /** * @inheritDoc */ - compare(a, b) { - var aPriority = a.node.getPriority(); - var bPriority = b.node.getPriority(); - var indexCmp = aPriority.compareTo(bPriority); + compare(a: NamedNode, b: NamedNode): number { + const aPriority = a.node.getPriority(); + const bPriority = b.node.getPriority(); + const indexCmp = aPriority.compareTo(bPriority); if (indexCmp === 0) { return nameCompare(a.name, b.name); } else { return indexCmp; } - }; + } /** * @inheritDoc */ - isDefinedOn(node) { + isDefinedOn(node: Node): boolean { return !node.getPriority().isEmpty(); - }; + } /** * @inheritDoc */ - indexedValueChanged(oldNode, newNode) { + indexedValueChanged(oldNode: Node, newNode: Node): boolean { return !oldNode.getPriority().equals(newNode.getPriority()); - }; + } /** * @inheritDoc */ - minPost() { + minPost(): NamedNode { return (NamedNode as any).MIN; - }; + } /** * @inheritDoc */ - maxPost() { + maxPost(): NamedNode { return new NamedNode(MAX_NAME, new LeafNode('[PRIORITY-POST]', MAX_NODE)); - }; + } /** @@ -94,18 +89,18 @@ export class PriorityIndex extends Index { * @param {string} name * @return {!NamedNode} */ - makePost(indexValue, name) { - var priorityNode = nodeFromJSON(indexValue); + makePost(indexValue: any, name: string): NamedNode { + const priorityNode = nodeFromJSON(indexValue); return new NamedNode(name, new LeafNode('[PRIORITY-POST]', priorityNode)); - }; + } /** * @return {!string} String representation for inclusion in a query spec */ - toString() { + toString(): string { return '.priority'; - }; -}; + } +} export const PRIORITY_INDEX = new PriorityIndex(); diff --git a/src/database/core/snap/indexes/ValueIndex.ts b/src/database/core/snap/indexes/ValueIndex.ts index 0c82d067830..dbd705a1e41 100644 --- a/src/database/core/snap/indexes/ValueIndex.ts +++ b/src/database/core/snap/indexes/ValueIndex.ts @@ -15,7 +15,7 @@ */ import { Index } from "./Index"; -import { NamedNode } from "../Node"; +import { NamedNode, Node } from '../Node'; import { nameCompare } from "../../util/util"; import { nodeFromJSON } from "../nodeFromJSON"; @@ -25,66 +25,62 @@ import { nodeFromJSON } from "../nodeFromJSON"; * @private */ export class ValueIndex extends Index { - constructor() { - super(); - } - /** * @inheritDoc */ - compare(a, b) { - var indexCmp = a.node.compareTo(b.node); + compare(a: NamedNode, b: NamedNode): number { + const indexCmp = a.node.compareTo(b.node); if (indexCmp === 0) { return nameCompare(a.name, b.name); } else { return indexCmp; } - }; + } /** * @inheritDoc */ - isDefinedOn(node) { + isDefinedOn(node: Node): boolean { return true; - }; + } /** * @inheritDoc */ - indexedValueChanged(oldNode, newNode) { + indexedValueChanged(oldNode: Node, newNode: Node): boolean { return !oldNode.equals(newNode); - }; + } /** * @inheritDoc */ - minPost() { + minPost(): NamedNode { return (NamedNode as any).MIN; - }; + } /** * @inheritDoc */ - maxPost() { + maxPost(): NamedNode { return (NamedNode as any).MAX; - }; + } /** * @param {*} indexValue * @param {string} name * @return {!NamedNode} */ - makePost(indexValue, name) { - var valueNode = nodeFromJSON(indexValue); + makePost(indexValue: 
object, name: string): NamedNode { + const valueNode = nodeFromJSON(indexValue); return new NamedNode(name, valueNode); - }; + } /** * @return {!string} String representation for inclusion in a query spec */ - toString() { + toString(): string { return '.value'; }; -}; +} export const VALUE_INDEX = new ValueIndex(); \ No newline at end of file diff --git a/src/database/core/snap/nodeFromJSON.ts b/src/database/core/snap/nodeFromJSON.ts index 08c9c7c746f..eb09e0b3d55 100644 --- a/src/database/core/snap/nodeFromJSON.ts +++ b/src/database/core/snap/nodeFromJSON.ts @@ -14,40 +14,41 @@ * limitations under the License. */ -import { ChildrenNode } from "./ChildrenNode"; -import { LeafNode } from "./LeafNode"; -import { NamedNode } from "./Node"; -import { forEach, contains } from "../../../utils/obj"; -import { assert } from "../../../utils/assert"; -import { buildChildSet } from "./childSet"; -import { NAME_COMPARATOR, NAME_ONLY_COMPARATOR } from "./comparators"; -import { IndexMap } from "./IndexMap"; -import { PRIORITY_INDEX, setNodeFromJSON } from "./indexes/PriorityIndex"; +import { ChildrenNode } from './ChildrenNode'; +import { LeafNode } from './LeafNode'; +import { NamedNode, Node } from './Node'; +import { forEach, contains } from '../../../utils/obj'; +import { assert } from '../../../utils/assert'; +import { buildChildSet } from './childSet'; +import { NAME_COMPARATOR, NAME_ONLY_COMPARATOR } from './comparators'; +import { IndexMap } from './IndexMap'; +import { PRIORITY_INDEX, setNodeFromJSON } from './indexes/PriorityIndex'; +import { SortedMap } from '../util/SortedMap'; const USE_HINZE = true; /** * Constructs a snapshot node representing the passed JSON and returns it. * @param {*} json JSON to create a node for. - * @param {?string|?number=} opt_priority Optional priority to use. This will be ignored if the + * @param {?string|?number=} priority Optional priority to use. This will be ignored if the * passed JSON contains a .priority property. * @return {!Node} */ -export function nodeFromJSON(json, priority?) { +export function nodeFromJSON(json: any | null, + priority: string | number | null = null): Node { if (json === null) { return ChildrenNode.EMPTY_NODE; } - priority = priority !== undefined ? priority : null; if (typeof json === 'object' && '.priority' in json) { priority = json['.priority']; } - + assert( - priority === null || - typeof priority === 'string' || - typeof priority === 'number' || - (typeof priority === 'object' && '.sv' in priority), + priority === null || + typeof priority === 'string' || + typeof priority === 'number' || + (typeof priority === 'object' && '.sv' in (priority as object)), 'Invalid priority type found: ' + (typeof priority) ); @@ -57,17 +58,17 @@ export function nodeFromJSON(json, priority?) 
{ // Valid leaf nodes include non-objects or server-value wrapper objects if (typeof json !== 'object' || '.sv' in json) { - var jsonLeaf = /** @type {!(string|number|boolean|Object)} */ (json); + const jsonLeaf = json as string | number | boolean | object; return new LeafNode(jsonLeaf, nodeFromJSON(priority)); } if (!(json instanceof Array) && USE_HINZE) { - var children = []; - var childrenHavePriority = false; - var hinzeJsonObj = /** @type {!Object} */ (json); - forEach(hinzeJsonObj, function(key, child) { + const children: NamedNode[] = []; + let childrenHavePriority = false; + const hinzeJsonObj: { [k: string]: any } = json as object; + forEach(hinzeJsonObj, (key: string, child: any) => { if (typeof key !== 'string' || key.substring(0, 1) !== '.') { // Ignore metadata nodes - var childNode = nodeFromJSON(hinzeJsonObj[key]); + const childNode = nodeFromJSON(hinzeJsonObj[key]); if (!childNode.isEmpty()) { childrenHavePriority = childrenHavePriority || !childNode.getPriority().isEmpty(); children.push(new NamedNode(key, childNode)); @@ -79,25 +80,23 @@ export function nodeFromJSON(json, priority?) { return ChildrenNode.EMPTY_NODE; } - var childSet = /**@type {!SortedMap.} */ (buildChildSet( - children, NAME_ONLY_COMPARATOR, function(namedNode) { return namedNode.name; }, - NAME_COMPARATOR - )); + const childSet = buildChildSet(children, NAME_ONLY_COMPARATOR, + (namedNode) => namedNode.name, NAME_COMPARATOR) as SortedMap; if (childrenHavePriority) { - var sortedChildSet = buildChildSet(children, PRIORITY_INDEX.getCompare()); + const sortedChildSet = buildChildSet(children, PRIORITY_INDEX.getCompare()); return new ChildrenNode(childSet, nodeFromJSON(priority), new IndexMap({'.priority': sortedChildSet}, {'.priority': PRIORITY_INDEX})); } else { return new ChildrenNode(childSet, nodeFromJSON(priority), - IndexMap.Default); + IndexMap.Default); } } else { - var node = ChildrenNode.EMPTY_NODE; - var jsonObj = /** @type {!Object} */ (json); - forEach(jsonObj, function(key, childData) { + let node: Node = ChildrenNode.EMPTY_NODE; + const jsonObj = json as object; + forEach(jsonObj, (key: string, childData: any) => { if (contains(jsonObj, key)) { if (key.substring(0, 1) !== '.') { // ignore metadata nodes. - var childNode = nodeFromJSON(childData); + const childNode = nodeFromJSON(childData); if (childNode.isLeafNode() || !childNode.isEmpty()) node = node.updateImmediateChild(key, childNode); } @@ -106,6 +105,6 @@ export function nodeFromJSON(json, priority?) 
{ return node.updatePriority(nodeFromJSON(priority)); } -}; +} setNodeFromJSON(nodeFromJSON); \ No newline at end of file diff --git a/src/database/core/snap/snap.ts b/src/database/core/snap/snap.ts index 03f756df6e0..db4a1cefe2d 100644 --- a/src/database/core/snap/snap.ts +++ b/src/database/core/snap/snap.ts @@ -19,11 +19,11 @@ import { doubleToIEEE754String, } from "../util/util"; import { contains } from "../../../utils/obj"; -import { NamedNode } from "./Node"; +import { Node } from './Node'; -let MAX_NODE; +let MAX_NODE: Node; -export function setMaxNode(val) { +export function setMaxNode(val: Node) { MAX_NODE = val; } @@ -31,7 +31,7 @@ export function setMaxNode(val) { * @param {(!string|!number)} priority * @return {!string} */ -export const priorityHashText = function(priority) { +export const priorityHashText = function(priority: string | number): string { if (typeof priority === 'number') return 'number:' + doubleToIEEE754String(priority); else @@ -43,9 +43,9 @@ export const priorityHashText = function(priority) { * * @param {!Node} priorityNode */ -export const validatePriorityNode = function(priorityNode) { +export const validatePriorityNode = function(priorityNode: Node) { if (priorityNode.isLeafNode()) { - var val = priorityNode.val(); + const val = priorityNode.val(); assert(typeof val === 'string' || typeof val === 'number' || (typeof val === 'object' && contains(val, '.sv')), 'Priority must be a string or number.'); diff --git a/src/database/core/stats/StatsCollection.ts b/src/database/core/stats/StatsCollection.ts index 5d3f596d3ad..7372a8ce850 100644 --- a/src/database/core/stats/StatsCollection.ts +++ b/src/database/core/stats/StatsCollection.ts @@ -23,21 +23,17 @@ import { contains } from '../../../utils/obj'; * @constructor */ export class StatsCollection { - counters_: object; - constructor() { - this.counters_ = { }; - } - incrementCounter(name, amount) { - if (amount === undefined) - amount = 1; + private counters_: { [k: string]: number } = {}; + incrementCounter(name: string, amount: number = 1) { if (!contains(this.counters_, name)) this.counters_[name] = 0; this.counters_[name] += amount; } + get() { return deepCopy(this.counters_); - }; + } } diff --git a/src/database/core/stats/StatsListener.ts b/src/database/core/stats/StatsListener.ts index de596d73845..9513112b1c8 100644 --- a/src/database/core/stats/StatsListener.ts +++ b/src/database/core/stats/StatsListener.ts @@ -15,6 +15,7 @@ */ import { clone, forEach } from '../../../utils/obj'; +import { StatsCollection } from './StatsCollection'; /** * Returns the delta from the previous call to get stats. @@ -23,17 +24,17 @@ import { clone, forEach } from '../../../utils/obj'; * @constructor */ export class StatsListener { - private last_ = null; - - constructor(private collection_) { + private last_: {[k: string]: number} | null = null; + + constructor(private collection_: StatsCollection) { } - get() { + get(): {[k: string]: number} { const newStats = this.collection_.get(); - const delta = clone(newStats); + const delta: typeof newStats = clone(newStats); if (this.last_) { - forEach(this.last_, (stat, value) => { + forEach(this.last_, (stat: string, value: number) => { delta[stat] = delta[stat] - value; }); } diff --git a/src/database/core/stats/StatsManager.ts b/src/database/core/stats/StatsManager.ts index 76f50ce8d33..23129e7224d 100644 --- a/src/database/core/stats/StatsManager.ts +++ b/src/database/core/stats/StatsManager.ts @@ -14,25 +14,30 @@ * limitations under the License. 
*/ -import { StatsCollection } from "./StatsCollection"; +import { StatsCollection } from './StatsCollection'; +import { RepoInfo } from '../RepoInfo'; + +export class StatsManager { + private static collections_: { [k: string]: StatsCollection } = {}; + private static reporters_: { [k: string]: any } = {}; + + static getCollection(repoInfo: RepoInfo): StatsCollection { + const hashString = repoInfo.toString(); -export const StatsManager = { - collections_:{ }, - reporters_:{ }, - getCollection:function(repoInfo) { - var hashString = repoInfo.toString(); if (!this.collections_[hashString]) { this.collections_[hashString] = new StatsCollection(); } + return this.collections_[hashString]; - }, - getOrCreateReporter:function(repoInfo, creatorFunction) { - var hashString = repoInfo.toString(); + } + + static getOrCreateReporter(repoInfo: RepoInfo, creatorFunction: () => T): T { + const hashString = repoInfo.toString(); + if (!this.reporters_[hashString]) { this.reporters_[hashString] = creatorFunction(); } return this.reporters_[hashString]; } -}; - +} diff --git a/src/database/core/stats/StatsReporter.ts b/src/database/core/stats/StatsReporter.ts index 80c6b2679bf..7b2ec7dce53 100644 --- a/src/database/core/stats/StatsReporter.ts +++ b/src/database/core/stats/StatsReporter.ts @@ -15,8 +15,10 @@ */ import { contains, forEach } from '../../../utils/obj'; -import { setTimeoutNonBlocking } from "../util/util"; -import { StatsListener } from "./StatsListener"; +import { setTimeoutNonBlocking } from '../util/util'; +import { StatsListener } from './StatsListener'; +import { StatsCollection } from './StatsCollection'; +import { ServerActions } from '../ServerActions'; // Assuming some apps may have a short amount of time on page, and a bulk of firebase operations probably // happen on page load, we try to report our first set of stats pretty quickly, but we wait at least 10 @@ -28,32 +30,33 @@ const FIRST_STATS_MAX_TIME = 30 * 1000; const REPORT_STATS_INTERVAL = 5 * 60 * 1000; /** - * - * @param collection - * @param server_ * @constructor */ export class StatsReporter { - private statsListener_; - private statsToReport_ = {}; + private statsListener_: StatsListener; + private statsToReport_: { [k: string]: boolean } = {}; - constructor(collection, private server_: any) { + /** + * @param collection + * @param server_ + */ + constructor(collection: StatsCollection, private server_: ServerActions) { this.statsListener_ = new StatsListener(collection); const timeout = FIRST_STATS_MIN_TIME + (FIRST_STATS_MAX_TIME - FIRST_STATS_MIN_TIME) * Math.random(); setTimeoutNonBlocking(this.reportStats_.bind(this), Math.floor(timeout)); } - includeStat(stat) { + includeStat(stat: string) { this.statsToReport_[stat] = true; } private reportStats_() { const stats = this.statsListener_.get(); - const reportedStats = {}; + const reportedStats: typeof stats = {}; let haveStatsToReport = false; - forEach(stats, (stat, value) => { + forEach(stats, (stat: string, value: number) => { if (value > 0 && contains(this.statsToReport_, stat)) { reportedStats[stat] = value; haveStatsToReport = true; diff --git a/src/database/core/storage/DOMStorageWrapper.ts b/src/database/core/storage/DOMStorageWrapper.ts index 47517540082..567a279dc18 100644 --- a/src/database/core/storage/DOMStorageWrapper.ts +++ b/src/database/core/storage/DOMStorageWrapper.ts @@ -14,7 +14,7 @@ * limitations under the License. 
*/ -import { jsonEval, stringify } from "../../../utils/json"; +import { jsonEval, stringify } from '../../../utils/json'; /** * Wraps a DOM Storage object and: @@ -24,63 +24,61 @@ import { jsonEval, stringify } from "../../../utils/json"; * We automatically (see storage.js) create two such wrappers, one for sessionStorage, * and one for localStorage. * - * @param {Storage} domStorage The underlying storage object (e.g. localStorage or sessionStorage) * @constructor */ export class DOMStorageWrapper { - prefix_; - domStorage_; + // Use a prefix to avoid collisions with other stuff saved by the app. + private prefix_ = 'firebase:'; - constructor(domStorage) { - this.domStorage_ = domStorage; + /** + * @param {Storage} domStorage_ The underlying storage object (e.g. localStorage or sessionStorage) + */ + constructor(private domStorage_: Storage) { + } - // Use a prefix to avoid collisions with other stuff saved by the app. - this.prefix_ = 'firebase:'; - }; - /** * @param {string} key The key to save the value under * @param {?Object} value The value being stored, or null to remove the key. */ - set(key, value) { + set(key: string, value: any | null) { if (value == null) { this.domStorage_.removeItem(this.prefixedName_(key)); } else { this.domStorage_.setItem(this.prefixedName_(key), stringify(value)); } - }; + } /** * @param {string} key * @return {*} The value that was stored under this key, or null */ - get(key) { - var storedVal = this.domStorage_.getItem(this.prefixedName_(key)); + get(key: string): any { + const storedVal = this.domStorage_.getItem(this.prefixedName_(key)); if (storedVal == null) { return null; } else { return jsonEval(storedVal); } - }; + } /** * @param {string} key */ - remove(key) { + remove(key: string) { this.domStorage_.removeItem(this.prefixedName_(key)); - }; + } - isInMemoryStorage; + isInMemoryStorage: boolean; /** * @param {string} name * @return {string} */ - prefixedName_(name) { + prefixedName_(name: string): string { return this.prefix_ + name; - }; + } - toString() { + toString(): string { return this.domStorage_.toString(); - }; + } } diff --git a/src/database/core/storage/MemoryStorage.ts b/src/database/core/storage/MemoryStorage.ts index f260bdf68a7..9de7a79202d 100644 --- a/src/database/core/storage/MemoryStorage.ts +++ b/src/database/core/storage/MemoryStorage.ts @@ -14,7 +14,7 @@ * limitations under the License. 
*/ -import { contains } from "../../../utils/obj"; +import { contains } from '../../../utils/obj'; /** * An in-memory storage implementation that matches the API of DOMStorageWrapper @@ -23,28 +23,26 @@ import { contains } from "../../../utils/obj"; * @constructor */ export class MemoryStorage { - cache_: object; - constructor() { - this.cache_ = {}; - } - set(key, value) { + private cache_: { [k: string]: any } = {}; + + set(key: string, value: any | null) { if (value == null) { delete this.cache_[key]; } else { this.cache_[key] = value; } - }; + } - get(key) { + get(key: string): any { if (contains(this.cache_, key)) { return this.cache_[key]; } return null; - }; + } - remove(key) { + remove(key: string) { delete this.cache_[key]; - }; + } isInMemoryStorage = true; } diff --git a/src/database/core/storage/storage.ts b/src/database/core/storage/storage.ts index d974e939491..600c7dc4461 100644 --- a/src/database/core/storage/storage.ts +++ b/src/database/core/storage/storage.ts @@ -17,6 +17,8 @@ import { DOMStorageWrapper } from './DOMStorageWrapper'; import { MemoryStorage } from './MemoryStorage'; +declare const window: any; + /** * Helper to create a DOMStorageWrapper or else fall back to MemoryStorage. * TODO: Once MemoryStorage and DOMStorageWrapper have a shared interface this method annotation should change @@ -26,13 +28,13 @@ import { MemoryStorage } from './MemoryStorage'; * (e.g. 'localStorage' or 'sessionStorage'). * @return {?} Turning off type information until a common interface is defined. */ -const createStoragefor = function(domStorageName) { +const createStoragefor = function(domStorageName: string): DOMStorageWrapper | MemoryStorage { try { // NOTE: just accessing "localStorage" or "window['localStorage']" may throw a security exception, // so it must be inside the try/catch. if (typeof window !== 'undefined' && typeof window[domStorageName] !== 'undefined') { // Need to test cache. Just because it's here doesn't mean it works - var domStorage = window[domStorageName]; + const domStorage = window[domStorageName]; domStorage.setItem('firebase:sentinel', 'cache'); domStorage.removeItem('firebase:sentinel'); return new DOMStorageWrapper(domStorage); diff --git a/src/database/core/util/CountedSet.ts b/src/database/core/util/CountedSet.ts index b1fe55a039a..a9351d7d75e 100644 --- a/src/database/core/util/CountedSet.ts +++ b/src/database/core/util/CountedSet.ts @@ -14,35 +14,29 @@ * limitations under the License. */ -import { isEmpty, getCount, forEach, contains } from "../../../utils/obj"; +import { isEmpty, getCount, forEach, contains } from '../../../utils/obj'; /** * Implements a set with a count of elements. * + * @template K, V */ -export class CountedSet { - set: object; - - /** - * @template K, V - */ - constructor() { - this.set = {}; - } +export class CountedSet { + set: { [k: string]: V } = {}; /** * @param {!K} item * @param {V} val */ - add(item, val) { - this.set[item] = val !== null ? val : true; + add(item: K, val: V) { + this.set[item as any] = val !== null ? val : (true as any); } /** * @param {!K} key * @return {boolean} */ - contains(key) { + contains(key: K) { return contains(this.set, key); } @@ -50,15 +44,15 @@ export class CountedSet { * @param {!K} item * @return {V} */ - get(item) { - return this.contains(item) ? this.set[item] : undefined; + get(item: K): V | void { + return this.contains(item) ? 
this.set[item as any] : undefined; } /** * @param {!K} item */ - remove(item) { - delete this.set[item]; + remove(item: K) { + delete this.set[item as any]; } /** @@ -72,14 +66,14 @@ export class CountedSet { * True if there's nothing in the set * @return {boolean} */ - isEmpty() { + isEmpty(): boolean { return isEmpty(this.set); } /** * @return {number} The number of items in the set */ - count() { + count(): number { return getCount(this.set); } @@ -87,21 +81,19 @@ export class CountedSet { * Run a function on each k,v pair in the set * @param {function(K, V)} fn */ - each(fn) { - forEach(this.set, function(k, v) { - fn(k, v); - }); + each(fn: (k: K, v: V) => void) { + forEach(this.set, (k: K, v: V) => fn(k, v)); } /** * Mostly for debugging * @return {Array.} The keys present in this CountedSet */ - keys() { - var keys = []; - forEach(this.set, function(k, v) { + keys(): K[] { + const keys: K[] = []; + forEach(this.set, (k: K) => { keys.push(k); }); return keys; } -}; // end fb.core.util.CountedSet +} diff --git a/src/database/core/util/EventEmitter.ts b/src/database/core/util/EventEmitter.ts index 32434c7ae22..aa998355e9d 100644 --- a/src/database/core/util/EventEmitter.ts +++ b/src/database/core/util/EventEmitter.ts @@ -14,23 +14,21 @@ * limitations under the License. */ -import { assert } from "../../../utils/assert"; +import { assert } from '../../../utils/assert'; /** * Base class to be used if you want to emit events. Call the constructor with * the set of allowed event names. */ export abstract class EventEmitter { - allowedEvents_; - listeners_; + private listeners_: { [eventType: string]: Array<{ callback(...args: any[]): void, context: any }> } = {}; + /** - * @param {!Array.} allowedEvents + * @param {!Array.} allowedEvents_ */ - constructor(allowedEvents: Array) { - assert(Array.isArray(allowedEvents) && allowedEvents.length > 0, - 'Requires a non-empty array'); - this.allowedEvents_ = allowedEvents; - this.listeners_ = {}; + constructor(private allowedEvents_: Array) { + assert(Array.isArray(allowedEvents_) && allowedEvents_.length > 0, + 'Requires a non-empty array'); } /** @@ -40,41 +38,41 @@ export abstract class EventEmitter { * @param {!string} eventType * @return {Array.<*>} Array of parameters to trigger initial event with. */ - abstract getInitialEvent(eventType: string); + abstract getInitialEvent(eventType: string): any[]; /** * To be called by derived classes to trigger events. * @param {!string} eventType * @param {...*} var_args */ - trigger(eventType, var_args) { + protected trigger(eventType: string, ...var_args: any[]) { if (Array.isArray(this.listeners_[eventType])) { // Clone the list, since callbacks could add/remove listeners. 
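// [Editor's usage sketch - not part of the patch] A minimal subclass of the EventEmitter
// being refactored here. getInitialEvent() lets on() replay the current state to a listener
// as soon as it registers; trigger() fans out later changes. 'ConnectedMonitor' and its
// 'connected' event are invented for illustration; the OnlineMonitor further down in this
// patch follows the same pattern.
import { EventEmitter } from './EventEmitter';

class ConnectedMonitor extends EventEmitter {
  private connected_ = true;

  constructor() {
    super(['connected']);
  }

  // New listeners are immediately invoked with the current state.
  getInitialEvent(eventType: string): any[] {
    return [this.connected_];
  }

  setConnected(connected: boolean) {
    if (connected !== this.connected_) {
      this.connected_ = connected;
      this.trigger('connected', connected);
    }
  }
}

const monitor = new ConnectedMonitor();
monitor.on('connected', (isConnected: boolean) => console.log('connected?', isConnected), null);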
-      var listeners = [
+      const listeners = [
         ...this.listeners_[eventType]
       ];
-      for (var i = 0; i < listeners.length; i++) {
-        listeners[i].callback.apply(listeners[i].context, Array.prototype.slice.call(arguments, 1));
+      for (let i = 0; i < listeners.length; i++) {
+        listeners[i].callback.apply(listeners[i].context, var_args);
       }
     }
   }
 
-  on(eventType, callback, context) {
+  on(eventType: string, callback: (a: any) => void, context: any) {
     this.validateEventType_(eventType);
     this.listeners_[eventType] = this.listeners_[eventType] || [];
-    this.listeners_[eventType].push({callback: callback, context: context });
+    this.listeners_[eventType].push({callback, context});
 
-    var eventData = this.getInitialEvent(eventType);
+    const eventData = this.getInitialEvent(eventType);
     if (eventData) {
       callback.apply(context, eventData);
     }
   }
 
-  off(eventType, callback, context) {
+  off(eventType: string, callback: (a: any) => void, context: any) {
     this.validateEventType_(eventType);
-    var listeners = this.listeners_[eventType] || [];
-    for (var i = 0; i < listeners.length; i++) {
+    const listeners = this.listeners_[eventType] || [];
+    for (let i = 0; i < listeners.length; i++) {
       if (listeners[i].callback === callback &&
           (!context || context === listeners[i].context)) {
         listeners.splice(i, 1);
         return;
@@ -82,11 +80,12 @@ export abstract class EventEmitter {
       }
     }
   }
 
-  validateEventType_(eventType) {
-    assert(this.allowedEvents_.find(function(et) {
+  private validateEventType_(eventType: string) {
+    assert(this.allowedEvents_.find(function (et) {
       return et === eventType;
     }),
       'Unknown event: ' + eventType
     );
   }
-}; // end fb.core.util.EventEmitter
+}
+
diff --git a/src/database/core/util/ImmutableTree.ts b/src/database/core/util/ImmutableTree.ts
index 48bd600a8cc..1ee2eba4849 100644
--- a/src/database/core/util/ImmutableTree.ts
+++ b/src/database/core/util/ImmutableTree.ts
@@ -14,21 +14,18 @@
  * limitations under the License.
  */
 
-import { SortedMap } from "./SortedMap";
-import { Path } from "./Path";
-import { stringCompare } from "./util";
-import { forEach } from "../../../utils/obj";
+import { SortedMap } from './SortedMap';
+import { Path } from './Path';
+import { stringCompare } from './util';
+import { forEach } from '../../../utils/obj';
 
-let emptyChildrenSingleton;
+let emptyChildrenSingleton: SortedMap<string, ImmutableTree<any>>;
 
 /**
  * A tree with immutable elements.
  */
-export class ImmutableTree {
-  value;
-  children;
-
-  static Empty = new ImmutableTree(null);
+export class ImmutableTree<T> {
+  static Empty = new ImmutableTree<any>(null);
 
   /**
    * Singleton empty children collection.
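// [Editor's usage sketch - not part of the patch] ImmutableTree is persistent: set() and
// remove() return new trees and never mutate the receiver, and Path objects address nested
// values. The data below is invented; the imports match the files touched by this diff.
import { ImmutableTree } from './ImmutableTree';
import { Path } from './Path';

const empty: ImmutableTree<number> = ImmutableTree.Empty;
const tree = empty.set(new Path('users/alice/score'), 10);

tree.get(new Path('users/alice/score'));                       // 10
empty.get(new Path('users/alice/score'));                      // null - the original is untouched
tree.subtree(new Path('users/alice')).get(new Path('score'));  // 10 - lookup re-rooted at the subtree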
@@ -37,9 +34,9 @@ export class ImmutableTree { * @type {!SortedMap.>} * @private */ - static get EmptyChildren_() { + private static get EmptyChildren_(): SortedMap> { if (!emptyChildrenSingleton) { - emptyChildrenSingleton = new SortedMap(stringCompare); + emptyChildrenSingleton = new SortedMap>(stringCompare); } return emptyChildrenSingleton; } @@ -49,9 +46,9 @@ export class ImmutableTree { * @param {!Object.} obj * @return {!ImmutableTree.} */ - static fromObject(obj) { - var tree = ImmutableTree.Empty; - forEach(obj, function(childPath, childSnap) { + static fromObject(obj: { [k: string]: T }): ImmutableTree { + let tree: ImmutableTree = ImmutableTree.Empty; + forEach(obj, (childPath: string, childSnap: T) => { tree = tree.set(new Path(childPath), childSnap); }); return tree; @@ -60,27 +57,17 @@ export class ImmutableTree { /** * @template T * @param {?T} value - * @param {SortedMap.>=} opt_children + * @param {SortedMap.>=} children */ - constructor(value, children?) { - /** - * @const - * @type {?T} - */ - this.value = value; - - /** - * @const - * @type {!SortedMap.>} - */ - this.children = children || ImmutableTree.EmptyChildren_; + constructor(public readonly value: T | null, + public readonly children: SortedMap> = ImmutableTree.EmptyChildren_) { } /** * True if the value is empty and there are no children * @return {boolean} */ - isEmpty() { + isEmpty(): boolean { return this.value === null && this.children.isEmpty(); } @@ -96,21 +83,22 @@ export class ImmutableTree { * node * @return {?{path:!Path, value:!T}} */ - findRootMostMatchingPathAndValue(relativePath: Path, predicate) { + findRootMostMatchingPathAndValue(relativePath: Path, + predicate: (a: T) => boolean): { path: Path, value: T } | null { if (this.value != null && predicate(this.value)) { return {path: Path.Empty, value: this.value}; } else { if (relativePath.isEmpty()) { return null; } else { - var front = relativePath.getFront(); - var child = this.children.get(front); + const front = relativePath.getFront(); + const child = this.children.get(front); if (child !== null) { - var childExistingPathAndValue = - child.findRootMostMatchingPathAndValue(relativePath.popFront(), - predicate); + const childExistingPathAndValue = + child.findRootMostMatchingPathAndValue(relativePath.popFront(), + predicate); if (childExistingPathAndValue != null) { - var fullPath = new Path(front).child(childExistingPathAndValue.path); + const fullPath = new Path(front).child(childExistingPathAndValue.path); return {path: fullPath, value: childExistingPathAndValue.value}; } else { return null; @@ -128,20 +116,20 @@ export class ImmutableTree { * @param {!Path} relativePath * @return {?{path: !Path, value: !T}} */ - findRootMostValueAndPath(relativePath) { - return this.findRootMostMatchingPathAndValue(relativePath, function() { return true; }); + findRootMostValueAndPath(relativePath: Path): { path: Path, value: T } | null { + return this.findRootMostMatchingPathAndValue(relativePath, () => true); } /** * @param {!Path} relativePath * @return {!ImmutableTree.} The subtree at the given path */ - subtree(relativePath) { + subtree(relativePath: Path): ImmutableTree { if (relativePath.isEmpty()) { return this; } else { - var front = relativePath.getFront(); - var childTree = this.children.get(front); + const front = relativePath.getFront(); + const childTree = this.children.get(front); if (childTree !== null) { return childTree.subtree(relativePath.popFront()); } else { @@ -157,14 +145,14 @@ export class ImmutableTree { * @param {?T} toSet Value 
to set. * @return {!ImmutableTree.} Resulting tree. */ - set(relativePath, toSet) { + set(relativePath: Path, toSet: T | null): ImmutableTree { if (relativePath.isEmpty()) { return new ImmutableTree(toSet, this.children); } else { - var front = relativePath.getFront(); - var child = this.children.get(front) || ImmutableTree.Empty; - var newChild = child.set(relativePath.popFront(), toSet); - var newChildren = this.children.insert(front, newChild); + const front = relativePath.getFront(); + const child = this.children.get(front) || ImmutableTree.Empty; + const newChild = child.set(relativePath.popFront(), toSet); + const newChildren = this.children.insert(front, newChild); return new ImmutableTree(this.value, newChildren); } } @@ -175,7 +163,7 @@ export class ImmutableTree { * @param {!Path} relativePath Path to value to remove. * @return {!ImmutableTree.} Resulting tree. */ - remove(relativePath) { + remove(relativePath: Path): ImmutableTree { if (relativePath.isEmpty()) { if (this.children.isEmpty()) { return ImmutableTree.Empty; @@ -183,11 +171,11 @@ export class ImmutableTree { return new ImmutableTree(null, this.children); } } else { - var front = relativePath.getFront(); - var child = this.children.get(front); + const front = relativePath.getFront(); + const child = this.children.get(front); if (child) { - var newChild = child.remove(relativePath.popFront()); - var newChildren; + const newChild = child.remove(relativePath.popFront()); + let newChildren; if (newChild.isEmpty()) { newChildren = this.children.remove(front); } else { @@ -210,12 +198,12 @@ export class ImmutableTree { * @param {!Path} relativePath Path to get value for. * @return {?T} Value at path, or null. */ - get(relativePath) { + get(relativePath: Path): T | null { if (relativePath.isEmpty()) { return this.value; } else { - var front = relativePath.getFront(); - var child = this.children.get(front); + const front = relativePath.getFront(); + const child = this.children.get(front); if (child) { return child.get(relativePath.popFront()); } else { @@ -231,14 +219,14 @@ export class ImmutableTree { * @param {!ImmutableTree} newTree New tree. * @return {!ImmutableTree} Resulting tree. 
*/ - setTree(relativePath, newTree) { + setTree(relativePath: Path, newTree: ImmutableTree): ImmutableTree { if (relativePath.isEmpty()) { return newTree; } else { - var front = relativePath.getFront(); - var child = this.children.get(front) || ImmutableTree.Empty; - var newChild = child.setTree(relativePath.popFront(), newTree); - var newChildren; + const front = relativePath.getFront(); + const child = this.children.get(front) || ImmutableTree.Empty; + const newChild = child.setTree(relativePath.popFront(), newTree); + let newChildren; if (newChild.isEmpty()) { newChildren = this.children.remove(front); } else { @@ -256,7 +244,7 @@ export class ImmutableTree { * @param {function(Path, ?T, Object.):V} fn * @return {V} */ - fold(fn) { + fold(fn: (path: Path, value: T, children: { [k: string]: V }) => V): V { return this.fold_(Path.Empty, fn); } @@ -268,9 +256,9 @@ export class ImmutableTree { * @return {V} * @private */ - fold_(pathSoFar, fn) { - var accum = {}; - this.children.inorderTraversal(function(childKey, childTree) { + private fold_(pathSoFar: Path, fn: (path: Path, value: T | null, children: { [k: string]: V }) => V): V { + const accum: {[k: string]: V} = {}; + this.children.inorderTraversal(function (childKey: string, childTree: ImmutableTree) { accum[childKey] = childTree.fold_(pathSoFar.child(childKey), fn); }); return fn(pathSoFar, this.value, accum); @@ -283,20 +271,20 @@ export class ImmutableTree { * @param {!function(!Path, !T):?V} f * @return {?V} */ - findOnPath(path, f) { + findOnPath(path: Path, f: (path: Path, value: T) => (V | null)): V | null { return this.findOnPath_(path, Path.Empty, f); } - findOnPath_(pathToFollow, pathSoFar, f) { - var result = this.value ? f(pathSoFar, this.value) : false; + private findOnPath_(pathToFollow: Path, pathSoFar: Path, f: (path: Path, value: T) => (V | null)): V | null { + const result = this.value ? f(pathSoFar, this.value) : false; if (result) { return result; } else { if (pathToFollow.isEmpty()) { return null; } else { - var front = pathToFollow.getFront(); - var nextChild = this.children.get(front); + const front = pathToFollow.getFront()!; + const nextChild = this.children.get(front); if (nextChild) { return nextChild.findOnPath_(pathToFollow.popFront(), pathSoFar.child(front), f); } else { @@ -306,22 +294,29 @@ export class ImmutableTree { } } - foreachOnPath(path, f) { + /** + * + * @param {!Path} path + * @param {!function(!Path, !T)} f + * @returns {!ImmutableTree.} + */ + foreachOnPath(path: Path, f: (path: Path, value: T) => void): ImmutableTree { return this.foreachOnPath_(path, Path.Empty, f); } - foreachOnPath_(pathToFollow, currentRelativePath, f) { + private foreachOnPath_(pathToFollow: Path, currentRelativePath: Path, + f: (path: Path, value: T) => void): ImmutableTree { if (pathToFollow.isEmpty()) { return this; } else { if (this.value) { f(currentRelativePath, this.value); } - var front = pathToFollow.getFront(); - var nextChild = this.children.get(front); + const front = pathToFollow.getFront(); + const nextChild = this.children.get(front); if (nextChild) { return nextChild.foreachOnPath_(pathToFollow.popFront(), - currentRelativePath.child(front), f); + currentRelativePath.child(front), f); } else { return ImmutableTree.Empty; } @@ -335,12 +330,12 @@ export class ImmutableTree { * the path from the root of the tree to a node, and the value at that node. * Called in depth-first order. 
*/ - foreach(f) { + foreach(f: (path: Path, value: T) => void) { this.foreach_(Path.Empty, f); } - foreach_(currentRelativePath, f) { - this.children.inorderTraversal(function(childName, childTree) { + private foreach_(currentRelativePath: Path, f: (path: Path, value: T) => void) { + this.children.inorderTraversal(function (childName, childTree) { childTree.foreach_(currentRelativePath.child(childName), f); }); if (this.value) { @@ -348,11 +343,15 @@ export class ImmutableTree { } } - foreachChild(f) { - this.children.inorderTraversal(function(childName, childTree) { + /** + * + * @param {function(string, !T)} f + */ + foreachChild(f: (name: string, value: T) => void) { + this.children.inorderTraversal((childName: string, childTree: ImmutableTree) => { if (childTree.value) { f(childName, childTree.value); } }); } -}; // end ImmutableTree +} diff --git a/src/database/core/util/NextPushId.ts b/src/database/core/util/NextPushId.ts index 59078589f0e..014f799591e 100644 --- a/src/database/core/util/NextPushId.ts +++ b/src/database/core/util/NextPushId.ts @@ -32,24 +32,25 @@ import { assert } from "../../../utils/assert"; */ export const nextPushId = (function() { // Modeled after base64 web-safe chars, but ordered by ASCII. - var PUSH_CHARS = '-0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ_abcdefghijklmnopqrstuvwxyz'; + const PUSH_CHARS = '-0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ_abcdefghijklmnopqrstuvwxyz'; // Timestamp of last push, used to prevent local collisions if you push twice // in one ms. - var lastPushTime = 0; + let lastPushTime = 0; // We generate 72-bits of randomness which get turned into 12 characters and // appended to the timestamp to prevent collisions with other clients. We // store the last characters we generated because in the event of a collision, // we'll use those same characters except "incremented" by one. - var lastRandChars = []; + const lastRandChars: number[] = []; - return function(now) { - var duplicateTime = (now === lastPushTime); + return function(now: number) { + const duplicateTime = (now === lastPushTime); lastPushTime = now; - var timeStampChars = new Array(8); - for (var i = 7; i >= 0; i--) { + let i; + const timeStampChars = new Array(8); + for (i = 7; i >= 0; i--) { timeStampChars[i] = PUSH_CHARS.charAt(now % 64); // NOTE: Can't use << here because javascript will convert to int and lose // the upper bits. @@ -57,7 +58,7 @@ export const nextPushId = (function() { } assert(now === 0, 'Cannot push at time == 0'); - var id = timeStampChars.join(''); + let id = timeStampChars.join(''); if (!duplicateTime) { for (i = 0; i < 12; i++) { diff --git a/src/database/core/util/OnlineMonitor.ts b/src/database/core/util/OnlineMonitor.ts index 325e4213884..838223f7184 100644 --- a/src/database/core/util/OnlineMonitor.ts +++ b/src/database/core/util/OnlineMonitor.ts @@ -25,10 +25,10 @@ import { isMobileCordova } from "../../../utils/environment"; * when we're not), but no false negatives. So we can safely use it to determine when * we definitely cannot reach the internet. * - * @extends {fb.core.util.EventEmitter} + * @extends {EventEmitter} */ export class OnlineMonitor extends EventEmitter { - online_; + private online_ = true; static getInstance() { return new OnlineMonitor(); @@ -36,7 +36,6 @@ export class OnlineMonitor extends EventEmitter { constructor() { super(['online']); - this.online_ = true; // We've had repeated complaints that Cordova apps can get stuck "offline", e.g. 
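// [Editor's illustration - not part of the patch] The shape of the IDs nextPushId() produces:
// 8 characters encoding the millisecond timestamp in the ASCII-ordered, base64-like alphabet,
// followed by 12 random characters from the same alphabet, so IDs sort chronologically.
// The real implementation also "increments" the previous random suffix when two pushes land
// in the same millisecond; that collision handling is omitted here, and 'sketchPushId' is an
// invented name.
const SKETCH_PUSH_CHARS = '-0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ_abcdefghijklmnopqrstuvwxyz';

function sketchPushId(now: number): string {
  const timeStampChars = new Array<string>(8);
  for (let i = 7; i >= 0; i--) {
    timeStampChars[i] = SKETCH_PUSH_CHARS.charAt(now % 64);
    now = Math.floor(now / 64); // can't use >>: the timestamp needs more than 32 bits
  }
  let id = timeStampChars.join('');
  for (let i = 0; i < 12; i++) {
    id += SKETCH_PUSH_CHARS.charAt(Math.floor(Math.random() * 64));
  }
  return id; // 20 characters; later pushes sort after earlier ones
}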
// https://forum.ionicframework.com/t/firebase-connection-is-lost-and-never-come-back/43810 @@ -45,18 +44,17 @@ export class OnlineMonitor extends EventEmitter { if (typeof window !== 'undefined' && typeof window.addEventListener !== 'undefined' && !isMobileCordova()) { - var self = this; - window.addEventListener('online', function() { - if (!self.online_) { - self.online_ = true; - self.trigger('online', true); + window.addEventListener('online', () => { + if (!this.online_) { + this.online_ = true; + this.trigger('online', true); } }, false); - window.addEventListener('offline', function() { - if (self.online_) { - self.online_ = false; - self.trigger('online', false); + window.addEventListener('offline', () => { + if (this.online_) { + this.online_ = false; + this.trigger('online', false); } }, false); } @@ -66,7 +64,7 @@ export class OnlineMonitor extends EventEmitter { * @param {!string} eventType * @return {Array.} */ - getInitialEvent(eventType) { + getInitialEvent(eventType: string): boolean[] { assert(eventType === 'online', 'Unknown event type: ' + eventType); return [this.online_]; } @@ -74,7 +72,7 @@ export class OnlineMonitor extends EventEmitter { /** * @return {boolean} */ - currentlyOnline() { + currentlyOnline(): boolean { return this.online_; } -}; // end OnlineMonitor +} diff --git a/src/database/core/util/Path.ts b/src/database/core/util/Path.ts index eb7b93063bf..fc312ee057f 100644 --- a/src/database/core/util/Path.ts +++ b/src/database/core/util/Path.ts @@ -14,8 +14,8 @@ * limitations under the License. */ -import { nameCompare } from "./util"; -import { stringLength } from "../../../utils/utf8"; +import { nameCompare } from './util'; +import { stringLength } from '../../../utils/utf8'; /** * An immutable object representing a parsed path. It's immutable so that you * can pass them around to other functions without worrying about them changing @@ -23,8 +23,8 @@ import { stringLength } from "../../../utils/utf8"; */ export class Path { - pieces_; - pieceNum_; + private pieces_: string[]; + private pieceNum_: number; /** * Singleton to represent an empty path @@ -34,18 +34,19 @@ export class Path { static get Empty() { return new Path(''); } + /** * @param {string|Array.} pathOrString Path string to parse, * or another path, or the raw tokens array - * @param {number=} opt_pieceNum + * @param {number=} pieceNum */ - constructor(pathOrString: string|string[], opt_pieceNum?) { - if (arguments.length == 1) { - this.pieces_ = (pathOrString).split('/'); + constructor(pathOrString: string | string[], pieceNum?: number) { + if (pieceNum === void 0) { + this.pieces_ = (pathOrString as string).split('/'); // Remove empty pieces. 
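// [Editor's usage sketch - not part of the patch] Path is immutable, and popFront() does not
// copy the pieces_ array: it returns a new Path that shares the same array with pieceNum_
// advanced by one, which keeps repeated "peel off the front segment" recursion cheap.
// The values below are invented; the import matches this file.
import { Path } from './Path';

const p = new Path('users/alice/score');
p.getFront();                  // 'users'
p.getLength();                 // 3
p.popFront().toString();       // '/alice/score' - a new Path; 'p' itself is unchanged
new Path('/users//alice/').equals(new Path('users/alice')); // true - empty pieces are removed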
- var copyTo = 0; - for (var i = 0; i < this.pieces_.length; i++) { + let copyTo = 0; + for (let i = 0; i < this.pieces_.length; i++) { if (this.pieces_[i].length > 0) { this.pieces_[copyTo] = this.pieces_[i]; copyTo++; @@ -55,12 +56,12 @@ export class Path { this.pieceNum_ = 0; } else { - this.pieces_ = pathOrString; - this.pieceNum_ = opt_pieceNum; + this.pieces_ = pathOrString as string[]; + this.pieceNum_ = pieceNum; } } - getFront() { + getFront(): string | null { if (this.pieceNum_ >= this.pieces_.length) return null; @@ -70,15 +71,15 @@ export class Path { /** * @return {number} The number of segments in this path */ - getLength() { + getLength(): number { return this.pieces_.length - this.pieceNum_; } /** * @return {!Path} */ - popFront() { - var pieceNum = this.pieceNum_; + popFront(): Path { + let pieceNum = this.pieceNum_; if (pieceNum < this.pieces_.length) { pieceNum++; } @@ -88,16 +89,16 @@ export class Path { /** * @return {?string} */ - getBack() { + getBack(): string | null { if (this.pieceNum_ < this.pieces_.length) return this.pieces_[this.pieces_.length - 1]; return null; } - toString() { - var pathString = ''; - for (var i = this.pieceNum_; i < this.pieces_.length; i++) { + toString(): string { + let pathString = ''; + for (let i = this.pieceNum_; i < this.pieces_.length; i++) { if (this.pieces_[i] !== '') pathString += '/' + this.pieces_[i]; } @@ -105,10 +106,10 @@ export class Path { return pathString || '/'; } - toUrlEncodedString() { - var pathString = ''; - for (var i = this.pieceNum_; i < this.pieces_.length; i++) { - if (this.pieces_[i] !== '') + toUrlEncodedString(): string { + let pathString = ''; + for (let i = this.pieceNum_; i < this.pieces_.length; i++) { + if (this.pieces_[i] !== '') pathString += '/' + encodeURIComponent(String(this.pieces_[i])); } @@ -118,23 +119,22 @@ export class Path { /** * Shallow copy of the parts of the path. 
* - * @param {number=} opt_begin + * @param {number=} begin * @return {!Array} */ - slice(opt_begin) { - var begin = opt_begin || 0; + slice(begin: number = 0): string[] { return this.pieces_.slice(this.pieceNum_ + begin); } /** * @return {?Path} */ - parent() { + parent(): Path | null { if (this.pieceNum_ >= this.pieces_.length) return null; - var pieces = []; - for (var i = this.pieceNum_; i < this.pieces_.length - 1; i++) + const pieces = []; + for (let i = this.pieceNum_; i < this.pieces_.length - 1; i++) pieces.push(this.pieces_[i]); return new Path(pieces, 0); @@ -144,18 +144,18 @@ export class Path { * @param {string|!Path} childPathObj * @return {!Path} */ - child(childPathObj) { - var pieces = []; - for (var i = this.pieceNum_; i < this.pieces_.length; i++) + child(childPathObj: string | Path): Path { + const pieces = []; + for (let i = this.pieceNum_; i < this.pieces_.length; i++) pieces.push(this.pieces_[i]); if (childPathObj instanceof Path) { - for (i = childPathObj.pieceNum_; i < childPathObj.pieces_.length; i++) { + for (let i = childPathObj.pieceNum_; i < childPathObj.pieces_.length; i++) { pieces.push(childPathObj.pieces_[i]); } } else { - var childPieces = childPathObj.split('/'); - for (i = 0; i < childPieces.length; i++) { + const childPieces = childPathObj.split('/'); + for (let i = 0; i < childPieces.length; i++) { if (childPieces[i].length > 0) pieces.push(childPieces[i]); } @@ -167,7 +167,7 @@ export class Path { /** * @return {boolean} True if there are no segments in this path */ - isEmpty() { + isEmpty(): boolean { return this.pieceNum_ >= this.pieces_.length; } @@ -176,28 +176,29 @@ export class Path { * @param {!Path} innerPath * @return {!Path} The path from outerPath to innerPath */ - static relativePath(outerPath, innerPath) { - var outer = outerPath.getFront(), inner = innerPath.getFront(); + static relativePath(outerPath: Path, innerPath: Path): Path { + const outer = outerPath.getFront(), inner = innerPath.getFront(); if (outer === null) { return innerPath; } else if (outer === inner) { return Path.relativePath(outerPath.popFront(), - innerPath.popFront()); + innerPath.popFront()); } else { throw new Error('INTERNAL ERROR: innerPath (' + innerPath + ') is not within ' + - 'outerPath (' + outerPath + ')'); + 'outerPath (' + outerPath + ')'); } } + /** * @param {!Path} left * @param {!Path} right * @return {number} -1, 0, 1 if left is less, equal, or greater than the right. */ - static comparePaths(left, right) { - var leftKeys = left.slice(); - var rightKeys = right.slice(); - for (var i = 0; i < leftKeys.length && i < rightKeys.length; i++) { - var cmp = nameCompare(leftKeys[i], rightKeys[i]); + static comparePaths(left: Path, right: Path): number { + const leftKeys = left.slice(); + const rightKeys = right.slice(); + for (let i = 0; i < leftKeys.length && i < rightKeys.length; i++) { + const cmp = nameCompare(leftKeys[i], rightKeys[i]); if (cmp !== 0) return cmp; } if (leftKeys.length === rightKeys.length) return 0; @@ -209,12 +210,12 @@ export class Path { * @param {Path} other * @return {boolean} true if paths are the same. 
*/ - equals(other) { + equals(other: Path): boolean { if (this.getLength() !== other.getLength()) { return false; } - for (var i = this.pieceNum_, j = other.pieceNum_; i <= this.pieces_.length; i++, j++) { + for (let i = this.pieceNum_, j = other.pieceNum_; i <= this.pieces_.length; i++, j++) { if (this.pieces_[i] !== other.pieces_[j]) { return false; } @@ -228,9 +229,9 @@ export class Path { * @param {!Path} other * @return {boolean} True if this path is a parent (or the same as) other */ - contains(other) { - var i = this.pieceNum_; - var j = other.pieceNum_; + contains(other: Path): boolean { + let i = this.pieceNum_; + let j = other.pieceNum_; if (this.getLength() > other.getLength()) { return false; } @@ -257,29 +258,26 @@ export class Path { */ export class ValidationPath { /** @type {!Array} */ - parts_; + private parts_: string[]; /** @type {number} Initialize to number of '/' chars needed in path. */ - byteLength_; - /** @type {string} */ - errorPrefix_; + private byteLength_: number; /** * @param {!Path} path Initial Path. - * @param {string} errorPrefix Prefix for any error messages. + * @param {string} errorPrefix_ Prefix for any error messages. */ - constructor(path, errorPrefix) { + constructor(path: Path, private errorPrefix_: string) { /** @type {!Array} */ this.parts_ = path.slice(); /** @type {number} Initialize to number of '/' chars needed in path. */ this.byteLength_ = Math.max(1, this.parts_.length); - /** @type {string} */ - this.errorPrefix_ = errorPrefix; - for (var i = 0; i < this.parts_.length; i++) { + for (let i = 0; i < this.parts_.length; i++) { this.byteLength_ += stringLength(this.parts_[i]); } this.checkValid_(); } + /** @const {number} Maximum key depth. */ static get MAX_PATH_DEPTH() { return 32; @@ -291,7 +289,7 @@ export class ValidationPath { } /** @param {string} child */ - push(child) { + push(child: string) { // Count the needed '/' if (this.parts_.length > 0) { this.byteLength_ += 1; @@ -302,7 +300,7 @@ export class ValidationPath { } pop() { - var last = this.parts_.pop(); + const last = this.parts_.pop(); this.byteLength_ -= stringLength(last); // Un-count the previous '/' if (this.parts_.length > 0) { @@ -310,16 +308,16 @@ export class ValidationPath { } } - checkValid_() { + private checkValid_() { if (this.byteLength_ > ValidationPath.MAX_PATH_LENGTH_BYTES) { throw new Error(this.errorPrefix_ + 'has a key path longer than ' + - ValidationPath.MAX_PATH_LENGTH_BYTES + ' bytes (' + - this.byteLength_ + ').'); + ValidationPath.MAX_PATH_LENGTH_BYTES + ' bytes (' + + this.byteLength_ + ').'); } if (this.parts_.length > ValidationPath.MAX_PATH_DEPTH) { throw new Error(this.errorPrefix_ + 'path specified exceeds the maximum depth that can be written (' + - ValidationPath.MAX_PATH_DEPTH + - ') or object contains a cycle ' + this.toErrorString()); + ValidationPath.MAX_PATH_DEPTH + + ') or object contains a cycle ' + this.toErrorString()); } } @@ -328,11 +326,12 @@ export class ValidationPath { * * @return {string} */ - toErrorString() { + toErrorString(): string { if (this.parts_.length == 0) { return ''; } return 'in property \'' + this.parts_.join('.') + '\''; } -}; // end fb.core.util.validation.ValidationPath +} + diff --git a/src/database/core/util/ServerValues.ts b/src/database/core/util/ServerValues.ts index 56e02715270..38276b63172 100644 --- a/src/database/core/util/ServerValues.ts +++ b/src/database/core/util/ServerValues.ts @@ -14,18 +14,21 @@ * limitations under the License. 
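A minimal usage sketch of the typed Path API from the hunk above, assuming only the signatures shown in this diff; the concrete path strings and the import specifier are hypothetical, not part of the patch.

// Illustrative only: exercises the Path methods typed above; values are assumed.
import { Path } from './Path';

const settings = new Path('users/alice/settings');
const front: string | null = settings.getFront();         // 'users'
const rest: Path = settings.popFront();                    // represents 'alice/settings'
const theme: Path = rest.child('theme');                   // represents 'alice/settings/theme'
const isAncestor: boolean = rest.contains(theme);          // true
const order: number = Path.comparePaths(settings, theme);  // ordering via nameCompare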
*/ -import { assert } from "../../../utils/assert"; -import { Path } from "./Path"; -import { SparseSnapshotTree } from "../SparseSnapshotTree"; -import { LeafNode } from "../snap/LeafNode"; -import { nodeFromJSON } from "../snap/nodeFromJSON"; -import { PRIORITY_INDEX } from "../snap/indexes/PriorityIndex"; +import { assert } from '../../../utils/assert'; +import { Path } from './Path'; +import { SparseSnapshotTree } from '../SparseSnapshotTree'; +import { LeafNode } from '../snap/LeafNode'; +import { nodeFromJSON } from '../snap/nodeFromJSON'; +import { PRIORITY_INDEX } from '../snap/indexes/PriorityIndex'; +import { Node } from '../snap/Node'; +import { ChildrenNode } from '../snap/ChildrenNode'; + /** * Generate placeholders for deferred values. * @param {?Object} values * @return {!Object} */ -export const generateWithValues = function(values) { +export const generateWithValues = function (values: { [k: string]: any } | null): { [k: string]: any } { values = values || {}; values['timestamp'] = values['timestamp'] || new Date().getTime(); return values; @@ -39,9 +42,10 @@ export const generateWithValues = function(values) { * @param {!Object} serverValues * @return {!(string|number|boolean)} */ -export const resolveDeferredValue = function(value, serverValues) { +export const resolveDeferredValue = function (value: { [k: string]: any } | string | number | boolean, + serverValues: { [k: string]: any }): string | number | boolean { if (!value || (typeof value !== 'object')) { - return /** @type {(string|number|boolean)} */ (value); + return value as string | number | boolean; } else { assert('.sv' in value, 'Unexpected leaf node or priority contents'); return serverValues[value['.sv']]; @@ -56,9 +60,9 @@ export const resolveDeferredValue = function(value, serverValues) { * @param {!Object} serverValues * @return {!SparseSnapshotTree} */ -export const resolveDeferredValueTree = function(tree, serverValues) { - var resolvedTree = new SparseSnapshotTree(); - tree.forEachTree(new Path(''), function(path, node) { +export const resolveDeferredValueTree = function (tree: SparseSnapshotTree, serverValues: Object): SparseSnapshotTree { + const resolvedTree = new SparseSnapshotTree(); + tree.forEachTree(new Path(''), function (path, node) { resolvedTree.remember(path, resolveDeferredValueSnapshot(node, serverValues)); }); return resolvedTree; @@ -69,31 +73,31 @@ export const resolveDeferredValueTree = function(tree, serverValues) { * Recursively replace all deferred values and priorities in the node with the * specified generated replacement values. If there are no server values in the node, * it'll be returned as-is. 
- * @param {!fb.core.snap.Node} node + * @param {!Node} node * @param {!Object} serverValues - * @return {!fb.core.snap.Node} + * @return {!Node} */ -export const resolveDeferredValueSnapshot = function(node, serverValues) { - var rawPri = /** @type {Object|boolean|null|number|string} */ (node.getPriority().val()), - priority = resolveDeferredValue(rawPri, serverValues), - newNode; +export const resolveDeferredValueSnapshot = function (node: Node, serverValues: Object): Node { + const rawPri = node.getPriority().val() as object | boolean | null | number | string; + const priority = resolveDeferredValue(rawPri, serverValues); + let newNode: Node; if (node.isLeafNode()) { - var leafNode = /** @type {!LeafNode} */ (node); - var value = resolveDeferredValue(leafNode.getValue(), serverValues); + const leafNode = node as LeafNode; + const value = resolveDeferredValue(leafNode.getValue(), serverValues); if (value !== leafNode.getValue() || priority !== leafNode.getPriority().val()) { return new LeafNode(value, nodeFromJSON(priority)); } else { return node; } } else { - var childrenNode = /** @type {!fb.core.snap.ChildrenNode} */ (node); + const childrenNode = node as ChildrenNode; newNode = childrenNode; if (priority !== childrenNode.getPriority().val()) { newNode = newNode.updatePriority(new LeafNode(priority)); } - childrenNode.forEachChild(PRIORITY_INDEX, function(childName, childNode) { - var newChildNode = resolveDeferredValueSnapshot(childNode, serverValues); + childrenNode.forEachChild(PRIORITY_INDEX, function (childName, childNode) { + const newChildNode = resolveDeferredValueSnapshot(childNode, serverValues); if (newChildNode !== childNode) { newNode = newNode.updateImmediateChild(childName, newChildNode); } diff --git a/src/database/core/util/SortedMap.ts b/src/database/core/util/SortedMap.ts index d65fedb583c..f8c97cd8b42 100644 --- a/src/database/core/util/SortedMap.ts +++ b/src/database/core/util/SortedMap.ts @@ -35,47 +35,37 @@ // TODO: It would also be good (and possibly necessary) to create a base // interface for LLRBNode and LLRBEmptyNode. +export type Comparator = (key1: K, key2: K) => number; /** * An iterator over an LLRBNode. */ -export class SortedMapIterator { - /** @private - * @type {?function(!K, !V): T} - */ - resultGenerator_; - isReverse_; - +export class SortedMapIterator { /** @private * @type {Array.} */ - nodeStack_: Array; + private nodeStack_: (LLRBNode | LLRBEmptyNode)[] = []; /** * @template K, V, T * @param {LLRBNode|LLRBEmptyNode} node Node to iterate. * @param {?K} startKey * @param {function(K, K): number} comparator - * @param {boolean} isReverse Whether or not to iterate in reverse - * @param {(function(K, V):T)=} opt_resultGenerator - */ - constructor(node, startKey, comparator, isReverse, opt_resultGenerator?) { - /** @private - * @type {?function(!K, !V): T} - */ - this.resultGenerator_ = opt_resultGenerator || null; - this.isReverse_ = isReverse; - - /** @private - * @type {Array.} - */ - this.nodeStack_ = []; - - var cmp = 1; + * @param {boolean} isReverse_ Whether or not to iterate in reverse + * @param {(function(K, V):T)=} resultGenerator_ + */ + constructor(node: LLRBNode | LLRBEmptyNode, + startKey: K | null, + comparator: Comparator, + private isReverse_: boolean, + private resultGenerator_: ((k: K, v: V) => T) | null = null) { + + let cmp = 1; while (!node.isEmpty()) { + node = node as LLRBNode; cmp = startKey ? 
comparator(node.key, startKey) : 1; // flip the comparison if we're going in reverse - if (isReverse) cmp *= -1; + if (isReverse_) cmp *= -1; if (cmp < 0) { // This node is less than our start key. ignore it @@ -100,15 +90,16 @@ export class SortedMapIterator { } } - getNext() { + getNext(): T { if (this.nodeStack_.length === 0) return null; - var node = this.nodeStack_.pop(), result; + let node = this.nodeStack_.pop(); + let result: T; if (this.resultGenerator_) result = this.resultGenerator_(node.key, node.value); else - result = {key: node.key, value: node.value}; + result = {key: node.key, value: node.value} as any; if (this.isReverse_) { node = node.left; @@ -127,48 +118,48 @@ export class SortedMapIterator { return result; } - hasNext() { + hasNext(): boolean { return this.nodeStack_.length > 0; } - peek() { + peek(): T { if (this.nodeStack_.length === 0) return null; - var node = this.nodeStack_[this.nodeStack_.length - 1]; + const node = this.nodeStack_[this.nodeStack_.length - 1]; if (this.resultGenerator_) { return this.resultGenerator_(node.key, node.value); } else { - return { key: node.key, value: node.value }; + return {key: node.key, value: node.value} as any; } } -}; // end SortedMapIterator +} /** * Represents a node in a Left-leaning Red-Black tree. */ -export class LLRBNode { - key; - value; - color; - left; - right; +export class LLRBNode { + color: boolean; + left: LLRBNode | LLRBEmptyNode; + right: LLRBNode | LLRBEmptyNode; /** * @template K, V * @param {!K} key Key associated with this node. * @param {!V} value Value associated with this node. * @param {?boolean} color Whether this node is red. - * @param {?(LLRBNode|LLRBEmptyNode)=} opt_left Left child. - * @param {?(LLRBNode|LLRBEmptyNode)=} opt_right Right child. - */ - constructor(key, value, color, opt_left?, opt_right?) { - this.key = key; - this.value = value; + * @param {?(LLRBNode|LLRBEmptyNode)=} left Left child. + * @param {?(LLRBNode|LLRBEmptyNode)=} right Right child. + */ + constructor(public key: K, + public value: V, + color: boolean | null, + left?: LLRBNode | LLRBEmptyNode | null, + right?: LLRBNode | LLRBEmptyNode | null) { this.color = color != null ? color : LLRBNode.RED; - this.left = opt_left != null ? opt_left : SortedMap.EMPTY_NODE_; - this.right = opt_right != null ? opt_right : SortedMap.EMPTY_NODE_; + this.left = left != null ? left : SortedMap.EMPTY_NODE as LLRBEmptyNode; + this.right = right != null ? right : SortedMap.EMPTY_NODE as LLRBEmptyNode; } static RED = true; @@ -184,26 +175,28 @@ export class LLRBNode { * @param {?LLRBNode|LLRBEmptyNode} right New right child for the node, or null. * @return {!LLRBNode} The node copy. */ - copy(key, value, color, left, right) { + copy(key: K | null, value: V | null, color: boolean | null, + left: LLRBNode | LLRBEmptyNode | null, + right: LLRBNode | LLRBEmptyNode | null): LLRBNode { return new LLRBNode( - (key != null) ? key : this.key, - (value != null) ? value : this.value, - (color != null) ? color : this.color, - (left != null) ? left : this.left, - (right != null) ? right : this.right); + (key != null) ? key : this.key, + (value != null) ? value : this.value, + (color != null) ? color : this.color, + (left != null) ? left : this.left, + (right != null) ? right : this.right); } /** * @return {number} The total number of nodes in the tree. */ - count() { + count(): number { return this.left.count() + 1 + this.right.count(); } /** * @return {boolean} True if the tree is empty. 
*/ - isEmpty() { + isEmpty(): boolean { return false; } @@ -216,10 +209,10 @@ export class LLRBNode { * @return {*} The first truthy value returned by action, or the last falsey * value returned by action */ - inorderTraversal(action) { + inorderTraversal(action: (k: K, v: V) => any): boolean { return this.left.inorderTraversal(action) || - action(this.key, this.value) || - this.right.inorderTraversal(action); + action(this.key, this.value) || + this.right.inorderTraversal(action); } /** @@ -230,35 +223,35 @@ export class LLRBNode { * node. If it returns true, traversal is aborted. * @return {*} True if traversal was aborted. */ - reverseTraversal(action) { + reverseTraversal(action: (k: K, v: V) => void): boolean { return this.right.reverseTraversal(action) || - action(this.key, this.value) || - this.left.reverseTraversal(action); + action(this.key, this.value) || + this.left.reverseTraversal(action); } /** * @return {!Object} The minimum node in the tree. * @private */ - min_() { + private min_(): LLRBNode { if (this.left.isEmpty()) { return this; } else { - return this.left.min_(); + return (this.left as LLRBNode).min_(); } } /** * @return {!K} The maximum key in the tree. */ - minKey() { + minKey(): K { return this.min_().key; } /** * @return {!K} The maximum key in the tree. */ - maxKey() { + maxKey(): K { if (this.right.isEmpty()) { return this.key; } else { @@ -270,11 +263,11 @@ export class LLRBNode { * * @param {!Object} key Key to insert. * @param {!Object} value Value to insert. - * @param {fb.Comparator} comparator Comparator. + * @param {Comparator} comparator Comparator. * @return {!LLRBNode} New tree, with the key/value added. */ - insert(key, value, comparator) { - var cmp, n; + insert(key: K, value: V, comparator: Comparator): LLRBNode { + let cmp, n; n = this; cmp = comparator(key, n.key); if (cmp < 0) { @@ -291,25 +284,24 @@ export class LLRBNode { * @private * @return {!LLRBNode|LLRBEmptyNode} New tree, with the minimum key removed. */ - removeMin_() { - var n; + private removeMin_(): LLRBNode | LLRBEmptyNode { if (this.left.isEmpty()) { - return SortedMap.EMPTY_NODE_; + return SortedMap.EMPTY_NODE as LLRBEmptyNode; } - n = this; + let n: LLRBNode = this; if (!n.left.isRed_() && !n.left.left.isRed_()) n = n.moveRedLeft_(); - n = n.copy(null, null, null, n.left.removeMin_(), null); + n = n.copy(null, null, null, (n.left as LLRBNode).removeMin_(), null); return n.fixUp_(); } /** * @param {!Object} key The key of the item to remove. - * @param {fb.Comparator} comparator Comparator. + * @param {Comparator} comparator Comparator. * @return {!LLRBNode|LLRBEmptyNode} New tree, with the specified item removed. */ - remove(key, comparator) { - var n, smallest; + remove(key: K, comparator: Comparator): LLRBNode | LLRBEmptyNode { + let n, smallest; n = this; if (comparator(key, n.key) < 0) { if (!n.left.isEmpty() && !n.left.isRed_() && !n.left.left.isRed_()) { @@ -323,11 +315,11 @@ export class LLRBNode { } if (comparator(key, n.key) === 0) { if (n.right.isEmpty()) { - return SortedMap.EMPTY_NODE_; + return SortedMap.EMPTY_NODE as LLRBEmptyNode; } else { - smallest = n.right.min_(); + smallest = (n.right as LLRBNode).min_(); n = n.copy(smallest.key, smallest.value, null, null, - n.right.removeMin_()); + (n.right as LLRBNode).removeMin_()); } } n = n.copy(null, null, null, null, n.right.remove(key, comparator)); @@ -339,7 +331,7 @@ export class LLRBNode { * @private * @return {boolean} Whether this is a RED node. 
*/ - isRed_() { + isRed_(): boolean { return this.color; } @@ -347,8 +339,8 @@ export class LLRBNode { * @private * @return {!LLRBNode} New tree after performing any needed rotations. */ - fixUp_() { - var n = this; + private fixUp_(): LLRBNode { + let n = this as any; if (n.right.isRed_() && !n.left.isRed_()) n = n.rotateLeft_(); if (n.left.isRed_() && n.left.left.isRed_()) n = n.rotateRight_(); if (n.left.isRed_() && n.right.isRed_()) n = n.colorFlip_(); @@ -359,10 +351,10 @@ export class LLRBNode { * @private * @return {!LLRBNode} New tree, after moveRedLeft. */ - moveRedLeft_() { - var n = this.colorFlip_(); + private moveRedLeft_(): LLRBNode { + let n = this.colorFlip_(); if (n.right.left.isRed_()) { - n = n.copy(null, null, null, null, n.right.rotateRight_()); + n = n.copy(null, null, null, null, (n.right as LLRBNode).rotateRight_()); n = n.rotateLeft_(); n = n.colorFlip_(); } @@ -373,8 +365,8 @@ export class LLRBNode { * @private * @return {!LLRBNode} New tree, after moveRedRight. */ - moveRedRight_() { - var n = this.colorFlip_(); + private moveRedRight_(): LLRBNode { + let n = this.colorFlip_(); if (n.left.left.isRed_()) { n = n.rotateRight_(); n = n.colorFlip_(); @@ -386,30 +378,27 @@ export class LLRBNode { * @private * @return {!LLRBNode} New tree, after rotateLeft. */ - rotateLeft_() { - var nl; - nl = this.copy(null, null, LLRBNode.RED, null, this.right.left); - return this.right.copy(null, null, this.color, nl, null); + private rotateLeft_(): LLRBNode { + const nl = this.copy(null, null, LLRBNode.RED, null, this.right.left); + return this.right.copy(null, null, this.color, nl, null) as LLRBNode; } /** * @private * @return {!LLRBNode} New tree, after rotateRight. */ - rotateRight_() { - var nr; - nr = this.copy(null, null, LLRBNode.RED, this.left.right, null); - return this.left.copy(null, null, this.color, null, nr); + private rotateRight_(): LLRBNode { + const nr = this.copy(null, null, LLRBNode.RED, this.left.right, null); + return this.left.copy(null, null, this.color, null, nr) as LLRBNode; } /** * @private * @return {!LLRBNode} New tree, after colorFlip. */ - colorFlip_() { - var left, right; - left = this.left.copy(null, null, !this.left.color, null, null); - right = this.right.copy(null, null, !this.right.color, null, null); + private colorFlip_(): LLRBNode { + const left = this.left.copy(null, null, !this.left.color, null, null); + const right = this.right.copy(null, null, !this.right.color, null, null); return this.copy(null, null, !this.color, left, right); } @@ -419,29 +408,24 @@ export class LLRBNode { * @private * @return {boolean} True if all is well. */ - checkMaxDepth_() { - var blackDepth; - blackDepth = this.check_(); - if (Math.pow(2.0, blackDepth) <= this.count() + 1) { - return true; - } else { - return false; - } + private checkMaxDepth_(): boolean { + const blackDepth = this.check_(); + return (Math.pow(2.0, blackDepth) <= this.count() + 1); } /** * @private * @return {number} Not sure what this returns exactly. :-). */ - check_() { - var blackDepth; + check_(): number { + let blackDepth; if (this.isRed_() && this.left.isRed_()) { throw new Error('Red node has red child(' + this.key + ',' + - this.value + ')'); + this.value + ')'); } if (this.right.isRed_()) { throw new Error('Right child of (' + this.key + ',' + - this.value + ') is red'); + this.value + ') is red'); } blackDepth = this.left.check_(); if (blackDepth !== this.right.check_()) { @@ -450,24 +434,27 @@ export class LLRBNode { return blackDepth + (this.isRed_() ? 
0 : 1); } } -}; // end LLRBNode +} /** * Represents an empty node (a leaf node in the Red-Black Tree). */ -export class LLRBEmptyNode { - /** - * @template K, V - */ - constructor() {} +export class LLRBEmptyNode { + key: K; + value: V; + left: LLRBNode | LLRBEmptyNode; + right: LLRBNode | LLRBEmptyNode; + color: boolean; /** * Returns a copy of the current node. * * @return {!LLRBEmptyNode} The node copy. */ - copy() { + copy(key: K | null, value: V | null, color: boolean | null, + left: LLRBNode | LLRBEmptyNode | null, + right: LLRBNode | LLRBEmptyNode | null): LLRBEmptyNode { return this; } @@ -476,10 +463,10 @@ export class LLRBEmptyNode { * * @param {!K} key Key to be added. * @param {!V} value Value to be added. - * @param {fb.Comparator} comparator Comparator. + * @param {Comparator} comparator Comparator. * @return {!LLRBNode} New tree, with item added. */ - insert(key, value, comparator) { + insert(key: K, value: V, comparator: Comparator): LLRBNode { return new LLRBNode(key, value, null); } @@ -487,23 +474,24 @@ export class LLRBEmptyNode { * Returns a copy of the tree, with the specified key removed. * * @param {!K} key The key to remove. + * @param {Comparator} comparator Comparator. * @return {!LLRBEmptyNode} New tree, with item removed. */ - remove(key, comparator) { + remove(key: K, comparator: Comparator): LLRBEmptyNode { return this; } /** * @return {number} The total number of nodes in the tree. */ - count() { + count(): number { return 0; } /** * @return {boolean} True if the tree is empty. */ - isEmpty() { + isEmpty(): boolean { return true; } @@ -511,11 +499,11 @@ export class LLRBEmptyNode { * Traverses the tree in key order and calls the specified action function * for each node. * - * @param {function(!K, !V)} action Callback function to be called for each + * @param {function(!K, !V):*} action Callback function to be called for each * node. If it returns true, traversal is aborted. * @return {boolean} True if traversal was aborted. */ - inorderTraversal(action) { + inorderTraversal(action: (k: K, v: V) => any): boolean { return false; } @@ -527,21 +515,21 @@ export class LLRBEmptyNode { * node. If it returns true, traversal is aborted. * @return {boolean} True if traversal was aborted. */ - reverseTraversal(action) { + reverseTraversal(action: (k: K, v: V) => void): boolean { return false; } /** * @return {null} */ - minKey() { + minKey(): null { return null; } /** * @return {null} */ - maxKey() { + maxKey(): null { return null; } @@ -549,44 +537,37 @@ export class LLRBEmptyNode { * @private * @return {number} Not sure what this returns exactly. :-). */ - check_() { return 0; } + check_(): number { + return 0; + } /** * @private * @return {boolean} Whether this node is red. */ - isRed_() { return false; } -}; // end LLRBEmptyNode + isRed_() { + return false; + } +} /** * An immutable sorted map implementation, based on a Left-leaning Red-Black * tree. */ -export class SortedMap { - /** @private */ - comparator_; - - /** @private */ - root_; - +export class SortedMap { /** * Always use the same empty node, to reduce memory. - * @private * @const */ - static EMPTY_NODE_ = new LLRBEmptyNode(); + static EMPTY_NODE = new LLRBEmptyNode(); /** * @template K, V - * @param {function(K, K):number} comparator Key comparator. - * @param {LLRBNode=} opt_root (Optional) Root node for the map. + * @param {function(K, K):number} comparator_ Key comparator. + * @param {LLRBNode=} root_ (Optional) Root node for the map. */ - constructor(comparator, opt_root?) 
{ - /** @private */ - this.comparator_ = comparator; - - /** @private */ - this.root_ = opt_root ? opt_root : SortedMap.EMPTY_NODE_; + constructor(private comparator_: Comparator, + private root_: LLRBNode | LLRBEmptyNode = SortedMap.EMPTY_NODE as LLRBEmptyNode) { } /** @@ -597,11 +578,11 @@ export class SortedMap { * @param {!V} value Value to be added. * @return {!SortedMap.} New map, with item added. */ - insert(key, value) { + insert(key: K, value: V): SortedMap { return new SortedMap( - this.comparator_, - this.root_.insert(key, value, this.comparator_) - .copy(null, null, LLRBNode.BLACK, null, null)); + this.comparator_, + this.root_.insert(key, value, this.comparator_) + .copy(null, null, LLRBNode.BLACK, null, null)); } /** @@ -610,11 +591,11 @@ export class SortedMap { * @param {!K} key The key to remove. * @return {!SortedMap.} New map, with item removed. */ - remove(key) { + remove(key: K): SortedMap { return new SortedMap( - this.comparator_, - this.root_.remove(key, this.comparator_) - .copy(null, null, LLRBNode.BLACK, null, null)); + this.comparator_, + this.root_.remove(key, this.comparator_) + .copy(null, null, LLRBNode.BLACK, null, null)); } /** @@ -624,9 +605,9 @@ export class SortedMap { * @return {?V} The value of the node with the given key, or null if the * key doesn't exist. */ - get(key) { - var cmp; - var node = this.root_; + get(key: K): V | null { + let cmp; + let node = this.root_; while (!node.isEmpty()) { cmp = this.comparator_(key, node.key); if (cmp === 0) { @@ -645,8 +626,8 @@ export class SortedMap { * @param {K} key The key to find the predecessor of * @return {?K} The predecessor key. */ - getPredecessorKey(key) { - var cmp, node = this.root_, rightParent = null; + getPredecessorKey(key: K): K | null { + let cmp, node = this.root_, rightParent = null; while (!node.isEmpty()) { cmp = this.comparator_(key, node.key); if (cmp === 0) { @@ -674,28 +655,28 @@ export class SortedMap { /** * @return {boolean} True if the map is empty. */ - isEmpty() { + isEmpty(): boolean { return this.root_.isEmpty(); } /** * @return {number} The total number of nodes in the map. */ - count() { + count(): number { return this.root_.count(); } /** * @return {?K} The minimum key in the map. */ - minKey() { + minKey(): K | null { return this.root_.minKey(); } /** * @return {?K} The maximum key in the map. */ - maxKey() { + maxKey(): K | null { return this.root_.maxKey(); } @@ -708,7 +689,7 @@ export class SortedMap { * @return {*} The first truthy value returned by action, or the last falsey * value returned by action */ - inorderTraversal(action) { + inorderTraversal(action: (k: K, v: V) => any): boolean { return this.root_.inorderTraversal(action); } @@ -720,45 +701,45 @@ export class SortedMap { * for each key/value pair. If action returns true, traversal is aborted. * @return {*} True if the traversal was aborted. */ - reverseTraversal(action) { + reverseTraversal(action: (k: K, v: V) => void): boolean { return this.root_.reverseTraversal(action); } /** * Returns an iterator over the SortedMap. * @template T - * @param {(function(K, V):T)=} opt_resultGenerator + * @param {(function(K, V):T)=} resultGenerator * @return {SortedMapIterator.} The iterator. */ - getIterator(resultGenerator?) { + getIterator(resultGenerator?: (k: K, v: V) => T): SortedMapIterator { return new SortedMapIterator(this.root_, - null, - this.comparator_, - false, - resultGenerator); + null, + this.comparator_, + false, + resultGenerator); } - getIteratorFrom(key, resultGenerator?) 
{ + getIteratorFrom(key: K, resultGenerator?: (k: K, v: V) => T): SortedMapIterator { return new SortedMapIterator(this.root_, - key, - this.comparator_, - false, - resultGenerator); + key, + this.comparator_, + false, + resultGenerator); } - getReverseIteratorFrom(key, resultGenerator?) { + getReverseIteratorFrom(key: K, resultGenerator?: (k: K, v: V) => T): SortedMapIterator { return new SortedMapIterator(this.root_, - key, - this.comparator_, - true, - resultGenerator); + key, + this.comparator_, + true, + resultGenerator); } - getReverseIterator(resultGenerator?) { + getReverseIterator(resultGenerator?: (k: K, v: V) => T): SortedMapIterator { return new SortedMapIterator(this.root_, - null, - this.comparator_, - true, - resultGenerator); + null, + this.comparator_, + true, + resultGenerator); } -}; // end SortedMap \ No newline at end of file +} \ No newline at end of file diff --git a/src/database/core/util/Tree.ts b/src/database/core/util/Tree.ts index 8dac5b7b23a..472b5614a3a 100644 --- a/src/database/core/util/Tree.ts +++ b/src/database/core/util/Tree.ts @@ -14,26 +14,20 @@ * limitations under the License. */ -import { assert } from "../../../utils/assert"; -import { Path } from "./Path"; +import { assert } from '../../../utils/assert'; +import { Path } from './Path'; import { forEach, contains, safeGet } from '../../../utils/obj' /** * Node in a Tree. */ -export class TreeNode { - children; - childCount; - value; - - constructor() { - // TODO: Consider making accessors that create children and value lazily or - // separate Internal / Leaf 'types'. - this.children = { }; - this.childCount = 0; - this.value = null; - } -}; // end TreeNode +export class TreeNode { + // TODO: Consider making accessors that create children and value lazily or + // separate Internal / Leaf 'types'. + children: { [name: string]: TreeNode } = {}; + childCount = 0; + value: T | null = null; +} /** @@ -41,21 +35,16 @@ export class TreeNode { * Nodes are not enumerated (by forEachChild) unless they have a value or non-empty * children. */ -export class Tree { - name_; - parent_; - node_; - +export class Tree { /** * @template T - * @param {string=} opt_name Optional name of the node. - * @param {Tree=} opt_parent Optional parent node. - * @param {TreeNode=} opt_node Optional node to wrap. + * @param {string=} name_ Optional name of the node. + * @param {Tree=} parent_ Optional parent node. + * @param {TreeNode=} node_ Optional node to wrap. */ - constructor(opt_name?, opt_parent?, opt_node?) { - this.name_ = opt_name ? opt_name : ''; - this.parent_ = opt_parent ? opt_parent : null; - this.node_ = opt_node ? opt_node : new TreeNode(); + constructor(private name_: string = '', + private parent_: Tree | null = null, + private node_: TreeNode = new TreeNode()) { } /** @@ -64,13 +53,13 @@ export class Tree { * @param {!(string|Path)} pathObj Path to look up. * @return {!Tree.} Tree for path. */ - subTree(pathObj) { + subTree(pathObj: string | Path): Tree { // TODO: Require pathObj to be Path? - var path = (pathObj instanceof Path) ? - pathObj : new Path(pathObj); - var child = this, next; + let path = (pathObj instanceof Path) ? 
+ pathObj : new Path(pathObj); + let child = this as any, next; while ((next = path.getFront()) !== null) { - var childNode = safeGet(child.node_.children, next) || new TreeNode(); + const childNode = safeGet(child.node_.children, next) || new TreeNode(); child = new Tree(next, child, childNode); path = path.popFront(); } @@ -83,7 +72,7 @@ export class Tree { * * @return {?T} The data or null if no data exists. */ - getValue() { + getValue(): T | null { return this.node_.value; } @@ -92,7 +81,7 @@ export class Tree { * * @param {!T} value Value to set. */ - setValue(value) { + setValue(value: T) { assert(typeof value !== 'undefined', 'Cannot set value to undefined'); this.node_.value = value; this.updateParents_(); @@ -103,7 +92,7 @@ export class Tree { */ clear() { this.node_.value = null; - this.node_.children = { }; + this.node_.children = {}; this.node_.childCount = 0; this.updateParents_(); } @@ -111,14 +100,14 @@ export class Tree { /** * @return {boolean} Whether the tree has any children. */ - hasChildren() { + hasChildren(): boolean { return this.node_.childCount > 0; } /** * @return {boolean} Whether the tree is empty (no value or children). */ - isEmpty() { + isEmpty(): boolean { return this.getValue() === null && !this.hasChildren(); } @@ -127,10 +116,9 @@ export class Tree { * * @param {function(!Tree.)} action Action to be called for each child. */ - forEachChild(action) { - var self = this; - forEach(this.node_.children, function(child, childTree) { - action(new Tree(child, self, childTree)); + forEachChild(action: (tree: Tree) => void) { + forEach(this.node_.children, (child: string, childTree: TreeNode) => { + action(new Tree(child, this, childTree)); }); } @@ -138,20 +126,20 @@ export class Tree { * Does a depth-first traversal of this node's descendants, calling action for each one. * * @param {function(!Tree.)} action Action to be called for each child. - * @param {boolean=} opt_includeSelf Whether to call action on this node as well. Defaults to + * @param {boolean=} includeSelf Whether to call action on this node as well. Defaults to * false. - * @param {boolean=} opt_childrenFirst Whether to call action on children before calling it on + * @param {boolean=} childrenFirst Whether to call action on children before calling it on * parent. */ - forEachDescendant(action, opt_includeSelf, opt_childrenFirst) { - if (opt_includeSelf && !opt_childrenFirst) + forEachDescendant(action: (tree: Tree) => void, includeSelf?: boolean, childrenFirst?: boolean) { + if (includeSelf && !childrenFirst) action(this); - this.forEachChild(function(child) { - child.forEachDescendant(action, /*opt_includeSelf=*/true, opt_childrenFirst); + this.forEachChild(function (child) { + child.forEachDescendant(action, /*includeSelf=*/true, childrenFirst); }); - if (opt_includeSelf && opt_childrenFirst) + if (includeSelf && childrenFirst) action(this); } @@ -160,11 +148,11 @@ export class Tree { * * @param {function(!Tree.)} action Action to be called on each parent; return * true to abort. - * @param {boolean=} opt_includeSelf Whether to call action on this node as well. + * @param {boolean=} includeSelf Whether to call action on this node as well. * @return {boolean} true if the action callback returned true. */ - forEachAncestor(action, opt_includeSelf) { - var node = opt_includeSelf ? this : this.parent(); + forEachAncestor(action: (tree: Tree) => void, includeSelf?: boolean): boolean { + let node = includeSelf ? 
this : this.parent(); while (node !== null) { if (action(node)) { return true; @@ -181,8 +169,8 @@ export class Tree { * * @param {function(!Tree.)} action Action to be called for each child. */ - forEachImmediateDescendantWithValue(action) { - this.forEachChild(function(child) { + forEachImmediateDescendantWithValue(action: (tree: Tree) => void) { + this.forEachChild(function (child) { if (child.getValue() !== null) action(child); else @@ -193,22 +181,22 @@ export class Tree { /** * @return {!Path} The path of this tree node, as a Path. */ - path() { + path(): Path { return new Path(this.parent_ === null ? - this.name_ : this.parent_.path() + '/' + this.name_); + this.name_ : this.parent_.path() + '/' + this.name_); } /** * @return {string} The name of the tree node. */ - name() { + name(): string { return this.name_; } /** * @return {?Tree} The parent tree node, or null if this is the root of the tree. */ - parent() { + parent(): Tree | null { return this.parent_; } @@ -217,7 +205,7 @@ export class Tree { * * @private */ - updateParents_() { + private updateParents_() { if (this.parent_ !== null) this.parent_.updateChild_(this.name_, this); } @@ -229,9 +217,9 @@ export class Tree { * @param {!Tree.} child The child to update. * @private */ - updateChild_(childName, child) { - var childEmpty = child.isEmpty(); - var childExists = contains(this.node_.children, childName); + private updateChild_(childName: string, child: Tree) { + const childEmpty = child.isEmpty(); + const childExists = contains(this.node_.children, childName); if (childEmpty && childExists) { delete (this.node_.children[childName]); this.node_.childCount--; @@ -243,4 +231,4 @@ export class Tree { this.updateParents_(); } } -}; // end Tree +} diff --git a/src/database/core/util/VisibilityMonitor.ts b/src/database/core/util/VisibilityMonitor.ts index ba0bfb0a75c..fc518d7f773 100644 --- a/src/database/core/util/VisibilityMonitor.ts +++ b/src/database/core/util/VisibilityMonitor.ts @@ -17,11 +17,13 @@ import { EventEmitter } from "./EventEmitter"; import { assert } from "../../../utils/assert"; +declare const document: any; + /** - * @extends {fb.core.util.EventEmitter} + * @extends {EventEmitter} */ export class VisibilityMonitor extends EventEmitter { - visible_; + private visible_: boolean; static getInstance() { return new VisibilityMonitor(); @@ -29,7 +31,8 @@ export class VisibilityMonitor extends EventEmitter { constructor() { super(['visible']); - var hidden, visibilityChange; + let hidden: string; + let visibilityChange: string; if (typeof document !== 'undefined' && typeof document.addEventListener !== 'undefined') { if (typeof document['hidden'] !== 'undefined') { // Opera 12.10 and Firefox 18 and later support @@ -54,12 +57,11 @@ export class VisibilityMonitor extends EventEmitter { this.visible_ = true; if (visibilityChange) { - var self = this; - document.addEventListener(visibilityChange, function() { - var visible = !document[hidden]; - if (visible !== self.visible_) { - self.visible_ = visible; - self.trigger('visible', visible); + document.addEventListener(visibilityChange, () => { + const visible = !document[hidden]; + if (visible !== this.visible_) { + this.visible_ = visible; + this.trigger('visible', visible); } }, false); } @@ -69,8 +71,8 @@ export class VisibilityMonitor extends EventEmitter { * @param {!string} eventType * @return {Array.} */ - getInitialEvent(eventType) { + getInitialEvent(eventType: string): boolean[] { assert(eventType === 'visible', 'Unknown event type: ' + eventType); return 
[this.visible_]; } -}; // end VisibilityMonitor \ No newline at end of file +} \ No newline at end of file diff --git a/src/database/core/util/libs/parser.ts b/src/database/core/util/libs/parser.ts index 32f2516f50d..9315f99639f 100644 --- a/src/database/core/util/libs/parser.ts +++ b/src/database/core/util/libs/parser.ts @@ -14,42 +14,42 @@ * limitations under the License. */ -import { Path } from "../Path"; -import { RepoInfo } from "../../RepoInfo"; -import { warnIfPageIsSecure, fatal } from "../util"; +import { Path } from '../Path'; +import { RepoInfo } from '../../RepoInfo'; +import { warnIfPageIsSecure, fatal } from '../util'; /** * @param {!string} pathString * @return {string} */ -function decodePath(pathString) { - var pathStringDecoded = ''; - var pieces = pathString.split('/'); - for (var i = 0; i < pieces.length; i++) { +function decodePath(pathString: string): string { + let pathStringDecoded = ''; + const pieces = pathString.split('/'); + for (let i = 0; i < pieces.length; i++) { if (pieces[i].length > 0) { - var piece = pieces[i]; + let piece = pieces[i]; try { - piece = decodeURIComponent(piece.replace(/\+/g, " ")); + piece = decodeURIComponent(piece.replace(/\+/g, ' ')); } catch (e) {} pathStringDecoded += '/' + piece; } } return pathStringDecoded; -}; +} /** * * @param {!string} dataURL * @return {{repoInfo: !RepoInfo, path: !Path}} */ -export const parseRepoInfo = function(dataURL) { - var parsedUrl = parseURL(dataURL), - namespace = parsedUrl.subdomain; +export const parseRepoInfo = function (dataURL: string): { repoInfo: RepoInfo, path: Path } { + const parsedUrl = parseURL(dataURL), + namespace = parsedUrl.subdomain; if (parsedUrl.domain === 'firebase') { fatal(parsedUrl.host + - ' is no longer supported. ' + - 'Please use .firebaseio.com instead'); + ' is no longer supported. ' + + 'Please use .firebaseio.com instead'); } // Catch common error of uninitialized namespace value. @@ -61,7 +61,7 @@ export const parseRepoInfo = function(dataURL) { warnIfPageIsSecure(); } - var webSocketOnly = (parsedUrl.scheme === 'ws') || (parsedUrl.scheme === 'wss'); + const webSocketOnly = (parsedUrl.scheme === 'ws') || (parsedUrl.scheme === 'wss'); return { repoInfo: new RepoInfo(parsedUrl.host, parsedUrl.secure, namespace, webSocketOnly), @@ -74,31 +74,39 @@ export const parseRepoInfo = function(dataURL) { * @param {!string} dataURL * @return {{host: string, port: number, domain: string, subdomain: string, secure: boolean, scheme: string, pathString: string}} */ -export const parseURL = function(dataURL) { +export const parseURL = function (dataURL: string): { + host: string, + port: number, + domain: string, + subdomain: string, + secure: boolean, + scheme: string, + pathString: string +} { // Default to empty strings in the event of a malformed string. - var host = '', domain = '', subdomain = '', pathString = ''; + let host = '', domain = '', subdomain = '', pathString = ''; // Always default to SSL, unless otherwise specified. - var secure = true, scheme = 'https', port = 443; + let secure = true, scheme = 'https', port = 443; // Don't do any validation here. The caller is responsible for validating the result of parsing. if (typeof dataURL === 'string') { // Parse scheme. - var colonInd = dataURL.indexOf('//'); + let colonInd = dataURL.indexOf('//'); if (colonInd >= 0) { scheme = dataURL.substring(0, colonInd - 1); dataURL = dataURL.substring(colonInd + 2); } // Parse host and path. 
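// Rough sketch of the decomposition this function is expected to produce for a
// typical database URL; the sample URL and the field values below are assumed
// for illustration, not taken from the patch:
//
//   const parsed = parseURL('https://example-ns.firebaseio.com/users/42');
//   // parsed.scheme     === 'https'
//   // parsed.secure     === true
//   // parsed.host       === 'example-ns.firebaseio.com'
//   // parsed.subdomain  === 'example-ns'
//   // parsed.domain     === 'firebaseio'
//   // parsed.pathString === '/users/42'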
- var slashInd = dataURL.indexOf('/'); + let slashInd = dataURL.indexOf('/'); if (slashInd === -1) { slashInd = dataURL.length; } host = dataURL.substring(0, slashInd); pathString = decodePath(dataURL.substring(slashInd)); - var parts = host.split('.'); + const parts = host.split('.'); if (parts.length === 3) { // Normalize namespaces to lowercase to share storage / connection. domain = parts[1]; @@ -116,12 +124,12 @@ export const parseURL = function(dataURL) { } return { - host: host, - port: port, - domain: domain, - subdomain: subdomain, - secure: secure, - scheme: scheme, - pathString: pathString + host, + port, + domain, + subdomain, + secure, + scheme, + pathString, }; }; \ No newline at end of file diff --git a/src/database/core/util/util.ts b/src/database/core/util/util.ts index 26dced8f5c6..da9af46c286 100644 --- a/src/database/core/util/util.ts +++ b/src/database/core/util/util.ts @@ -14,52 +14,39 @@ * limitations under the License. */ -declare const Windows; +import { Query } from '../../api/Query'; + +declare const window: any; +declare const Windows: any; import { assert } from '../../../utils/assert'; import { forEach } from '../../../utils/obj'; import { base64 } from '../../../utils/crypt'; import { Sha1 } from '../../../utils/Sha1'; -import { - assert as _assert, - assertionError as _assertError -} from "../../../utils/assert"; -import { stringToByteArray } from "../../../utils/utf8"; -import { stringify } from "../../../utils/json"; -import { SessionStorage } from "../storage/storage"; -import { RepoInfo } from "../RepoInfo"; -import { isNodeSdk } from "../../../utils/environment"; +import { stringToByteArray } from '../../../utils/utf8'; +import { stringify } from '../../../utils/json'; +import { SessionStorage } from '../storage/storage'; +import { isNodeSdk } from '../../../utils/environment'; /** * Returns a locally-unique ID (generated by just incrementing up from 0 each time its called). * @type {function(): number} Generated ID. */ -export const LUIDGenerator = (function() { - var id = 1; - return function() { +export const LUIDGenerator: (() => number) = (function () { + let id = 1; + return function () { return id++; }; })(); -/** - * Same as fb.util.assert(), but forcefully logs instead of throws. 
- * @param {*} assertion The assertion to be tested for falsiness - * @param {!string} message The message to be logged on failure - */ -export const assertWeak = function(assertion, message) { - if (!assertion) { - error(message); - } -}; - /** * URL-safe base64 encoding * @param {!string} str * @return {!string} */ -export const base64Encode = function(str) { - var utf8Bytes = stringToByteArray(str); +export const base64Encode = function (str: string): string { + const utf8Bytes = stringToByteArray(str); return base64.encodeByteArray(utf8Bytes, /*useWebSafe=*/true); }; @@ -77,7 +64,7 @@ export function setBufferImpl(impl) { * @param {string} str To be decoded * @return {?string} Decoded result, if possible */ -export const base64Decode = function(str) { +export const base64Decode = function (str: string): string | null { try { if (BufferImpl()) { return new BufferImpl(str, 'base64').toString('utf8'); @@ -96,11 +83,11 @@ export const base64Decode = function(str) { * @param {!string} str The string to hash * @return {!string} The resulting hash */ -export const sha1 = function(str) { - var utf8Bytes = stringToByteArray(str); - var sha1 = new Sha1(); +export const sha1 = function (str: string): string { + const utf8Bytes = stringToByteArray(str); + const sha1 = new Sha1(); sha1.update(utf8Bytes); - var sha1Bytes = sha1.digest(); + const sha1Bytes = sha1.digest(); return base64.encodeByteArray(sha1Bytes); }; @@ -110,18 +97,18 @@ export const sha1 = function(str) { * @return {string} * @private */ -export const buildLogMessage_ = function(var_args) { - var message = ''; - for (var i = 0; i < arguments.length; i++) { - if (Array.isArray(arguments[i]) || - (arguments[i] && typeof arguments[i] === 'object' && typeof arguments[i].length === 'number')) { - message += buildLogMessage_.apply(null, arguments[i]); +const buildLogMessage_ = function (...var_args: any[]): string { + let message = ''; + for (let i = 0; i < var_args.length; i++) { + if (Array.isArray(var_args[i]) || + (var_args[i] && typeof var_args[i] === 'object' && typeof var_args[i].length === 'number')) { + message += buildLogMessage_.apply(null, var_args[i]); } - else if (typeof arguments[i] === 'object') { - message += stringify(arguments[i]); + else if (typeof var_args[i] === 'object') { + message += stringify(var_args[i]); } else { - message += arguments[i]; + message += var_args[i]; } message += ' '; } @@ -134,7 +121,7 @@ export const buildLogMessage_ = function(var_args) { * Use this for all debug messages in Firebase. * @type {?function(string)} */ -export var logger = null; +export let logger: ((a: string) => void) | null = null; /** @@ -142,30 +129,30 @@ export var logger = null; * @type {boolean} * @private */ -export var firstLog_ = true; +let firstLog_ = true; /** * The implementation of Firebase.enableLogging (defined here to break dependencies) - * @param {boolean|?function(string)} logger A flag to turn on logging, or a custom logger - * @param {boolean=} opt_persistent Whether or not to persist logging settings across refreshes + * @param {boolean|?function(string)} logger_ A flag to turn on logging, or a custom logger + * @param {boolean=} persistent Whether or not to persist logging settings across refreshes */ -export const enableLogging = function(logger, opt_persistent?) 
{ - assert(!opt_persistent || (logger === true || logger === false), "Can't turn on custom loggers persistently."); - if (logger === true) { +export const enableLogging = function (logger_?: boolean | ((a: string) => void) | null, persistent?: boolean) { + assert(!persistent || (logger_ === true || logger_ === false), 'Can\'t turn on custom loggers persistently.'); + if (logger_ === true) { if (typeof console !== 'undefined') { if (typeof console.log === 'function') { logger = console.log.bind(console); } else if (typeof console.log === 'object') { // IE does this. - logger = function(message) { console.log(message); }; + logger = function (message) { console.log(message); }; } } - if (opt_persistent) + if (persistent) SessionStorage.set('logging_enabled', true); } - else if (typeof logger === 'function') { - logger = logger; + else if (typeof logger_ === 'function') { + logger = logger_; } else { logger = null; SessionStorage.remove('logging_enabled'); @@ -177,7 +164,7 @@ export const enableLogging = function(logger, opt_persistent?) { * * @param {...(string|Arguments)} var_args */ -export const log = function(...var_args) { +export const log = function (...var_args: string[]) { if (firstLog_ === true) { firstLog_ = false; if (logger === null && SessionStorage.get('logging_enabled') === true) @@ -185,7 +172,7 @@ export const log = function(...var_args) { } if (logger) { - var message = buildLogMessage_.apply(null, arguments); + const message = buildLogMessage_.apply(null, var_args); logger(message); } }; @@ -195,9 +182,9 @@ export const log = function(...var_args) { * @param {!string} prefix * @return {function(...[*])} */ -export const logWrapper = function(prefix) { - return function() { - log(prefix, arguments); +export const logWrapper = function (prefix: string): (...var_args: any[]) => void { + return function (...var_args: any[]) { + log(prefix, ...var_args); }; }; @@ -205,10 +192,10 @@ export const logWrapper = function(prefix) { /** * @param {...string} var_args */ -export const error = function(var_args) { +export const error = function (...var_args: string[]) { if (typeof console !== 'undefined') { - var message = 'FIREBASE INTERNAL ERROR: ' + - buildLogMessage_.apply(null, arguments); + const message = 'FIREBASE INTERNAL ERROR: ' + + buildLogMessage_(...var_args); if (typeof console.error !== 'undefined') { console.error(message); } else { @@ -221,8 +208,8 @@ export const error = function(var_args) { /** * @param {...string} var_args */ -export const fatal = function(var_args) { - var message = buildLogMessage_.apply(null, arguments); +export const fatal = function (...var_args: string[]) { + const message = buildLogMessage_(...var_args); throw new Error('FIREBASE FATAL ERROR: ' + message); }; @@ -230,9 +217,9 @@ export const fatal = function(var_args) { /** * @param {...*} var_args */ -export const warn = function(...var_args) { +export const warn = function (...var_args: any[]) { if (typeof console !== 'undefined') { - var message = 'FIREBASE WARNING: ' + buildLogMessage_.apply(null, arguments); + const message = 'FIREBASE WARNING: ' + buildLogMessage_(...var_args); if (typeof console.warn !== 'undefined') { console.warn(message); } else { @@ -246,12 +233,12 @@ export const warn = function(...var_args) { * Logs a warning if the containing page uses https. Called when a call to new Firebase * does not use https. */ -export const warnIfPageIsSecure = function() { +export const warnIfPageIsSecure = function () { // Be very careful accessing browser globals. 
Who knows what may or may not exist. if (typeof window !== 'undefined' && window.location && window.location.protocol && - window.location.protocol.indexOf('https:') !== -1) { + window.location.protocol.indexOf('https:') !== -1) { warn('Insecure Firebase access from a secure page. ' + - 'Please use https in calls to new Firebase().'); + 'Please use https in calls to new Firebase().'); } }; @@ -259,10 +246,10 @@ export const warnIfPageIsSecure = function() { /** * @param {!String} methodName */ -export const warnAboutUnsupportedMethod = function(methodName) { +export const warnAboutUnsupportedMethod = function (methodName: string) { warn(methodName + - ' is unsupported and will likely change soon. ' + - 'Please do not use.'); + ' is unsupported and will likely change soon. ' + + 'Please do not use.'); }; @@ -271,26 +258,26 @@ export const warnAboutUnsupportedMethod = function(methodName) { * @param {*} data * @return {boolean} */ -export const isInvalidJSONNumber = function(data) { +export const isInvalidJSONNumber = function (data: any): boolean { return typeof data === 'number' && - (data != data || // NaN - data == Number.POSITIVE_INFINITY || - data == Number.NEGATIVE_INFINITY); + (data != data || // NaN + data == Number.POSITIVE_INFINITY || + data == Number.NEGATIVE_INFINITY); }; /** * @param {function()} fn */ -export const executeWhenDOMReady = function(fn) { +export const executeWhenDOMReady = function (fn: () => void) { if (isNodeSdk() || document.readyState === 'complete') { fn(); } else { // Modeled after jQuery. Try DOMContentLoaded and onreadystatechange (which // fire before onload), but fall back to onload. - var called = false; - let wrappedFn = function() { + let called = false; + let wrappedFn = function () { if (!document.body) { setTimeout(wrappedFn, Math.floor(10)); return; @@ -309,10 +296,10 @@ export const executeWhenDOMReady = function(fn) { } else if ((document as any).attachEvent) { // IE. (document as any).attachEvent('onreadystatechange', - function() { - if (document.readyState === 'complete') - wrappedFn(); - } + function () { + if (document.readyState === 'complete') + wrappedFn(); + } ); // fallback to onload. (window as any).attachEvent('onload', wrappedFn); @@ -345,7 +332,7 @@ export const MAX_NAME = '[MAX_NAME]'; * @param {!string} b * @return {!number} */ -export const nameCompare = function(a, b) { +export const nameCompare = function (a: string, b: string): number { if (a === b) { return 0; } else if (a === MIN_NAME || b === MAX_NAME) { @@ -353,8 +340,8 @@ export const nameCompare = function(a, b) { } else if (b === MIN_NAME || a === MAX_NAME) { return 1; } else { - var aAsInt = tryParseInt(a), - bAsInt = tryParseInt(b); + const aAsInt = tryParseInt(a), + bAsInt = tryParseInt(b); if (aAsInt !== null) { if (bAsInt !== null) { @@ -376,7 +363,7 @@ export const nameCompare = function(a, b) { * @param {!string} b * @return {!number} comparison result. 
*/ -export const stringCompare = function(a, b) { +export const stringCompare = function (a: string, b: string): number { if (a === b) { return 0; } else if (a < b) { @@ -392,7 +379,7 @@ export const stringCompare = function(a, b) { * @param {Object} obj * @return {*} */ -export const requireKey = function(key, obj) { +export const requireKey = function (key: string, obj: { [k: string]: any }): any { if (obj && (key in obj)) { return obj[key]; } else { @@ -405,19 +392,19 @@ export const requireKey = function(key, obj) { * @param {*} obj * @return {string} */ -export const ObjectToUniqueKey = function(obj) { +export const ObjectToUniqueKey = function (obj: any): string { if (typeof obj !== 'object' || obj === null) return stringify(obj); - var keys = []; - for (var k in obj) { + const keys = []; + for (let k in obj) { keys.push(k); } // Export as json, but with the keys sorted. keys.sort(); - var key = '{'; - for (var i = 0; i < keys.length; i++) { + let key = '{'; + for (let i = 0; i < keys.length; i++) { if (i !== 0) key += ','; key += stringify(keys[i]); @@ -436,15 +423,17 @@ export const ObjectToUniqueKey = function(obj) { * @param {!number} segsize The maximum number of chars in the string. * @return {Array.} The string, split into appropriately-sized chunks */ -export const splitStringBySize = function(str, segsize) { - if (str.length <= segsize) { +export const splitStringBySize = function (str: string, segsize: number): string[] { + const len = str.length; + + if (len <= segsize) { return [str]; } - var dataSegs = []; - for (var c = 0; c < str.length; c += segsize) { - if (c + segsize > str) { - dataSegs.push(str.substring(c, str.length)); + const dataSegs = []; + for (let c = 0; c < len; c += segsize) { + if (c + segsize > len) { + dataSegs.push(str.substring(c, len)); } else { dataSegs.push(str.substring(c, c + segsize)); @@ -460,9 +449,9 @@ export const splitStringBySize = function(str, segsize) { * @param {!(Object|Array)} obj The object or array to iterate over * @param {function(?, ?)} fn The function to apply */ -export const each = function(obj, fn) { +export const each = function (obj: Object | Array, fn: (v?: any, k?: any) => void) { if (Array.isArray(obj)) { - for (var i = 0; i < obj.length; ++i) { + for (let i = 0; i < obj.length; ++i) { fn(i, obj[i]); } } else { @@ -472,7 +461,7 @@ export const each = function(obj, fn) { * a single impl that does a key, value callback. So we invert * to not have to touch the `each` code points */ - forEach(obj, (key, val) => fn(val, key)); + forEach(obj, (key: any, val: any) => fn(val, key)); } }; @@ -480,11 +469,11 @@ export const each = function(obj, fn) { /** * Like goog.bind, but doesn't bother to create a closure if opt_context is null/undefined. * @param {function(*)} callback Callback function. - * @param {?Object=} opt_context Optional context to bind to. + * @param {?Object=} context Optional context to bind to. * @return {function(*)} */ -export const bindCallback = function(callback, opt_context) { - return opt_context ? callback.bind(opt_context) : callback; +export const bindCallback = function (callback: (a: any) => void, context?: object | null): Function { + return context ? 
callback.bind(context) : callback; }; @@ -495,18 +484,20 @@ export const bindCallback = function(callback, opt_context) { * @param {!number} v A double * @return {string} */ -export const doubleToIEEE754String = function(v) { +export const doubleToIEEE754String = function (v: number): string { assert(!isInvalidJSONNumber(v), 'Invalid JSON number'); // MJL - var ebits = 11, fbits = 52; - var bias = (1 << (ebits - 1)) - 1, - s, e, f, ln, - i, bits, str, bytes; + const ebits = 11, fbits = 52; + let bias = (1 << (ebits - 1)) - 1, + s, e, f, ln, + i, bits, str; // Compute sign, exponent, fraction // Skip NaN / Infinity handling --MJL. if (v === 0) { - e = 0; f = 0; s = (1 / v === -Infinity) ? 1 : 0; + e = 0; + f = 0; + s = (1 / v === -Infinity) ? 1 : 0; } else { s = v < 0; @@ -527,16 +518,22 @@ export const doubleToIEEE754String = function(v) { // Pack sign, exponent, fraction bits = []; - for (i = fbits; i; i -= 1) { bits.push(f % 2 ? 1 : 0); f = Math.floor(f / 2); } - for (i = ebits; i; i -= 1) { bits.push(e % 2 ? 1 : 0); e = Math.floor(e / 2); } + for (i = fbits; i; i -= 1) { + bits.push(f % 2 ? 1 : 0); + f = Math.floor(f / 2); + } + for (i = ebits; i; i -= 1) { + bits.push(e % 2 ? 1 : 0); + e = Math.floor(e / 2); + } bits.push(s ? 1 : 0); bits.reverse(); str = bits.join(''); // Return the data as a hex string. --MJL - var hexByteString = ''; + let hexByteString = ''; for (i = 0; i < 64; i += 8) { - var hexByte = parseInt(str.substr(i, 8), 2).toString(16); + let hexByte = parseInt(str.substr(i, 8), 2).toString(16); if (hexByte.length === 1) hexByte = '0' + hexByte; hexByteString = hexByteString + hexByte; @@ -550,12 +547,12 @@ export const doubleToIEEE754String = function(v) { * isolated environment where long-polling doesn't work). * @return {boolean} */ -export const isChromeExtensionContentScript = function() { +export const isChromeExtensionContentScript = function (): boolean { return !!(typeof window === 'object' && - window['chrome'] && - window['chrome']['extension'] && - !/^chrome/.test(window.location.href) - ); + window['chrome'] && + window['chrome']['extension'] && + !/^chrome/.test(window.location.href) + ); }; @@ -563,7 +560,7 @@ export const isChromeExtensionContentScript = function() { * Used to detect if we're in a Windows 8 Store app. 
* @return {boolean} */ -export const isWindowsStoreApp = function() { +export const isWindowsStoreApp = function (): boolean { // Check for the presence of a couple WinRT globals return typeof Windows === 'object' && typeof Windows.UI === 'object'; }; @@ -572,21 +569,21 @@ export const isWindowsStoreApp = function() { /** * Converts a server error code to a Javascript Error * @param {!string} code - * @param {!fb.api.Query} query + * @param {!Query} query * @return {Error} */ -export const errorForServerCode = function(code, query) { - var reason = 'Unknown Error'; +export const errorForServerCode = function (code: string, query: Query): Error { + let reason = 'Unknown Error'; if (code === 'too_big') { reason = 'The data requested exceeds the maximum size ' + - 'that can be accessed with a single request.'; + 'that can be accessed with a single request.'; } else if (code == 'permission_denied') { - reason = "Client doesn't have permission to access the desired data."; + reason = 'Client doesn\'t have permission to access the desired data.'; } else if (code == 'unavailable') { reason = 'The service is unavailable'; } - var error = new Error(code + ' at ' + query.path.toString() + ': ' + reason); + const error = new Error(code + ' at ' + query.path.toString() + ': ' + reason); (error as any).code = code.toUpperCase(); return error; }; @@ -605,9 +602,9 @@ export const INTEGER_REGEXP_ = new RegExp('^-?\\d{1,10}$'); * @param {!string} str * @return {?number} */ -export const tryParseInt = function(str) { +export const tryParseInt = function (str: string): number | null { if (INTEGER_REGEXP_.test(str)) { - var intVal = Number(str); + const intVal = Number(str); if (intVal >= -2147483648 && intVal <= 2147483647) { return intVal; } @@ -633,17 +630,17 @@ export const tryParseInt = function(str) { * * @param {!function()} fn The code to guard. */ -export const exceptionGuard = function(fn) { +export const exceptionGuard = function (fn: () => void) { try { fn(); } catch (e) { // Re-throw exception when it's safe. - setTimeout(function() { + setTimeout(function () { // It used to be that "throw e" would result in a good console error with // relevant context, but as of Chrome 39, you just get the firebase.js // file/line number where we re-throw it, which is useless. So we log // e.stack explicitly. - var stack = e.stack || ''; + const stack = e.stack || ''; warn('Exception was thrown by user callback.', stack); throw e; }, Math.floor(0)); @@ -656,15 +653,13 @@ export const exceptionGuard = function(fn) { * 1. Turns into a no-op if opt_callback is null or undefined. * 2. Wraps the call inside exceptionGuard to prevent exceptions from breaking our state. * - * @param {?Function=} opt_callback Optional onComplete callback. + * @param {?Function=} callback Optional onComplete callback. * @param {...*} var_args Arbitrary args to be passed to opt_onComplete */ -export const callUserCallback = function(opt_callback, var_args) { - if (typeof opt_callback === 'function') { - var args = Array.prototype.slice.call(arguments, 1); - var newArgs = args.slice(); - exceptionGuard(function() { - opt_callback.apply(null, newArgs); +export const callUserCallback = function (callback?: Function | null, ...var_args: any[]) { + if (typeof callback === 'function') { + exceptionGuard(function () { + callback(...var_args); }); } }; @@ -672,16 +667,16 @@ export const callUserCallback = function(opt_callback, var_args) { /** * @return {boolean} true if we think we're currently being crawled. 
-*/ -export const beingCrawled = function() { - var userAgent = (typeof window === 'object' && window['navigator'] && window['navigator']['userAgent']) || ''; + */ +export const beingCrawled = function (): boolean { + const userAgent = (typeof window === 'object' && window['navigator'] && window['navigator']['userAgent']) || ''; // For now we whitelist the most popular crawlers. We should refine this to be the set of crawlers we // believe to support JavaScript/AJAX rendering. // NOTE: Google Webmaster Tools doesn't really belong, but their "This is how a visitor to your website // would have seen the page" is flaky if we don't treat it as a crawler. return userAgent.search(/googlebot|google webmaster tools|bingbot|yahoo! slurp|baiduspider|yandexbot|duckduckbot/i) >= - 0; + 0; }; /** @@ -691,7 +686,7 @@ export const beingCrawled = function() { * @param {string} name * @param {!function(): *} fnGet */ -export const exportPropGetter = function(object, name, fnGet) { +export const exportPropGetter = function (object: Object, name: string, fnGet: () => any) { Object.defineProperty(object, name, {get: fnGet}); }; @@ -700,14 +695,14 @@ export const exportPropGetter = function(object, name, fnGet) { * * It is removed with clearTimeout() as normal. * - * @param fn {Function} Function to run. - * @param time {number} Milliseconds to wait before running. + * @param {Function} fn Function to run. + * @param {number} time Milliseconds to wait before running. * @return {number|Object} The setTimeout() return value. */ -export const setTimeoutNonBlocking = function(fn, time) { - var timeout = setTimeout(fn, time); - if (typeof timeout === 'object' && timeout['unref']) { - timeout['unref'](); +export const setTimeoutNonBlocking = function (fn: Function, time: number): number | Object { + const timeout: number | Object = setTimeout(fn, time); + if (typeof timeout === 'object' && (timeout as any)['unref']) { + (timeout as any)['unref'](); } return timeout; }; diff --git a/src/database/core/util/validation.ts b/src/database/core/util/validation.ts index 6b8380b170c..9134158e650 100644 --- a/src/database/core/util/validation.ts +++ b/src/database/core/util/validation.ts @@ -14,11 +14,12 @@ * limitations under the License. 
*/ -import { Path, ValidationPath } from "./Path"; -import { forEach, contains, safeGet } from "../../../utils/obj"; -import { isInvalidJSONNumber } from "./util"; -import { errorPrefix as errorPrefixFxn } from "../../../utils/validation"; -import { stringLength } from "../../../utils/utf8"; +import { Path, ValidationPath } from './Path'; +import { forEach, contains, safeGet } from '../../../utils/obj'; +import { isInvalidJSONNumber } from './util'; +import { errorPrefix as errorPrefixFxn } from '../../../utils/validation'; +import { stringLength } from '../../../utils/utf8'; +import { RepoInfo } from '../RepoInfo'; /** * True for invalid Firebase keys @@ -47,43 +48,43 @@ export const MAX_LEAF_SIZE_ = 10 * 1024 * 1024; * @param {*} key * @return {boolean} */ -export const isValidKey = function(key) { +export const isValidKey = function (key: any): boolean { return typeof key === 'string' && key.length !== 0 && - !INVALID_KEY_REGEX_.test(key); -} + !INVALID_KEY_REGEX_.test(key); +}; /** * @param {string} pathString * @return {boolean} */ -export const isValidPathString = function(pathString) { +export const isValidPathString = function (pathString: string): boolean { return typeof pathString === 'string' && pathString.length !== 0 && - !INVALID_PATH_REGEX_.test(pathString); -} + !INVALID_PATH_REGEX_.test(pathString); +}; /** * @param {string} pathString * @return {boolean} */ -export const isValidRootPathString = function(pathString) { +export const isValidRootPathString = function (pathString: string): boolean { if (pathString) { // Allow '/.info/' at the beginning. pathString = pathString.replace(/^\/*\.info(\/|$)/, '/'); } return isValidPathString(pathString); -} +}; /** * @param {*} priority * @return {boolean} */ -export const isValidPriority = function(priority) { +export const isValidPriority = function (priority: any): boolean { return priority === null || - typeof priority === 'string' || - (typeof priority === 'number' && !isInvalidJSONNumber(priority)) || - ((priority && typeof priority === 'object') && contains(priority, '.sv')); -} + typeof priority === 'string' || + (typeof priority === 'number' && !isInvalidJSONNumber(priority)) || + ((priority && typeof priority === 'object') && contains(priority, '.sv')); +}; /** * Pre-validate a datum passed as an argument to Firebase function. @@ -94,34 +95,35 @@ export const isValidPriority = function(priority) { * @param {!Path} path * @param {boolean} optional */ -export const validateFirebaseDataArg = function(fnName, argumentNumber, data, path, optional) { +export const validateFirebaseDataArg = function (fnName: string, argumentNumber: number, + data: any, path: Path, optional: boolean) { if (optional && data === undefined) return; validateFirebaseData( - errorPrefixFxn(fnName, argumentNumber, optional), - data, path + errorPrefixFxn(fnName, argumentNumber, optional), + data, path ); -} +}; /** * Validate a data object client-side before sending to server. * * @param {string} errorPrefix * @param {*} data - * @param {!Path|!ValidationPath} path + * @param {!Path|!ValidationPath} path_ */ -export const validateFirebaseData = function(errorPrefix, data, path) { - if (path instanceof Path) { - path = new ValidationPath(path, errorPrefix); - } +export const validateFirebaseData = function (errorPrefix: string, data: any, path_: Path | ValidationPath) { + const path = path_ instanceof Path + ? 
new ValidationPath(path_, errorPrefix) + : path_; if (data === undefined) { throw new Error(errorPrefix + 'contains undefined ' + path.toErrorString()); } if (typeof data === 'function') { throw new Error(errorPrefix + 'contains a function ' + path.toErrorString() + - ' with contents = ' + data.toString()); + ' with contents = ' + data.toString()); } if (isInvalidJSONNumber(data)) { throw new Error(errorPrefix + 'contains ' + data.toString() + ' ' + path.toErrorString()); @@ -129,19 +131,19 @@ export const validateFirebaseData = function(errorPrefix, data, path) { // Check max leaf size, but try to avoid the utf8 conversion if we can. if (typeof data === 'string' && - data.length > MAX_LEAF_SIZE_ / 3 && - stringLength(data) > MAX_LEAF_SIZE_) { + data.length > MAX_LEAF_SIZE_ / 3 && + stringLength(data) > MAX_LEAF_SIZE_) { throw new Error(errorPrefix + 'contains a string greater than ' + - MAX_LEAF_SIZE_ + - ' utf8 bytes ' + path.toErrorString() + - " ('" + data.substring(0, 50) + "...')"); + MAX_LEAF_SIZE_ + + ' utf8 bytes ' + path.toErrorString() + + ' (\'' + data.substring(0, 50) + '...\')'); } // TODO = Perf = Consider combining the recursive validation of keys into NodeFromJSON // to save extra walking of large objects. if ((data && typeof data === 'object')) { - var hasDotValue = false, hasActualChild = false; - forEach(data, function(key, value) { + let hasDotValue = false, hasActualChild = false; + forEach(data, function (key: string, value: any) { if (key === '.value') { hasDotValue = true; } @@ -149,9 +151,9 @@ export const validateFirebaseData = function(errorPrefix, data, path) { hasActualChild = true; if (!isValidKey(key)) { throw new Error(errorPrefix + ' contains an invalid key (' + key + ') ' + - path.toErrorString() + - '. Keys must be non-empty strings ' + - 'and can\'t contain ".", "#", "$", "/", "[", or "]"'); + path.toErrorString() + + '. Keys must be non-empty strings ' + + 'and can\'t contain ".", "#", "$", "/", "[", or "]"'); } } @@ -162,11 +164,11 @@ export const validateFirebaseData = function(errorPrefix, data, path) { if (hasDotValue && hasActualChild) { throw new Error(errorPrefix + ' contains ".value" child ' + - path.toErrorString() + - ' in addition to actual children.'); + path.toErrorString() + + ' in addition to actual children.'); } } -} +}; /** * Pre-validate paths passed in the firebase function. @@ -174,19 +176,19 @@ export const validateFirebaseData = function(errorPrefix, data, path) { * @param {string} errorPrefix * @param {Array} mergePaths */ -export const validateFirebaseMergePaths = function(errorPrefix, mergePaths) { - var i, curPath; +export const validateFirebaseMergePaths = function (errorPrefix: string, mergePaths: Path[]) { + let i, curPath; for (i = 0; i < mergePaths.length; i++) { curPath = mergePaths[i]; - var keys = curPath.slice(); - for (var j = 0; j < keys.length; j++) { + const keys = curPath.slice(); + for (let j = 0; j < keys.length; j++) { if (keys[j] === '.priority' && j === (keys.length - 1)) { // .priority is OK } else if (!isValidKey(keys[j])) { throw new Error(errorPrefix + 'contains an invalid key (' + keys[j] + ') in path ' + - curPath.toString() + - '. Keys must be non-empty strings ' + - 'and can\'t contain ".", "#", "$", "/", "[", or "]"'); + curPath.toString() + + '. 
Keys must be non-empty strings ' + + 'and can\'t contain ".", "#", "$", "/", "[", or "]"'); } } } @@ -195,16 +197,16 @@ export const validateFirebaseMergePaths = function(errorPrefix, mergePaths) { // We rely on the property that sorting guarantees that ancestors come // right before descendants. mergePaths.sort(Path.comparePaths); - var prevPath = null; + let prevPath: Path | null = null; for (i = 0; i < mergePaths.length; i++) { curPath = mergePaths[i]; if (prevPath !== null && prevPath.contains(curPath)) { throw new Error(errorPrefix + 'contains a path ' + prevPath.toString() + - ' that is ancestor of another path ' + curPath.toString()); + ' that is ancestor of another path ' + curPath.toString()); } prevPath = curPath; } -} +}; /** * pre-validate an object passed as an argument to firebase function ( @@ -216,50 +218,52 @@ export const validateFirebaseMergePaths = function(errorPrefix, mergePaths) { * @param {!Path} path * @param {boolean} optional */ -export const validateFirebaseMergeDataArg = function(fnName, argumentNumber, data, path, optional) { +export const validateFirebaseMergeDataArg = function (fnName: string, argumentNumber: number, + data: any, path: Path, optional: boolean) { if (optional && data === undefined) return; - var errorPrefix = errorPrefixFxn(fnName, argumentNumber, optional); + const errorPrefix = errorPrefixFxn(fnName, argumentNumber, optional); if (!(data && typeof data === 'object') || Array.isArray(data)) { throw new Error(errorPrefix + ' must be an object containing the children to replace.'); } - var mergePaths = []; - forEach(data, function(key, value) { - var curPath = new Path(key); + const mergePaths: Path[] = []; + forEach(data, function (key: string, value: any) { + const curPath = new Path(key); validateFirebaseData(errorPrefix, value, path.child(curPath)); if (curPath.getBack() === '.priority') { if (!isValidPriority(value)) { throw new Error( - errorPrefix + 'contains an invalid value for \'' + curPath.toString() + '\', which must be a valid ' + - 'Firebase priority (a string, finite number, server value, or null).'); + errorPrefix + 'contains an invalid value for \'' + curPath.toString() + '\', which must be a valid ' + + 'Firebase priority (a string, finite number, server value, or null).'); } } mergePaths.push(curPath); }); validateFirebaseMergePaths(errorPrefix, mergePaths); -} +}; -export const validatePriority = function(fnName, argumentNumber, priority, optional) { +export const validatePriority = function (fnName: string, argumentNumber: number, priority: any, optional: boolean) { if (optional && priority === undefined) return; if (isInvalidJSONNumber(priority)) throw new Error( - errorPrefixFxn(fnName, argumentNumber, optional) + - 'is ' + priority.toString() + - ', but must be a valid Firebase priority (a string, finite number, ' + - 'server value, or null).'); + errorPrefixFxn(fnName, argumentNumber, optional) + + 'is ' + priority.toString() + + ', but must be a valid Firebase priority (a string, finite number, ' + + 'server value, or null).'); // Special case to allow importing data with a .sv. 
if (!isValidPriority(priority)) throw new Error( - errorPrefixFxn(fnName, argumentNumber, optional) + - 'must be a valid Firebase priority ' + - '(a string, finite number, server value, or null).'); -} + errorPrefixFxn(fnName, argumentNumber, optional) + + 'must be a valid Firebase priority ' + + '(a string, finite number, server value, or null).'); +}; -export const validateEventType = function(fnName, argumentNumber, eventType, optional) { +export const validateEventType = function (fnName: string, argumentNumber: number, + eventType: string, optional: boolean) { if (optional && eventType === undefined) return; @@ -272,125 +276,129 @@ export const validateEventType = function(fnName, argumentNumber, eventType, opt break; default: throw new Error( - errorPrefixFxn(fnName, argumentNumber, optional) + - 'must be a valid event type = "value", "child_added", "child_removed", ' + - '"child_changed", or "child_moved".'); + errorPrefixFxn(fnName, argumentNumber, optional) + + 'must be a valid event type = "value", "child_added", "child_removed", ' + + '"child_changed", or "child_moved".'); } -} +}; -export const validateKey = function(fnName, argumentNumber, key, optional) { +export const validateKey = function (fnName: string, argumentNumber: number, + key: string, optional: boolean) { if (optional && key === undefined) return; if (!isValidKey(key)) throw new Error(errorPrefixFxn(fnName, argumentNumber, optional) + - 'was an invalid key = "' + key + - '". Firebase keys must be non-empty strings and ' + - 'can\'t contain ".", "#", "$", "/", "[", or "]").'); -} + 'was an invalid key = "' + key + + '". Firebase keys must be non-empty strings and ' + + 'can\'t contain ".", "#", "$", "/", "[", or "]").'); +}; -export const validatePathString = function(fnName, argumentNumber, pathString, optional) { +export const validatePathString = function (fnName: string, argumentNumber: number, + pathString: string, optional: boolean) { if (optional && pathString === undefined) return; if (!isValidPathString(pathString)) throw new Error(errorPrefixFxn(fnName, argumentNumber, optional) + - 'was an invalid path = "' + - pathString + - '". Paths must be non-empty strings and ' + - 'can\'t contain ".", "#", "$", "[", or "]"'); -} - -export const validateRootPathString = function(fnName, argumentNumber, pathString, optional) { + 'was an invalid path = "' + + pathString + + '". Paths must be non-empty strings and ' + + 'can\'t contain ".", "#", "$", "[", or "]"'); +}; + +export const validateRootPathString = function (fnName: string, argumentNumber: number, + pathString: string, optional: boolean) { if (pathString) { // Allow '/.info/' at the beginning. pathString = pathString.replace(/^\/*\.info(\/|$)/, '/'); } validatePathString(fnName, argumentNumber, pathString, optional); -} +}; -export const validateWritablePath = function(fnName, path) { +export const validateWritablePath = function (fnName: string, path: Path) { if (path.getFront() === '.info') { throw new Error(fnName + ' failed = Can\'t modify data under /.info/'); } -} +}; -export const validateUrl = function(fnName, argumentNumber, parsedUrl) { +export const validateUrl = function (fnName: string, argumentNumber: number, + parsedUrl: { repoInfo: RepoInfo, path: Path }) { // TODO = Validate server better. 
- var pathString = parsedUrl.path.toString(); + const pathString = parsedUrl.path.toString(); if (!(typeof parsedUrl.repoInfo.host === 'string') || parsedUrl.repoInfo.host.length === 0 || - !isValidKey(parsedUrl.repoInfo.namespace) || - (pathString.length !== 0 && !isValidRootPathString(pathString))) { + !isValidKey(parsedUrl.repoInfo.namespace) || + (pathString.length !== 0 && !isValidRootPathString(pathString))) { throw new Error(errorPrefixFxn(fnName, argumentNumber, false) + - 'must be a valid firebase URL and ' + - 'the path can\'t contain ".", "#", "$", "[", or "]".'); + 'must be a valid firebase URL and ' + + 'the path can\'t contain ".", "#", "$", "[", or "]".'); } -} +}; -export const validateCredential = function(fnName, argumentNumber, cred, optional) { +export const validateCredential = function (fnName: string, argumentNumber: number, cred: any, optional: boolean) { if (optional && cred === undefined) return; if (!(typeof cred === 'string')) throw new Error( - errorPrefixFxn(fnName, argumentNumber, optional) + - 'must be a valid credential (a string).'); -} + errorPrefixFxn(fnName, argumentNumber, optional) + + 'must be a valid credential (a string).'); +}; -export const validateBoolean = function(fnName, argumentNumber, bool, optional) { +export const validateBoolean = function (fnName: string, argumentNumber: number, bool: any, optional: boolean) { if (optional && bool === undefined) return; if (typeof bool !== 'boolean') throw new Error(errorPrefixFxn(fnName, argumentNumber, optional) + - 'must be a boolean.'); -} + 'must be a boolean.'); +}; -export const validateString = function(fnName, argumentNumber, string, optional) { +export const validateString = function (fnName: string, argumentNumber: number, string: any, optional: boolean) { if (optional && string === undefined) return; if (!(typeof string === 'string')) { throw new Error( - errorPrefixFxn(fnName, argumentNumber, optional) + - 'must be a valid string.'); + errorPrefixFxn(fnName, argumentNumber, optional) + + 'must be a valid string.'); } -} +}; -export const validateObject = function(fnName, argumentNumber, obj, optional) { +export const validateObject = function (fnName: string, argumentNumber: number, obj: any, optional: boolean) { if (optional && obj === undefined) return; if (!(obj && typeof obj === 'object') || obj === null) { throw new Error( - errorPrefixFxn(fnName, argumentNumber, optional) + - 'must be a valid object.'); + errorPrefixFxn(fnName, argumentNumber, optional) + + 'must be a valid object.'); } -} +}; -export const validateObjectContainsKey = function(fnName, argumentNumber, obj, key, optional, opt_type) { - var objectContainsKey = ((obj && typeof obj === 'object') && contains(obj, key)); +export const validateObjectContainsKey = function (fnName: string, argumentNumber: number, obj: any, key: string, optional: boolean, opt_type?: string) { + const objectContainsKey = ((obj && typeof obj === 'object') && contains(obj, key)); if (!objectContainsKey) { if (optional) { return; } else { throw new Error( - errorPrefixFxn(fnName, argumentNumber, optional) + - 'must contain the key "' + key + '"'); + errorPrefixFxn(fnName, argumentNumber, optional) + + 'must contain the key "' + key + '"'); } } if (opt_type) { - var val = safeGet(obj, key); + const val = safeGet(obj, key); if ((opt_type === 'number' && !(typeof val === 'number')) || - (opt_type === 'string' && !(typeof val === 'string')) || - (opt_type === 'boolean' && !(typeof val === 'boolean')) || - (opt_type === 'function' && !(typeof val === 
'function')) || - (opt_type === 'object' && !(typeof val === 'object') && val)) { + (opt_type === 'string' && !(typeof val === 'string')) || + (opt_type === 'boolean' && !(typeof val === 'boolean')) || + (opt_type === 'function' && !(typeof val === 'function')) || + (opt_type === 'object' && !(typeof val === 'object') && val)) { if (optional) { throw new Error(errorPrefixFxn(fnName, argumentNumber, optional) + - 'contains invalid value for key "' + key + '" (must be of type "' + opt_type + '")'); + 'contains invalid value for key "' + key + '" (must be of type "' + opt_type + '")'); } else { throw new Error(errorPrefixFxn(fnName, argumentNumber, optional) + - 'must contain the key "' + key + '" with type "' + opt_type + '"'); + 'must contain the key "' + key + '" with type "' + opt_type + '"'); } } } -} +}; diff --git a/src/database/core/view/ChildChangeAccumulator.ts b/src/database/core/view/ChildChangeAccumulator.ts index be472a1006f..43238489069 100644 --- a/src/database/core/view/ChildChangeAccumulator.ts +++ b/src/database/core/view/ChildChangeAccumulator.ts @@ -15,14 +15,14 @@ */ import { getValues, safeGet } from '../../../utils/obj'; -import { Change } from "./Change"; -import { assert, assertionError } from "../../../utils/assert"; +import { Change } from './Change'; +import { assert, assertionError } from '../../../utils/assert'; /** * @constructor */ export class ChildChangeAccumulator { - changeMap_ = {}; + private changeMap_: { [k: string]: Change } = {}; /** * @param {!Change} change @@ -31,10 +31,10 @@ export class ChildChangeAccumulator { const type = change.type; const childKey = /** @type {!string} */ (change.childName); assert(type == Change.CHILD_ADDED || - type == Change.CHILD_CHANGED || - type == Change.CHILD_REMOVED, 'Only child changes supported for tracking'); + type == Change.CHILD_CHANGED || + type == Change.CHILD_REMOVED, 'Only child changes supported for tracking'); assert(childKey !== '.priority', 'Only non-priority child changes can be tracked.'); - const oldChange = safeGet(this.changeMap_, childKey); + const oldChange = safeGet(this.changeMap_, childKey) as Change; if (oldChange) { const oldType = oldChange.type; if (type == Change.CHILD_ADDED && oldType == Change.CHILD_REMOVED) { @@ -42,20 +42,18 @@ export class ChildChangeAccumulator { } else if (type == Change.CHILD_REMOVED && oldType == Change.CHILD_ADDED) { delete this.changeMap_[childKey]; } else if (type == Change.CHILD_REMOVED && oldType == Change.CHILD_CHANGED) { - this.changeMap_[childKey] = Change.childRemovedChange(childKey, - /** @type {!Node} */ (oldChange.oldSnap)); + this.changeMap_[childKey] = Change.childRemovedChange(childKey, oldChange.oldSnap); } else if (type == Change.CHILD_CHANGED && oldType == Change.CHILD_ADDED) { this.changeMap_[childKey] = Change.childAddedChange(childKey, change.snapshotNode); } else if (type == Change.CHILD_CHANGED && oldType == Change.CHILD_CHANGED) { - this.changeMap_[childKey] = Change.childChangedChange(childKey, change.snapshotNode, - /** @type {!Node} */ (oldChange.oldSnap)); + this.changeMap_[childKey] = Change.childChangedChange(childKey, change.snapshotNode, oldChange.oldSnap); } else { throw assertionError('Illegal combination of changes: ' + change + ' occurred after ' + oldChange); } } else { this.changeMap_[childKey] = change; } - }; + } /** @@ -63,7 +61,7 @@ export class ChildChangeAccumulator { */ getChanges(): Change[] { return getValues(this.changeMap_); - }; + } } diff --git a/src/database/core/view/CompleteChildSource.ts 
b/src/database/core/view/CompleteChildSource.ts index 0134d79088d..a734c7718ae 100644 --- a/src/database/core/view/CompleteChildSource.ts +++ b/src/database/core/view/CompleteChildSource.ts @@ -57,14 +57,14 @@ export class NoCompleteChildSource_ implements CompleteChildSource { /** * @inheritDoc */ - getCompleteChild() { + getCompleteChild(childKey?: string): Node | null { return null; } /** * @inheritDoc */ - getChildAfterChild() { + getChildAfterChild(index?: Index, child?: NamedNode, reverse?: boolean): NamedNode | null { return null; } } @@ -99,7 +99,7 @@ export class WriteTreeCompleteChildSource implements CompleteChildSource { /** * @inheritDoc */ - getCompleteChild(childKey) { + getCompleteChild(childKey: string): Node | null { const node = this.viewCache_.getEventCache(); if (node.isCompleteForChild(childKey)) { return node.getNode().getImmediateChild(childKey); @@ -113,7 +113,7 @@ export class WriteTreeCompleteChildSource implements CompleteChildSource { /** * @inheritDoc */ - getChildAfterChild(index, child, reverse) { + getChildAfterChild(index: Index, child: NamedNode, reverse: boolean): NamedNode | null { const completeServerData = this.optCompleteServerCache_ != null ? this.optCompleteServerCache_ : this.viewCache_.getCompleteServerSnap(); const nodes = this.writes_.calcIndexedSlice(completeServerData, child, 1, reverse, index); diff --git a/src/database/core/view/Event.ts b/src/database/core/view/Event.ts index c2bad80cd76..b9be0a721b2 100644 --- a/src/database/core/view/Event.ts +++ b/src/database/core/view/Event.ts @@ -127,7 +127,7 @@ export class CancelEvent implements Event { /** * @inheritDoc */ - getEventRunner(): () => any { + getEventRunner(): () => void { return this.eventRegistration.getEventRunner(this); } diff --git a/src/database/core/view/EventGenerator.ts b/src/database/core/view/EventGenerator.ts index b978f798f52..f05e1395f2a 100644 --- a/src/database/core/view/EventGenerator.ts +++ b/src/database/core/view/EventGenerator.ts @@ -15,8 +15,8 @@ */ import { NamedNode, Node } from '../snap/Node'; -import { Change } from "./Change"; -import { assertionError } from "../../../utils/assert"; +import { Change } from './Change'; +import { assertionError } from '../../../utils/assert'; import { Query } from '../../api/Query'; import { Index } from '../snap/indexes/Index'; import { EventRegistration } from './EventRegistration'; @@ -27,12 +27,15 @@ import { Event } from './Event'; * CacheDiffer into actual events (Event) that can be raised. See generateEventsForChanges() * for details. 
* - * @param {!Query} query * @constructor */ export class EventGenerator { private index_: Index; + /** + * + * @param {!Query} query_ + */ constructor(private query_: Query) { /** * @private @@ -56,13 +59,13 @@ export class EventGenerator { * @return {!Array.} */ generateEventsForChanges(changes: Change[], eventCache: Node, eventRegistrations: EventRegistration[]): Event[] { - const events = []; - const moves = []; + const events: Event[] = []; + const moves: Change[] = []; changes.forEach((change) => { if (change.type === Change.CHILD_CHANGED && - this.index_.indexedValueChanged(/** @type {!Node} */ (change.oldSnap), change.snapshotNode)) { - moves.push(Change.childMovedChange(/** @type {!string} */ (change.childName), change.snapshotNode)); + this.index_.indexedValueChanged(change.oldSnap as Node, change.snapshotNode)) { + moves.push(Change.childMovedChange(change.childName as string, change.snapshotNode)); } }); diff --git a/src/database/core/view/EventRegistration.ts b/src/database/core/view/EventRegistration.ts index 75c151197a0..37abc02e6a5 100644 --- a/src/database/core/view/EventRegistration.ts +++ b/src/database/core/view/EventRegistration.ts @@ -50,7 +50,7 @@ export interface EventRegistration { * @param {!Event} eventData * @return {function()} */ - getEventRunner(eventData: Event): () => any; + getEventRunner(eventData: Event): () => void; /** * @param {!Error} error @@ -87,8 +87,8 @@ export class ValueEventRegistration implements EventRegistration { * @param {?function(Error)} cancelCallback_ * @param {?Object} context_ */ - constructor(private callback_: ((d: DataSnapshot) => any) | null, - private cancelCallback_: ((e: Error) => any) | null, + constructor(private callback_: ((d: DataSnapshot) => void) | null, + private cancelCallback_: ((e: Error) => void) | null, private context_: Object | null) { } @@ -117,12 +117,12 @@ export class ValueEventRegistration implements EventRegistration { const cancelCB = this.cancelCallback_; return function () { // We know that error exists, we checked above that this is a cancel event - cancelCB.call(ctx, (eventData).error); + cancelCB.call(ctx, (eventData as CancelEvent).error); }; } else { const cb = this.callback_; return function () { - cb.call(ctx, (eventData).snapshot); + cb.call(ctx, (eventData as DataEvent).snapshot); }; } } @@ -175,15 +175,15 @@ export class ChildEventRegistration implements EventRegistration { * @param {?function(Error)} cancelCallback_ * @param {Object=} context_ */ - constructor(private callbacks_: ({ [k: string]: (d: DataSnapshot, s?: string | null) => any }) | null, - private cancelCallback_: ((e: Error) => any) | null, - private context_: Object) { + constructor(private callbacks_: ({ [k: string]: (d: DataSnapshot, s?: string | null) => void }) | null, + private cancelCallback_: ((e: Error) => void) | null, + private context_?: Object) { } /** * @inheritDoc */ - respondsTo(eventType): boolean { + respondsTo(eventType: string): boolean { let eventToCheck = eventType === 'children_added' ? 'child_added' : eventType; eventToCheck = eventToCheck === 'children_removed' ? 
'child_removed' : eventToCheck; return contains(this.callbacks_, eventToCheck); @@ -207,7 +207,7 @@ export class ChildEventRegistration implements EventRegistration { assert(change.childName != null, 'Child events should have a childName.'); const ref = query.getRef().child(/** @type {!string} */ (change.childName)); const index = query.getQueryParams().getIndex(); - return new DataEvent(change.type, this, new DataSnapshot(change.snapshotNode, ref, index), + return new DataEvent(change.type as any, this, new DataSnapshot(change.snapshotNode, ref, index as any), change.prevName); } @@ -221,12 +221,12 @@ export class ChildEventRegistration implements EventRegistration { const cancelCB = this.cancelCallback_; return function () { // We know that error exists, we checked above that this is a cancel event - cancelCB.call(ctx, (eventData).error); + cancelCB.call(ctx, (eventData as CancelEvent).error); }; } else { - const cb = this.callbacks_[(eventData).eventType]; + const cb = this.callbacks_[(eventData as DataEvent).eventType]; return function () { - cb.call(ctx, (eventData).snapshot, (eventData).prevName); + cb.call(ctx, (eventData as DataEvent).snapshot, (eventData as DataEvent).prevName); } } } diff --git a/src/database/core/view/QueryParams.ts b/src/database/core/view/QueryParams.ts index cae4581ad43..f17fa19efc9 100644 --- a/src/database/core/view/QueryParams.ts +++ b/src/database/core/view/QueryParams.ts @@ -14,19 +14,21 @@ * limitations under the License. */ -import { assert } from "../../../utils/assert"; -import { +import { assert } from '../../../utils/assert'; +import { MIN_NAME, MAX_NAME -} from "../util/util"; -import { KEY_INDEX } from "../snap/indexes/KeyIndex"; -import { PRIORITY_INDEX } from "../snap/indexes/PriorityIndex"; -import { VALUE_INDEX } from "../snap/indexes/ValueIndex"; -import { PathIndex } from "../snap/indexes/PathIndex"; -import { IndexedFilter } from "./filter/IndexedFilter"; -import { LimitedFilter } from "./filter/LimitedFilter"; -import { RangedFilter } from "./filter/RangedFilter"; -import { stringify } from "../../../utils/json"; +} from '../util/util'; +import { KEY_INDEX } from '../snap/indexes/KeyIndex'; +import { PRIORITY_INDEX } from '../snap/indexes/PriorityIndex'; +import { VALUE_INDEX } from '../snap/indexes/ValueIndex'; +import { PathIndex } from '../snap/indexes/PathIndex'; +import { IndexedFilter } from './filter/IndexedFilter'; +import { LimitedFilter } from './filter/LimitedFilter'; +import { RangedFilter } from './filter/RangedFilter'; +import { stringify } from '../../../utils/json'; +import { NodeFilter } from './filter/NodeFilter'; +import { Index } from '../snap/indexes/Index'; /** * This class is an immutable-from-the-public-api struct containing a set of query parameters defining a @@ -35,43 +37,28 @@ import { stringify } from "../../../utils/json"; * @constructor */ export class QueryParams { - endNameSet_ - endSet_ - index_ - indexEndName_ - indexEndValue_ - indexStartName_ - indexStartValue_ - limit_ - limitSet_ - startEndSet_ - startNameSet_ - startSet_ - viewFrom_ - - constructor() { - this.limitSet_ = false; - this.startSet_ = false; - this.startNameSet_ = false; - this.endSet_ = false; - this.endNameSet_ = false; - - this.limit_ = 0; - this.viewFrom_ = ''; - this.indexStartValue_ = null; - this.indexStartName_ = ''; - this.indexEndValue_ = null; - this.indexEndName_ = ''; - - this.index_ = PRIORITY_INDEX; - }; + private limitSet_ = false; + private startSet_ = false; + private startNameSet_ = false; + private endSet_ = false; + 
private endNameSet_ = false; + + private limit_ = 0; + private viewFrom_ = ''; + private indexStartValue_: any | null = null; + private indexStartName_ = ''; + private indexEndValue_: any | null = null; + private indexEndName_ = ''; + + private index_: Index = PRIORITY_INDEX; + /** * Wire Protocol Constants * @const * @enum {string} * @private */ - private static WIRE_PROTOCOL_CONSTANTS_ = { + private static readonly WIRE_PROTOCOL_CONSTANTS_ = { INDEX_START_VALUE: 'sp', INDEX_START_NAME: 'sn', INDEX_END_VALUE: 'ep', @@ -89,7 +76,7 @@ * @enum {string} * @private */ - private static REST_QUERY_CONSTANTS_ = { + private static readonly REST_QUERY_CONSTANTS_ = { ORDER_BY: 'orderBy', PRIORITY_INDEX: '$priority', VALUE_INDEX: '$value', @@ -105,19 +92,19 @@ * @type {!QueryParams} * @const */ - static DEFAULT = new QueryParams(); + static readonly DEFAULT = new QueryParams(); /** * @return {boolean} */ - hasStart() { + hasStart(): boolean { return this.startSet_; - }; + } /** * @return {boolean} True if it would return from left. */ - isViewFromLeft() { + isViewFromLeft(): boolean { if (this.viewFrom_ === '') { // limit(), rather than limitToFirst or limitToLast was called. // This means that only one of startSet_ and endSet_ is true. Use them @@ -127,97 +114,97 @@ } else { return this.viewFrom_ === QueryParams.WIRE_PROTOCOL_CONSTANTS_.VIEW_FROM_LEFT; } - }; + } /** * Only valid to call if hasStart() returns true * @return {*} */ - getIndexStartValue() { + getIndexStartValue(): any { assert(this.startSet_, 'Only valid if start has been set'); return this.indexStartValue_; - }; + } /** * Only valid to call if hasStart() returns true. * Returns the starting key name for the range defined by these query parameters * @return {!string} */ - getIndexStartName() { + getIndexStartName(): string { assert(this.startSet_, 'Only valid if start has been set'); if (this.startNameSet_) { return this.indexStartName_; } else { return MIN_NAME; } - }; + } /** * @return {boolean} */ - hasEnd() { + hasEnd(): boolean { return this.endSet_; - }; + } /** * Only valid to call if hasEnd() returns true. * @return {*} */ - getIndexEndValue() { + getIndexEndValue(): any { assert(this.endSet_, 'Only valid if end has been set'); return this.indexEndValue_; - }; + } /** * Only valid to call if hasEnd() returns true.
* Returns the end key name for the range defined by these query parameters * @return {!string} */ - getIndexEndName() { + getIndexEndName(): string { assert(this.endSet_, 'Only valid if end has been set'); if (this.endNameSet_) { return this.indexEndName_; } else { return MAX_NAME; } - }; + } /** * @return {boolean} */ - hasLimit() { + hasLimit(): boolean { return this.limitSet_; - }; + } /** * @return {boolean} True if a limit has been set and it has been explicitly anchored */ - hasAnchoredLimit() { + hasAnchoredLimit(): boolean { return this.limitSet_ && this.viewFrom_ !== ''; - }; + } /** * Only valid to call if hasLimit() returns true * @return {!number} */ - getLimit() { + getLimit(): number { assert(this.limitSet_, 'Only valid if limit has been set'); return this.limit_; - }; + } /** * @return {!Index} */ - getIndex() { + getIndex(): Index { return this.index_; - }; + } /** * @return {!QueryParams} * @private */ - copy_() { - var copy = new QueryParams(); + private copy_(): QueryParams { + const copy = new QueryParams(); copy.limitSet_ = this.limitSet_; copy.limit_ = this.limit_; copy.startSet_ = this.startSet_; @@ -231,38 +218,38 @@ export class QueryParams { copy.index_ = this.index_; copy.viewFrom_ = this.viewFrom_; return copy; - }; + } /** * @param {!number} newLimit * @return {!QueryParams} */ - limit(newLimit) { - var newParams = this.copy_(); + limit(newLimit: number): QueryParams { + const newParams = this.copy_(); newParams.limitSet_ = true; newParams.limit_ = newLimit; newParams.viewFrom_ = ''; return newParams; - }; + } /** * @param {!number} newLimit * @return {!QueryParams} */ - limitToFirst(newLimit) { - var newParams = this.copy_(); + limitToFirst(newLimit: number): QueryParams { + const newParams = this.copy_(); newParams.limitSet_ = true; newParams.limit_ = newLimit; newParams.viewFrom_ = QueryParams.WIRE_PROTOCOL_CONSTANTS_.VIEW_FROM_LEFT; return newParams; - }; + } /** * @param {!number} newLimit * @return {!QueryParams} */ - limitToLast(newLimit) { - var newParams = this.copy_(); + limitToLast(newLimit: number): QueryParams { + const newParams = this.copy_(); newParams.limitSet_ = true; newParams.limit_ = newLimit; newParams.viewFrom_ = QueryParams.WIRE_PROTOCOL_CONSTANTS_.VIEW_FROM_RIGHT; @@ -274,8 +261,8 @@ export class QueryParams { * @param {?string=} key * @return {!QueryParams} */ - startAt(indexValue, key) { - var newParams = this.copy_(); + startAt(indexValue: any, key?: string | null): QueryParams { + const newParams = this.copy_(); newParams.startSet_ = true; if (!(indexValue !== undefined)) { indexValue = null; @@ -289,25 +276,25 @@ export class QueryParams { newParams.indexStartName_ = ''; } return newParams; - }; + } /** * @param {*} indexValue * @param {?string=} key * @return {!QueryParams} */ - endAt(indexValue, key) { - var newParams = this.copy_(); + endAt(indexValue: any, key?: string | null): QueryParams { + const newParams = this.copy_(); newParams.endSet_ = true; if (!(indexValue !== undefined)) { indexValue = null; } newParams.indexEndValue_ = indexValue; - if ((key !== undefined)) { + if (key !== undefined) { newParams.endNameSet_ = true; newParams.indexEndName_ = key; } else { - newParams.startEndSet_ = false; + newParams.endNameSet_ = false; newParams.indexEndName_ = ''; } return newParams; @@ -317,18 +304,18 @@ export class QueryParams { * @param {!Index} index * @return {!QueryParams} */ - orderBy(index) { - var newParams = this.copy_(); + orderBy(index: Index): QueryParams { + const newParams = this.copy_(); newParams.index_ = index; 
return newParams; - }; + } /** * @return {!Object} */ - getQueryObject() { - var WIRE_PROTOCOL_CONSTANTS = QueryParams.WIRE_PROTOCOL_CONSTANTS_; - var obj = {}; + getQueryObject(): Object { + const WIRE_PROTOCOL_CONSTANTS = QueryParams.WIRE_PROTOCOL_CONSTANTS_; + const obj: { [k: string]: any } = {}; if (this.startSet_) { obj[WIRE_PROTOCOL_CONSTANTS.INDEX_START_VALUE] = this.indexStartValue_; if (this.startNameSet_) { @@ -343,7 +330,7 @@ export class QueryParams { } if (this.limitSet_) { obj[WIRE_PROTOCOL_CONSTANTS.LIMIT] = this.limit_; - var viewFrom = this.viewFrom_; + let viewFrom = this.viewFrom_; if (viewFrom === '') { if (this.isViewFromLeft()) { viewFrom = WIRE_PROTOCOL_CONSTANTS.VIEW_FROM_LEFT; @@ -358,26 +345,26 @@ export class QueryParams { obj[WIRE_PROTOCOL_CONSTANTS.INDEX] = this.index_.toString(); } return obj; - }; + } /** * @return {boolean} */ - loadsAllData() { + loadsAllData(): boolean { return !(this.startSet_ || this.endSet_ || this.limitSet_); - }; + } /** * @return {boolean} */ - isDefault() { + isDefault(): boolean { return this.loadsAllData() && this.index_ == PRIORITY_INDEX; - }; + } /** * @return {!NodeFilter} */ - getNodeFilter() { + getNodeFilter(): NodeFilter { if (this.loadsAllData()) { return new IndexedFilter(this.getIndex()); } else if (this.hasLimit()) { @@ -385,7 +372,7 @@ export class QueryParams { } else { return new RangedFilter(this); } - }; + } /** @@ -393,15 +380,15 @@ export class QueryParams { * * @return {!Object.} query string parameters */ - toRestQueryStringParameters() { - var REST_CONSTANTS = QueryParams.REST_QUERY_CONSTANTS_; - var qs = { }; + toRestQueryStringParameters(): { [k: string]: any } { + const REST_CONSTANTS = QueryParams.REST_QUERY_CONSTANTS_; + const qs: { [k: string]: string | number } = {}; if (this.isDefault()) { return qs; } - var orderBy; + let orderBy; if (this.index_ === PRIORITY_INDEX) { orderBy = REST_CONSTANTS.PRIORITY_INDEX; } else if (this.index_ === VALUE_INDEX) { @@ -437,5 +424,5 @@ export class QueryParams { } return qs; - }; + } } diff --git a/src/database/core/view/View.ts b/src/database/core/view/View.ts index 8390be0c01b..f3fb5b10f6b 100644 --- a/src/database/core/view/View.ts +++ b/src/database/core/view/View.ts @@ -14,17 +14,22 @@ * limitations under the License. 
*/ -import { IndexedFilter } from "./filter/IndexedFilter"; -import { ViewProcessor } from "./ViewProcessor"; -import { ChildrenNode } from "../snap/ChildrenNode"; -import { CacheNode } from "./CacheNode"; -import { ViewCache } from "./ViewCache"; -import { EventGenerator } from "./EventGenerator"; -import { assert } from "../../../utils/assert"; -import { OperationType } from "../operation/Operation"; -import { Change } from "./Change"; -import { PRIORITY_INDEX } from "../snap/indexes/PriorityIndex"; -import { Query } from "../../api/Query"; +import { IndexedFilter } from './filter/IndexedFilter'; +import { ViewProcessor } from './ViewProcessor'; +import { ChildrenNode } from '../snap/ChildrenNode'; +import { CacheNode } from './CacheNode'; +import { ViewCache } from './ViewCache'; +import { EventGenerator } from './EventGenerator'; +import { assert } from '../../../utils/assert'; +import { Operation, OperationType } from '../operation/Operation'; +import { Change } from './Change'; +import { PRIORITY_INDEX } from '../snap/indexes/PriorityIndex'; +import { Query } from '../../api/Query'; +import { EventRegistration } from './EventRegistration'; +import { Node } from '../snap/Node'; +import { Path } from '../util/Path'; +import { WriteTreeRef } from '../WriteTree'; +import { CancelEvent, Event } from './Event'; /** * A view represents a specific location and query that has 1 or more event registrations. @@ -34,27 +39,24 @@ import { Query } from "../../api/Query"; * - Maintains a cache of the data visible for this location/query. * - Applies new operations (via applyOperation), updates the cache, and based on the event * registrations returns the set of events to be raised. - * - * @param {!fb.api.Query} query - * @param {!ViewCache} initialViewCache * @constructor */ export class View { - query_: Query - processor_ - viewCache_ - eventRegistrations_ - eventGenerator_ - constructor(query, initialViewCache) { - /** - * @type {!fb.api.Query} - * @private - */ - this.query_ = query; - var params = query.getQueryParams(); + private processor_: ViewProcessor; + private viewCache_: ViewCache; + private eventRegistrations_: EventRegistration[] = []; + private eventGenerator_: EventGenerator; - var indexFilter = new IndexedFilter(params.getIndex()); - var filter = params.getNodeFilter(); + /** + * + * @param {!Query} query_ + * @param {!ViewCache} initialViewCache + */ + constructor(private query_: Query, initialViewCache: ViewCache) { + const params = this.query_.getQueryParams(); + + const indexFilter = new IndexedFilter(params.getIndex()); + const filter = params.getNodeFilter(); /** * @type {ViewProcessor} @@ -62,16 +64,16 @@ export class View { */ this.processor_ = new ViewProcessor(filter); - var initialServerCache = initialViewCache.getServerCache(); - var initialEventCache = initialViewCache.getEventCache(); + const initialServerCache = initialViewCache.getServerCache(); + const initialEventCache = initialViewCache.getEventCache(); // Don't filter server node with other filter than index, wait for tagged listen - var serverSnap = indexFilter.updateFullNode(ChildrenNode.EMPTY_NODE, initialServerCache.getNode(), null); - var eventSnap = filter.updateFullNode(ChildrenNode.EMPTY_NODE, initialEventCache.getNode(), null); - var newServerCache = new CacheNode(serverSnap, initialServerCache.isFullyInitialized(), - indexFilter.filtersNodes()); - var newEventCache = new CacheNode(eventSnap, initialEventCache.isFullyInitialized(), - filter.filtersNodes()); + const serverSnap = 
indexFilter.updateFullNode(ChildrenNode.EMPTY_NODE, initialServerCache.getNode(), null); + const eventSnap = filter.updateFullNode(ChildrenNode.EMPTY_NODE, initialEventCache.getNode(), null); + const newServerCache = new CacheNode(serverSnap, initialServerCache.isFullyInitialized(), + indexFilter.filtersNodes()); + const newEventCache = new CacheNode(eventSnap, initialEventCache.isFullyInitialized(), + filter.filtersNodes()); /** * @type {!ViewCache} @@ -79,43 +81,38 @@ export class View { */ this.viewCache_ = new ViewCache(newEventCache, newServerCache); - /** - * @type {!Array.} - * @private - */ - this.eventRegistrations_ = []; - /** * @type {!EventGenerator} * @private */ - this.eventGenerator_ = new EventGenerator(query); + this.eventGenerator_ = new EventGenerator(this.query_); }; + /** - * @return {!fb.api.Query} + * @return {!Query} */ - getQuery() { + getQuery(): Query { return this.query_; }; /** - * @return {?fb.core.snap.Node} + * @return {?Node} */ - getServerCache() { + getServerCache(): Node | null { return this.viewCache_.getServerCache().getNode(); }; /** * @param {!Path} path - * @return {?fb.core.snap.Node} + * @return {?Node} */ - getCompleteServerCache(path) { - var cache = this.viewCache_.getCompleteServerSnap(); + getCompleteServerCache(path: Path): Node | null { + const cache = this.viewCache_.getCompleteServerSnap(); if (cache) { // If this isn't a "loadsAllData" view, then cache isn't actually a complete cache and // we need to see if it contains the child we're interested in. if (this.query_.getQueryParams().loadsAllData() || - (!path.isEmpty() && !cache.getImmediateChild(path.getFront()).isEmpty())) { + (!path.isEmpty() && !cache.getImmediateChild(path.getFront()).isEmpty())) { return cache.getChild(path); } } @@ -125,30 +122,30 @@ export class View { /** * @return {boolean} */ - isEmpty() { + isEmpty(): boolean { return this.eventRegistrations_.length === 0; }; /** - * @param {!fb.core.view.EventRegistration} eventRegistration + * @param {!EventRegistration} eventRegistration */ - addEventRegistration(eventRegistration) { + addEventRegistration(eventRegistration: EventRegistration) { this.eventRegistrations_.push(eventRegistration); }; /** - * @param {?fb.core.view.EventRegistration} eventRegistration If null, remove all callbacks. + * @param {?EventRegistration} eventRegistration If null, remove all callbacks. * @param {Error=} cancelError If a cancelError is provided, appropriate cancel events will be returned. - * @return {!Array.} Cancel events, if cancelError was provided. + * @return {!Array.} Cancel events, if cancelError was provided. 
*/ - removeEventRegistration(eventRegistration, cancelError) { - var cancelEvents = []; + removeEventRegistration(eventRegistration: EventRegistration | null, cancelError?: Error): Event[] { + const cancelEvents: CancelEvent[] = []; if (cancelError) { assert(eventRegistration == null, 'A cancel should cancel all event registrations.'); - var path = this.query_.path; - this.eventRegistrations_.forEach(function(registration) { + const path = this.query_.path; + this.eventRegistrations_.forEach(function (registration) { cancelError = /** @type {!Error} */ (cancelError); - var maybeEvent = registration.createCancelEvent(cancelError, path); + const maybeEvent = registration.createCancelEvent(cancelError, path); if (maybeEvent) { cancelEvents.push(maybeEvent); } @@ -156,9 +153,9 @@ export class View { } if (eventRegistration) { - var remaining = []; - for (var i = 0; i < this.eventRegistrations_.length; ++i) { - var existing = this.eventRegistrations_[i]; + let remaining = []; + for (let i = 0; i < this.eventRegistrations_.length; ++i) { + const existing = this.eventRegistrations_[i]; if (!existing.matches(eventRegistration)) { remaining.push(existing); } else if (eventRegistration.hasAnyCallback()) { @@ -177,28 +174,28 @@ export class View { /** * Applies the given Operation, updates our cache, and returns the appropriate events. * - * @param {!fb.core.Operation} operation - * @param {!fb.core.WriteTreeRef} writesCache - * @param {?fb.core.snap.Node} optCompleteServerCache - * @return {!Array.} + * @param {!Operation} operation + * @param {!WriteTreeRef} writesCache + * @param {?Node} completeServerCache + * @return {!Array.} */ - applyOperation(operation, writesCache, optCompleteServerCache) { + applyOperation(operation: Operation, writesCache: WriteTreeRef, completeServerCache: Node | null): Event[] { if (operation.type === OperationType.MERGE && - operation.source.queryId !== null) { + operation.source.queryId !== null) { assert(this.viewCache_.getCompleteServerSnap(), - 'We should always have a full cache before handling merges'); + 'We should always have a full cache before handling merges'); assert(this.viewCache_.getCompleteEventSnap(), - 'Missing event cache, even though we have a server cache'); + 'Missing event cache, even though we have a server cache'); } - var oldViewCache = this.viewCache_; - var result = this.processor_.applyOperation(oldViewCache, operation, writesCache, optCompleteServerCache); + const oldViewCache = this.viewCache_; + const result = this.processor_.applyOperation(oldViewCache, operation, writesCache, completeServerCache); this.processor_.assertIndexed(result.viewCache); assert(result.viewCache.getServerCache().isFullyInitialized() || - !oldViewCache.getServerCache().isFullyInitialized(), - 'Once a server snap is complete, it should never go back'); + !oldViewCache.getServerCache().isFullyInitialized(), + 'Once a server snap is complete, it should never go back'); this.viewCache_ = result.viewCache; @@ -206,15 +203,15 @@ export class View { }; /** - * @param {!fb.core.view.EventRegistration} registration - * @return {!Array.} + * @param {!EventRegistration} registration + * @return {!Array.} */ - getInitialEvents(registration) { - var eventSnap = this.viewCache_.getEventCache(); - var initialChanges = []; + getInitialEvents(registration: EventRegistration): Event[] { + const eventSnap = this.viewCache_.getEventCache(); + const initialChanges: Change[] = []; if (!eventSnap.getNode().isLeafNode()) { - var eventNode = /** @type {!fb.core.snap.ChildrenNode} */ 
(eventSnap.getNode()); - eventNode.forEachChild(PRIORITY_INDEX, function(key, childNode) { + const eventNode = eventSnap.getNode() as ChildrenNode; + eventNode.forEachChild(PRIORITY_INDEX, function (key, childNode) { initialChanges.push(Change.childAddedChange(key, childNode)); }); } @@ -227,12 +224,12 @@ export class View { /** * @private * @param {!Array.} changes - * @param {!fb.core.snap.Node} eventCache - * @param {fb.core.view.EventRegistration=} opt_eventRegistration - * @return {!Array.} + * @param {!Node} eventCache + * @param {EventRegistration=} eventRegistration + * @return {!Array.} */ - generateEventsForChanges_(changes, eventCache, opt_eventRegistration) { - var registrations = opt_eventRegistration ? [opt_eventRegistration] : this.eventRegistrations_; + generateEventsForChanges_(changes: Change[], eventCache: Node, eventRegistration?: EventRegistration): Event[] { + const registrations = eventRegistration ? [eventRegistration] : this.eventRegistrations_; return this.eventGenerator_.generateEventsForChanges(changes, eventCache, registrations); }; } diff --git a/src/database/core/view/ViewCache.ts b/src/database/core/view/ViewCache.ts index 08c39a06999..808cd07487b 100644 --- a/src/database/core/view/ViewCache.ts +++ b/src/database/core/view/ViewCache.ts @@ -14,102 +14,82 @@ * limitations under the License. */ -import { ChildrenNode } from "../snap/ChildrenNode"; -import { CacheNode } from "./CacheNode"; +import { ChildrenNode } from '../snap/ChildrenNode'; +import { CacheNode } from './CacheNode'; +import { Node } from '../snap/Node'; /** * Stores the data we have cached for a view. * * serverSnap is the cached server data, eventSnap is the cached event data (server data plus any local writes). * - * @param {!CacheNode} eventCache - * @param {!CacheNode} serverCache * @constructor */ export class ViewCache { /** - * @const - * @type {!CacheNode} - * @private - */ - private eventCache_; - - /** - * @const - * @type {!CacheNode} - * @private + * + * @param {!CacheNode} eventCache_ + * @param {!CacheNode} serverCache_ */ - private serverCache_; - constructor(eventCache, serverCache) { - /** - * @const - * @type {!CacheNode} - * @private - */ - this.eventCache_ = eventCache; + constructor(private readonly eventCache_: CacheNode, + private readonly serverCache_: CacheNode) { + } - /** - * @const - * @type {!CacheNode} - * @private - */ - this.serverCache_ = serverCache; - }; /** * @const * @type {ViewCache} */ static Empty = new ViewCache( - new CacheNode(ChildrenNode.EMPTY_NODE, /*fullyInitialized=*/false, /*filtered=*/false), - new CacheNode(ChildrenNode.EMPTY_NODE, /*fullyInitialized=*/false, /*filtered=*/false) + new CacheNode(ChildrenNode.EMPTY_NODE, /*fullyInitialized=*/false, /*filtered=*/false), + new CacheNode(ChildrenNode.EMPTY_NODE, /*fullyInitialized=*/false, /*filtered=*/false) ); /** - * @param {!fb.core.snap.Node} eventSnap + * @param {!Node} eventSnap * @param {boolean} complete * @param {boolean} filtered * @return {!ViewCache} */ - updateEventSnap(eventSnap, complete, filtered) { + updateEventSnap(eventSnap: Node, complete: boolean, filtered: boolean): ViewCache { return new ViewCache(new CacheNode(eventSnap, complete, filtered), this.serverCache_); - }; + } /** - * @param {!fb.core.snap.Node} serverSnap + * @param {!Node} serverSnap * @param {boolean} complete * @param {boolean} filtered * @return {!ViewCache} */ - updateServerSnap(serverSnap, complete, filtered) { + updateServerSnap(serverSnap: Node, complete: boolean, filtered: boolean): ViewCache { return 
new ViewCache(this.eventCache_, new CacheNode(serverSnap, complete, filtered)); - }; + } /** * @return {!CacheNode} */ - getEventCache() { + getEventCache(): CacheNode { return this.eventCache_; - }; + } /** - * @return {?fb.core.snap.Node} + * @return {?Node} */ - getCompleteEventSnap() { + getCompleteEventSnap(): Node | null { return (this.eventCache_.isFullyInitialized()) ? this.eventCache_.getNode() : null; - }; + } /** * @return {!CacheNode} */ - getServerCache() { + getServerCache(): CacheNode { return this.serverCache_; - }; + } /** - * @return {?fb.core.snap.Node} + * @return {?Node} */ - getCompleteServerSnap() { + getCompleteServerSnap(): Node | null { return this.serverCache_.isFullyInitialized() ? this.serverCache_.getNode() : null; - }; + } } diff --git a/src/database/core/view/ViewProcessor.ts b/src/database/core/view/ViewProcessor.ts index 62293247de6..6b1813c4535 100644 --- a/src/database/core/view/ViewProcessor.ts +++ b/src/database/core/view/ViewProcessor.ts @@ -14,119 +14,111 @@ * limitations under the License. */ -import { OperationType } from "../operation/Operation"; -import { assert, assertionError } from "../../../utils/assert"; -import { ChildChangeAccumulator } from "./ChildChangeAccumulator"; -import { Change } from "./Change"; -import { ChildrenNode } from "../snap/ChildrenNode"; -import { KEY_INDEX } from "../snap/indexes/KeyIndex"; -import { ImmutableTree } from "../util/ImmutableTree"; -import { Path } from "../util/Path"; -import { WriteTreeCompleteChildSource, NO_COMPLETE_CHILD_SOURCE } from "./CompleteChildSource"; +import { Operation, OperationType } from '../operation/Operation'; +import { assert, assertionError } from '../../../utils/assert'; +import { ChildChangeAccumulator } from './ChildChangeAccumulator'; +import { Change } from './Change'; +import { ChildrenNode } from '../snap/ChildrenNode'; +import { KEY_INDEX } from '../snap/indexes/KeyIndex'; +import { ImmutableTree } from '../util/ImmutableTree'; +import { Path } from '../util/Path'; +import { WriteTreeCompleteChildSource, NO_COMPLETE_CHILD_SOURCE, CompleteChildSource } from './CompleteChildSource'; +import { ViewCache } from './ViewCache'; +import { NodeFilter } from './filter/NodeFilter'; +import { WriteTreeRef } from '../WriteTree'; +import { Overwrite } from '../operation/Overwrite'; +import { Merge } from '../operation/Merge'; +import { AckUserWrite } from '../operation/AckUserWrite'; +import { Node } from '../snap/Node'; /** - * @param {!ViewCache} viewCache - * @param {!Array.} changes * @constructor * @struct */ export class ProcessorResult { /** - * @const - * @type {!ViewCache} - */ - viewCache; - - /** - * @const - * @type {!Array.} + * @param {!ViewCache} viewCache + * @param {!Array.} changes */ - changes; - - constructor(viewCache, changes) { - this.viewCache = viewCache; - this.changes = changes; - }; + constructor(public readonly viewCache: ViewCache, + public readonly changes: Change[]) { + } } /** - * @param {!NodeFilter} filter * @constructor */ export class ViewProcessor { /** - * @type {!NodeFilter} - * @private - * @const + * @param {!NodeFilter} filter_ */ - private filter_; - constructor(filter) { - this.filter_ = filter; - }; + constructor(private readonly filter_: NodeFilter) { + } /** * @param {!ViewCache} viewCache */ - assertIndexed(viewCache) { + assertIndexed(viewCache: ViewCache) { assert(viewCache.getEventCache().getNode().isIndexed(this.filter_.getIndex()), 'Event snap not indexed'); 
assert(viewCache.getServerCache().getNode().isIndexed(this.filter_.getIndex()), - 'Server snap not indexed'); - }; + 'Server snap not indexed'); + } /** * @param {!ViewCache} oldViewCache - * @param {!fb.core.Operation} operation - * @param {!fb.core.WriteTreeRef} writesCache - * @param {?fb.core.snap.Node} optCompleteCache + * @param {!Operation} operation + * @param {!WriteTreeRef} writesCache + * @param {?Node} completeCache * @return {!ProcessorResult} */ - applyOperation(oldViewCache, operation, writesCache, optCompleteCache) { - var accumulator = new ChildChangeAccumulator(); - var newViewCache, filterServerNode; + applyOperation(oldViewCache: ViewCache, operation: Operation, + writesCache: WriteTreeRef, completeCache: Node | null): ProcessorResult { + const accumulator = new ChildChangeAccumulator(); + let newViewCache, filterServerNode; if (operation.type === OperationType.OVERWRITE) { - var overwrite = /** @type {!fb.core.operation.Overwrite} */ (operation); + const overwrite = operation as Overwrite; if (overwrite.source.fromUser) { newViewCache = this.applyUserOverwrite_(oldViewCache, overwrite.path, overwrite.snap, - writesCache, optCompleteCache, accumulator); + writesCache, completeCache, accumulator); } else { assert(overwrite.source.fromServer, 'Unknown source.'); // We filter the node if it's a tagged update or the node has been previously filtered and the // update is not at the root in which case it is ok (and necessary) to mark the node unfiltered // again filterServerNode = overwrite.source.tagged || - (oldViewCache.getServerCache().isFiltered() && !overwrite.path.isEmpty()); + (oldViewCache.getServerCache().isFiltered() && !overwrite.path.isEmpty()); newViewCache = this.applyServerOverwrite_(oldViewCache, overwrite.path, overwrite.snap, writesCache, - optCompleteCache, filterServerNode, accumulator); + completeCache, filterServerNode, accumulator); } } else if (operation.type === OperationType.MERGE) { - var merge = /** @type {!fb.core.operation.Merge} */ (operation); + const merge = operation as Merge; if (merge.source.fromUser) { newViewCache = this.applyUserMerge_(oldViewCache, merge.path, merge.children, writesCache, - optCompleteCache, accumulator); + completeCache, accumulator); } else { assert(merge.source.fromServer, 'Unknown source.'); // We filter the node if it's a tagged update or the node has been previously filtered filterServerNode = merge.source.tagged || oldViewCache.getServerCache().isFiltered(); - newViewCache = this.applyServerMerge_(oldViewCache, merge.path, merge.children, writesCache, optCompleteCache, - filterServerNode, accumulator); + newViewCache = this.applyServerMerge_(oldViewCache, merge.path, merge.children, writesCache, completeCache, + filterServerNode, accumulator); } } else if (operation.type === OperationType.ACK_USER_WRITE) { - var ackUserWrite = /** @type {!fb.core.operation.AckUserWrite} */ (operation); + const ackUserWrite = operation as AckUserWrite; if (!ackUserWrite.revert) { newViewCache = this.ackUserWrite_(oldViewCache, ackUserWrite.path, ackUserWrite.affectedTree, writesCache, - optCompleteCache, accumulator); + completeCache, accumulator); } else { - newViewCache = this.revertUserWrite_(oldViewCache, ackUserWrite.path, writesCache, optCompleteCache, accumulator); + newViewCache = this.revertUserWrite_(oldViewCache, ackUserWrite.path, writesCache, completeCache, accumulator); } } else if (operation.type === OperationType.LISTEN_COMPLETE) { - newViewCache = this.listenComplete_(oldViewCache, operation.path, writesCache, 
optCompleteCache, accumulator); + newViewCache = this.listenComplete_(oldViewCache, operation.path, writesCache, accumulator); } else { throw assertionError('Unknown operation type: ' + operation.type); } - var changes = accumulator.getChanges(); - this.maybeAddValueEvent_(oldViewCache, newViewCache, changes); + const changes = accumulator.getChanges(); + ViewProcessor.maybeAddValueEvent_(oldViewCache, newViewCache, changes); return new ProcessorResult(newViewCache, changes); - }; + } /** * @param {!ViewCache} oldViewCache @@ -134,64 +126,65 @@ export class ViewProcessor { * @param {!Array.} accumulator * @private */ - maybeAddValueEvent_(oldViewCache, newViewCache, accumulator) { - var eventSnap = newViewCache.getEventCache(); + private static maybeAddValueEvent_(oldViewCache: ViewCache, newViewCache: ViewCache, accumulator: Change[]) { + const eventSnap = newViewCache.getEventCache(); if (eventSnap.isFullyInitialized()) { - var isLeafOrEmpty = eventSnap.getNode().isLeafNode() || eventSnap.getNode().isEmpty(); - var oldCompleteSnap = oldViewCache.getCompleteEventSnap(); + const isLeafOrEmpty = eventSnap.getNode().isLeafNode() || eventSnap.getNode().isEmpty(); + const oldCompleteSnap = oldViewCache.getCompleteEventSnap(); if (accumulator.length > 0 || - !oldViewCache.getEventCache().isFullyInitialized() || - (isLeafOrEmpty && !eventSnap.getNode().equals(/** @type {!fb.core.snap.Node} */ (oldCompleteSnap))) || - !eventSnap.getNode().getPriority().equals(oldCompleteSnap.getPriority())) { + !oldViewCache.getEventCache().isFullyInitialized() || + (isLeafOrEmpty && !eventSnap.getNode().equals(/** @type {!Node} */ (oldCompleteSnap))) || + !eventSnap.getNode().getPriority().equals(oldCompleteSnap.getPriority())) { accumulator.push(Change.valueChange( - /** @type {!fb.core.snap.Node} */ (newViewCache.getCompleteEventSnap()))); + /** @type {!Node} */ (newViewCache.getCompleteEventSnap()))); } } - }; + } /** * @param {!ViewCache} viewCache * @param {!Path} changePath - * @param {!fb.core.WriteTreeRef} writesCache - * @param {!fb.core.view.CompleteChildSource} source + * @param {!WriteTreeRef} writesCache + * @param {!CompleteChildSource} source * @param {!ChildChangeAccumulator} accumulator * @return {!ViewCache} * @private */ - generateEventCacheAfterServerEvent_(viewCache, changePath, writesCache, source, accumulator) { - var oldEventSnap = viewCache.getEventCache(); + private generateEventCacheAfterServerEvent_(viewCache: ViewCache, changePath: Path, + writesCache: WriteTreeRef, source: CompleteChildSource, + accumulator: ChildChangeAccumulator): ViewCache { + const oldEventSnap = viewCache.getEventCache(); if (writesCache.shadowingWrite(changePath) != null) { // we have a shadowing write, ignore changes return viewCache; } else { - var newEventCache, serverNode; + let newEventCache, serverNode; if (changePath.isEmpty()) { // TODO: figure out how this plays with "sliding ack windows" assert(viewCache.getServerCache().isFullyInitialized(), - 'If change path is empty, we must have complete server data'); + 'If change path is empty, we must have complete server data'); if (viewCache.getServerCache().isFiltered()) { // We need to special case this, because we need to only apply writes to complete children, or // we might end up raising events for incomplete children. If the server data is filtered deep // writes cannot be guaranteed to be complete - var serverCache = viewCache.getCompleteServerSnap(); - var completeChildren = (serverCache instanceof ChildrenNode) ? 
serverCache : - ChildrenNode.EMPTY_NODE; - var completeEventChildren = writesCache.calcCompleteEventChildren(completeChildren); + const serverCache = viewCache.getCompleteServerSnap(); + const completeChildren = (serverCache instanceof ChildrenNode) ? serverCache : + ChildrenNode.EMPTY_NODE; + const completeEventChildren = writesCache.calcCompleteEventChildren(completeChildren); newEventCache = this.filter_.updateFullNode(viewCache.getEventCache().getNode(), completeEventChildren, - accumulator); + accumulator); } else { - var completeNode = /** @type {!fb.core.snap.Node} */ - (writesCache.calcCompleteEventCache(viewCache.getCompleteServerSnap())); + const completeNode = writesCache.calcCompleteEventCache(viewCache.getCompleteServerSnap()); newEventCache = this.filter_.updateFullNode(viewCache.getEventCache().getNode(), completeNode, accumulator); } } else { - var childKey = changePath.getFront(); + const childKey = changePath.getFront(); if (childKey == '.priority') { - assert(changePath.getLength() == 1, "Can't have a priority with additional path components"); - var oldEventNode = oldEventSnap.getNode(); + assert(changePath.getLength() == 1, 'Can\'t have a priority with additional path components'); + const oldEventNode = oldEventSnap.getNode(); serverNode = viewCache.getServerCache().getNode(); // we might have overwrites for this priority - var updatedPriority = writesCache.calcEventCacheAfterServerOverwrite(changePath, oldEventNode, serverNode); + const updatedPriority = writesCache.calcEventCacheAfterServerOverwrite(changePath, oldEventNode, serverNode); if (updatedPriority != null) { newEventCache = this.filter_.updatePriority(oldEventNode, updatedPriority); } else { @@ -199,16 +192,16 @@ export class ViewProcessor { newEventCache = oldEventSnap.getNode(); } } else { - var childChangePath = changePath.popFront(); + const childChangePath = changePath.popFront(); // update child - var newEventChild; + let newEventChild; if (oldEventSnap.isCompleteForChild(childKey)) { serverNode = viewCache.getServerCache().getNode(); - var eventChildUpdate = writesCache.calcEventCacheAfterServerOverwrite(changePath, oldEventSnap.getNode(), - serverNode); + const eventChildUpdate = writesCache.calcEventCacheAfterServerOverwrite(changePath, oldEventSnap.getNode(), + serverNode); if (eventChildUpdate != null) { newEventChild = oldEventSnap.getNode().getImmediateChild(childKey).updateChild(childChangePath, - eventChildUpdate); + eventChildUpdate); } else { // Nothing changed, just keep the old child newEventChild = oldEventSnap.getNode().getImmediateChild(childKey); @@ -218,7 +211,7 @@ export class ViewProcessor { } if (newEventChild != null) { newEventCache = this.filter_.updateChild(oldEventSnap.getNode(), childKey, newEventChild, childChangePath, - source, accumulator); + source, accumulator); } else { // no complete child available or no change newEventCache = oldEventSnap.getNode(); @@ -226,91 +219,91 @@ export class ViewProcessor { } } return viewCache.updateEventSnap(newEventCache, oldEventSnap.isFullyInitialized() || changePath.isEmpty(), - this.filter_.filtersNodes()); + this.filter_.filtersNodes()); } - }; + } /** * @param {!ViewCache} oldViewCache * @param {!Path} changePath - * @param {!fb.core.snap.Node} changedSnap - * @param {!fb.core.WriteTreeRef} writesCache - * @param {?fb.core.snap.Node} optCompleteCache + * @param {!Node} changedSnap + * @param {!WriteTreeRef} writesCache + * @param {?Node} completeCache * @param {boolean} filterServerNode * @param {!ChildChangeAccumulator} 
accumulator * @return {!ViewCache} * @private */ - applyServerOverwrite_(oldViewCache, changePath, changedSnap, - writesCache, optCompleteCache, filterServerNode, - accumulator) { - var oldServerSnap = oldViewCache.getServerCache(); - var newServerCache; - var serverFilter = filterServerNode ? this.filter_ : this.filter_.getIndexedFilter(); + applyServerOverwrite_(oldViewCache: ViewCache, changePath: Path, changedSnap: Node, + writesCache: WriteTreeRef, completeCache: Node | null, filterServerNode: boolean, + accumulator: ChildChangeAccumulator): ViewCache { + const oldServerSnap = oldViewCache.getServerCache(); + let newServerCache; + const serverFilter = filterServerNode ? this.filter_ : this.filter_.getIndexedFilter(); if (changePath.isEmpty()) { newServerCache = serverFilter.updateFullNode(oldServerSnap.getNode(), changedSnap, null); } else if (serverFilter.filtersNodes() && !oldServerSnap.isFiltered()) { // we want to filter the server node, but we didn't filter the server node yet, so simulate a full update - var newServerNode = oldServerSnap.getNode().updateChild(changePath, changedSnap); + const newServerNode = oldServerSnap.getNode().updateChild(changePath, changedSnap); newServerCache = serverFilter.updateFullNode(oldServerSnap.getNode(), newServerNode, null); } else { - var childKey = changePath.getFront(); + const childKey = changePath.getFront(); if (!oldServerSnap.isCompleteForPath(changePath) && changePath.getLength() > 1) { // We don't update incomplete nodes with updates intended for other listeners return oldViewCache; } - var childChangePath = changePath.popFront(); - var childNode = oldServerSnap.getNode().getImmediateChild(childKey); - var newChildNode = childNode.updateChild(childChangePath, changedSnap); + const childChangePath = changePath.popFront(); + const childNode = oldServerSnap.getNode().getImmediateChild(childKey); + const newChildNode = childNode.updateChild(childChangePath, changedSnap); if (childKey == '.priority') { newServerCache = serverFilter.updatePriority(oldServerSnap.getNode(), newChildNode); } else { newServerCache = serverFilter.updateChild(oldServerSnap.getNode(), childKey, newChildNode, childChangePath, - NO_COMPLETE_CHILD_SOURCE, null); + NO_COMPLETE_CHILD_SOURCE, null); } } - var newViewCache = oldViewCache.updateServerSnap(newServerCache, - oldServerSnap.isFullyInitialized() || changePath.isEmpty(), serverFilter.filtersNodes()); - var source = new WriteTreeCompleteChildSource(writesCache, newViewCache, optCompleteCache); + const newViewCache = oldViewCache.updateServerSnap(newServerCache, + oldServerSnap.isFullyInitialized() || changePath.isEmpty(), serverFilter.filtersNodes()); + const source = new WriteTreeCompleteChildSource(writesCache, newViewCache, completeCache); return this.generateEventCacheAfterServerEvent_(newViewCache, changePath, writesCache, source, accumulator); - }; + } /** * @param {!ViewCache} oldViewCache * @param {!Path} changePath - * @param {!fb.core.snap.Node} changedSnap - * @param {!fb.core.WriteTreeRef} writesCache - * @param {?fb.core.snap.Node} optCompleteCache + * @param {!Node} changedSnap + * @param {!WriteTreeRef} writesCache + * @param {?Node} completeCache * @param {!ChildChangeAccumulator} accumulator * @return {!ViewCache} * @private */ - applyUserOverwrite_(oldViewCache, changePath, changedSnap, writesCache, - optCompleteCache, accumulator) { - var oldEventSnap = oldViewCache.getEventCache(); - var newViewCache, newEventCache; - var source = new WriteTreeCompleteChildSource(writesCache, oldViewCache, 
optCompleteCache); + applyUserOverwrite_(oldViewCache: ViewCache, changePath: Path, changedSnap: Node, writesCache: WriteTreeRef, + completeCache: Node | null, accumulator: ChildChangeAccumulator): ViewCache { + const oldEventSnap = oldViewCache.getEventCache(); + let newViewCache, newEventCache; + const source = new WriteTreeCompleteChildSource(writesCache, oldViewCache, completeCache); if (changePath.isEmpty()) { newEventCache = this.filter_.updateFullNode(oldViewCache.getEventCache().getNode(), changedSnap, accumulator); newViewCache = oldViewCache.updateEventSnap(newEventCache, true, this.filter_.filtersNodes()); } else { - var childKey = changePath.getFront(); + const childKey = changePath.getFront(); if (childKey === '.priority') { newEventCache = this.filter_.updatePriority(oldViewCache.getEventCache().getNode(), changedSnap); newViewCache = oldViewCache.updateEventSnap(newEventCache, oldEventSnap.isFullyInitialized(), - oldEventSnap.isFiltered()); + oldEventSnap.isFiltered()); } else { - var childChangePath = changePath.popFront(); - var oldChild = oldEventSnap.getNode().getImmediateChild(childKey); - var newChild; + const childChangePath = changePath.popFront(); + const oldChild = oldEventSnap.getNode().getImmediateChild(childKey); + let newChild; if (childChangePath.isEmpty()) { // Child overwrite, we can replace the child newChild = changedSnap; } else { - var childNode = source.getCompleteChild(childKey); + const childNode = source.getCompleteChild(childKey); if (childNode != null) { if (childChangePath.getBack() === '.priority' && - childNode.getChild(/** @type {!Path} */ (childChangePath.parent())).isEmpty()) { + childNode.getChild(childChangePath.parent()).isEmpty()) { // This is a priority update on an empty node. If this node exists on the server, the // server will send down the priority in the update, so ignore for now newChild = childNode; @@ -323,17 +316,17 @@ export class ViewProcessor { } } if (!oldChild.equals(newChild)) { - var newEventSnap = this.filter_.updateChild(oldEventSnap.getNode(), childKey, newChild, childChangePath, - source, accumulator); + const newEventSnap = this.filter_.updateChild(oldEventSnap.getNode(), childKey, newChild, childChangePath, + source, accumulator); newViewCache = oldViewCache.updateEventSnap(newEventSnap, oldEventSnap.isFullyInitialized(), - this.filter_.filtersNodes()); + this.filter_.filtersNodes()); } else { newViewCache = oldViewCache; } } } return newViewCache; - }; + } /** * @param {!ViewCache} viewCache @@ -341,74 +334,75 @@ export class ViewProcessor { * @return {boolean} * @private */ - static cacheHasChild_(viewCache, childKey) { + private static cacheHasChild_(viewCache: ViewCache, childKey: string): boolean { return viewCache.getEventCache().isCompleteForChild(childKey); - }; + } /** * @param {!ViewCache} viewCache * @param {!Path} path - * @param {ImmutableTree.} changedChildren - * @param {!fb.core.WriteTreeRef} writesCache - * @param {?fb.core.snap.Node} serverCache + * @param {ImmutableTree.} changedChildren + * @param {!WriteTreeRef} writesCache + * @param {?Node} serverCache * @param {!ChildChangeAccumulator} accumulator * @return {!ViewCache} * @private */ - applyUserMerge_(viewCache, path, changedChildren, writesCache, - serverCache, accumulator) { + private applyUserMerge_(viewCache: ViewCache, path: Path, changedChildren: ImmutableTree, writesCache: WriteTreeRef, + serverCache: Node | null, accumulator: ChildChangeAccumulator): ViewCache { // HACK: In the case of a limit query, there may be some changes that 
bump things out of the // window leaving room for new items. It's important we process these changes first, so we // iterate the changes twice, first processing any that affect items currently in view. // TODO: I consider an item "in view" if cacheHasChild is true, which checks both the server // and event snap. I'm not sure if this will result in edge cases when a child is in one but // not the other. - var self = this; - var curViewCache = viewCache; - changedChildren.foreach(function(relativePath, childNode) { - var writePath = path.child(relativePath); + let curViewCache = viewCache; + changedChildren.foreach((relativePath, childNode) => { + const writePath = path.child(relativePath); if (ViewProcessor.cacheHasChild_(viewCache, writePath.getFront())) { - curViewCache = self.applyUserOverwrite_(curViewCache, writePath, childNode, writesCache, - serverCache, accumulator); + curViewCache = this.applyUserOverwrite_(curViewCache, writePath, childNode, writesCache, + serverCache, accumulator); } }); - changedChildren.foreach(function(relativePath, childNode) { - var writePath = path.child(relativePath); + changedChildren.foreach((relativePath, childNode) => { + const writePath = path.child(relativePath); if (!ViewProcessor.cacheHasChild_(viewCache, writePath.getFront())) { - curViewCache = self.applyUserOverwrite_(curViewCache, writePath, childNode, writesCache, - serverCache, accumulator); + curViewCache = this.applyUserOverwrite_(curViewCache, writePath, childNode, writesCache, + serverCache, accumulator); } }); return curViewCache; - }; + } /** - * @param {!fb.core.snap.Node} node - * @param {ImmutableTree.} merge - * @return {!fb.core.snap.Node} + * @param {!Node} node + * @param {ImmutableTree.} merge + * @return {!Node} * @private */ - applyMerge_(node, merge) { - merge.foreach(function(relativePath, childNode) { + private applyMerge_(node: Node, merge: ImmutableTree): Node { + merge.foreach(function (relativePath, childNode) { node = node.updateChild(relativePath, childNode); }); return node; - }; + } /** * @param {!ViewCache} viewCache * @param {!Path} path - * @param {!ImmutableTree.} changedChildren - * @param {!fb.core.WriteTreeRef} writesCache - * @param {?fb.core.snap.Node} serverCache + * @param {!ImmutableTree.} changedChildren + * @param {!WriteTreeRef} writesCache + * @param {?Node} serverCache * @param {boolean} filterServerNode * @param {!ChildChangeAccumulator} accumulator * @return {!ViewCache} * @private */ - applyServerMerge_(viewCache, path, changedChildren, writesCache, serverCache, filterServerNode, accumulator) { + private applyServerMerge_(viewCache: ViewCache, path: Path, changedChildren: ImmutableTree, + writesCache: WriteTreeRef, serverCache: Node | null, filterServerNode: boolean, + accumulator: ChildChangeAccumulator): ViewCache { // If we don't have a cache yet, this merge was intended for a previously listen in the same location. Ignore it and // wait for the complete data update coming soon. if (viewCache.getServerCache().getNode().isEmpty() && !viewCache.getServerCache().isFullyInitialized()) { @@ -421,134 +415,149 @@ export class ViewProcessor { // TODO: I consider an item "in view" if cacheHasChild is true, which checks both the server // and event snap. I'm not sure if this will result in edge cases when a child is in one but // not the other. 
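// Editor's sketch (not part of the patch): the two-pass ordering described in the comment above,
// shown with plain data structures. The names below (applyMergeInTwoPasses, inView, applyChange)
// are hypothetical stand-ins for the ImmutableTree/ViewCache machinery that applyUserMerge_ uses;
// this only illustrates why changes touching children already in a limited view go first.
function applyMergeInTwoPasses(
  changes: Map<string, unknown>,
  inView: (key: string) => boolean,
  applyChange: (key: string, value: unknown) => void
): void {
  // Pass 1: children currently in the limit window; removals/updates here free up window slots.
  changes.forEach((value, key) => {
    if (inView(key)) applyChange(key, value);
  });
  // Pass 2: remaining children, which may now fit into the slots freed by pass 1.
  changes.forEach((value, key) => {
    if (!inView(key)) applyChange(key, value);
  });
}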
- var curViewCache = viewCache; - var viewMergeTree; + let curViewCache = viewCache; + let viewMergeTree; if (path.isEmpty()) { viewMergeTree = changedChildren; } else { viewMergeTree = ImmutableTree.Empty.setTree(path, changedChildren); } - var serverNode = viewCache.getServerCache().getNode(); - var self = this; - viewMergeTree.children.inorderTraversal(function(childKey, childTree) { + const serverNode = viewCache.getServerCache().getNode(); + viewMergeTree.children.inorderTraversal((childKey, childTree) => { if (serverNode.hasChild(childKey)) { - var serverChild = viewCache.getServerCache().getNode().getImmediateChild(childKey); - var newChild = self.applyMerge_(serverChild, childTree); - curViewCache = self.applyServerOverwrite_(curViewCache, new Path(childKey), newChild, - writesCache, serverCache, filterServerNode, accumulator); + const serverChild = viewCache.getServerCache().getNode().getImmediateChild(childKey); + const newChild = this.applyMerge_(serverChild, childTree); + curViewCache = this.applyServerOverwrite_(curViewCache, new Path(childKey), newChild, + writesCache, serverCache, filterServerNode, accumulator); } }); - viewMergeTree.children.inorderTraversal(function(childKey, childMergeTree) { - var isUnknownDeepMerge = !viewCache.getServerCache().isCompleteForChild(childKey) && childMergeTree.value == null; + viewMergeTree.children.inorderTraversal((childKey, childMergeTree) => { + const isUnknownDeepMerge = !viewCache.getServerCache().isCompleteForChild(childKey) + && (childMergeTree.value == null); if (!serverNode.hasChild(childKey) && !isUnknownDeepMerge) { - var serverChild = viewCache.getServerCache().getNode().getImmediateChild(childKey); - var newChild = self.applyMerge_(serverChild, childMergeTree); - curViewCache = self.applyServerOverwrite_(curViewCache, new Path(childKey), newChild, writesCache, - serverCache, filterServerNode, accumulator); + const serverChild = viewCache.getServerCache().getNode().getImmediateChild(childKey); + const newChild = this.applyMerge_(serverChild, childMergeTree); + curViewCache = this.applyServerOverwrite_(curViewCache, new Path(childKey), newChild, writesCache, + serverCache, filterServerNode, accumulator); } }); return curViewCache; - }; + } /** * @param {!ViewCache} viewCache * @param {!Path} ackPath * @param {!ImmutableTree} affectedTree - * @param {!fb.core.WriteTreeRef} writesCache - * @param {?fb.core.snap.Node} optCompleteCache + * @param {!WriteTreeRef} writesCache + * @param {?Node} completeCache * @param {!ChildChangeAccumulator} accumulator * @return {!ViewCache} * @private */ - ackUserWrite_(viewCache, ackPath, affectedTree, writesCache, - optCompleteCache, accumulator) { + private ackUserWrite_(viewCache: ViewCache, ackPath: Path, affectedTree: ImmutableTree, writesCache: WriteTreeRef, + completeCache: Node | null, accumulator: ChildChangeAccumulator): ViewCache { if (writesCache.shadowingWrite(ackPath) != null) { return viewCache; } // Only filter server node if it is currently filtered - var filterServerNode = viewCache.getServerCache().isFiltered(); + const filterServerNode = viewCache.getServerCache().isFiltered(); // Essentially we'll just get our existing server cache for the affected paths and re-apply it as a server update // now that it won't be shadowed. - var serverCache = viewCache.getServerCache(); + const serverCache = viewCache.getServerCache(); if (affectedTree.value != null) { // This is an overwrite. 
if ((ackPath.isEmpty() && serverCache.isFullyInitialized()) || serverCache.isCompleteForPath(ackPath)) { return this.applyServerOverwrite_(viewCache, ackPath, serverCache.getNode().getChild(ackPath), - writesCache, optCompleteCache, filterServerNode, accumulator); + writesCache, completeCache, filterServerNode, accumulator); } else if (ackPath.isEmpty()) { // This is a goofy edge case where we are acking data at this location but don't have full data. We // should just re-apply whatever we have in our cache as a merge. - var changedChildren = /** @type {ImmutableTree} */ - (ImmutableTree.Empty); - serverCache.getNode().forEachChild(KEY_INDEX, function(name, node) { + let changedChildren = ImmutableTree.Empty; + serverCache.getNode().forEachChild(KEY_INDEX, function (name, node) { changedChildren = changedChildren.set(new Path(name), node); }); - return this.applyServerMerge_(viewCache, ackPath, changedChildren, writesCache, optCompleteCache, - filterServerNode, accumulator); + return this.applyServerMerge_(viewCache, ackPath, changedChildren, writesCache, completeCache, + filterServerNode, accumulator); } else { return viewCache; } } else { // This is a merge. - var changedChildren = /** @type {ImmutableTree} */ - (ImmutableTree.Empty); - affectedTree.foreach(function(mergePath, value) { - var serverCachePath = ackPath.child(mergePath); + let changedChildren = ImmutableTree.Empty; + affectedTree.foreach(function (mergePath, value) { + const serverCachePath = ackPath.child(mergePath); if (serverCache.isCompleteForPath(serverCachePath)) { changedChildren = changedChildren.set(mergePath, serverCache.getNode().getChild(serverCachePath)); } }); - return this.applyServerMerge_(viewCache, ackPath, changedChildren, writesCache, optCompleteCache, - filterServerNode, accumulator); + return this.applyServerMerge_(viewCache, ackPath, changedChildren, writesCache, completeCache, + filterServerNode, accumulator); } - }; + } + + /** + * @param {!ViewCache} viewCache + * @param {!Path} path + * @param {!WriteTreeRef} writesCache + * @param {!ChildChangeAccumulator} accumulator + * @return {!ViewCache} + * @private + */ + private listenComplete_(viewCache: ViewCache, path: Path, writesCache: WriteTreeRef, + accumulator: ChildChangeAccumulator): ViewCache { + const oldServerNode = viewCache.getServerCache(); + const newViewCache = viewCache.updateServerSnap(oldServerNode.getNode(), + oldServerNode.isFullyInitialized() || path.isEmpty(), oldServerNode.isFiltered()); + return this.generateEventCacheAfterServerEvent_(newViewCache, path, writesCache, + NO_COMPLETE_CHILD_SOURCE, accumulator); + } /** * @param {!ViewCache} viewCache * @param {!Path} path - * @param {!fb.core.WriteTreeRef} writesCache - * @param {?fb.core.snap.Node} optCompleteServerCache + * @param {!WriteTreeRef} writesCache + * @param {?Node} completeServerCache * @param {!ChildChangeAccumulator} accumulator * @return {!ViewCache} * @private */ - revertUserWrite_(viewCache, path, writesCache, optCompleteServerCache, - accumulator) { - var complete; + private revertUserWrite_(viewCache: ViewCache, path: Path, writesCache: WriteTreeRef, completeServerCache: Node | null, + accumulator: ChildChangeAccumulator): ViewCache { + let complete; if (writesCache.shadowingWrite(path) != null) { return viewCache; } else { - var source = new WriteTreeCompleteChildSource(writesCache, viewCache, optCompleteServerCache); - var oldEventCache = viewCache.getEventCache().getNode(); - var newEventCache; + const source = new WriteTreeCompleteChildSource(writesCache, 
viewCache, completeServerCache); + const oldEventCache = viewCache.getEventCache().getNode(); + let newEventCache; if (path.isEmpty() || path.getFront() === '.priority') { - var newNode; + let newNode; if (viewCache.getServerCache().isFullyInitialized()) { newNode = writesCache.calcCompleteEventCache(viewCache.getCompleteServerSnap()); } else { - var serverChildren = viewCache.getServerCache().getNode(); + const serverChildren = viewCache.getServerCache().getNode(); assert(serverChildren instanceof ChildrenNode, - 'serverChildren would be complete if leaf node'); - newNode = writesCache.calcCompleteEventChildren(/** @type {!ChildrenNode} */ (serverChildren)); + 'serverChildren would be complete if leaf node'); + newNode = writesCache.calcCompleteEventChildren(serverChildren as ChildrenNode); } - newNode = /** @type {!fb.core.snap.Node} newNode */ (newNode); + newNode = newNode as Node; newEventCache = this.filter_.updateFullNode(oldEventCache, newNode, accumulator); } else { - var childKey = path.getFront(); - var newChild = writesCache.calcCompleteChild(childKey, viewCache.getServerCache()); + const childKey = path.getFront(); + let newChild = writesCache.calcCompleteChild(childKey, viewCache.getServerCache()); if (newChild == null && viewCache.getServerCache().isCompleteForChild(childKey)) { newChild = oldEventCache.getImmediateChild(childKey); } if (newChild != null) { newEventCache = this.filter_.updateChild(oldEventCache, childKey, newChild, path.popFront(), source, - accumulator); + accumulator); } else if (viewCache.getEventCache().getNode().hasChild(childKey)) { // No complete child available, delete the existing one, if any newEventCache = this.filter_.updateChild(oldEventCache, childKey, ChildrenNode.EMPTY_NODE, path.popFront(), - source, accumulator); + source, accumulator); } else { newEventCache = oldEventCache; } @@ -561,27 +570,9 @@ export class ViewProcessor { } } complete = viewCache.getServerCache().isFullyInitialized() || - writesCache.shadowingWrite(Path.Empty) != null; + writesCache.shadowingWrite(Path.Empty) != null; return viewCache.updateEventSnap(newEventCache, complete, this.filter_.filtersNodes()); } - }; - - /** - * @param {!ViewCache} viewCache - * @param {!Path} path - * @param {!fb.core.WriteTreeRef} writesCache - * @param {?fb.core.snap.Node} serverCache - * @param {!ChildChangeAccumulator} accumulator - * @return {!ViewCache} - * @private - */ - listenComplete_(viewCache, path, writesCache, serverCache, - accumulator) { - var oldServerNode = viewCache.getServerCache(); - var newViewCache = viewCache.updateServerSnap(oldServerNode.getNode(), - oldServerNode.isFullyInitialized() || path.isEmpty(), oldServerNode.isFiltered()); - return this.generateEventCacheAfterServerEvent_(newViewCache, path, writesCache, - NO_COMPLETE_CHILD_SOURCE, accumulator); - }; + } } diff --git a/src/database/core/view/filter/LimitedFilter.ts b/src/database/core/view/filter/LimitedFilter.ts index 3051e856084..ce10db8a2b3 100644 --- a/src/database/core/view/filter/LimitedFilter.ts +++ b/src/database/core/view/filter/LimitedFilter.ts @@ -14,80 +14,70 @@ * limitations under the License. 
*/ -import { RangedFilter } from "./RangedFilter"; -import { ChildrenNode } from "../../snap/ChildrenNode"; -import { Node, NamedNode } from "../../snap/Node"; -import { assert } from "../../../../utils/assert"; -import { Change } from "../Change"; +import { RangedFilter } from './RangedFilter'; +import { ChildrenNode } from '../../snap/ChildrenNode'; +import { Node, NamedNode } from '../../snap/Node'; +import { assert } from '../../../../utils/assert'; +import { Change } from '../Change'; +import { NodeFilter } from './NodeFilter'; +import { Index } from '../../snap/indexes/Index'; +import { IndexedFilter } from './IndexedFilter'; +import { QueryParams } from '../QueryParams'; +import { Path } from '../../util/Path'; +import { CompleteChildSource } from '../CompleteChildSource'; +import { ChildChangeAccumulator } from '../ChildChangeAccumulator'; + /** * Applies a limit and a range to a node and uses RangedFilter to do the heavy lifting where possible * * @constructor * @implements {NodeFilter} - * @param {!QueryParams} params */ -export class LimitedFilter { +export class LimitedFilter implements NodeFilter { /** * @const * @type {RangedFilter} * @private */ - private rangedFilter_; + private readonly rangedFilter_: RangedFilter; /** * @const * @type {!Index} * @private */ - private index_; + private readonly index_: Index; /** * @const * @type {number} * @private */ - private limit_; + private readonly limit_: number; /** * @const * @type {boolean} * @private */ - private reverse_; + private readonly reverse_: boolean; - constructor(params) { - /** - * @const - * @type {RangedFilter} - * @private - */ + /** + * @param {!QueryParams} params + */ + constructor(params: QueryParams) { this.rangedFilter_ = new RangedFilter(params); - - /** - * @const - * @type {!Index} - * @private - */ this.index_ = params.getIndex(); - - /** - * @const - * @type {number} - * @private - */ this.limit_ = params.getLimit(); - - /** - * @const - * @type {boolean} - * @private - */ this.reverse_ = !params.isViewFromLeft(); - }; + } + /** * @inheritDoc */ - updateChild(snap, key, newChild, affectedPath, source, optChangeAccumulator) { + updateChild(snap: Node, key: string, newChild: Node, affectedPath: Path, + source: CompleteChildSource, + optChangeAccumulator: ChildChangeAccumulator | null): Node { if (!this.rangedFilter_.matches(new NamedNode(key, newChild))) { newChild = ChildrenNode.EMPTY_NODE; } @@ -96,17 +86,18 @@ export class LimitedFilter { return snap; } else if (snap.numChildren() < this.limit_) { return this.rangedFilter_.getIndexedFilter().updateChild(snap, key, newChild, affectedPath, source, - optChangeAccumulator); + optChangeAccumulator); } else { return this.fullLimitUpdateChild_(snap, key, newChild, source, optChangeAccumulator); } - }; + } /** * @inheritDoc */ - updateFullNode(oldSnap, newSnap, optChangeAccumulator) { - var filtered; + updateFullNode(oldSnap: Node, newSnap: Node, + optChangeAccumulator: ChildChangeAccumulator | null): Node { + let filtered; if (newSnap.isLeafNode() || newSnap.isEmpty()) { // Make sure we have a children node with the correct index, not a leaf node; filtered = ChildrenNode.EMPTY_NODE.withIndex(this.index_); @@ -115,17 +106,16 @@ export class LimitedFilter { // Easier to build up a snapshot, since what we're given has more than twice the elements we want filtered = ChildrenNode.EMPTY_NODE.withIndex(this.index_); // anchor to the startPost, endPost, or last element as appropriate - var iterator; - newSnap = /** @type {!ChildrenNode} */ (newSnap); + let iterator; 
if (this.reverse_) { - iterator = newSnap.getReverseIteratorFrom(this.rangedFilter_.getEndPost(), this.index_); + iterator = (newSnap as ChildrenNode).getReverseIteratorFrom(this.rangedFilter_.getEndPost(), this.index_); } else { - iterator = newSnap.getIteratorFrom(this.rangedFilter_.getStartPost(), this.index_); + iterator = (newSnap as ChildrenNode).getIteratorFrom(this.rangedFilter_.getStartPost(), this.index_); } - var count = 0; + let count = 0; while (iterator.hasNext() && count < this.limit_) { - var next = iterator.getNext(); - var inRange; + const next = iterator.getNext(); + let inRange; if (this.reverse_) { inRange = this.index_.compare(this.rangedFilter_.getStartPost(), next) <= 0; } else { @@ -143,16 +133,17 @@ export class LimitedFilter { // The snap contains less than twice the limit. Faster to delete from the snap than build up a new one filtered = newSnap.withIndex(this.index_); // Don't support priorities on queries - filtered = /** @type {!ChildrenNode} */ (filtered.updatePriority(ChildrenNode.EMPTY_NODE)); - var startPost; - var endPost; - var cmp; + filtered = filtered.updatePriority(ChildrenNode.EMPTY_NODE) as ChildrenNode; + let startPost; + let endPost; + let cmp; + let iterator; if (this.reverse_) { iterator = filtered.getReverseIterator(this.index_); startPost = this.rangedFilter_.getEndPost(); endPost = this.rangedFilter_.getStartPost(); - var indexCompare = this.index_.getCompare(); - cmp = function(a, b) { return indexCompare(b, a); }; + const indexCompare = this.index_.getCompare(); + cmp = (a: NamedNode, b: NamedNode) => indexCompare(b, a); } else { iterator = filtered.getIterator(this.index_); startPost = this.rangedFilter_.getStartPost(); @@ -160,15 +151,15 @@ export class LimitedFilter { cmp = this.index_.getCompare(); } - count = 0; - var foundStartPost = false; + let count = 0; + let foundStartPost = false; while (iterator.hasNext()) { - next = iterator.getNext(); + let next = iterator.getNext(); if (!foundStartPost && cmp(startPost, next) <= 0) { // start adding foundStartPost = true; } - inRange = foundStartPost && count < this.limit_ && cmp(next, endPost) <= 0; + let inRange = foundStartPost && count < this.limit_ && cmp(next, endPost) <= 0; if (inRange) { count++; } else { @@ -178,71 +169,72 @@ export class LimitedFilter { } } return this.rangedFilter_.getIndexedFilter().updateFullNode(oldSnap, filtered, optChangeAccumulator); - }; + } /** * @inheritDoc */ - updatePriority(oldSnap, newPriority) { + updatePriority(oldSnap: Node, newPriority: Node): Node { // Don't support priorities on queries return oldSnap; - }; + } /** * @inheritDoc */ - filtersNodes() { + filtersNodes(): boolean { return true; - }; + } /** * @inheritDoc */ - getIndexedFilter() { + getIndexedFilter(): IndexedFilter { return this.rangedFilter_.getIndexedFilter(); - }; + } /** * @inheritDoc */ - getIndex() { + getIndex(): Index { return this.index_; - }; + } /** * @param {!Node} snap * @param {string} childKey * @param {!Node} childSnap * @param {!CompleteChildSource} source - * @param {?ChildChangeAccumulator} optChangeAccumulator + * @param {?ChildChangeAccumulator} changeAccumulator * @return {!Node} * @private */ - fullLimitUpdateChild_(snap: Node, childKey: string, childSnap: Node, source, changeAccumulator?) 
{ + private fullLimitUpdateChild_(snap: Node, childKey: string, childSnap: Node, source: CompleteChildSource, + changeAccumulator: ChildChangeAccumulator | null): Node { // TODO: rename all cache stuff etc to general snap terminology - var cmp; + let cmp; if (this.reverse_) { - var indexCmp = this.index_.getCompare(); - cmp = function(a, b) { return indexCmp(b, a); }; + const indexCmp = this.index_.getCompare(); + cmp = (a: NamedNode, b: NamedNode) => indexCmp(b, a); } else { cmp = this.index_.getCompare(); } - var oldEventCache = snap; + const oldEventCache = snap as ChildrenNode; assert(oldEventCache.numChildren() == this.limit_, ''); - var newChildNamedNode = new NamedNode(childKey, childSnap); - var windowBoundary = (this.reverse_ ? oldEventCache.getFirstChild(this.index_) : oldEventCache.getLastChild(this.index_)); - var inRange = this.rangedFilter_.matches(newChildNamedNode); + const newChildNamedNode = new NamedNode(childKey, childSnap); + const windowBoundary = this.reverse_ ? oldEventCache.getFirstChild(this.index_) : oldEventCache.getLastChild(this.index_) as NamedNode; + const inRange = this.rangedFilter_.matches(newChildNamedNode); if (oldEventCache.hasChild(childKey)) { - var oldChildSnap = oldEventCache.getImmediateChild(childKey); - var nextChild = source.getChildAfterChild(this.index_, windowBoundary, this.reverse_); + const oldChildSnap = oldEventCache.getImmediateChild(childKey); + let nextChild = source.getChildAfterChild(this.index_, windowBoundary, this.reverse_); while (nextChild != null && (nextChild.name == childKey || oldEventCache.hasChild(nextChild.name))) { // There is a weird edge case where a node is updated as part of a merge in the write tree, but hasn't // been applied to the limited filter yet. Ignore this next child which will be updated later in // the limited filter... nextChild = source.getChildAfterChild(this.index_, nextChild, this.reverse_); } - var compareNext = nextChild == null ? 1 : cmp(nextChild, newChildNamedNode); - var remainsInWindow = inRange && !childSnap.isEmpty() && compareNext >= 0; + const compareNext = nextChild == null ? 1 : cmp(nextChild, newChildNamedNode); + const remainsInWindow = inRange && !childSnap.isEmpty() && compareNext >= 0; if (remainsInWindow) { if (changeAccumulator != null) { changeAccumulator.trackChildChange(Change.childChangedChange(childKey, childSnap, oldChildSnap)); @@ -252,8 +244,8 @@ export class LimitedFilter { if (changeAccumulator != null) { changeAccumulator.trackChildChange(Change.childRemovedChange(childKey, oldChildSnap)); } - var newEventCache = oldEventCache.updateImmediateChild(childKey, ChildrenNode.EMPTY_NODE); - var nextChildInRange = nextChild != null && this.rangedFilter_.matches(nextChild); + const newEventCache = oldEventCache.updateImmediateChild(childKey, ChildrenNode.EMPTY_NODE); + const nextChildInRange = nextChild != null && this.rangedFilter_.matches(nextChild); if (nextChildInRange) { if (changeAccumulator != null) { changeAccumulator.trackChildChange(Change.childAddedChange(nextChild.name, nextChild.node)); @@ -280,5 +272,5 @@ export class LimitedFilter { } else { return snap; } - }; + } } diff --git a/src/database/core/view/filter/RangedFilter.ts b/src/database/core/view/filter/RangedFilter.ts index bb95ba838ce..037dae26053 100644 --- a/src/database/core/view/filter/RangedFilter.ts +++ b/src/database/core/view/filter/RangedFilter.ts @@ -14,18 +14,24 @@ * limitations under the License. 
*/ -import { IndexedFilter } from "./IndexedFilter"; -import { PRIORITY_INDEX } from "../../../core/snap/indexes/PriorityIndex"; -import { Node, NamedNode } from "../../../core/snap/Node"; -import { ChildrenNode } from "../../../core/snap/ChildrenNode"; +import { IndexedFilter } from './IndexedFilter'; +import { PRIORITY_INDEX } from '../../snap/indexes/PriorityIndex'; +import { NamedNode, Node } from '../../../core/snap/Node'; +import { ChildrenNode } from '../../snap/ChildrenNode'; +import { NodeFilter } from './NodeFilter'; +import { QueryParams } from '../QueryParams'; +import { Index } from '../../snap/indexes/Index'; +import { Path } from '../../util/Path'; +import { CompleteChildSource } from '../CompleteChildSource'; +import { ChildChangeAccumulator } from '../ChildChangeAccumulator'; + /** * Filters nodes by range and uses an IndexFilter to track any changes after filtering the node * * @constructor * @implements {NodeFilter} - * @param {!fb.core.view.QueryParams} params */ -export class RangedFilter { +export class RangedFilter implements NodeFilter { /** * @type {!IndexedFilter} * @const @@ -38,135 +44,141 @@ export class RangedFilter { * @type {!Index} * @private */ - private index_; + private index_: Index; /** * @const * @type {!NamedNode} * @private */ - private startPost_; + private startPost_: NamedNode; /** * @const * @type {!NamedNode} * @private */ - private endPost_; + private endPost_: NamedNode; - constructor(params) { + /** + * @param {!QueryParams} params + */ + constructor(params: QueryParams) { this.indexedFilter_ = new IndexedFilter(params.getIndex()); this.index_ = params.getIndex(); - this.startPost_ = this.getStartPost_(params); - this.endPost_ = this.getEndPost_(params); - }; + this.startPost_ = RangedFilter.getStartPost_(params); + this.endPost_ = RangedFilter.getEndPost_(params); + } /** * @return {!NamedNode} */ - getStartPost() { + getStartPost(): NamedNode { return this.startPost_; - }; + } /** * @return {!NamedNode} */ - getEndPost() { + getEndPost(): NamedNode { return this.endPost_; - }; + } /** * @param {!NamedNode} node * @return {boolean} */ - matches(node) { + matches(node: NamedNode): boolean { return (this.index_.compare(this.getStartPost(), node) <= 0 && this.index_.compare(node, this.getEndPost()) <= 0); - }; + } /** * @inheritDoc */ - updateChild(snap, key, newChild, affectedPath, source, optChangeAccumulator) { + updateChild(snap: Node, key: string, newChild: Node, affectedPath: Path, + source: CompleteChildSource, + optChangeAccumulator: ChildChangeAccumulator | null): Node { if (!this.matches(new NamedNode(key, newChild))) { newChild = ChildrenNode.EMPTY_NODE; } return this.indexedFilter_.updateChild(snap, key, newChild, affectedPath, source, optChangeAccumulator); - }; + } /** * @inheritDoc */ - updateFullNode(oldSnap, newSnap, optChangeAccumulator) { + updateFullNode(oldSnap: Node, newSnap: Node, + optChangeAccumulator: ChildChangeAccumulator | null): Node { if (newSnap.isLeafNode()) { // Make sure we have a children node with the correct index, not a leaf node; newSnap = ChildrenNode.EMPTY_NODE; } - var filtered = newSnap.withIndex(this.index_); + let filtered = newSnap.withIndex(this.index_); // Don't support priorities on queries filtered = filtered.updatePriority(ChildrenNode.EMPTY_NODE); - var self = this; - newSnap.forEachChild(PRIORITY_INDEX, function(key, childNode) { + const self = this; + newSnap.forEachChild(PRIORITY_INDEX, function (key, childNode) { if (!self.matches(new NamedNode(key, childNode))) { filtered = 
filtered.updateImmediateChild(key, ChildrenNode.EMPTY_NODE); } }); return this.indexedFilter_.updateFullNode(oldSnap, filtered, optChangeAccumulator); - }; + } /** * @inheritDoc */ - updatePriority(oldSnap, newPriority) { + updatePriority(oldSnap: Node, newPriority: Node): Node { // Don't support priorities on queries return oldSnap; - }; + } /** * @inheritDoc */ - filtersNodes() { + filtersNodes(): boolean { return true; - }; + } /** * @inheritDoc */ - getIndexedFilter() { + getIndexedFilter(): IndexedFilter { return this.indexedFilter_; - }; + } /** * @inheritDoc */ - getIndex() { + getIndex(): Index { return this.index_; - }; + } /** - * @param {!fb.core.view.QueryParams} params + * @param {!QueryParams} params * @return {!NamedNode} * @private */ - getStartPost_(params) { + private static getStartPost_(params: QueryParams): NamedNode { if (params.hasStart()) { - var startName = params.getIndexStartName(); + const startName = params.getIndexStartName(); return params.getIndex().makePost(params.getIndexStartValue(), startName); } else { return params.getIndex().minPost(); } - }; + } /** - * @param {!fb.core.view.QueryParams} params + * @param {!QueryParams} params * @return {!NamedNode} * @private */ - getEndPost_(params) { + private static getEndPost_(params: QueryParams): NamedNode { if (params.hasEnd()) { - var endName = params.getIndexEndName(); + const endName = params.getIndexEndName(); return params.getIndex().makePost(params.getIndexEndValue(), endName); } else { return params.getIndex().maxPost(); } - }; + } } diff --git a/src/database/realtime/BrowserPollConnection.ts b/src/database/realtime/BrowserPollConnection.ts index a4c9801a75d..f083664dba6 100644 --- a/src/database/realtime/BrowserPollConnection.ts +++ b/src/database/realtime/BrowserPollConnection.ts @@ -23,15 +23,25 @@ import { logWrapper, LUIDGenerator, splitStringBySize -} from "../core/util/util"; -import { CountedSet } from "../core/util/CountedSet"; -import { StatsManager } from "../core/stats/StatsManager"; -import { PacketReceiver } from "./polling/PacketReceiver"; -import { CONSTANTS } from "./Constants"; -import { stringify } from "../../utils/json"; -import { isNodeSdk } from "../../utils/environment"; +} from '../core/util/util'; +import { CountedSet } from '../core/util/CountedSet'; +import { StatsManager } from '../core/stats/StatsManager'; +import { PacketReceiver } from './polling/PacketReceiver'; +import { + FORGE_DOMAIN, + FORGE_REF, + LAST_SESSION_PARAM, + LONG_POLLING, + PROTOCOL_VERSION, + REFERER_PARAM, + TRANSPORT_SESSION_PARAM, + VERSION_PARAM +} from './Constants'; +import { stringify } from '../../utils/json'; +import { isNodeSdk } from '../../utils/environment'; import { Transport } from './Transport'; import { RepoInfo } from '../core/RepoInfo'; +import { StatsCollection } from '../core/stats/StatsCollection'; // URL query parameters associated with longpolling export const FIREBASE_LONGPOLL_START_PARAM = 'start'; @@ -76,51 +86,45 @@ const LP_CONNECT_TIMEOUT = 30000; * * @constructor * @implements {Transport} - * @param {string} connId An identifier for this connection, used for logging - * @param {RepoInfo} repoInfo The info for the endpoint to send data to. 
- * @param {string=} opt_transportSessionId Optional transportSessionid if we are reconnecting for an existing - * transport session - * @param {string=} opt_lastSessionId Optional lastSessionId if the PersistentConnection has already created a - * connection previously */ export class BrowserPollConnection implements Transport { - repoInfo; - bytesSent; - bytesReceived; - transportSessionId; - lastSessionId; - urlFn; - scriptTagHolder; - myDisconnFrame; - curSegmentNum; - myPacketOrderer; - id; - password; - private log_; - private stats_; - private everConnected_; - private connectTimeoutTimer_; - private onDisconnect_; - private isClosed_; - - constructor(public connId: string, repoInfo: RepoInfo, transportSessionId?: string, lastSessionId?: string) { + bytesSent = 0; + bytesReceived = 0; + urlFn: (params: object) => string; + scriptTagHolder: FirebaseIFrameScriptHolder; + myDisconnFrame: HTMLIFrameElement; + curSegmentNum: number; + myPacketOrderer: PacketReceiver; + id: string; + password: string; + private log_: (...a: any[]) => void; + private stats_: StatsCollection; + private everConnected_ = false; + private isClosed_: boolean; + private connectTimeoutTimer_: number | null; + private onDisconnect_: ((a?: boolean) => void) | null; + + /** + * @param {string} connId An identifier for this connection, used for logging + * @param {RepoInfo} repoInfo The info for the endpoint to send data to. + * @param {string=} transportSessionId Optional transportSessionid if we are reconnecting for an existing + * transport session + * @param {string=} lastSessionId Optional lastSessionId if the PersistentConnection has already created a + * connection previously + */ + constructor(public connId: string, public repoInfo: RepoInfo, + public transportSessionId?: string, public lastSessionId?: string) { this.log_ = logWrapper(connId); - this.repoInfo = repoInfo; - this.bytesSent = 0; - this.bytesReceived = 0; this.stats_ = StatsManager.getCollection(repoInfo); - this.transportSessionId = transportSessionId; - this.everConnected_ = false; - this.lastSessionId = lastSessionId; - this.urlFn = (params) => repoInfo.connectionURL(CONSTANTS.LONG_POLLING, params); - }; + this.urlFn = (params: { [k: string]: string }) => repoInfo.connectionURL(LONG_POLLING, params); + } /** * * @param {function(Object)} onMessage Callback when messages arrive * @param {function()} onDisconnect Callback with connection lost. */ - open(onMessage: (msg: Object) => any, onDisconnect: () => any) { + open(onMessage: (msg: Object) => void, onDisconnect: (a?: boolean) => void) { this.curSegmentNum = 0; this.onDisconnect_ = onDisconnect; this.myPacketOrderer = new PacketReceiver(onMessage); @@ -131,7 +135,7 @@ export class BrowserPollConnection implements Transport { // Make sure we clear the host cache this.onClosed_(); this.connectTimeoutTimer_ = null; - }, Math.floor(LP_CONNECT_TIMEOUT)); + }, Math.floor(LP_CONNECT_TIMEOUT)) as any; // Ensure we delay the creation of the iframe until the DOM is loaded. executeWhenDOMReady(() => { @@ -179,23 +183,23 @@ export class BrowserPollConnection implements Transport { //Send the initial request to connect. The serial number is simply to keep the browser from pulling previous results //from cache. 
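// Editor's sketch (not part of the patch): how a cache-busting connect URL can be assembled from a
// params object. buildUrl and the literal parameter names ('start', 'ser') are hypothetical; the
// real code delegates to this.urlFn / repoInfo.connectionURL with the FIREBASE_LONGPOLL_* constants
// defined in this module. The fresh random serial per request is what keeps the browser (and any
// intermediaries) from replaying a cached poll response.
function buildUrl(base: string, params: { [k: string]: string | number }): string {
  const query = Object.keys(params)
    .map(k => encodeURIComponent(k) + '=' + encodeURIComponent(String(params[k])))
    .join('&');
  return base + '?' + query;
}

const exampleConnectUrl = buildUrl('https://example.firebaseio.test/.lp', {
  start: 't',
  ser: Math.floor(Math.random() * 100000000)
});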
- const urlParams = {}; + const urlParams: { [k: string]: string | number } = {}; urlParams[FIREBASE_LONGPOLL_START_PARAM] = 't'; urlParams[FIREBASE_LONGPOLL_SERIAL_PARAM] = Math.floor(Math.random() * 100000000); if (this.scriptTagHolder.uniqueCallbackIdentifier) urlParams[FIREBASE_LONGPOLL_CALLBACK_ID_PARAM] = this.scriptTagHolder.uniqueCallbackIdentifier; - urlParams[CONSTANTS.VERSION_PARAM] = CONSTANTS.PROTOCOL_VERSION; + urlParams[VERSION_PARAM] = PROTOCOL_VERSION; if (this.transportSessionId) { - urlParams[CONSTANTS.TRANSPORT_SESSION_PARAM] = this.transportSessionId; + urlParams[TRANSPORT_SESSION_PARAM] = this.transportSessionId; } if (this.lastSessionId) { - urlParams[CONSTANTS.LAST_SESSION_PARAM] = this.lastSessionId; + urlParams[LAST_SESSION_PARAM] = this.lastSessionId; } if (!isNodeSdk() && typeof location !== 'undefined' && location.href && - location.href.indexOf(CONSTANTS.FORGE_DOMAIN) !== -1) { - urlParams[CONSTANTS.REFERER_PARAM] = CONSTANTS.FORGE_REF; + location.href.indexOf(FORGE_DOMAIN) !== -1) { + urlParams[REFERER_PARAM] = FORGE_REF; } const connectURL = this.urlFn(urlParams); this.log_('Connecting via long-poll to ' + connectURL); @@ -211,7 +215,7 @@ export class BrowserPollConnection implements Transport { this.addDisconnectPingFrame(this.id, this.password); }; - private static forceAllow_; + private static forceAllow_: boolean; /** * Forces long polling to be considered as a potential transport @@ -220,7 +224,7 @@ export class BrowserPollConnection implements Transport { BrowserPollConnection.forceAllow_ = true; }; - private static forceDisallow_; + private static forceDisallow_: boolean; /** * Forces longpolling to not be considered as a potential transport @@ -234,12 +238,12 @@ export class BrowserPollConnection implements Transport { // NOTE: In React-Native there's normally no 'document', but if you debug a React-Native app in // the Chrome debugger, 'document' is defined, but document.createElement is null (2015/06/08). return BrowserPollConnection.forceAllow_ || ( - !BrowserPollConnection.forceDisallow_ && - typeof document !== 'undefined' && document.createElement != null && - !isChromeExtensionContentScript() && - !isWindowsStoreApp() && - !isNodeSdk() - ); + !BrowserPollConnection.forceDisallow_ && + typeof document !== 'undefined' && document.createElement != null && + !isChromeExtensionContentScript() && + !isWindowsStoreApp() && + !isNodeSdk() + ); }; /** @@ -333,7 +337,7 @@ export class BrowserPollConnection implements Transport { addDisconnectPingFrame(id: string, pw: string) { if (isNodeSdk()) return; this.myDisconnFrame = document.createElement('iframe'); - const urlParams = {}; + const urlParams: { [k: string]: string } = {}; urlParams[FIREBASE_LONGPOLL_DISCONN_FRAME_REQUEST_PARAM] = 't'; urlParams[FIREBASE_LONGPOLL_ID_PARAM] = id; urlParams[FIREBASE_LONGPOLL_PW_PARAM] = pw; @@ -363,10 +367,6 @@ export interface IFrameElement extends HTMLIFrameElement { /********************************************************************************************* * A wrapper around an iframe that is used as a long-polling script holder. * @constructor - * @param commandCB - The callback to be called when control commands are recevied from the server. - * @param onMessageCB - The callback to be triggered when responses arrive from the server. - * @param onDisconnect - The callback to be triggered when this tag holder is closed - * @param urlFn - A function that provides the URL of the endpoint to send data to. 
*********************************************************************************************/ export class FirebaseIFrameScriptHolder { //We maintain a count of all of the outstanding requests, because if we have too many active at once it can cause @@ -374,10 +374,10 @@ export class FirebaseIFrameScriptHolder { /** * @type {CountedSet.} */ - outstandingRequests = new CountedSet(); + outstandingRequests = new CountedSet(); //A queue of the pending segments waiting for transmission to the server. - pendingSegs = []; + pendingSegs: { seg: number, ts: number, d: any }[] = []; //A serial number. We use this for two things: // 1) A way to ensure the browser doesn't cache responses to polls @@ -395,42 +395,51 @@ export class FirebaseIFrameScriptHolder { alive: boolean; myID: string; myPW: string; - commandCB; - onMessageCB; + commandCB: (command: string, ...args: any[]) => void; + onMessageCB: (...args: any[]) => void; - constructor(commandCB, onMessageCB, public onDisconnect, public urlFn) { + /** + * @param commandCB - The callback to be called when control commands are recevied from the server. + * @param onMessageCB - The callback to be triggered when responses arrive from the server. + * @param onDisconnect - The callback to be triggered when this tag holder is closed + * @param urlFn - A function that provides the URL of the endpoint to send data to. + */ + constructor(commandCB: (command: string, ...args: any[]) => void, + onMessageCB: (...args: any[]) => void, + public onDisconnect: () => void, + public urlFn: (a: object) => string) { if (!isNodeSdk()) { - //Each script holder registers a couple of uniquely named callbacks with the window. These are called from the - //iframes where we put the long-polling script tags. We have two callbacks: - // 1) Command Callback - Triggered for control issues, like starting a connection. - // 2) Message Callback - Triggered when new data arrives. - this.uniqueCallbackIdentifier = LUIDGenerator(); - window[FIREBASE_LONGPOLL_COMMAND_CB_NAME + this.uniqueCallbackIdentifier] = commandCB; - window[FIREBASE_LONGPOLL_DATA_CB_NAME + this.uniqueCallbackIdentifier] = onMessageCB; - - //Create an iframe for us to add script tags to. - this.myIFrame = FirebaseIFrameScriptHolder.createIFrame_(); - - // Set the iframe's contents. - let script = ''; - // if we set a javascript url, it's IE and we need to set the document domain. The javascript url is sufficient - // for ie9, but ie8 needs to do it again in the document itself. - if (this.myIFrame.src && this.myIFrame.src.substr(0, 'javascript:'.length) === 'javascript:') { - const currentDomain = document.domain; - script = ''; - } - const iframeContents = '' + script + ''; - try { - this.myIFrame.doc.open(); - this.myIFrame.doc.write(iframeContents); - this.myIFrame.doc.close(); - } catch (e) { - log('frame writing exception'); - if (e.stack) { - log(e.stack); + //Each script holder registers a couple of uniquely named callbacks with the window. These are called from the + //iframes where we put the long-polling script tags. We have two callbacks: + // 1) Command Callback - Triggered for control issues, like starting a connection. + // 2) Message Callback - Triggered when new data arrives. + this.uniqueCallbackIdentifier = LUIDGenerator(); + (window as any)[FIREBASE_LONGPOLL_COMMAND_CB_NAME + this.uniqueCallbackIdentifier] = commandCB; + (window as any)[FIREBASE_LONGPOLL_DATA_CB_NAME + this.uniqueCallbackIdentifier] = onMessageCB; + + //Create an iframe for us to add script tags to. 
+ this.myIFrame = FirebaseIFrameScriptHolder.createIFrame_(); + + // Set the iframe's contents. + let script = ''; + // if we set a javascript url, it's IE and we need to set the document domain. The javascript url is sufficient + // for ie9, but ie8 needs to do it again in the document itself. + if (this.myIFrame.src && this.myIFrame.src.substr(0, 'javascript:'.length) === 'javascript:') { + const currentDomain = document.domain; + script = '<script>document.domain="' + currentDomain + '";</script>'; + } + const iframeContents = '<html><body>' + script + '</body></html>'; + try { + this.myIFrame.doc.open(); + this.myIFrame.doc.write(iframeContents); + this.myIFrame.doc.close(); + } catch (e) { + log('frame writing exception'); + if (e.stack) { + log(e.stack); + } + log(e); } - log(e); - } } else { this.commandCB = commandCB; this.onMessageCB = onMessageCB; @@ -444,7 +453,7 @@ export class FirebaseIFrameScriptHolder { * @return {Element} */ private static createIFrame_(): IFrameElement { - const iframe = document.createElement('iframe'); + const iframe = document.createElement('iframe') as IFrameElement; iframe.style.display = 'none'; // This is necessary in order to initialize the document inside the iframe @@ -503,12 +512,12 @@ export class FirebaseIFrameScriptHolder { } if (isNodeSdk() && this.myID) { - var urlParams = {}; + const urlParams: { [k: string]: string } = {}; urlParams[FIREBASE_LONGPOLL_DISCONN_FRAME_PARAM] = 't'; urlParams[FIREBASE_LONGPOLL_ID_PARAM] = this.myID; urlParams[FIREBASE_LONGPOLL_PW_PARAM] = this.myPW; - var theURL = this.urlFn(urlParams); - (FirebaseIFrameScriptHolder).nodeRestRequest(theURL); + const theURL = this.urlFn(urlParams); + (FirebaseIFrameScriptHolder as any).nodeRestRequest(theURL); } // Protect from being called recursively. @@ -547,7 +556,7 @@ export class FirebaseIFrameScriptHolder { if (this.alive && this.sendNewPolls && this.outstandingRequests.count() < (this.pendingSegs.length > 0 ? 2 : 1)) { //construct our url this.currentSerial++; - const urlParams = {}; + const urlParams: { [k: string]: string | number } = {}; urlParams[FIREBASE_LONGPOLL_ID_PARAM] = this.myID; urlParams[FIREBASE_LONGPOLL_PW_PARAM] = this.myPW; urlParams[FIREBASE_LONGPOLL_SERIAL_PARAM] = this.currentSerial; @@ -585,7 +594,7 @@ export class FirebaseIFrameScriptHolder { * @param totalsegs - The total number of segments in this packet * @param data - The data for this segment. */ - enqueueSegment(segnum, totalsegs, data) { + enqueueSegment(segnum: number, totalsegs: number, data: any) { //add this to the queue of segments to send. this.pendingSegs.push({seg: segnum, ts: totalsegs, d: data}); @@ -631,9 +640,9 @@ export class FirebaseIFrameScriptHolder { * @param {!string} url - The URL for the script tag source. * @param {!function()} loadCB - A callback to be triggered once the script has loaded.
*/ - addTag(url: string, loadCB: () => any) { + addTag(url: string, loadCB: () => void) { if (isNodeSdk()) { - (this).doNodeLongPoll(url, loadCB); + (this as any).doNodeLongPoll(url, loadCB); } else { setTimeout(() => { try { @@ -643,10 +652,10 @@ export class FirebaseIFrameScriptHolder { newScript.type = 'text/javascript'; newScript.async = true; newScript.src = url; - newScript.onload = (newScript).onreadystatechange = function () { - const rstate = (newScript).readyState; + newScript.onload = (newScript as any).onreadystatechange = function () { + const rstate = (newScript as any).readyState; if (!rstate || rstate === 'loaded' || rstate === 'complete') { - newScript.onload = (newScript).onreadystatechange = null; + newScript.onload = (newScript as any).onreadystatechange = null; if (newScript.parentNode) { newScript.parentNode.removeChild(newScript); } diff --git a/src/database/realtime/Connection.ts b/src/database/realtime/Connection.ts index d1b515c48cc..c27de144555 100644 --- a/src/database/realtime/Connection.ts +++ b/src/database/realtime/Connection.ts @@ -22,9 +22,10 @@ import { warn, } from '../core/util/util'; import { PersistentStorage } from '../core/storage/storage'; -import { CONSTANTS } from './Constants'; +import { PROTOCOL_VERSION } from './Constants'; import { TransportManager } from './TransportManager'; import { RepoInfo } from '../core/RepoInfo'; +import { Transport, TransportConstructor } from './Transport'; // Abort upgrade attempt if it takes longer than 60s. const UPGRADE_TIMEOUT = 60000; @@ -39,10 +40,11 @@ const DELAY_BEFORE_SENDING_EXTRA_REQUESTS = 5000; const BYTES_SENT_HEALTHY_OVERRIDE = 10 * 1024; const BYTES_RECEIVED_HEALTHY_OVERRIDE = 100 * 1024; - -const REALTIME_STATE_CONNECTING = 0; -const REALTIME_STATE_CONNECTED = 1; -const REALTIME_STATE_DISCONNECTED = 2; +const enum RealtimeState { + CONNECTING, + CONNECTED, + DISCONNECTED, +} const MESSAGE_TYPE = 't'; const MESSAGE_DATA = 'd'; @@ -56,63 +58,48 @@ const PING = 'p'; const SERVER_HELLO = 'h'; + /** * Creates a new real-time connection to the server using whichever method works * best in the current browser. * * @constructor - * @param {!string} connId - an id for this connection - * @param {!RepoInfo} repoInfo - the info for the endpoint to connect to - * @param {function(Object)} onMessage - the callback to be triggered when a server-push message arrives - * @param {function(number, string)} onReady - the callback to be triggered when this connection is ready to send messages. - * @param {function()} onDisconnect - the callback to be triggered when a connection was lost - * @param {function(string)} onKill - the callback to be triggered when this connection has permanently shut down. - * @param {string=} lastSessionId - last session id in persistent connection. 
is used to clean up old session in real-time server - */ export class Connection { - connectionCount; - id; - lastSessionId; - pendingDataMessages; - sessionId; - - private conn_; - private healthyTimeout_; - private isHealthy_; - private log_; - private onDisconnect_; - private onKill_; - private onMessage_; - private onReady_; - private primaryResponsesRequired_; - private repoInfo_; - private rx_; - private secondaryConn_; - private secondaryResponsesRequired_; - private state_; - private transportManager_; - private tx_; - - constructor(connId: string, - repoInfo: RepoInfo, - onMessage: (a: Object) => any, - onReady: (a: number, b: string) => any, - onDisconnect: () => any, - onKill: (a: string) => any, - lastSessionId?: string) { - this.id = connId; + connectionCount = 0; + pendingDataMessages: any[] = []; + sessionId: string; + + private conn_: Transport; + private healthyTimeout_: number; + private isHealthy_: boolean; + private log_: (...args: any[]) => void; + private primaryResponsesRequired_: number; + private rx_: Transport; + private secondaryConn_: Transport; + private secondaryResponsesRequired_: number; + private state_ = RealtimeState.CONNECTING; + private transportManager_: TransportManager; + private tx_: Transport; + + /** + * @param {!string} id - an id for this connection + * @param {!RepoInfo} repoInfo_ - the info for the endpoint to connect to + * @param {function(Object)} onMessage_ - the callback to be triggered when a server-push message arrives + * @param {function(number, string)} onReady_ - the callback to be triggered when this connection is ready to send messages. + * @param {function()} onDisconnect_ - the callback to be triggered when a connection was lost + * @param {function(string)} onKill_ - the callback to be triggered when this connection has permanently shut down. + * @param {string=} lastSessionId - last session id in persistent connection. is used to clean up old session in real-time server + */ + constructor(public id: string, + private repoInfo_: RepoInfo, + private onMessage_: (a: Object) => void, + private onReady_: (a: number, b: string) => void, + private onDisconnect_: () => void, + private onKill_: (a: string) => void, + public lastSessionId?: string) { this.log_ = logWrapper('c:' + this.id + ':'); - this.onMessage_ = onMessage; - this.onReady_ = onReady; - this.onDisconnect_ = onDisconnect; - this.onKill_ = onKill; - this.repoInfo_ = repoInfo; - this.pendingDataMessages = []; - this.connectionCount = 0; - this.transportManager_ = new TransportManager(repoInfo); - this.state_ = REALTIME_STATE_CONNECTING; - this.lastSessionId = lastSessionId; + this.transportManager_ = new TransportManager(repoInfo_); this.log_('Connection created'); this.start_(); } @@ -143,7 +130,7 @@ export class Connection { * still have the context of your originating frame. */ setTimeout(() => { - // self.conn_ gets set to null in some of the tests. Check to make sure it still exists before using it + // this.conn_ gets set to null in some of the tests. 
Check to make sure it still exists before using it this.conn_ && this.conn_.open(onMessageReceived, onConnectionLost); }, Math.floor(0)); @@ -168,15 +155,15 @@ export class Connection { this.close(); } } - }, Math.floor(healthyTimeout_ms)); + }, Math.floor(healthyTimeout_ms)) as any; } - }; + } /** * @return {!string} * @private */ - private nextTransportId_() { + private nextTransportId_(): string { return 'c:' + this.id + ':' + this.connectionCount++; }; @@ -191,11 +178,11 @@ export class Connection { this.log_('closing an old connection'); } } - }; + } - private connReceiver_(conn) { - return message => { - if (this.state_ != REALTIME_STATE_DISCONNECTED) { + private connReceiver_(conn: Transport) { + return (message: object) => { + if (this.state_ != RealtimeState.DISCONNECTED) { if (conn === this.rx_) { this.onPrimaryMessageReceived_(message); } else if (conn === this.secondaryConn_) { @@ -205,17 +192,17 @@ export class Connection { } } }; - }; + } /** * * @param {Object} dataMsg An arbitrary data message to be sent to the server */ - sendRequest(dataMsg) { + sendRequest(dataMsg: object) { // wrap in a data message envelope and send it on const msg = {'t': 'd', 'd': dataMsg}; this.sendData_(msg); - }; + } tryCleanupConnection() { if (this.tx_ === this.secondaryConn_ && this.rx_ === this.secondaryConn_) { @@ -224,11 +211,11 @@ export class Connection { this.secondaryConn_ = null; // the server will shutdown the old connection } - }; + } - private onSecondaryControl_(controlData) { + private onSecondaryControl_(controlData: { [k: string]: any }) { if (MESSAGE_TYPE in controlData) { - const cmd = controlData[MESSAGE_TYPE]; + const cmd = controlData[MESSAGE_TYPE] as string; if (cmd === SWITCH_ACK) { this.upgradeIfSecondaryHealthy_(); } else if (cmd === CONTROL_RESET) { @@ -245,11 +232,11 @@ export class Connection { this.upgradeIfSecondaryHealthy_(); } } - }; + } - private onSecondaryMessageReceived_(parsedData) { - const layer = requireKey('t', parsedData); - const data = requireKey('d', parsedData); + private onSecondaryMessageReceived_(parsedData: object) { + const layer: string = requireKey('t', parsedData); + const data: any = requireKey('d', parsedData); if (layer == 'c') { this.onSecondaryControl_(data); } else if (layer == 'd') { @@ -258,7 +245,7 @@ export class Connection { } else { throw new Error('Unknown protocol layer: ' + layer); } - }; + } private upgradeIfSecondaryHealthy_() { if (this.secondaryResponsesRequired_ <= 0) { @@ -271,7 +258,7 @@ export class Connection { this.log_('sending ping on secondary.'); this.secondaryConn_.send({'t': 'c', 'd': {'t': PING, 'd': {}}}); } - }; + } private proceedWithUpgrade_() { // tell this connection to consider itself open @@ -287,25 +274,25 @@ export class Connection { this.tx_ = this.secondaryConn_; this.tryCleanupConnection(); - }; + } - private onPrimaryMessageReceived_(parsedData) { + private onPrimaryMessageReceived_(parsedData: { [k: string]: any }) { // Must refer to parsedData properties in quotes, so closure doesn't touch them. 
- const layer = requireKey('t', parsedData); - const data = requireKey('d', parsedData); + const layer: string = requireKey('t', parsedData); + const data: any = requireKey('d', parsedData); if (layer == 'c') { this.onControl_(data); } else if (layer == 'd') { this.onDataMessage_(data); } - }; + } - private onDataMessage_(message) { + private onDataMessage_(message: any) { this.onPrimaryResponse_(); // We don't do anything with data messages, just kick them up a level this.onMessage_(message); - }; + } private onPrimaryResponse_() { if (!this.isHealthy_) { @@ -318,8 +305,8 @@ export class Connection { } }; - private onControl_(controlData) { - const cmd = requireKey(MESSAGE_TYPE, controlData); + private onControl_(controlData: { [k: string]: any }) { + const cmd: string = requireKey(MESSAGE_TYPE, controlData); if (MESSAGE_DATA in controlData) { const payload = controlData[MESSAGE_DATA]; if (cmd === SERVER_HELLO) { @@ -349,39 +336,39 @@ export class Connection { error('Unknown control packet command: ' + cmd); } } - }; + } /** * * @param {Object} handshake The handshake data returned from the server * @private */ - private onHandshake_(handshake) { - const timestamp = handshake['ts']; - const version = handshake['v']; - const host = handshake['h']; - this.sessionId = handshake['s']; + private onHandshake_(handshake: { ts: number, v: string, h: string, s: string }) { + const timestamp = handshake.ts; + const version = handshake.v; + const host = handshake.h; + this.sessionId = handshake.s; this.repoInfo_.updateHost(host); // if we've already closed the connection, then don't bother trying to progress further - if (this.state_ == REALTIME_STATE_CONNECTING) { + if (this.state_ == RealtimeState.CONNECTING) { this.conn_.start(); this.onConnectionEstablished_(this.conn_, timestamp); - if (CONSTANTS.PROTOCOL_VERSION !== version) { + if (PROTOCOL_VERSION !== version) { warn('Protocol version mismatch detected'); } // TODO: do we want to upgrade? when? maybe a delay? this.tryStartUpgrade_(); } - }; + } private tryStartUpgrade_() { const conn = this.transportManager_.upgradeTransport(); if (conn) { this.startUpgrade_(conn); } - }; + } - private startUpgrade_(conn) { + private startUpgrade_(conn: TransportConstructor) { this.secondaryConn_ = new conn(this.nextTransportId_(), this.repoInfo_, this.sessionId); // For certain transports (WebSockets), we need to send and receive several messages back and forth before we @@ -393,59 +380,57 @@ export class Connection { this.secondaryConn_.open(onMessage, onDisconnect); // If we haven't successfully upgraded after UPGRADE_TIMEOUT, give up and kill the secondary. - const self = this; - setTimeoutNonBlocking(function () { - if (self.secondaryConn_) { - self.log_('Timed out trying to upgrade.'); - self.secondaryConn_.close(); + setTimeoutNonBlocking(() => { + if (this.secondaryConn_) { + this.log_('Timed out trying to upgrade.'); + this.secondaryConn_.close(); } }, Math.floor(UPGRADE_TIMEOUT)); - }; + } - private onReset_(host) { + private onReset_(host: string) { this.log_('Reset packet received. New host: ' + host); this.repoInfo_.updateHost(host); // TODO: if we're already "connected", we need to trigger a disconnect at the next layer up. // We don't currently support resets after the connection has already been established - if (this.state_ === REALTIME_STATE_CONNECTED) { + if (this.state_ === RealtimeState.CONNECTED) { this.close(); } else { // Close whatever connections we have open and start again. 
this.closeConnections_(); this.start_(); } - }; + } - private onConnectionEstablished_(conn, timestamp) { + private onConnectionEstablished_(conn: Transport, timestamp: number) { this.log_('Realtime connection established.'); this.conn_ = conn; - this.state_ = REALTIME_STATE_CONNECTED; + this.state_ = RealtimeState.CONNECTED; if (this.onReady_) { this.onReady_(timestamp, this.sessionId); this.onReady_ = null; } - const self = this; // If after 5 seconds we haven't sent enough requests to the server to get the connection healthy, // send some pings. if (this.primaryResponsesRequired_ === 0) { this.log_('Primary connection is healthy.'); this.isHealthy_ = true; } else { - setTimeoutNonBlocking(function () { - self.sendPingOnPrimaryIfNecessary_(); + setTimeoutNonBlocking(() => { + this.sendPingOnPrimaryIfNecessary_(); }, Math.floor(DELAY_BEFORE_SENDING_EXTRA_REQUESTS)); } - }; + } private sendPingOnPrimaryIfNecessary_() { // If the connection isn't considered healthy yet, we'll send a noop ping packet request. - if (!this.isHealthy_ && this.state_ === REALTIME_STATE_CONNECTED) { + if (!this.isHealthy_ && this.state_ === RealtimeState.CONNECTED) { this.log_('sending ping on primary.'); this.sendData_({'t': 'c', 'd': {'t': PING, 'd': {}}}); } - }; + } private onSecondaryConnectionLost_() { const conn = this.secondaryConn_; @@ -454,7 +439,7 @@ export class Connection { // we are relying on this connection already in some capacity. Therefore, a failure is real this.close(); } - }; + } /** * @@ -462,12 +447,12 @@ export class Connection { * we should flush the host cache * @private */ - private onConnectionLost_(everConnected) { + private onConnectionLost_(everConnected: boolean) { this.conn_ = null; // NOTE: IF you're seeing a Firefox error for this line, I think it might be because it's getting - // called on window close and REALTIME_STATE_CONNECTING is no longer defined. Just a guess. - if (!everConnected && this.state_ === REALTIME_STATE_CONNECTING) { + // called on window close and RealtimeState.CONNECTING is no longer defined. Just a guess. + if (!everConnected && this.state_ === RealtimeState.CONNECTING) { this.log_('Realtime connection failed.'); // Since we failed to connect at all, clear any cached entry for this namespace in case the machine went away if (this.repoInfo_.isCacheableHost()) { @@ -475,19 +460,19 @@ export class Connection { // reset the internal host to what we would show the user, i.e. .firebaseio.com this.repoInfo_.internalHost = this.repoInfo_.host; } - } else if (this.state_ === REALTIME_STATE_CONNECTED) { + } else if (this.state_ === RealtimeState.CONNECTED) { this.log_('Realtime connection lost.'); } this.close(); - }; + } /** * * @param {string} reason * @private */ - private onConnectionShutdown_(reason) { + private onConnectionShutdown_(reason: string) { this.log_('Connection shutdown command received. 
Shutting down...'); if (this.onKill_) { @@ -500,24 +485,24 @@ export class Connection { this.onDisconnect_ = null; this.close(); - }; + } - private sendData_(data) { - if (this.state_ !== REALTIME_STATE_CONNECTED) { + private sendData_(data: object) { + if (this.state_ !== RealtimeState.CONNECTED) { throw 'Connection is not connected'; } else { this.tx_.send(data); } - }; + } /** * Cleans up this connection, calling the appropriate callbacks */ close() { - if (this.state_ !== REALTIME_STATE_DISCONNECTED) { + if (this.state_ !== RealtimeState.DISCONNECTED) { this.log_('Closing realtime connection.'); - this.state_ = REALTIME_STATE_DISCONNECTED; + this.state_ = RealtimeState.DISCONNECTED; this.closeConnections_(); @@ -526,7 +511,7 @@ export class Connection { this.onDisconnect_ = null; } } - }; + } /** * @@ -548,7 +533,7 @@ export class Connection { clearTimeout(this.healthyTimeout_); this.healthyTimeout_ = null; } - }; + } } diff --git a/src/database/realtime/Constants.ts b/src/database/realtime/Constants.ts index acd65dd75aa..c9e4d59c3c7 100644 --- a/src/database/realtime/Constants.ts +++ b/src/database/realtime/Constants.ts @@ -14,23 +14,20 @@ * limitations under the License. */ -export const CONSTANTS = { +export const PROTOCOL_VERSION = '5'; - /** @const */ PROTOCOL_VERSION: '5', +export const VERSION_PARAM = 'v'; - /** @const */ VERSION_PARAM: 'v', +export const TRANSPORT_SESSION_PARAM = 's'; - /** @const */ TRANSPORT_SESSION_PARAM: 's', +export const REFERER_PARAM = 'r'; - /** @const */ REFERER_PARAM: 'r', +export const FORGE_REF = 'f'; - /** @const */ FORGE_REF: 'f', +export const FORGE_DOMAIN = 'firebaseio.com'; - /** @const */ FORGE_DOMAIN: 'firebaseio.com', +export const LAST_SESSION_PARAM = 'ls'; - /** @const */ LAST_SESSION_PARAM: 'ls', +export const WEBSOCKET = 'websocket'; - /** @const */ WEBSOCKET: 'websocket', - - /** @const */ LONG_POLLING: 'long_polling' -}; +export const LONG_POLLING = 'long_polling'; diff --git a/src/database/realtime/Transport.ts b/src/database/realtime/Transport.ts index a20ce5a8015..a69159b1880 100644 --- a/src/database/realtime/Transport.ts +++ b/src/database/realtime/Transport.ts @@ -16,6 +16,13 @@ import { RepoInfo } from '../core/RepoInfo'; +export interface TransportConstructor { + new(connId: string, repoInfo: RepoInfo, transportSessionId?: string, lastSessionId?: string): Transport; + isAvailable: () => boolean; + responsesRequiredToBeHealthy?: number; + healthyTimeout?: number; +} + export abstract class Transport { /** * Bytes received since connection started. @@ -29,6 +36,12 @@ export abstract class Transport { */ abstract bytesSent: number; + /** + * An identifier for this connection, used for logging + * @type {string} + */ + abstract connId: string; + /** * * @param {string} connId An identifier for this connection, used for logging @@ -43,14 +56,22 @@ export abstract class Transport { * @param {function(Object)} onMessage Callback when messages arrive * @param {function()} onDisconnect Callback with connection lost. 
*/ - abstract open(onMessage: (a: Object) => any, onDisconnect: () => any); + abstract open(onMessage: (a: Object) => void, onDisconnect: (a?: boolean) => void): void; - abstract start(); + abstract start(): void; - abstract close(); + abstract close(): void; /** * @param {!Object} data The JSON data to transmit */ - abstract send(data: Object); -} \ No newline at end of file + abstract send(data: Object): void; + + abstract markConnectionHealthy(): void; +} diff --git a/src/database/realtime/TransportManager.ts b/src/database/realtime/TransportManager.ts index 33cc3f4264d..bf80ba1af5e 100644 --- a/src/database/realtime/TransportManager.ts +++ b/src/database/realtime/TransportManager.ts @@ -17,6 +17,8 @@ import { BrowserPollConnection } from "./BrowserPollConnection"; import { WebSocketConnection } from "./WebSocketConnection"; import { warn, each } from "../core/util/util"; +import { TransportConstructor } from './Transport'; +import { RepoInfo } from '../core/RepoInfo'; /** * Currently simplistic, this class manages what transport a Connection should use at various stages of its @@ -25,10 +27,10 @@ import { warn, each } from "../core/util/util"; * It starts with longpolling in a browser, and httppolling on node. It then upgrades to websockets if * they are available. * @constructor - * @param {!RepoInfo} repoInfo Metadata around the namespace we're connecting to */ export class TransportManager { - transports_: Array; + private transports_: TransportConstructor[]; + /** * @const * @type {!Array.} */ static get ALL_TRANSPORTS() { return [ BrowserPollConnection, WebSocketConnection ]; } - constructor(repoInfo) { + + /** + * @param {!RepoInfo} repoInfo Metadata around the namespace we're connecting to + */ + constructor(repoInfo: RepoInfo) { this.initTransports_(repoInfo); - }; + } /** * @param {!RepoInfo} repoInfo * @private */ - initTransports_(repoInfo) { - const isWebSocketsAvailable = WebSocketConnection && WebSocketConnection['isAvailable'](); + private initTransports_(repoInfo: RepoInfo) { + const isWebSocketsAvailable: boolean = WebSocketConnection && WebSocketConnection['isAvailable'](); let isSkipPollConnection = isWebSocketsAvailable && !WebSocketConnection.previouslyFailed(); if (repoInfo.webSocketOnly) { @@ -61,8 +67,8 @@ if (isSkipPollConnection) { this.transports_ = [WebSocketConnection]; } else { - const transports = this.transports_ = []; - each(TransportManager.ALL_TRANSPORTS, function(i, transport) { + const transports = this.transports_ = [] as TransportConstructor[]; + each(TransportManager.ALL_TRANSPORTS, (i: number, transport: TransportConstructor) => { if (transport && transport['isAvailable']()) { transports.push(transport); } @@ -74,7 +80,7 @@ * @return {function(new:Transport, !string, !RepoInfo, string=, string=)} The constructor for the * initial transport to use */ - initialTransport() { + initialTransport(): TransportConstructor { if (this.transports_.length > 0) { return this.transports_[0]; } else { @@ -86,7 +92,7 @@ * @return {?function(new:Transport, function(),function(), string=)} The constructor for the next * transport, or null */ - upgradeTransport() { + upgradeTransport(): TransportConstructor | null { if (this.transports_.length > 1) { return this.transports_[1]; } else { @@ -94,4 +100,3 @@
export class TransportManager { } } } - diff --git a/src/database/realtime/WebSocketConnection.ts b/src/database/realtime/WebSocketConnection.ts index 393f0083b73..8f15c142931 100644 --- a/src/database/realtime/WebSocketConnection.ts +++ b/src/database/realtime/WebSocketConnection.ts @@ -15,18 +15,29 @@ */ import { RepoInfo } from '../core/RepoInfo'; -declare const MozWebSocket; -import firebase from "../../app"; +declare const MozWebSocket: any; + +import firebase from '../../app'; import { assert } from '../../utils/assert'; import { logWrapper, splitStringBySize } from '../core/util/util'; import { StatsManager } from '../core/stats/StatsManager'; -import { CONSTANTS } from './Constants'; -import { CONSTANTS as ENV_CONSTANTS } from "../../utils/constants"; +import { + FORGE_DOMAIN, + FORGE_REF, + LAST_SESSION_PARAM, + PROTOCOL_VERSION, + REFERER_PARAM, + TRANSPORT_SESSION_PARAM, + VERSION_PARAM, + WEBSOCKET +} from './Constants'; +import { CONSTANTS as ENV_CONSTANTS } from '../../utils/constants'; import { PersistentStorage } from '../core/storage/storage'; import { jsonEval, stringify } from '../../utils/json'; -import { isNodeSdk } from "../../utils/environment"; +import { isNodeSdk } from '../../utils/environment'; import { Transport } from './Transport'; +import { StatsCollection } from '../core/stats/StatsCollection'; const WEBSOCKET_MAX_FRAME_SIZE = 16384; const WEBSOCKET_KEEPALIVE_INTERVAL = 45000; @@ -46,34 +57,32 @@ export function setWebSocketImpl(impl) { * Create a new websocket connection with the given callbacks. * @constructor * @implements {Transport} - * @param {string} connId identifier for this transport - * @param {RepoInfo} repoInfo The info for the websocket endpoint. - * @param {string=} opt_transportSessionId Optional transportSessionId if this is connecting to an existing transport - * session - * @param {string=} opt_lastSessionId Optional lastSessionId if there was a previous connection */ export class WebSocketConnection implements Transport { - keepaliveTimer; - frames; - totalFrames: number; - bytesSent: number; - bytesReceived: number; - connURL; - onDisconnect; - onMessage; - mySock; - private log_; - private stats_; + keepaliveTimer: number | null = null; + frames: string[] | null = null; + totalFrames = 0; + bytesSent = 0; + bytesReceived = 0; + connURL: string; + onDisconnect: (a?: boolean) => void; + onMessage: (msg: Object) => void; + mySock: any | null; + private log_: (...a: any[]) => void; + private stats_: StatsCollection; private everConnected_: boolean; private isClosed_: boolean; - constructor(public connId: string, repoInfo: RepoInfo, transportSessionId?: string, lastSessionId?: string) { + /** + * @param {string} connId identifier for this transport + * @param {RepoInfo} repoInfo The info for the websocket endpoint. 
+ * @param {string=} transportSessionId Optional transportSessionId if this is connecting to an existing transport + * session + * @param {string=} lastSessionId Optional lastSessionId if there was a previous connection + */ + constructor(public connId: string, repoInfo: RepoInfo, + transportSessionId?: string, lastSessionId?: string) { this.log_ = logWrapper(this.connId); - this.keepaliveTimer = null; - this.frames = null; - this.totalFrames = 0; - this.bytesSent = 0; - this.bytesReceived = 0; this.stats_ = StatsManager.getCollection(repoInfo); this.connURL = WebSocketConnection.connectionURL_(repoInfo, transportSessionId, lastSessionId); } @@ -87,22 +96,22 @@ export class WebSocketConnection implements Transport { * @private */ private static connectionURL_(repoInfo: RepoInfo, transportSessionId?: string, lastSessionId?: string): string { - const urlParams = {}; - urlParams[CONSTANTS.VERSION_PARAM] = CONSTANTS.PROTOCOL_VERSION; + const urlParams: { [k: string]: string } = {}; + urlParams[VERSION_PARAM] = PROTOCOL_VERSION; if (!isNodeSdk() && typeof location !== 'undefined' && location.href && - location.href.indexOf(CONSTANTS.FORGE_DOMAIN) !== -1) { - urlParams[CONSTANTS.REFERER_PARAM] = CONSTANTS.FORGE_REF; + location.href.indexOf(FORGE_DOMAIN) !== -1) { + urlParams[REFERER_PARAM] = FORGE_REF; } if (transportSessionId) { - urlParams[CONSTANTS.TRANSPORT_SESSION_PARAM] = transportSessionId; + urlParams[TRANSPORT_SESSION_PARAM] = transportSessionId; } if (lastSessionId) { - urlParams[CONSTANTS.LAST_SESSION_PARAM] = lastSessionId; + urlParams[LAST_SESSION_PARAM] = lastSessionId; } - return repoInfo.connectionURL(CONSTANTS.WEBSOCKET, urlParams); + return repoInfo.connectionURL(WEBSOCKET, urlParams); } /** @@ -110,7 +119,7 @@ export class WebSocketConnection implements Transport { * @param onMessage Callback when messages arrive * @param onDisconnect Callback with connection lost. */ - open(onMessage: (msg: Object) => any, onDisconnect: () => any) { + open(onMessage: (msg: Object) => void, onDisconnect: (a?: boolean) => void) { this.onDisconnect = onDisconnect; this.onMessage = onMessage; @@ -124,19 +133,19 @@ export class WebSocketConnection implements Transport { if (isNodeSdk()) { const device = ENV_CONSTANTS.NODE_ADMIN ? 'AdminNode' : 'Node'; // UA Format: Firebase//// - const options = { + const options: { [k: string]: object } = { 'headers': { - 'User-Agent': `Firebase/${CONSTANTS.PROTOCOL_VERSION}/${firebase.SDK_VERSION}/${process.platform}/${device}` + 'User-Agent': `Firebase/${PROTOCOL_VERSION}/${firebase.SDK_VERSION}/${process.platform}/${device}` }}; // Plumb appropriate http_proxy environment variable into faye-websocket if it exists. const env = process['env']; - const proxy = (this.connURL.indexOf("wss://") == 0) - ? (env['HTTPS_PROXY'] || env['https_proxy']) - : (env['HTTP_PROXY'] || env['http_proxy']); + const proxy = (this.connURL.indexOf('wss://') == 0) + ? (env['HTTPS_PROXY'] || env['https_proxy']) + : (env['HTTP_PROXY'] || env['http_proxy']); if (proxy) { - options['proxy'] = { origin: proxy }; + options['proxy'] = {origin: proxy}; } this.mySock = new WebSocketImpl(this.connURL, [], options); @@ -164,11 +173,11 @@ export class WebSocketConnection implements Transport { this.onClosed_(); }; - this.mySock.onmessage = (m) => { + this.mySock.onmessage = (m: object) => { this.handleIncomingFrame(m); }; - this.mySock.onerror = (e) => { + this.mySock.onerror = (e: any) => { this.log_('WebSocket error. 
Closing connection.'); const error = e.message || e.data; if (error) { @@ -207,16 +216,12 @@ export class WebSocketConnection implements Transport { /** * Number of response before we consider the connection "healthy." * @type {number} - * - * NOTE: 'responsesRequiredToBeHealthy' shouldn't need to be quoted, but closure removed it for some reason otherwise! */ static responsesRequiredToBeHealthy = 2; /** * Time to wait for the connection te become healthy before giving up. * @type {number} - * - * NOTE: 'healthyTimeout' shouldn't need to be quoted, but closure removed it for some reason otherwise! */ static healthyTimeout = 30000; @@ -229,13 +234,13 @@ export class WebSocketConnection implements Transport { // we default to assuming that it previously failed to be safe. return PersistentStorage.isInMemoryStorage || PersistentStorage.get('previous_websocket_failure') === true; - }; + } markConnectionHealthy() { PersistentStorage.remove('previous_websocket_failure'); - }; + } - private appendFrame_(data) { + private appendFrame_(data: string) { this.frames.push(data); if (this.frames.length == this.totalFrames) { const fullMess = this.frames.join(''); @@ -275,16 +280,16 @@ export class WebSocketConnection implements Transport { } this.handleNewFrameCount_(1); return data; - }; + } /** * Process a websocket frame that has arrived from the server. * @param mess The frame data */ - handleIncomingFrame(mess) { + handleIncomingFrame(mess: { [k: string]: any }) { if (this.mySock === null) return; // Chrome apparently delivers incoming packets even after we .close() the connection sometimes. - const data = mess['data']; + const data = mess['data'] as string; this.bytesReceived += data.length; this.stats_.incrementCounter('bytes_received', data.length); @@ -300,7 +305,7 @@ export class WebSocketConnection implements Transport { this.appendFrame_(remainingData); } } - }; + } /** * Send a message to the server @@ -328,7 +333,7 @@ export class WebSocketConnection implements Transport { for (let i = 0; i < dataSegs.length; i++) { this.sendString_(dataSegs[i]); } - }; + } private shutdown_() { this.isClosed_ = true; @@ -341,7 +346,7 @@ export class WebSocketConnection implements Transport { this.mySock.close(); this.mySock = null; } - }; + } private onClosed_() { if (!this.isClosed_) { @@ -354,7 +359,7 @@ export class WebSocketConnection implements Transport { this.onDisconnect = null; } } - }; + } /** * External-facing close handler. @@ -365,7 +370,7 @@ export class WebSocketConnection implements Transport { this.log_('WebSocket is being closed'); this.shutdown_(); } - }; + } /** * Kill the current keepalive timer and start a new one, to ensure that it always fires N seconds after @@ -379,8 +384,8 @@ export class WebSocketConnection implements Transport { this.sendString_('0'); } this.resetKeepAlive(); - }, Math.floor(WEBSOCKET_KEEPALIVE_INTERVAL)); - }; + }, Math.floor(WEBSOCKET_KEEPALIVE_INTERVAL)) as any; + } /** * Send a string over the websocket. 
@@ -398,7 +403,7 @@ export class WebSocketConnection implements Transport { this.log_('Exception thrown from WebSocket.send():', e.message || e.data, 'Closing connection.'); setTimeout(this.onClosed_.bind(this), 0); } - }; + } } diff --git a/src/database/realtime/polling/PacketReceiver.ts b/src/database/realtime/polling/PacketReceiver.ts index 988d16f84cf..4b66858face 100644 --- a/src/database/realtime/polling/PacketReceiver.ts +++ b/src/database/realtime/polling/PacketReceiver.ts @@ -19,26 +19,28 @@ import { exceptionGuard } from '../../core/util/util'; /** * This class ensures the packets from the server arrive in order * This class takes data from the server and ensures it gets passed into the callbacks in order. - * @param onMessage * @constructor */ export class PacketReceiver { - pendingResponses = []; + pendingResponses: any[] = []; currentResponseNum = 0; closeAfterResponse = -1; - onClose = null; + onClose: (() => void) | null = null; - constructor(private onMessage_: any) { + /** + * @param onMessage_ + */ + constructor(private onMessage_: (a: Object) => void) { } - closeAfter(responseNum, callback) { + closeAfter(responseNum: number, callback: () => void) { this.closeAfterResponse = responseNum; this.onClose = callback; if (this.closeAfterResponse < this.currentResponseNum) { this.onClose(); this.onClose = null; } - }; + } /** * Each message from the server comes with a response number, and an array of data. The responseNumber @@ -47,7 +49,7 @@ export class PacketReceiver { * @param {number} requestNum * @param {Array} data */ - handleResponse(requestNum, data) { + handleResponse(requestNum: number, data: any[]) { this.pendingResponses[requestNum] = data; while (this.pendingResponses[this.currentResponseNum]) { const toProcess = this.pendingResponses[this.currentResponseNum]; @@ -61,7 +63,6 @@ export class PacketReceiver { } if (this.currentResponseNum === this.closeAfterResponse) { if (this.onClose) { - clearTimeout(this.onClose); this.onClose(); this.onClose = null; } diff --git a/src/utils/nodePatches.ts b/src/utils/nodePatches.ts index 12304ffe9f7..9ae92982625 100644 --- a/src/utils/nodePatches.ts +++ b/src/utils/nodePatches.ts @@ -17,7 +17,7 @@ import { CONSTANTS } from "./constants"; import { setWebSocketImpl } from "../database/realtime/WebSocketConnection"; import { setBufferImpl } from "../database/core/util/util"; -import { +import { FirebaseIFrameScriptHolder, FIREBASE_LONGPOLL_COMMAND_CB_NAME, FIREBASE_LONGPOLL_DATA_CB_NAME diff --git a/tests/database/browser/crawler_support.test.ts b/tests/database/browser/crawler_support.test.ts index 87b540dea08..68bcc9fce30 100644 --- a/tests/database/browser/crawler_support.test.ts +++ b/tests/database/browser/crawler_support.test.ts @@ -25,10 +25,10 @@ import { // Some sanity checks for the ReadonlyRestClient crawler support. 
describe('Crawler Support', function() { - var initialData; - var normalRef; - var restRef; - var tokenProvider; + let initialData; + let normalRef; + let restRef; + let tokenProvider; beforeEach(function(done) { normalRef = getRandomNode(); diff --git a/tests/database/compound_write.test.ts b/tests/database/compound_write.test.ts index a1e5f122009..8488c4ae8cc 100644 --- a/tests/database/compound_write.test.ts +++ b/tests/database/compound_write.test.ts @@ -23,10 +23,10 @@ import { nodeFromJSON } from "../../src/database/core/snap/nodeFromJSON"; import { Path } from "../../src/database/core/util/Path"; describe('CompoundWrite Tests', function() { - var LEAF_NODE = nodeFromJSON('leaf-node'); - var PRIO_NODE = nodeFromJSON('prio'); - var CHILDREN_NODE = nodeFromJSON({ 'child-1': 'value-1', 'child-2': 'value-2' }); - var EMPTY_NODE = ChildrenNode.EMPTY_NODE; + const LEAF_NODE = nodeFromJSON('leaf-node'); + const PRIO_NODE = nodeFromJSON('prio'); + const CHILDREN_NODE = nodeFromJSON({ 'child-1': 'value-1', 'child-2': 'value-2' }); + const EMPTY_NODE = ChildrenNode.EMPTY_NODE; function assertNodeGetsCorrectPriority(compoundWrite, node, priority) { if (node.isEmpty()) { @@ -61,133 +61,133 @@ describe('CompoundWrite Tests', function() { }); it('CompoundWrite with root priority update, child write is not empty.', function() { - var compoundWrite = CompoundWrite.Empty.addWrite(new Path('.priority'), PRIO_NODE); + let compoundWrite = CompoundWrite.Empty.addWrite(new Path('.priority'), PRIO_NODE); expect(compoundWrite.childCompoundWrite(new Path('.priority')).isEmpty()).to.be.false; }); it('Applies leaf overwrite', function() { - var compoundWrite = CompoundWrite.Empty; + let compoundWrite = CompoundWrite.Empty; compoundWrite = compoundWrite.addWrite(Path.Empty, LEAF_NODE); expect(compoundWrite.apply(EMPTY_NODE)).to.equal(LEAF_NODE); }); it('Applies children overwrite', function() { - var compoundWrite = CompoundWrite.Empty; - var childNode = EMPTY_NODE.updateImmediateChild('child', LEAF_NODE); + let compoundWrite = CompoundWrite.Empty; + const childNode = EMPTY_NODE.updateImmediateChild('child', LEAF_NODE); compoundWrite = compoundWrite.addWrite(Path.Empty, childNode); expect(compoundWrite.apply(EMPTY_NODE)).to.equal(childNode); }); it('Adds child node', function() { - var compoundWrite = CompoundWrite.Empty; - var expected = EMPTY_NODE.updateImmediateChild('child', LEAF_NODE); + let compoundWrite = CompoundWrite.Empty; + const expected = EMPTY_NODE.updateImmediateChild('child', LEAF_NODE); compoundWrite = compoundWrite.addWrite(new Path('child'), LEAF_NODE); assertNodesEqual(expected, compoundWrite.apply(EMPTY_NODE)); }); it('Adds deep child node', function() { - var compoundWrite = CompoundWrite.Empty; - var path = new Path('deep/deep/node'); - var expected = EMPTY_NODE.updateChild(path, LEAF_NODE); + let compoundWrite = CompoundWrite.Empty; + const path = new Path('deep/deep/node'); + const expected = EMPTY_NODE.updateChild(path, LEAF_NODE); compoundWrite = compoundWrite.addWrite(path, LEAF_NODE); expect(compoundWrite.apply(EMPTY_NODE)).to.deep.equal(expected); }); it('shallow update removes deep update', function() { - var compoundWrite = CompoundWrite.Empty; - var updateOne = nodeFromJSON('new-foo-value'); - var updateTwo = nodeFromJSON('baz-value'); - var updateThree = nodeFromJSON({'foo': 'foo-value', 'bar': 'bar-value' }); + let compoundWrite = CompoundWrite.Empty; + const updateOne = nodeFromJSON('new-foo-value'); + const updateTwo = nodeFromJSON('baz-value'); + const updateThree = 
nodeFromJSON({'foo': 'foo-value', 'bar': 'bar-value' }); compoundWrite = compoundWrite.addWrite(new Path('child-1/foo'), updateOne); compoundWrite = compoundWrite.addWrite(new Path('child-1/baz'), updateTwo); compoundWrite = compoundWrite.addWrite(new Path('child-1'), updateThree); - var expectedChildOne = { + const expectedChildOne = { 'foo': 'foo-value', 'bar': 'bar-value' }; - var expected = CHILDREN_NODE.updateImmediateChild('child-1', + const expected = CHILDREN_NODE.updateImmediateChild('child-1', nodeFromJSON(expectedChildOne)); assertNodesEqual(expected, compoundWrite.apply(CHILDREN_NODE)); }); it('child priority updates empty priority on child write', function() { - var compoundWrite = CompoundWrite.Empty; + let compoundWrite = CompoundWrite.Empty; compoundWrite = compoundWrite.addWrite(new Path('child-1/.priority'), EMPTY_NODE); - var node = new LeafNode('foo', PRIO_NODE); + const node = new LeafNode('foo', PRIO_NODE); assertNodeGetsCorrectPriority(compoundWrite.childCompoundWrite(new Path('child-1')), node, EMPTY_NODE); }); it('deep priority set works on empty node when other set is available', function() { - var compoundWrite = CompoundWrite.Empty; + let compoundWrite = CompoundWrite.Empty; compoundWrite = compoundWrite.addWrite(new Path('foo/.priority'), PRIO_NODE); compoundWrite = compoundWrite.addWrite(new Path('foo/child'), LEAF_NODE); - var node = compoundWrite.apply(EMPTY_NODE); + const node = compoundWrite.apply(EMPTY_NODE); assertNodesEqual(PRIO_NODE, node.getChild(new Path('foo')).getPriority()); }); it('child merge looks into update node', function() { - var compoundWrite = CompoundWrite.Empty; - var update = nodeFromJSON({ 'foo': 'foo-value', 'bar': 'bar-value'}); + let compoundWrite = CompoundWrite.Empty; + const update = nodeFromJSON({ 'foo': 'foo-value', 'bar': 'bar-value'}); compoundWrite = compoundWrite.addWrite(Path.Empty, update); assertNodesEqual(nodeFromJSON('foo-value'), compoundWrite.childCompoundWrite(new Path('foo')).apply(EMPTY_NODE)); }); it('child merge removes node on deeper paths', function() { - var compoundWrite = CompoundWrite.Empty; - var update = nodeFromJSON({ 'foo': 'foo-value', 'bar': 'bar-value' }); + let compoundWrite = CompoundWrite.Empty; + const update = nodeFromJSON({ 'foo': 'foo-value', 'bar': 'bar-value' }); compoundWrite = compoundWrite.addWrite(Path.Empty, update); assertNodesEqual(EMPTY_NODE, compoundWrite.childCompoundWrite(new Path('foo/not/existing')).apply(LEAF_NODE)); }); it('child merge with empty path is same merge', function() { - var compoundWrite = CompoundWrite.Empty; - var update = nodeFromJSON({ 'foo': 'foo-value', 'bar': 'bar-value' }); + let compoundWrite = CompoundWrite.Empty; + const update = nodeFromJSON({ 'foo': 'foo-value', 'bar': 'bar-value' }); compoundWrite = compoundWrite.addWrite(Path.Empty, update); expect(compoundWrite.childCompoundWrite(Path.Empty)).to.equal(compoundWrite); }); it('root update removes root priority', function() { - var compoundWrite = CompoundWrite.Empty; + let compoundWrite = CompoundWrite.Empty; compoundWrite = compoundWrite.addWrite(new Path('.priority'), PRIO_NODE); compoundWrite = compoundWrite.addWrite(Path.Empty, nodeFromJSON('foo')); assertNodesEqual(nodeFromJSON('foo'), compoundWrite.apply(EMPTY_NODE)); }); it('deep update removes priority there', function() { - var compoundWrite = CompoundWrite.Empty; + let compoundWrite = CompoundWrite.Empty; compoundWrite = compoundWrite.addWrite(new Path('foo/.priority'), PRIO_NODE); compoundWrite = compoundWrite.addWrite(new Path('foo'), 
nodeFromJSON('bar')); - var expected = nodeFromJSON({ 'foo': 'bar' }); + const expected = nodeFromJSON({ 'foo': 'bar' }); assertNodesEqual(expected, compoundWrite.apply(EMPTY_NODE)); }); it('adding updates at path works', function() { - var compoundWrite = CompoundWrite.Empty; - var updates = { + let compoundWrite = CompoundWrite.Empty; + const updates = { 'foo': nodeFromJSON('foo-value'), 'bar': nodeFromJSON('bar-value') }; compoundWrite = compoundWrite.addWrites(new Path('child-1'), updates); - var expectedChildOne = { + const expectedChildOne = { 'foo': 'foo-value', 'bar': 'bar-value' }; - var expected = CHILDREN_NODE.updateImmediateChild('child-1', nodeFromJSON(expectedChildOne)); + const expected = CHILDREN_NODE.updateImmediateChild('child-1', nodeFromJSON(expectedChildOne)); assertNodesEqual(expected, compoundWrite.apply(CHILDREN_NODE)); }); it('adding updates at root works', function() { - var compoundWrite = CompoundWrite.Empty; - var updates = { + let compoundWrite = CompoundWrite.Empty; + const updates = { 'child-1': nodeFromJSON('new-value-1'), 'child-2': EMPTY_NODE, 'child-3': nodeFromJSON('value-3') }; compoundWrite = compoundWrite.addWrites(Path.Empty, updates); - var expected = { + const expected = { 'child-1': 'new-value-1', 'child-3': 'value-3' }; @@ -195,57 +195,57 @@ describe('CompoundWrite Tests', function() { }); it('child write of root priority works', function() { - var compoundWrite = CompoundWrite.Empty.addWrite(new Path('.priority'), PRIO_NODE); + let compoundWrite = CompoundWrite.Empty.addWrite(new Path('.priority'), PRIO_NODE); assertNodesEqual(PRIO_NODE, compoundWrite.childCompoundWrite(new Path('.priority')).apply(EMPTY_NODE)); }); it('complete children only returns complete overwrites', function() { - var compoundWrite = CompoundWrite.Empty; + let compoundWrite = CompoundWrite.Empty; compoundWrite = compoundWrite.addWrite(new Path('child-1'), LEAF_NODE); expect(compoundWrite.getCompleteChildren()).to.deep.equal([new NamedNode('child-1', LEAF_NODE)]); }); it('complete children only returns empty overwrites', function() { - var compoundWrite = CompoundWrite.Empty; + let compoundWrite = CompoundWrite.Empty; compoundWrite = compoundWrite.addWrite(new Path('child-1'), EMPTY_NODE); expect(compoundWrite.getCompleteChildren()).to.deep.equal([new NamedNode('child-1', EMPTY_NODE)]); }); it('complete children doesnt return deep overwrites', function() { - var compoundWrite = CompoundWrite.Empty; + let compoundWrite = CompoundWrite.Empty; compoundWrite = compoundWrite.addWrite(new Path('child-1/deep/path'), LEAF_NODE); expect(compoundWrite.getCompleteChildren()).to.deep.equal([]); }); it('complete children return all complete children but no incomplete', function() { - var compoundWrite = CompoundWrite.Empty; + let compoundWrite = CompoundWrite.Empty; compoundWrite = compoundWrite.addWrite(new Path('child-1/deep/path'), LEAF_NODE); compoundWrite = compoundWrite.addWrite(new Path('child-2'), LEAF_NODE); compoundWrite = compoundWrite.addWrite(new Path('child-3'), EMPTY_NODE); - var expected = { + const expected = { 'child-2': LEAF_NODE, 'child-3': EMPTY_NODE }; - var actual = { }; - var completeChildren = compoundWrite.getCompleteChildren(); - for (var i = 0; i < completeChildren.length; i++) { + const actual = { }; + const completeChildren = compoundWrite.getCompleteChildren(); + for (let i = 0; i < completeChildren.length; i++) { actual[completeChildren[i].name] = completeChildren[i].node; } expect(actual).to.deep.equal(expected); }); it('complete children return all 
children for root set', function() { - var compoundWrite = CompoundWrite.Empty; + let compoundWrite = CompoundWrite.Empty; compoundWrite = compoundWrite.addWrite(Path.Empty, CHILDREN_NODE); - var expected = { + const expected = { 'child-1': nodeFromJSON('value-1'), 'child-2': nodeFromJSON('value-2') }; - var actual = { }; - var completeChildren = compoundWrite.getCompleteChildren(); - for (var i = 0; i < completeChildren.length; i++) { + const actual = { }; + const completeChildren = compoundWrite.getCompleteChildren(); + for (let i = 0; i < completeChildren.length; i++) { actual[completeChildren[i].name] = completeChildren[i].node; } expect(actual).to.deep.equal(expected); @@ -256,84 +256,84 @@ describe('CompoundWrite Tests', function() { }); it('compound write with empty root has shadowing write', function() { - var compoundWrite = CompoundWrite.Empty.addWrite(Path.Empty, EMPTY_NODE); + let compoundWrite = CompoundWrite.Empty.addWrite(Path.Empty, EMPTY_NODE); expect(compoundWrite.hasCompleteWrite(Path.Empty)).to.be.true; expect(compoundWrite.hasCompleteWrite(new Path('child'))).to.be.true; }); it('compound write with root has shadowing write', function() { - var compoundWrite = CompoundWrite.Empty.addWrite(Path.Empty, LEAF_NODE); + let compoundWrite = CompoundWrite.Empty.addWrite(Path.Empty, LEAF_NODE); expect(compoundWrite.hasCompleteWrite(Path.Empty)).to.be.true; expect(compoundWrite.hasCompleteWrite(new Path('child'))).to.be.true; }); it('compound write with deep update has shadowing write', function() { - var compoundWrite = CompoundWrite.Empty.addWrite(new Path('deep/update'), LEAF_NODE); + let compoundWrite = CompoundWrite.Empty.addWrite(new Path('deep/update'), LEAF_NODE); expect(compoundWrite.hasCompleteWrite(Path.Empty)).to.be.false; expect(compoundWrite.hasCompleteWrite(new Path('deep'))).to.be.false; expect(compoundWrite.hasCompleteWrite(new Path('deep/update'))).to.be.true; }); it('compound write with priority update has shadowing write', function() { - var compoundWrite = CompoundWrite.Empty.addWrite(new Path('.priority'), PRIO_NODE); + let compoundWrite = CompoundWrite.Empty.addWrite(new Path('.priority'), PRIO_NODE); expect(compoundWrite.hasCompleteWrite(Path.Empty)).to.be.false; expect(compoundWrite.hasCompleteWrite(new Path('.priority'))).to.be.true; }); it('updates can be removed', function() { - var compoundWrite = CompoundWrite.Empty; - var update = nodeFromJSON({ 'foo': 'foo-value', 'bar': 'bar-value' }); + let compoundWrite = CompoundWrite.Empty; + const update = nodeFromJSON({ 'foo': 'foo-value', 'bar': 'bar-value' }); compoundWrite = compoundWrite.addWrite(new Path('child-1'), update); compoundWrite = compoundWrite.removeWrite(new Path('child-1')); assertNodesEqual(CHILDREN_NODE, compoundWrite.apply(CHILDREN_NODE)); }); it('deep removes has no effect on overlaying set', function() { - var compoundWrite = CompoundWrite.Empty; - var updateOne = nodeFromJSON({ 'foo': 'foo-value', 'bar': 'bar-value' }); - var updateTwo = nodeFromJSON('baz-value'); - var updateThree = nodeFromJSON('new-foo-value'); + let compoundWrite = CompoundWrite.Empty; + const updateOne = nodeFromJSON({ 'foo': 'foo-value', 'bar': 'bar-value' }); + const updateTwo = nodeFromJSON('baz-value'); + const updateThree = nodeFromJSON('new-foo-value'); compoundWrite = compoundWrite.addWrite(new Path('child-1'), updateOne); compoundWrite = compoundWrite.addWrite(new Path('child-1/baz'), updateTwo); compoundWrite = compoundWrite.addWrite(new Path('child-1/foo'), updateThree); compoundWrite = 
compoundWrite.removeWrite(new Path('child-1/foo')); - var expectedChildOne = { + const expectedChildOne = { 'foo': 'new-foo-value', 'bar': 'bar-value', 'baz': 'baz-value' }; - var expected = CHILDREN_NODE.updateImmediateChild('child-1', nodeFromJSON(expectedChildOne)); + const expected = CHILDREN_NODE.updateImmediateChild('child-1', nodeFromJSON(expectedChildOne)); assertNodesEqual(expected, compoundWrite.apply(CHILDREN_NODE)); }); it('remove at path without set is without effect', function() { - var compoundWrite = CompoundWrite.Empty; - var updateOne = nodeFromJSON({ 'foo': 'foo-value', 'bar': 'bar-value' }); - var updateTwo = nodeFromJSON('baz-value'); - var updateThree = nodeFromJSON('new-foo-value'); + let compoundWrite = CompoundWrite.Empty; + const updateOne = nodeFromJSON({ 'foo': 'foo-value', 'bar': 'bar-value' }); + const updateTwo = nodeFromJSON('baz-value'); + const updateThree = nodeFromJSON('new-foo-value'); compoundWrite = compoundWrite.addWrite(new Path('child-1'), updateOne); compoundWrite = compoundWrite.addWrite(new Path('child-1/baz'), updateTwo); compoundWrite = compoundWrite.addWrite(new Path('child-1/foo'), updateThree); compoundWrite = compoundWrite.removeWrite(new Path('child-2')); - var expectedChildOne = { + const expectedChildOne = { 'foo': 'new-foo-value', 'bar': 'bar-value', 'baz': 'baz-value' }; - var expected = CHILDREN_NODE.updateImmediateChild('child-1', nodeFromJSON(expectedChildOne)); + const expected = CHILDREN_NODE.updateImmediateChild('child-1', nodeFromJSON(expectedChildOne)); assertNodesEqual(expected, compoundWrite.apply(CHILDREN_NODE)); }); it('can remove priority', function() { - var compoundWrite = CompoundWrite.Empty; + let compoundWrite = CompoundWrite.Empty; compoundWrite = compoundWrite.addWrite(new Path('.priority'), PRIO_NODE); compoundWrite = compoundWrite.removeWrite(new Path('.priority')); assertNodeGetsCorrectPriority(compoundWrite, LEAF_NODE, EMPTY_NODE); }); it('removing only affects removed path', function() { - var compoundWrite = CompoundWrite.Empty; - var updates = { + let compoundWrite = CompoundWrite.Empty; + const updates = { 'child-1': nodeFromJSON('new-value-1'), 'child-2': EMPTY_NODE, 'child-3': nodeFromJSON('value-3') @@ -341,7 +341,7 @@ describe('CompoundWrite Tests', function() { compoundWrite = compoundWrite.addWrites(Path.Empty, updates); compoundWrite = compoundWrite.removeWrite(new Path('child-2')); - var expected = { + const expected = { 'child-1': 'new-value-1', 'child-2': 'value-2', 'child-3': 'value-3' @@ -350,9 +350,9 @@ describe('CompoundWrite Tests', function() { }); it('remove removes all deeper sets', function() { - var compoundWrite = CompoundWrite.Empty; - var updateTwo = nodeFromJSON('baz-value'); - var updateThree = nodeFromJSON('new-foo-value'); + let compoundWrite = CompoundWrite.Empty; + const updateTwo = nodeFromJSON('baz-value'); + const updateThree = nodeFromJSON('new-foo-value'); compoundWrite = compoundWrite.addWrite(new Path('child-1/baz'), updateTwo); compoundWrite = compoundWrite.addWrite(new Path('child-1/foo'), updateThree); compoundWrite = compoundWrite.removeWrite(new Path('child-1')); @@ -360,95 +360,95 @@ describe('CompoundWrite Tests', function() { }); it('remove at root also removes priority', function() { - var compoundWrite = CompoundWrite.Empty; + let compoundWrite = CompoundWrite.Empty; compoundWrite = compoundWrite.addWrite(Path.Empty, new LeafNode('foo', PRIO_NODE)); compoundWrite = compoundWrite.removeWrite(Path.Empty); - var node = nodeFromJSON('value'); + const node = 
nodeFromJSON('value'); assertNodeGetsCorrectPriority(compoundWrite, node, EMPTY_NODE); }); it('updating priority doesnt overwrite leaf node', function() { - var compoundWrite = CompoundWrite.Empty; + let compoundWrite = CompoundWrite.Empty; compoundWrite = compoundWrite.addWrite(Path.Empty, LEAF_NODE); compoundWrite = compoundWrite.addWrite(new Path('child/.priority'), PRIO_NODE); assertNodesEqual(LEAF_NODE, compoundWrite.apply(EMPTY_NODE)); }); it("updating empty node doesn't overwrite leaf node", function() { - var compoundWrite = CompoundWrite.Empty; + let compoundWrite = CompoundWrite.Empty; compoundWrite = compoundWrite.addWrite(Path.Empty, LEAF_NODE); compoundWrite = compoundWrite.addWrite(new Path('child'), EMPTY_NODE); assertNodesEqual(LEAF_NODE, compoundWrite.apply(EMPTY_NODE)); }); it('Overwrites existing child', function() { - var compoundWrite = CompoundWrite.Empty; - var path = new Path('child-1'); + let compoundWrite = CompoundWrite.Empty; + const path = new Path('child-1'); compoundWrite = compoundWrite.addWrite(path, LEAF_NODE); expect(compoundWrite.apply(CHILDREN_NODE)).to.deep.equal(CHILDREN_NODE.updateImmediateChild(path.getFront(), LEAF_NODE)); }); it('Updates existing child', function() { - var compoundWrite = CompoundWrite.Empty; - var path = new Path('child-1/foo'); + let compoundWrite = CompoundWrite.Empty; + const path = new Path('child-1/foo'); compoundWrite = compoundWrite.addWrite(path, LEAF_NODE); expect(compoundWrite.apply(CHILDREN_NODE)).to.deep.equal(CHILDREN_NODE.updateChild(path, LEAF_NODE)); }); it("Doesn't update priority on empty node.", function() { - var compoundWrite = CompoundWrite.Empty; + let compoundWrite = CompoundWrite.Empty; compoundWrite = compoundWrite.addWrite(new Path('.priority'), PRIO_NODE); assertNodeGetsCorrectPriority(compoundWrite, EMPTY_NODE, EMPTY_NODE); }); it('Updates priority on node', function() { - var compoundWrite = CompoundWrite.Empty; + let compoundWrite = CompoundWrite.Empty; compoundWrite = compoundWrite.addWrite(new Path('.priority'), PRIO_NODE); - var node = nodeFromJSON('value'); + const node = nodeFromJSON('value'); assertNodeGetsCorrectPriority(compoundWrite, node, PRIO_NODE); }); it('Updates priority of child', function() { - var compoundWrite = CompoundWrite.Empty; - var path = new Path('child-1/.priority'); + let compoundWrite = CompoundWrite.Empty; + const path = new Path('child-1/.priority'); compoundWrite = compoundWrite.addWrite(path, PRIO_NODE); assertNodesEqual(CHILDREN_NODE.updateChild(path, PRIO_NODE), compoundWrite.apply(CHILDREN_NODE)); }); it("Doesn't update priority of nonexistent child.", function() { - var compoundWrite = CompoundWrite.Empty; - var path = new Path('child-3/.priority'); + let compoundWrite = CompoundWrite.Empty; + const path = new Path('child-3/.priority'); compoundWrite = compoundWrite.addWrite(path, PRIO_NODE); assertNodesEqual(CHILDREN_NODE, compoundWrite.apply(CHILDREN_NODE)); }); it('Deep update existing updates', function() { - var compoundWrite = CompoundWrite.Empty; - var updateOne = nodeFromJSON({ 'foo': 'foo-value', 'bar': 'bar-value' }); - var updateTwo = nodeFromJSON('baz-value'); - var updateThree = nodeFromJSON('new-foo-value'); + let compoundWrite = CompoundWrite.Empty; + const updateOne = nodeFromJSON({ 'foo': 'foo-value', 'bar': 'bar-value' }); + const updateTwo = nodeFromJSON('baz-value'); + const updateThree = nodeFromJSON('new-foo-value'); compoundWrite = compoundWrite.addWrite(new Path('child-1'), updateOne); compoundWrite = compoundWrite.addWrite(new 
Path('child-1/baz'), updateTwo); compoundWrite = compoundWrite.addWrite(new Path('child-1/foo'), updateThree); - var expectedChildOne = { + const expectedChildOne = { 'foo': 'new-foo-value', 'bar': 'bar-value', 'baz': 'baz-value' }; - var expected = CHILDREN_NODE.updateImmediateChild('child-1', nodeFromJSON(expectedChildOne)); + const expected = CHILDREN_NODE.updateImmediateChild('child-1', nodeFromJSON(expectedChildOne)); assertNodesEqual(expected, compoundWrite.apply(CHILDREN_NODE)); }); it("child priority doesn't update empty node priority on child merge", function() { - var compoundWrite = CompoundWrite.Empty; + let compoundWrite = CompoundWrite.Empty; compoundWrite = compoundWrite.addWrite(new Path('child-1/.priority'), PRIO_NODE); assertNodeGetsCorrectPriority(compoundWrite.childCompoundWrite(new Path('child-1')), EMPTY_NODE, EMPTY_NODE); }); it('Child priority updates priority on child write', function() { - var compoundWrite = CompoundWrite.Empty; + let compoundWrite = CompoundWrite.Empty; compoundWrite = compoundWrite.addWrite(new Path('child-1/.priority'), PRIO_NODE); - var node = nodeFromJSON('value'); + const node = nodeFromJSON('value'); assertNodeGetsCorrectPriority(compoundWrite.childCompoundWrite(new Path('child-1')), node, PRIO_NODE); }); }); \ No newline at end of file diff --git a/tests/database/database.test.ts b/tests/database/database.test.ts index 6fb992f88a8..5f42373f266 100644 --- a/tests/database/database.test.ts +++ b/tests/database/database.test.ts @@ -23,7 +23,7 @@ import { import "../../src/database"; describe('Database Tests', function() { - var defaultApp; + let defaultApp; beforeEach(function() { defaultApp = firebase.initializeApp({databaseURL: TEST_PROJECT.databaseURL}); @@ -35,74 +35,74 @@ describe('Database Tests', function() { }); it('Can get database.', function() { - var db = firebase.database(); + const db = firebase.database(); expect(db).to.not.be.undefined; expect(db).not.to.be.null; }); it('Illegal to call constructor', function() { expect(function() { - var db = new firebase.database.Database('url'); + const db = new firebase.database.Database('url'); }).to.throw(/don't call new Database/i); }); it('Can get app', function() { - var db = firebase.database(); + const db = firebase.database(); expect(db.app).to.not.be.undefined; expect((db.app as any) instanceof firebase.app.App); }); it('Can get root ref', function() { - var db = firebase.database(); + const db = firebase.database(); - var ref = db.ref(); + const ref = db.ref(); expect(ref instanceof firebase.database.Reference).to.be.true; expect(ref.key).to.be.null; }); it('Can get child ref', function() { - var db = firebase.database(); + const db = firebase.database(); - var ref = db.ref('child'); + const ref = db.ref('child'); expect(ref instanceof firebase.database.Reference).to.be.true; expect(ref.key).to.equal('child'); }); it('Can get deep child ref', function() { - var db = firebase.database(); + const db = firebase.database(); - var ref = db.ref('child/grand-child'); + const ref = db.ref('child/grand-child'); expect(ref instanceof firebase.database.Reference).to.be.true; expect(ref.key).to.equal('grand-child'); }); it('ref() validates arguments', function() { - var db = firebase.database(); + const db = firebase.database(); expect(function() { - var ref = (db as any).ref('path', 'extra'); + const ref = (db as any).ref('path', 'extra'); }).to.throw(/Expects no more than 1/); }); it('Can get refFromURL()', function() { - var db = firebase.database(); - var ref = 
db.refFromURL(TEST_PROJECT.databaseURL + '/path/to/data'); + const db = firebase.database(); + const ref = db.refFromURL(TEST_PROJECT.databaseURL + '/path/to/data'); expect(ref.key).to.equal('data'); }); it('refFromURL() validates domain', function() { - var db = firebase.database(); + const db = firebase.database(); expect(function() { - var ref = db.refFromURL('https://thisisnotarealfirebase.firebaseio.com/path/to/data'); + const ref = db.refFromURL('https://thisisnotarealfirebase.firebaseio.com/path/to/data'); }).to.throw(/does not match.*database/i); }); it('refFromURL() validates argument', function() { - var db = firebase.database(); + const db = firebase.database(); expect(function() { - var ref = (db as any).refFromURL(); + const ref = (db as any).refFromURL(); }).to.throw(/Expects at least 1/); }); }); diff --git a/tests/database/datasnapshot.test.ts b/tests/database/datasnapshot.test.ts index 517fd6f3106..13165c82c78 100644 --- a/tests/database/datasnapshot.test.ts +++ b/tests/database/datasnapshot.test.ts @@ -23,13 +23,13 @@ import { Reference } from "../../src/database/api/Reference"; describe("DataSnapshot Tests", function () { /** @return {!DataSnapshot} */ - var snapshotForJSON = function(json) { - var dummyRef = getRandomNode(); + const snapshotForJSON = function(json) { + const dummyRef = getRandomNode(); return new DataSnapshot(nodeFromJSON(json), dummyRef, PRIORITY_INDEX); }; it("DataSnapshot.hasChildren() works.", function() { - var snap = snapshotForJSON({}); + let snap = snapshotForJSON({}); expect(snap.hasChildren()).to.equal(false); snap = snapshotForJSON(5); @@ -40,7 +40,7 @@ describe("DataSnapshot Tests", function () { }); it("DataSnapshot.exists() works.", function() { - var snap = snapshotForJSON({}); + let snap = snapshotForJSON({}); expect(snap.exists()).to.equal(false); snap = snapshotForJSON({ '.priority':1 }); @@ -60,13 +60,13 @@ describe("DataSnapshot Tests", function () { }); it("DataSnapshot.val() works.", function() { - var snap = snapshotForJSON(5); + let snap = snapshotForJSON(5); expect(snap.val()).to.equal(5); snap = snapshotForJSON({ }); expect(snap.val()).to.equal(null); - var json = + const json = { x: 5, y: { @@ -80,7 +80,7 @@ describe("DataSnapshot Tests", function () { }); it("DataSnapshot.child() works.", function() { - var snap = snapshotForJSON({x: 5, y: { yy: 3, yz: 4}}); + const snap = snapshotForJSON({x: 5, y: { yy: 3, yz: 4}}); expect(snap.child('x').val()).to.equal(5); expect(snap.child('y').val()).to.deep.equal({yy: 3, yz: 4}); expect(snap.child('y').child('yy').val()).to.equal(3); @@ -91,7 +91,7 @@ describe("DataSnapshot Tests", function () { }); it("DataSnapshot.hasChild() works.", function() { - var snap = snapshotForJSON({x: 5, y: { yy: 3, yz: 4}}); + const snap = snapshotForJSON({x: 5, y: { yy: 3, yz: 4}}); expect(snap.hasChild('x')).to.equal(true); expect(snap.hasChild('y/yy')).to.equal(true); expect(snap.hasChild('dinosaur')).to.equal(false); @@ -100,7 +100,7 @@ describe("DataSnapshot Tests", function () { }); it("DataSnapshot.key works.", function() { - var snap = snapshotForJSON({a: { b: { c: 5 }}}); + const snap = snapshotForJSON({a: { b: { c: 5 }}}); expect(snap.child('a').key).to.equal('a'); expect(snap.child('a/b/c').key).to.equal('c'); expect(snap.child('/a/b/c/').key).to.equal('c'); @@ -112,8 +112,8 @@ describe("DataSnapshot Tests", function () { }); it("DataSnapshot.forEach() works: no priorities.", function() { - var snap = snapshotForJSON({a: 1, z: 26, m: 13, n: 14, c: 3, b: 2, e: 5}); - var out = ''; + const snap = 
snapshotForJSON({a: 1, z: 26, m: 13, n: 14, c: 3, b: 2, e: 5}); + let out = ''; snap.forEach(function(child) { out = out + child.key + ':' + child.val() + ':'; }); @@ -122,7 +122,7 @@ describe("DataSnapshot Tests", function () { }); it("DataSnapshot.forEach() works: numeric priorities.", function() { - var snap = snapshotForJSON({ + const snap = snapshotForJSON({ a: {'.value': 1, '.priority': 26}, z: {'.value': 26, '.priority': 1}, m: {'.value': 13, '.priority': 14}, @@ -131,7 +131,7 @@ describe("DataSnapshot Tests", function () { b: {'.value': 2, '.priority': 25}, e: {'.value': 5, '.priority': 22}}); - var out = ''; + let out = ''; snap.forEach(function(child) { out = out + child.key + ':' + child.val() + ':'; }); @@ -140,7 +140,7 @@ describe("DataSnapshot Tests", function () { }); it("DataSnapshot.forEach() works: numeric priorities as strings.", function() { - var snap = snapshotForJSON({ + const snap = snapshotForJSON({ a: {'.value': 1, '.priority': '26'}, z: {'.value': 26, '.priority': '1'}, m: {'.value': 13, '.priority': '14'}, @@ -149,7 +149,7 @@ describe("DataSnapshot Tests", function () { b: {'.value': 2, '.priority': '25'}, e: {'.value': 5, '.priority': '22'}}); - var out = ''; + let out = ''; snap.forEach(function(child) { out = out + child.key + ':' + child.val() + ':'; }); @@ -158,7 +158,7 @@ describe("DataSnapshot Tests", function () { }); it("DataSnapshot.forEach() works: alpha priorities.", function() { - var snap = snapshotForJSON({ + const snap = snapshotForJSON({ a: {'.value': 1, '.priority': 'first'}, z: {'.value': 26, '.priority': 'second'}, m: {'.value': 13, '.priority': 'third'}, @@ -167,7 +167,7 @@ describe("DataSnapshot Tests", function () { b: {'.value': 2, '.priority': 'sixth'}, e: {'.value': 5, '.priority': 'seventh'}}); - var out = ''; + let out = ''; snap.forEach(function(child) { out = out + child.key + ':' + child.val() + ':'; }); @@ -176,7 +176,7 @@ describe("DataSnapshot Tests", function () { }); it("DataSnapshot.foreach() works: mixed alpha and numeric priorities", function() { - var json = { + const json = { "alpha42": {'.value': 1, '.priority': "zed" }, "noPriorityC": {'.value': 1, '.priority': null }, "num41": {'.value': 1, '.priority': 500 }, @@ -197,8 +197,8 @@ describe("DataSnapshot Tests", function () { "alpha40": {'.value': 1, '.priority': "zed" }, "num40": {'.value': 1, '.priority': 500 } }; - var snap = snapshotForJSON(json); - var out = ''; + const snap = snapshotForJSON(json); + let out = ''; snap.forEach(function(child) { out = out + child.key + ', '; }); @@ -207,19 +207,19 @@ describe("DataSnapshot Tests", function () { }); it(".val() exports array-like data as arrays.", function() { - var array = ['bob', 'and', 'becky', 'seem', 'really', 'nice', 'yeah?']; - var snap = snapshotForJSON(array); - var snapVal = snap.val(); + const array = ['bob', 'and', 'becky', 'seem', 'really', 'nice', 'yeah?']; + const snap = snapshotForJSON(array); + const snapVal = snap.val(); expect(snapVal).to.deep.equal(array); expect(snapVal instanceof Array).to.equal(true); // to.equal doesn't verify type. 
}); it("DataSnapshot can be JSON serialized", function() { - var json = { + const json = { "foo": "bar", ".priority": 1 }; - var snap = snapshotForJSON(json); + const snap = snapshotForJSON(json); expect(JSON.parse(JSON.stringify(snap))).to.deep.equal(json); }); }); diff --git a/tests/database/helpers/EventAccumulator.ts b/tests/database/helpers/EventAccumulator.ts index 7a3c5d3e60e..cabb035cb0b 100644 --- a/tests/database/helpers/EventAccumulator.ts +++ b/tests/database/helpers/EventAccumulator.ts @@ -18,12 +18,12 @@ export const EventAccumulatorFactory = { waitsForCount: maxCount => { let count = 0; const condition = () => ea.eventData.length >= count; - const ea = new EventAccumulator(condition) + const ea = new EventAccumulator(condition); ea.onReset(() => { count = 0; }); ea.onEvent(() => { count++; }); return ea; } -} +}; export class EventAccumulator { public eventData = []; diff --git a/tests/database/helpers/events.ts b/tests/database/helpers/events.ts index c6444f43fbb..fa4f9d07909 100644 --- a/tests/database/helpers/events.ts +++ b/tests/database/helpers/events.ts @@ -15,6 +15,7 @@ */ import { TEST_PROJECT } from "./util"; +import { Reference } from '../../../src/database/api/Reference'; /** * A set of functions to clean up event handlers. @@ -25,25 +26,25 @@ export let eventCleanupHandlers = []; /** Clean up outstanding event handlers */ export function eventCleanup() { - for (var i = 0; i < eventCleanupHandlers.length; ++i) { + for (let i = 0; i < eventCleanupHandlers.length; ++i) { eventCleanupHandlers[i](); } eventCleanupHandlers = []; -}; +} /** * The path component of the firebaseRef url - * @param {Firebase} firebaseRef + * @param {Reference} firebaseRef * @return {string} */ -function rawPath(firebaseRef) { +function rawPath(firebaseRef: Reference) { return firebaseRef.toString().replace(TEST_PROJECT.databaseURL, ''); -}; +} /** * Creates a struct which waits for many events. * @param {Array} pathAndEvents an array of tuples of [Firebase, [event type strings]] - * @param {string=} opt_helperName + * @param {string=} helperName * @return {{waiter: waiter, watchesInitializedWaiter: watchesInitializedWaiter, unregister: unregister, addExpectedEvents: addExpectedEvents}} */ export function eventTestHelper(pathAndEvents, helperName?) { @@ -57,19 +58,19 @@ export function eventTestHelper(pathAndEvents, helperName?) { resolveInit = pResolve; rejectInit = pReject; }); - var expectedPathAndEvents = []; - var actualPathAndEvents = []; - var pathEventListeners = {}; - var initializationEvents = 0; + const expectedPathAndEvents = []; + const actualPathAndEvents = []; + const pathEventListeners = {}; + let initializationEvents = 0; helperName = helperName ? helperName + ': ' : ''; // Listen on all of the required paths, with a callback function that just // appends to actualPathAndEvents. - var make_eventCallback = function(type) { + const make_eventCallback = function(type) { return function(snap) { // Get the ref of where the snapshot came from. - var ref = type === 'value' ? snap.ref : snap.ref.parent; + const ref = type === 'value' ? snap.ref : snap.ref.parent; actualPathAndEvents.push([rawPath(ref), [type, snap.key]]); @@ -98,15 +99,15 @@ export function eventTestHelper(pathAndEvents, helperName?) { // in the correct order. If anything is wrong (too many events or // incorrect events, we throw). Else we return false, indicating we should // keep waiting. 
- var waiter = function() { - var pathAndEventToString = function(pathAndEvent) { + const waiter = function() { + const pathAndEventToString = function(pathAndEvent) { return '{path: ' + pathAndEvent[0] + ', event:[' + pathAndEvent[1][0] + ', ' + pathAndEvent[1][1] + ']}'; }; - var i = 0; + let i = 0; while (i < expectedPathAndEvents.length && i < actualPathAndEvents.length) { - var expected = expectedPathAndEvents[i]; - var actual = actualPathAndEvents[i]; + const expected = expectedPathAndEvents[i]; + const actual = actualPathAndEvents[i]; if (expected[0] != actual[0] || expected[1][0] != actual[1][0] || expected[1][1] != actual[1][1]) { throw helperName + 'Event ' + i + ' incorrect. Expected: ' + pathAndEventToString(expected) + @@ -124,12 +125,12 @@ export function eventTestHelper(pathAndEvents, helperName?) { return expectedPathAndEvents.length == actualPathAndEvents.length; }; - var listenOnPath = function(path) { - var valueCB = make_eventCallback('value'); - var addedCB = make_eventCallback('child_added'); - var removedCB = make_eventCallback('child_removed'); - var movedCB = make_eventCallback('child_moved'); - var changedCB = make_eventCallback('child_changed'); + const listenOnPath = function(path) { + const valueCB = make_eventCallback('value'); + const addedCB = make_eventCallback('child_added'); + const removedCB = make_eventCallback('child_removed'); + const movedCB = make_eventCallback('child_moved'); + const changedCB = make_eventCallback('child_changed'); path.on('child_removed', removedCB); path.on('child_added', addedCB); path.on('child_moved', movedCB); @@ -145,13 +146,13 @@ export function eventTestHelper(pathAndEvents, helperName?) { }; - var addExpectedEvents = function(pathAndEvents) { - var pathsToListenOn = []; - for (var i = 0; i < pathAndEvents.length; i++) { + const addExpectedEvents = function(pathAndEvents) { + const pathsToListenOn = []; + for (let i = 0; i < pathAndEvents.length; i++) { - var pathAndEvent = pathAndEvents[i]; + const pathAndEvent = pathAndEvents[i]; - var path = pathAndEvent[0]; + const path = pathAndEvent[0]; //var event = pathAndEvent[1]; pathsToListenOn.push(path); @@ -177,8 +178,8 @@ export function eventTestHelper(pathAndEvents, helperName?) { // To mitigate this, we re-order your event registrations and do them in order of shortest path to longest. pathsToListenOn.sort(function(a, b) { return a.toString().length - b.toString().length; }); - for (i = 0; i < pathsToListenOn.length; i++) { - path = pathsToListenOn[i]; + for (let i = 0; i < pathsToListenOn.length; i++) { + let path = pathsToListenOn[i]; if (!pathEventListeners[path.toString()]) { pathEventListeners[path.toString()] = { }; pathEventListeners[path.toString()].initialized = false; @@ -194,8 +195,8 @@ export function eventTestHelper(pathAndEvents, helperName?) { addExpectedEvents(pathAndEvents); - var watchesInitializedWaiter = function() { - for (var path in pathEventListeners) { + const watchesInitializedWaiter = function() { + for (let path in pathEventListeners) { if (!pathEventListeners[path].initialized) return false; } @@ -208,8 +209,8 @@ export function eventTestHelper(pathAndEvents, helperName?) { return true; }; - var unregister = function() { - for (var path in pathEventListeners) { + const unregister = function() { + for (let path in pathEventListeners) { if (pathEventListeners.hasOwnProperty(path)) { pathEventListeners[path].unlisten(); } @@ -228,4 +229,4 @@ export function eventTestHelper(pathAndEvents, helperName?)
{ addExpectedEvents(moreEvents); } }; -}; \ No newline at end of file +} \ No newline at end of file diff --git a/tests/database/helpers/util.ts b/tests/database/helpers/util.ts index b86ebcaf5e5..fa2f2c08e18 100644 --- a/tests/database/helpers/util.ts +++ b/tests/database/helpers/util.ts @@ -19,17 +19,16 @@ import firebase from "../../../src/app"; import '../../../src/database'; import { Reference } from "../../../src/database/api/Reference"; import { Query } from "../../../src/database/api/Query"; -import { expect } from "chai"; import { ConnectionTarget } from "../../../src/database/api/test_access"; export const TEST_PROJECT = require('../../config/project.json'); -var qs = {}; +const qs = {}; if ('location' in this) { - var search = (this.location.search.substr(1) || '').split('&'); - for (var i = 0; i < search.length; ++i) { - var parts = search[i].split('='); + const search = (this.location.search.substr(1) || '').split('&'); + for (let i = 0; i < search.length; ++i) { + const parts = search[i].split('='); qs[parts[0]] = parts[1] || true; // support for foo= } } @@ -42,7 +41,7 @@ let numDatabases = 0; * @return {!FirebaseApp} */ export function patchFakeAuthFunctions(app) { - var token_ = null; + const token_ = null; app['INTERNAL'] = app['INTERNAL'] || {}; @@ -57,24 +56,21 @@ export function patchFakeAuthFunctions(app) { }; return app; -}; +} /** * Gets or creates a root node to the test namespace. All calls sharing the * value of opt_i will share an app context. - * @param {=} opt_i - * @param {string=} opt_ref - * @return {Firebase} + * @param {number=} i + * @param {string=} ref + * @return {Reference} */ -export function getRootNode(i?, ref?) { - if (i === undefined) { - i = 0; - } +export function getRootNode(i = 0, ref?: string) { if (i + 1 > numDatabases) { numDatabases = i + 1; } - var app; - var db; + let app; + let db; try { app = firebase.app("TEST-" + i); } catch(e) { @@ -83,23 +79,23 @@ export function getRootNode(i?, ref?) { } db = app.database(); return db.ref(ref); -}; +} /** * Create multiple refs to the same top level * push key - each on its own Firebase.Context. - * @param {int=} opt_numNodes - * @return {Firebase|Array} + * @param {int=} numNodes + * @return {Reference|Array} */ export function getRandomNode(numNodes?): Reference | Array { if (numNodes === undefined) { return getRandomNode(1)[0]; } - var child; - var nodeList = []; - for (var i = 0; i < numNodes; i++) { - var ref = getRootNode(i); + let child; + const nodeList = []; + for (let i = 0; i < numNodes; i++) { + const ref = getRootNode(i); if (child === undefined) { child = ref.push().key; } @@ -108,7 +104,7 @@ export function getRandomNode(numNodes?): Reference | Array { } return >nodeList; -}; +} export function getQueryValue(query: Query) { return query.once('value').then(snap => snap.val()); } @@ -124,21 +120,20 @@ export function getPath(query: Query) { return query.toString().replace(TEST_PROJECT.databaseURL, ''); } -export function shuffle(arr, randFn?)
{ - var randFn = randFn || Math.random; - for (var i = arr.length - 1;i > 0;i--) { - var j = Math.floor(randFn() * (i + 1)); - var tmp = arr[i]; +export function shuffle(arr, randFn = Math.random) { + for (let i = arr.length - 1;i > 0;i--) { + const j = Math.floor(randFn() * (i + 1)); + const tmp = arr[i]; arr[i] = arr[j]; arr[j] = tmp; } } export function testAuthTokenProvider(app) { - var token_ = null; - var nextToken_ = null; - var hasNextToken_ = false; - var listeners_ = []; + let token_ = null; + let nextToken_ = null; + let hasNextToken_ = false; + const listeners_ = []; app['INTERNAL'] = app['INTERNAL'] || {}; @@ -151,9 +146,9 @@ export function testAuthTokenProvider(app) { }; app['INTERNAL']['addAuthTokenListener'] = function(listener) { - var token = token_; + const token = token_; listeners_.push(listener); - var async = Promise.resolve(); + const async = Promise.resolve(); async.then(function() { listener(token) }); @@ -166,8 +161,8 @@ export function testAuthTokenProvider(app) { return { setToken: function(token) { token_ = token; - var async = Promise.resolve(); - for (var i = 0; i < listeners_.length; i++) { + const async = Promise.resolve(); + for (let i = 0; i < listeners_.length; i++) { async.then((function(idx) { return function() { listeners_[idx](token); @@ -189,46 +184,46 @@ let freshRepoId = 1; const activeFreshApps = []; export function getFreshRepo(url, path?) { - var app = firebase.initializeApp({databaseURL: url}, 'ISOLATED_REPO_' + freshRepoId++); + const app = firebase.initializeApp({databaseURL: url}, 'ISOLATED_REPO_' + freshRepoId++); patchFakeAuthFunctions(app); activeFreshApps.push(app); return app.database().ref(path); } export function getFreshRepoFromReference(ref) { - var host = ref.root.toString(); - var path = ref.toString().replace(host, ''); + const host = ref.root.toString(); + const path = ref.toString().replace(host, ''); return getFreshRepo(host, path); } // Little helpers to get the currently cached snapshot / value. export function getSnap(path) { - var snap; - var callback = function(snapshot) { snap = snapshot; }; + let snap; + const callback = function(snapshot) { snap = snapshot; }; path.once('value', callback); return snap; -}; +} export function getVal(path) { - var snap = getSnap(path); + const snap = getSnap(path); return snap ? snap.val() : undefined; -}; +} export function canCreateExtraConnections() { return globalScope.MozWebSocket || globalScope.WebSocket; -}; +} export function buildObjFromKey(key) { - var keys = key.split('.'); - var obj = {}; - var parent = obj; - for (var i = 0; i < keys.length; i++) { - var key = keys[i]; + const keys = key.split('.'); + const obj = {}; + let parent = obj; + for (let i = 0; i < keys.length; i++) { + const key = keys[i]; parent[key] = i < keys.length - 1 ? {} : 'test_value'; parent = parent[key]; } return obj; -}; +} export function testRepoInfo(url) { const regex = /https?:\/\/(.*).firebaseio.com/; diff --git a/tests/database/info.test.ts b/tests/database/info.test.ts index cc92fb5be15..cd807d02970 100644 --- a/tests/database/info.test.ts +++ b/tests/database/info.test.ts @@ -30,20 +30,21 @@ import { EventAccumulator } from "./helpers/EventAccumulator"; * but I want to leave the test here for when we can refactor * to remove the prod firebase dependency. 
*/ -declare var runs; -declare var waitsFor; -declare var TEST_ALT_NAMESPACE; -declare var TEST_NAMESPACE; +declare const runs; +declare const waitsFor; +declare const TEST_ALT_NAMESPACE; +declare const TEST_NAMESPACE; describe(".info Tests", function () { + this.timeout(3000); it("Can get a reference to .info nodes.", function() { - var f = (getRootNode() as Reference); + const f = (getRootNode() as Reference); expect(getPath(f.child('.info'))).to.equal('/.info'); expect(getPath(f.child('.info/foo'))).to.equal('/.info/foo'); }); it("Can't write to .info", function() { - var f = (getRootNode() as Reference).child('.info'); + const f = (getRootNode() as Reference).child('.info'); expect(function() {f.set('hi');}).to.throw; expect(function() {f.setWithPriority('hi', 5);}).to.throw; expect(function() {f.setPriority('hi');}).to.throw; @@ -52,13 +53,13 @@ describe(".info Tests", function () { expect(function() {f.remove();}).to.throw; expect(function() {f.child('test').set('hi');}).to.throw; - var f2 = f.child('foo/baz'); + const f2 = f.child('foo/baz'); expect(function() {f2.set('hi');}).to.throw; }); it("Can watch .info/connected.", function() { return new Promise(resolve => { - var f = (getRandomNode() as Reference).root; + const f = (getRandomNode() as Reference).root; f.child('.info/connected').on('value', function(snap) { if (snap.val() === true) resolve(); }); @@ -67,9 +68,9 @@ describe(".info Tests", function () { it('.info/connected correctly goes to false when disconnected.', async function() { - var f = (getRandomNode() as Reference).root; - var everConnected = false; - var connectHistory = ''; + const f = (getRandomNode() as Reference).root; + let everConnected = false; + let connectHistory = ''; const ea = new EventAccumulator(() => everConnected); f.child('.info/connected').on('value', function(snap) { @@ -93,12 +94,12 @@ describe(".info Tests", function () { // Skipping this test as it is expecting a server time diff from a // local Firebase it.skip(".info/serverTimeOffset", async function() { - var ref = (getRootNode() as Reference); + const ref = (getRootNode() as Reference); // make sure push works - var child = ref.push(); + const child = ref.push(); - var offsets = []; + const offsets = []; const ea = new EventAccumulator(() => offsets.length === 1); @@ -118,14 +119,14 @@ describe(".info Tests", function () { }); it.skip("database.goOffline() / database.goOnline() connection management", function() { - var ref = getFreshRepo(TEST_NAMESPACE); - var refAlt = getFreshRepo(TEST_ALT_NAMESPACE); - var ready; + const ref = getFreshRepo(TEST_NAMESPACE); + const refAlt = getFreshRepo(TEST_ALT_NAMESPACE); + let ready; // Wait until we're connected to both Firebases runs(function() { ready = 0; - var eventHandler = function(snap) { + const eventHandler = function(snap) { if (snap.val() === true) { snap.ref.off(); ready += 1; @@ -144,10 +145,10 @@ describe(".info Tests", function () { // Ensure we're disconnected from both Firebases runs(function() { ready = 0; - var eventHandler = function(snap) { + const eventHandler = function(snap) { expect(snap.val() === false); ready += 1; - } + }; ref.child(".info/connected").once("value", eventHandler); refAlt.child(".info/connected").once("value", eventHandler); }); @@ -156,7 +157,7 @@ describe(".info Tests", function () { // Ensure that we don't automatically reconnect upon Reference creation runs(function() { ready = 0; - var refDup = ref.database.ref(); + const refDup = ref.database.ref(); refDup.child(".info/connected").on("value", 
function(snap) { ready = (snap.val() === true) || ready; }); @@ -176,7 +177,7 @@ describe(".info Tests", function () { // Ensure we're connected to both Firebases runs(function() { ready = 0; - var eventHandler = function(snap) { + const eventHandler = function(snap) { if (snap.val() === true) { snap.ref.off(); ready += 1; diff --git a/tests/database/node.test.ts b/tests/database/node.test.ts index 5cd66f57adc..480993d3edd 100644 --- a/tests/database/node.test.ts +++ b/tests/database/node.test.ts @@ -23,12 +23,13 @@ import { SortedMap } from "../../src/database/core/util/SortedMap"; import { ChildrenNode } from "../../src/database/core/snap/ChildrenNode"; import { NAME_COMPARATOR } from "../../src/database/core/snap/comparators"; import { nodeFromJSON } from "../../src/database/core/snap/nodeFromJSON"; +import { Node } from '../../src/database/core/snap/Node'; describe('Node Tests', function() { - var DEFAULT_INDEX = PRIORITY_INDEX; + const DEFAULT_INDEX = PRIORITY_INDEX; it('Create leaf nodes of various types.', function() { - var x = new LeafNode(5, new LeafNode(42)); + let x = new LeafNode(5, new LeafNode(42)); expect(x.getValue()).to.equal(5); expect(x.getPriority().val()).to.equal(42); expect(x.isLeafNode()).to.equal(true); @@ -40,68 +41,68 @@ describe('Node Tests', function() { }); it("LeafNode.updatePriority returns a new leaf node without changing the old.", function() { - var x = new LeafNode("test", new LeafNode(42)); - var y = x.updatePriority(new LeafNode(187)); + const x = new LeafNode("test", new LeafNode(42)); + const y = x.updatePriority(new LeafNode(187)); // old node is the same. expect(x.getValue()).to.equal("test"); expect(x.getPriority().val()).to.equal(42); // new node has the new priority but the old value. - expect(y.getValue()).to.equal("test"); + expect((y as any).getValue()).to.equal("test"); expect(y.getPriority().val()).to.equal(187); }); it("LeafNode.updateImmediateChild returns a new children node.", function() { - var x = new LeafNode("test", new LeafNode(42)); - var y = x.updateImmediateChild('test', new LeafNode("foo")); + const x = new LeafNode("test", new LeafNode(42)); + const y = x.updateImmediateChild('test', new LeafNode("foo")); expect(y.isLeafNode()).to.equal(false); expect(y.getPriority().val()).to.equal(42); - expect(y.getImmediateChild('test').getValue()).to.equal('foo'); + expect((y.getImmediateChild('test') as LeafNode).getValue()).to.equal('foo'); }); it("LeafNode.getImmediateChild returns an empty node.", function() { - var x = new LeafNode("test"); + const x = new LeafNode("test"); expect(x.getImmediateChild('foo')).to.equal(ChildrenNode.EMPTY_NODE); }); it("LeafNode.getChild returns an empty node.", function() { - var x = new LeafNode('test'); + const x = new LeafNode('test'); expect(x.getChild(new Path('foo/bar'))).to.equal(ChildrenNode.EMPTY_NODE); }); it('ChildrenNode.updatePriority returns a new internal node without changing the old.', function() { - var x = ChildrenNode.EMPTY_NODE.updateImmediateChild("child", new LeafNode(5)); - var children = x.children_; - var y = x.updatePriority(new LeafNode(17)); - expect(y.children_).to.equal(x.children_); - expect(x.children_).to.equal(children); + const x = ChildrenNode.EMPTY_NODE.updateImmediateChild("child", new LeafNode(5)); + const children = (x as any).children_; + const y = x.updatePriority(new LeafNode(17)); + expect((y as any).children_).to.equal((x as any).children_); + expect((x as any).children_).to.equal(children); expect(x.getPriority().val()).to.equal(null); 
expect(y.getPriority().val()).to.equal(17); }); it('ChildrenNode.updateImmediateChild returns a new internal node with the new child, without changing the old.', function() { - var children = new SortedMap(NAME_COMPARATOR); - var x = new ChildrenNode(children, ChildrenNode.EMPTY_NODE, IndexMap.Default); - var newValue = new LeafNode('new value'); - var y = x.updateImmediateChild('test', newValue); - expect(x.children_).to.equal(children); - expect(y.children_.get('test')).to.equal(newValue); + const children = new SortedMap(NAME_COMPARATOR); + const x = new ChildrenNode(children, ChildrenNode.EMPTY_NODE, IndexMap.Default); + const newValue = new LeafNode('new value'); + const y = x.updateImmediateChild('test', newValue); + expect((x as any).children_).to.equal(children); + expect((y as any).children_.get('test')).to.equal(newValue); }); it("ChildrenNode.updateChild returns a new internal node with the new child, without changing the old.", function() { - var children = new SortedMap(NAME_COMPARATOR); - var x = new ChildrenNode(children, ChildrenNode.EMPTY_NODE, IndexMap.Default); - var newValue = new LeafNode("new value"); - var y = x.updateChild(new Path('test/foo'), newValue); - expect(x.children_).to.equal(children); + const children = new SortedMap(NAME_COMPARATOR); + const x = new ChildrenNode(children, ChildrenNode.EMPTY_NODE, IndexMap.Default); + const newValue = new LeafNode("new value"); + const y = x.updateChild(new Path('test/foo'), newValue); + expect((x as any).children_).to.equal(children); expect(y.getChild(new Path('test/foo'))).to.equal(newValue); }); it("Node.hash() works correctly.", function() { - var node = nodeFromJSON({ + const node = nodeFromJSON({ intNode:4, doubleNode:4.5623, stringNode:"hey guys", @@ -119,7 +120,7 @@ describe('Node Tests', function() { }); it("Node.hash() works correctly with priorities.", function() { - var node = nodeFromJSON({ + const node = nodeFromJSON({ root: {c: {'.value': 99, '.priority': 'abc'}, '.priority': 'def'} }); @@ -127,7 +128,7 @@ describe('Node Tests', function() { }); it("Node.hash() works correctly with number priorities.", function() { - var node = nodeFromJSON({ + const node = nodeFromJSON({ root: {c: {'.value': 99, '.priority': 42}, '.priority': 3.14} }); @@ -135,7 +136,7 @@ describe('Node Tests', function() { }); it("Node.hash() stress...", function() { - var node = nodeFromJSON({ + const node = nodeFromJSON({ a:-1.7976931348623157e+308, b:1.7976931348623157e+308, c:"unicode ✔ 🐵 🌴 x͢", @@ -157,7 +158,7 @@ describe('Node Tests', function() { }); it("ChildrenNode.getPredecessorChild works correctly.", function() { - var node = nodeFromJSON({ + const node = nodeFromJSON({ d: true, a: true, g: true, c: true, e: true }); @@ -170,7 +171,7 @@ describe('Node Tests', function() { }); it("SortedChildrenNode.getPredecessorChild works correctly.", function() { - var node = nodeFromJSON({ + const node = nodeFromJSON({ d: { '.value': true, '.priority' : 22 }, a: { '.value': true, '.priority' : 25 }, g: { '.value': true, '.priority' : 19 }, @@ -186,7 +187,7 @@ describe('Node Tests', function() { }); it("SortedChildrenNode.updateImmediateChild works correctly.", function() { - var node = nodeFromJSON({ + let node = nodeFromJSON({ d: { '.value': true, '.priority' : 22 }, a: { '.value': true, '.priority' : 25 }, g: { '.value': true, '.priority' : 19 }, @@ -196,40 +197,40 @@ describe('Node Tests', function() { }); node = node.updateImmediateChild('c', nodeFromJSON(false)); - expect(node.getImmediateChild('c').getValue()).to.equal(false); + 
expect((node.getImmediateChild('c') as LeafNode).getValue()).to.equal(false); expect(node.getImmediateChild('c').getPriority().val()).to.equal(null); expect(node.getPriority().val()).to.equal(1000); }); it("removing nodes correctly removes intermediate nodes with no remaining children", function() { - var json = {a: {b: {c: 1}}}; - var node = nodeFromJSON(json); - var newNode = node.updateChild(new Path('a/b/c'), ChildrenNode.EMPTY_NODE); + const json = {a: {b: {c: 1}}}; + const node = nodeFromJSON(json); + const newNode = node.updateChild(new Path('a/b/c'), ChildrenNode.EMPTY_NODE); expect(newNode.isEmpty()).to.equal(true); }); it("removing nodes leaves intermediate nodes with other children", function() { - var json = {a: {b: {c: 1}, d: 2}}; - var node = nodeFromJSON(json); - var newNode = node.updateChild(new Path('a/b/c'), ChildrenNode.EMPTY_NODE); + const json = {a: {b: {c: 1}, d: 2}}; + const node = nodeFromJSON(json); + const newNode = node.updateChild(new Path('a/b/c'), ChildrenNode.EMPTY_NODE); expect(newNode.isEmpty()).to.equal(false); expect(newNode.getChild(new Path('a/b/c')).isEmpty()).to.equal(true); expect(newNode.getChild(new Path('a/d')).val()).to.equal(2); }); it("removing nodes leaves other leaf nodes", function() { - var json = {a: {b: {c: 1, d: 2}}}; - var node = nodeFromJSON(json); - var newNode = node.updateChild(new Path('a/b/c'), ChildrenNode.EMPTY_NODE); + const json = {a: {b: {c: 1, d: 2}}}; + const node = nodeFromJSON(json); + const newNode = node.updateChild(new Path('a/b/c'), ChildrenNode.EMPTY_NODE); expect(newNode.isEmpty()).to.equal(false); expect(newNode.getChild(new Path('a/b/c')).isEmpty()).to.equal(true); expect(newNode.getChild(new Path('a/b/d')).val()).to.equal(2); }); it("removing nodes correctly removes the root", function() { - var json = null; - var node = nodeFromJSON(json); - var newNode = node.updateChild(new Path(''), ChildrenNode.EMPTY_NODE); + let json = null; + let node = nodeFromJSON(json); + let newNode = node.updateChild(new Path(''), ChildrenNode.EMPTY_NODE); expect(newNode.isEmpty()).to.equal(true); json = {a: 1}; @@ -239,29 +240,29 @@ describe('Node Tests', function() { }); it("ignores null values", function() { - var json = {a: 1, b: null}; - var node = nodeFromJSON(json); - expect(node.children_.get('b')).to.equal(null); + const json = {a: 1, b: null}; + const node = nodeFromJSON(json); + expect((node as any).children_.get('b')).to.equal(null); }); it("Leading zeroes in path are handled properly", function() { - var json = {"1": 1, "01": 2, "001": 3}; - var tree = nodeFromJSON(json); + const json = {"1": 1, "01": 2, "001": 3}; + const tree = nodeFromJSON(json); expect(tree.getChild(new Path("1")).val()).to.equal(1); expect(tree.getChild(new Path("01")).val()).to.equal(2); expect(tree.getChild(new Path("001")).val()).to.equal(3); }); it("Treats leading zeroes as objects, not array", function() { - var json = {"3": 1, "03": 2}; - var tree = nodeFromJSON(json); - var val = tree.val(); + const json = {"3": 1, "03": 2}; + const tree = nodeFromJSON(json); + const val = tree.val(); expect(val).to.deep.equal(json); }); it("Updating empty children doesn't overwrite leaf node", function() { - var empty = ChildrenNode.EMPTY_NODE; - var node = nodeFromJSON("value"); + const empty = ChildrenNode.EMPTY_NODE; + const node = nodeFromJSON("value"); expect(node).to.deep.equal(node.updateChild(new Path(".priority"), empty)); expect(node).to.deep.equal(node.updateChild(new Path("child"), empty)); expect(node).to.deep.equal(node.updateChild(new 
Path("child/.priority"), empty)); diff --git a/tests/database/order.test.ts b/tests/database/order.test.ts index 57161291745..bdbb8606db4 100644 --- a/tests/database/order.test.ts +++ b/tests/database/order.test.ts @@ -29,7 +29,8 @@ describe('Order Tests', function () { // in the opposite order. beforeEach(function() { return new Promise(resolve => { - var ref = (getRandomNode() as Reference), connected = false; + const ref = (getRandomNode() as Reference); + let connected = false; ref.root.child('.info/connected').on('value', function(s) { connected = s.val() == true; if (connected) resolve(); @@ -38,14 +39,14 @@ describe('Order Tests', function () { }); it("Push a bunch of data, enumerate it back; ensure order is correct.", async function () { - var node = (getRandomNode() as Reference); - for (var i = 0; i < 10; i++) { + const node = (getRandomNode() as Reference); + for (let i = 0; i < 10; i++) { node.push().set(i); } const snap = await node.once('value'); - var expected = 0; + let expected = 0; snap.forEach(function (child) { expect(child.val()).to.equal(expected); expected++; @@ -54,19 +55,19 @@ describe('Order Tests', function () { }); it("Push a bunch of paths, then write; ensure order is correct.", async function() { - var node = (getRandomNode() as Reference); - var paths = []; + const node = (getRandomNode() as Reference); + const paths = []; // Push them first to try to call push() multiple times in the same ms. - for (var i = 0; i < 20; i++) { + for (let i = 0; i < 20; i++) { paths[i] = node.push(); } - for (i = 0; i < 20; i++) { + for (let i = 0; i < 20; i++) { paths[i].set(i); } const snap = await node.once('value'); - - var expected = 0; + + let expected = 0; snap.forEach(function (child) { expect(child.val()).to.equal(expected); expected++; @@ -75,12 +76,12 @@ describe('Order Tests', function () { }); it("Push a bunch of data, reconnect, read it back; ensure order is chronological.", async function () { - var nodePair = (getRandomNode(2) as Reference[]); - var expected; + const nodePair = (getRandomNode(2) as Reference[]); + let expected; - var node = nodePair[0]; - var nodesSet = 0; - for (var i = 0; i < 10; i++) { + const node = nodePair[0]; + let nodesSet = 0; + for (let i = 0; i < 10; i++) { node.push().set(i, function() { ++nodesSet }); } @@ -95,7 +96,7 @@ describe('Order Tests', function () { expect(expected).to.equal(10); // read it back - var readSnap; + let readSnap; const ea = new EventAccumulator(() => readSnap); nodePair[1].on('value', function(snap) { readSnap = snap; @@ -113,13 +114,13 @@ describe('Order Tests', function () { }); it("Push a bunch of data with explicit priority, reconnect, read it back; ensure order is correct.", async function () { - var nodePair = (getRandomNode(2) as Reference[]); - var expected; + const nodePair = (getRandomNode(2) as Reference[]); + let expected; - var node = nodePair[0]; - var nodesSet = 0; - for (var i = 0; i < 10; i++) { - var pushedNode = node.push(); + const node = nodePair[0]; + let nodesSet = 0; + for (let i = 0; i < 10; i++) { + const pushedNode = node.push(); pushedNode.setWithPriority(i, 10 - i, function() { ++nodesSet }); } @@ -136,7 +137,7 @@ describe('Order Tests', function () { // We need confirmation that the server has gotten all the data before we can expect to receive it all // read it back - var readSnap; + let readSnap; const ea = new EventAccumulator(() => readSnap); nodePair[1].on('value', function(snap) { readSnap = snap; @@ -153,13 +154,13 @@ describe('Order Tests', function () { }); it("Push data 
with exponential priority and ensure order is correct.", async function () { - var nodePair = (getRandomNode(2) as Reference[]); - var expected; + const nodePair = (getRandomNode(2) as Reference[]); + let expected; - var node = nodePair[0]; - var nodesSet = 0; - for (var i = 0; i < 10; i++) { - var pushedNode = node.push(); + const node = nodePair[0]; + let nodesSet = 0; + for (let i = 0; i < 10; i++) { + const pushedNode = node.push(); pushedNode.setWithPriority(i, 111111111111111111111111111111 / Math.pow(10, i), function() { ++nodesSet }); } @@ -173,7 +174,7 @@ describe('Order Tests', function () { expect(expected).to.equal(-1); // read it back - var readSnap; + let readSnap; const ea = new EventAccumulator(() => readSnap); nodePair[1].on('value', function(snap) { readSnap = snap; @@ -191,11 +192,11 @@ describe('Order Tests', function () { }); it("Verify nodes without values aren't enumerated.", async function() { - var node = (getRandomNode() as Reference); + const node = (getRandomNode() as Reference); node.child('foo'); node.child('bar').set('test'); - var items = 0; + let items = 0; const snap = await node.once('value'); snap.forEach(function (child) { items++; @@ -206,11 +207,11 @@ describe('Order Tests', function () { }); it.skip("Receive child_moved event when priority changes.", async function() { - var node = (getRandomNode() as Reference); + const node = (getRandomNode() as Reference); // const ea = new EventAccumulator(() => eventHelper.watchesInitializedWaiter); - var eventHelper = eventTestHelper([ + const eventHelper = eventTestHelper([ [ node, ['child_added', 'a'] ], [ node, ['value', ''] ], [ node, ['child_added', 'b'] ], @@ -236,11 +237,11 @@ describe('Order Tests', function () { }); it.skip("Can reset priority to null.", async function() { - var node = (getRandomNode() as Reference); + const node = (getRandomNode() as Reference); node.child('a').setWithPriority('a', 1); node.child('b').setWithPriority('b', 2); - var eventHelper; + let eventHelper; // const ea = new EventAccumulator(() => eventHelper.waiter()); eventHelper = eventTestHelper([ @@ -264,9 +265,9 @@ describe('Order Tests', function () { }); it("Inserting a node under a leaf node preserves its priority.", function() { - var node = (getRandomNode() as Reference); + const node = (getRandomNode() as Reference); - var snap = null; + let snap = null; node.on('value', function(s) {snap = s;}); node.setWithPriority('a', 10); @@ -275,8 +276,8 @@ describe('Order Tests', function () { }); it("Verify order of mixed numbers / strings / no priorities.", async function () { - var nodePair = (getRandomNode(2) as Reference[]); - var nodeAndPriorities = [ + const nodePair = (getRandomNode(2) as Reference[]); + const nodeAndPriorities = [ "alpha42", "zed", "noPriorityC", null, "num41", 500, @@ -297,25 +298,25 @@ describe('Order Tests', function () { "alpha40", "zed", "num40", 500]; - var setsCompleted = 0; + let setsCompleted = 0; for (let i = 0; i < nodeAndPriorities.length; i++) { - var n = nodePair[0].child((nodeAndPriorities[i++] as string)); + const n = nodePair[0].child((nodeAndPriorities[i++] as string)); n.setWithPriority(1, nodeAndPriorities[i], function() { setsCompleted++; }); } - var expectedOutput = "noPriorityA, noPriorityB, noPriorityC, num10, num20, num30, num40, num41, num42, num50, num60, num70, num80, alpha10, alpha20, alpha30, alpha40, alpha41, alpha42, "; + const expectedOutput = "noPriorityA, noPriorityB, noPriorityC, num10, num20, num30, num40, num41, num42, num50, num60, num70, num80, alpha10, alpha20, 
alpha30, alpha40, alpha41, alpha42, "; const snap = await nodePair[0].once('value'); - - var output = ""; + + let output = ""; snap.forEach(function (n) { output += n.key + ", "; }); expect(output).to.equal(expectedOutput); - var eventsFired = false; - var output = ""; + let eventsFired = false; + output = ""; nodePair[1].on('value', function(snap) { snap.forEach(function (n) { output += n.key + ", "; @@ -326,8 +327,8 @@ describe('Order Tests', function () { }); it("Verify order of integer keys.", async function () { - var ref = (getRandomNode() as Reference); - var keys = [ + const ref = (getRandomNode() as Reference); + const keys = [ "foo", "bar", "03", @@ -340,16 +341,16 @@ describe('Order Tests', function () { "9" ]; - var setsCompleted = 0; - for (var i = 0; i < keys.length; i++) { - var child = ref.child(keys[i]); + let setsCompleted = 0; + for (let i = 0; i < keys.length; i++) { + const child = ref.child(keys[i]); child.set(true, function() { setsCompleted++; }); } - var expectedOutput = "0, 3, 03, 003, 5, 9, 20, 100, bar, foo, "; + const expectedOutput = "0, 3, 03, 003, 5, 9, 20, 100, bar, foo, "; const snap = await ref.once('value'); - var output = ""; + let output = ""; snap.forEach(function (n) { output += n.key + ", "; }); @@ -358,9 +359,9 @@ describe('Order Tests', function () { }); it("Ensure prevName is correct on child_added event.", function() { - var node = (getRandomNode() as Reference); + const node = (getRandomNode() as Reference); - var added = ''; + let added = ''; node.on('child_added', function(snap, prevName) { added += snap.key + " " + prevName + ", "; }); @@ -371,9 +372,9 @@ describe('Order Tests', function () { }); it("Ensure prevName is correct when adding new nodes.", function() { - var node = (getRandomNode() as Reference); + const node = (getRandomNode() as Reference); - var added = ''; + let added = ''; node.on('child_added', function(snap, prevName) { added += snap.key + " " + prevName + ", "; }); @@ -392,9 +393,9 @@ describe('Order Tests', function () { }); it("Ensure prevName is correct when adding new nodes with JSON.", function() { - var node = (getRandomNode() as Reference); + const node = (getRandomNode() as Reference); - var added = ''; + let added = ''; node.on('child_added', function(snap, prevName) { added += snap.key + " " + prevName + ", "; }); @@ -413,9 +414,9 @@ describe('Order Tests', function () { }); it("Ensure prevName is correct when moving nodes.", function() { - var node = (getRandomNode() as Reference); + const node = (getRandomNode() as Reference); - var moved = ''; + let moved = ''; node.on('child_moved', function(snap, prevName) { moved += snap.key + " " + prevName + ", "; }); @@ -438,9 +439,9 @@ describe('Order Tests', function () { }); it("Ensure prevName is correct when moving nodes by setting whole JSON.", function() { - var node = (getRandomNode() as Reference); + const node = (getRandomNode() as Reference); - var moved = ''; + let moved = ''; node.on('child_moved', function(snap, prevName) { moved += snap.key + " " + prevName + ", "; }); @@ -480,8 +481,8 @@ describe('Order Tests', function () { }); it("Case 595: Should not get child_moved event when deleting prioritized grandchild.", function() { - var f = (getRandomNode() as Reference); - var moves = 0; + const f = (getRandomNode() as Reference); + let moves = 0; f.on('child_moved', function() { moves++; }); @@ -495,9 +496,9 @@ describe('Order Tests', function () { }); it("Can set value with priority of 0.", function() { - var f = (getRandomNode() as Reference); + const f 
= (getRandomNode() as Reference); - var snap = null; + let snap = null; f.on('value', function(s) { snap = s; }); @@ -508,9 +509,9 @@ describe('Order Tests', function () { }); it("Can set object with priority of 0.", function() { - var f = (getRandomNode() as Reference); + const f = (getRandomNode() as Reference); - var snap = null; + let snap = null; f.on('value', function(s) { snap = s; }); @@ -521,8 +522,8 @@ describe('Order Tests', function () { }); it("Case 2003: Should get child_moved for any priority change, regardless of whether it affects ordering.", function() { - var f = (getRandomNode() as Reference); - var moved = []; + const f = (getRandomNode() as Reference); + const moved = []; f.on('child_moved', function(snap) { moved.push(snap.key); }); f.set({ a: {'.value': 'a', '.priority': 0}, @@ -537,8 +538,8 @@ describe('Order Tests', function () { }); it("Case 2003: Should get child_moved for any priority change, regardless of whether it affects ordering (2).", function() { - var f = (getRandomNode() as Reference); - var moved = []; + const f = (getRandomNode() as Reference); + const moved = []; f.on('child_moved', function(snap) { moved.push(snap.key); }); f.set({ a: {'.value': 'a', '.priority': 0}, diff --git a/tests/database/order_by.test.ts b/tests/database/order_by.test.ts index c60fbfe8406..43112a6aa83 100644 --- a/tests/database/order_by.test.ts +++ b/tests/database/order_by.test.ts @@ -23,12 +23,12 @@ describe('.orderBy tests', function() { // TODO: setup spy on console.warn - var clearRef = (getRandomNode() as Reference); + const clearRef = (getRandomNode() as Reference); it('Snapshots are iterated in order', function() { - var ref = (getRandomNode() as Reference); + const ref = (getRandomNode() as Reference); - var initial = { + const initial = { alex: {nuggets: 60}, rob: {nuggets: 56}, vassili: {nuggets: 55.5}, @@ -36,14 +36,14 @@ describe('.orderBy tests', function() { greg: {nuggets: 52} }; - var expectedOrder = ['greg', 'tony', 'vassili', 'rob', 'alex']; - var expectedPrevNames = [null, 'greg', 'tony', 'vassili', 'rob']; + const expectedOrder = ['greg', 'tony', 'vassili', 'rob', 'alex']; + const expectedPrevNames = [null, 'greg', 'tony', 'vassili', 'rob']; - var valueOrder = []; - var addedOrder = []; - var addedPrevNames = []; + const valueOrder = []; + const addedOrder = []; + const addedPrevNames = []; - var orderedRef = ref.orderByChild('nuggets'); + const orderedRef = ref.orderByChild('nuggets'); orderedRef.on('value', function(snap) { snap.forEach(function(childSnap) { @@ -64,9 +64,9 @@ describe('.orderBy tests', function() { }); it('Snapshots are iterated in order for value', function() { - var ref = (getRandomNode() as Reference); + const ref = (getRandomNode() as Reference); - var initial = { + const initial = { alex: 60, rob: 56, vassili: 55.5, @@ -74,14 +74,14 @@ describe('.orderBy tests', function() { greg: 52 }; - var expectedOrder = ['greg', 'tony', 'vassili', 'rob', 'alex']; - var expectedPrevNames = [null, 'greg', 'tony', 'vassili', 'rob']; + const expectedOrder = ['greg', 'tony', 'vassili', 'rob', 'alex']; + const expectedPrevNames = [null, 'greg', 'tony', 'vassili', 'rob']; - var valueOrder = []; - var addedOrder = []; - var addedPrevNames = []; + const valueOrder = []; + const addedOrder = []; + const addedPrevNames = []; - var orderedRef = ref.orderByValue(); + const orderedRef = ref.orderByValue(); orderedRef.on('value', function(snap) { snap.forEach(function(childSnap) { @@ -102,9 +102,9 @@ describe('.orderBy tests', function() { }); it('Fires 
child_moved events', function() { - var ref = (getRandomNode() as Reference); + const ref = (getRandomNode() as Reference); - var initial = { + const initial = { alex: {nuggets: 60}, rob: {nuggets: 56}, vassili: {nuggets: 55.5}, @@ -112,9 +112,9 @@ describe('.orderBy tests', function() { greg: {nuggets: 52} }; - var orderedRef = ref.orderByChild('nuggets'); + const orderedRef = ref.orderByChild('nuggets'); - var moved = false; + let moved = false; orderedRef.on('child_moved', function(snap, prevName) { moved = true; expect(snap.key).to.equal('greg'); @@ -128,12 +128,12 @@ describe('.orderBy tests', function() { }); it('Callback removal works', async function() { - var ref = (getRandomNode() as Reference); + const ref = (getRandomNode() as Reference); - var reads = 0; - var fooCb; - var barCb; - var bazCb; + let reads = 0; + let fooCb; + let barCb; + let bazCb; const ea = EventAccumulatorFactory.waitsForCount(4); fooCb = ref.orderByChild('foo').on('value', function() { @@ -177,14 +177,14 @@ describe('.orderBy tests', function() { }); it('child_added events are in the correct order', function() { - var ref = (getRandomNode() as Reference); + const ref = (getRandomNode() as Reference); - var initial = { + const initial = { a: {value: 5}, c: {value: 3} }; - var added = []; + const added = []; ref.orderByChild('value').on('child_added', function(snap) { added.push(snap.key); }); @@ -201,9 +201,9 @@ describe('.orderBy tests', function() { }); it('Can use key index', async function() { - var ref = (getRandomNode() as Reference); + const ref = (getRandomNode() as Reference); - var data = { + const data = { a: { '.priority': 10, '.value': 'a' }, b: { '.priority': 5, '.value': 'b' }, c: { '.priority': 20, '.value': 'c' }, @@ -215,15 +215,15 @@ describe('.orderBy tests', function() { await ref.set(data); const snap = await ref.orderByKey().startAt('c').once('value'); - - var keys = []; + + let keys = []; snap.forEach(function(child) { keys.push(child.key); }); expect(keys).to.deep.equal(['c', 'd', 'e', 'f']); const ea = EventAccumulatorFactory.waitsForCount(5); - var keys = []; + keys = []; ref.orderByKey().limitToLast(5).on('child_added', function(child) { keys.push(child.key); @@ -237,7 +237,7 @@ describe('.orderBy tests', function() { }); it('Queries work on leaf nodes', function(done) { - var ref = (getRandomNode() as Reference); + const ref = (getRandomNode() as Reference); ref.set('leaf-node', function() { ref.orderByChild('foo').limitToLast(1).on('value', function(snap) { @@ -248,17 +248,17 @@ describe('.orderBy tests', function() { }); it('Updates for unindexed queries work', function(done) { - var refs = (getRandomNode(2) as Reference[]); - var reader = refs[0]; - var writer = refs[1]; + const refs = (getRandomNode(2) as Reference[]); + const reader = refs[0]; + const writer = refs[1]; - var value = { + const value = { 'one': { 'index': 1, 'value': 'one' }, 'two': { 'index': 2, 'value': 'two' }, 'three': { 'index': 3, 'value': 'three' } }; - var count = 0; + let count = 0; writer.set(value, function() { reader.orderByChild('index').limitToLast(2).on('value', function(snap) { @@ -282,21 +282,21 @@ describe('.orderBy tests', function() { }); it('Server respects KeyIndex', function(done) { - var refs = (getRandomNode(2) as Reference[]); - var reader = refs[0]; - var writer = refs[1]; + const refs = (getRandomNode(2) as Reference[]); + const reader = refs[0]; + const writer = refs[1]; - var initial = { + const initial = { a: 1, b: 2, c: 3 }; - var expected = ['b', 'c']; + const expected = ['b', 
'c']; - var actual = []; + const actual = []; - var orderedRef = reader.orderByKey().startAt('b').limitToFirst(2); + const orderedRef = reader.orderByKey().startAt('b').limitToFirst(2); writer.set(initial, function() { orderedRef.on('value', function(snap) { snap.forEach(function(childSnap) { @@ -309,9 +309,9 @@ describe('.orderBy tests', function() { }); it('startAt/endAt works on value index', function() { - var ref = (getRandomNode() as Reference); + const ref = (getRandomNode() as Reference); - var initial = { + const initial = { alex: 60, rob: 56, vassili: 55.5, @@ -319,14 +319,14 @@ describe('.orderBy tests', function() { greg: 52 }; - var expectedOrder = ['tony', 'vassili', 'rob']; - var expectedPrevNames = [null, 'tony', 'vassili']; + const expectedOrder = ['tony', 'vassili', 'rob']; + const expectedPrevNames = [null, 'tony', 'vassili']; - var valueOrder = []; - var addedOrder = []; - var addedPrevNames = []; + const valueOrder = []; + const addedOrder = []; + const addedPrevNames = []; - var orderedRef = ref.orderByValue().startAt(52, 'tony').endAt(59); + const orderedRef = ref.orderByValue().startAt(52, 'tony').endAt(59); orderedRef.on('value', function(snap) { snap.forEach(function(childSnap) { @@ -347,9 +347,9 @@ describe('.orderBy tests', function() { }); it('Removing default listener removes non-default listener that loads all data', function(done) { - var ref = (getRandomNode() as Reference); + const ref = (getRandomNode() as Reference); - var initial = { key: 'value' }; + const initial = { key: 'value' }; ref.set(initial, function(err) { expect(err).to.be.null; ref.orderByKey().on('value', function() {}); @@ -366,9 +366,9 @@ describe('.orderBy tests', function() { }); it('Can define and use an deep index', function(done) { - var ref = (getRandomNode() as Reference); + const ref = (getRandomNode() as Reference); - var initial = { + const initial = { alex: {deep: {nuggets: 60}}, rob: {deep: {nuggets: 56}}, vassili: {deep: {nuggets: 55.5}}, @@ -376,14 +376,14 @@ describe('.orderBy tests', function() { greg: {deep: {nuggets: 52}} }; - var expectedOrder = ['greg', 'tony', 'vassili']; - var expectedPrevNames = [null, 'greg', 'tony']; + const expectedOrder = ['greg', 'tony', 'vassili']; + const expectedPrevNames = [null, 'greg', 'tony']; - var valueOrder = []; - var addedOrder = []; - var addedPrevNames = []; + const valueOrder = []; + const addedOrder = []; + const addedPrevNames = []; - var orderedRef = ref.orderByChild('deep/nuggets').limitToFirst(3); + const orderedRef = ref.orderByChild('deep/nuggets').limitToFirst(3); // come before value event orderedRef.on('child_added', function(snap, prevName) { diff --git a/tests/database/path.test.ts b/tests/database/path.test.ts index 8ef71d1a407..7a5c9e06e4a 100644 --- a/tests/database/path.test.ts +++ b/tests/database/path.test.ts @@ -18,12 +18,12 @@ import { expect } from "chai"; import { Path } from "../../src/database/core/util/Path"; describe('Path Tests', function () { - var expectGreater = function(left, right) { - expect(Path.comparePaths(new Path(left), new Path(right))).to.equal(1) + const expectGreater = function(left, right) { + expect(Path.comparePaths(new Path(left), new Path(right))).to.equal(1); expect(Path.comparePaths(new Path(right), new Path(left))).to.equal(-1) }; - var expectEqual = function(left, right) { + const expectEqual = function(left, right) { expect(Path.comparePaths(new Path(left), new Path(right))).to.equal(0) }; @@ -49,7 +49,7 @@ describe('Path Tests', function () { }); it('popFront() returns the 
parent', function() { - expect(new Path('/a/b/c').popFront().toString()).to.equal('/b/c') + expect(new Path('/a/b/c').popFront().toString()).to.equal('/b/c'); expect(new Path('/a/b/c').popFront().popFront().toString()).to.equal('/c'); expect(new Path('/a/b/c').popFront().popFront().popFront().toString()).to.equal('/'); expect(new Path('/a/b/c').popFront().popFront().popFront().popFront().toString()).to.equal('/'); diff --git a/tests/database/promise.test.ts b/tests/database/promise.test.ts index e67b8c2462e..2ec663fd545 100644 --- a/tests/database/promise.test.ts +++ b/tests/database/promise.test.ts @@ -31,7 +31,7 @@ describe('Promise Tests', function() { }); it('wraps Firebase.set', function() { - var ref = (getRandomNode() as Reference); + const ref = (getRandomNode() as Reference); return ref.set(5).then(function() { return ref.once('value'); }).then(function(read) { @@ -40,8 +40,8 @@ describe('Promise Tests', function() { }); it('wraps Firebase.push when no value is passed', function() { - var ref = (getRandomNode() as Reference); - var pushed = ref.push(); + const ref = (getRandomNode() as Reference); + const pushed = ref.push(); return pushed.then(function(childRef) { expect(pushed.ref.parent.toString()).to.equal(ref.toString()); expect(pushed.toString()).to.equal(childRef.toString()); @@ -54,8 +54,8 @@ describe('Promise Tests', function() { }); it('wraps Firebase.push when a value is passed', function() { - var ref = (getRandomNode() as Reference); - var pushed = ref.push(6); + const ref = (getRandomNode() as Reference); + const pushed = ref.push(6); return pushed.then(function(childRef) { expect(pushed.ref.parent.toString()).to.equal(ref.toString()); expect(pushed.toString()).to.equal(childRef.toString()); @@ -67,9 +67,9 @@ describe('Promise Tests', function() { }); it('wraps Firebase.remove', function() { - var ref = (getRandomNode() as Reference); + const ref = (getRandomNode() as Reference); return ref.set({'a': 'b'}).then(function() { - var p = ref.child('a').remove(); + const p = ref.child('a').remove(); expect(typeof p.then === 'function').to.equal(true); return p; }).then(function() { @@ -80,9 +80,9 @@ describe('Promise Tests', function() { }); it('wraps Firebase.update', function() { - var ref = (getRandomNode() as Reference); + const ref = (getRandomNode() as Reference); return ref.set({'a': 'b'}).then(function() { - var p = ref.update({'c': 'd'}); + const p = ref.update({'c': 'd'}); expect(typeof p.then === 'function').to.equal(true); return p; }).then(function() { @@ -93,9 +93,9 @@ describe('Promise Tests', function() { }); it('wraps Fireabse.setPriority', function() { - var ref = (getRandomNode() as Reference); + const ref = (getRandomNode() as Reference); return ref.set({'a': 'b'}).then(function() { - var p = ref.child('a').setPriority(5); + const p = ref.child('a').setPriority(5); expect(typeof p.then === 'function').to.equal(true); return p; }).then(function() { @@ -106,7 +106,7 @@ describe('Promise Tests', function() { }); it('wraps Firebase.setWithPriority', function() { - var ref = (getRandomNode() as Reference); + const ref = (getRandomNode() as Reference); return ref.setWithPriority('hi', 5).then(function() { return ref.once('value'); }).then(function(snap) { @@ -116,7 +116,7 @@ describe('Promise Tests', function() { }); it('wraps Firebase.transaction', function() { - var ref = (getRandomNode() as Reference); + const ref = (getRandomNode() as Reference); return ref.transaction(function() { return 5; }).then(function(result) { @@ -131,8 +131,8 @@ 
describe('Promise Tests', function() { it('exposes catch in the return of Firebase.push', function() { // Catch is a pain in the bum to provide safely because "catch" is a reserved word and ES3 and below require // you to use quotes to define it, but the closure linter really doesn't want you to do that either. - var ref = (getRandomNode() as Reference); - var pushed = ref.push(6); + const ref = (getRandomNode() as Reference); + const pushed = ref.push(6); expect(typeof ref.then === 'function').to.equal(false); expect(typeof ref.catch === 'function').to.equal(false); @@ -142,17 +142,17 @@ describe('Promise Tests', function() { }); it('wraps onDisconnect.remove', function() { - var refs = (getRandomNode(2) as Reference[]); - var writer = refs[0]; - var reader = refs[1]; - var refInfo = getRootNode(0, '.info/connected'); + const refs = (getRandomNode(2) as Reference[]); + const writer = refs[0]; + const reader = refs[1]; + const refInfo = getRootNode(0, '.info/connected'); refInfo.once('value', function(snapshot) { expect(snapshot.val()).to.equal(true); }); return writer.child('here today').set('gone tomorrow').then(function() { - var p = writer.child('here today').onDisconnect().remove(); + const p = writer.child('here today').onDisconnect().remove(); expect(typeof p.then === 'function').to.equal(true); return p; }).then(function() { @@ -165,11 +165,11 @@ describe('Promise Tests', function() { }); it('wraps onDisconnect.update', function() { - var refs = (getRandomNode(2) as Reference[]); - var writer = refs[0]; - var reader = refs[1]; + const refs = (getRandomNode(2) as Reference[]); + const writer = refs[0]; + const reader = refs[1]; return writer.set({'foo': 'baz'}).then(function() { - var p = writer.onDisconnect().update({'foo': 'bar'}); + const p = writer.onDisconnect().update({'foo': 'bar'}); expect(typeof p.then === 'function').to.equal(true); return p; }).then(function() { @@ -182,9 +182,9 @@ describe('Promise Tests', function() { }); it('wraps onDisconnect.set', function() { - var refs = (getRandomNode(2) as Reference[]); - var writer = refs[0]; - var reader = refs[1]; + const refs = (getRandomNode(2) as Reference[]); + const writer = refs[0]; + const reader = refs[1]; return writer.child('hello').onDisconnect().set('world').then(function() { writer.database.goOffline(); writer.database.goOnline(); @@ -195,9 +195,9 @@ describe('Promise Tests', function() { }); it('wraps onDisconnect.setWithPriority', function() { - var refs = (getRandomNode(2) as Reference[]); - var writer = refs[0]; - var reader = refs[1]; + const refs = (getRandomNode(2) as Reference[]); + const writer = refs[0]; + const reader = refs[1]; return writer.child('meaning of life').onDisconnect().setWithPriority('ultimate question', 42).then(function() { writer.database.goOffline(); writer.database.goOnline(); diff --git a/tests/database/query.test.ts b/tests/database/query.test.ts index 1b706178989..a11e688ea9b 100644 --- a/tests/database/query.test.ts +++ b/tests/database/query.test.ts @@ -15,12 +15,10 @@ */ import { expect } from "chai"; -import firebase from '../../src/app'; import { Reference } from "../../src/database/api/Reference"; import { Query } from "../../src/database/api/Query"; import "../../src/database/core/snap/ChildrenNode"; -import { - getQueryValue, +import { getRandomNode, getPath, pause @@ -36,7 +34,7 @@ type TaskList = [Query, any][]; describe('Query Tests', function() { // Little helper class for testing event callbacks w/ contexts. 
- var EventReceiver = function() { + const EventReceiver = function() { this.gotValue = false; this.gotChildAdded = false; }; @@ -48,7 +46,7 @@ describe('Query Tests', function() { }; it('Can create basic queries.', function() { - var path = (getRandomNode() as Reference); + const path = (getRandomNode() as Reference); path.limitToLast(10); path.startAt('199').limitToFirst(10); @@ -68,11 +66,11 @@ describe('Query Tests', function() { }); it('Exposes database as read-only property', function() { - var path = (getRandomNode() as Reference); - var child = path.child('child'); + const path = (getRandomNode() as Reference); + const child = path.child('child'); - var db = path.database; - var dbChild = child.database; + const db = path.database; + const dbChild = child.database; expect(db).to.equal(dbChild); /** @@ -84,7 +82,7 @@ describe('Query Tests', function() { }); it('Invalid queries throw', function() { - var path = (getRandomNode() as Reference); + const path = (getRandomNode() as Reference); /** * Because we are testing invalid queries, I am casting @@ -148,17 +146,17 @@ describe('Query Tests', function() { }); it('can produce a valid ref', function() { - var path = (getRandomNode() as Reference); + const path = (getRandomNode() as Reference); - var query = path.limitToLast(1); - var ref = query.ref; + const query = path.limitToLast(1); + const ref = query.ref; expect(ref.toString()).to.equal(path.toString()); }); it('Passing invalidKeys to startAt / endAt throws.', function() { - var f = (getRandomNode() as Reference); - var badKeys = ['.test', 'test.', 'fo$o', '[what', 'ever]', 'ha#sh', '/thing', 'th/ing', 'thing/']; + const f = (getRandomNode() as Reference); + const badKeys = ['.test', 'test.', 'fo$o', '[what', 'ever]', 'ha#sh', '/thing', 'th/ing', 'thing/']; // Changed from basic array iteration to avoid closure issues accessing mutable state _.each(badKeys, function(badKey) { expect(function() { f.startAt(null, badKey); }).to.throw(); @@ -167,15 +165,15 @@ describe('Query Tests', function() { }); it('Passing invalid paths to orderBy throws', function() { - var ref = (getRandomNode() as Reference); + const ref = (getRandomNode() as Reference); expect(function() { ref.orderByChild('$child/foo'); }).to.throw(); expect(function() { ref.orderByChild('$key'); }).to.throw(); expect(function() { ref.orderByChild('$priority'); }).to.throw(); }); it('Query.queryIdentifier works.', function() { - var path = (getRandomNode() as Reference); - var queryId = function(query) { + const path = (getRandomNode() as Reference); + const queryId = function(query) { return query.queryIdentifier(query); }; @@ -194,7 +192,7 @@ describe('Query Tests', function() { }); it('Passing invalid queries to isEqual throws', function() { - var ref = (getRandomNode() as Reference); + const ref = (getRandomNode() as Reference); expect(function() { (ref as any).isEqual(); }).to.throw(); expect(function() { (ref as any).isEqual(''); }).to.throw(); expect(function() { (ref as any).isEqual('foo'); }).to.throw(); @@ -209,9 +207,9 @@ describe('Query Tests', function() { }); it('Query.isEqual works.', function() { - var path = (getRandomNode() as Reference); - var rootRef = path.root; - var childRef = rootRef.child('child'); + const path = (getRandomNode() as Reference); + const rootRef = path.root; + const childRef = rootRef.child('child'); // Equivalent refs expect(path.isEqual(path), 'Query.isEqual - 1').to.be.true; @@ -230,14 +228,14 @@ describe('Query Tests', function() { expect(rootRef.isEqual(childRef), 'Query.isEqual 
- 7').to.be.false; expect(childRef.isEqual(rootRef.child('otherChild')), 'Query.isEqual - 8').to.be.false; - var childQueryLast25 = childRef.limitToLast(25); - var childQueryOrderedByKey = childRef.orderByKey(); - var childQueryOrderedByPriority = childRef.orderByPriority(); - var childQueryOrderedByTimestamp = childRef.orderByChild("timestamp"); - var childQueryStartAt1 = childQueryOrderedByTimestamp.startAt(1); - var childQueryStartAt2 = childQueryOrderedByTimestamp.startAt(2); - var childQueryEndAt2 = childQueryOrderedByTimestamp.endAt(2); - var childQueryStartAt1EndAt2 = childQueryOrderedByTimestamp.startAt(1).endAt(2); + const childQueryLast25 = childRef.limitToLast(25); + const childQueryOrderedByKey = childRef.orderByKey(); + const childQueryOrderedByPriority = childRef.orderByPriority(); + const childQueryOrderedByTimestamp = childRef.orderByChild("timestamp"); + const childQueryStartAt1 = childQueryOrderedByTimestamp.startAt(1); + const childQueryStartAt2 = childQueryOrderedByTimestamp.startAt(2); + const childQueryEndAt2 = childQueryOrderedByTimestamp.endAt(2); + const childQueryStartAt1EndAt2 = childQueryOrderedByTimestamp.startAt(1).endAt(2); // Equivalent queries expect(childRef.isEqual(childQueryLast25.ref), 'Query.isEqual - 9').to.be.true; @@ -258,10 +256,10 @@ describe('Query Tests', function() { }); it('Query.off can be called on the default query.', function() { - var path = (getRandomNode() as Reference); - var eventFired = false; + const path = (getRandomNode() as Reference); + let eventFired = false; - var callback = function() { eventFired = true; }; + const callback = function() { eventFired = true; }; path.limitToLast(5).on('value', callback); path.set({a: 5, b: 6}); @@ -274,10 +272,10 @@ describe('Query Tests', function() { }); it('Query.off can be called on the specific query.', function() { - var path = (getRandomNode() as Reference); - var eventFired = false; + const path = (getRandomNode() as Reference); + let eventFired = false; - var callback = function() { eventFired = true; }; + const callback = function() { eventFired = true; }; path.limitToLast(5).on('value', callback); path.set({a: 5, b: 6}); @@ -290,11 +288,11 @@ describe('Query Tests', function() { }); it('Query.off can be called without a callback specified.', function() { - var path = (getRandomNode() as Reference); - var eventFired = false; + const path = (getRandomNode() as Reference); + let eventFired = false; - var callback1 = function() { eventFired = true; }; - var callback2 = function() { eventFired = true; }; + const callback1 = function() { eventFired = true; }; + const callback2 = function() { eventFired = true; }; path.on('value', callback1); path.limitToLast(5).on('value', callback2); @@ -308,11 +306,11 @@ describe('Query Tests', function() { }); it('Query.off can be called without an event type or callback specified.', function() { - var path = (getRandomNode() as Reference); - var eventFired = false; + const path = (getRandomNode() as Reference); + let eventFired = false; - var callback1 = function() { eventFired = true; }; - var callback2 = function() { eventFired = true; }; + const callback1 = function() { eventFired = true; }; + const callback2 = function() { eventFired = true; }; path.on('value', callback1); path.limitToLast(5).on('value', callback2); @@ -326,9 +324,9 @@ describe('Query Tests', function() { }); it('Query.off respects provided context (for value events).', function() { - var ref = (getRandomNode() as Reference); + const ref = (getRandomNode() as Reference); - var a = 
new EventReceiver(), + const a = new EventReceiver(), b = new EventReceiver(); ref.on('value', a.onValue, a); @@ -351,9 +349,9 @@ describe('Query Tests', function() { }); it('Query.off respects provided context (for child events).', function() { - var ref = (getRandomNode() as Reference); + const ref = (getRandomNode() as Reference); - var a = new EventReceiver(), + const a = new EventReceiver(), b = new EventReceiver(); ref.on('child_added', a.onChildAdded, a); @@ -376,9 +374,9 @@ describe('Query Tests', function() { }); it('Query.off with no callback/context removes all callbacks, even with contexts (for value events).', function() { - var ref = (getRandomNode() as Reference); + const ref = (getRandomNode() as Reference); - var a = new EventReceiver(), + const a = new EventReceiver(), b = new EventReceiver(); ref.on('value', a.onValue, a); @@ -399,9 +397,9 @@ describe('Query Tests', function() { }); it('Query.off with no callback/context removes all callbacks, even with contexts (for child events).', function() { - var ref = (getRandomNode() as Reference); + const ref = (getRandomNode() as Reference); - var a = new EventReceiver(), + const a = new EventReceiver(), b = new EventReceiver(); ref.on('child_added', a.onChildAdded, a); @@ -422,9 +420,9 @@ describe('Query Tests', function() { }); it('Query.off with no event type / callback removes all callbacks (even those with contexts).', function() { - var ref = (getRandomNode() as Reference); + const ref = (getRandomNode() as Reference); - var a = new EventReceiver(), + const a = new EventReceiver(), b = new EventReceiver(); ref.on('value', a.onValue, a); @@ -452,16 +450,16 @@ describe('Query Tests', function() { }); it('Set a limit of 5, add a bunch of nodes, ensure only last 5 items are kept.', function() { - var node = (getRandomNode() as Reference); - var snap = null; + const node = (getRandomNode() as Reference); + let snap = null; node.limitToLast(5).on('value', function(s) { snap = s; }); node.set({}); - for (var i = 0; i < 10; i++) { + for (let i = 0; i < 10; i++) { node.push().set(i); } - var expected = 5; + let expected = 5; snap.forEach(function(child) { expect(child.val()).to.equal(expected); expected++; @@ -471,7 +469,7 @@ describe('Query Tests', function() { }); it('Set a limit of 5, add a bunch of nodes, ensure only last 5 items are sent from server.', async function() { - var node = (getRandomNode() as Reference); + const node = (getRandomNode() as Reference); await node.set({}); const pushPromises = []; @@ -502,12 +500,12 @@ describe('Query Tests', function() { }); it('Set various limits, ensure resulting data is correct.', async function() { - var node = (getRandomNode() as Reference); + const node = (getRandomNode() as Reference); await node.set({a: 1, b: 2, c: 3}); const tasks: TaskList = [ - [node.limitToLast(1), {c: 3}],, + [node.limitToLast(1), {c: 3}], [node.endAt().limitToLast(1), {c: 3}], [node.limitToLast(2), {b: 2, c: 3}], [node.limitToLast(3), {a: 1, b: 2, c: 3}], @@ -526,7 +524,7 @@ describe('Query Tests', function() { }); it('Set various limits with a startAt name, ensure resulting data is correct.', async function() { - var node = (getRandomNode() as Reference); + const node = (getRandomNode() as Reference); await node.set({a: 1, b: 2, c: 3}); @@ -562,7 +560,7 @@ describe('Query Tests', function() { }); it('Set various limits with a endAt name, ensure resulting data is correct.', async function() { - var node = (getRandomNode() as Reference); + const node = (getRandomNode() as Reference); await node.set({a: 
1, b: 2, c: 3}); @@ -598,7 +596,7 @@ describe('Query Tests', function() { }); it('Set various limits with a startAt name, ensure resulting data is correct from the server.', async function() { - var node = (getRandomNode() as Reference); + const node = (getRandomNode() as Reference); await node.set({a: 1, b: 2, c: 3}); @@ -625,8 +623,8 @@ describe('Query Tests', function() { }); it('Set limit, ensure child_removed and child_added events are fired when limit is hit.', function() { - var node = (getRandomNode() as Reference); - var added = '', removed = ''; + const node = (getRandomNode() as Reference); + let added = '', removed = ''; node.limitToLast(2).on('child_added', function(snap) { added += snap.key + ' '}); node.limitToLast(2).on('child_removed', function(snap) { removed += snap.key + ' '}); node.set({a: 1, b: 2, c: 3}); @@ -641,13 +639,13 @@ describe('Query Tests', function() { }); it('Set limit, ensure child_removed and child_added events are fired when limit is hit, using server data', async function() { - var node = (getRandomNode() as Reference); + const node = (getRandomNode() as Reference); await node.set({a: 1, b: 2, c: 3}); const ea = EventAccumulatorFactory.waitsForCount(2); - var added = '', removed = ''; + let added = '', removed = ''; node.limitToLast(2).on('child_added', function(snap) { added += snap.key + ' '; ea.addEvent(); @@ -669,9 +667,9 @@ describe('Query Tests', function() { }); it('Set start and limit, ensure child_removed and child_added events are fired when limit is hit.', function() { - var node = (getRandomNode() as Reference); + const node = (getRandomNode() as Reference); - var added = '', removed = ''; + let added = '', removed = ''; node.startAt(null, 'a').limitToFirst(2).on('child_added', function(snap) { added += snap.key + ' '}); node.startAt(null, 'a').limitToFirst(2).on('child_removed', function(snap) { removed += snap.key + ' '}); node.set({a: 1, b: 2, c: 3}); @@ -685,12 +683,12 @@ describe('Query Tests', function() { }); it('Set start and limit, ensure child_removed and child_added events are fired when limit is hit, using server data', async function() { - var node = getRandomNode() + const node = getRandomNode(); await node.set({a: 1, b: 2, c: 3}); const ea = EventAccumulatorFactory.waitsForCount(2); - var added = '', removed = ''; + let added = '', removed = ''; node.startAt(null, 'a').limitToFirst(2).on('child_added', function(snap) { added += snap.key + ' '; ea.addEvent(); @@ -712,9 +710,9 @@ describe('Query Tests', function() { }); it("Set start and limit, ensure child_added events are fired when limit isn't hit yet.", function() { - var node = (getRandomNode() as Reference); + const node = (getRandomNode() as Reference); - var added = '', removed = ''; + let added = '', removed = ''; node.startAt(null, 'a').limitToFirst(2).on('child_added', function(snap) { added += snap.key + ' '}); node.startAt(null, 'a').limitToFirst(2).on('child_removed', function(snap) { removed += snap.key + ' '}); node.set({c: 3}); @@ -728,7 +726,7 @@ describe('Query Tests', function() { }); it("Set start and limit, ensure child_added events are fired when limit isn't hit yet, using server data", async function() { - var node = (getRandomNode() as Reference); + const node = (getRandomNode() as Reference); await node.set({c: 3}); @@ -737,7 +735,7 @@ describe('Query Tests', function() { let added = ''; let removed = ''; node.startAt(null, 'a').limitToFirst(2).on('child_added', function(snap) { - added += snap.key + ' ' + added += snap.key + ' '; ea.addEvent(); }); 
node.startAt(null, 'a').limitToFirst(2).on('child_removed', function(snap) { @@ -757,12 +755,12 @@ describe('Query Tests', function() { }); it('Set a limit, ensure child_removed and child_added events are fired when limit is satisfied and you remove an item.', async function() { - var node = (getRandomNode() as Reference); + const node = (getRandomNode() as Reference); const ea = EventAccumulatorFactory.waitsForCount(1); - var added = '', removed = ''; + let added = '', removed = ''; node.limitToLast(2).on('child_added', function(snap) { - added += snap.key + ' ' + added += snap.key + ' '; ea.addEvent(); }); node.limitToLast(2).on('child_removed', function(snap) { removed += snap.key + ' '}); @@ -778,12 +776,12 @@ describe('Query Tests', function() { }); it('Set a limit, ensure child_removed and child_added events are fired when limit is satisfied and you remove an item. Using server data', async function() { - var node = (getRandomNode() as Reference); + const node = (getRandomNode() as Reference); await node.set({a: 1, b: 2, c: 3}); let ea = EventAccumulatorFactory.waitsForCount(2); - var added = '', removed = ''; + let added = '', removed = ''; node.limitToLast(2).on('child_added', function(snap) { added += snap.key + ' '; ea.addEvent(); @@ -809,9 +807,9 @@ describe('Query Tests', function() { }); it('Set a limit, ensure child_removed events are fired when limit is satisfied, you remove an item, and there are no more.', function() { - var node = (getRandomNode() as Reference); + const node = (getRandomNode() as Reference); - var added = '', removed = ''; + let added = '', removed = ''; node.limitToLast(2).on('child_added', function(snap) { added += snap.key + ' '}); node.limitToLast(2).on('child_removed', function(snap) { removed += snap.key + ' '}); node.set({b: 2, c: 3}); @@ -827,7 +825,7 @@ describe('Query Tests', function() { }); it('Set a limit, ensure child_removed events are fired when limit is satisfied, you remove an item, and there are no more. 
Using server data', async function() { - var node = (getRandomNode() as Reference); + const node = (getRandomNode() as Reference); const ea = EventAccumulatorFactory.waitsForCount(2); let added = ''; let removed = ''; @@ -855,13 +853,13 @@ describe('Query Tests', function() { }); it('Ensure startAt / endAt with priority works.', async function() { - var node = (getRandomNode() as Reference); + const node = (getRandomNode() as Reference); const tasks: TaskList = [ [node.startAt('w').endAt('y'), {b: 2, c: 3, d: 4}], [node.startAt('w').endAt('w'), {d: 4 }], [node.startAt('a').endAt('c'), null], - ] + ]; await node.set({ a: {'.value': 1, '.priority': 'z'}, @@ -882,7 +880,7 @@ describe('Query Tests', function() { }); it('Ensure startAt / endAt with priority work with server data.', async function() { - var node = (getRandomNode() as Reference); + const node = (getRandomNode() as Reference); await node.set({ a: {'.value': 1, '.priority': 'z'}, @@ -909,7 +907,7 @@ describe('Query Tests', function() { }); it('Ensure startAt / endAt with priority and name works.', async function() { - var node = (getRandomNode() as Reference); + const node = (getRandomNode() as Reference); await node.set({ a: {'.value': 1, '.priority': 1}, @@ -936,7 +934,7 @@ describe('Query Tests', function() { }); it('Ensure startAt / endAt with priority and name work with server data', async function() { - var node = (getRandomNode() as Reference); + const node = (getRandomNode() as Reference); await node.set({ a: {'.value': 1, '.priority': 1}, @@ -961,13 +959,13 @@ describe('Query Tests', function() { }); it('Ensure startAt / endAt with priority and name works (2).', function() { - var node = (getRandomNode() as Reference); + const node = (getRandomNode() as Reference); const tasks: TaskList = [ [node.startAt(1, 'c').endAt(2, 'b'), {a: 1, b: 2, c: 3, d: 4}], [node.startAt(1, 'd').endAt(2, 'a'), {d: 4, a: 1}], [node.startAt(1, 'e').endAt(2), {a: 1, b: 2}], - ] + ]; node.set({ c: {'.value': 3, '.priority': 1}, @@ -988,7 +986,7 @@ describe('Query Tests', function() { }); it('Ensure startAt / endAt with priority and name works (2). With server data', async function() { - var node = (getRandomNode() as Reference); + const node = (getRandomNode() as Reference); await node.set({ c: {'.value': 3, '.priority': 1}, @@ -1015,9 +1013,9 @@ describe('Query Tests', function() { }); it('Set a limit, add some nodes, ensure prevName works correctly.', function() { - var node = (getRandomNode() as Reference); + const node = (getRandomNode() as Reference); - var added = ''; + let added = ''; node.limitToLast(2).on('child_added', function(snap, prevName) { added += snap.key + ' ' + prevName + ', '; }); @@ -1039,7 +1037,7 @@ describe('Query Tests', function() { }); it('Set a limit, add some nodes, ensure prevName works correctly. 
With server data', async function() { - var node = (getRandomNode() as Reference); + const node = (getRandomNode() as Reference); let added = ''; await node.child('a').set(1); @@ -1071,8 +1069,8 @@ describe('Query Tests', function() { }); it('Set a limit, move some nodes, ensure prevName works correctly.', function() { - var node = (getRandomNode() as Reference); - var moved = ''; + const node = (getRandomNode() as Reference); + let moved = ''; node.limitToLast(2).on('child_moved', function(snap, prevName) { moved += snap.key + ' ' + prevName + ', '; }); @@ -1095,8 +1093,8 @@ describe('Query Tests', function() { }); it('Set a limit, move some nodes, ensure prevName works correctly, with server data', async function() { - var node = (getRandomNode() as Reference); - var moved = ''; + const node = (getRandomNode() as Reference); + let moved = ''; node.child('a').setWithPriority('a', 10); node.child('b').setWithPriority('b', 20); @@ -1124,9 +1122,9 @@ describe('Query Tests', function() { }); it('Numeric priorities: Set a limit, move some nodes, ensure prevName works correctly.', function() { - var node = (getRandomNode() as Reference); + const node = (getRandomNode() as Reference); - var moved = ''; + let moved = ''; node.limitToLast(2).on('child_moved', function(snap, prevName) { moved += snap.key + ' ' + prevName + ', '; }); @@ -1141,7 +1139,7 @@ describe('Query Tests', function() { }); it('Numeric priorities: Set a limit, move some nodes, ensure prevName works correctly. With server data', async function() { - var node = (getRandomNode() as Reference); + const node = (getRandomNode() as Reference); let moved = ''; node.child('a').setWithPriority('a', 1); @@ -1161,9 +1159,9 @@ describe('Query Tests', function() { }); it('Set a limit, add a bunch of nodes, ensure local events are correct.', function() { - var node = (getRandomNode() as Reference); + const node = (getRandomNode() as Reference); node.set({}); - var eventHistory = ''; + let eventHistory = ''; node.limitToLast(2).on('child_added', function(snap) { eventHistory = eventHistory + snap.val() + ' added, '; @@ -1172,8 +1170,8 @@ describe('Query Tests', function() { eventHistory = eventHistory + snap.val() + ' removed, '; }); - for (var i = 0; i < 5; i++) { - var n = node.push(); + for (let i = 0; i < 5; i++) { + const n = node.push(); n.set(i); } @@ -1181,9 +1179,9 @@ describe('Query Tests', function() { }); it('Set a limit, add a bunch of nodes, ensure remote events are correct.', async function() { - var nodePair = getRandomNode(2); - var writeNode = nodePair[0]; - var readNode = nodePair[1]; + const nodePair = getRandomNode(2); + const writeNode = nodePair[0]; + const readNode = nodePair[1]; const ea = new EventAccumulator(() => { try { expect(eventHistory).to.equal('3 added, 4 added, '); @@ -1192,7 +1190,7 @@ describe('Query Tests', function() { return false; } }); - var eventHistory = ''; + let eventHistory = ''; readNode.limitToLast(2).on('child_added', function(snap) { eventHistory = eventHistory + snap.val() + ' added, '; @@ -1209,8 +1207,8 @@ describe('Query Tests', function() { }); const promises = []; - for (var i = 0; i < 5; i++) { - var n = writeNode.push(); + for (let i = 0; i < 5; i++) { + const n = writeNode.push(); n.set(i); } @@ -1218,21 +1216,21 @@ describe('Query Tests', function() { }); it('Ensure on() returns callback function.', function() { - var node = (getRandomNode() as Reference); - var callback = function() { }; - var ret = node.on('value', callback); + const node = (getRandomNode() as Reference); + const 
callback = function() { }; + const ret = node.on('value', callback); expect(ret).to.equal(callback); }); it("Limit on unsynced node fires 'value'.", function(done) { - var f = (getRandomNode() as Reference); + const f = (getRandomNode() as Reference); f.limitToLast(1).on('value', function() { done(); }); }); it('Filtering to only null priorities works.', async function() { - var f = (getRandomNode() as Reference); + const f = (getRandomNode() as Reference); const ea = EventAccumulatorFactory.waitsForCount(1); f.root.child('.info/connected').on('value', function(snap) { @@ -1259,7 +1257,7 @@ describe('Query Tests', function() { }); it('null priorities included in endAt(2).', async function() { - var f = (getRandomNode() as Reference); + const f = (getRandomNode() as Reference); f.set({ a: {'.priority': null, '.value': 0}, @@ -1279,7 +1277,7 @@ describe('Query Tests', function() { }); it('null priorities not included in startAt(2).', async function() { - var f = (getRandomNode() as Reference); + const f = (getRandomNode() as Reference); f.set({ a: {'.priority': null, '.value': 0}, @@ -1300,21 +1298,21 @@ describe('Query Tests', function() { }); function dumpListens(node: Query) { - var listens = node.repo.persistentConnection_.listens_; - var nodePath = getPath(node); - var listenPaths = []; - for (var path in listens) { + const listens = (node.repo.persistentConnection_ as any).listens_; + const nodePath = getPath(node); + const listenPaths = []; + for (let path in listens) { if (path.substring(0, nodePath.length) === nodePath) { listenPaths.push(path); } } listenPaths.sort(); - var dumpPieces = []; - for (var i = 0; i < listenPaths.length; i++) { + const dumpPieces = []; + for (let i = 0; i < listenPaths.length; i++) { - var queryIds = []; - for (var queryId in listens[listenPaths[i]]) { + const queryIds = []; + for (let queryId in listens[listenPaths[i]]) { queryIds.push(queryId); } queryIds.sort(); @@ -1327,13 +1325,13 @@ describe('Query Tests', function() { } it('Dedupe listens: listen on parent.', function() { - var node = (getRandomNode() as Reference); + const node = (getRandomNode() as Reference); expect(dumpListens(node)).to.equal(''); - var aOn = node.child('a').on('value', function() { }); + const aOn = node.child('a').on('value', function() { }); expect(dumpListens(node)).to.equal('/a:default'); - var rootOn = node.on('value', function() {}); + const rootOn = node.on('value', function() {}); expect(dumpListens(node)).to.equal(':default'); node.off('value', rootOn); @@ -1344,12 +1342,12 @@ describe('Query Tests', function() { }); it('Dedupe listens: listen on grandchild.', function() { - var node = (getRandomNode() as Reference); + const node = (getRandomNode() as Reference); - var rootOn = node.on('value', function() {}); + const rootOn = node.on('value', function() {}); expect(dumpListens(node)).to.equal(':default'); - var aaOn = node.child('a/aa').on('value', function() { }); + const aaOn = node.child('a/aa').on('value', function() { }); expect(dumpListens(node)).to.equal(':default'); node.off('value', rootOn); @@ -1358,16 +1356,16 @@ describe('Query Tests', function() { }); it('Dedupe listens: listen on grandparent of two children.', function() { - var node = (getRandomNode() as Reference); + const node = (getRandomNode() as Reference); expect(dumpListens(node)).to.equal(''); - var aaOn = node.child('a/aa').on('value', function() { }); + const aaOn = node.child('a/aa').on('value', function() { }); expect(dumpListens(node)).to.equal('/a/aa:default'); - var bbOn = 
node.child('a/bb').on('value', function() { }); + const bbOn = node.child('a/bb').on('value', function() { }); expect(dumpListens(node)).to.equal('/a/aa:default;/a/bb:default'); - var rootOn = node.on('value', function() {}); + const rootOn = node.on('value', function() {}); expect(dumpListens(node)).to.equal(':default'); node.off('value', rootOn); @@ -1381,16 +1379,16 @@ describe('Query Tests', function() { }); it('Dedupe queried listens: multiple queried listens; no dupes', function() { - var node = (getRandomNode() as Reference); + const node = (getRandomNode() as Reference); expect(dumpListens(node)).to.equal(''); - var aLim1On = node.child('a').limitToLast(1).on('value', function() { }); + const aLim1On = node.child('a').limitToLast(1).on('value', function() { }); expect(dumpListens(node)).to.equal('/a:{"l":1,"vf":"r"}'); - var rootLim1On = node.limitToLast(1).on('value', function() { }); + const rootLim1On = node.limitToLast(1).on('value', function() { }); expect(dumpListens(node)).to.equal(':{"l":1,"vf":"r"};/a:{"l":1,"vf":"r"}'); - var aLim5On = node.child('a').limitToLast(5).on('value', function() { }); + const aLim5On = node.child('a').limitToLast(5).on('value', function() { }); expect(dumpListens(node)).to.equal(':{"l":1,"vf":"r"};/a:{"l":1,"vf":"r"},{"l":5,"vf":"r"}'); node.limitToLast(1).off('value', rootLim1On); @@ -1402,15 +1400,15 @@ describe('Query Tests', function() { }); it('Dedupe queried listens: listen on parent of queried children.', function() { - var node = (getRandomNode() as Reference); + const node = (getRandomNode() as Reference); - var aLim1On = node.child('a').limitToLast(1).on('value', function() { }); + const aLim1On = node.child('a').limitToLast(1).on('value', function() { }); expect(dumpListens(node)).to.equal('/a:{"l":1,"vf":"r"}'); - var bLim1On = node.child('b').limitToLast(1).on('value', function() { }); + const bLim1On = node.child('b').limitToLast(1).on('value', function() { }); expect(dumpListens(node)).to.equal('/a:{"l":1,"vf":"r"};/b:{"l":1,"vf":"r"}'); - var rootOn = node.on('value', function() { }); + const rootOn = node.on('value', function() { }); expect(dumpListens(node)).to.equal(':default'); // remove in slightly random order. 
@@ -1425,9 +1423,9 @@ describe('Query Tests', function() { }); it('Limit with mix of null and non-null priorities.', function() { - var node = (getRandomNode() as Reference); + const node = (getRandomNode() as Reference); - var children = []; + const children = []; node.limitToLast(5).on('child_added', function(childSnap) { children.push(childSnap.key); }); @@ -1445,10 +1443,9 @@ describe('Query Tests', function() { }); it('Limit with mix of null and non-null priorities using server data', async function() { - var node = getRandomNode(), - done, count; + const node = getRandomNode(); - var children = []; + const children = []; await node.set({ 'Vikrum': {'.priority': 1000, 'score': 1000, 'name': 'Vikrum'}, 'Mike': {'.priority': 500, 'score': 500, 'name': 'Mike'}, @@ -1470,14 +1467,14 @@ describe('Query Tests', function() { }); it('.on() with a context works.', function() { - var ref = (getRandomNode() as Reference); + const ref = (getRandomNode() as Reference); - var ListenerDoohickey = function() { this.snap = null; }; + const ListenerDoohickey = function() { this.snap = null; }; ListenerDoohickey.prototype.onEvent = function(snap) { this.snap = snap; }; - var l = new ListenerDoohickey(); + const l = new ListenerDoohickey(); ref.on('value', l.onEvent, l); ref.set('test'); @@ -1491,14 +1488,14 @@ describe('Query Tests', function() { }); it('.once() with a context works.', function() { - var ref = (getRandomNode() as Reference); + const ref = (getRandomNode() as Reference); - var ListenerDoohickey = function() { this.snap = null; }; + const ListenerDoohickey = function() { this.snap = null; }; ListenerDoohickey.prototype.onEvent = function(snap) { this.snap = snap; }; - var l = new ListenerDoohickey(); + const l = new ListenerDoohickey(); ref.once('value', l.onEvent, l); ref.set('test'); @@ -1510,9 +1507,9 @@ describe('Query Tests', function() { }); it('handles an update that deletes the entire window in a query', function() { - var ref = (getRandomNode() as Reference); + const ref = (getRandomNode() as Reference); - var snaps = []; + const snaps = []; ref.limitToLast(2).on('value', function(snap) { snaps.push(snap.val()); }); @@ -1534,14 +1531,14 @@ describe('Query Tests', function() { }); it('handles an out-of-view query on a child', function() { - var ref = (getRandomNode() as Reference); + const ref = (getRandomNode() as Reference); - var parent = null; + let parent = null; ref.limitToLast(1).on('value', function(snap) { parent = snap.val(); }); - var child = null; + let child = null; ref.child('a').on('value', function(snap) { child = snap.val(); }); @@ -1556,14 +1553,14 @@ describe('Query Tests', function() { }); it('handles a child query going out of view of the parent', function() { - var ref = (getRandomNode() as Reference); + const ref = (getRandomNode() as Reference); - var parent = null; + let parent = null; ref.limitToLast(1).on('value', function(snap) { parent = snap.val(); }); - var child = null; + let child = null; ref.child('a').on('value', function(snap) { child = snap.val(); }); @@ -1580,14 +1577,14 @@ describe('Query Tests', function() { }); it('handles diverging views', function() { - var ref = (getRandomNode() as Reference); + const ref = (getRandomNode() as Reference); - var c = null; + let c = null; ref.limitToLast(1).endAt(null, 'c').on('value', function(snap) { c = snap.val(); }); - var d = null; + let d = null; ref.limitToLast(1).endAt(null, 'd').on('value', function(snap) { d = snap.val(); }); @@ -1601,9 +1598,9 @@ describe('Query Tests', function() { }); 
it('handles removing a queried element', async function() { - var ref = (getRandomNode() as Reference); + const ref = (getRandomNode() as Reference); - var val; + let val; const ea = EventAccumulatorFactory.waitsForCount(1); ref.limitToLast(1).on('child_added', function(snap) { val = snap.val(); @@ -1621,10 +1618,10 @@ describe('Query Tests', function() { }); it('.startAt().limitToFirst(1) works.', function(done) { - var ref = (getRandomNode() as Reference); + const ref = (getRandomNode() as Reference); ref.set({a: 1, b: 2}); - var val; + let val; ref.startAt().limitToFirst(1).on('child_added', function(snap) { val = snap.val(); if (val === 1) { @@ -1634,11 +1631,11 @@ describe('Query Tests', function() { }); it('.startAt().limitToFirst(1) and then remove first child (case 1664).', async function() { - var ref = (getRandomNode() as Reference); + const ref = (getRandomNode() as Reference); ref.set({a: 1, b: 2}); const ea = EventAccumulatorFactory.waitsForCount(1); - var val; + let val; ref.startAt().limitToFirst(1).on('child_added', function(snap) { val = snap.val(); ea.addEvent(); @@ -1655,7 +1652,7 @@ describe('Query Tests', function() { }); it('.startAt() with two arguments works properly (case 1169).', function(done) { - var ref = (getRandomNode() as Reference); + const ref = (getRandomNode() as Reference); const data = { 'Walker': { name: 'Walker', @@ -1670,7 +1667,7 @@ describe('Query Tests', function() { }; ref.set(data, function() { ref.startAt(20, 'Walker').limitToFirst(2).on('value', function(s) { - var childNames = []; + const childNames = []; s.forEach(function(node) { childNames.push(node.key); }); expect(childNames).to.deep.equal(['Walker', 'Michael']); done(); @@ -1679,7 +1676,7 @@ describe('Query Tests', function() { }); it('handles multiple queries on the same node', async function() { - var ref = (getRandomNode() as Reference); + const ref = (getRandomNode() as Reference); await ref.set({ a: 1, @@ -1692,7 +1689,7 @@ describe('Query Tests', function() { const ea = EventAccumulatorFactory.waitsForCount(1); - var firstListen = false + let firstListen = false; ref.limitToLast(2).on('value', function(snap) { // This shouldn't get called twice, we don't update the values here expect(firstListen).to.be.false; @@ -1705,12 +1702,12 @@ describe('Query Tests', function() { // now do consecutive once calls await ref.limitToLast(1).once('value'); const snap = await ref.limitToLast(1).once('value'); - var val = snap.val(); + const val = snap.val(); expect(val).to.deep.equal({f: 6}); }); it('handles once called on a node with a default listener', async function() { - var ref = (getRandomNode() as Reference); + const ref = (getRandomNode() as Reference); await ref.set({ a: 1, @@ -1731,15 +1728,14 @@ describe('Query Tests', function() { // now do the once call const snap = await ref.limitToLast(1).once('child_added'); - var val = snap.val(); + const val = snap.val(); expect(val).to.equal(6); }); it('handles once called on a node with a default listener and non-complete limit', async function() { - var ref = getRandomNode(), - ready, done; - + const ref = getRandomNode(); + await ref.set({ a: 1, b: 2, @@ -1756,17 +1752,17 @@ describe('Query Tests', function() { // now do the once call const snap = await ref.limitToLast(5).once('value'); - var val = snap.val(); + const val = snap.val(); expect(val).to.deep.equal({a: 1, b: 2, c: 3}); }); it('Remote remove triggers events.', function(done) { - var refPair = getRandomNode(2), writeRef = refPair[0], readRef = refPair[1]; + const refPair = 
getRandomNode(2), writeRef = refPair[0], readRef = refPair[1]; writeRef.set({ a: 'a', b: 'b', c: 'c', d: 'd', e: 'e' }, function() { // Wait to get the initial data, and then remove 'c' remotely and wait for new data. - var count = 0; + let count = 0; readRef.limitToLast(5).on('value', function(s) { count++; if (count == 1) { @@ -1782,7 +1778,7 @@ describe('Query Tests', function() { }); it(".endAt(null, 'f').limitToLast(5) returns the right set of children.", function(done) { - var ref = (getRandomNode() as Reference); + const ref = (getRandomNode() as Reference); ref.set({ a: 'a', b: 'b', c: 'c', d: 'd', e: 'e', f: 'f', g: 'g', h: 'h' }, function() { ref.endAt(null, 'f').limitToLast(5).on('value', function(s) { expect(s.val()).to.deep.equal({b: 'b', c: 'c', d: 'd', e: 'e', f: 'f' }); @@ -1792,14 +1788,14 @@ describe('Query Tests', function() { }); it('complex update() at query root raises correct value event', function(done) { - var nodePair = getRandomNode(2); - var writer = nodePair[0]; - var reader = nodePair[1]; + const nodePair = getRandomNode(2); + const writer = nodePair[0]; + const reader = nodePair[1]; - var readerLoaded = false, numEventsReceived = 0; + let readerLoaded = false, numEventsReceived = 0; writer.child('foo').set({a: 1, b: 2, c: 3, d: 4, e: 5}, function(error, dummy) { reader.child('foo').startAt().limitToFirst(4).on('value', function(snapshot) { - var val = snapshot.val(); + const val = snapshot.val(); if (!readerLoaded) { readerLoaded = true; expect(val).to.deep.equal({a: 1, b: 2, c: 3, d: 4}); @@ -1819,14 +1815,14 @@ describe('Query Tests', function() { }); it('update() at query root raises correct value event', function(done) { - var nodePair = getRandomNode(2); - var writer = nodePair[0]; - var reader = nodePair[1]; + const nodePair = getRandomNode(2); + const writer = nodePair[0]; + const reader = nodePair[1]; - var readerLoaded = false, numEventsReceived = 0; + let readerLoaded = false, numEventsReceived = 0; writer.child('foo').set({ 'bar': 'a', 'baz': 'b', 'bam': 'c' }, function(error, dummy) { reader.child('foo').limitToLast(10).on('value', function(snapshot) { - var val = snapshot.val(); + const val = snapshot.val(); if (!readerLoaded) { readerLoaded = true; expect(val.bar).to.equal('a'); @@ -1845,14 +1841,14 @@ describe('Query Tests', function() { }); it('set() at query root raises correct value event', function(done) { - var nodePair = getRandomNode(2); - var writer = nodePair[0]; - var reader = nodePair[1]; + const nodePair = getRandomNode(2); + const writer = nodePair[0]; + const reader = nodePair[1]; - var readerLoaded = false, numEventsReceived = 0; + let readerLoaded = false, numEventsReceived = 0; writer.child('foo').set({ 'bar': 'a', 'baz': 'b', 'bam': 'c' }, function(error, dummy) { reader.child('foo').limitToLast(10).on('value', function(snapshot) { - var val = snapshot.val(); + const val = snapshot.val(); if (!readerLoaded) { readerLoaded = true; expect(val.bar).to.equal('a'); @@ -1872,16 +1868,16 @@ describe('Query Tests', function() { it('listen for child_added events with limit and different types fires properly', function(done) { - var nodePair = getRandomNode(2); - var writer = nodePair[0]; - var reader = nodePair[1]; + const nodePair = getRandomNode(2); + const writer = nodePair[0]; + const reader = nodePair[1]; - var numEventsReceived = 0, gotA = false, gotB = false, gotC = false; + let numEventsReceived = 0, gotA = false, gotB = false, gotC = false; writer.child('a').set(1, function(error, dummy) { writer.child('b').set('b', 
function(error, dummy) { writer.child('c').set({ 'deep': 'path', 'of': { 'stuff': true }}, function(error, dummy) { reader.limitToLast(3).on('child_added', function(snap) { - var val = snap.val(); + const val = snap.val(); switch (snap.key) { case 'a': gotA = true; @@ -1909,18 +1905,18 @@ describe('Query Tests', function() { }); it('listen for child_changed events with limit and different types fires properly', function(done) { - var nodePair = getRandomNode(2); - var writer = nodePair[0]; - var reader = nodePair[1]; + const nodePair = getRandomNode(2); + const writer = nodePair[0]; + const reader = nodePair[1]; - var numEventsReceived = 0, gotA = false, gotB = false, gotC = false, readerLoaded = false; + let numEventsReceived = 0, gotA = false, gotB = false, gotC = false, readerLoaded = false; writer.set({ a: 'something', b: "we'll", c: 'overwrite '}, function(error, dummy) { reader.limitToLast(3).on('value', function(snapshot) { if (!readerLoaded) { readerLoaded = true; // Set up listener for upcoming change events reader.limitToLast(3).on('child_changed', function(snap) { - var val = snap.val(); + const val = snap.val(); switch (snap.key) { case 'a': gotA = true; @@ -1953,11 +1949,11 @@ describe('Query Tests', function() { }); it('listen for child_remove events with limit and different types fires properly', function(done) { - var nodePair = getRandomNode(2); - var writer = nodePair[0]; - var reader = nodePair[1]; + const nodePair = getRandomNode(2); + const writer = nodePair[0]; + const reader = nodePair[1]; - var numEventsReceived = 0, gotA = false, gotB = false, gotC = false, readerLoaded = false; + let numEventsReceived = 0, gotA = false, gotB = false, gotC = false, readerLoaded = false; writer.set({ a: 1, b: 'b', c: { 'deep': 'path', 'of': { 'stuff': true }} }, function(error, dummy) { reader.limitToLast(3).on('value', function(snapshot) { if (!readerLoaded) { @@ -1965,7 +1961,7 @@ describe('Query Tests', function() { // Set up listener for upcoming change events reader.limitToLast(3).on('child_removed', function(snap) { - var val = snap.val(); + const val = snap.val(); switch (snap.key) { case 'a': gotA = true; @@ -1998,11 +1994,11 @@ describe('Query Tests', function() { }); it('listen for child_remove events when parent removed', function(done) { - var nodePair = getRandomNode(2); - var writer = nodePair[0]; - var reader = nodePair[1]; + const nodePair = getRandomNode(2); + const writer = nodePair[0]; + const reader = nodePair[1]; - var numEventsReceived = 0, gotA = false, gotB = false, gotC = false, readerLoaded = false; + let numEventsReceived = 0, gotA = false, gotB = false, gotC = false, readerLoaded = false; writer.set({ a: 1, b: 'b', c: { 'deep': 'path', 'of': { 'stuff': true }} }, function(error, dummy) { reader.limitToLast(3).on('value', function(snapshot) { @@ -2011,7 +2007,7 @@ describe('Query Tests', function() { // Set up listener for upcoming change events reader.limitToLast(3).on('child_removed', function(snap) { - var val = snap.val(); + const val = snap.val(); switch (snap.key) { case 'a': gotA = true; @@ -2042,11 +2038,11 @@ describe('Query Tests', function() { }); it('listen for child_remove events when parent set to scalar', function(done) { - var nodePair = getRandomNode(2); - var writer = nodePair[0]; - var reader = nodePair[1]; + const nodePair = getRandomNode(2); + const writer = nodePair[0]; + const reader = nodePair[1]; - var numEventsReceived = 0, gotA = false, gotB = false, gotC = false, readerLoaded = false; + let numEventsReceived = 0, gotA = false, 
gotB = false, gotC = false, readerLoaded = false; writer.set({ a: 1, b: 'b', c: { 'deep': 'path', 'of': { 'stuff': true }} }, function(error, dummy) { reader.limitToLast(3).on('value', function(snapshot) { @@ -2055,7 +2051,7 @@ describe('Query Tests', function() { // Set up listener for upcoming change events reader.limitToLast(3).on('child_removed', function(snap) { - var val = snap.val(); + const val = snap.val(); switch (snap.key) { case 'a': gotA = true; @@ -2087,10 +2083,10 @@ describe('Query Tests', function() { it('Queries behave wrong after .once().', async function() { - var refPair = getRandomNode(2), + const refPair = getRandomNode(2), writeRef = refPair[0], - readRef = refPair[1], - done, startAtCount, defaultCount; + readRef = refPair[1]; + let startAtCount, defaultCount; await writeRef.set({a: 1, b: 2, c: 3, d: 4 }); @@ -2119,8 +2115,8 @@ describe('Query Tests', function() { }); it('Case 2003: Correctly get events for startAt/endAt queries when priority changes.', function() { - var ref = (getRandomNode() as Reference); - var addedFirst = [], removedFirst = [], addedSecond = [], removedSecond = []; + const ref = (getRandomNode() as Reference); + const addedFirst = [], removedFirst = [], addedSecond = [], removedSecond = []; ref.startAt(0).endAt(10).on('child_added', function(snap) { addedFirst.push(snap.key); }); ref.startAt(0).endAt(10).on('child_removed', function(snap) { removedFirst.push(snap.key); }); ref.startAt(10).endAt(20).on('child_added', function(snap) { addedSecond.push(snap.key); }); @@ -2140,19 +2136,19 @@ describe('Query Tests', function() { }); it('Behaves with diverging queries', async function() { - var refs = getRandomNode(2); - var writer = refs[0]; - var reader = refs[1]; + const refs = getRandomNode(2); + const writer = refs[0]; + const reader = refs[1]; await writer.set({ a: {b: 1, c: 2}, e: 3 }); - var childCount = 0; + let childCount = 0; reader.child('a/b').on('value', function(snap) { - var val = snap.val(); + const val = snap.val(); childCount++; if (childCount == 1) { expect(val).to.equal(1); @@ -2163,10 +2159,10 @@ describe('Query Tests', function() { }); const ea = EventAccumulatorFactory.waitsForCount(1); - var count = 0; + let count = 0; reader.limitToLast(2).on('value', function(snap) { ea.addEvent(); - var val = snap.val(); + const val = snap.val(); count++; if (count == 1) { expect(val).to.deep.equal({a: {b: 1, c: 2}, e: 3}); @@ -2184,10 +2180,10 @@ describe('Query Tests', function() { }); it('Priority-only updates are processed correctly by server.', async function() { - var refPair = (getRandomNode(2) as Reference[]), readRef = refPair[0], writeRef = refPair[1]; + const refPair = (getRandomNode(2) as Reference[]), readRef = refPair[0], writeRef = refPair[1]; const ea = EventAccumulatorFactory.waitsForCount(1); - var readVal; + let readVal; readRef.limitToLast(2).on('value', function(s) { readVal = s.val(); if (readVal) { @@ -2211,7 +2207,7 @@ describe('Query Tests', function() { }); it('Server: Test re-listen', function(done) { - var refPair = (getRandomNode(2) as Reference[]), ref = refPair[0], ref2 = refPair[1]; + const refPair = (getRandomNode(2) as Reference[]), ref = refPair[0], ref2 = refPair[1]; ref.set({ a: 'a', b: 'b', @@ -2222,7 +2218,7 @@ describe('Query Tests', function() { g: 'g' }); - var before; + let before; ref.startAt(null, 'a').endAt(null, 'b').on('value', function(b) { before = b.val(); }); @@ -2236,7 +2232,7 @@ describe('Query Tests', function() { }); it('Server: Test re-listen 2', function(done) { - var refPair = 
getRandomNode(2), ref = refPair[0], ref2 = refPair[1]; + const refPair = getRandomNode(2), ref = refPair[0], ref2 = refPair[1]; ref.set({ a: 'a', b: 'b', @@ -2247,7 +2243,7 @@ describe('Query Tests', function() { g: 'g' }); - var before; + let before; ref.startAt(null, 'b').limitToFirst(3).on('value', function(b) { before = b.val(); }); @@ -2261,7 +2257,7 @@ describe('Query Tests', function() { }); it('Server: Test re-listen 3', function(done) { - var refPair = getRandomNode(2), ref = refPair[0], ref2 = refPair[1]; + const refPair = getRandomNode(2), ref = refPair[0], ref2 = refPair[1]; ref.set({ a: 'a', b: 'b', @@ -2272,7 +2268,7 @@ describe('Query Tests', function() { g: 'g' }); - var before; + let before; ref.limitToLast(3).on('value', function(b) { before = b.val(); }); @@ -2286,10 +2282,10 @@ describe('Query Tests', function() { }); it('Server limit below limit works properly.', async function() { - var refPair = getRandomNode(2), + const refPair = getRandomNode(2), readRef = refPair[0], - writeRef = refPair[1], - childData; + writeRef = refPair[1]; + let childData; await writeRef.set({ a: { @@ -2317,22 +2313,22 @@ describe('Query Tests', function() { ea.reset(); writeRef.child('a/ab').setWithPriority(1, 2); - await ea.promise + await ea.promise; expect(childData).to.deep.equal({ aa: 1 }); }); it('Server: Setting grandchild of item in limit works.', async function() { - var refPair = getRandomNode(2), ref = refPair[0], ref2 = refPair[1]; + const refPair = getRandomNode(2), ref = refPair[0], ref2 = refPair[1]; ref.set({ a: { name: 'Mike' }}); const ea = EventAccumulatorFactory.waitsForCount(1); - var snaps = []; + const snaps = []; ref2.limitToLast(1).on('value', function(s) { - var val = s.val(); + const val = s.val(); if (val !== null) { snaps.push(val); ea.addEvent(); @@ -2350,16 +2346,16 @@ describe('Query Tests', function() { }); it('Server: Updating grandchildren of item in limit works.', async function() { - var refPair = getRandomNode(2), ref = refPair[0], ref2 = refPair[1]; + const refPair = getRandomNode(2), ref = refPair[0], ref2 = refPair[1]; ref.set({ a: { name: 'Mike' }}); const ea = EventAccumulatorFactory.waitsForCount(1); - var snaps = []; + const snaps = []; ref2.limitToLast(1).on('value', function(s) { - var val = s.val(); + const val = s.val(); if (val !== null) { snaps.push(val); ea.addEvent(); @@ -2381,10 +2377,10 @@ describe('Query Tests', function() { }); it('Server: New child at end of limit shows up.', async function() { - var refPair = getRandomNode(2), ref = refPair[0], ref2 = refPair[1]; + const refPair = getRandomNode(2), ref = refPair[0], ref2 = refPair[1]; const ea = EventAccumulatorFactory.waitsForCount(1); - var snap; + let snap; ref2.limitToLast(1).on('value', function(s) { snap = s.val(); ea.addEvent(); @@ -2405,10 +2401,10 @@ describe('Query Tests', function() { }); it('Server: Priority-only updates are processed correctly by server (1).', async function() { - var refPair = getRandomNode(2), readRef = refPair[0], writeRef = refPair[1]; + const refPair = getRandomNode(2), readRef = refPair[0], writeRef = refPair[1]; const ea = EventAccumulatorFactory.waitsForCount(1); - var readVal; + let readVal; readRef.limitToLast(2).on('value', function(s) { readVal = s.val(); if (readVal) { @@ -2421,7 +2417,7 @@ describe('Query Tests', function() { c: { '.priority': 30, '.value': 3} }); - await ea.promise + await ea.promise; expect(readVal).to.deep.equal({ b: 2, c: 3 }); ea.reset(); @@ -2433,10 +2429,10 @@ describe('Query Tests', function() { // Same as above 
but with an endAt() so we hit CompoundQueryView instead of SimpleLimitView. it('Server: Priority-only updates are processed correctly by server (2).', async function() { - var refPair = getRandomNode(2), readRef = refPair[0], writeRef = refPair[1]; + const refPair = getRandomNode(2), readRef = refPair[0], writeRef = refPair[1]; const ea = EventAccumulatorFactory.waitsForCount(1); - var readVal; + let readVal; readRef.endAt(50).limitToLast(2).on('value', function(s) { readVal = s.val(); if (readVal) { @@ -2461,8 +2457,8 @@ describe('Query Tests', function() { }); it('Latency compensation works with limit and pushed object.', function() { - var ref = (getRandomNode() as Reference); - var events = []; + const ref = (getRandomNode() as Reference); + const events = []; ref.limitToLast(3).on('child_added', function(s) { events.push(s.val()); }); // If you change this to ref.push('foo') it works. @@ -2473,10 +2469,10 @@ describe('Query Tests', function() { }); it("Cache doesn't remove items that have fallen out of view.", async function() { - var refPair = getRandomNode(2), readRef = refPair[0], writeRef = refPair[1]; + const refPair = getRandomNode(2), readRef = refPair[0], writeRef = refPair[1]; let ea = EventAccumulatorFactory.waitsForCount(1); - var readVal; + let readVal; readRef.limitToLast(2).on('value', function(s) { readVal = s.val(); ea.addEvent(); @@ -2485,8 +2481,8 @@ describe('Query Tests', function() { await ea.promise; expect(readVal).to.be.null; - ea = EventAccumulatorFactory.waitsForCount(4) - for (var i = 0; i < 4; i++) { + ea = EventAccumulatorFactory.waitsForCount(4); + for (let i = 0; i < 4; i++) { writeRef.child('k' + i).set(i); } @@ -2495,7 +2491,7 @@ describe('Query Tests', function() { await pause(500); expect(readVal).to.deep.equal({'k2': 2, 'k3': 3}); - ea = EventAccumulatorFactory.waitsForCount(1) + ea = EventAccumulatorFactory.waitsForCount(1); writeRef.remove(); await ea.promise; @@ -2503,9 +2499,9 @@ describe('Query Tests', function() { }); it('handles an update that moves another child that has a deeper listener out of view', async function() { - var refs = getRandomNode(2); - var reader = refs[0]; - var writer = refs[1]; + const refs = getRandomNode(2); + const reader = refs[0]; + const writer = refs[1]; await writer.set({ a: { '.priority': 10, '.value': 1}, @@ -2518,7 +2514,7 @@ describe('Query Tests', function() { }); const ea = EventAccumulatorFactory.waitsForCount(1); - var val; + let val; reader.limitToLast(2).on('value', function(snap) { val = snap.val(); if (val) { @@ -2537,7 +2533,7 @@ describe('Query Tests', function() { }); it('Integer keys behave numerically 1.', function(done) { - var ref = (getRandomNode() as Reference); + const ref = (getRandomNode() as Reference); ref.set({1: true, 50: true, 550: true, 6: true, 600: true, 70: true, 8: true, 80: true }, function() { ref.startAt(null, '80').once('value', function(s) { expect(s.val()).to.deep.equal({80: true, 550: true, 600: true }); @@ -2547,7 +2543,7 @@ describe('Query Tests', function() { }); it('Integer keys behave numerically 2.', function(done) { - var ref = (getRandomNode() as Reference); + const ref = (getRandomNode() as Reference); ref.set({1: true, 50: true, 550: true, 6: true, 600: true, 70: true, 8: true, 80: true }, function() { ref.endAt(null, '50').once('value', function(s) { expect(s.val()).to.deep.equal({1: true, 6: true, 8: true, 50: true }); @@ -2557,7 +2553,7 @@ describe('Query Tests', function() { }); it('Integer keys behave numerically 3.', function(done) { - var ref = 
(getRandomNode() as Reference); + const ref = (getRandomNode() as Reference); ref.set({1: true, 50: true, 550: true, 6: true, 600: true, 70: true, 8: true, 80: true}, function() { ref.startAt(null, '50').endAt(null, '80').once('value', function(s) { expect(s.val()).to.deep.equal({50: true, 70: true, 80: true }); @@ -2567,7 +2563,7 @@ describe('Query Tests', function() { }); it('.limitToLast() on node with priority.', function(done) { - var ref = (getRandomNode() as Reference); + const ref = (getRandomNode() as Reference); ref.set({'a': 'blah', '.priority': 'priority'}, function() { ref.limitToLast(2).once('value', function(s) { expect(s.exportVal()).to.deep.equal({a: 'blah' }); @@ -2577,8 +2573,8 @@ describe('Query Tests', function() { }); it('.equalTo works', async function() { - var ref = (getRandomNode() as Reference); - var done = false; + const ref = (getRandomNode() as Reference); + const done = false; await ref.set({ a: 1, @@ -2587,21 +2583,21 @@ describe('Query Tests', function() { }); const snap1 = await ref.equalTo(2).once('value'); - var val1 = snap1.exportVal(); + const val1 = snap1.exportVal(); expect(val1).to.deep.equal({b: {'.priority': 2, '.value': 2}}); const snap2 = await ref.equalTo('3', 'c').once('value'); - var val2 = snap2.exportVal(); + const val2 = snap2.exportVal(); expect(val2).to.deep.equal({c: {'.priority': '3', '.value': 3}}); const snap3 = await ref.equalTo(null, 'c').once('value'); - var val3 = snap3.exportVal(); + const val3 = snap3.exportVal(); expect(val3).to.be.null; }); it('Handles fallback for orderBy', async function() { - var ref = (getRandomNode() as Reference); + const ref = (getRandomNode() as Reference); const children = []; @@ -2620,10 +2616,10 @@ describe('Query Tests', function() { }); it("Get notified of deletes that happen while offline.", async function() { - var refPair = getRandomNode(2); - var queryRef = refPair[0]; - var writerRef = refPair[1]; - var readSnapshot = null; + const refPair = getRandomNode(2); + const queryRef = refPair[0]; + const writerRef = refPair[1]; + let readSnapshot = null; // Write 3 children and then start our limit query. 
     await writerRef.set({a: 1, b: 2, c: 3});
@@ -2653,10 +2649,10 @@ describe('Query Tests', function() {
   });

   it('Snapshot children respect default ordering', function(done) {
-    var refPair = getRandomNode(2);
-    var queryRef = refPair[0], writerRef = refPair[1];
+    const refPair = getRandomNode(2);
+    const queryRef = refPair[0], writerRef = refPair[1];

-    var list = {
+    const list = {
       'a': {
         thisvaluefirst: { '.value': true, '.priority': 1 },
         name: { '.value': 'Michael', '.priority': 2 },
@@ -2676,22 +2672,22 @@ describe('Query Tests', function() {

     writerRef.set(list, function() {
       queryRef.orderByChild('name').once('value', function(snap) {
-        var expectedKeys = ['thisvaluefirst', 'name', 'thisvaluelast'];
-        var expectedNames = ['Jonny', 'Michael', 'Rob'];
+        const expectedKeys = ['thisvaluefirst', 'name', 'thisvaluelast'];
+        const expectedNames = ['Jonny', 'Michael', 'Rob'];

         // Validate that snap.child() resets order to default for child snaps
-        var orderedKeys = [];
+        const orderedKeys = [];
         snap.child('b').forEach(function(childSnap) {
           orderedKeys.push(childSnap.key);
         });
         expect(orderedKeys).to.deep.equal(expectedKeys);

         // Validate that snap.forEach() resets ordering to default for child snaps
-        var orderedNames = [];
+        const orderedNames = [];
         snap.forEach(function(childSnap) {
           orderedNames.push(childSnap.child('name').val());

-          var orderedKeys = [];
+          const orderedKeys = [];
           childSnap.forEach(function(grandchildSnap) {
             orderedKeys.push(grandchildSnap.key);
           });
@@ -2711,7 +2707,7 @@ describe('Query Tests', function() {
     // one-time listener. Event removal code path wasn't removing the listener because it stopped as soon as it
     // found the default view. This left the zombie one-time listener and check failed on the second attempt to
     // create a listener for the same path (asana#61028598952586).
-    var ref = getRandomNode(1)[0];
+    const ref = getRandomNode(1)[0];

     ref.child('child').set({name: "John"}, function() {
       ref.orderByChild('name').equalTo('John').on('value', function(snap) {
@@ -2727,7 +2723,7 @@ describe('Query Tests', function() {
   });

   it('Can JSON serialize refs', function() {
-    var ref = (getRandomNode() as Reference);
+    const ref = (getRandomNode() as Reference);
     expect(JSON.stringify(ref)).to.equal('"' + ref.toString() + '"');
   });
 });
diff --git a/tests/database/repoinfo.test.ts b/tests/database/repoinfo.test.ts
index 6f930f4d237..9f1b81505c9 100644
--- a/tests/database/repoinfo.test.ts
+++ b/tests/database/repoinfo.test.ts
@@ -14,22 +14,28 @@
 * limitations under the License.
 */

-import { testRepoInfo } from "./helpers/util";
-import { CONSTANTS } from "../../src/database/realtime/Constants";
-import { expect } from "chai";
+import { testRepoInfo } from './helpers/util';
+import {
+  LAST_SESSION_PARAM,
+  LONG_POLLING,
+  PROTOCOL_VERSION,
+  VERSION_PARAM,
+  WEBSOCKET
+} from '../../src/database/realtime/Constants';
+import { expect } from 'chai';

 describe('RepoInfo', function() {
   it('should return the correct URL', function() {
-    var repoInfo = testRepoInfo('https://test-ns.firebaseio.com');
+    const repoInfo = testRepoInfo('https://test-ns.firebaseio.com');

-    var urlParams = {};
-    urlParams[CONSTANTS.VERSION_PARAM] = CONSTANTS.PROTOCOL_VERSION;
-    urlParams[CONSTANTS.LAST_SESSION_PARAM] = 'test';
+    const urlParams = {};
+    urlParams[VERSION_PARAM] = PROTOCOL_VERSION;
+    urlParams[LAST_SESSION_PARAM] = 'test';

-    var websocketUrl = repoInfo.connectionURL(CONSTANTS.WEBSOCKET, urlParams);
+    const websocketUrl = repoInfo.connectionURL(WEBSOCKET, urlParams);
     expect(websocketUrl).to.equal('wss://test-ns.firebaseio.com/.ws?v=5&ls=test');

-    var longPollingUrl = repoInfo.connectionURL(CONSTANTS.LONG_POLLING, urlParams);
+    const longPollingUrl = repoInfo.connectionURL(LONG_POLLING, urlParams);
     expect(longPollingUrl).to.equal('https://test-ns.firebaseio.com/.lp?v=5&ls=test');
   });
 });
diff --git a/tests/database/sortedmap.test.ts b/tests/database/sortedmap.test.ts
index 456bdda0d07..59f39d81c19 100644
--- a/tests/database/sortedmap.test.ts
+++ b/tests/database/sortedmap.test.ts
@@ -25,7 +25,7 @@ import { shuffle } from "./helpers/util";
 // Many of these were adapted from the mugs source code.
 // http://mads379.github.com/mugs/
 describe("SortedMap Tests", function() {
-  var defaultCmp = function(a, b) {
+  const defaultCmp = function(a, b) {
     if (a === b) {
       return 0;
     } else if (a < b) {
@@ -36,33 +36,33 @@
   };

   it("Create node", function() {
-    var map = new SortedMap(defaultCmp).insert("key", "value");
-    expect(map.root_.left.isEmpty()).to.equal(true);
-    expect(map.root_.right.isEmpty()).to.equal(true);
+    const map = new SortedMap(defaultCmp).insert("key", "value");
+    expect((map as any).root_.left.isEmpty()).to.equal(true);
+    expect((map as any).root_.right.isEmpty()).to.equal(true);
   });

   it("You can search a map for a specific key", function() {
-    var map = new SortedMap(defaultCmp).insert(1,1).insert(2,2);
+    const map = new SortedMap(defaultCmp).insert(1,1).insert(2,2);
     expect(map.get(1)).to.equal(1);
     expect(map.get(2)).to.equal(2);
     expect(map.get(3)).to.equal(null);
   });

   it("You can insert a new key/value pair into the tree", function() {
-    var map = new SortedMap(defaultCmp).insert(1,1).insert(2,2);
-    expect(map.root_.key).to.equal(2);
-    expect(map.root_.left.key).to.equal(1);
+    const map = new SortedMap(defaultCmp).insert(1,1).insert(2,2);
+    expect((map as any).root_.key).to.equal(2);
+    expect((map as any).root_.left.key).to.equal(1);
   });

   it("You can remove a key/value pair from the map",function() {
-    var map = new SortedMap(defaultCmp).insert(1,1).insert(2,2);
-    var newMap = map.remove(1);
+    const map = new SortedMap(defaultCmp).insert(1,1).insert(2,2);
+    const newMap = map.remove(1);
     expect(newMap.get(2)).to.equal(2);
     expect(newMap.get(1)).to.equal(null);
   });

   it("More removals",function(){
-    var map = new SortedMap(defaultCmp)
+    const map = new SortedMap(defaultCmp)
       .insert(1,1)
       .insert(50,50)
       .insert(3,3)
@@ -76,9 +76,9 @@
       .insert(42,42)
       .insert(88,88);

-    var m1 = map.remove(7);
-    var m2 = 
m1.remove(3); - var m3 = m2.remove(1); + const m1 = map.remove(7); + const m2 = m1.remove(3); + const m3 = m2.remove(1); expect(m3.count()).to.equal(9); expect(m3.get(1)).to.equal(null); expect(m3.get(3)).to.equal(null); @@ -87,40 +87,40 @@ describe("SortedMap Tests", function() { }); it("Removal bug", function() { - var map = new SortedMap(defaultCmp) + const map = new SortedMap(defaultCmp) .insert(1, 1) .insert(2, 2) .insert(3, 3); - var m1 = map.remove(2); + const m1 = map.remove(2); expect(m1.get(1)).to.equal(1); expect(m1.get(3)).to.equal(3); }); it("Test increasing", function(){ - var total = 100; - var item; - var map = new SortedMap(defaultCmp).insert(1,1); + const total = 100; + let item; + let map = new SortedMap(defaultCmp).insert(1,1); for (item = 2; item < total ; item++) { map = map.insert(item,item); } - expect(map.root_.checkMaxDepth_()).to.equal(true); + expect((map as any).root_.checkMaxDepth_()).to.equal(true); for (item = 2; item < total ; item++) { map = map.remove(item); } - expect(map.root_.checkMaxDepth_()).to.equal(true); + expect((map as any).root_.checkMaxDepth_()).to.equal(true); }); it("The structure should be valid after insertion (1)",function(){ - var map = new SortedMap(defaultCmp).insert(1,1).insert(2,2).insert(3,3); + const map = new SortedMap(defaultCmp).insert(1,1).insert(2,2).insert(3,3); - expect(map.root_.key).to.equal(2); - expect(map.root_.left.key).to.equal(1); - expect(map.root_.right.key).to.equal(3); + expect((map as any).root_.key).to.equal(2); + expect((map as any).root_.left.key).to.equal(1); + expect((map as any).root_.right.key).to.equal(3); }); it("The structure should be valid after insertion (2)",function(){ - var map = new SortedMap(defaultCmp) + const map = new SortedMap(defaultCmp) .insert(1,1) .insert(2,2) .insert(3,3) @@ -135,29 +135,29 @@ describe("SortedMap Tests", function() { .insert(12,12); expect(map.count()).to.equal(12); - expect(map.root_.checkMaxDepth_()).to.equal(true); + expect((map as any).root_.checkMaxDepth_()).to.equal(true); }); it("Rotate left leaves the tree in a valid state",function(){ - var node = new LLRBNode(4,4,false, + const node = new LLRBNode(4,4,false, new LLRBNode(2,2,false,null, null), new LLRBNode(7,7,true, new LLRBNode(5,5,false,null,null), new LLRBNode(8,8,false,null,null))); - var node2 = node.rotateLeft_(); + const node2 = (node as any).rotateLeft_(); expect(node2.count()).to.equal(5); expect(node2.checkMaxDepth_()).to.equal(true); }); it("Rotate right leaves the tree in a valid state", function(){ - var node = new LLRBNode(7,7,false, + const node = new LLRBNode(7,7,false, new LLRBNode(4,4,true, new LLRBNode(2,2,false, null, null), new LLRBNode(5,5,false, null, null)), new LLRBNode(8,8,false, null, null)); - var node2 = node.rotateRight_(); + const node2 = (node as any).rotateRight_(); expect(node2.count()).to.equal(5); expect(node2.key).to.equal(4); expect(node2.left.key).to.equal(2); @@ -167,7 +167,7 @@ describe("SortedMap Tests", function() { }); it("The structure should be valid after insertion (3)",function(){ - var map = new SortedMap(defaultCmp) + const map = new SortedMap(defaultCmp) .insert(1,1) .insert(50,50) .insert(3,3) @@ -176,53 +176,53 @@ describe("SortedMap Tests", function() { .insert(9,9); expect(map.count()).to.equal(6); - expect(map.root_.checkMaxDepth_()).to.equal(true); + expect((map as any).root_.checkMaxDepth_()).to.equal(true); - var m2 = map + const m2 = map .insert(20,20) .insert(18,18) .insert(2,2); expect(m2.count()).to.equal(9); - 
expect(m2.root_.checkMaxDepth_()).to.equal(true); + expect((m2 as any).root_.checkMaxDepth_()).to.equal(true); - var m3 = m2 + const m3 = m2 .insert(71,71) .insert(42,42) .insert(88,88); expect(m3.count()).to.equal(12); - expect(m3.root_.checkMaxDepth_()).to.equal(true); + expect((m3 as any).root_.checkMaxDepth_()).to.equal(true); }); it("you can overwrite a value",function(){ - var map = new SortedMap(defaultCmp).insert(10,10).insert(10,8); + const map = new SortedMap(defaultCmp).insert(10,10).insert(10,8); expect(map.get(10)).to.equal(8); }); it("removing the last element returns an empty map",function() { - var map = new SortedMap(defaultCmp).insert(10,10).remove(10); + const map = new SortedMap(defaultCmp).insert(10,10).remove(10); expect(map.isEmpty()).to.equal(true); }); it("empty .get()",function() { - var empty = new SortedMap(defaultCmp); + const empty = new SortedMap(defaultCmp); expect(empty.get("something")).to.equal(null); }); it("empty .count()",function() { - var empty = new SortedMap(defaultCmp); + const empty = new SortedMap(defaultCmp); expect(empty.count()).to.equal(0); }); it("empty .remove()",function() { - var empty = new SortedMap(defaultCmp); + const empty = new SortedMap(defaultCmp); expect(empty.remove("something").count()).to.equal(0); }); it(".reverseTraversal() works.", function() { - var map = new SortedMap(defaultCmp).insert(1, 1).insert(5, 5).insert(3, 3).insert(2, 2).insert(4, 4); - var next = 5; + const map = new SortedMap(defaultCmp).insert(1, 1).insert(5, 5).insert(3, 3).insert(2, 2).insert(4, 4); + let next = 5; map.reverseTraversal(function(key, value) { expect(key).to.equal(next); next--; @@ -231,9 +231,9 @@ describe("SortedMap Tests", function() { }); it("insertion and removal of 100 items in random order.", function() { - var N = 100; - var toInsert = [], toRemove = []; - for(var i = 0; i < N; i++) { + const N = 100; + const toInsert = [], toRemove = []; + for(let i = 0; i < N; i++) { toInsert.push(i); toRemove.push(i); } @@ -241,16 +241,16 @@ describe("SortedMap Tests", function() { shuffle(toInsert); shuffle(toRemove); - var map = new SortedMap(defaultCmp); + let map = new SortedMap(defaultCmp); - for (i = 0 ; i < N ; i++ ) { + for (let i = 0 ; i < N ; i++ ) { map = map.insert(toInsert[i], toInsert[i]); - expect(map.root_.checkMaxDepth_()).to.equal(true); + expect((map as any).root_.checkMaxDepth_()).to.equal(true); } expect(map.count()).to.equal(N); // Ensure order is correct. - var next = 0; + let next = 0; map.inorderTraversal(function(key, value) { expect(key).to.equal(next); expect(value).to.equal(next); @@ -258,8 +258,8 @@ describe("SortedMap Tests", function() { }); expect(next).to.equal(N); - for (i = 0 ; i < N ; i++ ) { - expect(map.root_.checkMaxDepth_()).to.equal(true); + for (let i = 0 ; i < N ; i++ ) { + expect((map as any).root_.checkMaxDepth_()).to.equal(true); map = map.remove(toRemove[i]); } expect(map.count()).to.equal(0); @@ -267,24 +267,24 @@ describe("SortedMap Tests", function() { // A little perf test for convenient benchmarking. 
xit("Perf", function() { - for(var j = 0; j < 5; j++) { - var map = new SortedMap(defaultCmp); - var start = new Date().getTime(); - for(var i = 0; i < 50000; i++) { + for(let j = 0; j < 5; j++) { + let map = new SortedMap(defaultCmp); + const start = new Date().getTime(); + for(let i = 0; i < 50000; i++) { map = map.insert(i, i); } - for(var i = 0; i < 50000; i++) { + for(let i = 0; i < 50000; i++) { map = map.remove(i); } - var end = new Date().getTime(); + const end = new Date().getTime(); // console.log(end-start); } }); xit("Perf: Insertion and removal with various # of items.", function() { - var verifyTraversal = function(map, max) { - var next = 0; + const verifyTraversal = function(map, max) { + let next = 0; map.inorderTraversal(function(key, value) { expect(key).to.equal(next); expect(value).to.equal(next); @@ -293,9 +293,9 @@ describe("SortedMap Tests", function() { expect(next).to.equal(max); }; - for(var N = 10; N <= 100000; N *= 10) { - var toInsert = [], toRemove = []; - for(var i = 0; i < N; i++) { + for(let N = 10; N <= 100000; N *= 10) { + const toInsert = [], toRemove = []; + for(let i = 0; i < N; i++) { toInsert.push(i); toRemove.push(i); } @@ -303,40 +303,40 @@ describe("SortedMap Tests", function() { shuffle(toInsert); shuffle(toRemove); - var map = new SortedMap(defaultCmp); + let map = new SortedMap(defaultCmp); - var start = new Date().getTime(); - for (i = 0 ; i < N ; i++ ) { + const start = new Date().getTime(); + for (let i = 0 ; i < N ; i++ ) { map = map.insert(toInsert[i], toInsert[i]); } // Ensure order is correct. verifyTraversal(map, N); - for (i = 0 ; i < N ; i++ ) { + for (let i = 0 ; i < N ; i++ ) { map = map.remove(toRemove[i]); } - var elapsed = new Date().getTime() - start; + const elapsed = new Date().getTime() - start; // console.log(N + ": " +elapsed); } }); xit("Perf: Comparison with {}: Insertion and removal with various # of items.", function() { - var verifyTraversal = function(tree, max) { - var keys = []; - for(var k in tree) + const verifyTraversal = function(tree, max) { + const keys = []; + for(const k in tree) keys.push(k); keys.sort(); expect(keys.length).to.equal(max); - for(var i = 0; i < max; i++) + for(let i = 0; i < max; i++) expect(tree[i]).to.equal(i); }; - for(var N = 10; N <= 100000; N *= 10) { - var toInsert = [], toRemove = []; - for(var i = 0; i < N; i++) { + for(let N = 10; N <= 100000; N *= 10) { + const toInsert = [], toRemove = []; + for(let i = 0; i < N; i++) { toInsert.push(i); toRemove.push(i); } @@ -344,43 +344,43 @@ describe("SortedMap Tests", function() { shuffle(toInsert); shuffle(toRemove); - var tree = { }; + const tree = { }; - var start = new Date().getTime(); - for (i = 0 ; i < N ; i++ ) { + const start = new Date().getTime(); + for (let i = 0 ; i < N ; i++ ) { tree[i] = i; } // Ensure order is correct. 
//verifyTraversal(tree, N); - for (i = 0 ; i < N ; i++ ) { + for (let i = 0 ; i < N ; i++ ) { delete tree[i]; } - var elapsed = (new Date().getTime()) - start; + const elapsed = (new Date().getTime()) - start; // console.log(N + ": " +elapsed); } }); it("SortedMapIterator empty test.", function() { - var map = new SortedMap(defaultCmp); - var iterator = map.getIterator(); + const map = new SortedMap(defaultCmp); + const iterator = map.getIterator(); expect(iterator.getNext()).to.equal(null); }); it("SortedMapIterator test with 10 items.", function() { - var items = []; - for(var i = 0; i < 10; i++) + const items = []; + for(let i = 0; i < 10; i++) items.push(i); shuffle(items); - var map = new SortedMap(defaultCmp); - for(i = 0; i < 10; i++) + let map = new SortedMap(defaultCmp); + for(let i = 0; i < 10; i++) map = map.insert(items[i], items[i]); - var iterator = map.getIterator(); - var n, expected = 0; + const iterator = map.getIterator(); + let n, expected = 0; while ((n = iterator.getNext()) !== null) { expect(n.key).to.equal(expected); expect(n.value).to.equal(expected); @@ -390,7 +390,7 @@ describe("SortedMap Tests", function() { }); it("SortedMap.getPredecessorKey works.", function() { - var map = new SortedMap(defaultCmp) + const map = new SortedMap(defaultCmp) .insert(1,1) .insert(50,50) .insert(3,3) diff --git a/tests/database/sparsesnapshottree.test.ts b/tests/database/sparsesnapshottree.test.ts index a1e5bf4e3e2..7186e2060bc 100644 --- a/tests/database/sparsesnapshottree.test.ts +++ b/tests/database/sparsesnapshottree.test.ts @@ -22,9 +22,9 @@ import { ChildrenNode } from "../../src/database/core/snap/ChildrenNode"; describe("SparseSnapshotTree Tests", function () { it("Basic remember and find.", function () { - var st = new SparseSnapshotTree(); - var path = new Path("a/b"); - var node = nodeFromJSON("sdfsd"); + const st = new SparseSnapshotTree(); + const path = new Path("a/b"); + const node = nodeFromJSON("sdfsd"); st.remember(path, node); expect(st.find(new Path("a/b")).isEmpty()).to.equal(false); @@ -33,9 +33,9 @@ describe("SparseSnapshotTree Tests", function () { it("Find inside an existing snapshot", function () { - var st = new SparseSnapshotTree(); - var path = new Path("t/tt"); - var node = nodeFromJSON({ a: "sdfsd", x: 5, "999i": true }); + const st = new SparseSnapshotTree(); + const path = new Path("t/tt"); + let node = nodeFromJSON({ a: "sdfsd", x: 5, "999i": true }); node = node.updateImmediateChild("apples", nodeFromJSON({ "goats": 88 })); st.remember(path, node); @@ -48,7 +48,7 @@ describe("SparseSnapshotTree Tests", function () { it("Write a snapshot inside a snapshot.", function () { - var st = new SparseSnapshotTree(); + const st = new SparseSnapshotTree(); st.remember(new Path("t"), nodeFromJSON({ a: { b: "v" } })); st.remember(new Path("t/a/rr"), nodeFromJSON(19)); expect(st.find(new Path("t/a/b")).val()).to.equal("v"); @@ -57,7 +57,7 @@ describe("SparseSnapshotTree Tests", function () { it("Write a null value and confirm it is remembered.", function () { - var st = new SparseSnapshotTree(); + const st = new SparseSnapshotTree(); st.remember(new Path("awq/fff"), nodeFromJSON(null)); expect(st.find(new Path("awq/fff"))).to.equal(ChildrenNode.EMPTY_NODE); expect(st.find(new Path("awq/sdf"))).to.equal(null); @@ -67,7 +67,7 @@ describe("SparseSnapshotTree Tests", function () { it("Overwrite with null and confirm it is remembered.", function () { - var st = new SparseSnapshotTree(); + const st = new SparseSnapshotTree(); st.remember(new Path("t"), 
nodeFromJSON({ a: { b: "v" } })); expect(st.find(new Path("t")).isEmpty()).to.equal(false); st.remember(new Path("t"), ChildrenNode.EMPTY_NODE); @@ -76,7 +76,7 @@ describe("SparseSnapshotTree Tests", function () { it("Simple remember and forget.", function () { - var st = new SparseSnapshotTree(); + const st = new SparseSnapshotTree(); st.remember(new Path("t"), nodeFromJSON({ a: { b: "v" } })); expect(st.find(new Path("t")).isEmpty()).to.equal(false); st.forget(new Path("t")); @@ -85,7 +85,7 @@ describe("SparseSnapshotTree Tests", function () { it("Forget the root.", function () { - var st = new SparseSnapshotTree(); + const st = new SparseSnapshotTree(); st.remember(new Path("t"), nodeFromJSON({ a: { b: "v" } })); expect(st.find(new Path("t")).isEmpty()).to.equal(false); st.forget(new Path("")); @@ -94,7 +94,7 @@ describe("SparseSnapshotTree Tests", function () { it("Forget snapshot inside snapshot.", function () { - var st = new SparseSnapshotTree(); + const st = new SparseSnapshotTree(); st.remember(new Path("t"), nodeFromJSON({ a: { b: "v", c: 9, art: false } })); expect(st.find(new Path("t/a/c")).isEmpty()).to.equal(false); expect(st.find(new Path("t")).isEmpty()).to.equal(false); @@ -109,7 +109,7 @@ describe("SparseSnapshotTree Tests", function () { it("Forget path shallower than snapshots.", function () { - var st = new SparseSnapshotTree(); + const st = new SparseSnapshotTree(); st.remember(new Path("t/x1"), nodeFromJSON(false)); st.remember(new Path("t/x2"), nodeFromJSON(true)); st.forget(new Path("t")); @@ -118,11 +118,11 @@ describe("SparseSnapshotTree Tests", function () { it("Iterate children.", function () { - var st = new SparseSnapshotTree(); + const st = new SparseSnapshotTree(); st.remember(new Path("t"), nodeFromJSON({ b: "v", c: 9, art: false })); st.remember(new Path("q"), ChildrenNode.EMPTY_NODE); - var num = 0, gotT = false, gotQ = false; + let num = 0, gotT = false, gotQ = false; st.forEachChild(function(key, child) { num += 1; if (key === "t") { @@ -141,9 +141,9 @@ describe("SparseSnapshotTree Tests", function () { it("Iterate trees.", function () { - var st = new SparseSnapshotTree(); + const st = new SparseSnapshotTree(); - var count = 0; + let count = 0; st.forEachTree(new Path(""), function(path, tree) { count += 1; }); @@ -154,10 +154,10 @@ describe("SparseSnapshotTree Tests", function () { st.remember(new Path("a/x/g"), nodeFromJSON(3)); st.remember(new Path("a/x/null"), nodeFromJSON(null)); - var num = 0, got1 = false, got2 = false, got3 = false, got4 = false; + let num = 0, got1 = false, got2 = false, got3 = false, got4 = false; st.forEachTree(new Path("q"), function(path, node) { num += 1; - var pathString = path.toString(); + const pathString = path.toString(); if (pathString === "/q/t") { got1 = true; expect(node.val()).to.equal(1); @@ -183,10 +183,10 @@ describe("SparseSnapshotTree Tests", function () { }); it("Set leaf, then forget deeper path", function() { - var st = new SparseSnapshotTree(); + const st = new SparseSnapshotTree(); st.remember(new Path('foo'), nodeFromJSON('bar')); - var safeToRemove = st.forget(new Path('foo/baz')); + const safeToRemove = st.forget(new Path('foo/baz')); // it's not safe to remove this node expect(safeToRemove).to.equal(false); }); diff --git a/tests/database/transaction.test.ts b/tests/database/transaction.test.ts index 1006ce521f4..39db68bdd4d 100644 --- a/tests/database/transaction.test.ts +++ b/tests/database/transaction.test.ts @@ -28,18 +28,14 @@ import { hijackHash } from "../../src/database/api/test_access"; 
import firebase from "../../src/app"; import "../../src/database"; -// declare var runs; -// declare var waitsFor; -declare var TEST_TIMEOUT; - describe('Transaction Tests', function() { it('New value is immediately visible.', function() { - var node = (getRandomNode() as Reference); + const node = (getRandomNode() as Reference); node.child('foo').transaction(function() { return 42; }); - var val = null; + let val = null; node.child('foo').on('value', function(snap) { val = snap.val(); }); @@ -47,9 +43,9 @@ describe('Transaction Tests', function() { }); it.skip('Event is raised for new value.', function() { - var node = (getRandomNode() as Reference); - var fooNode = node.child('foo'); - var eventHelper = eventTestHelper([ + const node = (getRandomNode() as Reference); + const fooNode = node.child('foo'); + const eventHelper = eventTestHelper([ [fooNode, ['value', '']] ]); @@ -61,7 +57,7 @@ describe('Transaction Tests', function() { }); it('Non-aborted transaction sets committed to true in callback.', function(done) { - var node = (getRandomNode() as Reference); + const node = (getRandomNode() as Reference); node.transaction(function() { return 42; @@ -75,7 +71,7 @@ describe('Transaction Tests', function() { }); it('Aborted transaction sets committed to false in callback.', function(done) { - var node = (getRandomNode() as Reference); + const node = (getRandomNode() as Reference); node.transaction(function() {}, function(error, committed, snapshot) { @@ -88,10 +84,9 @@ describe('Transaction Tests', function() { it('Tetris bug test - set data, reconnect, do transaction that aborts once data arrives, verify correct events.', async function() { - var nodePair = (getRandomNode(2) as Reference[]); - var node = nodePair[0]; - var dataWritten = false; - var eventsReceived = 0; + const nodePair = (getRandomNode(2) as Reference[]); + let node = nodePair[0]; + let eventsReceived = 0; const ea = EventAccumulatorFactory.waitsForCount(2); await node.child('foo').set(42); @@ -127,8 +122,8 @@ describe('Transaction Tests', function() { }); it('Use transaction to create a node, make sure exactly one event is received.', function() { - var node = (getRandomNode() as Reference); - var events = 0, done = false; + const node = (getRandomNode() as Reference); + let events = 0, done = false; const ea = new EventAccumulator(() => done && events === 1); @@ -150,9 +145,8 @@ describe('Transaction Tests', function() { it('Use transaction to update one of two existing child nodes. 
' + 'Make sure events are only raised for the changed node.', async function() { - var nodePair = (getRandomNode(2) as Reference[]); - var node = nodePair[0].child('foo'); - var writesDone = 0; + const nodePair = (getRandomNode(2) as Reference[]); + let node = nodePair[0].child('foo'); await Promise.all([ node.child('a').set(42), @@ -186,8 +180,8 @@ describe('Transaction Tests', function() { }); it('Transaction is only called once when initializing an empty node.', function() { - var node = (getRandomNode() as Reference); - var updateCalled = 0; + const node = (getRandomNode() as Reference); + let updateCalled = 0; const ea = EventAccumulatorFactory.waitsForCount(1); node.transaction(function(value) { @@ -206,8 +200,8 @@ describe('Transaction Tests', function() { }); it('Second transaction gets run immediately on previous output and only runs once.', function(done) { - var nodePair = (getRandomNode(2) as Reference[]); - var firstRun = false, firstDone = false, secondRun = false, secondDone = false; + const nodePair = (getRandomNode(2) as Reference[]); + let firstRun = false, firstDone = false, secondRun = false, secondDone = false; function onComplete() { if (firstDone && secondDone) { @@ -253,22 +247,22 @@ describe('Transaction Tests', function() { // Transaction #1 should complete as planned (since it was already sent). // Transaction #2 should be aborted by the set. // Transaction #3 should be re-run after #2 is reverted, and then be sent to the server and succeed. - var firstDone = false, secondDone = false, thirdDone = false; - var node = (getRandomNode() as Reference); - var nodeSnap = null; - var nodeFooSnap = null; + let firstDone = false, secondDone = false, thirdDone = false; + const node = (getRandomNode() as Reference); + let nodeSnap = null; + let nodeFooSnap = null; node.on('value', function(s) { - var str = JSON.stringify(s.val()); + const str = JSON.stringify(s.val()); nodeSnap = s; }); node.child('foo').on('value', function(s) { - var str = JSON.stringify(s.val()); + const str = JSON.stringify(s.val()); nodeFooSnap = s; }); - var firstRun = false, secondRun = false, thirdRunCount = 0; + let firstRun = false, secondRun = false, thirdRunCount = 0; const ea = new EventAccumulator(() => firstDone && thirdDone); node.child('foo').transaction( function() { @@ -338,7 +332,7 @@ describe('Transaction Tests', function() { }); it('transaction(), set(), set() should work.', function(done) { - var ref = (getRandomNode() as Reference); + const ref = (getRandomNode() as Reference); ref.transaction(function(curr) { expect(curr).to.equal(null); return 'hi!'; @@ -353,13 +347,14 @@ describe('Transaction Tests', function() { }); it('Priority is preserved when setting data.', async function() { - var node = (getRandomNode() as Reference), complete = false; - var snap; + const node = (getRandomNode() as Reference); + let complete = false; + let snap; node.on('value', function(s) { snap = s; }); node.setWithPriority('test', 5); expect(snap.getPriority()).to.equal(5); - const promise = node.transaction( + let promise = node.transaction( function() { return 'new value'}, function() { complete = true; } ); @@ -372,12 +367,10 @@ describe('Transaction Tests', function() { }); it('Tetris bug test - Can do transactions from transaction callback.', async function() { - var nodePair = (getRandomNode(2) as Reference[]), writeDone = false; + const nodePair = (getRandomNode(2) as Reference[]), writeDone = false; await nodePair[0].child('foo').set(42); - var transactionTwoDone = false; - - var node = 
nodePair[1]; + const node = nodePair[1]; return new Promise(resolve => { node.child('foo').transaction(function(val) { @@ -393,8 +386,7 @@ describe('Transaction Tests', function() { }); it('Resulting snapshot is passed to onComplete callback.', async function() { - var nodePair = (getRandomNode(2) as Reference[]); - var done = false; + const nodePair = (getRandomNode(2) as Reference[]); await nodePair[0].transaction(function(v) { if (v === null) return 'hello!'; @@ -424,12 +416,12 @@ describe('Transaction Tests', function() { }); it('Transaction aborts after 25 retries.', function(done) { - var restoreHash = hijackHash(function() { + const restoreHash = hijackHash(function() { return 'duck, duck, goose.'; }); - var node = (getRandomNode() as Reference); - var tries = 0; + const node = (getRandomNode() as Reference); + let tries = 0; node.transaction(function(curr) { expect(tries).to.be.lessThan(25); tries++; @@ -444,8 +436,8 @@ describe('Transaction Tests', function() { }); it('Set should cancel already sent transactions that come back as datastale.', function(done) { - var nodePair = (getRandomNode(2) as Reference[]); - var transactionCalls = 0; + const nodePair = (getRandomNode(2) as Reference[]); + let transactionCalls = 0; nodePair[0].set(5, function() { nodePair[1].transaction(function(old) { expect(transactionCalls).to.equal(0); @@ -464,10 +456,10 @@ describe('Transaction Tests', function() { }); it('Update should not cancel unrelated transactions', async function() { - var node = (getRandomNode() as Reference); - var fooTransactionDone = false; - var barTransactionDone = false; - var restoreHash = hijackHash(function() { + const node = (getRandomNode() as Reference); + let fooTransactionDone = false; + let barTransactionDone = false; + const restoreHash = hijackHash(function() { return 'foobar'; }); @@ -507,7 +499,7 @@ describe('Transaction Tests', function() { }); it('Test transaction on wacky unicode data.', function(done) { - var nodePair = (getRandomNode(2) as Reference[]); + const nodePair = (getRandomNode(2) as Reference[]); nodePair[0].set('♜♞♝♛♚♝♞♜', function() { nodePair[1].transaction(function(current) { if (current !== null) @@ -522,7 +514,7 @@ describe('Transaction Tests', function() { }); it('Test immediately aborted transaction.', function(done) { - var node = (getRandomNode() as Reference); + const node = (getRandomNode() as Reference); // without callback. 
node.transaction(function(curr) { return; @@ -538,7 +530,7 @@ describe('Transaction Tests', function() { }); it('Test adding to an array with a transaction.', function(done) { - var node = (getRandomNode() as Reference); + const node = (getRandomNode() as Reference); node.set(['cat', 'horse'], function() { node.transaction(function(current) { if (current) { @@ -557,8 +549,8 @@ describe('Transaction Tests', function() { }); it('Merged transactions have correct snapshot in onComplete.', async function() { - var nodePair = (getRandomNode(2) as Reference[]), node1 = nodePair[0], node2 = nodePair[1]; - var transaction1Done, transaction2Done; + const nodePair = (getRandomNode(2) as Reference[]), node1 = nodePair[0], node2 = nodePair[1]; + let transaction1Done, transaction2Done; await node1.set({a: 0}); const tx1 = node2.transaction(function(val) { @@ -581,7 +573,7 @@ describe('Transaction Tests', function() { } return 2; }, function(error, committed, snapshot) { - expect(error).to.equal(null) + expect(error).to.equal(null); expect(committed).to.equal(true); expect(snapshot.key).to.equal('a'); expect(snapshot.val()).to.deep.equal(2); @@ -592,9 +584,9 @@ describe('Transaction Tests', function() { }); it('Doing set() in successful transaction callback works. Case 870.', function(done) { - var node = (getRandomNode() as Reference); - var transactionCalled = false; - var callbackCalled = false; + const node = (getRandomNode() as Reference); + let transactionCalled = false; + let callbackCalled = false; node.transaction(function(val) { expect(transactionCalled).to.not.be.ok; transactionCalled = true; @@ -609,11 +601,11 @@ describe('Transaction Tests', function() { }); it('Doing set() in aborted transaction callback works. Case 870.', function(done) { - var nodePair = (getRandomNode(2) as Reference[]), node1 = nodePair[0], node2 = nodePair[1]; + const nodePair = (getRandomNode(2) as Reference[]), node1 = nodePair[0], node2 = nodePair[1]; node1.set('initial', function() { - var transactionCalled = false; - var callbackCalled = false; + let transactionCalled = false; + let callbackCalled = false; node2.transaction(function(val) { // Return dummy value until we're called with the actual current value. if (val === null) @@ -633,7 +625,7 @@ describe('Transaction Tests', function() { }); it('Pending transactions are canceled on disconnect.', function(done) { - var ref = (getRandomNode() as Reference); + const ref = (getRandomNode() as Reference); // wait to be connected and some data set. ref.set('initial', function() { @@ -653,7 +645,7 @@ describe('Transaction Tests', function() { }); it('Transaction without local events (1)', async function() { - var ref = (getRandomNode() as Reference), actions = []; + const ref = (getRandomNode() as Reference), actions = []; let ea = EventAccumulatorFactory.waitsForCount(1); ref.on('value', function(s) { @@ -689,11 +681,10 @@ describe('Transaction Tests', function() { // This test is meant to ensure that with applyLocally=false, while the transaction is outstanding, we continue // to get events from other clients. 
it('Transaction without local events (2)', function(done) { - var refPair = (getRandomNode(2) as Reference[]), ref1 = refPair[0], ref2 = refPair[1]; - var restoreHash = hijackHash(function() { return 'badhash'; }); - var SETS = 4; - var events = [], retries = 0, setsDone = 0; - var ready = false; + const refPair = (getRandomNode(2) as Reference[]), ref1 = refPair[0], ref2 = refPair[1]; + const restoreHash = hijackHash(function() { return 'badhash'; }); + const SETS = 4; + let events = [], retries = 0, setsDone = 0; function txn1(next) { // Do a transaction on the first connection which will keep retrying (cause we hijacked the hash). @@ -701,7 +692,7 @@ describe('Transaction Tests', function() { ref1.transaction(function(current) { retries++; // We should be getting server events while the transaction is outstanding. - for (var i = 0; i < (current || 0); i++) { + for (let i = 0; i < (current || 0); i++) { expect(events[i]).to.equal(i); } @@ -719,7 +710,7 @@ describe('Transaction Tests', function() { // Meanwhile, do sets from the second connection. - var doSet = function() { + const doSet = function() { ref2.set(setsDone, function() { setsDone++; if (setsDone < SETS) @@ -740,7 +731,7 @@ describe('Transaction Tests', function() { throw 'Transaction should have had to retry!'; // Validate we got the correct events. - for (var i = 0; i < SETS; i++) { + for (let i = 0; i < SETS; i++) { expect(events[i]).to.equal(i); } expect(events[SETS]).to.equal('txn result'); @@ -754,10 +745,10 @@ describe('Transaction Tests', function() { }); it('Transaction from value callback.', function(done) { - var ref = (getRandomNode() as Reference); - var COUNT = 1; + const ref = (getRandomNode() as Reference); + const COUNT = 1; ref.on('value', function(snap) { - var shouldCommit = true; + let shouldCommit = true; ref.transaction(function(current) { if (current == null) { return 0; @@ -779,10 +770,10 @@ describe('Transaction Tests', function() { it('Transaction runs on null only once after reconnect (Case 1981).', async function() { if (!canCreateExtraConnections()) return; - var ref = (getRandomNode() as Reference); + const ref = (getRandomNode() as Reference); await ref.set(42); - var newRef = getFreshRepoFromReference(ref); - var run = 0; + const newRef = getFreshRepoFromReference(ref); + let run = 0; return newRef.transaction(function(curr) { run++; if (run === 1) { @@ -805,11 +796,10 @@ describe('Transaction Tests', function() { if (!canCreateExtraConnections()) return; function makeFriends(accountID, friendAccountIDs, firebase) { - var friendAccountID, - i; + let friendAccountID; // add friend relationships - for (i in friendAccountIDs) { + for (let i in friendAccountIDs) { if (friendAccountIDs.hasOwnProperty(i)) { friendAccountID = friendAccountIDs[i]; makeFriend(friendAccountID, accountID, firebase); @@ -841,10 +831,10 @@ describe('Transaction Tests', function() { }, false); } - var firebase = (getRandomNode() as Reference); + const firebase = (getRandomNode() as Reference); firebase.database.goOffline(); firebase.database.goOnline(); - var count = 0; + let count = 0; makeFriends('a1', ['a2', 'a3'], firebase); makeFriends('a2', ['a1', 'a3'], firebase); makeFriends('a3', ['a1', 'a2'], firebase); @@ -852,8 +842,8 @@ describe('Transaction Tests', function() { }); it('transaction() respects .priority.', function(done) { - var ref = (getRandomNode() as Reference); - var values = []; + const ref = (getRandomNode() as Reference); + const values = []; ref.on('value', function(s) { values.push(s.exportVal()); }); 
ref.transaction(function(curr) { @@ -874,8 +864,7 @@ describe('Transaction Tests', function() { }); it('Transaction properly reverts data when you add a deeper listen.', function(done) { - var refPair = (getRandomNode(2) as Reference[]), ref1 = refPair[0], ref2 = refPair[1]; - var gotTest; + const refPair = (getRandomNode(2) as Reference[]), ref1 = refPair[0], ref2 = refPair[1]; ref1.child('y').set('test', function() { ref2.transaction(function(curr) { if (curr === null) { @@ -886,13 +875,13 @@ describe('Transaction Tests', function() { ref2.child('y').on('value', function(s) { if (s.val() === 'test') { done(); - }; + } }); }); }); it('Transaction with integer keys', function(done) { - var ref = (getRandomNode() as Reference); + const ref = (getRandomNode() as Reference); ref.set({1: 1, 5: 5, 10: 10, 20: 20}, function() { ref.transaction(function(current) { return 42; @@ -905,7 +894,7 @@ describe('Transaction Tests', function() { }); it('Return null from first run of transaction.', function(done) { - var ref = (getRandomNode() as Reference); + const ref = (getRandomNode() as Reference); ref.transaction(function(c) { return null; }, function(error, committed) { @@ -917,7 +906,7 @@ describe('Transaction Tests', function() { // https://app.asana.com/0/5673976843758/9259161251948 it('Bubble-app transaction bug.', function(done) { - var ref = (getRandomNode() as Reference); + const ref = (getRandomNode() as Reference); ref.child('a').transaction(function() { return 1; }); @@ -942,8 +931,7 @@ describe('Transaction Tests', function() { }); it('Transaction and priority: Can set priority in transaction on empty node', async function() { - var ref = (getRandomNode() as Reference); - var done = false; + const ref = (getRandomNode() as Reference); await ref.transaction(function(current) { return { '.value': 42, '.priority': 7 }; @@ -955,8 +943,7 @@ describe('Transaction Tests', function() { }); it("Transaction and priority: Transaction doesn't change priority.", async function() { - var ref = (getRandomNode() as Reference); - var done = false; + const ref = (getRandomNode() as Reference); await ref.set({ '.value': 42, '.priority': 7 }); @@ -970,15 +957,12 @@ describe('Transaction Tests', function() { }); it('Transaction and priority: Transaction can change priority on non-empty node.', async function() { - var ref = (getRandomNode() as Reference); - var done = false; + const ref = (getRandomNode() as Reference); await ref.set({ '.value': 42, '.priority': 7 }); await ref.transaction(function(current) { return { '.value': 43, '.priority': 8 }; - }, function() { - done = true; }); return ref.once('value', function(s) { @@ -987,8 +971,7 @@ describe('Transaction Tests', function() { }); it('Transaction and priority: Changing priority on siblings.', async function() { - var ref = (getRandomNode() as Reference); - var done = false, done2 = false; + const ref = (getRandomNode() as Reference); await ref.set({ a: { '.value': 'a', '.priority': 'a' }, @@ -1011,8 +994,7 @@ describe('Transaction Tests', function() { }); it('Transaction and priority: Leaving priority on siblings.', async function() { - var ref = (getRandomNode() as Reference); - var done = false, done2 = false; + const ref = (getRandomNode() as Reference); await ref.set({a: {'.value': 'a', '.priority': 'a'}, b: {'.value': 'b', '.priority': 'b'}}); @@ -1032,8 +1014,8 @@ describe('Transaction Tests', function() { }); it('transaction() doesn\'t pick up cached data from previous once().', function(done) { - var refPair = (getRandomNode(2) as 
Reference[]); - var me = refPair[0], other = refPair[1]; + const refPair = (getRandomNode(2) as Reference[]); + const me = refPair[0], other = refPair[1]; me.set('not null', function() { me.once('value', function(snapshot) { other.set(null, function() { @@ -1055,8 +1037,8 @@ describe('Transaction Tests', function() { }); it('transaction() doesn\'t pick up cached data from previous transaction.', function(done) { - var refPair = (getRandomNode(2) as Reference[]); - var me = refPair[0], other = refPair[1]; + const refPair = (getRandomNode(2) as Reference[]); + const me = refPair[0], other = refPair[1]; me.transaction(function() { return 'not null'; }, function(err, committed) { @@ -1080,12 +1062,12 @@ describe('Transaction Tests', function() { }); it("server values: local timestamp should eventually (but not immediately) match the server with txns", function(done) { - var refPair = (getRandomNode(2) as Reference[]), + const refPair = (getRandomNode(2) as Reference[]), writer = refPair[0], reader = refPair[1], readSnaps = [], writeSnaps = []; - var evaluateCompletionCriteria = function() { + const evaluateCompletionCriteria = function() { if (readSnaps.length === 1 && writeSnaps.length === 2) { expect(Math.abs(new Date().getTime() - writeSnaps[0].val()) < 10000).to.equal(true); expect(Math.abs(new Date().getTime() - writeSnaps[0].getPriority()) < 10000).to.equal(true); @@ -1130,7 +1112,7 @@ describe('Transaction Tests', function() { }); it("transaction() still works when there's a query listen.", function(done) { - var ref = (getRandomNode() as Reference); + const ref = (getRandomNode() as Reference); ref.set({ a: 1, @@ -1153,7 +1135,7 @@ describe('Transaction Tests', function() { it("transaction() on queried location doesn't run initially on null (firebase-worker-queue depends on this).", function(done) { - var ref = (getRandomNode() as Reference); + const ref = (getRandomNode() as Reference); ref.push({ a: 1, b: 2}, function() { ref.startAt().limitToFirst(1).on('child_added', function(snap) { snap.ref.transaction(function(current) { @@ -1170,15 +1152,14 @@ describe('Transaction Tests', function() { }); it('transactions raise correct child_changed events on queries', async function() { - var ref = (getRandomNode() as Reference); + const ref = (getRandomNode() as Reference); - var value = { foo: { value: 1 } }; - var txnDone = false; - var snapshots = []; + const value = { foo: { value: 1 } }; + const snapshots = []; - await ref.set(value) + await ref.set(value); - var query = ref.endAt(Number.MIN_VALUE); + const query = ref.endAt(Number.MIN_VALUE); query.on('child_added', function(snapshot) { snapshots.push(snapshot); }); @@ -1195,16 +1176,16 @@ describe('Transaction Tests', function() { }, false); expect(snapshots.length).to.equal(2); - var addedSnapshot = snapshots[0]; + const addedSnapshot = snapshots[0]; expect(addedSnapshot.key).to.equal('foo'); expect(addedSnapshot.val()).to.deep.equal({ value: 1 }); - var changedSnapshot = snapshots[1]; + const changedSnapshot = snapshots[1]; expect(changedSnapshot.key).to.equal('foo'); expect(changedSnapshot.val()).to.deep.equal({ value: 2 }); }); it('transactions can use local merges', function(done) { - var ref = (getRandomNode() as Reference); + const ref = (getRandomNode() as Reference); ref.update({'foo': 'bar'}); @@ -1219,7 +1200,7 @@ describe('Transaction Tests', function() { }); it('transactions works with merges without the transaction path', function(done) { - var ref = (getRandomNode() as Reference); + const ref = (getRandomNode() as 
Reference);

     ref.update({'foo': 'bar'});

@@ -1235,7 +1216,7 @@

   //See https://app.asana.com/0/15566422264127/23303789496881
   it('out of order remove writes are handled correctly', function(done) {
-    var ref = (getRandomNode() as Reference);
+    const ref = (getRandomNode() as Reference);
     ref.set({foo: 'bar'});

     ref.transaction(function() {