@sbuggay
Created July 6, 2019 10:02

BatchedRequester.ts

type BatchedRequest<T> = (data: string[]) => Promise<T[]>;

interface IRequestStore<T> {
    promise: Promise<T>;
    resolve: (value: T) => void;
}

export class BatchedSteamRequester<T> {
    // Pending requests, keyed by the request string.
    private _requests: { [index: string]: IRequestStore<T> };
    private _interval: NodeJS.Timeout;
    private _batchedRequest: BatchedRequest<T>;
    private _mapToRequest: (obj: T) => string;

    constructor(batchedRequest: BatchedRequest<T>, mapToRequest: (obj: T) => string, interval: number) {
        this._batchedRequest = batchedRequest;
        this._mapToRequest = mapToRequest;
        this._interval = setInterval(this._flushRequests.bind(this), interval);
        this._requests = {};
    }

    /**
     * Queue up a request to be made and return its promise.
     *
     * @param {string} data
     * @returns {Promise<T>}
     * @memberof BatchedSteamRequester
     */
    public request(data: string): Promise<T> {
        // If this key has already been requested, return the previously generated promise.
        if (this._requests[data]) {
            return this._requests[data].promise;
        }

        // Create the promise and retain its resolve callback so the batched
        // response handler can settle it later.
        let retainedResolve: (value: T) => void = () => {};
        const promise = new Promise<T>((resolve) => {
            retainedResolve = resolve;
        });

        this._requests[data] = {
            promise,
            resolve: retainedResolve
        };

        return promise;
    }

    /**
     * Clear the interval and the request queue.
     *
     * @memberof BatchedSteamRequester
     */
    public tearDown() {
        clearInterval(this._interval);
        this._requests = {};
    }

    /**
     * Flush the request queue: send every queued key in a single batched
     * request, then resolve each retained promise with its matching response.
     *
     * @private
     * @memberof BatchedSteamRequester
     */
    private async _flushRequests() {
        const data = Object.keys(this._requests);
        if (data.length === 0) return;

        // Snapshot and clear the queue so already-answered keys are not
        // re-requested on every subsequent interval.
        const pending = this._requests;
        this._requests = {};

        const response = await this._batchedRequest(data);
        response.forEach((r) => {
            const key = this._mapToRequest(r);
            if (pending[key]) {
                pending[key].resolve(r);
            }
        });
    }
}
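
For context, a minimal usage sketch. The PlayerSummary shape and the fetchPlayerSummaries stub below are hypothetical stand-ins for whichever bulk endpoint the requester is wired to; only BatchedSteamRequester itself comes from the file above.

// Hypothetical response shape for a bulk player-summary lookup.
interface PlayerSummary {
    steamid: string;
    personaname: string;
}

// Stand-in for a real batched endpoint that accepts many ids per call.
async function fetchPlayerSummaries(steamids: string[]): Promise<PlayerSummary[]> {
    return steamids.map((steamid) => ({ steamid, personaname: `player-${steamid}` }));
}

// Coalesce every id requested within a 100 ms window into one batched call.
const requester = new BatchedSteamRequester<PlayerSummary>(
    fetchPlayerSummaries,
    (summary) => summary.steamid,
    100
);

// Both requests land in the same flush, so only one batched call is made.
const a = requester.request("id-1");
const b = requester.request("id-2");
Promise.all([a, b]).then(([first, second]) => {
    console.log(first.personaname, second.personaname);
    requester.tearDown();
});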