// BufferTokenizer.js
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.BufferTokenizer = void 0;
const peek_readable_1 = require("peek-readable");
class BufferTokenizer {
    /**
     * Construct BufferTokenizer
     * @param buffer - Buffer to tokenize
     * @param fileInfo - Pass additional file information to the tokenizer
     */
    constructor(buffer, fileInfo) {
        this.buffer = buffer;
        this.position = 0;
        this.fileInfo = fileInfo ? fileInfo : {};
        this.fileInfo.size = this.fileInfo.size ? this.fileInfo.size : buffer.length;
    }
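    // Illustrative usage (a sketch, not part of the module API; strtok3's
    // `fromBuffer` helper normally constructs this class for you):
    //
    //   const tokenizer = new BufferTokenizer(Buffer.from([0x49, 0x44, 0x33]));
    //   // tokenizer.fileInfo.size === 3, derived from buffer.length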
    /**
     * Read buffer from tokenizer
     * @param buffer - Target buffer to fill with bytes read from the tokenizer
     * @param options - Read behaviour options
     * @returns Promise with the number of bytes read
     */
    async readBuffer(buffer, options) {
        if (options && options.position !== undefined) {
            if (options.position < this.position) {
                throw new Error('`options.position` must be equal to or greater than `tokenizer.position`');
            }
            this.position = options.position;
        }
        const bytesRead = await this.peekBuffer(buffer, options);
        this.position += bytesRead;
        return bytesRead;
    }
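    // Example (sketch, assuming a `tokenizer` instance with at least 4 bytes
    // remaining): `readBuffer` fills the target and advances `position`:
    //
    //   const target = Buffer.alloc(4);
    //   const bytesRead = await tokenizer.readBuffer(target, { length: 4 });
    //   // bytesRead === 4; tokenizer.position advanced by 4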
    /**
     * Peek (read ahead) buffer from tokenizer, without advancing the position
     * @param buffer - Target buffer to fill with bytes peeked from the tokenizer
     * @param options - Read behaviour options
     * @returns Promise with the number of bytes peeked
     */
    async peekBuffer(buffer, options) {
        let offset = 0;
        let length = buffer.length;
        let position = this.position;
        if (options) {
            if (options.position !== undefined) {
                if (options.position < this.position) {
                    throw new Error('`options.position` must be equal to or greater than `tokenizer.position`');
                }
                position = options.position;
            }
            if (Number.isInteger(options.length)) {
                length = options.length;
            }
            else {
                length -= options.offset || 0;
            }
            if (options.offset) {
                offset = options.offset;
            }
        }
        if (length === 0) {
            return 0;
        }
        const bytes2read = Math.min(this.buffer.length - position, length);
        if ((!options || !options.mayBeLess) && bytes2read < length) {
            throw new peek_readable_1.EndOfStreamError();
        }
        this.buffer.copy(buffer, offset, position, position + bytes2read);
        return bytes2read;
    }
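    // Example (sketch): unlike `readBuffer`, `peekBuffer` leaves `position`
    // unchanged, so the same bytes can be read again afterwards. With
    // `mayBeLess: true` it returns a short count instead of throwing:
    //
    //   const peeked = Buffer.alloc(2);
    //   await tokenizer.peekBuffer(peeked, { length: 2, mayBeLess: true });
    //   // tokenizer.position is unchanged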
    async readToken(token, position = this.position) {
        this.position = position;
        try {
            const tv = await this.peekToken(token, this.position);
            this.position += token.len;
            return tv;
        }
        catch (err) {
            // On failure, move to the end of the buffer so the failed range is not re-read
            this.position = this.buffer.length;
            throw err;
        }
    }
    async peekToken(token, position = this.position) {
        if (this.buffer.length - position < token.len) {
            throw new peek_readable_1.EndOfStreamError();
        }
        return token.get(this.buffer, position);
    }
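    // Example (sketch, using the companion `token-types` package, which
    // provides IGetToken implementations such as UINT8 and UINT32_BE):
    //
    //   const Token = require('token-types');
    //   const size = await tokenizer.readToken(Token.UINT32_BE); // advances position by 4
    //   const flag = await tokenizer.peekToken(Token.UINT8);     // does not advance
    //
    // `readNumber` and `peekNumber` below are thin wrappers over the same calls.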
    async readNumber(token) {
        return this.readToken(token);
    }
    async peekNumber(token) {
        return this.peekToken(token);
    }
    /**
     * Advance the position without reading data
     * @param length - Number of bytes to skip
     * @returns Promise with the actual number of bytes ignored, which may be
     * less than `length` when the end of the buffer is reached first
     */
    async ignore(length) {
        const bytesIgnored = Math.min(this.buffer.length - this.position, length);
        this.position += bytesIgnored;
        return bytesIgnored;
    }
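    // Example (sketch): skip a 10-byte header. The returned count may be
    // smaller than requested when the end of the buffer is reached first:
    //
    //   const bytesSkipped = await tokenizer.ignore(10);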
    async close() {
        // empty
    }
}
exports.BufferTokenizer = BufferTokenizer;
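// Usage sketch (illustrative only; `readRecord` and the record layout are
// assumptions, not part of this module): parse a length-prefixed payload.
//
//   const { BufferTokenizer } = require('./BufferTokenizer');
//   const Token = require('token-types');
//
//   async function readRecord(buf) {
//       const tokenizer = new BufferTokenizer(buf);
//       const length = await tokenizer.readToken(Token.UINT32_LE);
//       const payload = Buffer.alloc(length);
//       await tokenizer.readBuffer(payload, { length });
//       return payload;
//   }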