BufferTokenizer.js
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.BufferTokenizer = void 0;
const peek_readable_1 = require("peek-readable");
class BufferTokenizer {
    /**
     * Construct BufferTokenizer
     * @param buffer - Buffer to tokenize
     * @param fileInfo - Pass additional file information to the tokenizer
     */
    constructor(buffer, fileInfo) {
        this.buffer = buffer;
        this.position = 0;
        this.fileInfo = fileInfo ? fileInfo : {};
        this.fileInfo.size = this.fileInfo.size ? this.fileInfo.size : buffer.length;
    }
    /**
     * Read buffer from tokenizer
     * @param buffer - Target buffer to fill with bytes read from the tokenizer
     * @param options - Read behaviour options
     * @returns {Promise<number>} Number of bytes read
     */
    async readBuffer(buffer, options) {
        if (options && options.position) {
            if (options.position < this.position) {
                throw new Error('`options.position` must be equal or greater than `tokenizer.position`');
            }
            this.position = options.position;
        }
        return this.peekBuffer(buffer, options).then(bytesRead => {
            this.position += bytesRead;
            return bytesRead;
        });
    }
    /**
     * Peek (read ahead) buffer from tokenizer
     * @param buffer - Target buffer to fill with bytes peeked from the tokenizer
     * @param options - Read behaviour options
     * @returns {Promise<number>} Number of bytes peeked
     */
    async peekBuffer(buffer, options) {
        let offset = 0;
        let length = buffer.length;
        let position = this.position;
        if (options) {
            if (options.position) {
                if (options.position < this.position) {
                    throw new Error('`options.position` must be equal or greater than `tokenizer.position`');
                }
                position = options.position;
            }
            if (Number.isInteger(options.length)) {
                length = options.length;
            }
            else {
                length -= options.offset || 0;
            }
            if (options.offset) {
                offset = options.offset;
            }
        }
        if (length === 0) {
            return 0;
        }
        const bytes2read = Math.min(this.buffer.length - position, length);
        if ((!options || !options.mayBeLess) && bytes2read < length) {
            throw new peek_readable_1.EndOfStreamError();
        }
        else {
            this.buffer.copy(buffer, offset, position, position + bytes2read);
            return bytes2read;
        }
    }
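    /**
     * Read a token from the buffer and advance the position by the token length
     * @param token - Token to read, providing `len` and `get(buffer, offset)`
     * @param position - Position to read from; defaults to the current tokenizer position
     * @returns {Promise<*>} Decoded token value
     */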
    async readToken(token, position = this.position) {
        this.position = position;
        try {
            // Await the peek so a failed read is handled by the catch block below
            const tv = await this.peekToken(token, this.position);
            this.position += token.len;
            return tv;
        }
        catch (err) {
            // Move the position to the end of the buffer before re-throwing
            this.position += this.buffer.length - position;
            throw err;
        }
    }
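    /**
     * Peek (read ahead) a token from the buffer without advancing the position
     * @param token - Token to peek, providing `len` and `get(buffer, offset)`
     * @param position - Position to peek at; defaults to the current tokenizer position
     * @returns {Promise<*>} Decoded token value
     */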
    async peekToken(token, position = this.position) {
        if (this.buffer.length - position < token.len) {
            throw new peek_readable_1.EndOfStreamError();
        }
        return token.get(this.buffer, position);
    }
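    /**
     * Read a numeric token and advance the position
     * @param token - Numeric token to read
     * @returns {Promise<number>} Decoded number
     */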
    async readNumber(token) {
        return this.readToken(token);
    }
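    /**
     * Peek a numeric token without advancing the position
     * @param token - Numeric token to peek
     * @returns {Promise<number>} Decoded number
     */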
    async peekNumber(token) {
        return this.peekToken(token);
    }
    /**
     * Advance the position without reading data
     * @param length - Number of bytes to ignore
     * @returns {Promise<number>} Actual number of bytes ignored
     */
    async ignore(length) {
        const bytesIgnored = Math.min(this.buffer.length - this.position, length);
        this.position += bytesIgnored;
        return bytesIgnored;
    }
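    /**
     * Close the tokenizer; a no-op for an in-memory buffer
     */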
    async close() {
        // empty
    }
}
exports.BufferTokenizer = BufferTokenizer;
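// Usage sketch (illustrative only, not part of the module). It drives the
// tokenizer with a minimal inline token; `readUInt8` below is a hypothetical
// stand-in for ready-made tokens with the same `{ len, get }` shape, and the
// require path assumes a script sitting next to this file.
//
// const { BufferTokenizer } = require('./BufferTokenizer');
//
// const readUInt8 = {
//     len: 1,
//     get: (buf, off) => buf.readUInt8(off)
// };
//
// (async () => {
//     const tokenizer = new BufferTokenizer(Buffer.from([0x01, 0x02, 0x03]));
//     const peeked = await tokenizer.peekToken(readUInt8); // 1, position stays at 0
//     const first = await tokenizer.readToken(readUInt8);  // 1, position advances to 1
//     await tokenizer.ignore(1);                           // skip 0x02, position is now 2
//     const last = await tokenizer.readNumber(readUInt8);  // 3
//     console.log(peeked, first, last);
// })();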