Initial commit - Event Planner application
This commit is contained in:
108
node_modules/strtok3/lib/AbstractTokenizer.js
generated
vendored
Normal file
108
node_modules/strtok3/lib/AbstractTokenizer.js
generated
vendored
Normal file
@@ -0,0 +1,108 @@
|
||||
import { EndOfStreamError } from './stream/index.js';
|
||||
/**
|
||||
* Core tokenizer
|
||||
*/
|
||||
/**
 * Base class for tokenizer implementations.
 *
 * Subclasses provide `readBuffer` / `peekBuffer` (and optionally
 * `supportsRandomAccess` / `fileInfo`); this class layers the
 * token-oriented read/peek/ignore operations on top of them.
 */
export class AbstractTokenizer {
    /**
     * Constructor
     * @param options Tokenizer options
     * @protected
     */
    constructor(options) {
        // Scratch buffer shared by readNumber/peekNumber; 8 bytes covers the
        // largest numeric token (64-bit values).
        this.numBuffer = new Uint8Array(8);
        /**
         * Tokenizer-stream position
         */
        this.position = 0;
        this.onClose = options?.onClose;
        // Forward an external abort signal to this tokenizer, if one was given.
        options?.abortSignal?.addEventListener('abort', () => {
            this.abort();
        });
    }
    /**
     * Read a token from the tokenizer-stream.
     * @param token - The token to read
     * @param position - If provided, the desired position in the tokenizer-stream
     * @returns Promise with token data
     * @throws EndOfStreamError when fewer than `token.len` bytes remain
     */
    async readToken(token, position = this.position) {
        const buffer = new Uint8Array(token.len);
        const bytesRead = await this.readBuffer(buffer, { position });
        if (bytesRead < token.len) {
            throw new EndOfStreamError();
        }
        return token.get(buffer, 0);
    }
    /**
     * Peek a token from the tokenizer-stream (does not advance the position).
     * @param token - Token to peek from the tokenizer-stream
     * @param position - Offset where to begin reading within the file. If position is null, data will be read from the current file position.
     * @returns Promise with token data
     * @throws EndOfStreamError when fewer than `token.len` bytes remain
     */
    async peekToken(token, position = this.position) {
        const buffer = new Uint8Array(token.len);
        const bytesRead = await this.peekBuffer(buffer, { position });
        if (bytesRead < token.len) {
            throw new EndOfStreamError();
        }
        return token.get(buffer, 0);
    }
    /**
     * Read a numeric token from the stream.
     * @param token - Numeric token
     * @returns Promise with number
     * @throws EndOfStreamError when fewer than `token.len` bytes remain
     */
    async readNumber(token) {
        const bytesRead = await this.readBuffer(this.numBuffer, { length: token.len });
        if (bytesRead < token.len) {
            throw new EndOfStreamError();
        }
        return token.get(this.numBuffer, 0);
    }
    /**
     * Peek a numeric token from the stream (does not advance the position).
     * @param token - Numeric token
     * @returns Promise with number
     * @throws EndOfStreamError when fewer than `token.len` bytes remain
     */
    async peekNumber(token) {
        const bytesRead = await this.peekBuffer(this.numBuffer, { length: token.len });
        if (bytesRead < token.len) {
            throw new EndOfStreamError();
        }
        return token.get(this.numBuffer, 0);
    }
    /**
     * Ignore a number of bytes, advancing the pointer in the underlying
     * tokenizer-stream.
     * @param length - Number of bytes to ignore
     * @return resolves the number of bytes ignored, equals length if available, otherwise the number of bytes available
     */
    async ignore(length) {
        const { size } = this.fileInfo;
        if (size !== undefined && length > size - this.position) {
            // Fewer bytes remain than requested: advance to EOF and report
            // how many were actually skipped.
            const skipped = size - this.position;
            this.position = size;
            return skipped;
        }
        this.position += length;
        return length;
    }
    /**
     * Close the tokenizer: abort any pending work, then run the optional
     * onClose callback supplied at construction.
     */
    async close() {
        await this.abort();
        await this.onClose?.();
    }
    /**
     * Merge caller-supplied read options with the defaults, validating that
     * non-random-access tokenizers are not asked to seek backwards.
     */
    normalizeOptions(uint8Array, options) {
        const wantsEarlierPosition = options?.position !== undefined && options.position < this.position;
        if (!this.supportsRandomAccess() && options && wantsEarlierPosition) {
            throw new Error('`options.position` must be equal or greater than `tokenizer.position`');
        }
        const defaults = {
            mayBeLess: false,
            offset: 0,
            length: uint8Array.length,
            position: this.position
        };
        return { ...defaults, ...options };
    }
    abort() {
        return Promise.resolve(); // Ignore abort signal
    }
}
|
||||
Reference in New Issue
Block a user