Initial commit - Event Planner application

This commit is contained in:
mberlin
2026-03-18 14:55:56 -03:00
commit 86d779eb4d
7548 changed files with 1006324 additions and 0 deletions

76
node_modules/strtok3/lib/AbstractTokenizer.d.ts generated vendored Normal file
View File

@@ -0,0 +1,76 @@
import type { ITokenizer, IFileInfo, IReadChunkOptions, ITokenizerOptions } from './types.js';
import type { IGetToken, IToken } from '@tokenizer/token';
/**
 * Internal variant of IReadChunkOptions in which `length` and `position`
 * have been resolved to concrete numbers (produced by
 * `AbstractTokenizer.normalizeOptions`).
 */
interface INormalizedReadChunkOptions extends IReadChunkOptions {
    length: number;
    position: number;
    mayBeLess?: boolean;
}
/**
 * Core tokenizer: base class providing token/number read and peek plumbing
 * on top of the abstract `readBuffer`/`peekBuffer` primitives.
 */
export declare abstract class AbstractTokenizer implements ITokenizer {
    // Optional callback invoked once when the tokenizer is closed.
    private onClose?;
    // Scratch buffer reused for numeric token reads (8 bytes max).
    private numBuffer;
    abstract fileInfo: IFileInfo;
    /**
     * Tokenizer-stream position
     */
    position: number;
    /**
     * Constructor
     * @param options Tokenizer options
     * @protected
     */
    protected constructor(options?: ITokenizerOptions);
    /**
     * Whether this tokenizer supports reading at arbitrary positions
     * (random access), as opposed to forward-only streaming.
     */
    abstract supportsRandomAccess(): boolean;
    /**
     * Read buffer from tokenizer
     * @param buffer - Target buffer to fill with data read from the tokenizer-stream
     * @param options - Additional read options
     * @returns Promise with number of bytes read
     */
    abstract readBuffer(buffer: Uint8Array, options?: IReadChunkOptions): Promise<number>;
    /**
     * Peek (read ahead) buffer from tokenizer
     * @param uint8Array - Target buffer to fill with data peeked from the tokenizer-stream
     * @param options - Peek behaviour options
     * @returns Promise with number of bytes read
     */
    abstract peekBuffer(uint8Array: Uint8Array, options?: IReadChunkOptions): Promise<number>;
    /**
     * Read a token from the tokenizer-stream
     * @param token - The token to read
     * @param position - If provided, the desired position in the tokenizer-stream
     * @returns Promise with token data
     */
    readToken<Value>(token: IGetToken<Value>, position?: number): Promise<Value>;
    /**
     * Peek a token from the tokenizer-stream.
     * @param token - Token to peek from the tokenizer-stream.
     * @param position - Offset where to begin reading within the file. If position is null, data will be read from the current file position.
     * @returns Promise with token data
     */
    peekToken<Value>(token: IGetToken<Value>, position?: number): Promise<Value>;
    /**
     * Read a numeric token from the stream
     * @param token - Numeric token
     * @returns Promise with number
     */
    readNumber(token: IToken<number>): Promise<number>;
    /**
     * Peek a numeric token from the stream (does not advance the position)
     * @param token - Numeric token
     * @returns Promise with number
     */
    peekNumber(token: IToken<number>): Promise<number>;
    /**
     * Ignore number of bytes, advances the pointer in under tokenizer-stream.
     * @param length - Number of bytes to ignore
     * @return Resolves to the number of bytes ignored: `length` if that many bytes were available, otherwise the number of bytes that were available
     */
    ignore(length: number): Promise<number>;
    /**
     * Abort pending operations and invoke the onClose callback, if any.
     */
    close(): Promise<void>;
    /**
     * Resolve read options against defaults and the current position;
     * throws if a backwards position is requested on a non-random-access tokenizer.
     */
    protected normalizeOptions(uint8Array: Uint8Array, options?: IReadChunkOptions): INormalizedReadChunkOptions;
    /**
     * Abort any active asynchronous operations. Base implementation is a no-op.
     */
    abort(): Promise<void>;
}
export {};

108
node_modules/strtok3/lib/AbstractTokenizer.js generated vendored Normal file
View File

@@ -0,0 +1,108 @@
import { EndOfStreamError } from './stream/index.js';
/**
 * Core tokenizer: common read/peek plumbing shared by all concrete tokenizers.
 * Subclasses supply `readBuffer`, `peekBuffer` and `supportsRandomAccess`.
 */
export class AbstractTokenizer {
    /**
     * Constructor
     * @param options Tokenizer options
     * @protected
     */
    constructor(options) {
        // Scratch buffer reused for numeric tokens (largest numeric token is 8 bytes).
        this.numBuffer = new Uint8Array(8);
        /**
         * Tokenizer-stream position
         */
        this.position = 0;
        this.onClose = options?.onClose;
        const signal = options?.abortSignal;
        if (signal) {
            // Propagate external aborts into this tokenizer.
            signal.addEventListener('abort', () => {
                this.abort();
            });
        }
    }
    /**
     * Read a token from the tokenizer-stream
     * @param token - The token to read
     * @param position - If provided, the desired position in the tokenizer-stream
     * @returns Promise with token data
     */
    async readToken(token, position = this.position) {
        const workBuffer = new Uint8Array(token.len);
        const bytesRead = await this.readBuffer(workBuffer, { position });
        if (bytesRead < token.len) {
            throw new EndOfStreamError();
        }
        return token.get(workBuffer, 0);
    }
    /**
     * Peek a token from the tokenizer-stream (does not advance the position).
     * @param token - Token to peek from the tokenizer-stream.
     * @param position - Offset where to begin reading within the file. If position is null, data will be read from the current file position.
     * @returns Promise with token data
     */
    async peekToken(token, position = this.position) {
        const workBuffer = new Uint8Array(token.len);
        const bytesRead = await this.peekBuffer(workBuffer, { position });
        if (bytesRead < token.len) {
            throw new EndOfStreamError();
        }
        return token.get(workBuffer, 0);
    }
    /**
     * Read a numeric token from the stream
     * @param token - Numeric token
     * @returns Promise with number
     */
    async readNumber(token) {
        const bytesRead = await this.readBuffer(this.numBuffer, { length: token.len });
        if (bytesRead < token.len) {
            throw new EndOfStreamError();
        }
        return token.get(this.numBuffer, 0);
    }
    /**
     * Peek a numeric token from the stream (does not advance the position).
     * @param token - Numeric token
     * @returns Promise with number
     */
    async peekNumber(token) {
        const bytesRead = await this.peekBuffer(this.numBuffer, { length: token.len });
        if (bytesRead < token.len) {
            throw new EndOfStreamError();
        }
        return token.get(this.numBuffer, 0);
    }
    /**
     * Ignore number of bytes, advancing the tokenizer-stream position.
     * @param length - Number of bytes to ignore
     * @return Resolves to the number of bytes ignored: `length` if available, otherwise the number of bytes remaining
     */
    async ignore(length) {
        const { size } = this.fileInfo;
        if (size !== undefined) {
            // Clamp the skip to the known remaining bytes.
            const bytesLeft = size - this.position;
            if (length > bytesLeft) {
                this.position += bytesLeft;
                return bytesLeft;
            }
        }
        this.position += length;
        return length;
    }
    async close() {
        await this.abort();
        await this.onClose?.();
    }
    normalizeOptions(uint8Array, options) {
        if (!this.supportsRandomAccess() && options?.position !== undefined && options.position < this.position) {
            throw new Error('`options.position` must be equal or greater than `tokenizer.position`');
        }
        // Defaults first; any caller-supplied option (even an explicit undefined) overrides.
        return {
            mayBeLess: false,
            offset: 0,
            length: uint8Array.length,
            position: this.position,
            ...options
        };
    }
    abort() {
        return Promise.resolve(); // Ignore abort signal
    }
}

29
node_modules/strtok3/lib/BlobTokenizer.d.ts generated vendored Normal file
View File

@@ -0,0 +1,29 @@
import type { ITokenizerOptions, IReadChunkOptions, IRandomAccessFileInfo, IRandomAccessTokenizer } from './types.js';
import { AbstractTokenizer } from './AbstractTokenizer.js';
export declare class BlobTokenizer extends AbstractTokenizer implements IRandomAccessTokenizer {
    private blob;
    fileInfo: IRandomAccessFileInfo;
    /**
     * Construct BlobTokenizer
     * @param blob - Blob to tokenize
     * @param options Tokenizer options
     */
    constructor(blob: Blob, options?: ITokenizerOptions);
    /**
     * Read buffer from tokenizer
     * @param uint8Array - Target buffer to fill with data read
     * @param options - Read behaviour options
     * @returns {Promise<number>} Number of bytes read
     */
    readBuffer(uint8Array: Uint8Array, options?: IReadChunkOptions): Promise<number>;
    /**
     * Peek (read ahead) buffer from tokenizer; does not advance the position
     * @param buffer - Target buffer to fill with peeked data
     * @param options - Read behaviour options
     * @returns {Promise<number>} Number of bytes peeked
     */
    peekBuffer(buffer: Uint8Array, options?: IReadChunkOptions): Promise<number>;
    close(): Promise<void>;
    supportsRandomAccess(): boolean;
    setPosition(position: number): void;
}

53
node_modules/strtok3/lib/BlobTokenizer.js generated vendored Normal file
View File

@@ -0,0 +1,53 @@
import { EndOfStreamError } from './stream/index.js';
import { AbstractTokenizer } from './AbstractTokenizer.js';
export class BlobTokenizer extends AbstractTokenizer {
    /**
     * Construct BlobTokenizer
     * @param blob - Blob to tokenize
     * @param options Tokenizer options
     */
    constructor(blob, options) {
        super(options);
        this.blob = blob;
        // Blob supplies both size and MIME type; these override caller-supplied fileInfo fields.
        this.fileInfo = { ...options?.fileInfo ?? {}, ...{ size: blob.size, mimeType: blob.type } };
    }
    /**
     * Read buffer from tokenizer
     * @param uint8Array - Target buffer to fill with data read
     * @param options - Read behaviour options
     * @returns {Promise<number>} Number of bytes read
     */
    async readBuffer(uint8Array, options) {
        // Fix: compare against undefined rather than relying on truthiness,
        // so an explicit `position: 0` also repositions the tokenizer.
        // Previously `if (options?.position)` ignored position 0, reading data
        // from offset 0 while advancing `this.position` from its old value.
        if (options?.position !== undefined) {
            this.position = options.position;
        }
        const bytesRead = await this.peekBuffer(uint8Array, options);
        this.position += bytesRead;
        return bytesRead;
    }
    /**
     * Peek (read ahead) buffer from tokenizer; does not advance the position
     * @param buffer - Target buffer to fill with peeked data
     * @param options - Read behaviour options
     * @returns {Promise<number>} Number of bytes peeked
     */
    async peekBuffer(buffer, options) {
        const normOptions = this.normalizeOptions(buffer, options);
        // Clamp the read length to the bytes remaining in the blob.
        const bytes2read = Math.min(this.blob.size - normOptions.position, normOptions.length);
        if ((!normOptions.mayBeLess) && bytes2read < normOptions.length) {
            throw new EndOfStreamError();
        }
        const arrayBuffer = await this.blob.slice(normOptions.position, normOptions.position + bytes2read).arrayBuffer();
        buffer.set(new Uint8Array(arrayBuffer));
        return bytes2read;
    }
    close() {
        return super.close();
    }
    supportsRandomAccess() {
        return true;
    }
    setPosition(position) {
        this.position = position;
    }
}

29
node_modules/strtok3/lib/BufferTokenizer.d.ts generated vendored Normal file
View File

@@ -0,0 +1,29 @@
import type { ITokenizerOptions, IReadChunkOptions, IRandomAccessFileInfo, IRandomAccessTokenizer } from './types.js';
import { AbstractTokenizer } from './AbstractTokenizer.js';
export declare class BufferTokenizer extends AbstractTokenizer implements IRandomAccessTokenizer {
    private uint8Array;
    fileInfo: IRandomAccessFileInfo;
    /**
     * Construct BufferTokenizer
     * @param uint8Array - Uint8Array to tokenize
     * @param options Tokenizer options
     */
    constructor(uint8Array: Uint8Array, options?: ITokenizerOptions);
    /**
     * Read buffer from tokenizer
     * @param uint8Array - Target buffer to fill with data read
     * @param options - Read behaviour options
     * @returns {Promise<number>} Number of bytes read
     */
    readBuffer(uint8Array: Uint8Array, options?: IReadChunkOptions): Promise<number>;
    /**
     * Peek (read ahead) buffer from tokenizer; does not advance the position
     * @param uint8Array - Target buffer to fill with peeked data
     * @param options - Read behaviour options
     * @returns {Promise<number>} Number of bytes peeked
     */
    peekBuffer(uint8Array: Uint8Array, options?: IReadChunkOptions): Promise<number>;
    close(): Promise<void>;
    supportsRandomAccess(): boolean;
    setPosition(position: number): void;
}

52
node_modules/strtok3/lib/BufferTokenizer.js generated vendored Normal file
View File

@@ -0,0 +1,52 @@
import { EndOfStreamError } from './stream/index.js';
import { AbstractTokenizer } from './AbstractTokenizer.js';
export class BufferTokenizer extends AbstractTokenizer {
    /**
     * Construct BufferTokenizer
     * @param uint8Array - Uint8Array to tokenize
     * @param options Tokenizer options
     */
    constructor(uint8Array, options) {
        super(options);
        this.uint8Array = uint8Array;
        // The buffer length defines the file size; it overrides caller-supplied fileInfo.size.
        this.fileInfo = { ...options?.fileInfo ?? {}, ...{ size: uint8Array.length } };
    }
    /**
     * Read buffer from tokenizer
     * @param uint8Array - Target buffer to fill with data read
     * @param options - Read behaviour options
     * @returns {Promise<number>} Number of bytes read
     */
    async readBuffer(uint8Array, options) {
        // Fix: compare against undefined rather than relying on truthiness,
        // so an explicit `position: 0` also repositions the tokenizer.
        // Previously `if (options?.position)` ignored position 0, reading data
        // from offset 0 while advancing `this.position` from its old value.
        if (options?.position !== undefined) {
            this.position = options.position;
        }
        const bytesRead = await this.peekBuffer(uint8Array, options);
        this.position += bytesRead;
        return bytesRead;
    }
    /**
     * Peek (read ahead) buffer from tokenizer; does not advance the position
     * @param uint8Array - Target buffer to fill with peeked data
     * @param options - Read behaviour options
     * @returns {Promise<number>} Number of bytes peeked
     */
    async peekBuffer(uint8Array, options) {
        const normOptions = this.normalizeOptions(uint8Array, options);
        // Clamp the read length to the bytes remaining in the backing buffer.
        const bytes2read = Math.min(this.uint8Array.length - normOptions.position, normOptions.length);
        if ((!normOptions.mayBeLess) && bytes2read < normOptions.length) {
            throw new EndOfStreamError();
        }
        uint8Array.set(this.uint8Array.subarray(normOptions.position, normOptions.position + bytes2read));
        return bytes2read;
    }
    close() {
        return super.close();
    }
    supportsRandomAccess() {
        return true;
    }
    setPosition(position) {
        this.position = position;
    }
}

37
node_modules/strtok3/lib/FileTokenizer.d.ts generated vendored Normal file
View File

@@ -0,0 +1,37 @@
import { AbstractTokenizer } from './AbstractTokenizer.js';
import type { IRandomAccessTokenizer, IRandomAccessFileInfo, IReadChunkOptions, ITokenizerOptions } from './types.js';
import { type FileHandle } from 'node:fs/promises';
/**
 * Tokenizer options for FileTokenizer, where file information is mandatory
 * (size and path are known once the file has been opened and stat'ed).
 */
interface IFileTokenizerOptions extends ITokenizerOptions {
    /**
     * Pass additional file information to the tokenizer
     */
    fileInfo: IRandomAccessFileInfo;
}
export declare class FileTokenizer extends AbstractTokenizer implements IRandomAccessTokenizer {
    private fileHandle;
    fileInfo: IRandomAccessFileInfo;
    /**
     * Create tokenizer from provided file path.
     * Opens the file and resolves its size via stat.
     * @param sourceFilePath File path
     */
    static fromFile(sourceFilePath: string): Promise<FileTokenizer>;
    protected constructor(fileHandle: FileHandle, options: IFileTokenizerOptions);
    /**
     * Read buffer from file
     * @param uint8Array - Uint8Array to write result to
     * @param options - Read behaviour options
     * @returns Promise number of bytes read
     */
    readBuffer(uint8Array: Uint8Array, options?: IReadChunkOptions): Promise<number>;
    /**
     * Peek buffer from file; does not advance the position
     * @param uint8Array - Uint8Array (or Buffer) to write data to
     * @param options - Read behaviour options
     * @returns Promise number of bytes read
     */
    peekBuffer(uint8Array: Uint8Array, options?: IReadChunkOptions): Promise<number>;
    close(): Promise<void>;
    setPosition(position: number): void;
    supportsRandomAccess(): boolean;
}
export {};

61
node_modules/strtok3/lib/FileTokenizer.js generated vendored Normal file
View File

@@ -0,0 +1,61 @@
import { AbstractTokenizer } from './AbstractTokenizer.js';
import { EndOfStreamError } from './stream/index.js';
import { open as fsOpen } from 'node:fs/promises';
export class FileTokenizer extends AbstractTokenizer {
    /**
     * Create tokenizer from provided file path.
     * Opens the file read-only and records its size via stat.
     * @param sourceFilePath File path
     */
    static async fromFile(sourceFilePath) {
        const fileHandle = await fsOpen(sourceFilePath, 'r');
        const { size } = await fileHandle.stat();
        return new FileTokenizer(fileHandle, { fileInfo: { path: sourceFilePath, size } });
    }
    constructor(fileHandle, options) {
        super(options);
        this.fileHandle = fileHandle;
        this.fileInfo = options.fileInfo;
    }
    /**
     * Read buffer from file
     * @param uint8Array - Uint8Array to write result to
     * @param options - Read behaviour options
     * @returns Promise number of bytes read
     */
    async readBuffer(uint8Array, options) {
        const opts = this.normalizeOptions(uint8Array, options);
        // Adopt the (possibly explicit) read position, then advance by what was read.
        this.position = opts.position;
        if (opts.length === 0) {
            return 0;
        }
        const { bytesRead } = await this.fileHandle.read(uint8Array, 0, opts.length, opts.position);
        this.position += bytesRead;
        if (bytesRead < opts.length && !options?.mayBeLess) {
            throw new EndOfStreamError();
        }
        return bytesRead;
    }
    /**
     * Peek buffer from file; does not advance the tokenizer position
     * @param uint8Array - Uint8Array (or Buffer) to write data to
     * @param options - Read behaviour options
     * @returns Promise number of bytes read
     */
    async peekBuffer(uint8Array, options) {
        const opts = this.normalizeOptions(uint8Array, options);
        const { bytesRead } = await this.fileHandle.read(uint8Array, 0, opts.length, opts.position);
        if (!opts.mayBeLess && bytesRead < opts.length) {
            throw new EndOfStreamError();
        }
        return bytesRead;
    }
    async close() {
        await this.fileHandle.close();
        return super.close();
    }
    setPosition(position) {
        this.position = position;
    }
    supportsRandomAccess() {
        return true;
    }
}

31
node_modules/strtok3/lib/ReadStreamTokenizer.d.ts generated vendored Normal file
View File

@@ -0,0 +1,31 @@
import { AbstractTokenizer } from './AbstractTokenizer.js';
import { type IStreamReader } from './stream/index.js';
import type { IFileInfo, IReadChunkOptions, ITokenizerOptions } from './types.js';
/**
 * Forward-only tokenizer over a stream reader; supportsRandomAccess() is false,
 * so reads at positions before the current position are rejected.
 */
export declare class ReadStreamTokenizer extends AbstractTokenizer {
    private streamReader;
    fileInfo: IFileInfo;
    /**
     * Constructor
     * @param streamReader stream-reader to read from
     * @param options Tokenizer options
     */
    constructor(streamReader: IStreamReader, options?: ITokenizerOptions);
    /**
     * Read buffer from tokenizer
     * @param uint8Array - Target Uint8Array to fill with data read from the tokenizer-stream
     * @param options - Read behaviour options
     * @returns Promise with number of bytes read
     */
    readBuffer(uint8Array: Uint8Array, options?: IReadChunkOptions): Promise<number>;
    /**
     * Peek (read ahead) buffer from tokenizer; does not advance the position
     * @param uint8Array - Uint8Array (or Buffer) to write data to
     * @param options - Read behaviour options
     * @returns Promise with number of bytes peeked
     */
    peekBuffer(uint8Array: Uint8Array, options?: IReadChunkOptions): Promise<number>;
    ignore(length: number): Promise<number>;
    abort(): Promise<void>;
    close(): Promise<void>;
    supportsRandomAccess(): boolean;
}

102
node_modules/strtok3/lib/ReadStreamTokenizer.js generated vendored Normal file
View File

@@ -0,0 +1,102 @@
import { AbstractTokenizer } from './AbstractTokenizer.js';
import { EndOfStreamError } from './stream/index.js';
// Upper bound on the scratch buffer allocated by ignore(); larger skips are chunked.
const maxBufferSize = 256000;
export class ReadStreamTokenizer extends AbstractTokenizer {
    /**
     * Constructor
     * @param streamReader stream-reader to read from
     * @param options Tokenizer options
     */
    constructor(streamReader, options) {
        super(options);
        this.streamReader = streamReader;
        this.fileInfo = options?.fileInfo ?? {};
    }
    /**
     * Read buffer from tokenizer
     * @param uint8Array - Target Uint8Array to fill with data read from the tokenizer-stream
     * @param options - Read behaviour options
     * @returns Promise with number of bytes read
     */
    async readBuffer(uint8Array, options) {
        const normOptions = this.normalizeOptions(uint8Array, options);
        const skipBytes = normOptions.position - this.position;
        if (skipBytes > 0) {
            // Forward seek: consume the gap, then retry the read at the (now current) position.
            await this.ignore(skipBytes);
            return this.readBuffer(uint8Array, options);
        }
        if (skipBytes < 0) {
            // Streams are forward-only; cannot rewind.
            throw new Error('`options.position` must be equal or greater than `tokenizer.position`');
        }
        if (normOptions.length === 0) {
            return 0;
        }
        const bytesRead = await this.streamReader.read(uint8Array.subarray(0, normOptions.length), normOptions.mayBeLess);
        this.position += bytesRead;
        if ((!options || !options.mayBeLess) && bytesRead < normOptions.length) {
            throw new EndOfStreamError();
        }
        return bytesRead;
    }
    /**
     * Peek (read ahead) buffer from tokenizer; does not advance the position
     * @param uint8Array - Uint8Array (or Buffer) to write data to
     * @param options - Read behaviour options
     * @returns Promise with number of bytes peeked
     */
    async peekBuffer(uint8Array, options) {
        const normOptions = this.normalizeOptions(uint8Array, options);
        let bytesRead = 0;
        // NOTE(review): truthiness check means an explicit position of 0 takes the
        // plain-peek path below; harmless only because position 0 implies skipBytes <= 0.
        if (normOptions.position) {
            const skipBytes = normOptions.position - this.position;
            if (skipBytes > 0) {
                // Peek past a gap: peek gap + payload into a temp buffer, copy out the tail.
                const skipBuffer = new Uint8Array(normOptions.length + skipBytes);
                bytesRead = await this.peekBuffer(skipBuffer, { mayBeLess: normOptions.mayBeLess });
                uint8Array.set(skipBuffer.subarray(skipBytes));
                return bytesRead - skipBytes;
            }
            if (skipBytes < 0) {
                throw new Error('Cannot peek from a negative offset in a stream');
            }
        }
        if (normOptions.length > 0) {
            try {
                bytesRead = await this.streamReader.peek(uint8Array.subarray(0, normOptions.length), normOptions.mayBeLess);
            }
            catch (err) {
                // Best-effort peek: end-of-stream is not an error when mayBeLess is set.
                if (options?.mayBeLess && err instanceof EndOfStreamError) {
                    return 0;
                }
                throw err;
            }
            if ((!normOptions.mayBeLess) && bytesRead < normOptions.length) {
                throw new EndOfStreamError();
            }
        }
        return bytesRead;
    }
    async ignore(length) {
        // debug(`ignore ${this.position}...${this.position + length - 1}`);
        // Skip by reading into a bounded scratch buffer, in chunks of at most maxBufferSize.
        const bufSize = Math.min(maxBufferSize, length);
        const buf = new Uint8Array(bufSize);
        let totBytesRead = 0;
        while (totBytesRead < length) {
            const remaining = length - totBytesRead;
            const bytesRead = await this.readBuffer(buf, { length: Math.min(bufSize, remaining) });
            if (bytesRead < 0) {
                return bytesRead;
            }
            totBytesRead += bytesRead;
        }
        return totBytesRead;
    }
    abort() {
        return this.streamReader.abort();
    }
    async close() {
        return this.streamReader.close();
    }
    supportsRandomAccess() {
        return false;
    }
}

40
node_modules/strtok3/lib/core.d.ts generated vendored Normal file
View File

@@ -0,0 +1,40 @@
import type { Readable } from 'node:stream';
import { type AnyWebByteStream } from './stream/index.js';
import { ReadStreamTokenizer } from './ReadStreamTokenizer.js';
import { BufferTokenizer } from './BufferTokenizer.js';
import type { ITokenizerOptions } from './types.js';
import { BlobTokenizer } from './BlobTokenizer.js';
export { EndOfStreamError, AbortError, type AnyWebByteStream } from './stream/index.js';
export type { ITokenizer, IRandomAccessTokenizer, IFileInfo, IRandomAccessFileInfo, ITokenizerOptions, IReadChunkOptions, OnClose } from './types.js';
export type { IToken, IGetToken } from '@tokenizer/token';
export { AbstractTokenizer } from './AbstractTokenizer.js';
/**
 * Construct ReadStreamTokenizer from given Stream.
 * Will set fileSize, if the provided Stream has the .path property set.
 * @param stream - Read from Node.js Stream.Readable
 * @param options - Tokenizer options
 * @returns ReadStreamTokenizer
 */
export declare function fromStream(stream: Readable, options?: ITokenizerOptions): ReadStreamTokenizer;
/**
 * Construct ReadStreamTokenizer from given ReadableStream (WebStream API).
 * @param webStream - Web byte stream to read from
 * @param options - Tokenizer options
 * @returns ReadStreamTokenizer
 */
export declare function fromWebStream(webStream: AnyWebByteStream, options?: ITokenizerOptions): ReadStreamTokenizer;
/**
 * Construct BufferTokenizer from given Buffer.
 * @param uint8Array - Uint8Array to tokenize
 * @param options - Tokenizer options
 * @returns BufferTokenizer
 */
export declare function fromBuffer(uint8Array: Uint8Array, options?: ITokenizerOptions): BufferTokenizer;
/**
 * Construct BlobTokenizer from given Blob.
 * @param blob - Blob to tokenize
 * @param options - Tokenizer options
 * @returns BlobTokenizer
 */
export declare function fromBlob(blob: Blob, options?: ITokenizerOptions): BlobTokenizer;

62
node_modules/strtok3/lib/core.js generated vendored Normal file
View File

@@ -0,0 +1,62 @@
import { StreamReader, makeWebStreamReader } from './stream/index.js';
import { ReadStreamTokenizer } from './ReadStreamTokenizer.js';
import { BufferTokenizer } from './BufferTokenizer.js';
import { BlobTokenizer } from './BlobTokenizer.js';
export { EndOfStreamError, AbortError } from './stream/index.js';
export { AbstractTokenizer } from './AbstractTokenizer.js';
/**
 * Construct ReadStreamTokenizer from given Stream.
 * Closing the tokenizer closes the underlying stream-reader first,
 * then chains to any caller-supplied onClose handler.
 * @param stream - Read from Node.js Stream.Readable
 * @param options - Tokenizer options
 * @returns ReadStreamTokenizer
 */
export function fromStream(stream, options) {
    const reader = new StreamReader(stream);
    const tokenizerOptions = options ?? {};
    const userOnClose = tokenizerOptions.onClose;
    tokenizerOptions.onClose = async () => {
        await reader.close();
        return userOnClose?.();
    };
    return new ReadStreamTokenizer(reader, tokenizerOptions);
}
/**
 * Construct ReadStreamTokenizer from given ReadableStream (WebStream API).
 * Closing the tokenizer closes the underlying web-stream reader first,
 * then chains to any caller-supplied onClose handler.
 * @param webStream - Web byte stream to read from
 * @param options - Tokenizer options
 * @returns ReadStreamTokenizer
 */
export function fromWebStream(webStream, options) {
    const reader = makeWebStreamReader(webStream);
    const tokenizerOptions = options ?? {};
    const userOnClose = tokenizerOptions.onClose;
    tokenizerOptions.onClose = async () => {
        await reader.close();
        return userOnClose?.();
    };
    return new ReadStreamTokenizer(reader, tokenizerOptions);
}
/**
 * Construct BufferTokenizer from given Buffer.
 * @param uint8Array - Uint8Array to tokenize
 * @param options - Tokenizer options
 * @returns BufferTokenizer
 */
export function fromBuffer(uint8Array, options) {
    return new BufferTokenizer(uint8Array, options);
}
/**
 * Construct BlobTokenizer from given Blob.
 * @param blob - Blob to tokenize
 * @param options - Tokenizer options
 * @returns BlobTokenizer
 */
export function fromBlob(blob, options) {
    return new BlobTokenizer(blob, options);
}

16
node_modules/strtok3/lib/index.d.ts generated vendored Normal file
View File

@@ -0,0 +1,16 @@
import type { Readable } from 'node:stream';
import type { ReadStreamTokenizer } from './ReadStreamTokenizer.js';
import { type ITokenizerOptions } from './core.js';
import { FileTokenizer } from "./FileTokenizer.js";
export { FileTokenizer } from './FileTokenizer.js';
export * from './core.js';
export type { IToken, IGetToken } from '@tokenizer/token';
/**
 * Construct ReadStreamTokenizer from given Stream.
 * Will set fileSize and path, if the provided Stream has the .path property set.
 * @param stream - Node.js Stream.Readable
 * @param options - Pass additional file information to the tokenizer
 * @returns Tokenizer
 */
export declare function fromStream(stream: Readable, options?: ITokenizerOptions): Promise<ReadStreamTokenizer>;
// Convenience alias for FileTokenizer.fromFile (Node.js only entry point).
export declare const fromFile: typeof FileTokenizer.fromFile;

22
node_modules/strtok3/lib/index.js generated vendored Normal file
View File

@@ -0,0 +1,22 @@
import { stat as fsStat } from 'node:fs/promises';
import { fromStream as coreFromStream } from './core.js';
import { FileTokenizer } from "./FileTokenizer.js";
export { FileTokenizer } from './FileTokenizer.js';
export * from './core.js';
/**
 * Construct ReadStreamTokenizer from given Stream.
 * Will set fileSize and path, if the provided Stream has the .path property set
 * (file-backed streams); otherwise fileInfo is left as produced by core.
 * @param stream - Node.js Stream.Readable
 * @param options - Pass additional file information to the tokenizer
 * @returns Tokenizer
 */
export async function fromStream(stream, options) {
    const tokenizer = coreFromStream(stream, options);
    if (stream.path) {
        // File-backed stream: enrich fileInfo with path and on-disk size.
        const { size } = await fsStat(stream.path);
        tokenizer.fileInfo.path = stream.path;
        tokenizer.fileInfo.size = size;
    }
    return tokenizer;
}
// Convenience alias for opening a file directly.
export const fromFile = FileTokenizer.fromFile;

View File

@@ -0,0 +1,54 @@
export interface IStreamReader {
    /**
     * Peek ahead from the stream. Subsequent reads or peeks will return the same data.
     * @param uint8Array - Uint8Array (or Buffer) to store data read from stream in
     * @param mayBeLess - Allow the read to complete, without the buffer being fully filled (length may be smaller)
     * @returns Number of bytes peeked. If `mayBeLess` is not set, this shall be `uint8Array.length`.
     */
    peek(uint8Array: Uint8Array, mayBeLess?: boolean): Promise<number>;
    /**
     * Read from the stream.
     * @param uint8Array - Uint8Array (or Buffer) to store data read from stream in
     * @param mayBeLess - Allow the read to complete, without the buffer being fully filled (length may be smaller)
     * @returns Actual number of bytes read. If `mayBeLess` is not set, this shall be `uint8Array.length`.
     */
    read(uint8Array: Uint8Array, mayBeLess?: boolean): Promise<number>;
    close(): Promise<void>;
    /**
     * If any asynchronous operations are active, abort those before they may have completed.
     */
    abort(): Promise<void>;
}
export declare abstract class AbstractStreamReader implements IStreamReader {
    // True once the underlying stream has been fully consumed.
    protected endOfStream: boolean;
    // Set when abort() was requested; pending reads throw AbortError.
    protected interrupted: boolean;
    /**
     * Store peeked data, so subsequent reads return it before touching the stream.
     */
    protected peekQueue: Uint8Array[];
    peek(uint8Array: Uint8Array, mayBeLess?: boolean): Promise<number>;
    read(buffer: Uint8Array, mayBeLess?: boolean): Promise<number>;
    /**
     * Read chunk from the peek queue (previously peeked data)
     * @param buffer - Target Uint8Array (or Buffer) to store data read from stream in
     * @returns Number of bytes read
     */
    protected readFromPeekBuffer(buffer: Uint8Array): number;
    readRemainderFromStream(buffer: Uint8Array, mayBeLess: boolean): Promise<number>;
    /**
     * Read from stream
     * @param buffer - Target Uint8Array (or Buffer) to store data read from stream in
     * @param mayBeLess - If true, may fill the buffer partially
     * @protected Bytes read
     */
    protected abstract readFromStream(buffer: Uint8Array, mayBeLess: boolean): Promise<number>;
    /**
     * Close the reader, releasing any underlying resources.
     */
    abstract close(): Promise<void>;
    /**
     * If any asynchronous operations are active, abort those before they may have completed.
     */
    abstract abort(): Promise<void>;
}

View File

@@ -0,0 +1,71 @@
import { EndOfStreamError, AbortError } from "./Errors.js";
/**
 * Base stream reader implementing peek via a put-back queue:
 * peeked data is pushed back onto `peekQueue` and re-served by later reads.
 */
export class AbstractStreamReader {
    constructor() {
        this.endOfStream = false;
        this.interrupted = false;
        /**
         * Store peeked data, served again by subsequent reads.
         */
        this.peekQueue = [];
    }
    async peek(uint8Array, mayBeLess = false) {
        // A peek is a read whose data is immediately put back for the next consumer.
        const bytesRead = await this.read(uint8Array, mayBeLess);
        this.peekQueue.push(uint8Array.subarray(0, bytesRead));
        return bytesRead;
    }
    async read(buffer, mayBeLess = false) {
        if (buffer.length === 0) {
            return 0;
        }
        // Serve previously peeked data first, then top up from the stream.
        let bytesRead = this.readFromPeekBuffer(buffer);
        if (!this.endOfStream) {
            bytesRead += await this.readRemainderFromStream(buffer.subarray(bytesRead), mayBeLess);
        }
        if (bytesRead === 0 && !mayBeLess) {
            throw new EndOfStreamError();
        }
        return bytesRead;
    }
    /**
     * Copy previously peeked data into the buffer.
     * @param buffer - Target Uint8Array (or Buffer) to store data read from stream in
     * @returns Number of bytes read
     */
    readFromPeekBuffer(buffer) {
        let copied = 0;
        let wanted = buffer.length;
        while (wanted > 0 && this.peekQueue.length > 0) {
            // Take the most recently put-back chunk (LIFO put-back discipline).
            const chunk = this.peekQueue.pop();
            if (!chunk)
                throw new Error('peekData should be defined');
            const copyLen = Math.min(chunk.length, wanted);
            buffer.set(chunk.subarray(0, copyLen), copied);
            copied += copyLen;
            wanted -= copyLen;
            if (copyLen < chunk.length) {
                // Put the unconsumed remainder back for the next read.
                this.peekQueue.push(chunk.subarray(copyLen));
            }
        }
        return copied;
    }
    async readRemainderFromStream(buffer, mayBeLess) {
        let totalRead = 0;
        // Keep pulling chunks until the buffer is full or the stream ends.
        while (totalRead < buffer.length && !this.endOfStream) {
            if (this.interrupted) {
                throw new AbortError();
            }
            const chunkLen = await this.readFromStream(buffer.subarray(totalRead), mayBeLess);
            if (chunkLen === 0)
                break;
            totalRead += chunkLen;
        }
        if (!mayBeLess && totalRead < buffer.length) {
            throw new EndOfStreamError();
        }
        return totalRead;
    }
}

6
node_modules/strtok3/lib/stream/Deferred.d.ts generated vendored Normal file
View File

@@ -0,0 +1,6 @@
/**
 * A promise with its resolve/reject functions exposed,
 * so the outcome can be settled from outside the executor.
 */
export declare class Deferred<T> {
    promise: Promise<T>;
    resolve: (value: T) => void;
    reject: (reason: Error) => void;
    constructor();
}

10
node_modules/strtok3/lib/stream/Deferred.js generated vendored Normal file
View File

@@ -0,0 +1,10 @@
/**
 * A promise whose resolve/reject functions are exposed as instance members,
 * so the outcome can be settled from outside the Promise executor.
 */
export class Deferred {
    constructor() {
        // Placeholders; the executor below runs synchronously and replaces them.
        this.resolve = () => null;
        this.reject = () => null;
        this.promise = new Promise((res, rej) => {
            this.resolve = res;
            this.reject = rej;
        });
    }
}

10
node_modules/strtok3/lib/stream/Errors.d.ts generated vendored Normal file
View File

@@ -0,0 +1,10 @@
// Message carried by every EndOfStreamError instance.
export declare const defaultMessages = "End-Of-Stream";
/**
 * Thrown on read operation when the end of file or stream has been reached
 */
export declare class EndOfStreamError extends Error {
    constructor();
}
/**
 * Thrown when an asynchronous operation is aborted before completion.
 */
export declare class AbortError extends Error {
    constructor(message?: string);
}

16
node_modules/strtok3/lib/stream/Errors.js generated vendored Normal file
View File

@@ -0,0 +1,16 @@
// Message carried by every EndOfStreamError instance.
export const defaultMessages = 'End-Of-Stream';
/**
 * Thrown on read operation when the end of file or stream has been reached
 */
export class EndOfStreamError extends Error {
    constructor() {
        super(defaultMessages);
        this.name = 'EndOfStreamError';
    }
}
/**
 * Thrown when an asynchronous operation is aborted before completion.
 */
export class AbortError extends Error {
    constructor(message = 'The operation was aborted') {
        super(message);
        this.name = 'AbortError';
    }
}

29
node_modules/strtok3/lib/stream/StreamReader.d.ts generated vendored Normal file
View File

@@ -0,0 +1,29 @@
import type { Readable } from 'node:stream';
import { AbstractStreamReader } from "./AbstractStreamReader.js";
/**
 * Node.js Readable Stream Reader
 * Ref: https://nodejs.org/api/stream.html#readable-streams
 */
export declare class StreamReader extends AbstractStreamReader {
    private s;
    /**
     * Deferred used for a postponed read request (when no data is yet available to read)
     */
    private deferred;
    constructor(s: Readable);
    /**
     * Read chunk from stream
     * @param buffer Target Uint8Array (or Buffer) to store data read from stream in
     * @param mayBeLess - If true, may fill the buffer partially
     * @returns Number of bytes read
     */
    protected readFromStream(buffer: Uint8Array, mayBeLess: boolean): Promise<number>;
    /**
     * Process deferred read request
     * @param request Deferred read request
     */
    private readDeferred;
    private reject;
    abort(): Promise<void>;
    close(): Promise<void>;
}

83
node_modules/strtok3/lib/stream/StreamReader.js generated vendored Normal file
View File

@@ -0,0 +1,83 @@
import { AbortError, } from './Errors.js';
import { Deferred } from './Deferred.js';
import { AbstractStreamReader } from "./AbstractStreamReader.js";
/**
* Node.js Readable Stream Reader
* Ref: https://nodejs.org/api/stream.html#readable-streams
*/
export class StreamReader extends AbstractStreamReader {
    constructor(s) {
        super();
        this.s = s;
        /**
         * Deferred used for a postponed read request (when no data is yet available to read)
         */
        this.deferred = null;
        // Duck-type check: accept anything that looks like a stream.Readable.
        if (!s.read || !s.once) {
            throw new Error('Expected an instance of stream.Readable');
        }
        // When the stream ends, resolve any pending deferred read with 0 bytes.
        this.s.once('end', () => {
            this.endOfStream = true;
            if (this.deferred) {
                this.deferred.resolve(0);
            }
        });
        // Stream errors reject the pending deferred read (if any).
        this.s.once('error', err => this.reject(err));
        // NOTE(review): 'close' aborts pending reads with an AbortError;
        // presumably the base class surfaces that to callers — confirm
        // against AbstractStreamReader.
        this.s.once('close', () => this.abort());
    }
    /**
     * Read chunk from stream
     * @param buffer Target Uint8Array (or Buffer) to store data read from stream in
     * @param mayBeLess - If true, may fill the buffer partially
     * @returns Number of bytes read
     */
    async readFromStream(buffer, mayBeLess) {
        if (buffer.length === 0)
            return 0;
        // Try a synchronous read first; Readable.read() returns null when the
        // requested amount of data is not yet buffered.
        const readBuffer = this.s.read(buffer.length);
        if (readBuffer) {
            buffer.set(readBuffer);
            return readBuffer.length;
        }
        // No data available yet: defer the read until 'readable' fires.
        const request = {
            buffer,
            mayBeLess,
            deferred: new Deferred()
        };
        this.deferred = request.deferred;
        this.s.once('readable', () => {
            this.readDeferred(request);
        });
        return request.deferred.promise;
    }
    /**
     * Process deferred read request
     * @param request Deferred read request
     */
    readDeferred(request) {
        const readBuffer = this.s.read(request.buffer.length);
        if (readBuffer) {
            request.buffer.set(readBuffer);
            request.deferred.resolve(readBuffer.length);
            this.deferred = null;
        }
        else {
            // Still not enough data buffered; re-arm and wait for the next
            // 'readable' event.
            this.s.once('readable', () => {
                this.readDeferred(request);
            });
        }
    }
    // Reject the pending deferred read (if any) and mark the reader interrupted.
    reject(err) {
        this.interrupted = true;
        if (this.deferred) {
            this.deferred.reject(err);
            this.deferred = null;
        }
    }
    async abort() {
        this.reject(new AbortError());
    }
    async close() {
        return this.abort();
    }
}

View File

@@ -0,0 +1,14 @@
import { WebStreamReader } from './WebStreamReader.js';
/**
* Read from a WebStream using a BYOB reader
* Reference: https://nodejs.org/api/webstreams.html#class-readablestreambyobreader
*/
export declare class WebStreamByobReader extends WebStreamReader {
    /**
     * Read from stream
     * @param buffer - Target Uint8Array (or Buffer) to store data read from stream in
     * @param mayBeLess - If true, may fill the buffer partially
     * @returns Promise with the number of bytes read
     */
    protected readFromStream(buffer: Uint8Array, mayBeLess: boolean): Promise<number>;
}

27
node_modules/strtok3/lib/stream/WebStreamByobReader.js generated vendored Normal file
View File

@@ -0,0 +1,27 @@
import { WebStreamReader } from './WebStreamReader.js';
/**
* Read from a WebStream using a BYOB reader
* Reference: https://nodejs.org/api/webstreams.html#class-readablestreambyobreader
*/
export class WebStreamByobReader extends WebStreamReader {
    /**
     * Read from stream
     * @param buffer - Target Uint8Array (or Buffer) to store data read from stream in
     * @param mayBeLess - If true, may fill the buffer partially
     * @returns Promise with the number of bytes read
     */
    async readFromStream(buffer, mayBeLess) {
        if (buffer.length === 0)
            return 0;
        // BYOB read into a fresh view of the requested size. The `min` option
        // asks the reader not to resolve until at least that many bytes are
        // available (unless the stream closes first); it is omitted when a
        // partial fill is acceptable. NOTE(review): `min` is a newer Streams
        // spec addition — hence the ts-ignore for older lib typings.
        // @ts-ignore
        const result = await this.reader.read(new Uint8Array(buffer.length), { min: mayBeLess ? undefined : buffer.length });
        if (result.done) {
            this.endOfStream = result.done;
        }
        if (result.value) {
            // Copy the bytes back into the caller-supplied buffer.
            buffer.set(result.value);
            return result.value.length;
        }
        return 0;
    }
}

View File

@@ -0,0 +1,19 @@
import { AbstractStreamReader } from "./AbstractStreamReader.js";
/**
 * Reads from a WebStream using a default (non-BYOB) reader, buffering the
 * remainder of oversized chunks internally between reads.
 */
export declare class WebStreamDefaultReader extends AbstractStreamReader {
    private reader;
    private buffer;
    constructor(reader: ReadableStreamDefaultReader<Uint8Array>);
    /**
     * Copy chunk to target, and store the remainder in this.buffer
     */
    private writeChunk;
    /**
     * Read from stream
     * @param buffer - Target Uint8Array (or Buffer) to store data read from stream in
     * @param mayBeLess - If true, may fill the buffer partially
     * @returns Promise with the number of bytes read
     */
    protected readFromStream(buffer: Uint8Array, mayBeLess: boolean): Promise<number>;
    abort(): Promise<void>;
    close(): Promise<void>;
}

View File

@@ -0,0 +1,62 @@
import { EndOfStreamError } from './Errors.js';
import { AbstractStreamReader } from "./AbstractStreamReader.js";
export class WebStreamDefaultReader extends AbstractStreamReader {
    constructor(reader) {
        super();
        this.reader = reader;
        this.buffer = null; // Holds leftover bytes from a previous, oversized chunk
    }
    /**
     * Copy as much of `chunk` as fits into `target`; any remainder is kept
     * in this.buffer for the next read.
     * @returns Number of bytes copied into `target`
     */
    writeChunk(target, chunk) {
        const count = Math.min(chunk.length, target.length);
        target.set(chunk.subarray(0, count));
        this.buffer = count < chunk.length ? chunk.subarray(count) : null;
        return count;
    }
    /**
     * Read from stream
     * @param buffer - Target Uint8Array (or Buffer) to store data read from stream in
     * @param mayBeLess - If true, may fill the buffer partially
     * @returns Promise with the number of bytes read
     */
    async readFromStream(buffer, mayBeLess) {
        if (buffer.length === 0)
            return 0;
        let bytesRead = 0;
        // Drain the internal leftover buffer before touching the stream.
        if (this.buffer) {
            bytesRead += this.writeChunk(buffer, this.buffer);
        }
        // Keep pulling chunks until the target is full or the stream ends.
        while (bytesRead < buffer.length && !this.endOfStream) {
            const { done, value } = await this.reader.read();
            if (done) {
                this.endOfStream = true;
                break;
            }
            if (value) {
                bytesRead += this.writeChunk(buffer.subarray(bytesRead), value);
            }
        }
        // A strict read that yielded nothing at end-of-stream is an error.
        if (!mayBeLess && bytesRead === 0 && this.endOfStream) {
            throw new EndOfStreamError();
        }
        return bytesRead;
    }
    abort() {
        this.interrupted = true;
        return this.reader.cancel();
    }
    async close() {
        await this.abort();
        this.reader.releaseLock();
    }
}

14
node_modules/strtok3/lib/stream/WebStreamReader.d.ts generated vendored Normal file
View File

@@ -0,0 +1,14 @@
import { AbstractStreamReader } from "./AbstractStreamReader.js";
/**
 * Common base for readers over WHATWG web streams; owns the underlying
 * stream reader and its lock.
 */
export declare abstract class WebStreamReader extends AbstractStreamReader {
    protected reader: ReadableStreamDefaultReader | ReadableStreamBYOBReader;
    constructor(reader: ReadableStreamDefaultReader | ReadableStreamBYOBReader);
    /**
     * Read from stream
     * @param buffer - Target Uint8Array (or Buffer) to store data read from stream in
     * @param mayBeLess - If true, may fill the buffer partially
     * @returns Promise with the number of bytes read
     */
    protected abstract readFromStream(buffer: Uint8Array, mayBeLess: boolean): Promise<number>;
    abort(): Promise<void>;
    close(): Promise<void>;
}

13
node_modules/strtok3/lib/stream/WebStreamReader.js generated vendored Normal file
View File

@@ -0,0 +1,13 @@
import { AbstractStreamReader } from "./AbstractStreamReader.js";
/**
 * Common base for readers over WHATWG web streams; owns the underlying
 * stream reader and its lock.
 */
export class WebStreamReader extends AbstractStreamReader {
    constructor(reader) {
        super();
        this.reader = reader;
    }
    /**
     * Abort pending operations; for a web-stream reader this is the same
     * as closing it.
     */
    async abort() {
        await this.close();
    }
    /**
     * Release the lock this reader holds on the underlying stream.
     */
    async close() {
        this.reader.releaseLock();
    }
}

View File

@@ -0,0 +1,5 @@
import type { ReadableStream as NodeReadableStream } from 'node:stream/web';
import { WebStreamByobReader } from './WebStreamByobReader.js';
import { WebStreamDefaultReader } from './WebStreamDefaultReader.js';
/** Either a Node.js or a DOM web byte stream of Uint8Array chunks. */
export type AnyWebByteStream = NodeReadableStream<Uint8Array> | ReadableStream<Uint8Array>;
/**
 * Create a stream reader for the given web stream, preferring a BYOB reader
 * and falling back to a default reader when BYOB mode is unavailable.
 */
export declare function makeWebStreamReader(stream: AnyWebByteStream): WebStreamByobReader | WebStreamDefaultReader;

View File

@@ -0,0 +1,19 @@
import { WebStreamByobReader } from './WebStreamByobReader.js';
import { WebStreamDefaultReader } from './WebStreamDefaultReader.js';
/**
 * Create a stream reader for the given web stream.
 * Prefers a BYOB reader; falls back to a default reader when `mode: "byob"`
 * is either rejected with a TypeError or silently ignored by the stream.
 */
export function makeWebStreamReader(stream) {
    let reader;
    try {
        reader = stream.getReader({ mode: "byob" });
    }
    catch (error) {
        if (error instanceof TypeError) {
            // `mode: byob` rejected by a TypeError: use a default reader instead.
            return new WebStreamDefaultReader(stream.getReader());
        }
        throw error;
    }
    // Some implementations ignore `mode: byob` and hand back a default reader.
    return reader instanceof ReadableStreamDefaultReader
        ? new WebStreamDefaultReader(reader)
        : new WebStreamByobReader(reader);
}

6
node_modules/strtok3/lib/stream/index.d.ts generated vendored Normal file
View File

@@ -0,0 +1,6 @@
// Public surface of the stream-reader subsystem: error types, the Node.js
// Readable reader, both web-stream readers, and the factory helper.
export { AbortError, EndOfStreamError } from './Errors.js';
export { StreamReader } from './StreamReader.js';
export { WebStreamByobReader } from './WebStreamByobReader.js';
export { WebStreamDefaultReader } from './WebStreamDefaultReader.js';
export type { IStreamReader } from './AbstractStreamReader.js';
export { type AnyWebByteStream, makeWebStreamReader } from './WebStreamReaderFactory.js';

5
node_modules/strtok3/lib/stream/index.js generated vendored Normal file
View File

@@ -0,0 +1,5 @@
// Public surface of the stream-reader subsystem: error types, the Node.js
// Readable reader, both web-stream readers, and the factory helper.
export { AbortError, EndOfStreamError } from './Errors.js';
export { StreamReader } from './StreamReader.js';
export { WebStreamByobReader } from './WebStreamByobReader.js';
export { WebStreamDefaultReader } from './WebStreamDefaultReader.js';
export { makeWebStreamReader } from './WebStreamReaderFactory.js';

139
node_modules/strtok3/lib/types.d.ts generated vendored Normal file
View File

@@ -0,0 +1,139 @@
import type { IGetToken } from '@tokenizer/token';
/**
 * Descriptive information about the underlying file or stream.
 * All fields are optional; availability depends on the tokenizer source.
 */
export interface IFileInfo {
    /**
     * File size in bytes
     */
    size?: number;
    /**
     * MIME-type of file
     */
    mimeType?: string;
    /**
     * File path
     */
    path?: string;
    /**
     * File URL
     */
    url?: string;
}
/**
 * File information for sources that support random access:
 * the size is guaranteed to be known.
 */
export interface IRandomAccessFileInfo extends IFileInfo {
    /**
     * File size in bytes
     */
    size: number;
}
/**
 * Options controlling a single read or peek operation.
 */
export interface IReadChunkOptions {
    /**
     * Number of bytes to read.
     */
    length?: number;
    /**
     * Position where to begin reading from the file.
     * Default it is `tokenizer.position`.
     * Position may not be less than `tokenizer.position`, unless `supportsRandomAccess()` returns `true`.
     */
    position?: number;
    /**
     * If set, will not throw an EOF error if not all of the requested data could be read
     */
    mayBeLess?: boolean;
}
/**
 * Tokenizer over a source that supports random access (seeking).
 */
export interface IRandomAccessTokenizer extends ITokenizer {
    /**
     * Provide access to information of the underlying information stream or file.
     */
    fileInfo: IRandomAccessFileInfo;
    /**
     * Change the position (offset) of the tokenizer
     * @param position New position
     */
    setPosition(position: number): void;
}
/**
 * The tokenizer allows us to read or peek from the tokenizer-stream.
 * The tokenizer-stream is an abstraction of a stream, file or Buffer.
 */
export interface ITokenizer {
    /**
     * Provide access to information of the underlying information stream or file.
     */
    readonly fileInfo: IFileInfo;
    /**
     * Offset in bytes (= number of bytes read) since beginning of file or stream
     */
    readonly position: number;
    /**
     * Peek (read ahead) buffer from tokenizer
     * @param buffer - Target buffer to fill with data peeked from the tokenizer-stream
     * @param options - Read behaviour options
     * @returns Promise with number of bytes read
     */
    peekBuffer(buffer: Uint8Array, options?: IReadChunkOptions): Promise<number>;
    /**
     * Read buffer from tokenizer
     * @param buffer - Target buffer to fill with data read from the tokenizer-stream
     * @param options - Additional read options
     * @returns Promise with number of bytes read
     */
    readBuffer(buffer: Uint8Array, options?: IReadChunkOptions): Promise<number>;
    /**
     * Peek a token from the tokenizer-stream.
     * @param token - Token to peek from the tokenizer-stream.
     * @param position - Offset where to begin reading within the file. If position is null, data will be read from the current file position.
     * @param maybeless - If set, will not throw an EOF error if less than the requested length could be read.
     */
    peekToken<T>(token: IGetToken<T>, position?: number | null, maybeless?: boolean): Promise<T>;
    /**
     * Read a token from the tokenizer-stream.
     * @param token - Token to read from the tokenizer-stream.
     * @param position - Offset where to begin reading within the file. If position is null, data will be read from the current file position.
     */
    readToken<T>(token: IGetToken<T>, position?: number): Promise<T>;
    /**
     * Peek a numeric token from the stream
     * @param token - Numeric token
     * @returns Promise with number
     */
    peekNumber(token: IGetToken<number>): Promise<number>;
    /**
     * Read a numeric token from the stream
     * @param token - Numeric token
     * @returns Promise with number
     */
    readNumber(token: IGetToken<number>): Promise<number>;
    /**
     * Ignore given number of bytes
     * @param length - Number of bytes ignored
     */
    ignore(length: number): Promise<number>;
    /**
     * Clean up resources.
     * It does not close the stream for StreamReader, but it does close the file-descriptor.
     */
    close(): Promise<void>;
    /**
     * Abort pending asynchronous operations
     */
    abort(): Promise<void>;
    /**
     * Returns true when the underlying file supports random access
     */
    supportsRandomAccess(): boolean;
}
/** Handler invoked when the tokenizer is closed. */
export type OnClose = () => Promise<void>;
/**
 * Options accepted by tokenizer constructors.
 */
export interface ITokenizerOptions {
    /**
     * Pass additional file information to the tokenizer
     */
    fileInfo?: IFileInfo;
    /**
     * On tokenizer close handler
     */
    onClose?: OnClose;
    /**
     * Pass `AbortSignal` which can stop active async operations
     * Ref: https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal
     */
    abortSignal?: AbortSignal;
}

1
node_modules/strtok3/lib/types.js generated vendored Normal file
View File

@@ -0,0 +1 @@
// This module only declares types; the empty export marks it as an ES module.
export {};