diff --git a/source/errors.mjs b/source/errors.mjs
index 7ef6e37..bdc1f72 100644
--- a/source/errors.mjs
+++ b/source/errors.mjs
@@ -1,5 +1,17 @@
 import { inspect } from 'node:util';
+// § GROUP: Regexp tokenization
+
+export class Tokenization_Error extends Error {
+	constructor(data) {
+		// `data` is { parser, value, index, end_index }; kept whole on the instance for programmatic inspection.
+		const { value, index, end_index } = data;
+		super(`Tokenization failed at index ${index}${end_index == null ? '' : `..${end_index}`}: ${inspect(value)}`);
+		this.name = 'Tokenization_Error';	// So the error reports under its own name, not plain "Error".
+		this.data = data;
+	}
+}
+
 
 // § GROUP: Configuration field errors
 
 export class Data_Validation_Failed extends Error {
diff --git a/source/parsing/regexp-dispatch.mjs b/source/parsing/regexp-dispatch.mjs
index 300e802..31ba098 100644
--- a/source/parsing/regexp-dispatch.mjs
+++ b/source/parsing/regexp-dispatch.mjs
@@ -1,4 +1,6 @@
 import * as RE from '@efforting.tech/text/regexp';
+import { Tokenization_Error } from '@efforting.tech/errors';
+
 
 // NOTE: There are some open questions about this implementation and API which may change as the library matures.
 // Check out the example at experiments/regexp-tokenizer.mjs for more information on how to use this in its current state.
@@ -104,7 +106,7 @@ export class RegExp_Tokenizer {
 	_handle_default_match(value, index, end_index=null) {
 		const { default_action } = this;
 		if (!default_action) {
-			throw new Parsing_Error({ parser: this, value, index, end_index });
+			throw new Tokenization_Error({ parser: this, value, index, end_index });
 		}
 		return new Default_Match(value, index, end_index, default_action);
 	}