Commit 745afd1

Merge branch 'master' into rmse_score_fix
mosheraboh authored Oct 6, 2024
2 parents 711add2 + e0c7b73 commit 745afd1
Showing 1 changed file with 17 additions and 1 deletion.
fuse/data/tokenizers/modular_tokenizer/op.py (17 additions, 1 deletion)
@@ -116,7 +116,7 @@ def get_max_token_id(self) -> Tuple[str, int]:
     def get_min_max_sentinels(
         self,
         sentinel_prefix: Optional[str] = "<SENTINEL_ID_",
-        integer_find_regex: Optional[str] = "\d{1,}",
+        integer_find_regex: Optional[str] = r"\d{1,}",
     ) -> Tuple[int, int]:
         """
         returns a Tuple [min encountered sentinel name, max encountered sentinel name]
@@ -186,6 +186,22 @@ def get_max_len(
         """
         return self._tokenizer.get_expected_max_len(override_max_len=override_max_len)
 
+    def add_new_special_tokens(self, new_special_tokens: list[str]) -> int:
+        """Add new special tokens if they are not already in the tokenizer.
+        Skips special tokens that already exist.
+        Args:
+            new_special_tokens (list[str]): the tokens to add
+        Returns:
+            `int`: the number of tokens that were added to the vocabulary
+        Will raise an exception if any of the tokens is already in the tokenizer as a _regular_ token.
+        """
+
+        tokenizer = self._tokenizer
+        num_new_tokens = tokenizer.add_special_tokens(new_special_tokens)
+        return num_new_tokens

     def __call__(
         self,
         sample_dict: NDict,
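
A note on the one-line change in get_min_max_sentinels above: it replaces the plain string "\d{1,}" with the raw string r"\d{1,}". In a plain literal, \d is an invalid escape sequence that Python currently passes through unchanged but flags with a DeprecationWarning (a SyntaxWarning on recent versions). A minimal sketch of the difference; the sentinel token name below is a made-up example that follows the default "<SENTINEL_ID_" prefix:

import re

# In a plain string literal, "\d" is an invalid escape sequence: Python
# happens to leave it unchanged today, but warns and may reject it in a
# future version. A raw string hands the backslash to re untouched.
pattern = re.compile(r"\d{1,}")

# Made-up sentinel token name, matching the default sentinel_prefix
# "<SENTINEL_ID_" used by get_min_max_sentinels.
match = pattern.search("<SENTINEL_ID_7>")
print(match.group(0))  # prints: 7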

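The new add_new_special_tokens method simply delegates to the underlying modular tokenizer's add_special_tokens and reports how many tokens were actually added. A hypothetical usage sketch; the tokenizer_op instance and the token names are assumptions for illustration, not part of this commit:

# Assumes tokenizer_op is an already-constructed instance of the tokenizer
# op defined in fuse/data/tokenizers/modular_tokenizer/op.py.
new_tokens = ["<TASK_A>", "<TASK_B>"]  # made-up special token names

# Returns the number of tokens actually added: tokens that already exist as
# special tokens are skipped, and an exception is raised if any of them is
# already present as a regular (non-special) token.
num_added = tokenizer_op.add_new_special_tokens(new_tokens)

if num_added > 0:
    # If the vocabulary grew, any model embedding table tied to this
    # tokenizer would typically need resizing to match (e.g.
    # model.resize_token_embeddings(...) in Hugging Face-style APIs).
    print(f"Added {num_added} new special tokens")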