char_tokenizer.py

from pathlib import Path
from typing import Iterable, List, Optional, Union
import warnings

from funasr.text.abs_tokenizer import AbsTokenizer


class CharTokenizer(AbsTokenizer):
    """Character-level tokenizer.

    Non-linguistic symbols (e.g. "<noise>") are matched greedily and either
    kept as single tokens or dropped, depending on
    ``remove_non_linguistic_symbols``.
    """

    def __init__(
        self,
        non_linguistic_symbols: Optional[Union[Path, str, Iterable[str]]] = None,
        space_symbol: str = "<space>",
        remove_non_linguistic_symbols: bool = False,
    ):
        self.space_symbol = space_symbol
        if non_linguistic_symbols is None:
            self.non_linguistic_symbols = set()
        elif isinstance(non_linguistic_symbols, (Path, str)):
            # A str/Path argument is treated as a file with one symbol per line.
            non_linguistic_symbols = Path(non_linguistic_symbols)
            try:
                with non_linguistic_symbols.open("r", encoding="utf-8") as f:
                    self.non_linguistic_symbols = set(line.rstrip() for line in f)
            except FileNotFoundError:
                warnings.warn(f"{non_linguistic_symbols} doesn't exist.")
                self.non_linguistic_symbols = set()
        else:
            self.non_linguistic_symbols = set(non_linguistic_symbols)
        self.remove_non_linguistic_symbols = remove_non_linguistic_symbols

    def __repr__(self):
        return (
            f"{self.__class__.__name__}("
            f'space_symbol="{self.space_symbol}", '
            f'non_linguistic_symbols="{self.non_linguistic_symbols}"'
            f")"
        )

    def text2tokens(self, line: Union[str, list]) -> List[str]:
        tokens = []
        while len(line) != 0:
            # First check for a non-linguistic symbol at the current position,
            # so multi-character symbols are emitted (or dropped) as a unit.
            for w in self.non_linguistic_symbols:
                if line.startswith(w):
                    if not self.remove_non_linguistic_symbols:
                        tokens.append(line[: len(w)])
                    line = line[len(w) :]
                    break
            else:
                # Otherwise consume a single character; use the configured
                # space symbol for " " so that tokens2text can round-trip it.
                t = line[0]
                if t == " ":
                    t = self.space_symbol
                tokens.append(t)
                line = line[1:]
        return tokens

    def tokens2text(self, tokens: Iterable[str]) -> str:
        tokens = [t if t != self.space_symbol else " " for t in tokens]
        return "".join(tokens)