from toiro import tokenizers > available_tokenizers = tokenizers.available_tokenizers() > print(available_tokenizers) > pip install toiro[all_tokenizers] { "nagisa":{"is_available":true,"version":"0.2.7"}, "janome":{"is_available":true,"version":"0.4.1"}, "mecab-python3":{"is_available":false,"version":false}, "sudachipy":{"is_available":true,"version":"0.6.2"}, "spacy":{"is_available":true,"version":"3.2.1"}, "ginza":{"is_available":false,"version":false}, "kytea":{"is_available":false,"version":false}, "jumanpp":{"is_available":false,"version":false}, "sentencepiece":{"is_available":true,"version":"0.1.91"}, "fugashi-ipadic":{"is_available":false,"version":false}, "tinysegmenter":{"is_available":true,"version":"0.1.0"}, "fugashi-unidic":{"is_available":false,"version":false} } インストール (Install) https://github.com/taishi-i/toiro 形態素解析器比較ライブラリ (morphological analyzer comparison library)