From 48ac00b548a03346ce741f08b6f982ba2df5340e Mon Sep 17 00:00:00 2001
From: Stefano Pigozzi
Date: Fri, 10 Feb 2023 03:19:17 +0100
Subject: [PATCH] Document and fix imports in `.tokenizer`

---
 unimore_bda_6/tokenizer/__init__.py | 22 ++++++++--------------
 1 file changed, 8 insertions(+), 14 deletions(-)

diff --git a/unimore_bda_6/tokenizer/__init__.py b/unimore_bda_6/tokenizer/__init__.py
index 777174f..290c706 100644
--- a/unimore_bda_6/tokenizer/__init__.py
+++ b/unimore_bda_6/tokenizer/__init__.py
@@ -1,15 +1,9 @@
-from .base import BaseTokenizer
-from .nltk_word_tokenize import NLTKWordTokenizer
-from .potts import PottsTokenizer, PottsTokenizerWithNegation
-from .plain import PlainTokenizer
-from .lower import LowercaseTokenizer
+"""
+This module contains all implemented tokenizers.
+"""
 
-
-__all__ = (
-    "BaseTokenizer",
-    "NLTKWordTokenizer",
-    "PottsTokenizer",
-    "PottsTokenizerWithNegation",
-    "PlainTokenizer",
-    "LowercaseTokenizer",
-)
+from .base import *
+from .nltk_word_tokenize import *
+from .potts import *
+from .plain import *
+from .lower import *