from .... import nltk, spacy, re from abc import ABC, abstractmethod import unittest class NLPInterface(ABC): """ 定义一个抽象基类,用于处理自然语言处理(NLP)任务。 """ @abstractmethod def __init__(self): """ 初始化方法,子类必须实现。 """ pass @abstractmethod def split_sentences(self, text): """ 将输入的文本分割成句子列表。 Args: text (str): 需要分割的文本。 Returns: list: 分割后的句子列表。 """ pass @staticmethod def _print_sentences(sentences,count=(20, 20)): """ 打印分割后的句子列表,根据语言类型区分。 Args: sentences (list): 分割后的句子列表。 """ filtered_sentences = [] for sentence in sentences: sentence = re.sub(re.compile("\t|\n"), '', str(sentence)) if re.match(r'^[a-zA-Z]', sentence): if len(sentence.split()) > count[0]: filtered_sentences.append(str(sentence)) elif re.match(r'^[\u4e00-\u9fa5]', sentence): if len(sentence) >= count[1]: filtered_sentences.append(str(sentence)) return filtered_sentences class SpacyNLP(NLPInterface): """ 使用Spacy进行NLP处理的类。 """ def __init__(self): """ 初始化Spacy模型。 """ self.nlp = spacy.load('zh_core_web_sm') def split_sentences(self, text): """ 使用Spacy分割文本成句子列表。 Args: text (str): 需要分割的文本。 Returns: list: 分割后的句子列表。 """ doc = self.nlp(text) sentences = [sent.text for sent in doc.sents] sentences = self._print_sentences(sentences,(5,5)) return sentences class NLTKNLP(NLPInterface): """ 使用NLTK进行NLP处理的类。 """ def __init__(self): """ 初始化NLTK分割句子模型。 """ self.nlp = nltk.sent_tokenize def split_sentences(self, text): """ 使用NLTK分割文本成句子列表。 Args: text (str): 需要分割的文本。 Returns: list: 分割后的句子列表。 """ sentences = [sent for sent in self.nlp(text) if sent] sentences = self._print_sentences(sentences,(5,5)) return sentences class TestNLPProcessor(unittest.TestCase): def setUp(self): self.spacy_nlp = SpacyNLP() self.nltk_nlp = NLTKNLP() def test_spacy_split_sentences(self): text = "This is a test sentence. Another sentence for testing." expected_sentences = ["This is a test sentence.", "Another sentence for testing."] # self.assertEqual(self.spacy_nlp.split_sentences(text), expected_sentences) print(self.spacy_nlp.split_sentences(text)) def test_nltk_split_sentences(self): text = "This is a test sentence. Another sentence for testing." expected_sentences = ["This is a test sentence.", "Another sentence for testing."] # self.assertEqual(self.nltk_nlp.split_sentences(text), expected_sentences) print(self.spacy_nlp.split_sentences(text)) if __name__ == '__main__': unittest.main()