# 英文语料的分词处理 (tokenization of an English-language corpus)

"""
实现额外的方法
"""
import re

def tokenlize(sentence):
    """
    Tokenize a raw English sentence into a list of lowercase words.

    Punctuation, HTML line breaks ("<br />"), control characters, and a few
    Windows-1252 dash remnants (\x96, \x97) are replaced with spaces before
    splitting, so tokens contain only word text.

    :param sentence: str, raw input text
    :return: list of non-empty str tokens
    """
    # Characters to strip. NOTE: these are plain characters, so each one must
    # be regex-escaped before being joined into an alternation pattern --
    # otherwise metacharacters like '*', '+', '(' would break the regex.
    fileters = ['!', '"', '#', '$', '%', '&', '(', ')', '*', '+', ',', '-', '.', '/', ':', ';', '<', '=', '>',
                '?', '@', '[', '\\', ']', '^', '_', '`', '{', '|', '}', '~', '\t', '\n',
                '\x97', '\x96', '”', '“']
    sentence = sentence.lower()  # normalize case so tokens compare equal
    sentence = re.sub("<br />", " ", sentence)  # drop HTML line breaks (common in IMDB-style corpora)
    # re.escape each filter so every character is matched literally
    sentence = re.sub("|".join(re.escape(f) for f in fileters), " ", sentence)
    # split on spaces and discard the empty strings left by consecutive separators
    result = [i for i in sentence.split(" ") if len(i) > 0]

    return result

  

# 多思考也是一种努力,做出正确的分析和选择,因为我们的时间和精力都有限,所以把时间花在更有价值的地方。
# 原文地址 (original article): https://www.cnblogs.com/LiuXinyu12378/p/12318769.html