diff --git a/tweetokenize/tokenizer.py b/tweetokenize/tokenizer.py
index 10e1c3a..fc07108 100644
--- a/tweetokenize/tokenizer.py
+++ b/tweetokenize/tokenizer.py
@@ -263,7 +263,7 @@ def tokenize(self, message):
         message = self.quotes_re.sub(" ", message)
         message = self._replacetokens(self.tokenize_re.findall(message))
         if self.ignorestopwords:
-            message = [word for word in message if word not in self._stopwords]
+            message = [word for word in message if word.lower() not in self._stopwords]
         return message
 
     def emoticons(self, iterable=None, filename=None):