Path 1: 869 calls (0.78 of total)

list (869)

def process_tokens(self, tokens: list[tokenize.TokenInfo]) -> None:
    """Inspect the source to find fixme problems."""
    if not self.linter.config.notes:
        return
    for token_info in tokens:
        if token_info.type != tokenize.COMMENT:
            continue
        comment_text = token_info.string[1:].lstrip()  # trim '#' and whitespace
        if self._fixme_pattern.search("#" + comment_text.lower()):
            self.add_message(
                "fixme",
                col_offset=token_info.start[1] + 1,
                args=comment_text,
                line=token_info.start[0],
            )
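This hot path is easy to reproduce outside pylint. The sketch below mimics the loop above on a raw source string; the regex is only an approximation of pylint's `_fixme_pattern`, which is built at runtime from the configured notes (FIXME, XXX, TODO by default):

import re
import tokenize
from io import StringIO

# Approximation of pylint's _fixme_pattern for the default notes
# (FIXME, XXX, TODO); the real pattern is derived from config.
FIXME_PATTERN = re.compile(r"#\s*(fixme|xxx|todo)\b", re.IGNORECASE)

def scan_for_fixmes(source: str) -> list[tuple[int, str]]:
    """Mimic process_tokens: collect (line, comment) for matching comments."""
    hits = []
    for token_info in tokenize.generate_tokens(StringIO(source).readline):
        if token_info.type != tokenize.COMMENT:
            continue
        comment_text = token_info.string[1:].lstrip()
        if FIXME_PATTERN.search("#" + comment_text.lower()):
            hits.append((token_info.start[0], comment_text))
    return hits

print(scan_for_fixmes("x = 1  # TODO: rename\ny = 2  # just a comment\n"))
# -> [(1, 'TODO: rename')]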

Path 2: 233 calls (0.21 of total)

list (233)

(code identical to Path 1)

Path 3: 13 calls (0.01 of total)

list (13)

(code identical to Path 1)

Path 4: 1 call (<0.01 of total)

list (1)

None (1)

(code identical to Path 1)