Path 1: 26210 calls (0.65)

tokens: TokenWrapper (26210)

line_end values (count): 0 (712) 7 (222) 9 (212) 1 (209) 3 (206) 2 (199) 15 (180) 13 (178) 6 (175) 4 (175)

line_start values (count): 1 (712) 8 (215) 2 (209) 10 (209) 4 (204) 3 (199) 15 (180) 5 (175) 19 (168) 7 (166)

def new_line(self, tokens: TokenWrapper, line_end: int, line_start: int) -> None:
    """A new line has been encountered, process it if necessary."""
    # Flag a semicolon used as a statement terminator at the end of the line.
    if _last_token_on_line_is(tokens, line_end, ";"):
        self.add_message("unnecessary-semicolon", line=tokens.start_line(line_end))

    line_num = tokens.start_line(line_start)
    line = tokens.line(line_start)
    # Store the line's text for later line-based checks unless it starts
    # with a junk token.
    if tokens.type(line_start) not in _JUNK_TOKENS:
        self._lines[line_num] = line.split("\n")[0]
    self.check_lines(tokens, line_start, line, line_num)
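
The method and its _last_token_on_line_is helper need only a handful of TokenWrapper accessors. A minimal sketch of that interface, inferred from the calls above rather than taken from pylint's actual class:

import tokenize
from typing import List

class TokenWrapper:
    # Sketch only: wraps a raw tokenize stream and exposes the per-index
    # accessors new_line() relies on; the real pylint class may differ.
    def __init__(self, tokens: List[tokenize.TokenInfo]) -> None:
        self._tokens = tokens

    def token(self, idx: int) -> str:
        return self._tokens[idx].string  # token text, e.g. ";"

    def type(self, idx: int) -> int:
        return self._tokens[idx].type  # tokenize token type constant

    def start_line(self, idx: int) -> int:
        return self._tokens[idx].start[0]  # 1-based physical line number

    def line(self, idx: int) -> str:
        return self._tokens[idx].line  # full text of the source line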

Path 2: 13987 calls (0.35)

tokens: TokenWrapper (13987)

line_end values (count): 2 (770) 0 (396) 4 (370) 6 (220) 5 (211) 3 (168) 8 (124) 7 (122) 9 (85) 14 (76)

line_start values (count): 3 (770) 1 (396) 5 (370) 7 (220) 6 (211) 4 (168) 9 (124) 8 (122) 10 (85) 15 (76)

(method body identical to the listing under Path 1)
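
The semicolon branch delegates to _last_token_on_line_is. A plausible sketch, assuming the helper looks one token back from line_end and also tolerates a comment between the semicolon and the newline (an inference from the call site, not pylint's verbatim code):

import tokenize

def _last_token_on_line_is(tokens: TokenWrapper, line_end: int, token: str) -> bool:
    # True if the last significant token before index line_end is `token`,
    # either immediately or with a single comment token in between.
    if line_end > 0 and tokens.token(line_end - 1) == token:
        return True
    return (
        line_end > 1
        and tokens.token(line_end - 2) == token
        and tokens.type(line_end - 1) == tokenize.COMMENT
    )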

Path 3: 1 calls (0.0)

tokens: TokenWrapper (1)

line_end values (count): 56 (1)

line_start values (count): 57 (1)

(method body identical to the listing under Path 1)
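
Path 3's single call (line_end 56, line_start 57) plausibly hit the rare branch where the semicolon message fires. The histograms also suggest line_start is line_end + 1 in nearly every call. A self-contained harness that reproduces both live branches on a toy input, using the sketches above and those assumed offsets:

import io
import tokenize

# Assumption: junk tokens are comments and non-logical newlines.
_JUNK_TOKENS = (tokenize.COMMENT, tokenize.NL)

src = "x = 1;\ny = 2\n"
toks = list(tokenize.generate_tokens(io.StringIO(src).readline))
tw = TokenWrapper(toks)
lines = {}

for idx, tok in enumerate(toks):
    if tok.type != tokenize.NEWLINE:
        continue
    line_end, line_start = idx, idx + 1  # offsets assumed from the histograms
    if _last_token_on_line_is(tw, line_end, ";"):
        print("unnecessary-semicolon on line", tw.start_line(line_end))
    # ENDMARKER guard is demo-only; each call records the line *after* the
    # newline, so line 1 itself is never stored here.
    if tw.type(line_start) not in _JUNK_TOKENS + (tokenize.ENDMARKER,):
        lines[tw.start_line(line_start)] = tw.line(line_start).split("\n")[0]

print(lines)  # {2: 'y = 2'}, after flagging the semicolon on line 1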