Method: pylint.checkers.refactoring.refactoring_checker._is_trailing_comma
Calls: 188787, Exceptions: 9, Paths: 4
Path 1: 181158 calls (0.96)
  tokens argument: list (181158)
  index argument, ten most frequent values: 0 (1121), 1 (1121), 3 (1003), 2 (1002), 4 (992), 5 (988), 6 (983), 7 (965), 8 (953), 9 (937)
  Return value: False (181158)
def _is_trailing_comma(tokens: list[tokenize.TokenInfo], index: int) -> bool:
    """Check if the given token is a trailing comma.

    :param tokens: Sequence of modules tokens
    :type tokens: list[tokenize.TokenInfo]
    :param int index: Index of token under check in tokens
    :returns: True if the token is a comma which trails an expression
    :rtype: bool
    """
    token = tokens[index]
    if token.exact_type != tokenize.COMMA:
        return False
    # Must have remaining tokens on the same line such as NEWLINE
    left_tokens = list(itertools.islice(tokens, index + 1, None))

    more_tokens_on_line = False
    for remaining_token in left_tokens:
        if remaining_token.start[0] == token.start[0]:
            more_tokens_on_line = True
            # If one of the remaining same line tokens is not NEWLINE or COMMENT
            # the comma is not trailing.
            if remaining_token.type not in (tokenize.NEWLINE, tokenize.COMMENT):
                return False

    if not more_tokens_on_line:
        return False

    def get_curline_index_start() -> int:
        """Get the index denoting the start of the current line."""
        for subindex, token in enumerate(reversed(tokens[:index])):
            # See Lib/tokenize.py and Lib/token.py in cpython for more info
            if token.type == tokenize.NEWLINE:
                return index - subindex
        return 0

    curline_start = get_curline_index_start()
    expected_tokens = {"return", "yield"}
    return any(
        "=" in prevtoken.string or prevtoken.string in expected_tokens
        for prevtoken in tokens[curline_start:index]
    )
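To make the report's dominant outcomes concrete, here is a minimal usage sketch (not part of the report); the import path is copied from the method header above and may differ between pylint versions.

import io
import tokenize

# Hypothetical usage sketch: the import path is taken from the report header
# and may change between pylint versions.
from pylint.checkers.refactoring.refactoring_checker import _is_trailing_comma

for source in ("return 1,\n", "x = [1, 2]\n"):
    tokens = list(tokenize.generate_tokens(io.StringIO(source).readline))
    for i, tok in enumerate(tokens):
        if tok.exact_type == tokenize.COMMA:
            # True only when the comma ends the expression on its physical line:
            # the comma in "return 1," qualifies, the one inside "[1, 2]" does not.
            print(repr(source), i, _is_trailing_comma(tokens, i))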
Path 2: 7617 calls (0.04)
  tokens argument: list (7617)
  index argument, ten most frequent values: 11 (36), 47 (35), 26 (34), 14 (33), 34 (32), 36 (31), 60 (28), 35 (26), 58 (26), 9 (25)
  Return value: False (7617)
Path 3: 9 calls (0.0)
  tokens argument: list (9)
  index argument values: 8 (1), 14 (1), 20 (1), 28 (1), 145 (1), 167 (1), 182 (1), 193 (1), 214 (1)
  Return value: True (9)
  Exception: GeneratorExit (9)
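The GeneratorExit entries are consistent with the any(...) call at the end of the function: when any() short-circuits on a truthy element, the suspended generator expression it was consuming is closed, and closing a suspended generator raises GeneratorExit inside its frame, which a tracer can record even though the call itself returns True. A minimal sketch of that behaviour, independent of pylint and assuming CPython's reference-counting cleanup:

def noisy(values):
    try:
        for value in values:
            yield value
    except GeneratorExit:
        # Raised when the suspended generator is closed after any() stops early.
        print("GeneratorExit raised inside the suspended generator")
        raise

# any() stops at the first truthy value, leaving the generator expression
# suspended; under CPython it is closed (and GeneratorExit raised) immediately.
result = any(value for value in noisy([False, True, False]))
print(result)  # True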
Path 4: 3 calls (0.0)
  tokens argument: list (3)
  index argument values: 216 (1), 120 (1), 82 (1)
  Return value: False (3)
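For completeness, a comma is also rejected, before the backwards scan for "=", return, or yield, when no token at all follows it on its physical line, for example ahead of a backslash continuation. A hedged sketch of that early return (the report does not indicate which of its paths, if any, corresponds to it; same caveat about the import path as above):

import io
import tokenize

from pylint.checkers.refactoring.refactoring_checker import _is_trailing_comma

# The comma is the last token on its physical line (the backslash continuation
# produces no token), so more_tokens_on_line stays False and the helper
# returns False without scanning backwards.
source = "x = 1, \\\n    2\n"
tokens = list(tokenize.generate_tokens(io.StringIO(source).readline))
comma_index = next(i for i, t in enumerate(tokens) if t.exact_type == tokenize.COMMA)
print(_is_trailing_comma(tokens, comma_index))  # False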