Path 1: 23 calls (0.56 of all calls)

code: 'import io\nimport re\nimport sys\nfrom typing import List\n\nimport pytest\n\nfrom rich.console import Console\nfrom rich.theme import Theme\nfrom ri...

line_range: (2, 10) (2), (48, 54) (1), (79, 85) (1), (77, 87) (1), (111, 117) (1), (109, 119) (1), (2, 3) (1), (33, 39) (1), (25, 31) (1), (28, 34) (1)

returns: Text (23)
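
Path 1 is taken when a line_range argument is supplied, so only part of the input is stylized. A minimal sketch of a call that should land on this path (assuming a standard rich install; the source string is illustrative):

    from rich.syntax import Syntax

    syntax = Syntax("", "python")  # lexer resolves to a Pygments Python lexer
    source = "import io\nimport re\nimport sys\n"
    # Passing line_range selects the partial-stylize branch in the listing below.
    text = syntax.highlight(source, line_range=(2, 3))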

def highlight(
    self,
    code: str,
    line_range: Optional[Tuple[Optional[int], Optional[int]]] = None,
) -> Text:
    """Highlight code and return a Text instance.

    Args:
        code (str): Code to highlight.
        line_range (Tuple[int, int], optional): Optional line range to highlight.

    Returns:
        Text: A text instance containing highlighted syntax.
    """

    base_style = self._get_base_style()
    justify: JustifyMethod = (
        "default" if base_style.transparent_background else "left"
    )

    text = Text(
        justify=justify,
        style=base_style,
        tab_size=self.tab_size,
        no_wrap=not self.word_wrap,
    )
    _get_theme_style = self._theme.get_style_for_token

    lexer = self.lexer

    if lexer is None:
        text.append(code)
    else:
        if line_range:
            # More complicated path to only stylize a portion of the code.
            # This speeds up further operations as there are fewer spans to process.
            line_start, line_end = line_range

            def line_tokenize() -> Iterable[Tuple[Any, str]]:
                """Split tokens to one per line."""
                assert lexer  # required to make MyPy happy - we know lexer is not None at this point

                for token_type, token in lexer.get_tokens(code):
                    while token:
                        line_token, new_line, token = token.partition("\n")
                        yield token_type, line_token + new_line

            def tokens_to_spans() -> Iterable[Tuple[str, Optional[Style]]]:
                """Convert tokens to spans."""
                tokens = iter(line_tokenize())
                line_no = 0
                _line_start = line_start - 1 if line_start else 0

                # Skip over tokens until line start
                while line_no < _line_start:
                    _token_type, token = next(tokens)
                    yield (token, None)
                    if token.endswith("\n"):
                        line_no += 1
                # Generate spans until line end
                for token_type, token in tokens:
                    yield (token, _get_theme_style(token_type))
                    if token.endswith("\n"):
                        line_no += 1
                        if line_end and line_no >= line_end:
                            break

            text.append_tokens(tokens_to_spans())

        else:
            text.append_tokens(
                (token, _get_theme_style(token_type))
                for token_type, token in lexer.get_tokens(code)
            )
        if self.background_color is not None:
            text.stylize(f"on {self.background_color}")

    if self._stylized_ranges:
        self._apply_stylized_ranges(text)

    return text
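
The line_range branch first re-chunks the Pygments tokens so that no token spans more than one line, which is what makes line counting in tokens_to_spans() possible. A standalone sketch of that splitting logic (split_token is a hypothetical helper, not part of rich):

    from typing import Iterable

    def split_token(token: str) -> Iterable[str]:
        # Same partition loop as line_tokenize() above: each yielded
        # piece ends with at most one newline.
        while token:
            line_token, new_line, token = token.partition("\n")
            yield line_token + new_line

    assert list(split_token("a\nb\nc")) == ["a\n", "b\n", "c"]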

Path 2: 13 calls (0.32 of all calls)

code: 'import this\n' (3), 'def loop_first_last(values: Iterable[T]) -> Iterable[Tuple[bool, bool, T]]:\n """Iterate and generate a tuple with a flag for ...

line_range: None (13)

returns: Text (13)

(function source identical to the listing under Path 1)
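
Path 2 covers calls where line_range is left as None: every token from the lexer is converted to a styled span in a single generator expression. A hedged reproduction, assuming a standard rich install:

    from rich.syntax import Syntax

    syntax = Syntax("", "python")
    # No line_range, so highlight() stylizes the whole input.
    text = syntax.highlight("import this\n")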

Path 3: 2 calls (0.05 of all calls)

code: 'foobar\n' (2)

line_range: None (2)

returns: Text (2)

(function source identical to the listing under Path 1)
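
Given the plain input and line_range of None, Path 3 plausibly corresponds to the lexer-is-None branch, which appends the code with no styling at all. One way to reach that branch, assuming Syntax.lexer resolves to None for an unrecognized lexer name:

    from rich.syntax import Syntax

    # Assumption: an unknown lexer name yields lexer = None, so
    # highlight() falls back to text.append(code).
    syntax = Syntax("", "no-such-lexer")
    text = syntax.highlight("foobar\n")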

Path 4: 1 call (0.02 of all calls)

code: 'def loop_first_last(values: Iterable[T]) -> Iterable[Tuple[bool, bool, T]]:\n """Iterate and generate a tuple with a flag for first and last value...

line_range: None (1)

returns: Text (1)

(function source identical to the listing under Path 1)

Path 5: 1 call (0.02 of all calls)

code: 'def loop_first_last(values: Iterable[T]) -> Iterable[Tuple[bool, bool, T]]:\n """Iterate and generate a tuple with a flag for first and last value...

line_range: (2, 10) (1)

returns: Text (1)

(function source identical to the listing under Path 1)
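
Path 5 passes line_range=(2, 10). In the listing under Path 1, line_start is decremented by one, so tokens before line 2 are yielded with style None, and span generation stops once line 10 has been completed: lines 2 through 10 inclusive end up styled. A sketch, with an illustrative twelve-line input:

    from rich.syntax import Syntax

    code = "\n".join(f"line {n}" for n in range(1, 13)) + "\n"
    syntax = Syntax("", "python")
    # Line 1 is emitted unstyled, lines 2-10 get theme styles, and
    # lines 11-12 are never turned into spans.
    text = syntax.highlight(code, line_range=(2, 10))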

Path 6: 1 call (0.02 of all calls)

code: 'def loop_first_last(values: Iterable[T]) -> Iterable[Tuple[bool, bool, T]]:\n """Iterate and generate a tuple with a flag for first and last value...

line_range: None (1)

returns: Text (1)

(function source identical to the listing under Path 1)
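
Paths 4 and 6 show the same arguments and return type as Path 2, so the divergence must come from branches that depend on instance state rather than on the arguments, e.g. background_color or _stylized_ranges. A hedged example of forcing one such branch:

    from rich.syntax import Syntax

    # Assumption: a non-None background_color makes highlight() take the
    # extra text.stylize(f"on {self.background_color}") step.
    syntax = Syntax("", "python", background_color="black")
    text = syntax.highlight("import this\n")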