Path 1: 71 calls (0.84)

'1' (11) ')' (6) '0' (3) 'Layout(' (3) '(' (3) 'Layout()' (2) " name='foo'" (2) ' 1,' (2) 'Hello' (1) '{' (1)

_AnsiToken (71) None (71)

def _ansi_tokenize(ansi_text: str) -> Iterable[_AnsiToken]:
    """Tokenize a string into plain text and ANSI codes.

    Args:
        ansi_text (str): A string containing ANSI codes.

    Yields:
        AnsiToken: A named tuple of (plain, sgr, osc)
    """

    position = 0
    sgr: Optional[str]
    osc: Optional[str]
    for match in re_ansi.finditer(ansi_text):
        start, end = match.span(0)
        osc, sgr = match.groups()
        if start > position:
            # Text between the previous match and this escape is plain text.
            yield _AnsiToken(ansi_text[position:start])
        if sgr:
            # Only SGR sequences (ending in "m") become tokens; other CSI
            # sequences such as "\x1b[K" are consumed without yielding.
            if sgr.endswith("m"):
                yield _AnsiToken("", sgr[1:-1], osc)
        else:
            # The OSC alternative of re_ansi matched, so yield its payload.
            yield _AnsiToken("", sgr, osc)
        position = end
    if position < len(ansi_text):
        # Any text after the final escape sequence is plain text.
        yield _AnsiToken(ansi_text[position:])
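
This path covers calls whose input contains no escape sequences at all (plain fragments such as '1', ')' and 'Layout('), so the re_ansi loop never fires and the whole string falls through to the final yield as a single plain token. A minimal sketch of that behaviour, assuming the helper is importable as rich.ansi._ansi_tokenize (the report does not name the module, so the import path is an assumption):

from rich.ansi import _ansi_tokenize  # assumed location of this private helper

tokens = list(_ansi_tokenize("Layout()"))
# No ANSI codes are present, so the only token is the trailing plain-text yield.
print(len(tokens), repr(tokens[0].plain))  # 1 'Layout()'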

Path 2: 7 calls (0.08)

'\x1b[1mfoo\x1b[0m' (1) '\x1b[38;2;255;0;0;48;5;200mred\x1b[0m' (1) '\x1b[38;5;200;48;2;255;0;0mred\x1b[0m' (1) ' | \x1b[01;35m\x1b[K^\x1b[m\...

_AnsiToken (21)

def _ansi_tokenize(ansi_text: str) -> Iterable[_AnsiToken]:
    """Tokenize a string into plain text and ANSI codes.

    Args:
        ansi_text (str): A string containing ANSI codes.

    Yields:
        AnsiToken: A named tuple of (plain, sgr, osc)
    """

    position = 0
    sgr: Optional[str]
    osc: Optional[str]
    for match in re_ansi.finditer(ansi_text):
        start, end = match.span(0)
        osc, sgr = match.groups()
        if start > position:
            # Text between the previous match and this escape is plain text.
            yield _AnsiToken(ansi_text[position:start])
        if sgr:
            # Only SGR sequences (ending in "m") become tokens; other CSI
            # sequences such as "\x1b[K" are consumed without yielding.
            if sgr.endswith("m"):
                yield _AnsiToken("", sgr[1:-1], osc)
        else:
            # The OSC alternative of re_ansi matched, so yield its payload.
            yield _AnsiToken("", sgr, osc)
        position = end
    if position < len(ansi_text):
        # Any text after the final escape sequence is plain text.
        yield _AnsiToken(ansi_text[position:])
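
The calls on this path pass strings containing SGR codes, e.g. the first sample '\x1b[1mfoo\x1b[0m'. Each such code takes the sgr.endswith("m") branch, which yields the parameters with the leading "[" and trailing "m" stripped, and the text between codes is yielded as a plain token. A sketch against that first sample (same import assumption as in Path 1):

first, middle, last = _ansi_tokenize("\x1b[1mfoo\x1b[0m")
# SGR parameters are stored without the surrounding "[" and "m".
print(first.sgr, middle.plain, last.sgr)  # 1 foo 0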

Path 3: 5 calls (0.06)

"\x1b[01m\x1b[KC:\\Users\\stefa\\AppData\\Local\\Temp\\tmp3ydingba:\x1b[m\x1b[K In function '\x1b[01m\x1b[Kmain\x1b[m\x1b[K':" (1) "\x1b[01m\x1b[KC:\\...

_AnsiToken (36) None (5)

def _ansi_tokenize(ansi_text: str) -> Iterable[_AnsiToken]:
    """Tokenize a string into plain text and ANSI codes.

    Args:
        ansi_text (str): A string containing ANSI codes.

    Yields:
        AnsiToken: A named tuple of (plain, sgr, osc)
    """

    position = 0
    sgr: Optional[str]
    osc: Optional[str]
    for match in re_ansi.finditer(ansi_text):
        start, end = match.span(0)
        osc, sgr = match.groups()
        if start > position:
            # Text between the previous match and this escape is plain text.
            yield _AnsiToken(ansi_text[position:start])
        if sgr:
            # Only SGR sequences (ending in "m") become tokens; other CSI
            # sequences such as "\x1b[K" are consumed without yielding.
            if sgr.endswith("m"):
                yield _AnsiToken("", sgr[1:-1], osc)
        else:
            # The OSC alternative of re_ansi matched, so yield its payload.
            yield _AnsiToken("", sgr, osc)
        position = end
    if position < len(ansi_text):
        # Any text after the final escape sequence is plain text.
        yield _AnsiToken(ansi_text[position:])
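
The samples on this path are GCC-style diagnostics that interleave SGR codes with erase-in-line sequences ('\x1b[K'). '\x1b[K' matches re_ansi but does not end in "m", so it is consumed without yielding a token, while the surrounding SGR codes and plain text still produce tokens. Because the full sample strings are truncated above, the sketch below uses a short representative fragment (same import assumption as in Path 1):

tokens = list(_ansi_tokenize("\x1b[01m\x1b[Kmain\x1b[m\x1b[K"))
print(len(tokens))          # 3 -- both "[K" sequences are dropped silently
print(tokens[0].sgr)        # 01
print(tokens[1].plain)      # main
print(repr(tokens[2].sgr))  # '' (the bare "[m" reset has no parameters)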

Path 4: 1 calls (0.01)

'\x1b]8;id=216776;http://example.org\x1b\\bar\x1b]8;;\x1b\\' (1)

_AnsiToken (3)

def _ansi_tokenize(ansi_text: str) -> Iterable[_AnsiToken]:
    """Tokenize a string into plain text and ANSI codes.

    Args:
        ansi_text (str): A string containing ANSI codes.

    Yields:
        AnsiToken: A named tuple of (plain, sgr, osc)
    """

    position = 0
    sgr: Optional[str]
    osc: Optional[str]
    for match in re_ansi.finditer(ansi_text):
        start, end = match.span(0)
        osc, sgr = match.groups()
        if start > position:
            # Text between the previous match and this escape is plain text.
            yield _AnsiToken(ansi_text[position:start])
        if sgr:
            # Only SGR sequences (ending in "m") become tokens; other CSI
            # sequences such as "\x1b[K" are consumed without yielding.
            if sgr.endswith("m"):
                yield _AnsiToken("", sgr[1:-1], osc)
        else:
            # The OSC alternative of re_ansi matched, so yield its payload.
            yield _AnsiToken("", sgr, osc)
        position = end
    if position < len(ansi_text):
        # Any text after the final escape sequence is plain text.
        yield _AnsiToken(ansi_text[position:])
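
The single call on this path is an OSC hyperlink. For both OSC escapes the first regex group matches and sgr is None, so the else branch yields a token carrying the OSC payload, and the link text between them is yielded as plain text, giving the three tokens recorded above. A sketch of that call (same import assumption as in Path 1):

link = "\x1b]8;id=216776;http://example.org\x1b\\bar\x1b]8;;\x1b\\"
open_osc, text, close_osc = _ansi_tokenize(link)
print(open_osc.osc)   # 8;id=216776;http://example.org
print(text.plain)     # bar
print(close_osc.osc)  # 8;;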

Path 5: 1 calls (0.01)

'\x1b[1m\x1b[Kfoo barmbaz' (1)

_AnsiToken (2) None (1)

def _ansi_tokenize(ansi_text: str) -> Iterable[_AnsiToken]:
    """Tokenize a string into plain text and ANSI codes.

    Args:
        ansi_text (str): A string containing ANSI codes.

    Yields:
        AnsiToken: A named tuple of (plain, sgr, osc)
    """

    position = 0
    sgr: Optional[str]
    osc: Optional[str]
    for match in re_ansi.finditer(ansi_text):
        start, end = match.span(0)
        osc, sgr = match.groups()
        if start > position:
            # Text between the previous match and this escape is plain text.
            yield _AnsiToken(ansi_text[position:start])
        if sgr:
            # Only SGR sequences (ending in "m") become tokens; other CSI
            # sequences such as "\x1b[K" are consumed without yielding.
            if sgr.endswith("m"):
                yield _AnsiToken("", sgr[1:-1], osc)
        else:
            # The OSC alternative of re_ansi matched, so yield its payload.
            yield _AnsiToken("", sgr, osc)
        position = end
    if position < len(ansi_text):
        # Any text after the final escape sequence is plain text.
        yield _AnsiToken(ansi_text[position:])
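
The single call here, '\x1b[1m\x1b[Kfoo barmbaz', combines the behaviours above: '\x1b[1m' yields an SGR token, '\x1b[K' is consumed silently, and the text after the last escape is yielded by the final position < len(ansi_text) check, giving the two tokens recorded above. A sketch of that call (same import assumption as in Path 1):

tokens = list(_ansi_tokenize("\x1b[1m\x1b[Kfoo barmbaz"))
print(len(tokens))            # 2
print(tokens[0].sgr)          # 1
print(repr(tokens[1].plain))  # 'foo barmbaz'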