Method: gzip._GzipReader.read
Calls: 346, Exceptions: 11, Paths: 9
Path 1: 206 calls (0.6 of total)
size: 9223372036854775807 (6), 8192 (1), 8191 (1), 8190 (1), 8189 (1), 8188 (1), 8187 (1), 8186 (1), 8185 (1), 8184 (1)
Returns: bytes (206)
def read(self, size=-1):
    if size < 0:
        return self.readall()
    # size=0 is special because decompress(max_length=0) is not supported
    if not size:
        return b""

    # For certain input data, a single
    # call to decompress() may not return
    # any data. In this case, retry until we get some data or reach EOF.
    while True:
        if self._decompressor.eof:
            # Ending case: we've come to the end of a member in the file,
            # so finish up this member, and read a new gzip header.
            # Check the CRC and file size, and set the flag so we read
            # a new member
            self._read_eof()
            self._new_member = True
            self._decompressor = self._decomp_factory(
                **self._decomp_args)

        if self._new_member:
            # If the _new_member flag is set, we have to
            # jump to the next member, if there is one.
            self._init_read()
            if not self._read_gzip_header():
                self._size = self._pos
                return b""
            self._new_member = False

        # Read a chunk of data from the file
        buf = self._fp.read(io.DEFAULT_BUFFER_SIZE)

        uncompress = self._decompressor.decompress(buf, size)
        if self._decompressor.unconsumed_tail != b"":
            self._fp.prepend(self._decompressor.unconsumed_tail)
        elif self._decompressor.unused_data != b"":
            # Prepend the already read bytes to the fileobj so they can
            # be seen by _read_eof() and _read_gzip_header()
            self._fp.prepend(self._decompressor.unused_data)

        if uncompress != b"":
            break
        if buf == b"":
            raise EOFError("Compressed file ended before the "
                           "end-of-stream marker was reached")

    self._add_read_data( uncompress )
    self._pos += len(uncompress)
    return uncompress
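All nine recorded paths run through this same listing; they differ only in which branches of the loop are taken. The prepend logic in the middle of the loop hinges on two zlib decompressor attributes: unconsumed_tail (compressed input left over because the max_length cap stopped decompression early) and unused_data (bytes that follow the end of the compressed stream, i.e. the gzip trailer and any following member). A minimal standalone sketch of those mechanics, using a plain zlib stream for brevity rather than the raw-deflate decompressor the gzip module builds:

import zlib

payload = b"x" * 100_000
stream = zlib.compress(payload)

d = zlib.decompressobj()
# Capping the output with max_length mirrors decompress(buf, size) above:
# at most 10 bytes come out, and the not-yet-processed compressed input is
# parked in unconsumed_tail so it can be fed back in (the listing prepends
# it to the file object).
chunk = d.decompress(stream, 10)
assert len(chunk) == 10
assert d.unconsumed_tail != b""

# Draining the stream completely leaves unconsumed_tail empty; anything
# trailing the compressed data ends up in unused_data instead.
rest = d.decompress(d.unconsumed_tail + b"TRAILER")
assert chunk + rest == payload
assert d.unused_data == b"TRAILER"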
Path 2: 62 calls (0.18 of total)
size: 9223372036854775807 (43), 8192 (19)
Returns: bytes (62)
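The two size values that dominate these distributions are easy to account for: 9223372036854775807 is sys.maxsize on a 64-bit build, which the readall() path passes back into read() when the caller asks to read to EOF, and 8192 matches io.DEFAULT_BUFFER_SIZE, the chunk size used by the io.BufferedReader that GzipFile wraps around _GzipReader. A hedged sketch (demo data, not the profiled workload) of two call patterns that should land on those values:

import gzip
import io
import sys

blob = gzip.compress(b"hello gzip\n" * 4000)

# Reading to EOF goes through readall(), which calls read(sys.maxsize) --
# the 9223372036854775807 seen in the distributions above.
with gzip.GzipFile(fileobj=io.BytesIO(blob)) as f:
    data = f.read()
assert data.startswith(b"hello gzip")
print(sys.maxsize)            # 9223372036854775807 on a 64-bit build

# Line-oriented access goes through the buffered wrapper, which should
# request io.DEFAULT_BUFFER_SIZE (8192) bytes at a time instead.
with gzip.GzipFile(fileobj=io.BytesIO(blob)) as f:
    first_line = f.readline()
print(io.DEFAULT_BUFFER_SIZE)  # 8192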
Path 3: 54 calls (0.16 of total)
size: 9223372036854775807 (43), 8192 (9), 7992 (1), 8071 (1)
Returns: bytes (54)
Path 4: 9 calls (0.03 of total)
size: 8192 (9)
Raises: EOFError (8), BadGzipFile (1)
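Path 4 accounts for nine of the eleven recorded exceptions; the remaining two are the EOFErrors under Path 9. An EOFError of this kind is raised when the compressed stream stops before the end-of-stream marker, while BadGzipFile comes from the header and trailer checks in _read_gzip_header() and _read_eof(). A hedged sketch that reproduces both with synthetic data:

import gzip
import io

blob = gzip.compress(b"payload " * 5000)

# Cutting the stream off mid-deflate means decompress() eventually returns
# nothing while the file object is exhausted, so read() raises EOFError.
try:
    with gzip.GzipFile(fileobj=io.BytesIO(blob[:len(blob) // 2])) as f:
        f.read()
except EOFError as exc:
    print("EOFError:", exc)

# Flipping a byte of the stored CRC leaves the deflate data intact but makes
# the CRC check in _read_eof() fail, which surfaces as gzip.BadGzipFile.
corrupted = bytearray(blob)
corrupted[-8] ^= 0xFF  # first byte of the little-endian CRC32 trailer field
try:
    with gzip.GzipFile(fileobj=io.BytesIO(bytes(corrupted))) as f:
        f.read()
except gzip.BadGzipFile as exc:
    print("BadGzipFile:", exc)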
Path 5: 5 calls (0.01 of total)
size: 8192 (5)
Returns: bytes (5)
Path 6: 3 calls (0.01 of total)
size: 8192 (2), 6049 (1)
Returns: bytes (3)
Path 7: 3 calls (0.01 of total)
size: 8192 (3)
Returns: bytes (3)
Path 8: 2 calls (0.01 of total)
size: 9223372036854775807 (1), 8192 (1)
Returns: bytes (2)
Path 9: 2 calls (0.01 of total)
size: 9223372036854775807 (1), 8192 (1)
Raises: EOFError (2)
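One branch of the listing that the summary statistics cannot show directly is the member-boundary case: when _decompressor.eof is set, read() validates the finished member's CRC and size, rebuilds the decompressor, and tries to parse another gzip header. Concatenated gzip members are a legal gzip stream, so a minimal way to exercise that branch (a sketch, not taken from the profiled workload) is:

import gzip
import io

# Two independent gzip members back to back -- a valid gzip stream.
blob = gzip.compress(b"first member\n") + gzip.compress(b"second member\n")

with gzip.GzipFile(fileobj=io.BytesIO(blob)) as f:
    data = f.read()

# Crossing the boundary runs _read_eof(), resets the decompressor, and
# reads the second member's header before decompression continues.
assert data == b"first member\nsecond member\n"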