1

2

3

4

5

6

7

8

9

10

11

12

13

14

15

16

17

18

19

20

21

22

23

24

25

26

27

28

29

30

31

32

33

34

35

36

37

38

39

40

41

42

43

44

45

46

47

48

49

50

51

52

53

54

55

56

57

58

59

60

61

62

63

64

65

66

67

68

69

70

71

72

73

74

75

76

77

78

79

80

81

82

83

84

85

86

87

88

89

90

91

92

93

94

95

96

97

98

99

100

101

102

103

104

105

106

107

108

109

110

111

112

113

114

115

116

117

118

119

120

121

122

123

124

125

126

127

128

129

130

131

132

133

134

135

136

137

138

139

140

141

142

143

144

145

146

147

148

149

150

151

152

153

154

155

156

157

158

159

160

161

162

163

164

165

166

167

168

169

170

171

172

173

174

175

176

177

178

179

180

181

182

183

184

185

186

187

188

189

190

191

192

193

194

195

196

197

198

199

200

201

202

203

204

205

206

207

208

209

210

211

212

213

214

215

216

217

218

219

220

221

222

223

224

225

226

227

228

229

230

231

232

233

234

235

236

237

238

239

240

241

242

243

244

245

246

247

248

249

250

251

252

253

254

255

256

257

258

259

260

261

262

263

264

265

266

267

268

269

270

271

272

273

274

275

276

277

278

279

280

281

282

283

284

285

286

287

288

289

290

291

292

293

294

295

296

297

298

299

300

301

302

303

304

305

306

307

308

309

310

311

312

313

314

315

316

317

318

319

320

321

322

323

324

325

326

327

328

329

330

331

332

333

334

335

336

337

338

339

340

341

342

343

344

345

346

347

348

349

350

351

352

353

354

355

356

357

358

359

360

361

362

363

364

365

366

367

368

369

370

371

372

373

374

375

376

377

378

379

380

381

382

383

384

385

386

387

388

389

390

391

392

393

394

395

396

397

398

399

400

401

402

403

404

405

406

407

408

409

410

411

412

413

414

415

416

417

418

419

420

421

422

423

424

425

426

427

428

429

430

431

432

433

434

435

436

437

438

439

440

441

442

443

444

445

446

447

448

449

450

451

452

453

454

455

456

457

458

459

460

461

462

463

464

465

466

467

468

469

470

471

472

473

474

475

476

477

478

479

480

481

482

483

484

485

486

487

488

489

490

491

492

493

494

495

496

497

498

499

500

501

502

503

504

505

506

507

508

509

510

511

512

513

514

515

516

517

518

519

520

521

522

523

524

525

526

527

528

529

530

531

532

533

534

535

536

537

538

539

540

541

542

543

544

545

546

547

548

549

550

551

552

553

554

555

556

557

558

559

560

561

562

563

564

565

566

567

568

569

570

571

572

573

574

575

576

577

578

579

580

581

582

583

584

585

586

587

588

589

590

591

592

593

594

595

596

597

598

599

600

601

602

603

604

605

606

607

608

609

610

611

612

613

614

615

616

617

618

619

620

621

622

623

624

625

626

627

628

629

630

631

632

633

634

635

636

637

638

639

640

641

642

643

644

645

646

647

648

649

650

651

652

653

654

655

656

657

658

659

660

661

662

663

664

665

666

667

668

669

670

671

672

673

674

675

676

677

678

679

680

681

682

683

684

685

686

687

688

689

690

691

692

693

694

695

696

697

698

699

700

701

702

703

704

705

706

707

708

709

710

711

712

713

714

715

716

717

718

719

720

721

722

723

724

725

726

727

728

729

730

731

732

733

734

735

736

737

738

739

740

741

742

743

744

745

746

747

748

749

750

751

752

753

754

755

756

757

758

759

760

761

762

763

764

765

766

767

768

769

770

771

772

773

774

775

776

777

778

779

780

781

782

783

784

785

786

787

788

789

790

791

792

793

794

795

796

797

798

799

800

801

802

803

804

805

806

807

808

809

810

811

812

813

814

815

816

817

818

819

820

from __future__ import absolute_import 

from contextlib import contextmanager 

import zlib 

import io 

import logging 

from socket import timeout as SocketTimeout 

from socket import error as SocketError 

 

try: 

import brotli 

except ImportError: 

brotli = None 

 

from ._collections import HTTPHeaderDict 

from .exceptions import ( 

BodyNotHttplibCompatible, 

ProtocolError, 

DecodeError, 

ReadTimeoutError, 

ResponseNotChunked, 

IncompleteRead, 

InvalidHeader, 

HTTPError, 

) 

from .packages.six import string_types as basestring, PY3 

from .packages.six.moves import http_client as httplib 

from .connection import HTTPException, BaseSSLError 

from .util.response import is_fp_closed, is_response_to_head 

 

log = logging.getLogger(__name__)  # module-level logger, named after this module

 

 

class DeflateDecoder(object):
    """Decoder for ``Content-Encoding: deflate`` response bodies.

    Servers disagree on whether "deflate" means a zlib-wrapped stream
    (RFC 1950) or a raw DEFLATE stream (RFC 1951).  The first non-empty
    chunk is tried with the zlib wrapper; if that fails, we switch to raw
    mode and replay the bytes buffered so far.
    """

    def __init__(self):
        self._first_try = True
        self._data = b""
        self._obj = zlib.decompressobj()

    def __getattr__(self, name):
        # Anything not defined here (flush, unused_data, ...) is
        # forwarded to the underlying zlib decompressor.
        return getattr(self._obj, name)

    def decompress(self, data):
        if not data:
            return data

        if not self._first_try:
            return self._obj.decompress(data)

        # Still probing the stream format: buffer the input so the raw
        # DEFLATE fallback can replay it from the start.
        self._data += data
        try:
            out = self._obj.decompress(data)
            if out:
                # zlib-wrapped mode worked; drop the replay buffer.
                self._first_try = False
                self._data = None
            return out
        except zlib.error:
            # Not zlib-wrapped: retry everything seen so far as raw DEFLATE.
            self._first_try = False
            self._obj = zlib.decompressobj(-zlib.MAX_WBITS)
            try:
                return self.decompress(self._data)
            finally:
                self._data = None

 

 

class GzipDecoderState(object):
    """Progress markers for decoding a (possibly multi-member) gzip stream."""

    FIRST_MEMBER = 0
    OTHER_MEMBERS = 1
    SWALLOW_DATA = 2


class GzipDecoder(object):
    """Decoder for ``Content-Encoding: gzip`` bodies.

    Handles streams built from several concatenated gzip members, and
    tolerates trailing garbage after at least one complete member.
    """

    def __init__(self):
        self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)
        self._state = GzipDecoderState.FIRST_MEMBER

    def __getattr__(self, name):
        # Delegate any other attribute to the zlib decompressor.
        return getattr(self._obj, name)

    def decompress(self, data):
        output = bytearray()
        if not data or self._state == GzipDecoderState.SWALLOW_DATA:
            return bytes(output)
        while True:
            try:
                output += self._obj.decompress(data)
            except zlib.error:
                prior_state = self._state
                # Ignore data after the first error
                self._state = GzipDecoderState.SWALLOW_DATA
                if prior_state == GzipDecoderState.OTHER_MEMBERS:
                    # Allow trailing garbage acceptable in other gzip clients
                    return bytes(output)
                raise
            data = self._obj.unused_data
            if not data:
                return bytes(output)
            # Leftover bytes mean another gzip member follows: decode it
            # with a fresh decompressor.
            self._state = GzipDecoderState.OTHER_MEMBERS
            self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)

 

 

# Only define the decoder when a brotli implementation was importable above.
if brotli is not None:

    class BrotliDecoder(object):
        # Supports both 'brotlipy' and 'Brotli' packages
        # since they share an import name. The top branches
        # are for 'brotlipy' and bottom branches for 'Brotli'
        def __init__(self):
            self._obj = brotli.Decompressor()
            if hasattr(self._obj, "decompress"):
                # brotlipy names the operation ``decompress``; bind it
                # directly so calls skip a Python-level indirection.
                self.decompress = self._obj.decompress
            else:
                # The 'Brotli' package exposes the same operation as
                # ``process`` instead.
                self.decompress = self._obj.process

        def flush(self):
            # brotlipy's Decompressor provides flush(); the 'Brotli'
            # package has no equivalent, so report no pending output.
            if hasattr(self._obj, "flush"):
                return self._obj.flush()
            return b""

 

 

class MultiDecoder(object):
    """
    Decoder for a comma-separated chain of content codings.

    From RFC7231:
        If one or more encodings have been applied to a representation, the
        sender that applied the encodings MUST generate a Content-Encoding
        header field that lists the content codings in the order in which
        they were applied.
    """

    def __init__(self, modes):
        self._decoders = [_get_decoder(mode.strip()) for mode in modes.split(",")]

    def flush(self):
        # Only the innermost decoder (first in header order) buffers data.
        return self._decoders[0].flush()

    def decompress(self, data):
        # Undo the codings in the reverse of the order they were applied.
        for decoder in reversed(self._decoders):
            data = decoder.decompress(data)
        return data

 

 

def _get_decoder(mode):
    """Return a decoder instance matching the Content-Encoding ``mode``."""
    if "," in mode:
        # Several codings were applied, e.g. "gzip, br".
        return MultiDecoder(mode)

    if mode == "gzip":
        return GzipDecoder()

    if brotli is not None and mode == "br":
        return BrotliDecoder()

    # Anything else is treated as deflate.
    return DeflateDecoder()

 

 

class HTTPResponse(io.IOBase):
    """
    HTTP Response container.

    Backwards-compatible to httplib's HTTPResponse but the response ``body`` is
    loaded and decoded on-demand when the ``data`` property is accessed. This
    class is also compatible with the Python standard library's :mod:`io`
    module, and can hence be treated as a readable object in the context of that
    framework.

    Extra parameters for behaviour not present in httplib.HTTPResponse:

    :param preload_content:
        If True, the response's body will be preloaded during construction.

    :param decode_content:
        If True, will attempt to decode the body based on the
        'content-encoding' header.

    :param original_response:
        When this HTTPResponse wrapper is generated from an httplib.HTTPResponse
        object, it's convenient to include the original for debug purposes. It's
        otherwise unused.

    :param retries:
        The retries contains the last :class:`~urllib3.util.retry.Retry` that
        was used during the request.

    :param enforce_content_length:
        Enforce content length checking. Body returned by server must match
        value of Content-Length header, if present. Otherwise, raise error.
    """

    # Encodings this class can decode transparently; "br" only when a
    # brotli implementation was importable.
    CONTENT_DECODERS = ["gzip", "deflate"]
    if brotli is not None:
        CONTENT_DECODERS += ["br"]
    REDIRECT_STATUSES = [301, 302, 303, 307, 308]

    def __init__(
        self,
        body="",
        headers=None,
        status=0,
        version=0,
        reason=None,
        strict=0,
        preload_content=True,
        decode_content=True,
        original_response=None,
        pool=None,
        connection=None,
        msg=None,
        retries=None,
        enforce_content_length=False,
        request_method=None,
        request_url=None,
        auto_close=True,
    ):

        if isinstance(headers, HTTPHeaderDict):
            self.headers = headers
        else:
            self.headers = HTTPHeaderDict(headers)
        self.status = status
        self.version = version
        self.reason = reason
        self.strict = strict
        self.decode_content = decode_content
        self.retries = retries
        self.enforce_content_length = enforce_content_length
        self.auto_close = auto_close

        self._decoder = None  # created lazily by _init_decoder()
        self._body = None
        self._fp = None
        self._original_response = original_response
        self._fp_bytes_read = 0  # raw bytes pulled off the wire so far
        self.msg = msg
        self._request_url = request_url

        # A string/bytes body is stored directly ...
        if body and isinstance(body, (basestring, bytes)):
            self._body = body

        self._pool = pool
        self._connection = connection

        # ... while a file-like body becomes the fp we read on demand.
        if hasattr(body, "read"):
            self._fp = body

        # Are we using the chunked-style of transfer encoding?
        self.chunked = False
        self.chunk_left = None
        tr_enc = self.headers.get("transfer-encoding", "").lower()
        # Don't incur the penalty of creating a list and then discarding it
        encodings = (enc.strip() for enc in tr_enc.split(","))
        if "chunked" in encodings:
            self.chunked = True

        # Determine length of response
        self.length_remaining = self._init_length(request_method)

        # If requested, preload the body.
        if preload_content and not self._body:
            self._body = self.read(decode_content=decode_content)

    def get_redirect_location(self):
        """
        Should we redirect and where to?

        :returns: Truthy redirect location string if we got a redirect status
            code and valid location. ``None`` if redirect status and no
            location. ``False`` if not a redirect status code.
        """
        if self.status in self.REDIRECT_STATUSES:
            return self.headers.get("location")

        return False

    def release_conn(self):
        """Return the underlying connection to its pool, if we hold both."""
        if not self._pool or not self._connection:
            return

        self._pool._put_conn(self._connection)
        self._connection = None

    def drain_conn(self):
        """
        Read and discard any remaining HTTP response data in the response connection.

        Unread data in the HTTPResponse connection blocks the connection from being released back to the pool.
        """
        try:
            self.read()
        except (HTTPError, SocketError, BaseSSLError, HTTPException):
            # Best-effort drain: a broken connection will simply be closed.
            pass

    @property
    def data(self):
        # For backwards-compat with urllib3 0.4 and earlier.
        if self._body:
            return self._body

        if self._fp:
            # Cache so repeated .data accesses return the same bytes even
            # after the underlying fp is exhausted.
            return self.read(cache_content=True)

    @property
    def connection(self):
        # The raw connection this response was read from, if still held.
        return self._connection

    def isclosed(self):
        """Return True if the underlying file-like body is closed."""
        return is_fp_closed(self._fp)

    def tell(self):
        """
        Obtain the number of bytes pulled over the wire so far. May differ from
        the amount of content returned by :meth:``HTTPResponse.read`` if bytes
        are encoded on the wire (e.g, compressed).
        """
        return self._fp_bytes_read

    def _init_length(self, request_method):
        """
        Set initial length value for Response content if available.
        """
        length = self.headers.get("content-length")

        if length is not None:
            if self.chunked:
                # This Response will fail with an IncompleteRead if it can't be
                # received as chunked. This method falls back to attempt reading
                # the response before raising an exception.
                log.warning(
                    "Received response with both Content-Length and "
                    "Transfer-Encoding set. This is expressly forbidden "
                    "by RFC 7230 sec 3.3.2. Ignoring Content-Length and "
                    "attempting to process response as Transfer-Encoding: "
                    "chunked."
                )
                return None

            try:
                # RFC 7230 section 3.3.2 specifies multiple content lengths can
                # be sent in a single Content-Length header
                # (e.g. Content-Length: 42, 42). This line ensures the values
                # are all valid ints and that as long as the `set` length is 1,
                # all values are the same. Otherwise, the header is invalid.
                lengths = set([int(val) for val in length.split(",")])
                if len(lengths) > 1:
                    raise InvalidHeader(
                        "Content-Length contained multiple "
                        "unmatching values (%s)" % length
                    )
                length = lengths.pop()
            except ValueError:
                length = None
            else:
                if length < 0:
                    length = None

        # Convert status to int for comparison
        # In some cases, httplib returns a status of "_UNKNOWN"
        try:
            status = int(self.status)
        except ValueError:
            status = 0

        # Check for responses that shouldn't include a body
        if status in (204, 304) or 100 <= status < 200 or request_method == "HEAD":
            length = 0

        return length

    def _init_decoder(self):
        """
        Set-up the _decoder attribute if necessary.
        """
        # Note: content-encoding value should be case-insensitive, per RFC 7230
        # Section 3.2
        content_encoding = self.headers.get("content-encoding", "").lower()
        if self._decoder is None:
            if content_encoding in self.CONTENT_DECODERS:
                self._decoder = _get_decoder(content_encoding)
            elif "," in content_encoding:
                # Only build a multi-decoder if every listed coding that
                # survives filtering is one we can actually decode.
                encodings = [
                    e.strip()
                    for e in content_encoding.split(",")
                    if e.strip() in self.CONTENT_DECODERS
                ]
                if len(encodings):
                    self._decoder = _get_decoder(content_encoding)

    # Exceptions a decoder may raise; brotli has its own error type.
    DECODER_ERROR_CLASSES = (IOError, zlib.error)
    if brotli is not None:
        DECODER_ERROR_CLASSES += (brotli.error,)

    def _decode(self, data, decode_content, flush_decoder):
        """
        Decode the data passed in and potentially flush the decoder.
        """
        if not decode_content:
            return data

        try:
            if self._decoder:
                data = self._decoder.decompress(data)
        except self.DECODER_ERROR_CLASSES as e:
            content_encoding = self.headers.get("content-encoding", "").lower()
            raise DecodeError(
                "Received response with content-encoding: %s, but "
                "failed to decode it." % content_encoding,
                e,
            )
        if flush_decoder:
            data += self._flush_decoder()

        return data

    def _flush_decoder(self):
        """
        Flushes the decoder. Should only be called if the decoder is actually
        being used.
        """
        if self._decoder:
            buf = self._decoder.decompress(b"")
            return buf + self._decoder.flush()

        return b""

    @contextmanager
    def _error_catcher(self):
        """
        Catch low-level python exceptions, instead re-raising urllib3
        variants, so that low-level exceptions are not leaked in the
        high-level api.

        On exit, release the connection back to the pool.
        """
        clean_exit = False

        try:
            try:
                yield

            except SocketTimeout:
                # FIXME: Ideally we'd like to include the url in the ReadTimeoutError but
                # there is yet no clean way to get at it from this context.
                raise ReadTimeoutError(self._pool, None, "Read timed out.")

            except BaseSSLError as e:
                # FIXME: Is there a better way to differentiate between SSLErrors?
                if "read operation timed out" not in str(e):  # Defensive:
                    # This shouldn't happen but just in case we're missing an edge
                    # case, let's avoid swallowing SSL errors.
                    raise

                raise ReadTimeoutError(self._pool, None, "Read timed out.")

            except (HTTPException, SocketError) as e:
                # This includes IncompleteRead.
                raise ProtocolError("Connection broken: %r" % e, e)

            # If no exception is thrown, we should avoid cleaning up
            # unnecessarily.
            clean_exit = True
        finally:
            # If we didn't terminate cleanly, we need to throw away our
            # connection.
            if not clean_exit:
                # The response may not be closed but we're not going to use it
                # anymore so close it now to ensure that the connection is
                # released back to the pool.
                if self._original_response:
                    self._original_response.close()

                # Closing the response may not actually be sufficient to close
                # everything, so if we have a hold of the connection close that
                # too.
                if self._connection:
                    self._connection.close()

            # If we hold the original response but it's closed now, we should
            # return the connection back to the pool.
            if self._original_response and self._original_response.isclosed():
                self.release_conn()

    def read(self, amt=None, decode_content=None, cache_content=False):
        """
        Similar to :meth:`httplib.HTTPResponse.read`, but with two additional
        parameters: ``decode_content`` and ``cache_content``.

        :param amt:
            How much of the content to read. If specified, caching is skipped
            because it doesn't make sense to cache partial content as the full
            response.

        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.

        :param cache_content:
            If True, will save the returned data such that the same result is
            returned despite of the state of the underlying file object. This
            is useful if you want the ``.data`` property to continue working
            after having ``.read()`` the file object. (Overridden if ``amt`` is
            set.)
        """
        self._init_decoder()
        if decode_content is None:
            decode_content = self.decode_content

        if self._fp is None:
            return

        flush_decoder = False
        fp_closed = getattr(self._fp, "closed", False)

        with self._error_catcher():
            if amt is None:
                # cStringIO doesn't like amt=None
                data = self._fp.read() if not fp_closed else b""
                flush_decoder = True
            else:
                cache_content = False
                data = self._fp.read(amt) if not fp_closed else b""
                if (
                    amt != 0 and not data
                ):  # Platform-specific: Buggy versions of Python.
                    # Close the connection when no data is returned
                    #
                    # This is redundant to what httplib/http.client _should_
                    # already do. However, versions of python released before
                    # December 15, 2012 (http://bugs.python.org/issue16298) do
                    # not properly close the connection in all cases. There is
                    # no harm in redundantly calling close.
                    self._fp.close()
                    flush_decoder = True
                    if self.enforce_content_length and self.length_remaining not in (
                        0,
                        None,
                    ):
                        # This is an edge case that httplib failed to cover due
                        # to concerns of backward compatibility. We're
                        # addressing it here to make sure IncompleteRead is
                        # raised during streaming, so all calls with incorrect
                        # Content-Length are caught.
                        raise IncompleteRead(self._fp_bytes_read, self.length_remaining)

        if data:
            self._fp_bytes_read += len(data)
            if self.length_remaining is not None:
                self.length_remaining -= len(data)

            data = self._decode(data, decode_content, flush_decoder)

            if cache_content:
                self._body = data

        return data

    def stream(self, amt=2 ** 16, decode_content=None):
        """
        A generator wrapper for the read() method. A call will block until
        ``amt`` bytes have been read from the connection or until the
        connection is closed.

        :param amt:
            How much of the content to read. The generator will return up to
            much data per iteration, but may return less. This is particularly
            likely when using compressed data. However, the empty string will
            never be returned.

        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.
        """
        if self.chunked and self.supports_chunked_reads():
            for line in self.read_chunked(amt, decode_content=decode_content):
                yield line
        else:
            while not is_fp_closed(self._fp):
                data = self.read(amt=amt, decode_content=decode_content)

                if data:
                    yield data

    @classmethod
    def from_httplib(ResponseCls, r, **response_kw):
        """
        Given an :class:`httplib.HTTPResponse` instance ``r``, return a
        corresponding :class:`urllib3.response.HTTPResponse` object.

        Remaining parameters are passed to the HTTPResponse constructor, along
        with ``original_response=r``.
        """
        headers = r.msg

        if not isinstance(headers, HTTPHeaderDict):
            if PY3:
                headers = HTTPHeaderDict(headers.items())
            else:
                # Python 2.7
                headers = HTTPHeaderDict.from_httplib(headers)

        # HTTPResponse objects in Python 3 don't have a .strict attribute
        strict = getattr(r, "strict", 0)
        resp = ResponseCls(
            body=r,
            headers=headers,
            status=r.status,
            version=r.version,
            reason=r.reason,
            strict=strict,
            original_response=r,
            **response_kw
        )
        return resp

    # Backwards-compatibility methods for httplib.HTTPResponse
    def getheaders(self):
        return self.headers

    def getheader(self, name, default=None):
        return self.headers.get(name, default)

    # Backwards compatibility for http.cookiejar
    def info(self):
        return self.headers

    # Overrides from io.IOBase
    def close(self):
        if not self.closed:
            self._fp.close()

        if self._connection:
            self._connection.close()

        # When auto_close is disabled, mark the io.IOBase side closed
        # explicitly (auto_close responses track closedness via the fp).
        if not self.auto_close:
            io.IOBase.close(self)

    @property
    def closed(self):
        if not self.auto_close:
            return io.IOBase.closed.__get__(self)
        elif self._fp is None:
            return True
        elif hasattr(self._fp, "isclosed"):
            return self._fp.isclosed()
        elif hasattr(self._fp, "closed"):
            return self._fp.closed
        else:
            return True

    def fileno(self):
        """Return the file descriptor of the underlying body, if any."""
        if self._fp is None:
            raise IOError("HTTPResponse has no file to get a fileno from")
        elif hasattr(self._fp, "fileno"):
            return self._fp.fileno()
        else:
            raise IOError(
                "The file-like object this HTTPResponse is wrapped "
                "around has no file descriptor"
            )

    def flush(self):
        # Forward to the underlying body when it supports flushing and
        # has not already been closed.
        if (
            self._fp is not None
            and hasattr(self._fp, "flush")
            and not getattr(self._fp, "closed", False)
        ):
            return self._fp.flush()

    def readable(self):
        # This method is required for `io` module compatibility.
        return True

    def readinto(self, b):
        # This method is required for `io` module compatibility.
        temp = self.read(len(b))
        if len(temp) == 0:
            return 0
        else:
            b[: len(temp)] = temp
            return len(temp)

    def supports_chunked_reads(self):
        """
        Checks if the underlying file-like object looks like a
        httplib.HTTPResponse object. We do this by testing for the fp
        attribute. If it is present we assume it returns raw chunks as
        processed by read_chunked().
        """
        return hasattr(self._fp, "fp")

    def _update_chunk_length(self):
        # First, we'll figure out length of a chunk and then
        # we'll try to read it from socket.
        if self.chunk_left is not None:
            return
        line = self._fp.fp.readline()
        # Chunk-size may be followed by ";ext=val" extensions; drop them.
        line = line.split(b";", 1)[0]
        try:
            # Chunk sizes are transmitted in hexadecimal.
            self.chunk_left = int(line, 16)
        except ValueError:
            # Invalid chunked protocol response, abort.
            self.close()
            raise httplib.IncompleteRead(line)

    def _handle_chunk(self, amt):
        """Read up to ``amt`` bytes from the current chunk (all of it if
        ``amt`` is None), consuming the trailing CRLF when the chunk ends."""
        returned_chunk = None
        if amt is None:
            chunk = self._fp._safe_read(self.chunk_left)
            returned_chunk = chunk
            self._fp._safe_read(2)  # Toss the CRLF at the end of the chunk.
            self.chunk_left = None
        elif amt < self.chunk_left:
            value = self._fp._safe_read(amt)
            self.chunk_left = self.chunk_left - amt
            returned_chunk = value
        elif amt == self.chunk_left:
            value = self._fp._safe_read(amt)
            self._fp._safe_read(2)  # Toss the CRLF at the end of the chunk.
            self.chunk_left = None
            returned_chunk = value
        else:  # amt > self.chunk_left
            returned_chunk = self._fp._safe_read(self.chunk_left)
            self._fp._safe_read(2)  # Toss the CRLF at the end of the chunk.
            self.chunk_left = None
        return returned_chunk

    def read_chunked(self, amt=None, decode_content=None):
        """
        Similar to :meth:`HTTPResponse.read`, but with an additional
        parameter: ``decode_content``.

        :param amt:
            How much of the content to read. If specified, caching is skipped
            because it doesn't make sense to cache partial content as the full
            response.

        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.
        """
        self._init_decoder()
        # FIXME: Rewrite this method and make it a class with a better structured logic.
        if not self.chunked:
            raise ResponseNotChunked(
                "Response is not chunked. "
                "Header 'transfer-encoding: chunked' is missing."
            )
        if not self.supports_chunked_reads():
            raise BodyNotHttplibCompatible(
                "Body should be httplib.HTTPResponse like. "
                "It should have have an fp attribute which returns raw chunks."
            )

        with self._error_catcher():
            # Don't bother reading the body of a HEAD request.
            if self._original_response and is_response_to_head(self._original_response):
                self._original_response.close()
                return

            # If a response is already read and closed
            # then return immediately.
            if self._fp.fp is None:
                return

            while True:
                self._update_chunk_length()
                if self.chunk_left == 0:
                    # A zero-length chunk terminates the body.
                    break
                chunk = self._handle_chunk(amt)
                decoded = self._decode(
                    chunk, decode_content=decode_content, flush_decoder=False
                )
                if decoded:
                    yield decoded

            if decode_content:
                # On CPython and PyPy, we should never need to flush the
                # decoder. However, on Jython we *might* need to, so
                # lets defensively do it anyway.
                decoded = self._flush_decoder()
                if decoded:  # Platform-specific: Jython.
                    yield decoded

            # Chunk content ends with \r\n: discard it.
            while True:
                line = self._fp.fp.readline()
                if not line:
                    # Some sites may not end with '\r\n'.
                    break
                if line == b"\r\n":
                    break

            # We read everything; close the "file".
            if self._original_response:
                self._original_response.close()

    def geturl(self):
        """
        Returns the URL that was the source of this response.
        If the request that generated this response redirected, this method
        will return the final redirect location.
        """
        if self.retries is not None and len(self.retries.history):
            return self.retries.history[-1].redirect_location
        else:
            return self._request_url

    def __iter__(self):
        # Iterate over the decoded body line by line (lines keep their
        # trailing b"\n"; a final unterminated fragment is yielded as-is).
        buffer = []
        for chunk in self.stream(decode_content=True):
            if b"\n" in chunk:
                chunk = chunk.split(b"\n")
                yield b"".join(buffer) + chunk[0] + b"\n"
                for x in chunk[1:-1]:
                    yield x + b"\n"
                if chunk[-1]:
                    buffer = [chunk[-1]]
                else:
                    buffer = []
            else:
                buffer.append(chunk)
        if buffer:
            yield b"".join(buffer)