from __future__ import absolute_import 

import errno 

import logging 

import sys 

import warnings 

 

from socket import error as SocketError, timeout as SocketTimeout 

import socket 

 

 

from .exceptions import ( 

ClosedPoolError, 

ProtocolError, 

EmptyPoolError, 

HeaderParsingError, 

HostChangedError, 

LocationValueError, 

MaxRetryError, 

ProxyError, 

ReadTimeoutError, 

SSLError, 

TimeoutError, 

InsecureRequestWarning, 

NewConnectionError, 

) 

from .packages.ssl_match_hostname import CertificateError 

from .packages import six 

from .packages.six.moves import queue 

from .connection import ( 

port_by_scheme, 

DummyConnection, 

HTTPConnection, 

HTTPSConnection, 

VerifiedHTTPSConnection, 

HTTPException, 

BaseSSLError, 

) 

from .request import RequestMethods 

from .response import HTTPResponse 

 

from .util.connection import is_connection_dropped 

from .util.request import set_file_position 

from .util.response import assert_header_parsing 

from .util.retry import Retry 

from .util.timeout import Timeout 

from .util.url import ( 

get_host, 

parse_url, 

Url, 

_normalize_host as normalize_host, 

_encode_target, 

) 

from .util.queue import LifoQueue 

 

 

xrange = six.moves.xrange 

 

log = logging.getLogger(__name__) 

 

_Default = object() 

 

 

# Pool objects 

class ConnectionPool(object):
    """
    Base class for all connection pools, such as
    :class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`.

    .. note::
       ConnectionPool.urlopen() does not normalize or percent-encode target URIs
       which is useful if your target server doesn't support percent-encoded
       target URIs.
    """

    # Subclasses set ``scheme`` (e.g. "http"); ``QueueCls`` is the container
    # used to hold idle connections.
    scheme = None
    QueueCls = LifoQueue

    def __init__(self, host, port=None):
        if not host:
            raise LocationValueError("No host specified.")

        # Keep a normalized host for socket use/comparison, and the original
        # lowercased host for proxy tunnelling.
        self.host = _normalize_host(host, scheme=self.scheme)
        self._proxy_host = host.lower()
        self.port = port

    def __str__(self):
        return "{0}(host={1!r}, port={2!r})".format(
            type(self).__name__, self.host, self.port
        )

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()
        # Returning False re-raises any exception from the ``with`` body.
        return False

    def close(self):
        """
        Close all pooled connections and disable the pool.
        """
        pass

 

 

# This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252
# Errnos indicating a non-blocking socket operation would have blocked; used
# by _raise_timeout() to translate them into ReadTimeoutError on Python 2.
_blocking_errnos = {errno.EAGAIN, errno.EWOULDBLOCK}

 

 

class HTTPConnectionPool(ConnectionPool, RequestMethods): 

""" 

Thread-safe connection pool for one host. 

 

:param host: 

Host used for this HTTP Connection (e.g. "localhost"), passed into 

:class:`httplib.HTTPConnection`. 

 

:param port: 

Port used for this HTTP Connection (None is equivalent to 80), passed 

into :class:`httplib.HTTPConnection`. 

 

:param strict: 

Causes BadStatusLine to be raised if the status line can't be parsed 

as a valid HTTP/1.0 or 1.1 status line, passed into 

:class:`httplib.HTTPConnection`. 

 

.. note:: 

Only works in Python 2. This parameter is ignored in Python 3. 

 

:param timeout: 

Socket timeout in seconds for each individual connection. This can 

be a float or integer, which sets the timeout for the HTTP request, 

or an instance of :class:`urllib3.util.Timeout` which gives you more 

fine-grained control over request timeouts. After the constructor has 

been parsed, this is always a `urllib3.util.Timeout` object. 

 

:param maxsize: 

Number of connections to save that can be reused. More than 1 is useful 

in multithreaded situations. If ``block`` is set to False, more 

connections will be created but they will not be saved once they've 

been used. 

 

:param block: 

If set to True, no more than ``maxsize`` connections will be used at 

a time. When no free connections are available, the call will block 

until a connection has been released. This is a useful side effect for 

particular multithreaded situations where one does not want to use more 

than maxsize connections per host to prevent flooding. 

 

:param headers: 

Headers to include with all requests, unless other headers are given 

explicitly. 

 

:param retries: 

Retry configuration to use by default with requests in this pool. 

 

:param _proxy: 

Parsed proxy URL, should not be used directly, instead, see 

:class:`urllib3.connectionpool.ProxyManager`" 

 

:param _proxy_headers: 

A dictionary with proxy headers, should not be used directly, 

instead, see :class:`urllib3.connectionpool.ProxyManager`" 

 

:param \\**conn_kw: 

Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`, 

:class:`urllib3.connection.HTTPSConnection` instances. 

""" 

 

scheme = "http" 

ConnectionCls = HTTPConnection 

ResponseCls = HTTPResponse 

 

def __init__( 

self, 

host, 

port=None, 

strict=False, 

timeout=Timeout.DEFAULT_TIMEOUT, 

maxsize=1, 

block=False, 

headers=None, 

retries=None, 

_proxy=None, 

_proxy_headers=None, 

**conn_kw 

): 

ConnectionPool.__init__(self, host, port) 

RequestMethods.__init__(self, headers) 

 

self.strict = strict 

 

if not isinstance(timeout, Timeout): 

timeout = Timeout.from_float(timeout) 

 

if retries is None: 

retries = Retry.DEFAULT 

 

self.timeout = timeout 

self.retries = retries 

 

self.pool = self.QueueCls(maxsize) 

self.block = block 

 

self.proxy = _proxy 

self.proxy_headers = _proxy_headers or {} 

 

# Fill the queue up so that doing get() on it will block properly 

for _ in xrange(maxsize): 

self.pool.put(None) 

 

# These are mostly for testing and debugging purposes. 

self.num_connections = 0 

self.num_requests = 0 

self.conn_kw = conn_kw 

 

if self.proxy: 

# Enable Nagle's algorithm for proxies, to avoid packet fragmentation. 

# We cannot know if the user has added default socket options, so we cannot replace the 

# list. 

self.conn_kw.setdefault("socket_options", []) 

 

def _new_conn(self): 

""" 

Return a fresh :class:`HTTPConnection`. 

""" 

self.num_connections += 1 

log.debug( 

"Starting new HTTP connection (%d): %s:%s", 

self.num_connections, 

self.host, 

self.port or "80", 

) 

 

conn = self.ConnectionCls( 

host=self.host, 

port=self.port, 

timeout=self.timeout.connect_timeout, 

strict=self.strict, 

**self.conn_kw 

) 

return conn 

 

def _get_conn(self, timeout=None): 

""" 

Get a connection. Will return a pooled connection if one is available. 

 

If no connections are available and :prop:`.block` is ``False``, then a 

fresh connection is returned. 

 

:param timeout: 

Seconds to wait before giving up and raising 

:class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and 

:prop:`.block` is ``True``. 

""" 

conn = None 

try: 

conn = self.pool.get(block=self.block, timeout=timeout) 

 

except AttributeError: # self.pool is None 

raise ClosedPoolError(self, "Pool is closed.") 

 

except queue.Empty: 

if self.block: 

raise EmptyPoolError( 

self, 

"Pool reached maximum size and no more connections are allowed.", 

) 

pass # Oh well, we'll create a new connection then 

 

# If this is a persistent connection, check if it got disconnected 

if conn and is_connection_dropped(conn): 

log.debug("Resetting dropped connection: %s", self.host) 

conn.close() 

if getattr(conn, "auto_open", 1) == 0: 

# This is a proxied connection that has been mutated by 

# httplib._tunnel() and cannot be reused (since it would 

# attempt to bypass the proxy) 

conn = None 

 

return conn or self._new_conn() 

 

def _put_conn(self, conn): 

""" 

Put a connection back into the pool. 

 

:param conn: 

Connection object for the current host and port as returned by 

:meth:`._new_conn` or :meth:`._get_conn`. 

 

If the pool is already full, the connection is closed and discarded 

because we exceeded maxsize. If connections are discarded frequently, 

then maxsize should be increased. 

 

If the pool is closed, then the connection will be closed and discarded. 

""" 

try: 

self.pool.put(conn, block=False) 

return # Everything is dandy, done. 

except AttributeError: 

# self.pool is None. 

pass 

except queue.Full: 

# This should never happen if self.block == True 

log.warning("Connection pool is full, discarding connection: %s", self.host) 

 

# Connection never got put back into the pool, close it. 

if conn: 

conn.close() 

 

    def _validate_conn(self, conn):
        """
        Called right before a request is made, after the socket is created.
        """
        # Intentional no-op hook; HTTPSConnectionPool overrides this to
        # connect early and check certificate verification.
        pass

 

    def _prepare_proxy(self, conn):
        # Nothing to do for HTTP connections; the HTTPS pool overrides this
        # to establish the CONNECT tunnel before the request is sent.
        pass

 

def _get_timeout(self, timeout): 

""" Helper that always returns a :class:`urllib3.util.Timeout` """ 

if timeout is _Default: 

return self.timeout.clone() 

 

if isinstance(timeout, Timeout): 

return timeout.clone() 

else: 

# User passed us an int/float. This is for backwards compatibility, 

# can be removed later 

return Timeout.from_float(timeout) 

 

def _raise_timeout(self, err, url, timeout_value): 

"""Is the error actually a timeout? Will raise a ReadTimeout or pass""" 

 

if isinstance(err, SocketTimeout): 

raise ReadTimeoutError( 

self, url, "Read timed out. (read timeout=%s)" % timeout_value 

) 

 

# See the above comment about EAGAIN in Python 3. In Python 2 we have 

# to specifically catch it and throw the timeout error 

if hasattr(err, "errno") and err.errno in _blocking_errnos: 

raise ReadTimeoutError( 

self, url, "Read timed out. (read timeout=%s)" % timeout_value 

) 

 

# Catch possible read timeouts thrown as SSL errors. If not the 

# case, rethrow the original. We need to do this because of: 

# http://bugs.python.org/issue10272 

if "timed out" in str(err) or "did not complete (read)" in str( 

err 

): # Python < 2.7.4 

raise ReadTimeoutError( 

self, url, "Read timed out. (read timeout=%s)" % timeout_value 

) 

 

    def _make_request(
        self, conn, method, url, timeout=_Default, chunked=False, **httplib_request_kw
    ):
        """
        Perform a request on a given urllib connection object taken from our
        pool.

        :param conn:
            a connection from one of our connection pools

        :param timeout:
            Socket timeout in seconds for the request. This can be a
            float or integer, which will set the same timeout value for
            the socket connect and the socket read, or an instance of
            :class:`urllib3.util.Timeout`, which gives you more fine-grained
            control over your timeouts.
        """
        self.num_requests += 1

        # Start the connect-phase clock; the read timeout is applied later,
        # after the request has been sent.
        timeout_obj = self._get_timeout(timeout)
        timeout_obj.start_connect()
        conn.timeout = timeout_obj.connect_timeout

        # Trigger any extra validation we need to do.
        try:
            self._validate_conn(conn)
        except (SocketTimeout, BaseSSLError) as e:
            # Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout.
            self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
            raise

        # conn.request() calls httplib.*.request, not the method in
        # urllib3.request. It also calls makefile (recv) on the socket.
        if chunked:
            conn.request_chunked(method, url, **httplib_request_kw)
        else:
            conn.request(method, url, **httplib_request_kw)

        # Reset the timeout for the recv() on the socket
        read_timeout = timeout_obj.read_timeout

        # App Engine doesn't have a sock attr
        if getattr(conn, "sock", None):
            # In Python 3 socket.py will catch EAGAIN and return None when you
            # try and read into the file pointer created by http.client, which
            # instead raises a BadStatusLine exception. Instead of catching
            # the exception and assuming all BadStatusLine exceptions are read
            # timeouts, check for a zero timeout before making the request.
            if read_timeout == 0:
                raise ReadTimeoutError(
                    self, url, "Read timed out. (read timeout=%s)" % read_timeout
                )
            if read_timeout is Timeout.DEFAULT_TIMEOUT:
                conn.sock.settimeout(socket.getdefaulttimeout())
            else:  # None or a value
                conn.sock.settimeout(read_timeout)

        # Receive the response from the server
        try:
            try:
                # Python 2.7, use buffering of HTTP responses
                httplib_response = conn.getresponse(buffering=True)
            except TypeError:
                # Python 3
                try:
                    httplib_response = conn.getresponse()
                except BaseException as e:
                    # Remove the TypeError from the exception chain in
                    # Python 3 (including for exceptions like SystemExit).
                    # Otherwise it looks like a bug in the code.
                    six.raise_from(e, None)
        except (SocketTimeout, BaseSSLError, SocketError) as e:
            self._raise_timeout(err=e, url=url, timeout_value=read_timeout)
            raise

        # AppEngine doesn't have a version attr.
        http_version = getattr(conn, "_http_vsn_str", "HTTP/?")
        log.debug(
            '%s://%s:%s "%s %s %s" %s %s',
            self.scheme,
            self.host,
            self.port,
            method,
            url,
            http_version,
            httplib_response.status,
            httplib_response.length,
        )

        # Header parsing problems are logged but deliberately non-fatal.
        try:
            assert_header_parsing(httplib_response.msg)
        except (HeaderParsingError, TypeError) as hpe:  # Platform-specific: Python 3
            log.warning(
                "Failed to parse headers (url=%s): %s",
                self._absolute_url(url),
                hpe,
                exc_info=True,
            )

        return httplib_response

 

def _absolute_url(self, path): 

return Url(scheme=self.scheme, host=self.host, port=self.port, path=path).url 

 

def close(self): 

""" 

Close all pooled connections and disable the pool. 

""" 

if self.pool is None: 

return 

# Disable access to the pool 

old_pool, self.pool = self.pool, None 

 

try: 

while True: 

conn = old_pool.get(block=False) 

if conn: 

conn.close() 

 

except queue.Empty: 

pass # Done. 

 

def is_same_host(self, url): 

""" 

Check if the given ``url`` is a member of the same host as this 

connection pool. 

""" 

if url.startswith("/"): 

return True 

 

# TODO: Add optional support for socket.gethostbyname checking. 

scheme, host, port = get_host(url) 

if host is not None: 

host = _normalize_host(host, scheme=scheme) 

 

# Use explicit default port for comparison when none is given 

if self.port and not port: 

port = port_by_scheme.get(scheme) 

elif not self.port and port == port_by_scheme.get(scheme): 

port = None 

 

return (scheme, host, port) == (self.scheme, self.host, self.port) 

 

    def urlopen(
        self,
        method,
        url,
        body=None,
        headers=None,
        retries=None,
        redirect=True,
        assert_same_host=True,
        timeout=_Default,
        pool_timeout=None,
        release_conn=None,
        chunked=False,
        body_pos=None,
        **response_kw
    ):
        """
        Get a connection from the pool and perform an HTTP request. This is the
        lowest level call for making a request, so you'll need to specify all
        the raw details.

        .. note::

           More commonly, it's appropriate to use a convenience method provided
           by :class:`.RequestMethods`, such as :meth:`request`.

        .. note::

           `release_conn` will only behave as expected if
           `preload_content=False` because we want to make
           `preload_content=False` the default behaviour someday soon without
           breaking backwards compatibility.

        :param method:
            HTTP request method (such as GET, POST, PUT, etc.)

        :param body:
            Data to send in the request body (useful for creating
            POST requests, see HTTPConnectionPool.post_url for
            more convenience).

        :param headers:
            Dictionary of custom headers to send, such as User-Agent,
            If-None-Match, etc. If None, pool headers are used. If provided,
            these headers completely replace any pool-specific headers.

        :param retries:
            Configure the number of retries to allow before raising a
            :class:`~urllib3.exceptions.MaxRetryError` exception.

            Pass ``None`` to retry until you receive a response. Pass a
            :class:`~urllib3.util.retry.Retry` object for fine-grained control
            over different types of retries.
            Pass an integer number to retry connection errors that many times,
            but no other types of errors. Pass zero to never retry.

            If ``False``, then retries are disabled and any exception is raised
            immediately. Also, instead of raising a MaxRetryError on redirects,
            the redirect response will be returned.

        :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.

        :param redirect:
            If True, automatically handle redirects (status codes 301, 302,
            303, 307, 308). Each redirect counts as a retry. Disabling retries
            will disable redirect, too.

        :param assert_same_host:
            If ``True``, will make sure that the host of the pool requests is
            consistent else will raise HostChangedError. When False, you can
            use the pool on an HTTP proxy and request foreign hosts.

        :param timeout:
            If specified, overrides the default timeout for this one
            request. It may be a float (in seconds) or an instance of
            :class:`urllib3.util.Timeout`.

        :param pool_timeout:
            If set and the pool is set to block=True, then this method will
            block for ``pool_timeout`` seconds and raise EmptyPoolError if no
            connection is available within the time period.

        :param release_conn:
            If False, then the urlopen call will not release the connection
            back into the pool once a response is received (but will release if
            you read the entire contents of the response such as when
            `preload_content=True`). This is useful if you're not preloading
            the response's content immediately. You will need to call
            ``r.release_conn()`` on the response ``r`` to return the connection
            back into the pool. If None, it takes the value of
            ``response_kw.get('preload_content', True)``.

        :param chunked:
            If True, urllib3 will send the body using chunked transfer
            encoding. Otherwise, urllib3 will send the body using the standard
            content-length form. Defaults to False.

        :param int body_pos:
            Position to seek to in file-like body in the event of a retry or
            redirect. Typically this won't need to be set because urllib3 will
            auto-populate the value when needed.

        :param \\**response_kw:
            Additional parameters are passed to
            :meth:`urllib3.response.HTTPResponse.from_httplib`
        """
        if headers is None:
            headers = self.headers

        if not isinstance(retries, Retry):
            retries = Retry.from_int(retries, redirect=redirect, default=self.retries)

        if release_conn is None:
            release_conn = response_kw.get("preload_content", True)

        # Check host
        if assert_same_host and not self.is_same_host(url):
            raise HostChangedError(self, url, retries)

        # Ensure that the URL we're connecting to is properly encoded
        if url.startswith("/"):
            url = six.ensure_str(_encode_target(url))
        else:
            url = six.ensure_str(parse_url(url).url)

        conn = None

        # Track whether `conn` needs to be released before
        # returning/raising/recursing. Update this variable if necessary, and
        # leave `release_conn` constant throughout the function. That way, if
        # the function recurses, the original value of `release_conn` will be
        # passed down into the recursive call, and its value will be respected.
        #
        # See issue #651 [1] for details.
        #
        # [1] <https://github.com/urllib3/urllib3/issues/651>
        release_this_conn = release_conn

        # Merge the proxy headers. Only do this in HTTP. We have to copy the
        # headers dict so we can safely change it without those changes being
        # reflected in anyone else's copy.
        if self.scheme == "http":
            headers = headers.copy()
            headers.update(self.proxy_headers)

        # Must keep the exception bound to a separate variable or else Python 3
        # complains about UnboundLocalError.
        err = None

        # Keep track of whether we cleanly exited the except block. This
        # ensures we do proper cleanup in finally.
        clean_exit = False

        # Rewind body position, if needed. Record current position
        # for future rewinds in the event of a redirect/retry.
        body_pos = set_file_position(body, body_pos)

        try:
            # Request a connection from the queue.
            timeout_obj = self._get_timeout(timeout)
            conn = self._get_conn(timeout=pool_timeout)

            conn.timeout = timeout_obj.connect_timeout

            is_new_proxy_conn = self.proxy is not None and not getattr(
                conn, "sock", None
            )
            if is_new_proxy_conn:
                self._prepare_proxy(conn)

            # Make the request on the httplib connection object.
            httplib_response = self._make_request(
                conn,
                method,
                url,
                timeout=timeout_obj,
                body=body,
                headers=headers,
                chunked=chunked,
            )

            # If we're going to release the connection in ``finally:``, then
            # the response doesn't need to know about the connection. Otherwise
            # it will also try to release it and we'll have a double-release
            # mess.
            response_conn = conn if not release_conn else None

            # Pass method to Response for length checking
            response_kw["request_method"] = method

            # Import httplib's response into our own wrapper object
            response = self.ResponseCls.from_httplib(
                httplib_response,
                pool=self,
                connection=response_conn,
                retries=retries,
                **response_kw
            )

            # Everything went great!
            clean_exit = True

        except EmptyPoolError:
            # Didn't get a connection from the pool, no need to clean up
            clean_exit = True
            release_this_conn = False
            raise

        except (
            TimeoutError,
            HTTPException,
            SocketError,
            ProtocolError,
            BaseSSLError,
            SSLError,
            CertificateError,
        ) as e:
            # Discard the connection for these exceptions. It will be
            # replaced during the next _get_conn() call.
            clean_exit = False
            # Wrap low-level errors in urllib3's own exception types; the
            # isinstance order matters (SSL before proxy before protocol).
            if isinstance(e, (BaseSSLError, CertificateError)):
                e = SSLError(e)
            elif isinstance(e, (SocketError, NewConnectionError)) and self.proxy:
                e = ProxyError("Cannot connect to proxy.", e)
            elif isinstance(e, (SocketError, HTTPException)):
                e = ProtocolError("Connection aborted.", e)

            # Retry.increment() raises MaxRetryError when the budget runs out.
            retries = retries.increment(
                method, url, error=e, _pool=self, _stacktrace=sys.exc_info()[2]
            )
            retries.sleep()

            # Keep track of the error for the retry warning.
            err = e

        finally:
            if not clean_exit:
                # We hit some kind of exception, handled or otherwise. We need
                # to throw the connection away unless explicitly told not to.
                # Close the connection, set the variable to None, and make sure
                # we put the None back in the pool to avoid leaking it.
                conn = conn and conn.close()
                release_this_conn = True

            if release_this_conn:
                # Put the connection back to be reused. If the connection is
                # expired then it will be None, which will get replaced with a
                # fresh connection during _get_conn.
                self._put_conn(conn)

        if not conn:
            # Try again
            log.warning(
                "Retrying (%r) after connection broken by '%r': %s", retries, err, url
            )
            return self.urlopen(
                method,
                url,
                body,
                headers,
                retries,
                redirect,
                assert_same_host,
                timeout=timeout,
                pool_timeout=pool_timeout,
                release_conn=release_conn,
                chunked=chunked,
                body_pos=body_pos,
                **response_kw
            )

        # Handle redirect?
        redirect_location = redirect and response.get_redirect_location()
        if redirect_location:
            if response.status == 303:
                # 303 See Other: the redirected request must use GET.
                method = "GET"

            try:
                retries = retries.increment(method, url, response=response, _pool=self)
            except MaxRetryError:
                if retries.raise_on_redirect:
                    response.drain_conn()
                    raise
                return response

            response.drain_conn()
            retries.sleep_for_retry(response)
            log.debug("Redirecting %s -> %s", url, redirect_location)
            return self.urlopen(
                method,
                redirect_location,
                body,
                headers,
                retries=retries,
                redirect=redirect,
                assert_same_host=assert_same_host,
                timeout=timeout,
                pool_timeout=pool_timeout,
                release_conn=release_conn,
                chunked=chunked,
                body_pos=body_pos,
                **response_kw
            )

        # Check if we should retry the HTTP response.
        has_retry_after = bool(response.getheader("Retry-After"))
        if retries.is_retry(method, response.status, has_retry_after):
            try:
                retries = retries.increment(method, url, response=response, _pool=self)
            except MaxRetryError:
                if retries.raise_on_status:
                    response.drain_conn()
                    raise
                return response

            response.drain_conn()
            retries.sleep(response)
            log.debug("Retry: %s", url)
            return self.urlopen(
                method,
                url,
                body,
                headers,
                retries=retries,
                redirect=redirect,
                assert_same_host=assert_same_host,
                timeout=timeout,
                pool_timeout=pool_timeout,
                release_conn=release_conn,
                chunked=chunked,
                body_pos=body_pos,
                **response_kw
            )

        return response

 

 

class HTTPSConnectionPool(HTTPConnectionPool): 

""" 

Same as :class:`.HTTPConnectionPool`, but HTTPS. 

 

When Python is compiled with the :mod:`ssl` module, then 

:class:`.VerifiedHTTPSConnection` is used, which *can* verify certificates, 

instead of :class:`.HTTPSConnection`. 

 

:class:`.VerifiedHTTPSConnection` uses one of ``assert_fingerprint``, 

``assert_hostname`` and ``host`` in this order to verify connections. 

If ``assert_hostname`` is False, no verification is done. 

 

The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``, 

``ca_cert_dir``, ``ssl_version``, ``key_password`` are only used if :mod:`ssl` 

is available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade 

the connection socket into an SSL socket. 

""" 

 

scheme = "https" 

ConnectionCls = HTTPSConnection 

 

    def __init__(
        self,
        host,
        port=None,
        strict=False,
        timeout=Timeout.DEFAULT_TIMEOUT,
        maxsize=1,
        block=False,
        headers=None,
        retries=None,
        _proxy=None,
        _proxy_headers=None,
        key_file=None,
        cert_file=None,
        cert_reqs=None,
        key_password=None,
        ca_certs=None,
        ssl_version=None,
        assert_hostname=None,
        assert_fingerprint=None,
        ca_cert_dir=None,
        **conn_kw
    ):

        # Delegate generic pooling options to the HTTP pool initializer.
        HTTPConnectionPool.__init__(
            self,
            host,
            port,
            strict,
            timeout,
            maxsize,
            block,
            headers,
            retries,
            _proxy,
            _proxy_headers,
            **conn_kw
        )

        # TLS-specific configuration, consumed by _prepare_conn()/_new_conn().
        self.key_file = key_file
        self.cert_file = cert_file
        self.cert_reqs = cert_reqs
        self.key_password = key_password
        self.ca_certs = ca_certs
        self.ca_cert_dir = ca_cert_dir
        self.ssl_version = ssl_version
        self.assert_hostname = assert_hostname
        self.assert_fingerprint = assert_fingerprint

 

    def _prepare_conn(self, conn):
        """
        Prepare the ``connection`` for :meth:`urllib3.util.ssl_wrap_socket`
        and establish the tunnel if proxy is used.
        """

        # Only verified connections understand certificate configuration;
        # other connection classes are passed through untouched.
        if isinstance(conn, VerifiedHTTPSConnection):
            conn.set_cert(
                key_file=self.key_file,
                key_password=self.key_password,
                cert_file=self.cert_file,
                cert_reqs=self.cert_reqs,
                ca_certs=self.ca_certs,
                ca_cert_dir=self.ca_cert_dir,
                assert_hostname=self.assert_hostname,
                assert_fingerprint=self.assert_fingerprint,
            )
            conn.ssl_version = self.ssl_version
        return conn

 

    def _prepare_proxy(self, conn):
        """
        Establish tunnel connection early, because otherwise httplib
        would improperly set Host: header to proxy's IP:port.
        """
        # Tunnel to the originally requested host (stored un-normalized by
        # ConnectionPool.__init__) through the proxy socket, then connect.
        conn.set_tunnel(self._proxy_host, self.port, self.proxy_headers)
        conn.connect()

 

def _new_conn(self): 

""" 

Return a fresh :class:`httplib.HTTPSConnection`. 

""" 

self.num_connections += 1 

log.debug( 

"Starting new HTTPS connection (%d): %s:%s", 

self.num_connections, 

self.host, 

self.port or "443", 

) 

 

if not self.ConnectionCls or self.ConnectionCls is DummyConnection: 

raise SSLError( 

"Can't connect to HTTPS URL because the SSL module is not available." 

) 

 

actual_host = self.host 

actual_port = self.port 

if self.proxy is not None: 

actual_host = self.proxy.host 

actual_port = self.proxy.port 

 

conn = self.ConnectionCls( 

host=actual_host, 

port=actual_port, 

timeout=self.timeout.connect_timeout, 

strict=self.strict, 

cert_file=self.cert_file, 

key_file=self.key_file, 

key_password=self.key_password, 

**self.conn_kw 

) 

 

return self._prepare_conn(conn) 

 

    def _validate_conn(self, conn):
        """
        Called right before a request is made, after the socket is created.
        """
        super(HTTPSConnectionPool, self)._validate_conn(conn)

        # Force connect early to allow us to validate the connection.
        if not getattr(conn, "sock", None):  # AppEngine might not have `.sock`
            conn.connect()

        # Warn (once per warning filter config) when certificate verification
        # is not in effect for this connection.
        if not conn.is_verified:
            warnings.warn(
                (
                    "Unverified HTTPS request is being made to host '%s'. "
                    "Adding certificate verification is strongly advised. See: "
                    "https://urllib3.readthedocs.io/en/latest/advanced-usage.html"
                    "#ssl-warnings" % conn.host
                ),
                InsecureRequestWarning,
            )

 

 

def connection_from_url(url, **kw):
    """
    Given a url, return an :class:`.ConnectionPool` instance of its host.

    This is a shortcut for not having to parse out the scheme, host, and port
    of the url before creating an :class:`.ConnectionPool` instance.

    :param url:
        Absolute URL string that must include the scheme. Port is optional.

    :param \\**kw:
        Passes additional parameters to the constructor of the appropriate
        :class:`.ConnectionPool`. Useful for specifying things like
        timeout, maxsize, headers, etc.

    Example::

        >>> conn = connection_from_url('http://google.com/')
        >>> r = conn.request('GET', '/')
    """
    scheme, host, port = get_host(url)
    port = port or port_by_scheme.get(scheme, 80)
    # Dispatch on scheme: anything that is not explicitly HTTPS gets a plain
    # HTTP pool.
    pool_cls = HTTPSConnectionPool if scheme == "https" else HTTPConnectionPool
    return pool_cls(host, port=port, **kw)

 

 

def _normalize_host(host, scheme):
    """
    Normalize hosts for comparisons and use with sockets.
    """

    host = normalize_host(host, scheme)

    # httplib doesn't like it when we include brackets in IPv6 addresses
    # Specifically, if we include brackets but also pass the port then
    # httplib crazily doubles up the square brackets on the Host header.
    # Instead, we need to make sure we never pass ``None`` as the port.
    # However, for backward compatibility reasons we can't actually
    # *assert* that.  See http://bugs.python.org/issue28539
    if host[:1] == "[" and host[-1:] == "]":
        host = host[1:-1]
    return host