Hot-keys on this page

r m x p   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

1

2

3

4

5

6

7

8

9

10

11

12

13

14

15

16

17

18

19

20

21

22

23

24

25

26

27

28

29

30

31

32

33

34

35

36

37

38

39

40

41

42

43

44

45

46

47

48

49

50

51

52

53

54

55

56

57

58

59

60

61

62

63

64

65

66

67

68

69

70

71

72

73

74

75

76

77

78

79

80

81

82

83

84

85

86

87

88

89

90

91

92

93

94

95

96

97

98

99

100

101

102

103

104

105

106

107

108

109

110

111

112

113

114

115

116

117

118

119

120

121

122

123

124

125

126

127

128

129

130

131

132

133

134

135

136

137

138

139

140

141

142

143

144

145

146

147

148

149

150

151

152

153

154

155

156

157

158

159

160

161

162

163

164

165

166

167

168

169

170

171

172

173

174

175

176

177

178

179

180

181

182

183

184

185

186

187

188

189

190

191

192

193

194

195

196

197

198

199

200

201

202

203

204

205

206

207

208

209

210

211

212

213

214

215

216

217

218

219

220

221

222

223

224

225

226

227

228

229

230

231

232

233

234

235

236

237

238

239

240

241

242

243

244

245

246

247

248

249

250

251

252

253

254

255

256

257

258

259

260

261

262

263

264

265

266

267

268

269

270

271

272

273

274

275

276

277

278

279

280

281

282

283

284

285

286

287

288

289

290

291

292

293

294

295

296

297

298

299

300

301

302

303

304

305

306

307

308

309

310

311

312

313

314

315

316

317

318

319

320

321

322

323

324

325

326

327

328

329

330

331

332

333

334

335

336

337

338

339

340

341

342

343

344

345

346

347

348

349

350

351

352

353

354

355

356

357

358

359

360

361

362

363

364

365

366

367

368

369

370

371

372

373

374

375

376

377

378

379

380

381

382

383

384

385

386

387

388

389

390

391

392

393

394

395

396

397

398

399

400

401

402

403

404

405

406

407

408

409

410

411

412

413

414

415

416

417

418

419

420

421

422

423

424

425

426

427

428

429

430

431

432

433

434

435

436

437

# -*- coding: utf-8 -*- 

 

""" 

requests.adapters 

~~~~~~~~~~~~~~~~~ 

 

This module contains the transport adapters that Requests uses to define 

and maintain connections. 

""" 

 

import socket 

 

from .models import Response 

from .packages.urllib3.poolmanager import PoolManager, proxy_from_url 

from .packages.urllib3.response import HTTPResponse 

from .packages.urllib3.util import Timeout as TimeoutSauce 

from .packages.urllib3.util.retry import Retry 

from .compat import urlparse, basestring 

from .utils import (DEFAULT_CA_BUNDLE_PATH, get_encoding_from_headers, 

                    prepend_scheme_if_needed, get_auth_from_url, urldefragauth) 

from .structures import CaseInsensitiveDict 

from .packages.urllib3.exceptions import ConnectTimeoutError 

from .packages.urllib3.exceptions import HTTPError as _HTTPError 

from .packages.urllib3.exceptions import MaxRetryError 

from .packages.urllib3.exceptions import ProxyError as _ProxyError 

from .packages.urllib3.exceptions import ProtocolError 

from .packages.urllib3.exceptions import ReadTimeoutError 

from .packages.urllib3.exceptions import SSLError as _SSLError 

from .packages.urllib3.exceptions import ResponseError 

from .cookies import extract_cookies_to_jar 

from .exceptions import (ConnectionError, ConnectTimeout, ReadTimeout, SSLError, 

                         ProxyError, RetryError) 

from .auth import _basic_auth_str 

 

DEFAULT_POOLBLOCK = False 

DEFAULT_POOLSIZE = 10 

DEFAULT_RETRIES = 0 

 

 

class BaseAdapter(object):
    """Abstract base class for all transport adapters.

    Concrete adapters must provide :meth:`send` and :meth:`close`.
    """

    def __init__(self):
        super(BaseAdapter, self).__init__()

    def send(self):
        """Dispatch a prepared request. Subclasses must override."""
        raise NotImplementedError

    def close(self):
        """Release any held resources. Subclasses must override."""
        raise NotImplementedError

 

 

class HTTPAdapter(BaseAdapter):
    """The built-in HTTP Adapter for urllib3.

    Provides a general-case interface for Requests sessions to contact HTTP and
    HTTPS urls by implementing the Transport Adapter interface. This class will
    usually be created by the :class:`Session <Session>` class under the
    covers.

    :param pool_connections: The number of urllib3 connection pools to cache.
    :param pool_maxsize: The maximum number of connections to save in the pool.
    :param int max_retries: The maximum number of retries each connection
        should attempt. Note, this applies only to failed DNS lookups, socket
        connections and connection timeouts, never to requests where data has
        made it to the server. By default, Requests does not retry failed
        connections. If you need granular control over the conditions under
        which we retry a request, import urllib3's ``Retry`` class and pass
        that instead.
    :param pool_block: Whether the connection pool should block for connections.

    Usage::

      >>> import requests
      >>> s = requests.Session()
      >>> a = requests.adapters.HTTPAdapter(max_retries=3)
      >>> s.mount('http://', a)
    """
    __attrs__ = ['max_retries', 'config', '_pool_connections', '_pool_maxsize',
                 '_pool_block']

    def __init__(self, pool_connections=DEFAULT_POOLSIZE,
                 pool_maxsize=DEFAULT_POOLSIZE, max_retries=DEFAULT_RETRIES,
                 pool_block=DEFAULT_POOLBLOCK):
        if max_retries == DEFAULT_RETRIES:
            # Historical default: zero retries on connect errors, and never
            # retry once the server has started responding.
            self.max_retries = Retry(0, read=False)
        else:
            self.max_retries = Retry.from_int(max_retries)
        self.config = {}
        self.proxy_manager = {}

        super(HTTPAdapter, self).__init__()

        self._pool_connections = pool_connections
        self._pool_maxsize = pool_maxsize
        self._pool_block = pool_block

        self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block)

    def __getstate__(self):
        # Pickle only the attributes named in __attrs__; the pool manager is
        # rebuilt in __setstate__.
        return dict((attr, getattr(self, attr, None)) for attr in
                    self.__attrs__)

    def __setstate__(self, state):
        # Can't handle by adding 'proxy_manager' to self.__attrs__ because
        # self.poolmanager uses a lambda function, which isn't pickleable.
        self.proxy_manager = {}
        self.config = {}

        for attr, value in state.items():
            setattr(self, attr, value)

        self.init_poolmanager(self._pool_connections, self._pool_maxsize,
                              block=self._pool_block)

    def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs):
        """Initializes a urllib3 PoolManager.

        This method should not be called from user code, and is only
        exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param connections: The number of urllib3 connection pools to cache.
        :param maxsize: The maximum number of connections to save in the pool.
        :param block: Block when no free connections are available.
        :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager.
        """
        # save these values for pickling
        self._pool_connections = connections
        self._pool_maxsize = maxsize
        self._pool_block = block

        self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize,
                                       block=block, strict=True, **pool_kwargs)

    def proxy_manager_for(self, proxy, **proxy_kwargs):
        """Return urllib3 ProxyManager for the given proxy.

        This method should not be called from user code, and is only
        exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param proxy: The proxy to return a urllib3 ProxyManager for.
        :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager.
        :returns: ProxyManager
        """
        # Lazily create and cache one manager per distinct proxy URL.
        if proxy not in self.proxy_manager:
            proxy_headers = self.proxy_headers(proxy)
            self.proxy_manager[proxy] = proxy_from_url(
                proxy,
                proxy_headers=proxy_headers,
                num_pools=self._pool_connections,
                maxsize=self._pool_maxsize,
                block=self._pool_block,
                **proxy_kwargs)

        return self.proxy_manager[proxy]

    def cert_verify(self, conn, url, verify, cert):
        """Verify a SSL certificate. This method should not be called from user
        code, and is only exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param conn: The urllib3 connection object associated with the cert.
        :param url: The requested URL.
        :param verify: Whether we should actually verify the certificate.
        :param cert: The SSL certificate to verify.
        """
        if url.lower().startswith('https') and verify:

            cert_loc = None

            # Allow self-specified cert location: any truthy non-True value
            # of `verify` is treated as a CA bundle path.
            if verify is not True:
                cert_loc = verify

            if not cert_loc:
                cert_loc = DEFAULT_CA_BUNDLE_PATH

            if not cert_loc:
                # IOError is more precise than the generic Exception raised
                # previously, and remains catchable by `except Exception`.
                raise IOError("Could not find a suitable SSL CA certificate bundle.")

            conn.cert_reqs = 'CERT_REQUIRED'
            conn.ca_certs = cert_loc
        else:
            conn.cert_reqs = 'CERT_NONE'
            conn.ca_certs = None

        if cert:
            # `cert` may be a (cert_file, key_file) pair or a single path.
            if not isinstance(cert, basestring):
                conn.cert_file = cert[0]
                conn.key_file = cert[1]
            else:
                conn.cert_file = cert

    def build_response(self, req, resp):
        """Builds a :class:`Response <requests.Response>` object from a urllib3
        response. This should not be called from user code, and is only exposed
        for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`

        :param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response.
        :param resp: The urllib3 response object.
        """
        response = Response()

        # Fallback to None if there's no status_code, for whatever reason.
        response.status_code = getattr(resp, 'status', None)

        # Make headers case-insensitive.
        response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {}))

        # Set encoding.
        response.encoding = get_encoding_from_headers(response.headers)
        response.raw = resp
        response.reason = response.raw.reason

        if isinstance(req.url, bytes):
            response.url = req.url.decode('utf-8')
        else:
            response.url = req.url

        # Add new cookies from the server.
        extract_cookies_to_jar(response.cookies, req, resp)

        # Give the Response some context.
        response.request = req
        response.connection = self

        return response

    def get_connection(self, url, proxies=None):
        """Returns a urllib3 connection for the given URL. This should not be
        called from user code, and is only exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param url: The URL to connect to.
        :param proxies: (optional) A Requests-style dictionary of proxies used on this request.
        """
        proxies = proxies or {}
        proxy = proxies.get(urlparse(url.lower()).scheme)

        if proxy:
            proxy = prepend_scheme_if_needed(proxy, 'http')
            proxy_manager = self.proxy_manager_for(proxy)
            conn = proxy_manager.connection_from_url(url)
        else:
            # Only scheme should be lower case
            parsed = urlparse(url)
            url = parsed.geturl()
            conn = self.poolmanager.connection_from_url(url)

        return conn

    def close(self):
        """Disposes of any internal state.

        Currently, this just closes the PoolManager, which closes pooled
        connections.
        """
        self.poolmanager.clear()

    def request_url(self, request, proxies):
        """Obtain the url to use when making the final request.

        If the message is being sent through a HTTP proxy, the full URL has to
        be used. Otherwise, we should only use the path portion of the URL.

        This should not be called from user code, and is only exposed for use
        when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param proxies: A dictionary of schemes to proxy URLs.
        """
        proxies = proxies or {}
        scheme = urlparse(request.url).scheme
        proxy = proxies.get(scheme)

        if proxy and scheme != 'https':
            # Plain-HTTP proxying requires the absolute URL (minus fragment
            # and auth); HTTPS tunnels via CONNECT and uses the path form.
            url = urldefragauth(request.url)
        else:
            url = request.path_url

        return url

    def add_headers(self, request, **kwargs):
        """Add any headers needed by the connection. As of v2.0 this does
        nothing by default, but is left for overriding by users that subclass
        the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        This should not be called from user code, and is only exposed for use
        when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param request: The :class:`PreparedRequest <PreparedRequest>` to add headers to.
        :param kwargs: The keyword arguments from the call to send().
        """
        pass

    def proxy_headers(self, proxy):
        """Returns a dictionary of the headers to add to any request sent
        through a proxy. This works with urllib3 magic to ensure that they are
        correctly sent to the proxy, rather than in a tunnelled request if
        CONNECT is being used.

        This should not be called from user code, and is only exposed for use
        when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param proxy: The url of the proxy being used for this request.
        :returns: dict of headers (Proxy-Authorization when the proxy URL
            embeds credentials, otherwise empty).
        """
        headers = {}
        username, password = get_auth_from_url(proxy)

        if username and password:
            headers['Proxy-Authorization'] = _basic_auth_str(username,
                                                             password)

        return headers

    def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):
        """Sends PreparedRequest object. Returns Response object.

        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param stream: (optional) Whether to stream the request content.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a (`connect timeout, read
            timeout <user/advanced.html#timeouts>`_) tuple.
        :type timeout: float or tuple
        :param verify: (optional) Whether to verify SSL certificates.
        :param cert: (optional) Any user-provided SSL certificate to be trusted.
        :param proxies: (optional) The proxies dictionary to apply to the request.
        """

        conn = self.get_connection(request.url, proxies)

        self.cert_verify(conn, request.url, verify, cert)
        url = self.request_url(request, proxies)
        self.add_headers(request)

        # A body without an explicit Content-Length must be streamed chunked.
        chunked = not (request.body is None or 'Content-Length' in request.headers)

        if isinstance(timeout, tuple):
            try:
                connect, read = timeout
                timeout = TimeoutSauce(connect=connect, read=read)
            except ValueError:
                # A tuple of the wrong arity fails to unpack above; report
                # the expected shape instead of the raw unpacking error.
                err = ("Invalid timeout {0}. Pass a (connect, read) "
                       "timeout tuple, or a single float to set "
                       "both timeouts to the same value".format(timeout))
                raise ValueError(err)
        else:
            timeout = TimeoutSauce(connect=timeout, read=timeout)

        # Send the request.
        try:
            if not chunked:
                resp = conn.urlopen(
                    method=request.method,
                    url=url,
                    body=request.body,
                    headers=request.headers,
                    redirect=False,
                    assert_same_host=False,
                    preload_content=False,
                    decode_content=False,
                    retries=self.max_retries,
                    timeout=timeout
                )

            else:
                # Chunked transfer: drive the low-level connection by hand.
                if hasattr(conn, 'proxy_pool'):
                    conn = conn.proxy_pool

                low_conn = conn._get_conn(timeout=timeout)

                try:
                    low_conn.putrequest(request.method,
                                        url,
                                        skip_accept_encoding=True)

                    for header, value in request.headers.items():
                        low_conn.putheader(header, value)

                    low_conn.endheaders()

                    # Emit each body chunk in HTTP chunked encoding:
                    # hex length, CRLF, data, CRLF; then the zero-length
                    # terminator chunk.
                    for i in request.body:
                        low_conn.send(hex(len(i))[2:].encode('utf-8'))
                        low_conn.send(b'\r\n')
                        low_conn.send(i)
                        low_conn.send(b'\r\n')
                    low_conn.send(b'0\r\n\r\n')

                    r = low_conn.getresponse()
                    resp = HTTPResponse.from_httplib(
                        r,
                        pool=conn,
                        connection=low_conn,
                        preload_content=False,
                        decode_content=False
                    )
                except:
                    # If we hit any problems here, clean up the connection.
                    # Then, reraise so that we can handle the actual exception.
                    # (Deliberately bare so even BaseException closes the
                    # socket before propagating.)
                    low_conn.close()
                    raise
                else:
                    # All is well, return the connection to the pool.
                    conn._put_conn(low_conn)

        except (ProtocolError, socket.error) as err:
            raise ConnectionError(err, request=request)

        except MaxRetryError as e:
            if isinstance(e.reason, ConnectTimeoutError):
                raise ConnectTimeout(e, request=request)

            if isinstance(e.reason, ResponseError):
                raise RetryError(e, request=request)

            raise ConnectionError(e, request=request)

        except _ProxyError as e:
            raise ProxyError(e)

        except (_SSLError, _HTTPError) as e:
            if isinstance(e, _SSLError):
                raise SSLError(e, request=request)
            elif isinstance(e, ReadTimeoutError):
                raise ReadTimeout(e, request=request)
            else:
                raise

        return self.build_response(request, resp)