2012-08-17 23:23:02 +02:00
|
|
|
import json
|
|
|
|
import sys
|
|
|
|
|
2019-08-29 09:39:19 +02:00
|
|
|
import http.client
|
2012-08-17 23:23:02 +02:00
|
|
|
import requests
|
2019-08-29 11:46:08 +02:00
|
|
|
from contextlib import contextmanager
|
2016-03-02 05:12:05 +01:00
|
|
|
from requests.adapters import HTTPAdapter
|
2018-02-22 12:52:57 +01:00
|
|
|
from requests.structures import CaseInsensitiveDict
|
2012-08-17 23:23:02 +02:00
|
|
|
|
2014-04-28 23:33:30 +02:00
|
|
|
from httpie import sessions
|
|
|
|
from httpie import __version__
|
2016-03-02 05:12:05 +01:00
|
|
|
from httpie.input import SSL_VERSION_ARG_MAPPING
|
2014-04-28 23:33:30 +02:00
|
|
|
from httpie.plugins import plugin_manager
|
2016-03-04 18:42:13 +01:00
|
|
|
from httpie.utils import repr_dict_nice
|
2012-08-17 23:23:02 +02:00
|
|
|
|
2019-08-29 10:44:59 +02:00
|
|
|
import zlib
|
|
|
|
|
2015-12-02 18:50:48 +01:00
|
|
|
try:
    # Silence noisy secure-connection warnings when urllib3 is available.
    # https://urllib3.readthedocs.io/en/latest/security.html
    # noinspection PyPackageRequirements
    import urllib3
    urllib3.disable_warnings()
except (ImportError, AttributeError):
    # In some rare cases the user may have an old version of requests or
    # urllib3 without a `disable_warnings` attribute. The warnings are
    # merely noisy, so execution carries on without failing.
    pass
|
2015-02-16 19:36:02 +01:00
|
|
|
|
|
|
|
|
2016-07-02 14:18:36 +02:00
|
|
|
# Content-Type sent for --form requests (unless files are uploaded).
FORM_CONTENT_TYPE = 'application/x-www-form-urlencoded; charset=utf-8'
# Content-Type sent for JSON request bodies.
JSON_CONTENT_TYPE = 'application/json'
# Accept header used when JSON is the (explicit or implied) request type.
JSON_ACCEPT = '{0}, */*'.format(JSON_CONTENT_TYPE)
# Default User-Agent header value.
DEFAULT_UA = 'HTTPie/%s' % __version__
|
2012-08-17 23:23:02 +02:00
|
|
|
|
|
|
|
|
2019-08-29 09:39:19 +02:00
|
|
|
# noinspection PyProtectedMember
@contextmanager
def max_headers(limit):
    """Temporarily override ``http.client``'s response-header-count limit.

    A falsy `limit` lifts the cap entirely (infinity); the previous value
    is always restored on exit, even if the body raises.

    <https://github.com/jakubroztocil/httpie/issues/802>
    """
    saved_limit = http.client._MAXHEADERS
    http.client._MAXHEADERS = limit if limit else float('Inf')
    try:
        yield
    finally:
        http.client._MAXHEADERS = saved_limit
|
|
|
|
|
|
|
|
|
2016-03-02 05:12:05 +01:00
|
|
|
class HTTPieHTTPAdapter(HTTPAdapter):
    """Transport adapter that pins the SSL/TLS protocol version.

    `ssl_version` is forwarded to urllib3's pool manager; `None` keeps
    the library default.
    """

    def __init__(self, ssl_version=None, **kwargs):
        # NOTE(review): set before super().__init__() on purpose —
        # requests' HTTPAdapter.__init__ appears to invoke
        # init_poolmanager(), which reads `self._ssl_version`; confirm
        # before reordering.
        self._ssl_version = ssl_version
        super().__init__(**kwargs)

    def init_poolmanager(self, *args, **kwargs):
        # Forward the pinned protocol version to every connection pool.
        kwargs['ssl_version'] = self._ssl_version
        super().init_poolmanager(*args, **kwargs)
|
2016-03-02 05:12:05 +01:00
|
|
|
|
|
|
|
|
2019-08-29 10:44:59 +02:00
|
|
|
class ContentCompressionHttpAdapter(HTTPAdapter):
    """Transport adapter that deflate-compresses request bodies.

    `compress` comes from the ``--compress`` flag count:
        1  — compress only when it actually shrinks the body;
        2+ — always compress.
    """

    def __init__(self, compress, **kwargs):
        # How many times --compress was given on the command line.
        self.compress = compress
        super().__init__(**kwargs)

    def send(self, request, **kwargs):
        """Compress `request.body` when enabled, then delegate the send.

        Updates `Content-Encoding` and `Content-Length` headers whenever
        the body is replaced with its deflated form.
        """
        if request.body and self.compress > 0:
            # Encode text bodies up front so the size comparison below
            # compares byte lengths against byte lengths. (Previously a
            # str body was measured in characters, which under-counted
            # the wire size of non-ASCII payloads.)
            # NOTE(review): file-like (streamed) bodies are not handled
            # here and would fail on `.encode()` — same as before.
            if isinstance(request.body, bytes):
                body_bytes = request.body
            else:
                body_bytes = request.body.encode()
            deflater = zlib.compressobj()
            deflated_data = deflater.compress(body_bytes)
            deflated_data += deflater.flush()
            if len(deflated_data) < len(body_bytes) or self.compress > 1:
                request.body = deflated_data
                request.headers['Content-Encoding'] = 'deflate'
                request.headers['Content-Length'] = str(len(deflated_data))
        return super().send(request, **kwargs)
|
2019-08-29 10:44:59 +02:00
|
|
|
|
|
|
|
|
|
|
|
def get_requests_session(ssl_version, compress):
    """Build a `requests.Session` with HTTPie's adapters mounted.

    Mounts the SSL-version-pinning adapter for HTTPS, optionally the
    compression adapter, and any transport-plugin adapters.
    """
    session = requests.Session()
    session.mount(
        'https://',
        HTTPieHTTPAdapter(ssl_version=ssl_version)
    )
    if compress:
        # NOTE(review): this re-mount replaces the HTTPieHTTPAdapter on
        # 'https://', so --ssl appears to be ignored when --compress is
        # used — confirm whether that is intended.
        compression_adapter = ContentCompressionHttpAdapter(compress)
        session.mount('http://', compression_adapter)
        session.mount('https://', compression_adapter)
    for plugin_cls in plugin_manager.get_transport_plugins():
        transport_plugin = plugin_cls()
        session.mount(
            prefix=transport_plugin.prefix,
            adapter=transport_plugin.get_adapter(),
        )
    return session
|
|
|
|
|
|
|
|
|
2012-09-17 02:15:00 +02:00
|
|
|
def get_response(args, config_dir):
    """Send the request and return a `requests.Response`."""
    ssl_version = (
        SSL_VERSION_ARG_MAPPING[args.ssl_version]
        if args.ssl_version
        else None
    )

    requests_session = get_requests_session(ssl_version, args.compress)
    requests_session.max_redirects = args.max_redirects

    with max_headers(args.max_headers):
        session_name = args.session or args.session_read_only
        if session_name:
            # Route through the (possibly read-only) named session.
            response = sessions.get_response(
                requests_session=requests_session,
                args=args,
                config_dir=config_dir,
                session_name=session_name,
                read_only=bool(args.session_read_only),
            )
        else:
            kwargs = get_requests_kwargs(args)
            if args.debug:
                dump_request(kwargs)
            response = requests_session.request(**kwargs)

    return response
|
|
|
|
|
|
|
|
|
2014-05-08 13:27:50 +02:00
|
|
|
def dump_request(kwargs):
    """Write the would-be `requests.request(**kwargs)` call to stderr
    (used by --debug)."""
    message = '\n>>> requests.request(**%s)\n\n' % repr_dict_nice(kwargs)
    sys.stderr.write(message)
|
2014-05-08 13:27:50 +02:00
|
|
|
|
|
|
|
|
2016-08-13 22:40:01 +02:00
|
|
|
def finalize_headers(headers):
    """Return a plain dict of headers with normalised values.

    Each non-``None`` value is stripped of surrounding whitespace and,
    when it is text, encoded to UTF-8 bytes. ``None`` values pass
    through unchanged.
    """
    def _finalize(value):
        if value is None:
            return None
        # >leading or trailing LWS MAY be removed without
        # >changing the semantics of the field value"
        # -https://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html
        # Also, requests raises `InvalidHeader` for leading spaces.
        stripped = value.strip()
        if isinstance(stripped, str):
            # See: https://github.com/jakubroztocil/httpie/issues/212
            return stripped.encode('utf8')
        return stripped

    return {name: _finalize(value) for name, value in headers.items()}
|
2014-04-26 18:16:30 +02:00
|
|
|
|
|
|
|
|
2014-05-08 13:27:50 +02:00
|
|
|
def get_default_headers(args):
    """Return the default headers implied by `args`
    (User-Agent plus JSON/form Accept and Content-Type)."""
    headers = CaseInsensitiveDict({
        'User-Agent': DEFAULT_UA
    })

    # Data without --form implies JSON, just like an explicit --json.
    implicit_json = args.data and not args.form
    if args.json or implicit_json:
        headers['Accept'] = JSON_ACCEPT
        if args.json or args.data:
            headers['Content-Type'] = JSON_CONTENT_TYPE
    elif args.form and not args.files:
        # If sending files, `requests` will set
        # the `Content-Type` for us.
        headers['Content-Type'] = FORM_CONTENT_TYPE

    return headers
|
|
|
|
|
|
|
|
|
|
|
|
def get_requests_kwargs(args, base_headers=None):
    """
    Translate our `args` into `requests.request` keyword arguments.

    Optional `base_headers` are applied beneath the command-line headers.
    """
    # Serialize JSON data, if needed.
    data = args.data
    auto_json = data and not args.form
    if (args.json or auto_json) and isinstance(data, dict):
        if data:
            data = json.dumps(data)
        else:
            # An empty string (not an empty dict) prevents requests
            # from assigning an empty list to `response.request.data`.
            data = ''

    # Finalize headers: defaults, then base, then command-line overrides.
    headers = get_default_headers(args)
    if base_headers:
        headers.update(base_headers)
    headers.update(args.headers)
    headers = finalize_headers(headers)

    # Client certificate, optionally paired with a separate key file.
    if not args.cert:
        cert = None
    elif args.cert_key:
        cert = (args.cert, args.cert_key)
    else:
        cert = args.cert

    # --verify accepts yes/no/true/false or a CA-bundle path.
    verify = {
        'yes': True,
        'true': True,
        'no': False,
        'false': False,
    }.get(args.verify.lower(), args.verify)

    return {
        'stream': True,
        'method': args.method.lower(),
        'url': args.url,
        'headers': headers,
        'data': data,
        'verify': verify,
        'cert': cert,
        'timeout': args.timeout or None,
        'auth': args.auth,
        'proxies': {p.key: p.value for p in args.proxy},
        'files': args.files,
        'allow_redirects': args.follow,
        'params': args.params,
    }
|