Mirror of https://github.com/httpie/cli.git (synced 2025-08-11 11:13:58 +02:00)

Compare commits
141 commits (SHA1):

753aa69a8a, f7e62336db, a41dd7ac6d, 4a6f32a0f4, 548bef7dff, 6c2001d1f5, 4029dbf309, 478d654945, 66bdbc3745, 316e3f45a9,
da0eb7db79, 9338aadd75, dc7d03e6b8, 898408c20c, 47de4e2c9c, f74424ef03, 8a9cedb16e, ff9f23da5b, 50810e5bd9, 9b586b953b,
149cbc1604, d3df59c8af, 2057e13a1d, 4957686bcd, 4c0d7d526f, 0b3bad9c81, 1ed43c1a1e, bf03937f06, 4660da949f, 86256af1df,
8bf7f8219c, a5522b8233, b92a3a6d95, 9098e5b6e8, 68640a81b3, 27f08920c4, c01dd8d64a, 76feea2f68, 22a10aec4a, fa334bdf4d,
f6724452cf, 07de32c406, 1fbe7a6121, 49e44d9b7e, 193683afbb, 126b1da515, 969b310ea9, dd2c89412c, 381e60f9d8, 44e409693b,
4e58a3849a, 94c77c9bfc, 747b87c4e6, c7657e3c4b, 4615011f2e, 4b1a04e5ed, e045ca6bd8, 52e46bedda, 67ad5980b2, 00d85a4b97,
90d34ffd0d, 8905b4fc72, a5b98818c8, 5e7bb1f6dc, 4117d99dd0, 49604e7c29, 72d371c467, a8c9441f71, e13f65ace1, a1682d0d2e,
923a8b71bd, 6eed0d92eb, edf87c3392, f73bfea6b8, 16635870e3, f5bc081fda, 1efea59a8d, 098e1d3100, a8ddb8301d, a770d79aef,
b53d483163, f45cc0eec0, f26f2f1438, 851412c698, 26a76e8243, f5cfd0143b, 9391c89205, 76ebe7c6db, 7af08b6faa, 9944def703,
728a1a195b, 2646ebaaed, fba3912f2e, 0572158ba1, 0a673613ef, 19f760450f, 35da44309f, ced6e33230, 87042f65c9, c271715a98,
57fc606f6b, 7d82b853ae, 16f23d8147, ab7915d9e0, 1d6fcfff73, 76a3125153, 24d6331d15, 06ea36aaa4, c2d70e2bb1, 40948dbd2e,
2dba176aa8, 54e3e5bca4, 533a662651, 1ce02ebbd5, 8a7f4c0d6e, f29c458611, 2d7df0afb4, 16a7d0a719, 0cffda86f6, f42ee6da85,
deeb7cbbac, 12f2fb4a92, 489bd64295, 9b8cb42efd, 2036337a53, 5ca8bec9ff, df79792fd9, 5a82c79fdf, 05b321d38f, 681b652bf9,
85b3a016eb, 929ead437a, 36de166b28, 7bc2de2f9d, cb7ead04e2, cd2ca41f48, c71de95505, 6ab03b21b4, 50196be0f2, 41d640920c,
3179631603
.gitignore (vendored, 4 additions)

@@ -3,3 +3,7 @@ httpie.egg-info
 build
 *.pyc
 .tox
+README.html
+.coverage
+htmlcov
+
.travis.yml

@@ -3,7 +3,6 @@ python:
   - 2.6
   - 2.7
   - pypy
-  - 3.1
   - 3.2
 script: python setup.py test
 install:
AUTHORS.rst (new file, 29 lines)

@@ -0,0 +1,29 @@
+==============
+HTTPie authors
+==============
+
+* `Jakub Roztocil <https://github.com/jkbr>`_
+
+
+Patches and ideas
+-----------------
+
+* `Hank Gay <https://github.com/gthank>`_
+* `Jake Basile <https://github.com/jakebasile>`_
+* `Vladimir Berkutov <https://github.com/dair-targ>`_
+* `Jakob Kramer <https://github.com/gandaro>`_
+* `Chris Faulkner <https://github.com/faulkner>`_
+* `Alen Mujezinovic <https://github.com/flashingpumpkin>`_
+* `Praful Mathur <https://github.com/tictactix>`_
+* `Marc Abramowitz <https://github.com/msabramo>`_
+* `Ismail Badawi <https://github.com/isbadawi>`_
+* `Laurent Bachelier <https://github.com/laurentb>`_
+* `Isman Firmansyah <https://github.com/iromli>`_
+* `Simon Olofsson <https://github.com/simono>`_
+* `Churkin Oleg <https://github.com/Bahus>`_
+* `Jökull Sólberg Auðunsson <https://github.com/jokull>`_
+* `Matthew M. Boedicker <https://github.com/mmb>`_
+* `marblar <https://github.com/marblar>`_
+* `Tomek Wójcik <https://github.com/tomekwojcik>`_
+* `Davey Shafik <https://github.com/dshafik>`_
+* `cido <https://github.com/cido>`_
README.rst (1208 lines changed; diff suppressed because it is too large)
httpie.png (binary file not shown; size 135 KiB before, 446 KiB after)
httpie/__init__.py

@@ -3,5 +3,16 @@ HTTPie - cURL for humans.

 """
 __author__ = 'Jakub Roztocil'
-__version__ = '0.2.2'
+__version__ = '0.3.0'
 __licence__ = 'BSD'
+
+
+class exit:
+    OK = 0
+    ERROR = 1
+    ERROR_TIMEOUT = 2
+
+    # Used only when requested with --check-status:
+    ERROR_HTTP_3XX = 3
+    ERROR_HTTP_4XX = 4
+    ERROR_HTTP_5XX = 5
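The new `exit` constants are what `--check-status` (added further down in this compare) reports through the process exit code. A minimal sketch of a caller branching on them; the endpoint URL and the use of `subprocess` are illustrative, not part of the diff:

```python
import subprocess

from httpie import exit  # OK, ERROR, ERROR_TIMEOUT, ERROR_HTTP_3XX/4XX/5XX

# --check-status makes HTTPie reflect HTTP errors in its exit code.
status = subprocess.call(
    ['http', '--check-status', '--body', 'http://example.org/health'])

if status == exit.OK:
    print('2xx: all good')
elif status in (exit.ERROR_HTTP_4XX, exit.ERROR_HTTP_5XX):
    print('the server reported an HTTP error')
elif status == exit.ERROR_TIMEOUT:
    print('the request timed out')
else:
    print('other failure (network error, redirect without --follow, ...)')
```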
httpie/__main__.py (rewritten: 124 lines reduced to 10)

The previous in-module implementation (the TYPE_FORM/TYPE_JSON constants and the
_get_response(), _get_output(), and main() functions built on the cliparse,
httpmessage, cli, and pretty modules) is removed; its responsibilities now live
in httpie.core and httpie.client below. The module becomes an entry-point shim:

#!/usr/bin/env python
"""The main entry point. Invoke as `http' or `python -m httpie'.

"""
import sys
from .core import main


if __name__ == '__main__':
    sys.exit(main())
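Because `main()` now returns the exit status instead of calling `sys.exit()` itself, the same entry point can be driven in-process. A small sketch assuming only the `httpie.core.main` signature shown below in this compare; the argument list and URL are illustrative:

```python
import sys

from httpie.core import main

# Run HTTPie in-process, exactly as `python -m httpie` now does, and hand the
# returned status back to the shell.
status = main(args=['--headers', 'httpbin.org/get'])
sys.exit(status)
```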
httpie/cli.py (453 lines changed)

The flat v0.2.2 parser definition is rewritten on top of the new httpie.input
module. The module docstring now reads "CLI arguments definition." and notes
that the CLI interface may change before reaching v1.0; cliparse.Parser is
replaced by input.Parser, the epilog points bug reports to
https://github.com/jkbr/httpie/issues, and the options are organized into
argument groups (the REQUEST ITEM grammar is illustrated in the sketch after
this list):

* Positional arguments: METHOD (optional; guessed as POST when there is data
  to send, otherwise GET), URL (protocol defaults to http:// when omitted),
  and REQUEST ITEM (header:value, field=value, raw JSON field:=value, query
  parameter name==value, or file field field@/path/to/file; a backslash
  escapes a colliding separator in the field name).
* Predefined content types: --json/-j (default; serializes data items as a
  JSON object and sets Content-Type and Accept to application/json unless
  specified) and --form/-f (form fields; any file field turns the request
  into multipart/form-data).
* Output processing: --output/-o FILE (shown only on Windows, where piping
  output would corrupt the saved data), --pretty {all,colors,format,none}
  (defaults to "all" for terminal output and "none" for redirected output),
  and --style/-s STYLE (defaults to DEFAULT_STYLE; requires a 256-color $TERM
  such as xterm-256color).
* Output options: --print/-p WHAT using the H/B/h/b codes, plus the shortcuts
  --verbose/-v (whole request and response), --headers/-h, and --body/-b; the
  default is "hb" on a terminal and body-only when output is redirected.
  --stream/-S streams the output by line, tail -f style, for long-lived
  prettified responses and for more frequent, smaller flushes.
* Sessions: --session SESSION_NAME and --session-read-only SESSION_NAME;
  within a session, custom headers, auth credentials, and cookies sent by the
  server persist between requests, stored in
  DEFAULT_SESSIONS_DIR/<HOST>/<SESSION_NAME>.json.
* Authentication: --auth/-a USER[:PASS] (prompts for the password when only
  the username is given) and --auth-type {basic,digest} (default "basic").
* Network: --proxy PROTOCOL:HOST (repeatable), --follow (replaces the old
  --allow-redirects), --verify (default "yes"), --timeout SECONDS (default
  30), and the new --check-status (exit with 3/4/5 on 3xx/4xx/5xx responses,
  writing an error to stderr when stdout is redirected).
* Troubleshooting: --help, --version, --traceback, and the new --debug
  (traceback plus extra information useful for debugging HTTPie and for bug
  reports).
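The REQUEST ITEM grammar is the heart of the new CLI: the separator decides whether an item becomes a header, a query parameter, a data field, a raw JSON field, or a file. A rough sketch of that routing using the KeyValueArgType referenced above (attribute names follow the KeyValue class in httpie/input.py; the sample items are illustrative):

```python
from httpie.input import KeyValueArgType, SEP_GROUP_ITEMS

parse_item = KeyValueArgType(*SEP_GROUP_ITEMS)

for raw in ('X-API-Key:123', 'name=John', 'age:=29', 'q==httpie'):
    item = parse_item(raw)
    # The separator (':', '=', ':=', '==', or '@') is what later decides
    # whether the pair lands in headers, data, params, or files.
    print(item.key, item.sep, item.value)
```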
httpie/client.py (new file, 88 lines)

import json
import sys
from pprint import pformat

import requests
import requests.auth
from requests.defaults import defaults

from . import sessions
from . import __version__


FORM = 'application/x-www-form-urlencoded; charset=utf-8'
JSON = 'application/json; charset=utf-8'
DEFAULT_UA = 'HTTPie/%s' % __version__


def get_response(args, config_dir):
    """Send the request and return a `request.Response`."""

    requests_kwargs = get_requests_kwargs(args)

    if args.debug:
        sys.stderr.write(
            '\n>>> requests.request(%s)\n\n' % pformat(requests_kwargs))

    if not args.session and not args.session_read_only:
        return requests.request(**requests_kwargs)
    else:
        return sessions.get_response(
            config_dir=config_dir,
            name=args.session or args.session_read_only,
            request_kwargs=requests_kwargs,
            read_only=bool(args.session_read_only),
        )


def get_requests_kwargs(args):
    """Translate our `args` into `requests.request` keyword arguments."""

    base_headers = defaults['base_headers'].copy()
    base_headers['User-Agent'] = DEFAULT_UA

    auto_json = args.data and not args.form
    if args.json or auto_json:
        base_headers['Accept'] = 'application/json'
        if args.data:
            base_headers['Content-Type'] = JSON

        if isinstance(args.data, dict):
            # If not empty, serialize the data `dict` parsed from arguments.
            # Otherwise set it to `None` avoid sending "{}".
            args.data = json.dumps(args.data) if args.data else None

    elif args.form and not args.files:
        # If sending files, `requests` will set
        # the `Content-Type` for us.
        base_headers['Content-Type'] = FORM

    credentials = None
    if args.auth:
        credentials = {
            'basic': requests.auth.HTTPBasicAuth,
            'digest': requests.auth.HTTPDigestAuth,
        }[args.auth_type](args.auth.key, args.auth.value)

    kwargs = {
        'prefetch': False,
        'method': args.method.lower(),
        'url': args.url,
        'headers': args.headers,
        'data': args.data,
        'verify': {
            'yes': True,
            'no': False
        }.get(args.verify, args.verify),
        'timeout': args.timeout,
        'auth': credentials,
        'proxies': dict((p.key, p.value) for p in args.proxy),
        'files': args.files,
        'allow_redirects': args.follow,
        'params': args.params,
        'config': {
            'base_headers': base_headers
        }
    }

    return kwargs
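get_requests_kwargs() is a pure translation step, so the keyword arguments that will reach requests.request() can be inspected without sending anything. A hedged sketch: the Namespace fields mirror the CLI arguments defined in httpie/cli.py, the values are made up, and requests.defaults assumes the requests 0.x series this code targets.

```python
from argparse import Namespace
from pprint import pprint

from httpie.client import get_requests_kwargs

args = Namespace(
    json=True, form=False, data={'name': 'John'}, files={}, params={},
    headers={}, method='POST', url='http://example.org/people',
    auth=None, auth_type='basic', verify='yes', timeout=30,
    proxy=[], follow=False,
)

# The dict below is exactly what get_response() would pass to requests.request().
pprint(get_requests_kwargs(args))
```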
httpie/cliparse.py (deleted, 230 lines)

The old argument-parsing module is removed in favour of httpie/input.py. It
contained the original separator constants (':' for headers, '=' for data,
':=' for raw JSON, '@' for files), the H/B/h/b output-option codes, the
PRETTIFY_STDOUT_TTY_ONLY sentinel, the 'HTTPie/<version>' default User-Agent,
a Parser subclass of argparse.ArgumentParser (stdin handling, HTTP method
guessing, item parsing, and output/auth validation), the ParseError exception,
the KeyValue namedtuple, the KeyValueType argparse type with backslash-escape
handling, and the parse_items() helper that routed items into headers, data,
and files.
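The backslash escaping that KeyValueType implemented is also promised by the new ITEM help text, so the expected behaviour can be illustrated with the replacement parser; the printed result is an assumption based on that documented contract rather than code from the diff:

```python
from httpie.input import KeyValueArgType, SEP_GROUP_ITEMS

parse_item = KeyValueArgType(*SEP_GROUP_ITEMS)

# The backslash keeps the first '=' inside the field name, so this should be
# a data field named 'path=to' with the value 'value' (assumed behaviour,
# matching the documented escaping rule).
item = parse_item('path\\=to=value')
print(item.key, item.value)
```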
httpie/config.py (new file, 83 lines)

import os
import json
import errno

from . import __version__
from requests.compat import is_windows


DEFAULT_CONFIG_DIR = os.environ.get(
    'HTTPIE_CONFIG_DIR',
    os.path.expanduser('~/.httpie') if not is_windows else
    os.path.expandvars(r'%APPDATA%\\httpie')
)


class BaseConfigDict(dict):

    name = None
    help = None
    directory = DEFAULT_CONFIG_DIR

    def __init__(self, directory=None, *args, **kwargs):
        super(BaseConfigDict, self).__init__(*args, **kwargs)
        if directory:
            self.directory = directory

    def __getattr__(self, item):
        return self[item]

    @property
    def path(self):
        try:
            os.makedirs(self.directory, mode=0o700)
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise
        return os.path.join(self.directory, self.name + '.json')

    @property
    def is_new(self):
        return not os.path.exists(self.path)

    def load(self):
        try:
            with open(self.path, 'rt') as f:
                try:
                    data = json.load(f)
                except ValueError as e:
                    raise ValueError(
                        'Invalid %s JSON: %s [%s]' %
                        (type(self).__name__, e.message, self.path)
                    )
                self.update(data)
        except IOError as e:
            if e.errno != errno.ENOENT:
                raise

    def save(self):
        self['__version__'] = __version__
        with open(self.path, 'w') as f:
            json.dump(self, f, indent=4, sort_keys=True, ensure_ascii=True)
            f.write('\n')

    def delete(self):
        try:
            os.unlink(self.path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise


class Config(BaseConfigDict):

    name = 'config'

    DEFAULTS = {
        'implicit_content_type': 'json',
        'default_options': []
    }

    def __init__(self, *args, **kwargs):
        super(Config, self).__init__(*args, **kwargs)
        self.update(self.DEFAULTS)
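A sketch of the intended round trip through Config; the temporary directory keeps the example away from the real ~/.httpie, and only the constructor, is_new, save(), and load() shown above are used:

```python
import tempfile

from httpie.config import Config

config = Config(directory=tempfile.mkdtemp())
print(config.is_new)                    # True: no config.json written yet
print(config['implicit_content_type'])  # 'json', from Config.DEFAULTS

config['default_options'] = ['--form']
config.save()                           # writes <directory>/config.json

reloaded = Config(directory=config.directory)
reloaded.load()
print(reloaded['default_options'])      # ['--form']
```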
httpie/core.py (new file, 123 lines)

"""This module provides the main functionality of HTTPie.

Invocation flow:

    1. Read, validate and process the input (args, `stdin`).
    2. Create and send a request.
    3. Stream, and possibly process and format, the requested parts
       of the request-response exchange.
    4. Simultaneously write to `stdout`
    5. Exit.

"""
import sys
import errno

import requests
from requests.compat import str, is_py3
from httpie import __version__ as httpie_version
from requests import __version__ as requests_version
from pygments import __version__ as pygments_version

from .cli import parser
from .client import get_response
from .models import Environment
from .output import output_stream, write, write_with_colors_win_p3k
from . import exit


def get_exist_status(code, follow=False):
    """Translate HTTP status code to exit status."""
    if 300 <= code <= 399 and not follow:
        # Redirect
        return exit.ERROR_HTTP_3XX
    elif 400 <= code <= 499:
        # Client Error
        return exit.ERROR_HTTP_4XX
    elif 500 <= code <= 599:
        # Server Error
        return exit.ERROR_HTTP_5XX
    else:
        return exit.OK


def print_debug_info(env):
    sys.stderr.writelines([
        'HTTPie %s\n' % httpie_version,
        'HTTPie data: %s\n' % env.config.directory,
        'Requests %s\n' % requests_version,
        'Pygments %s\n' % pygments_version,
        'Python %s %s\n' % (sys.version, sys.platform)
    ])


def main(args=sys.argv[1:], env=Environment()):
    """Run the main program and write the output to ``env.stdout``.

    Return exit status.

    """
    if env.config.default_options:
        args = env.config.default_options + args

    def error(msg, *args):
        msg = msg % args
        env.stderr.write('\nhttp: error: %s\n' % msg)

    debug = '--debug' in args
    traceback = debug or '--traceback' in args
    status = exit.OK

    if debug:
        print_debug_info(env)
        if args == ['--debug']:
            sys.exit(exit.OK)

    try:
        args = parser.parse_args(args=args, env=env)

        response = get_response(args, config_dir=env.config.directory)

        if args.check_status:
            status = get_exist_status(response.status_code,
                                      args.follow)
            if status and not env.stdout_isatty:
                error('%s %s', response.raw.status, response.raw.reason)

        stream = output_stream(args, env, response.request, response)

        write_kwargs = {
            'stream': stream,
            'outfile': env.stdout,
            'flush': env.stdout_isatty or args.stream
        }
        try:
            if env.is_windows and is_py3 and 'colors' in args.prettify:
                write_with_colors_win_p3k(**write_kwargs)
            else:
                write(**write_kwargs)

        except IOError as e:
            if not traceback and e.errno == errno.EPIPE:
                # Ignore broken pipes unless --traceback.
                env.stderr.write('\n')
            else:
                raise

    except (KeyboardInterrupt, SystemExit):
        if traceback:
            raise
        env.stderr.write('\n')
        status = exit.ERROR
    except requests.Timeout:
        status = exit.ERROR_TIMEOUT
        error('Request timed out (%ss).', args.timeout)
    except Exception as e:
        # TODO: distinguish between expected and unexpected errors.
        # network errors vs. bugs, etc.
        if traceback:
            raise
        error('%s: %s', type(e).__name__, str(e))
        status = exit.ERROR

    return status
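The status-to-exit-code mapping is small enough to pin down with a few assertions that follow directly from get_exist_status() above:

```python
from httpie import exit
from httpie.core import get_exist_status

assert get_exist_status(200) == exit.OK
assert get_exist_status(301) == exit.ERROR_HTTP_3XX    # redirect, --follow not set
assert get_exist_status(301, follow=True) == exit.OK   # followed redirects are fine
assert get_exist_status(404) == exit.ERROR_HTTP_4XX
assert get_exist_status(503) == exit.ERROR_HTTP_5XX
```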
httpie/httpmessage.py (deleted, 78 lines)

The old HTTPMessage model is removed together with its helpers: from_request()
(rebuilt the Request-Line from the method and URL path, filled in a missing
Host header, and re-encoded --form data), from_response() (recovered the
Status-Line, headers, and decoded body from `response.raw._original_response`),
and format() (joined the headers and body into a unicode string, optionally run
through a prettifier).
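What this module produced was the textual form of each exchange: a Request-Line (or Status-Line) followed by a header block and the body. A sketch of the request-side assembly modelled on the deleted from_request(); the sample URL and headers are illustrative:

```python
from requests.compat import urlparse  # the same helper the deleted module used

url = urlparse('http://example.org/people?format=json')
request_headers = {'Host': url.netloc, 'Content-Type': 'application/json'}

# Request-Line, as built by from_request():
line = '{method} {path} HTTP/1.1'.format(method='POST', path=url.path or '/')
# Header block, one "Name: value" pair per line:
headers = '\n'.join('%s: %s' % (name, value)
                    for name, value in request_headers.items())

print(line)
print(headers)
```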
463
httpie/input.py
Normal file
463
httpie/input.py
Normal file
@ -0,0 +1,463 @@
|
|||||||
|
"""Parsing and processing of CLI input (args, auth credentials, files, stdin).
|
||||||
|
|
||||||
|
"""
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import re
|
||||||
|
import json
|
||||||
|
import mimetypes
|
||||||
|
import getpass
|
||||||
|
from io import BytesIO
|
||||||
|
from argparse import ArgumentParser, ArgumentTypeError
|
||||||
|
|
||||||
|
try:
|
||||||
|
from collections import OrderedDict
|
||||||
|
except ImportError:
|
||||||
|
OrderedDict = dict
|
||||||
|
|
||||||
|
from requests.structures import CaseInsensitiveDict
|
||||||
|
from requests.compat import str, urlparse
|
||||||
|
|
||||||
|
|
||||||
|
HTTP_POST = 'POST'
|
||||||
|
HTTP_GET = 'GET'
|
||||||
|
HTTP = 'http://'
|
||||||
|
HTTPS = 'https://'
|
||||||
|
|
||||||
|
|
||||||
|
# Various separators used in args
|
||||||
|
SEP_HEADERS = ':'
|
||||||
|
SEP_CREDENTIALS = ':'
|
||||||
|
SEP_PROXY = ':'
|
||||||
|
SEP_DATA = '='
|
||||||
|
SEP_DATA_RAW_JSON = ':='
|
||||||
|
SEP_FILES = '@'
|
||||||
|
SEP_QUERY = '=='
|
||||||
|
|
||||||
|
# Separators that become request data
|
||||||
|
SEP_GROUP_DATA_ITEMS = frozenset([
|
||||||
|
SEP_DATA,
|
||||||
|
SEP_DATA_RAW_JSON,
|
||||||
|
SEP_FILES
|
||||||
|
])
|
||||||
|
|
||||||
|
# Separators allowed in ITEM arguments
|
||||||
|
SEP_GROUP_ITEMS = frozenset([
|
||||||
|
SEP_HEADERS,
|
||||||
|
SEP_QUERY,
|
||||||
|
SEP_DATA,
|
||||||
|
SEP_DATA_RAW_JSON,
|
||||||
|
SEP_FILES
|
||||||
|
])
|
||||||
|
|
||||||
|
|
||||||
|
# Output options
|
||||||
|
OUT_REQ_HEAD = 'H'
|
||||||
|
OUT_REQ_BODY = 'B'
|
||||||
|
OUT_RESP_HEAD = 'h'
|
||||||
|
OUT_RESP_BODY = 'b'
|
||||||
|
|
||||||
|
OUTPUT_OPTIONS = frozenset([
|
||||||
|
OUT_REQ_HEAD,
|
||||||
|
OUT_REQ_BODY,
|
||||||
|
OUT_RESP_HEAD,
|
||||||
|
OUT_RESP_BODY
|
||||||
|
])
|
||||||
|
|
||||||
|
# Pretty
|
||||||
|
PRETTY_MAP = {
|
||||||
|
'all': ['format', 'colors'],
|
||||||
|
'colors': ['colors'],
|
||||||
|
'format': ['format'],
|
||||||
|
'none': []
|
||||||
|
}
|
||||||
|
PRETTY_STDOUT_TTY_ONLY = object()
|
||||||
|
|
||||||
|
|
||||||
|
# Defaults
|
||||||
|
OUTPUT_OPTIONS_DEFAULT = OUT_RESP_HEAD + OUT_RESP_BODY
|
||||||
|
OUTPUT_OPTIONS_DEFAULT_STDOUT_REDIRECTED = OUT_RESP_BODY
|
||||||
|
|
||||||
|
|
||||||
|
class Parser(ArgumentParser):
|
||||||
|
"""Adds additional logic to `argparse.ArgumentParser`.
|
||||||
|
|
||||||
|
Handles all input (CLI args, file args, stdin), applies defaults,
|
||||||
|
and performs extra validation.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, *args, **kwargs):
|
||||||
|
kwargs['add_help'] = False
|
||||||
|
super(Parser, self).__init__(*args, **kwargs)
|
||||||
|
|
||||||
|
#noinspection PyMethodOverriding
|
||||||
|
def parse_args(self, env, args=None, namespace=None):
|
||||||
|
|
||||||
|
self.env = env
|
||||||
|
|
||||||
|
args = super(Parser, self).parse_args(args, namespace)
|
||||||
|
|
||||||
|
if not args.json and env.config.implicit_content_type == 'form':
|
||||||
|
args.form = True
|
||||||
|
|
||||||
|
if args.debug:
|
||||||
|
args.traceback = True
|
||||||
|
|
||||||
|
if args.output:
|
||||||
|
env.stdout = args.output
|
||||||
|
env.stdout_isatty = False
|
||||||
|
|
||||||
|
self._process_output_options(args, env)
|
||||||
|
self._process_pretty_options(args, env)
|
||||||
|
self._guess_method(args, env)
|
||||||
|
self._parse_items(args)
|
||||||
|
|
||||||
|
if not env.stdin_isatty:
|
||||||
|
self._body_from_file(args, env.stdin)
|
||||||
|
|
||||||
|
if not (args.url.startswith(HTTP) or args.url.startswith(HTTPS)):
|
||||||
|
scheme = HTTPS if env.progname == 'https' else HTTP
|
||||||
|
args.url = scheme + args.url
|
||||||
|
|
||||||
|
if args.auth and not args.auth.has_password():
|
||||||
|
# Stdin already read (if not a tty) so it's save to prompt.
|
||||||
|
args.auth.prompt_password(urlparse(args.url).netloc)
|
||||||
|
|
||||||
|
return args
|
||||||
|
|
||||||
|
def _print_message(self, message, file=None):
|
||||||
|
# Sneak in our stderr/stdout.
|
||||||
|
file = {
|
||||||
|
sys.stdout: self.env.stdout,
|
||||||
|
sys.stderr: self.env.stderr,
|
||||||
|
None: self.env.stderr
|
||||||
|
}.get(file, file)
|
||||||
|
|
||||||
|
super(Parser, self)._print_message(message, file)
|
||||||
|
|
||||||
|
def _body_from_file(self, args, fd):
|
||||||
|
"""There can only be one source of request data.
|
||||||
|
|
||||||
|
Bytes are always read.
|
||||||
|
|
||||||
|
"""
|
||||||
|
if args.data:
|
||||||
|
self.error('Request body (from stdin or a file) and request '
|
||||||
|
'data (key=value) cannot be mixed.')
|
||||||
|
args.data = getattr(fd, 'buffer', fd).read()
|
||||||
|
|
||||||
|
def _guess_method(self, args, env):
|
||||||
|
"""Set `args.method` if not specified to either POST or GET
|
||||||
|
based on whether the request has data or not.
|
||||||
|
|
||||||
|
"""
|
||||||
|
if args.method is None:
|
||||||
|
# Invoked as `http URL'.
|
||||||
|
assert not args.items
|
||||||
|
if not env.stdin_isatty:
|
||||||
|
args.method = HTTP_POST
|
||||||
|
else:
|
||||||
|
args.method = HTTP_GET
|
||||||
|
|
||||||
|
# FIXME: False positive, e.g., "localhost" matches but is a valid URL.
|
||||||
|
elif not re.match('^[a-zA-Z]+$', args.method):
|
||||||
|
# Invoked as `http URL item+'. The URL is now in `args.method`
|
||||||
|
# and the first ITEM is now incorrectly in `args.url`.
|
||||||
|
try:
|
||||||
|
# Parse the URL as an ITEM and store it as the first ITEM arg.
|
||||||
|
args.items.insert(
|
||||||
|
0, KeyValueArgType(*SEP_GROUP_ITEMS).__call__(args.url))
|
||||||
|
|
||||||
|
except ArgumentTypeError as e:
|
||||||
|
if args.traceback:
|
||||||
|
raise
|
||||||
|
self.error(e.message)
|
||||||
|
|
||||||
|
else:
|
||||||
|
# Set the URL correctly
|
||||||
|
args.url = args.method
|
||||||
|
# Infer the method
|
||||||
|
has_data = not env.stdin_isatty or any(
|
||||||
|
item.sep in SEP_GROUP_DATA_ITEMS for item in args.items)
|
||||||
|
args.method = HTTP_POST if has_data else HTTP_GET
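# Illustrative outcomes of the guessing above (not part of the original
# source): `http example.org` results in GET, `http example.org name=John`
# results in POST, and piping a request body via stdin also results in POST.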
|
||||||
|
|
||||||
|
def _parse_items(self, args):
|
||||||
|
"""Parse `args.items` into `args.headers`, `args.data`,
|
||||||
|
`args.params`, and `args.files`.
|
||||||
|
|
||||||
|
"""
|
||||||
|
args.headers = CaseInsensitiveDict()
|
||||||
|
args.data = ParamDict() if args.form else OrderedDict()
|
||||||
|
args.files = OrderedDict()
|
||||||
|
args.params = ParamDict()
|
||||||
|
|
||||||
|
try:
|
||||||
|
parse_items(items=args.items,
|
||||||
|
headers=args.headers,
|
||||||
|
data=args.data,
|
||||||
|
files=args.files,
|
||||||
|
params=args.params)
|
||||||
|
except ParseError as e:
|
||||||
|
if args.traceback:
|
||||||
|
raise
|
||||||
|
self.error(e.message)
|
||||||
|
|
||||||
|
if args.files and not args.form:
|
||||||
|
# `http url @/path/to/file`
|
||||||
|
file_fields = list(args.files.keys())
|
||||||
|
if file_fields != ['']:
|
||||||
|
self.error(
|
||||||
|
'Invalid file fields (perhaps you meant --form?): %s'
|
||||||
|
% ','.join(file_fields))
|
||||||
|
|
||||||
|
fn, fd = args.files['']
|
||||||
|
args.files = {}
|
||||||
|
self._body_from_file(args, fd)
|
||||||
|
if 'Content-Type' not in args.headers:
|
||||||
|
mime, encoding = mimetypes.guess_type(fn, strict=False)
|
||||||
|
if mime:
|
||||||
|
content_type = mime
|
||||||
|
if encoding:
|
||||||
|
content_type = '%s; charset=%s' % (mime, encoding)
|
||||||
|
args.headers['Content-Type'] = content_type
|
||||||
|
|
||||||
|
def _process_output_options(self, args, env):
|
||||||
|
"""Apply defaults to output options or validate the provided ones.
|
||||||
|
|
||||||
|
The default output options are stdout-type-sensitive.
|
||||||
|
|
||||||
|
"""
|
||||||
|
if not args.output_options:
|
||||||
|
args.output_options = (OUTPUT_OPTIONS_DEFAULT if env.stdout_isatty
|
||||||
|
else OUTPUT_OPTIONS_DEFAULT_STDOUT_REDIRECTED)
|
||||||
|
|
||||||
|
unknown = set(args.output_options) - OUTPUT_OPTIONS
|
||||||
|
if unknown:
|
||||||
|
self.error('Unknown output options: %s' % ','.join(unknown))
|
||||||
|
|
||||||
|
def _process_pretty_options(self, args, env):
|
||||||
|
if args.prettify == PRETTY_STDOUT_TTY_ONLY:
|
||||||
|
args.prettify = PRETTY_MAP['all' if env.stdout_isatty else 'none']
|
||||||
|
elif args.prettify and env.is_windows:
|
||||||
|
self.error('Only terminal output can be colorized on Windows.')
|
||||||
|
else:
|
||||||
|
args.prettify = PRETTY_MAP[args.prettify]
|
||||||
|
|
||||||
|
|
||||||
|
class ParseError(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class KeyValue(object):
|
||||||
|
"""Base key-value pair parsed from CLI."""
|
||||||
|
|
||||||
|
def __init__(self, key, value, sep, orig):
|
||||||
|
self.key = key
|
||||||
|
self.value = value
|
||||||
|
self.sep = sep
|
||||||
|
self.orig = orig
|
||||||
|
|
||||||
|
def __eq__(self, other):
|
||||||
|
return self.__dict__ == other.__dict__
|
||||||
|
|
||||||
|
|
||||||
|
class KeyValueArgType(object):
|
||||||
|
"""A key-value pair argument type used with `argparse`.
|
||||||
|
|
||||||
|
Parses a key-value arg and constructs a `KeyValue` instance.
|
||||||
|
Used for headers, form data, and other key-value pair types.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
key_value_class = KeyValue
|
||||||
|
|
||||||
|
def __init__(self, *separators):
|
||||||
|
self.separators = separators
|
||||||
|
|
||||||
|
def __call__(self, string):
|
||||||
|
"""Parse `string` and return `self.key_value_class()` instance.
|
||||||
|
|
||||||
|
The best of `self.separators` is determined (first found, longest).
|
||||||
|
Backslash-escaped characters aren't considered separators
|
||||||
|
(or parts thereof). Literal backslash characters have to be escaped
|
||||||
|
as well (r'\\').
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
class Escaped(str):
|
||||||
|
"""Represents an escaped character."""
|
||||||
|
|
||||||
|
def tokenize(s):
|
||||||
|
"""Tokenize `s`. There are only two token types - strings
|
||||||
|
and escaped characters:
|
||||||
|
|
||||||
|
>>> tokenize(r'foo\=bar\\baz')
|
||||||
|
['foo', Escaped('='), 'bar', Escaped('\\'), 'baz']
|
||||||
|
|
||||||
|
"""
|
||||||
|
tokens = ['']
|
||||||
|
esc = False
|
||||||
|
for c in s:
|
||||||
|
if esc:
|
||||||
|
tokens.extend([Escaped(c), ''])
|
||||||
|
esc = False
|
||||||
|
else:
|
||||||
|
if c == '\\':
|
||||||
|
esc = True
|
||||||
|
else:
|
||||||
|
tokens[-1] += c
|
||||||
|
return tokens
|
||||||
|
|
||||||
|
tokens = tokenize(string)
|
||||||
|
|
||||||
|
# Sorting by length ensures that the longest one will be
|
||||||
|
# chosen as it will overwrite any shorter ones starting
|
||||||
|
# at the same position in the `found` dictionary.
|
||||||
|
separators = sorted(self.separators, key=len)
|
||||||
|
|
||||||
|
for i, token in enumerate(tokens):
|
||||||
|
|
||||||
|
if isinstance(token, Escaped):
|
||||||
|
continue
|
||||||
|
|
||||||
|
found = {}
|
||||||
|
for sep in separators:
|
||||||
|
pos = token.find(sep)
|
||||||
|
if pos != -1:
|
||||||
|
found[pos] = sep
|
||||||
|
|
||||||
|
if found:
|
||||||
|
# The separator starting first wins; at the same position, the longest one.
|
||||||
|
sep = found[min(found.keys())]
|
||||||
|
|
||||||
|
key, value = token.split(sep, 1)
|
||||||
|
|
||||||
|
# Any preceding tokens are part of the key.
|
||||||
|
key = ''.join(tokens[:i]) + key
|
||||||
|
|
||||||
|
# Any following tokens are part of the value.
|
||||||
|
value += ''.join(tokens[i + 1:])
|
||||||
|
|
||||||
|
break
|
||||||
|
|
||||||
|
else:
|
||||||
|
raise ArgumentTypeError(
|
||||||
|
'"%s" is not a valid value' % string)
|
||||||
|
|
||||||
|
return self.key_value_class(
|
||||||
|
key=key, value=value, sep=sep, orig=string)
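# A minimal usage sketch (not part of the original source; the separator
# characters passed below are illustrative, not the SEP_* constants):
#
#   >>> arg_type = KeyValueArgType('=', ':')
#   >>> kv = arg_type(r'foo\=bar=baz')
#   >>> kv.key, kv.value, kv.sep
#   ('foo=bar', 'baz', '=')
#
# The escaped '=' stays part of the key; the first unescaped separator
# splits the key from the value.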
|
||||||
|
|
||||||
|
|
||||||
|
class AuthCredentials(KeyValue):
|
||||||
|
"""Represents parsed credentials."""
|
||||||
|
|
||||||
|
def _getpass(self, prompt):
|
||||||
|
# To allow mocking.
|
||||||
|
return getpass.getpass(prompt)
|
||||||
|
|
||||||
|
def has_password(self):
|
||||||
|
return self.value is not None
|
||||||
|
|
||||||
|
def prompt_password(self, host):
|
||||||
|
try:
|
||||||
|
self.value = self._getpass(
|
||||||
|
'http: password for %s@%s: ' % (self.key, host))
|
||||||
|
except (EOFError, KeyboardInterrupt):
|
||||||
|
sys.stderr.write('\n')
|
||||||
|
sys.exit(0)
|
||||||
|
|
||||||
|
|
||||||
|
class AuthCredentialsArgType(KeyValueArgType):
|
||||||
|
"""A key-value arg type that parses credentials."""
|
||||||
|
|
||||||
|
key_value_class = AuthCredentials
|
||||||
|
|
||||||
|
def __call__(self, string):
|
||||||
|
"""Parse credentials from `string`.
|
||||||
|
|
||||||
|
("username" or "username:password").
|
||||||
|
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
return super(AuthCredentialsArgType, self).__call__(string)
|
||||||
|
except ArgumentTypeError:
|
||||||
|
# No password provided, will prompt for it later.
|
||||||
|
return self.key_value_class(
|
||||||
|
key=string,
|
||||||
|
value=None,
|
||||||
|
sep=SEP_CREDENTIALS,
|
||||||
|
orig=string
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class ParamDict(OrderedDict):
|
||||||
|
"""Multi-value dict for URL parameters and form data."""
|
||||||
|
|
||||||
|
#noinspection PyMethodOverriding
|
||||||
|
def __setitem__(self, key, value):
|
||||||
|
""" If `key` is assigned more than once, `self[key]` holds a
|
||||||
|
`list` of all the values.
|
||||||
|
|
||||||
|
This allows having multiple fields with the same name in form
|
||||||
|
data and URL params.
|
||||||
|
|
||||||
|
"""
|
||||||
|
# NOTE: Won't work when used for form data with multiple values
|
||||||
|
# for a field and a file field is present:
|
||||||
|
# https://github.com/kennethreitz/requests/issues/737
|
||||||
|
if key not in self:
|
||||||
|
super(ParamDict, self).__setitem__(key, value)
|
||||||
|
else:
|
||||||
|
if not isinstance(self[key], list):
|
||||||
|
super(ParamDict, self).__setitem__(key, [self[key]])
|
||||||
|
self[key].append(value)
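# A minimal usage sketch (not part of the original source):
#
#   >>> d = ParamDict()
#   >>> d['tag'] = 'a'
#   >>> d['tag'] = 'b'
#   >>> d['tag']
#   ['a', 'b']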
|
||||||
|
|
||||||
|
|
||||||
|
def parse_items(items, data=None, headers=None, files=None, params=None):
|
||||||
|
"""Parse `KeyValue` `items` into `data`, `headers`, `files`,
|
||||||
|
and `params`.
|
||||||
|
|
||||||
|
"""
|
||||||
|
if headers is None:
|
||||||
|
headers = CaseInsensitiveDict()
|
||||||
|
if data is None:
|
||||||
|
data = OrderedDict()
|
||||||
|
if files is None:
|
||||||
|
files = OrderedDict()
|
||||||
|
if params is None:
|
||||||
|
params = ParamDict()
|
||||||
|
|
||||||
|
for item in items:
|
||||||
|
|
||||||
|
value = item.value
|
||||||
|
key = item.key
|
||||||
|
|
||||||
|
if item.sep == SEP_HEADERS:
|
||||||
|
target = headers
|
||||||
|
elif item.sep == SEP_QUERY:
|
||||||
|
target = params
|
||||||
|
elif item.sep == SEP_FILES:
|
||||||
|
try:
|
||||||
|
with open(os.path.expanduser(value), 'rb') as f:
|
||||||
|
value = (os.path.basename(value),
|
||||||
|
BytesIO(f.read()))
|
||||||
|
except IOError as e:
|
||||||
|
raise ParseError(
|
||||||
|
'Invalid argument "%s": %s' % (item.orig, e))
|
||||||
|
target = files
|
||||||
|
|
||||||
|
elif item.sep in [SEP_DATA, SEP_DATA_RAW_JSON]:
|
||||||
|
if item.sep == SEP_DATA_RAW_JSON:
|
||||||
|
try:
|
||||||
|
value = json.loads(item.value)
|
||||||
|
except ValueError:
|
||||||
|
raise ParseError('"%s" is not valid JSON' % item.orig)
|
||||||
|
target = data
|
||||||
|
|
||||||
|
else:
|
||||||
|
raise TypeError(item)
|
||||||
|
|
||||||
|
target[key] = value
|
||||||
|
|
||||||
|
return headers, data, files, params
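# A minimal usage sketch (not part of the original source; it relies on the
# SEP_* constants referenced above, e.g. the header and data separators):
#
#   >>> items = [
#   ...     KeyValue(key='X-API-Key', value='123',
#   ...              sep=SEP_HEADERS, orig='X-API-Key:123'),
#   ...     KeyValue(key='name', value='John',
#   ...              sep=SEP_DATA, orig='name=John'),
#   ... ]
#   >>> headers, data, files, params = parse_items(items)
#   >>> headers['X-API-Key'], data['name']
#   ('123', 'John')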
|
30
httpie/manage.py
Normal file
@ -0,0 +1,30 @@
|
|||||||
|
"""
|
||||||
|
Provides the `httpie' management command.
|
||||||
|
|
||||||
|
Note that the main `http' command points to `httpie.__main__.main()`.
|
||||||
|
|
||||||
|
"""
|
||||||
|
import argparse
|
||||||
|
|
||||||
|
from . import sessions
|
||||||
|
from . import __version__
|
||||||
|
|
||||||
|
|
||||||
|
parser = argparse.ArgumentParser(
|
||||||
|
description='The HTTPie management command.',
|
||||||
|
version=__version__
|
||||||
|
)
|
||||||
|
subparsers = parser.add_subparsers()
|
||||||
|
|
||||||
|
|
||||||
|
# Only sessions as of now.
|
||||||
|
sessions.add_commands(subparsers)
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
args = parser.parse_args()
|
||||||
|
args.command(args)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
main()
|
209
httpie/models.py
Normal file
@ -0,0 +1,209 @@
|
|||||||
|
import os
|
||||||
|
import sys
|
||||||
|
from requests.compat import urlparse, is_windows, bytes, str
|
||||||
|
|
||||||
|
from .config import DEFAULT_CONFIG_DIR, Config
|
||||||
|
|
||||||
|
|
||||||
|
class Environment(object):
|
||||||
|
"""Holds information about the execution context.
|
||||||
|
|
||||||
|
Groups various aspects of the environment in a changeable object
|
||||||
|
and allows for mocking.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
#noinspection PyUnresolvedReferences
|
||||||
|
is_windows = is_windows
|
||||||
|
|
||||||
|
progname = os.path.basename(sys.argv[0])
|
||||||
|
if progname not in ['http', 'https']:
|
||||||
|
progname = 'http'
|
||||||
|
|
||||||
|
stdin_isatty = sys.stdin.isatty()
|
||||||
|
stdin = sys.stdin
|
||||||
|
stdout_isatty = sys.stdout.isatty()
|
||||||
|
|
||||||
|
config_dir = DEFAULT_CONFIG_DIR
|
||||||
|
|
||||||
|
if stdout_isatty and is_windows:
|
||||||
|
from colorama.initialise import wrap_stream
|
||||||
|
stdout = wrap_stream(sys.stdout, convert=None,
|
||||||
|
strip=None, autoreset=True, wrap=True)
|
||||||
|
else:
|
||||||
|
stdout = sys.stdout
|
||||||
|
stderr = sys.stderr
|
||||||
|
|
||||||
|
# Can be set to 0 to disable colors completely.
|
||||||
|
colors = 256 if '256color' in os.environ.get('TERM', '') else 88
|
||||||
|
|
||||||
|
def __init__(self, **kwargs):
|
||||||
|
assert all(hasattr(type(self), attr)
|
||||||
|
for attr in kwargs.keys())
|
||||||
|
self.__dict__.update(**kwargs)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def config(self):
|
||||||
|
if not hasattr(self, '_config'):
|
||||||
|
self._config = Config(directory=self.config_dir)
|
||||||
|
if self._config.is_new:
|
||||||
|
self._config.save()
|
||||||
|
else:
|
||||||
|
self._config.load()
|
||||||
|
return self._config
|
||||||
|
|
||||||
|
|
||||||
|
class HTTPMessage(object):
|
||||||
|
"""Abstract class for HTTP messages."""
|
||||||
|
|
||||||
|
def __init__(self, orig):
|
||||||
|
self._orig = orig
|
||||||
|
|
||||||
|
def iter_body(self, chunk_size):
|
||||||
|
"""Return an iterator over the body."""
|
||||||
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
def iter_lines(self, chunk_size):
|
||||||
|
"""Return an iterator over the body yielding (`line`, `line_feed`)."""
|
||||||
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def headers(self):
|
||||||
|
"""Return a `str` with the message's headers."""
|
||||||
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def encoding(self):
|
||||||
|
"""Return a `str` with the message's encoding, if known."""
|
||||||
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def body(self):
|
||||||
|
"""Return a `bytes` with the message's body."""
|
||||||
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def content_type(self):
|
||||||
|
"""Return the message content type."""
|
||||||
|
ct = self._orig.headers.get('Content-Type', '')
|
||||||
|
if isinstance(ct, bytes):
|
||||||
|
ct = ct.decode()
|
||||||
|
return ct
|
||||||
|
|
||||||
|
|
||||||
|
class HTTPResponse(HTTPMessage):
|
||||||
|
"""A :class:`requests.models.Response` wrapper."""
|
||||||
|
|
||||||
|
def iter_body(self, chunk_size=1):
|
||||||
|
return self._orig.iter_content(chunk_size=chunk_size)
|
||||||
|
|
||||||
|
def iter_lines(self, chunk_size):
|
||||||
|
return ((line, b'\n') for line in self._orig.iter_lines(chunk_size))
|
||||||
|
|
||||||
|
@property
|
||||||
|
def headers(self):
|
||||||
|
original = self._orig.raw._original_response
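# `original.version` is an int (10 or 11 for HTTP/1.0 and HTTP/1.1),
# so joining its digits with '.' below yields '1.0' or '1.1'.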
|
||||||
|
status_line = 'HTTP/{version} {status} {reason}'.format(
|
||||||
|
version='.'.join(str(original.version)),
|
||||||
|
status=original.status,
|
||||||
|
reason=original.reason
|
||||||
|
)
|
||||||
|
headers = [status_line]
|
||||||
|
try:
|
||||||
|
# `original.msg` is a `http.client.HTTPMessage` on Python 3
|
||||||
|
# and each item of `_headers` is a `(name, value)` 2-tuple
|
||||||
|
headers.extend(
|
||||||
|
'%s: %s' % header for header in original.msg._headers)
|
||||||
|
except AttributeError:
|
||||||
|
# and a `httplib.HTTPMessage` on Python 2.x
|
||||||
|
# `headers` is a list of `name: val<CRLF>`.
|
||||||
|
headers.extend(h.strip() for h in original.msg.headers)
|
||||||
|
|
||||||
|
return '\r\n'.join(headers)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def encoding(self):
|
||||||
|
return self._orig.encoding or 'utf8'
|
||||||
|
|
||||||
|
@property
|
||||||
|
def body(self):
|
||||||
|
# Only now the response body is fetched.
|
||||||
|
# Shouldn't be touched unless the body is actually needed.
|
||||||
|
return self._orig.content
|
||||||
|
|
||||||
|
|
||||||
|
class HTTPRequest(HTTPMessage):
|
||||||
|
"""A :class:`requests.models.Request` wrapper."""
|
||||||
|
|
||||||
|
def iter_body(self, chunk_size):
|
||||||
|
yield self.body
|
||||||
|
|
||||||
|
def iter_lines(self, chunk_size):
|
||||||
|
yield self.body, b''
|
||||||
|
|
||||||
|
@property
|
||||||
|
def headers(self):
|
||||||
|
"""Return Request-Line"""
|
||||||
|
url = urlparse(self._orig.url)
|
||||||
|
|
||||||
|
# Querystring
|
||||||
|
qs = ''
|
||||||
|
if url.query or self._orig.params:
|
||||||
|
qs = '?'
|
||||||
|
if url.query:
|
||||||
|
qs += url.query
|
||||||
|
# Requests doesn't make params part of ``request.url``.
|
||||||
|
if self._orig.params:
|
||||||
|
if url.query:
|
||||||
|
qs += '&'
|
||||||
|
#noinspection PyUnresolvedReferences
|
||||||
|
qs += type(self._orig)._encode_params(self._orig.params)
|
||||||
|
|
||||||
|
# Request-Line
|
||||||
|
request_line = '{method} {path}{query} HTTP/1.1'.format(
|
||||||
|
method=self._orig.method,
|
||||||
|
path=url.path or '/',
|
||||||
|
query=qs
|
||||||
|
)
|
||||||
|
|
||||||
|
headers = dict(self._orig.headers)
|
||||||
|
|
||||||
|
if 'Host' not in headers:
|
||||||
|
headers['Host'] = urlparse(self._orig.url).netloc
|
||||||
|
|
||||||
|
headers = ['%s: %s' % (name, value)
|
||||||
|
for name, value in headers.items()]
|
||||||
|
|
||||||
|
headers.insert(0, request_line)
|
||||||
|
|
||||||
|
return '\r\n'.join(headers).strip()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def encoding(self):
|
||||||
|
return 'utf8'
|
||||||
|
|
||||||
|
@property
|
||||||
|
def body(self):
|
||||||
|
"""Reconstruct and return the original request body bytes."""
|
||||||
|
if self._orig.files:
|
||||||
|
# TODO: would be nice if we didn't need to encode the files again
|
||||||
|
# FIXME: Also the boundary header doesn't match the one used.
|
||||||
|
for fn, fd in self._orig.files.values():
|
||||||
|
# Rewind the files as they have already been read before.
|
||||||
|
fd.seek(0)
|
||||||
|
body, _ = self._orig._encode_files(self._orig.files)
|
||||||
|
else:
|
||||||
|
try:
|
||||||
|
body = self._orig.data
|
||||||
|
except AttributeError:
|
||||||
|
# requests < 0.12.1
|
||||||
|
body = self._orig._enc_data
|
||||||
|
|
||||||
|
if isinstance(body, dict):
|
||||||
|
#noinspection PyUnresolvedReferences
|
||||||
|
body = type(self._orig)._encode_params(body)
|
||||||
|
|
||||||
|
if isinstance(body, str):
|
||||||
|
body = body.encode('utf8')
|
||||||
|
|
||||||
|
return body
|
496
httpie/output.py
Normal file
@ -0,0 +1,496 @@
|
|||||||
|
"""Output streaming, processing and formatting.
|
||||||
|
|
||||||
|
"""
|
||||||
|
import json
|
||||||
|
from functools import partial
|
||||||
|
from itertools import chain
|
||||||
|
|
||||||
|
import pygments
|
||||||
|
from pygments import token, lexer
|
||||||
|
from pygments.styles import get_style_by_name, STYLE_MAP
|
||||||
|
from pygments.lexers import get_lexer_for_mimetype, get_lexer_by_name
|
||||||
|
from pygments.formatters.terminal import TerminalFormatter
|
||||||
|
from pygments.formatters.terminal256 import Terminal256Formatter
|
||||||
|
from pygments.util import ClassNotFound
|
||||||
|
from requests.compat import is_windows
|
||||||
|
|
||||||
|
from .solarized import Solarized256Style
|
||||||
|
from .models import HTTPRequest, HTTPResponse, Environment
|
||||||
|
from .input import (OUT_REQ_BODY, OUT_REQ_HEAD,
|
||||||
|
OUT_RESP_HEAD, OUT_RESP_BODY)
|
||||||
|
|
||||||
|
|
||||||
|
# Colors on Windows via colorama don't look that
|
||||||
|
# great and fruity seems to give the best result there.
|
||||||
|
AVAILABLE_STYLES = set(STYLE_MAP.keys())
|
||||||
|
AVAILABLE_STYLES.add('solarized')
|
||||||
|
DEFAULT_STYLE = 'solarized' if not is_windows else 'fruity'
|
||||||
|
|
||||||
|
|
||||||
|
BINARY_SUPPRESSED_NOTICE = (
|
||||||
|
b'\n'
|
||||||
|
b'+-----------------------------------------+\n'
|
||||||
|
b'| NOTE: binary data not shown in terminal |\n'
|
||||||
|
b'+-----------------------------------------+'
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class BinarySuppressedError(Exception):
|
||||||
|
"""An error indicating that the body is binary and won't be written,
|
||||||
|
e.g., for terminal output."""
|
||||||
|
|
||||||
|
message = BINARY_SUPPRESSED_NOTICE
|
||||||
|
|
||||||
|
|
||||||
|
###############################################################################
|
||||||
|
# Output Streams
|
||||||
|
###############################################################################
|
||||||
|
|
||||||
|
|
||||||
|
def write(stream, outfile, flush):
|
||||||
|
"""Write the output stream."""
|
||||||
|
try:
|
||||||
|
# Writing bytes so we use the buffer interface (Python 3).
|
||||||
|
buf = outfile.buffer
|
||||||
|
except AttributeError:
|
||||||
|
buf = outfile
|
||||||
|
|
||||||
|
for chunk in stream:
|
||||||
|
buf.write(chunk)
|
||||||
|
if flush:
|
||||||
|
outfile.flush()
|
||||||
|
|
||||||
|
|
||||||
|
def write_with_colors_win_p3k(stream, outfile, flush):
|
||||||
|
"""Like `write`, but colorized chunks are written as text
|
||||||
|
directly to `outfile` to ensure it gets processed by colorama.
|
||||||
|
Applies only to Windows with Python 3 and colorized terminal output.
|
||||||
|
|
||||||
|
"""
|
||||||
|
color = b'\x1b['
|
||||||
|
encoding = outfile.encoding
|
||||||
|
for chunk in stream:
|
||||||
|
if color in chunk:
|
||||||
|
outfile.write(chunk.decode(encoding))
|
||||||
|
else:
|
||||||
|
outfile.buffer.write(chunk)
|
||||||
|
if flush:
|
||||||
|
outfile.flush()
|
||||||
|
|
||||||
|
|
||||||
|
def output_stream(args, env, request, response):
|
||||||
|
"""Build and return a chain of iterators over the `request`-`response`
|
||||||
|
exchange, each of which yields `bytes` chunks.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
Stream = make_stream(env, args)
|
||||||
|
|
||||||
|
req_h = OUT_REQ_HEAD in args.output_options
|
||||||
|
req_b = OUT_REQ_BODY in args.output_options
|
||||||
|
resp_h = OUT_RESP_HEAD in args.output_options
|
||||||
|
resp_b = OUT_RESP_BODY in args.output_options
|
||||||
|
|
||||||
|
req = req_h or req_b
|
||||||
|
resp = resp_h or resp_b
|
||||||
|
|
||||||
|
output = []
|
||||||
|
|
||||||
|
if req:
|
||||||
|
output.append(Stream(
|
||||||
|
msg=HTTPRequest(request),
|
||||||
|
with_headers=req_h,
|
||||||
|
with_body=req_b))
|
||||||
|
|
||||||
|
if req_b and resp:
|
||||||
|
# Request/Response separator.
|
||||||
|
output.append([b'\n\n'])
|
||||||
|
|
||||||
|
if resp:
|
||||||
|
output.append(Stream(
|
||||||
|
msg=HTTPResponse(response),
|
||||||
|
with_headers=resp_h,
|
||||||
|
with_body=resp_b))
|
||||||
|
|
||||||
|
if env.stdout_isatty and resp_b:
|
||||||
|
# Ensure a blank line after the response body.
|
||||||
|
# For terminal output only.
|
||||||
|
output.append([b'\n\n'])
|
||||||
|
|
||||||
|
return chain(*output)
|
||||||
|
|
||||||
|
|
||||||
|
def make_stream(env, args):
|
||||||
|
"""Pick the right stream type based on `env` and `args`.
|
||||||
|
Wrap it in a partial with the type-specific args so that
|
||||||
|
we don't need to care which stream type we are dealing with.
|
||||||
|
|
||||||
|
"""
|
||||||
|
if not env.stdout_isatty and not args.prettify:
|
||||||
|
Stream = partial(
|
||||||
|
RawStream,
|
||||||
|
chunk_size=RawStream.CHUNK_SIZE_BY_LINE
|
||||||
|
if args.stream
|
||||||
|
else RawStream.CHUNK_SIZE
|
||||||
|
)
|
||||||
|
elif args.prettify:
|
||||||
|
Stream = partial(
|
||||||
|
PrettyStream if args.stream else BufferedPrettyStream,
|
||||||
|
env=env,
|
||||||
|
processor=OutputProcessor(
|
||||||
|
env=env, groups=args.prettify, pygments_style=args.style),
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
Stream = partial(EncodedStream, env=env)
|
||||||
|
|
||||||
|
return Stream
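# Selection summary of the branches above:
#   * stdout redirected and prettifying off -> RawStream (raw bytes)
#   * prettifying on -> PrettyStream with --stream, else BufferedPrettyStream
#   * otherwise (tty output, prettifying off) -> EncodedStream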
|
||||||
|
|
||||||
|
|
||||||
|
class BaseStream(object):
|
||||||
|
"""Base HTTP message stream class."""
|
||||||
|
|
||||||
|
def __init__(self, msg, with_headers=True, with_body=True):
|
||||||
|
"""
|
||||||
|
:param msg: a :class:`models.HTTPMessage` subclass
|
||||||
|
:param with_headers: if `True`, headers will be included
|
||||||
|
:param with_body: if `True`, body will be included
|
||||||
|
|
||||||
|
"""
|
||||||
|
self.msg = msg
|
||||||
|
self.with_headers = with_headers
|
||||||
|
self.with_body = with_body
|
||||||
|
|
||||||
|
def _headers(self):
|
||||||
|
"""Return the headers' bytes."""
|
||||||
|
return self.msg.headers.encode('ascii')
|
||||||
|
|
||||||
|
def _body(self):
|
||||||
|
"""Return an iterator over the message body."""
|
||||||
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
def __iter__(self):
|
||||||
|
"""Return an iterator over `self.msg`."""
|
||||||
|
if self.with_headers:
|
||||||
|
yield self._headers()
|
||||||
|
yield b'\r\n\r\n'
|
||||||
|
|
||||||
|
if self.with_body:
|
||||||
|
try:
|
||||||
|
for chunk in self._body():
|
||||||
|
yield chunk
|
||||||
|
except BinarySuppressedError as e:
|
||||||
|
if self.with_headers:
|
||||||
|
yield b'\n'
|
||||||
|
yield e.message
|
||||||
|
|
||||||
|
|
||||||
|
class RawStream(BaseStream):
|
||||||
|
"""The message is streamed in chunks with no processing."""
|
||||||
|
|
||||||
|
CHUNK_SIZE = 1024 * 100
|
||||||
|
CHUNK_SIZE_BY_LINE = 1024 * 5
|
||||||
|
|
||||||
|
def __init__(self, chunk_size=CHUNK_SIZE, **kwargs):
|
||||||
|
super(RawStream, self).__init__(**kwargs)
|
||||||
|
self.chunk_size = chunk_size
|
||||||
|
|
||||||
|
def _body(self):
|
||||||
|
return self.msg.iter_body(self.chunk_size)
|
||||||
|
|
||||||
|
|
||||||
|
class EncodedStream(BaseStream):
|
||||||
|
"""Encoded HTTP message stream.
|
||||||
|
|
||||||
|
The message bytes are converted to an encoding suitable for
|
||||||
|
`self.env.stdout`. Unicode errors are replaced and binary data
|
||||||
|
is suppressed. The body is always streamed by line.
|
||||||
|
|
||||||
|
"""
|
||||||
|
CHUNK_SIZE = 1024 * 5
|
||||||
|
|
||||||
|
def __init__(self, env=Environment(), **kwargs):
|
||||||
|
|
||||||
|
super(EncodedStream, self).__init__(**kwargs)
|
||||||
|
|
||||||
|
if env.stdout_isatty:
|
||||||
|
# Use the encoding supported by the terminal.
|
||||||
|
output_encoding = getattr(env.stdout, 'encoding', None)
|
||||||
|
else:
|
||||||
|
# Preserve the message encoding.
|
||||||
|
output_encoding = self.msg.encoding
|
||||||
|
|
||||||
|
# Default to utf8 when unsure.
|
||||||
|
self.output_encoding = output_encoding or 'utf8'
|
||||||
|
|
||||||
|
def _body(self):
|
||||||
|
|
||||||
|
for line, lf in self.msg.iter_lines(self.CHUNK_SIZE):
|
||||||
|
|
||||||
|
if b'\0' in line:
|
||||||
|
raise BinarySuppressedError()
|
||||||
|
|
||||||
|
yield line.decode(self.msg.encoding)\
|
||||||
|
.encode(self.output_encoding, 'replace') + lf
|
||||||
|
|
||||||
|
|
||||||
|
class PrettyStream(EncodedStream):
|
||||||
|
"""In addition to :class:`EncodedStream` behaviour, this stream applies
|
||||||
|
content processing.
|
||||||
|
|
||||||
|
Useful for long-lived HTTP responses that stream by lines
|
||||||
|
such as the Twitter streaming API.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
CHUNK_SIZE = 1024 * 5
|
||||||
|
|
||||||
|
def __init__(self, processor, **kwargs):
|
||||||
|
super(PrettyStream, self).__init__(**kwargs)
|
||||||
|
self.processor = processor
|
||||||
|
|
||||||
|
def _headers(self):
|
||||||
|
return self.processor.process_headers(
|
||||||
|
self.msg.headers).encode(self.output_encoding)
|
||||||
|
|
||||||
|
def _body(self):
|
||||||
|
for line, lf in self.msg.iter_lines(self.CHUNK_SIZE):
|
||||||
|
if b'\0' in line:
|
||||||
|
raise BinarySuppressedError()
|
||||||
|
yield self._process_body(line) + lf
|
||||||
|
|
||||||
|
def _process_body(self, chunk):
|
||||||
|
return (self.processor
|
||||||
|
.process_body(
|
||||||
|
chunk.decode(self.msg.encoding, 'replace'),
|
||||||
|
self.msg.content_type)
|
||||||
|
.encode(self.output_encoding, 'replace'))
|
||||||
|
|
||||||
|
|
||||||
|
class BufferedPrettyStream(PrettyStream):
|
||||||
|
"""The same as :class:`PrettyStream` except that the body is fully
|
||||||
|
fetched before it's processed.
|
||||||
|
|
||||||
|
Suitable for regular HTTP responses.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
CHUNK_SIZE = 1024 * 10
|
||||||
|
|
||||||
|
def _body(self):
|
||||||
|
|
||||||
|
#noinspection PyArgumentList
|
||||||
|
# Read the whole body before prettifying it,
|
||||||
|
# but bail out immediately if the body is binary.
|
||||||
|
body = bytearray()
|
||||||
|
for chunk in self.msg.iter_body(self.CHUNK_SIZE):
|
||||||
|
if b'\0' in chunk:
|
||||||
|
raise BinarySuppressedError()
|
||||||
|
body.extend(chunk)
|
||||||
|
|
||||||
|
yield self._process_body(body)
|
||||||
|
|
||||||
|
|
||||||
|
###############################################################################
|
||||||
|
# Processing
|
||||||
|
###############################################################################
|
||||||
|
|
||||||
|
class HTTPLexer(lexer.RegexLexer):
|
||||||
|
"""Simplified HTTP lexer for Pygments.
|
||||||
|
|
||||||
|
It only operates on headers and provides a stronger contrast between
|
||||||
|
their names and values than the original one bundled with Pygments
|
||||||
|
(:class:`pygments.lexers.text.HttpLexer`), especially when the
|
||||||
|
Solarized color scheme is used.
|
||||||
|
|
||||||
|
"""
|
||||||
|
name = 'HTTP'
|
||||||
|
aliases = ['http']
|
||||||
|
filenames = ['*.http']
|
||||||
|
tokens = {
|
||||||
|
'root': [
|
||||||
|
# Request-Line
|
||||||
|
(r'([A-Z]+)( +)([^ ]+)( +)(HTTP)(/)(\d+\.\d+)',
|
||||||
|
lexer.bygroups(
|
||||||
|
token.Name.Function,
|
||||||
|
token.Text,
|
||||||
|
token.Name.Namespace,
|
||||||
|
token.Text,
|
||||||
|
token.Keyword.Reserved,
|
||||||
|
token.Operator,
|
||||||
|
token.Number
|
||||||
|
)),
|
||||||
|
# Response Status-Line
|
||||||
|
(r'(HTTP)(/)(\d+\.\d+)( +)(\d{3})( +)(.+)',
|
||||||
|
lexer.bygroups(
|
||||||
|
token.Keyword.Reserved, # 'HTTP'
|
||||||
|
token.Operator, # '/'
|
||||||
|
token.Number, # Version
|
||||||
|
token.Text,
|
||||||
|
token.Number, # Status code
|
||||||
|
token.Text,
|
||||||
|
token.Name.Exception, # Reason
|
||||||
|
)),
|
||||||
|
# Header
|
||||||
|
(r'(.*?)( *)(:)( *)(.+)', lexer.bygroups(
|
||||||
|
token.Name.Attribute, # Name
|
||||||
|
token.Text,
|
||||||
|
token.Operator, # Colon
|
||||||
|
token.Text,
|
||||||
|
token.String # Value
|
||||||
|
))
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
class BaseProcessor(object):
|
||||||
|
"""Base, noop output processor class."""
|
||||||
|
|
||||||
|
enabled = True
|
||||||
|
|
||||||
|
def __init__(self, env=Environment(), **kwargs):
|
||||||
|
"""
|
||||||
|
:param env: an :class:`Environment` instance
|
||||||
|
:param kwargs: additional keyword arguments that some
|
||||||
|
processor might require.
|
||||||
|
|
||||||
|
"""
|
||||||
|
self.env = env
|
||||||
|
self.kwargs = kwargs
|
||||||
|
|
||||||
|
def process_headers(self, headers):
|
||||||
|
"""Return processed `headers`
|
||||||
|
|
||||||
|
:param headers: The headers as text.
|
||||||
|
|
||||||
|
"""
|
||||||
|
return headers
|
||||||
|
|
||||||
|
def process_body(self, content, content_type, subtype):
|
||||||
|
"""Return processed `content`.
|
||||||
|
|
||||||
|
:param content: The body content as text
|
||||||
|
:param content_type: Full content type, e.g., 'application/atom+xml'.
|
||||||
|
:param subtype: E.g. 'xml'.
|
||||||
|
|
||||||
|
"""
|
||||||
|
return content
|
||||||
|
|
||||||
|
|
||||||
|
class JSONProcessor(BaseProcessor):
|
||||||
|
"""JSON body processor."""
|
||||||
|
|
||||||
|
def process_body(self, content, content_type, subtype):
|
||||||
|
if subtype == 'json':
|
||||||
|
try:
|
||||||
|
# Indent the JSON data, sort keys by name, and
|
||||||
|
# avoid unicode escapes to improve readability.
|
||||||
|
content = json.dumps(json.loads(content),
|
||||||
|
sort_keys=True,
|
||||||
|
ensure_ascii=False,
|
||||||
|
indent=4)
|
||||||
|
except ValueError:
|
||||||
|
# Invalid JSON but we don't care.
|
||||||
|
pass
|
||||||
|
return content
|
||||||
|
|
||||||
|
|
||||||
|
class PygmentsProcessor(BaseProcessor):
|
||||||
|
"""A processor that applies syntax-highlighting using Pygments
|
||||||
|
to the headers, and to the body as well if its content type is recognized.
|
||||||
|
|
||||||
|
"""
|
||||||
|
def __init__(self, *args, **kwargs):
|
||||||
|
super(PygmentsProcessor, self).__init__(*args, **kwargs)
|
||||||
|
|
||||||
|
# Cache of lexers that speeds up processing a streamed body line by line.
|
||||||
|
self.lexers_by_type = {}
|
||||||
|
|
||||||
|
if not self.env.colors:
|
||||||
|
self.enabled = False
|
||||||
|
return
|
||||||
|
|
||||||
|
try:
|
||||||
|
style = get_style_by_name(
|
||||||
|
self.kwargs.get('pygments_style', DEFAULT_STYLE))
|
||||||
|
except ClassNotFound:
|
||||||
|
style = Solarized256Style
|
||||||
|
|
||||||
|
if self.env.is_windows or self.env.colors == 256:
|
||||||
|
fmt_class = Terminal256Formatter
|
||||||
|
else:
|
||||||
|
fmt_class = TerminalFormatter
|
||||||
|
self.formatter = fmt_class(style=style)
|
||||||
|
|
||||||
|
def process_headers(self, headers):
|
||||||
|
return pygments.highlight(
|
||||||
|
headers, HTTPLexer(), self.formatter).strip()
|
||||||
|
|
||||||
|
def process_body(self, content, content_type, subtype):
|
||||||
|
try:
|
||||||
|
lexer = self.lexers_by_type.get(content_type)
|
||||||
|
if not lexer:
|
||||||
|
try:
|
||||||
|
lexer = get_lexer_for_mimetype(content_type)
|
||||||
|
except ClassNotFound:
|
||||||
|
lexer = get_lexer_by_name(subtype)
|
||||||
|
self.lexers_by_type[content_type] = lexer
|
||||||
|
except ClassNotFound:
|
||||||
|
pass
|
||||||
|
else:
|
||||||
|
content = pygments.highlight(content, lexer, self.formatter)
|
||||||
|
return content.strip()
|
||||||
|
|
||||||
|
|
||||||
|
class HeadersProcessor(BaseProcessor):
|
||||||
|
"""Sorts headers by name, retaining the relative order of multiple headers
|
||||||
|
with the same name.
|
||||||
|
|
||||||
|
"""
|
||||||
|
def process_headers(self, headers):
|
||||||
|
lines = headers.splitlines()
|
||||||
|
headers = sorted(lines[1:], key=lambda h: h.split(':')[0])
|
||||||
|
return '\r\n'.join(lines[:1] + headers)
|
||||||
|
|
||||||
|
|
||||||
|
class OutputProcessor(object):
|
||||||
|
"""A delegate class that invokes the actual processors."""
|
||||||
|
|
||||||
|
installed_processors = {
|
||||||
|
'format': [
|
||||||
|
HeadersProcessor,
|
||||||
|
JSONProcessor
|
||||||
|
],
|
||||||
|
'colors': [
|
||||||
|
PygmentsProcessor
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
def __init__(self, groups, env=Environment(), **kwargs):
|
||||||
|
"""
|
||||||
|
:param env: a :class:`models.Environment` instance
|
||||||
|
:param groups: the groups of processors to be applied
|
||||||
|
:param kwargs: additional keyword arguments for processors
|
||||||
|
|
||||||
|
"""
|
||||||
|
self.processors = []
|
||||||
|
for group in groups:
|
||||||
|
for cls in self.installed_processors[group]:
|
||||||
|
processor = cls(env, **kwargs)
|
||||||
|
if processor.enabled:
|
||||||
|
self.processors.append(processor)
|
||||||
|
|
||||||
|
def process_headers(self, headers):
|
||||||
|
for processor in self.processors:
|
||||||
|
headers = processor.process_headers(headers)
|
||||||
|
return headers
|
||||||
|
|
||||||
|
def process_body(self, content, content_type):
|
||||||
|
# e.g., 'application/atom+xml'
|
||||||
|
content_type = content_type.split(';')[0]
|
||||||
|
# e.g., 'xml'
|
||||||
|
subtype = content_type.split('/')[-1].split('+')[-1]
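# Worked example: 'application/atom+xml; charset=UTF-8' yields
# content_type == 'application/atom+xml' and subtype == 'xml'.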
|
||||||
|
|
||||||
|
for processor in self.processors:
|
||||||
|
content = processor.process_body(content, content_type, subtype)
|
||||||
|
|
||||||
|
return content
|
@ -1,49 +0,0 @@
|
|||||||
import os
|
|
||||||
import json
|
|
||||||
|
|
||||||
import pygments
|
|
||||||
|
|
||||||
from pygments.util import ClassNotFound
|
|
||||||
from pygments.styles import get_style_by_name, STYLE_MAP
|
|
||||||
from pygments.lexers import get_lexer_for_mimetype, HttpLexer
|
|
||||||
from pygments.formatters.terminal256 import Terminal256Formatter
|
|
||||||
from pygments.formatters.terminal import TerminalFormatter
|
|
||||||
|
|
||||||
from . import solarized
|
|
||||||
|
|
||||||
|
|
||||||
DEFAULT_STYLE = 'solarized'
|
|
||||||
AVAILABLE_STYLES = [DEFAULT_STYLE] + list(STYLE_MAP.keys())
|
|
||||||
FORMATTER = (Terminal256Formatter
|
|
||||||
if '256color' in os.environ.get('TERM', '')
|
|
||||||
else TerminalFormatter)
|
|
||||||
|
|
||||||
|
|
||||||
class PrettyHttp(object):
|
|
||||||
|
|
||||||
def __init__(self, style_name):
|
|
||||||
if style_name == 'solarized':
|
|
||||||
style = solarized.SolarizedStyle
|
|
||||||
else:
|
|
||||||
style = get_style_by_name(style_name)
|
|
||||||
self.formatter = FORMATTER(style=style)
|
|
||||||
|
|
||||||
def headers(self, content):
|
|
||||||
return pygments.highlight(content, HttpLexer(), self.formatter)
|
|
||||||
|
|
||||||
def body(self, content, content_type):
|
|
||||||
content_type = content_type.split(';')[0]
|
|
||||||
try:
|
|
||||||
lexer = get_lexer_for_mimetype(content_type)
|
|
||||||
except ClassNotFound:
|
|
||||||
return content
|
|
||||||
|
|
||||||
if content_type == 'application/json':
|
|
||||||
try:
|
|
||||||
# Indent and sort the JSON data.
|
|
||||||
content = json.dumps(json.loads(content),
|
|
||||||
sort_keys=True, indent=4)
|
|
||||||
except:
|
|
||||||
pass
|
|
||||||
|
|
||||||
return pygments.highlight(content, lexer, self.formatter)
|
|
233
httpie/sessions.py
Normal file
@ -0,0 +1,233 @@
|
|||||||
|
"""Persistent, JSON-serialized sessions.
|
||||||
|
|
||||||
|
"""
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import glob
|
||||||
|
import errno
|
||||||
|
import codecs
|
||||||
|
import shutil
|
||||||
|
import subprocess
|
||||||
|
|
||||||
|
import requests
|
||||||
|
from requests.compat import urlparse
|
||||||
|
from requests.cookies import RequestsCookieJar, create_cookie
|
||||||
|
from requests.auth import HTTPBasicAuth, HTTPDigestAuth
|
||||||
|
from argparse import OPTIONAL
|
||||||
|
|
||||||
|
from .config import BaseConfigDict, DEFAULT_CONFIG_DIR
|
||||||
|
from .output import PygmentsProcessor
|
||||||
|
|
||||||
|
|
||||||
|
SESSIONS_DIR_NAME = 'sessions'
|
||||||
|
DEFAULT_SESSIONS_DIR = os.path.join(DEFAULT_CONFIG_DIR, SESSIONS_DIR_NAME)
|
||||||
|
|
||||||
|
|
||||||
|
def get_response(name, request_kwargs, config_dir, read_only=False):
|
||||||
|
"""Like `client.get_response`, but applies permanent
|
||||||
|
aspects of the session to the request.
|
||||||
|
|
||||||
|
"""
|
||||||
|
sessions_dir = os.path.join(config_dir, SESSIONS_DIR_NAME)
|
||||||
|
host = Host(
|
||||||
|
root_dir=sessions_dir,
|
||||||
|
name=request_kwargs['headers'].get('Host', None)
|
||||||
|
or urlparse(request_kwargs['url']).netloc.split('@')[-1]
|
||||||
|
)
|
||||||
|
|
||||||
|
session = Session(host, name)
|
||||||
|
session.load()
|
||||||
|
|
||||||
|
# Update session headers with the request headers.
|
||||||
|
session['headers'].update(request_kwargs.get('headers', {}))
|
||||||
|
# Use the merged headers for the request
|
||||||
|
request_kwargs['headers'] = session['headers']
|
||||||
|
|
||||||
|
auth = request_kwargs.get('auth', None)
|
||||||
|
if auth:
|
||||||
|
session.auth = auth
|
||||||
|
elif session.auth:
|
||||||
|
request_kwargs['auth'] = session.auth
|
||||||
|
|
||||||
|
rsession = requests.Session(cookies=session.cookies)
|
||||||
|
try:
|
||||||
|
response = rsession.request(**request_kwargs)
|
||||||
|
except Exception:
|
||||||
|
raise
|
||||||
|
else:
|
||||||
|
# Existing sessions with `read_only=True` don't get updated.
|
||||||
|
if session.is_new or not read_only:
|
||||||
|
session.cookies = rsession.cookies
|
||||||
|
session.save()
|
||||||
|
return response
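# A rough usage sketch (illustrative values only, not part of the original
# source):
#
#   >>> response = get_response(
#   ...     'default',
#   ...     request_kwargs={'method': 'GET', 'url': 'http://example.org',
#   ...                     'headers': {}},
#   ...     config_dir=DEFAULT_CONFIG_DIR)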
|
||||||
|
|
||||||
|
|
||||||
|
class Host(object):
|
||||||
|
"""A host is a per-host directory on disk containing session files."""
|
||||||
|
|
||||||
|
def __init__(self, name, root_dir=DEFAULT_CONFIG_DIR):
|
||||||
|
self.name = name
|
||||||
|
self.root_dir = root_dir
|
||||||
|
|
||||||
|
def __iter__(self):
|
||||||
|
"""Return an iterator yielding `(session_name, session_path)`."""
|
||||||
|
for fn in sorted(glob.glob1(self.path, '*.json')):
|
||||||
|
yield os.path.splitext(fn)[0], os.path.join(self.path, fn)
|
||||||
|
|
||||||
|
def delete(self):
|
||||||
|
shutil.rmtree(self.path)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def path(self):
|
||||||
|
# Name will include ':' if a port is specified, which is invalid
|
||||||
|
# on windows. DNS does not allow '_' in a domain, or for it to end
|
||||||
|
# in a number (I think?)
|
||||||
|
path = os.path.join(self.root_dir, self.name.replace(':', '_'))
|
||||||
|
try:
|
||||||
|
os.makedirs(path, mode=0o700)
|
||||||
|
except OSError as e:
|
||||||
|
if e.errno != errno.EEXIST:
|
||||||
|
raise
|
||||||
|
return path
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def all(cls):
|
||||||
|
"""Return a generator yielding one host at a time."""
|
||||||
|
for name in sorted(glob.glob1(DEFAULT_SESSIONS_DIR, '*')):
|
||||||
|
if os.path.isdir(os.path.join(DEFAULT_SESSIONS_DIR, name)):
|
||||||
|
yield Host(name)
|
||||||
|
|
||||||
|
|
||||||
|
class Session(BaseConfigDict):
|
||||||
|
"""A persistent session holding headers, cookies, and auth, stored as JSON."""
|
||||||
|
|
||||||
|
def __init__(self, host, name, *args, **kwargs):
|
||||||
|
super(Session, self).__init__(*args, **kwargs)
|
||||||
|
self.host = host
|
||||||
|
self.name = name
|
||||||
|
self['headers'] = {}
|
||||||
|
self['cookies'] = {}
|
||||||
|
|
||||||
|
@property
|
||||||
|
def directory(self):
|
||||||
|
return self.host.path
|
||||||
|
|
||||||
|
@property
|
||||||
|
def cookies(self):
|
||||||
|
jar = RequestsCookieJar()
|
||||||
|
for name, cookie_dict in self['cookies'].items():
|
||||||
|
jar.set_cookie(create_cookie(
|
||||||
|
name, cookie_dict.pop('value'), **cookie_dict))
|
||||||
|
jar.clear_expired_cookies()
|
||||||
|
return jar
|
||||||
|
|
||||||
|
@cookies.setter
|
||||||
|
def cookies(self, jar):
|
||||||
|
excluded = [
|
||||||
|
'_rest', 'name', 'port_specified',
|
||||||
|
'domain_specified', 'domain_initial_dot',
|
||||||
|
'path_specified', 'comment', 'comment_url'
|
||||||
|
]
|
||||||
|
self['cookies'] = {}
|
||||||
|
for host in jar._cookies.values():
|
||||||
|
for path in host.values():
|
||||||
|
for name, cookie in path.items():
|
||||||
|
cookie_dict = {}
|
||||||
|
for k, v in cookie.__dict__.items():
|
||||||
|
if k not in excluded:
|
||||||
|
cookie_dict[k] = v
|
||||||
|
self['cookies'][name] = cookie_dict
|
||||||
|
|
||||||
|
@property
|
||||||
|
def auth(self):
|
||||||
|
auth = self.get('auth', None)
|
||||||
|
if not auth:
|
||||||
|
return None
|
||||||
|
Auth = {'basic': HTTPBasicAuth,
|
||||||
|
'digest': HTTPDigestAuth}[auth['type']]
|
||||||
|
return Auth(auth['username'], auth['password'])
|
||||||
|
|
||||||
|
@auth.setter
|
||||||
|
def auth(self, cred):
|
||||||
|
self['auth'] = {
|
||||||
|
'type': {HTTPBasicAuth: 'basic',
|
||||||
|
HTTPDigestAuth: 'digest'}[type(cred)],
|
||||||
|
'username': cred.username,
|
||||||
|
'password': cred.password,
|
||||||
|
}
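# For illustration: storing HTTPBasicAuth('john', 's3cr3t') results in
# self['auth'] == {'type': 'basic', 'username': 'john', 'password': 's3cr3t'}.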
|
||||||
|
|
||||||
|
|
||||||
|
# The commands are disabled for now.
|
||||||
|
# TODO: write tests for the commands.
|
||||||
|
|
||||||
|
def list_command(args):
|
||||||
|
if args.host:
|
||||||
|
for name, path in Host(args.host):
|
||||||
|
print(name + ' [' + path + ']')
|
||||||
|
else:
|
||||||
|
for host in Host.all():
|
||||||
|
print(host.name)
|
||||||
|
for name, path in host:
|
||||||
|
print(' ' + name + ' [' + path + ']')
|
||||||
|
|
||||||
|
|
||||||
|
def show_command(args):
|
||||||
|
path = Session(Host(args.host), args.name).path
|
||||||
|
if not os.path.exists(path):
|
||||||
|
sys.stderr.write('Session "%s" does not exist [%s].\n'
|
||||||
|
% (args.name, path))
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
with codecs.open(path, encoding='utf8') as f:
|
||||||
|
print(path + ':\n')
|
||||||
|
proc = PygmentsProcessor()
|
||||||
|
print(proc.process_body(f.read(), 'application/json', 'json'))
|
||||||
|
print('')
|
||||||
|
|
||||||
|
|
||||||
|
def delete_command(args):
|
||||||
|
host = Host(args.host)
|
||||||
|
if not args.name:
|
||||||
|
host.delete()
|
||||||
|
else:
|
||||||
|
Session(host, args.name).delete()
|
||||||
|
|
||||||
|
|
||||||
|
def edit_command(args):
|
||||||
|
editor = os.environ.get('EDITOR', None)
|
||||||
|
if not editor:
|
||||||
|
sys.stderr.write(
|
||||||
|
'You need to configure the environment variable EDITOR.\n')
|
||||||
|
sys.exit(1)
|
||||||
|
command = editor.split()
|
||||||
|
command.append(Session(Host(args.host), args.name).path)
|
||||||
|
subprocess.call(command)
|
||||||
|
|
||||||
|
|
||||||
|
def add_commands(subparsers):
|
||||||
|
|
||||||
|
# List
|
||||||
|
list_ = subparsers.add_parser('session-list', help='list sessions')
|
||||||
|
list_.set_defaults(command=list_command)
|
||||||
|
list_.add_argument('host', nargs=OPTIONAL)
|
||||||
|
|
||||||
|
# Show
|
||||||
|
show = subparsers.add_parser('session-show', help='show a session')
|
||||||
|
show.set_defaults(command=show_command)
|
||||||
|
show.add_argument('host')
|
||||||
|
show.add_argument('name')
|
||||||
|
|
||||||
|
# Edit
|
||||||
|
edit = subparsers.add_parser(
|
||||||
|
'session-edit', help='edit a session in $EDITOR')
|
||||||
|
edit.set_defaults(command=edit_command)
|
||||||
|
edit.add_argument('host')
|
||||||
|
edit.add_argument('name')
|
||||||
|
|
||||||
|
# Delete
|
||||||
|
delete = subparsers.add_parser('session-delete', help='delete a session')
|
||||||
|
delete.set_defaults(command=delete_command)
|
||||||
|
delete.add_argument('host')
|
||||||
|
delete.add_argument('name', nargs=OPTIONAL,
|
||||||
|
help='The name of the session to be deleted.'
|
||||||
|
' If not specified, all host sessions are deleted.')
|
@ -1,74 +1,57 @@
|
|||||||
"""
|
# -*- coding: utf-8 -*-
|
||||||
A Pygments_ style based on the dark background variant of Solarized_.
|
|
||||||
|
|
||||||
.. _Pygments: http://pygments.org/
|
|
||||||
.. _Solarized: http://ethanschoonover.com/solarized
|
|
||||||
|
|
||||||
Copyright (c) 2011 Hank Gay
|
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
|
||||||
in the Software without restriction, including without limitation the rights
|
|
||||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
||||||
copies of the Software, and to permit persons to whom the Software is
|
|
||||||
furnished to do so, subject to the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be included in
|
|
||||||
all copies or substantial portions of the Software.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
||||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
||||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
||||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
||||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
||||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
|
||||||
THE SOFTWARE.
|
|
||||||
|
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
solarized256
|
||||||
|
------------
|
||||||
|
|
||||||
|
A Pygments style inspired by Solarized's 256 color mode.
|
||||||
|
|
||||||
|
:copyright: (c) 2011 by Hank Gay, (c) 2012 by John Mastro.
|
||||||
|
:license: BSD, see LICENSE for more details.
|
||||||
|
"""
|
||||||
|
|
||||||
from pygments.style import Style
|
from pygments.style import Style
|
||||||
from pygments.token import (Token, Comment, Name, Keyword, Generic, Number,
|
from pygments.token import Token, Comment, Name, Keyword, Generic, Number, \
|
||||||
Operator, String)
|
Operator, String
|
||||||
|
|
||||||
|
BASE03 = "#1c1c1c"
|
||||||
|
BASE02 = "#262626"
|
||||||
|
BASE01 = "#4e4e4e"
|
||||||
|
BASE00 = "#585858"
|
||||||
|
BASE0 = "#808080"
|
||||||
|
BASE1 = "#8a8a8a"
|
||||||
|
BASE2 = "#d7d7af"
|
||||||
|
BASE3 = "#ffffd7"
|
||||||
|
YELLOW = "#af8700"
|
||||||
|
ORANGE = "#d75f00"
|
||||||
|
RED = "#af0000"
|
||||||
|
MAGENTA = "#af005f"
|
||||||
|
VIOLET = "#5f5faf"
|
||||||
|
BLUE = "#0087ff"
|
||||||
|
CYAN = "#00afaf"
|
||||||
|
GREEN = "#5f8700"
|
||||||
|
|
||||||
|
|
||||||
BASE03 = '#002B36'
|
class Solarized256Style(Style):
|
||||||
BASE02 = '#073642'
|
|
||||||
BASE01 = '#586E75'
|
|
||||||
BASE00 = '#657B83'
|
|
||||||
BASE0 = '#839496'
|
|
||||||
BASE1 = '#93A1A1'
|
|
||||||
BASE2 = '#EEE8D5'
|
|
||||||
BASE3 = '#FDF6E3'
|
|
||||||
YELLOW = '#B58900'
|
|
||||||
ORANGE = '#CB4B16'
|
|
||||||
RED = '#DC322F'
|
|
||||||
MAGENTA = '#D33682'
|
|
||||||
VIOLET = '#6C71C4'
|
|
||||||
BLUE = '#268BD2'
|
|
||||||
CYAN = '#2AA198'
|
|
||||||
GREEN = '#859900'
|
|
||||||
|
|
||||||
|
|
||||||
class SolarizedStyle(Style):
|
|
||||||
background_color = BASE03
|
background_color = BASE03
|
||||||
styles = {
|
styles = {
|
||||||
Keyword: GREEN,
|
Keyword: GREEN,
|
||||||
Keyword.Constant: ORANGE,
|
Keyword.Constant: ORANGE,
|
||||||
Keyword.Declaration: BLUE,
|
Keyword.Declaration: BLUE,
|
||||||
#Keyword.Namespace
|
Keyword.Namespace: ORANGE,
|
||||||
#Keyword.Pseudo
|
#Keyword.Pseudo
|
||||||
Keyword.Reserved: BLUE,
|
Keyword.Reserved: BLUE,
|
||||||
Keyword.Type: RED,
|
Keyword.Type: RED,
|
||||||
|
|
||||||
#Name
|
#Name
|
||||||
Name.Attribute: BASE1,
|
Name.Attribute: BASE1,
|
||||||
Name.Builtin: YELLOW,
|
Name.Builtin: BLUE,
|
||||||
Name.Builtin.Pseudo: BLUE,
|
Name.Builtin.Pseudo: BLUE,
|
||||||
Name.Class: BLUE,
|
Name.Class: BLUE,
|
||||||
Name.Constant: ORANGE,
|
Name.Constant: ORANGE,
|
||||||
Name.Decorator: BLUE,
|
Name.Decorator: BLUE,
|
||||||
Name.Entity: ORANGE,
|
Name.Entity: ORANGE,
|
||||||
Name.Exception: ORANGE,
|
Name.Exception: YELLOW,
|
||||||
Name.Function: BLUE,
|
Name.Function: BLUE,
|
||||||
#Name.Label
|
#Name.Label
|
||||||
#Name.Namespace
|
#Name.Namespace
|
||||||
@ -84,10 +67,10 @@ class SolarizedStyle(Style):
|
|||||||
String: CYAN,
|
String: CYAN,
|
||||||
String.Backtick: BASE01,
|
String.Backtick: BASE01,
|
||||||
String.Char: CYAN,
|
String.Char: CYAN,
|
||||||
String.Doc: BASE1,
|
String.Doc: CYAN,
|
||||||
#String.Double
|
#String.Double
|
||||||
String.Escape: ORANGE,
|
String.Escape: RED,
|
||||||
String.Heredoc: BASE1,
|
String.Heredoc: CYAN,
|
||||||
#String.Interpol
|
#String.Interpol
|
||||||
#String.Other
|
#String.Other
|
||||||
String.Regex: RED,
|
String.Regex: RED,
|
||||||
@ -100,8 +83,8 @@ class SolarizedStyle(Style):
|
|||||||
#Number.Integer.Long
|
#Number.Integer.Long
|
||||||
#Number.Oct
|
#Number.Oct
|
||||||
|
|
||||||
Operator: GREEN,
|
Operator: BASE1,
|
||||||
#Operator.Word
|
Operator.Word: GREEN,
|
||||||
|
|
||||||
#Punctuation: ORANGE,
|
#Punctuation: ORANGE,
|
||||||
|
|
||||||
|
24
setup.py
@ -1,28 +1,43 @@
|
|||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
|
import re
|
||||||
|
import codecs
|
||||||
from setuptools import setup
|
from setuptools import setup
|
||||||
import httpie
|
import httpie
|
||||||
|
|
||||||
|
|
||||||
if sys.argv[-1] == 'test':
|
if sys.argv[-1] == 'test':
|
||||||
sys.exit(os.system('python tests/tests.py'))
|
status = os.system('python tests/tests.py')
|
||||||
|
sys.exit(1 if status > 127 else status)
|
||||||
|
|
||||||
|
|
||||||
requirements = [
|
requirements = [
|
||||||
# Debian has only requests==0.10.1 and httpie.deb depends on that.
|
# Debian has only requests==0.10.1 and httpie.deb depends on that.
|
||||||
'requests>=0.10.1',
|
'requests>=0.10.1,<1.0',
|
||||||
'Pygments>=1.5'
|
'Pygments>=1.5'
|
||||||
]
|
]
|
||||||
if sys.version_info[:2] in ((2, 6), (3, 1)):
|
if sys.version_info[:2] in ((2, 6), (3, 1)):
|
||||||
# argparse has been added in Python 3.2 / 2.7
|
# argparse has been added in Python 3.2 / 2.7
|
||||||
requirements.append('argparse>=1.2.1')
|
requirements.append('argparse>=1.2.1')
|
||||||
|
if 'win32' in str(sys.platform).lower():
|
||||||
|
# Terminal colors for Windows
|
||||||
|
requirements.append('colorama>=0.2.4')
|
||||||
|
|
||||||
|
|
||||||
|
def long_description():
|
||||||
|
"""Pre-process the README so that PyPi can render it properly."""
|
||||||
|
with codecs.open('README.rst', encoding='utf8') as f:
|
||||||
|
rst = f.read()
|
||||||
|
code_block = '(:\n\n)?\.\. code-block::.*'
|
||||||
|
rst = re.sub(code_block, '::', rst)
|
||||||
|
return rst
|
||||||
|
|
||||||
|
|
||||||
setup(
|
setup(
|
||||||
name='httpie',
|
name='httpie',
|
||||||
version=httpie.__version__,
|
version=httpie.__version__,
|
||||||
description=httpie.__doc__.strip(),
|
description=httpie.__doc__.strip(),
|
||||||
long_description=open('README.rst').read(),
|
long_description=long_description(),
|
||||||
url='http://httpie.org/',
|
url='http://httpie.org/',
|
||||||
download_url='https://github.com/jkbr/httpie',
|
download_url='https://github.com/jkbr/httpie',
|
||||||
author=httpie.__author__,
|
author=httpie.__author__,
|
||||||
@ -32,6 +47,8 @@ setup(
|
|||||||
entry_points={
|
entry_points={
|
||||||
'console_scripts': [
|
'console_scripts': [
|
||||||
'http = httpie.__main__:main',
|
'http = httpie.__main__:main',
|
||||||
|
# Not ready yet.
|
||||||
|
# 'httpie = httpie.manage:main',
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
install_requires=requirements,
|
install_requires=requirements,
|
||||||
@ -51,5 +68,6 @@ setup(
|
|||||||
'Topic :: System :: Networking',
|
'Topic :: System :: Networking',
|
||||||
'Topic :: Terminals',
|
'Topic :: Terminals',
|
||||||
'Topic :: Text Processing',
|
'Topic :: Text Processing',
|
||||||
|
'Topic :: Utilities'
|
||||||
],
|
],
|
||||||
)
|
)
|
||||||
|
BIN
tests/fixtures/file.bin
vendored
Normal file
Binary file not shown (1.1 KiB).
1
tests/fixtures/file2.txt
vendored
Normal file
@ -0,0 +1 @@
|
|||||||
|
__test_file_content__
|
1263
tests/tests.py
Normal file → Executable file
File diff suppressed because it is too large
2
tox.ini
@ -4,7 +4,7 @@
|
|||||||
# and then run "tox" from this directory.
|
# and then run "tox" from this directory.
|
||||||
|
|
||||||
[tox]
|
[tox]
|
||||||
envlist = py26, py27, py30, py31, py32, pypy
|
envlist = py26, py27, py32, pypy
|
||||||
|
|
||||||
[testenv]
|
[testenv]
|
||||||
commands = {envpython} setup.py test
|
commands = {envpython} setup.py test
|
||||||
|