Mirror of https://github.com/httpie/cli.git (synced 2024-11-22 07:43:20 +01:00)

Commit 05db75bdb1 (parent c06598a0c4)

Modularized output, refactoring

Making it ready for output formatting plugin API.
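The commit message only hints at the plugin API this refactoring prepares for. As a purely illustrative sketch of that direction, a custom output processor under the new layout would subclass BaseProcessor from httpie/output/processors/base.py (added later in this diff) and implement process_body(body, mime). The CSVProcessor name and the registration line are hypothetical; the commit itself only wires up the built-in processors in ProcessorManager.available.

# Hypothetical sketch only -- not part of this commit. It assumes the
# BaseProcessor / ProcessorManager API introduced below stays as-is.
from httpie.output.processors import ProcessorManager
from httpie.output.processors.base import BaseProcessor


class CSVProcessor(BaseProcessor):
    """Align comma-separated bodies into columns (illustrative example)."""

    def process_body(self, body, mime):
        if mime == 'text/csv' and body:
            rows = [line.split(',') for line in body.splitlines()]
            widths = [max(len(cell) for cell in col) for col in zip(*rows)]
            body = '\n'.join(
                '  '.join(cell.ljust(w) for cell, w in zip(row, widths))
                for row in rows
            )
        return body


# A future plugin API could expose this registry; in this commit the groups
# are hard-coded, so the following line is purely illustrative:
ProcessorManager.available['format'].append(CSVProcessor)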
@@ -13,7 +13,7 @@ from . import __version__
 from .plugins.builtin import BuiltinAuthPlugin
 from .plugins import plugin_manager
 from .sessions import DEFAULT_SESSIONS_DIR
-from .output import AVAILABLE_STYLES, DEFAULT_STYLE
+from .output.processors.colors import AVAILABLE_STYLES, DEFAULT_STYLE
 from .input import (Parser, AuthCredentialsArgType, KeyValueArgType,
                     SEP_PROXY, SEP_CREDENTIALS, SEP_GROUP_ALL_ITEMS,
                     OUT_REQ_HEAD, OUT_REQ_BODY, OUT_RESP_HEAD,
@@ -34,6 +34,7 @@ except ImportError:
     # noinspection PyCompatibility
     from UserDict import DictMixin
 
+    # noinspection PyShadowingBuiltins
     class OrderedDict(dict, DictMixin):
         # Copyright (c) 2009 Raymond Hettinger
         #
@@ -56,6 +57,7 @@ except ImportError:
         # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
         # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
         # OTHER DEALINGS IN THE SOFTWARE.
+        # noinspection PyMissingConstructor
         def __init__(self, *args, **kwds):
             if len(args) > 1:
                 raise TypeError('expected at most 1 arguments, got %d'
@@ -68,6 +70,7 @@ except ImportError:
 
         def clear(self):
             self.__end = end = []
+            # noinspection PyUnusedLocal
             end += [None, end, end]  # sentinel node for doubly linked list
             self.__map = {}  # key --> [key, prev, next]
             dict.clear(self)
@@ -139,6 +142,7 @@ except ImportError:
         def copy(self):
             return self.__class__(self)
 
+        # noinspection PyMethodOverriding
         @classmethod
        def fromkeys(cls, iterable, value=None):
             d = cls()
@@ -43,7 +43,6 @@ class BaseConfigDict(dict):
                 raise
         return path
 
-    @property
     def is_new(self):
         return not os.path.exists(self._get_path())
 
httpie/context.py (new file, 68 lines)
@@ -0,0 +1,68 @@
import os
import sys

from requests.compat import is_windows

from httpie.config import DEFAULT_CONFIG_DIR, Config


class Environment(object):
    """
    Information about the execution context
    (standard streams, config directory, etc).

    By default, it represents the actual environment.
    All of the attributes can be overwritten though, which
    is used by the test suite to simulate various scenarios.

    """
    is_windows = is_windows
    config_dir = DEFAULT_CONFIG_DIR
    colors = 256 if '256color' in os.environ.get('TERM', '') else 88
    stdin = sys.stdin
    stdin_isatty = stdin.isatty()
    stdin_encoding = None
    stdout = sys.stdout
    stdout_isatty = stdout.isatty()
    stdout_encoding = None
    stderr = sys.stderr
    stderr_isatty = stderr.isatty()
    if is_windows:
        # noinspection PyUnresolvedReferences
        from colorama.initialise import wrap_stream
        stdout = wrap_stream(stdout, convert=None, strip=None,
                             autoreset=True, wrap=True)
        stderr = wrap_stream(stderr, convert=None, strip=None,
                             autoreset=True, wrap=True)

    def __init__(self, **kwargs):
        """
        Use keyword arguments to overwrite
        any of the class attributes for this instance.

        """
        assert all(hasattr(type(self), attr) for attr in kwargs.keys())
        self.__dict__.update(**kwargs)

        # Keyword arguments > stream.encoding > default utf8
        if self.stdin_encoding is None:
            self.stdin_encoding = getattr(
                self.stdin, 'encoding', None) or 'utf8'
        if self.stdout_encoding is None:
            actual_stdout = self.stdout
            if is_windows:
                from colorama import AnsiToWin32
                if isinstance(self.stdout, AnsiToWin32):
                    actual_stdout = self.stdout.wrapped
            self.stdout_encoding = getattr(
                actual_stdout, 'encoding', None) or 'utf8'

    @property
    def config(self):
        if not hasattr(self, '_config'):
            self._config = Config(directory=self.config_dir)
            if self._config.is_new():
                self._config.save()
            else:
                self._config.load()
        return self._config
@@ -4,8 +4,8 @@ Invocation flow:
 
 1. Read, validate and process the input (args, `stdin`).
 2. Create and send a request.
-3. Stream, and possibly process and format, the requested parts
-   of the request-response exchange.
+3. Stream, and possibly process and format, the parts
+   of the request-response exchange selected by output options.
 4. Simultaneously write to `stdout`
 5. Exit.
 
@@ -14,17 +14,19 @@ import sys
 import errno
 
 import requests
-from httpie import __version__ as httpie_version
 from requests import __version__ as requests_version
 from pygments import __version__ as pygments_version
 
-from .compat import str, bytes, is_py3
-from .client import get_response
-from .downloads import Download
-from .models import Environment
-from .output import build_output_stream, write, write_with_colors_win_py3
-from . import ExitStatus
-from .plugins import plugin_manager
+from httpie import __version__ as httpie_version, ExitStatus
+from httpie.compat import str, bytes, is_py3
+from httpie.client import get_response
+from httpie.downloads import Download
+from httpie.context import Environment
+from httpie.plugins import plugin_manager
+from httpie.output.streams import (
+    build_output_stream,
+    write, write_with_colors_win_py3
+)
 
 
 def get_exit_status(http_status, follow=False):
@@ -12,10 +12,10 @@ import threading
 from time import sleep, time
 from mailbox import Message
 
-from .output import RawStream
-from .models import HTTPResponse
-from .utils import humanize_bytes
-from .compat import urlsplit
+from httpie.output.streams import RawStream
+from httpie.models import HTTPResponse
+from httpie.utils import humanize_bytes
+from httpie.compat import urlsplit
 
 
 PARTIAL_CONTENT = 206
@@ -132,8 +132,7 @@ class Parser(ArgumentParser):
         if not self.args.ignore_stdin and not env.stdin_isatty:
             self._body_from_file(self.env.stdin)
         if not (self.args.url.startswith((HTTP, HTTPS))):
-            # Default to 'https://' if invoked as `https args`.
-            scheme = HTTPS if self.env.progname == 'https' else HTTP
+            scheme = HTTP
 
         # See if we're using curl style shorthand for localhost (:3000/foo)
         shorthand = re.match(r'^:(?!:)(\d*)(/?.*)$', self.args.url)
@@ -277,10 +276,8 @@ class Parser(ArgumentParser):
             # and the first ITEM is now incorrectly in `args.url`.
             try:
                 # Parse the URL as an ITEM and store it as the first ITEM arg.
-                self.args.items.insert(
-                    0,
-                    KeyValueArgType(*SEP_GROUP_ALL_ITEMS).__call__(self.args.url)
-                )
+                self.args.items.insert(0, KeyValueArgType(
+                    *SEP_GROUP_ALL_ITEMS).__call__(self.args.url))
 
             except ArgumentTypeError as e:
                 if self.args.traceback:
@@ -292,11 +289,9 @@ class Parser(ArgumentParser):
             self.args.url = self.args.method
             # Infer the method
             has_data = (
-                (not self.args.ignore_stdin and
-                 not self.env.stdin_isatty) or any(
-                    item.sep in SEP_GROUP_DATA_ITEMS
-                    for item in self.args.items
-                )
+                (not self.args.ignore_stdin and not self.env.stdin_isatty)
+                or any(item.sep in SEP_GROUP_DATA_ITEMS
+                       for item in self.args.items)
             )
             self.args.method = HTTP_POST if has_data else HTTP_GET
 
@@ -1,75 +1,4 @@
-import os
-import sys
-
-from .config import DEFAULT_CONFIG_DIR, Config
-from .compat import urlsplit, is_windows, str
-
-
-class Environment(object):
-    """Holds information about the execution context.
-
-    Groups various aspects of the environment in a changeable object
-    and allows for mocking.
-
-    """
-    is_windows = is_windows
-
-    progname = os.path.basename(sys.argv[0])
-    if progname not in ['http', 'https']:
-        progname = 'http'
-
-    config_dir = DEFAULT_CONFIG_DIR
-
-    # Can be set to 0 to disable colors completely.
-    colors = 256 if '256color' in os.environ.get('TERM', '') else 88
-
-    stdin = sys.stdin
-    stdin_isatty = sys.stdin.isatty()
-
-    stdout_isatty = sys.stdout.isatty()
-    stderr_isatty = sys.stderr.isatty()
-    if is_windows:
-        # noinspection PyUnresolvedReferences
-        from colorama.initialise import wrap_stream
-        stdout = wrap_stream(sys.stdout, convert=None,
-                             strip=None, autoreset=True, wrap=True)
-        stderr = wrap_stream(sys.stderr, convert=None,
-                             strip=None, autoreset=True, wrap=True)
-    else:
-        stdout = sys.stdout
-        stderr = sys.stderr
-
-    stdin_encoding = None
-    stdout_encoding = None
-
-    def __init__(self, **kwargs):
-        assert all(hasattr(type(self), attr)
-                   for attr in kwargs.keys())
-        self.__dict__.update(**kwargs)
-
-        if self.stdin_encoding is None:
-            self.stdin_encoding = getattr(
-                self.stdin, 'encoding', None) or 'utf8'
-
-        if self.stdout_encoding is None:
-            actual_stdout = self.stdout
-            if is_windows:
-                from colorama import AnsiToWin32
-                if isinstance(self.stdout, AnsiToWin32):
-                    actual_stdout = self.stdout.wrapped
-            self.stdout_encoding = getattr(
-                actual_stdout, 'encoding', None) or 'utf8'
-
-    @property
-    def config(self):
-        if not hasattr(self, '_config'):
-            self._config = Config(directory=self.config_dir)
-            if self._config.is_new:
-                self._config.save()
-            else:
-                self._config.load()
-        return self._config
-
-
+from .compat import urlsplit, str
 
 
 class HTTPMessage(object):
httpie/output.py (deleted, 546 lines)
@@ -1,546 +0,0 @@
"""Output streaming, processing and formatting.

"""
import json
from xml.etree import ElementTree
from functools import partial
from itertools import chain

import pygments
from pygments import token, lexer
from pygments.styles import get_style_by_name, STYLE_MAP
from pygments.lexers import get_lexer_for_mimetype, get_lexer_by_name
from pygments.formatters.terminal import TerminalFormatter
from pygments.formatters.terminal256 import Terminal256Formatter
from pygments.util import ClassNotFound

from .compat import is_windows
from .solarized import Solarized256Style
from .models import HTTPRequest, HTTPResponse, Environment
from .input import (OUT_REQ_BODY, OUT_REQ_HEAD,
                    OUT_RESP_HEAD, OUT_RESP_BODY)


# The default number of spaces to indent when pretty printing
DEFAULT_INDENT = 4

# Colors on Windows via colorama don't look that
# great and fruity seems to give the best result there.
AVAILABLE_STYLES = set(STYLE_MAP.keys())
AVAILABLE_STYLES.add('solarized')
DEFAULT_STYLE = 'solarized' if not is_windows else 'fruity'


BINARY_SUPPRESSED_NOTICE = (
    b'\n'
    b'+-----------------------------------------+\n'
    b'| NOTE: binary data not shown in terminal |\n'
    b'+-----------------------------------------+'
)


class BinarySuppressedError(Exception):
    """An error indicating that the body is binary and won't be written,
    e.g., for terminal output)."""

    message = BINARY_SUPPRESSED_NOTICE


###############################################################################
# Output Streams
###############################################################################


def write(stream, outfile, flush):
    """Write the output stream."""
    try:
        # Writing bytes so we use the buffer interface (Python 3).
        buf = outfile.buffer
    except AttributeError:
        buf = outfile

    for chunk in stream:
        buf.write(chunk)
        if flush:
            outfile.flush()


def write_with_colors_win_py3(stream, outfile, flush):
    """Like `write`, but colorized chunks are written as text
    directly to `outfile` to ensure it gets processed by colorama.
    Applies only to Windows with Python 3 and colorized terminal output.

    """
    color = b'\x1b['
    encoding = outfile.encoding
    for chunk in stream:
        if color in chunk:
            outfile.write(chunk.decode(encoding))
        else:
            outfile.buffer.write(chunk)
        if flush:
            outfile.flush()


def build_output_stream(args, env, request, response):
    """Build and return a chain of iterators over the `request`-`response`
    exchange each of which yields `bytes` chunks.

    """

    req_h = OUT_REQ_HEAD in args.output_options
    req_b = OUT_REQ_BODY in args.output_options
    resp_h = OUT_RESP_HEAD in args.output_options
    resp_b = OUT_RESP_BODY in args.output_options
    req = req_h or req_b
    resp = resp_h or resp_b

    output = []
    Stream = get_stream_type(env, args)

    if req:
        output.append(Stream(
            msg=HTTPRequest(request),
            with_headers=req_h,
            with_body=req_b))

    if req_b and resp:
        # Request/Response separator.
        output.append([b'\n\n'])

    if resp:
        output.append(Stream(
            msg=HTTPResponse(response),
            with_headers=resp_h,
            with_body=resp_b))

    if env.stdout_isatty and resp_b:
        # Ensure a blank line after the response body.
        # For terminal output only.
        output.append([b'\n\n'])

    return chain(*output)


def get_stream_type(env, args):
    """Pick the right stream type based on `env` and `args`.
    Wrap it in a partial with the type-specific args so that
    we don't need to think what stream we are dealing with.

    """
    if not env.stdout_isatty and not args.prettify:
        Stream = partial(
            RawStream,
            chunk_size=RawStream.CHUNK_SIZE_BY_LINE
            if args.stream
            else RawStream.CHUNK_SIZE
        )
    elif args.prettify:
        Stream = partial(
            PrettyStream if args.stream else BufferedPrettyStream,
            env=env,
            processor=OutputProcessor(
                env=env, groups=args.prettify, pygments_style=args.style),
        )
    else:
        Stream = partial(EncodedStream, env=env)

    return Stream


class BaseStream(object):
    """Base HTTP message output stream class."""

    def __init__(self, msg, with_headers=True, with_body=True,
                 on_body_chunk_downloaded=None):
        """
        :param msg: a :class:`models.HTTPMessage` subclass
        :param with_headers: if `True`, headers will be included
        :param with_body: if `True`, body will be included

        """
        assert with_headers or with_body
        self.msg = msg
        self.with_headers = with_headers
        self.with_body = with_body
        self.on_body_chunk_downloaded = on_body_chunk_downloaded

    def _get_headers(self):
        """Return the headers' bytes."""
        return self.msg.headers.encode('utf8')

    def _iter_body(self):
        """Return an iterator over the message body."""
        raise NotImplementedError()

    def __iter__(self):
        """Return an iterator over `self.msg`."""
        if self.with_headers:
            yield self._get_headers()
            yield b'\r\n\r\n'

        if self.with_body:
            try:
                for chunk in self._iter_body():
                    yield chunk
                    if self.on_body_chunk_downloaded:
                        self.on_body_chunk_downloaded(chunk)
            except BinarySuppressedError as e:
                if self.with_headers:
                    yield b'\n'
                yield e.message


class RawStream(BaseStream):
    """The message is streamed in chunks with no processing."""

    CHUNK_SIZE = 1024 * 100
    CHUNK_SIZE_BY_LINE = 1

    def __init__(self, chunk_size=CHUNK_SIZE, **kwargs):
        super(RawStream, self).__init__(**kwargs)
        self.chunk_size = chunk_size

    def _iter_body(self):
        return self.msg.iter_body(self.chunk_size)


class EncodedStream(BaseStream):
    """Encoded HTTP message stream.

    The message bytes are converted to an encoding suitable for
    `self.env.stdout`. Unicode errors are replaced and binary data
    is suppressed. The body is always streamed by line.

    """
    CHUNK_SIZE = 1

    def __init__(self, env=Environment(), **kwargs):

        super(EncodedStream, self).__init__(**kwargs)

        if env.stdout_isatty:
            # Use the encoding supported by the terminal.
            output_encoding = env.stdout_encoding
        else:
            # Preserve the message encoding.
            output_encoding = self.msg.encoding

        # Default to utf8 when unsure.
        self.output_encoding = output_encoding or 'utf8'

    def _iter_body(self):

        for line, lf in self.msg.iter_lines(self.CHUNK_SIZE):

            if b'\0' in line:
                raise BinarySuppressedError()

            yield line.decode(self.msg.encoding)\
                      .encode(self.output_encoding, 'replace') + lf


class PrettyStream(EncodedStream):
    """In addition to :class:`EncodedStream` behaviour, this stream applies
    content processing.

    Useful for long-lived HTTP responses that stream by lines
    such as the Twitter streaming API.

    """

    CHUNK_SIZE = 1

    def __init__(self, processor, **kwargs):
        super(PrettyStream, self).__init__(**kwargs)
        self.processor = processor

    def _get_headers(self):
        return self.processor.process_headers(
            self.msg.headers).encode(self.output_encoding)

    def _iter_body(self):
        for line, lf in self.msg.iter_lines(self.CHUNK_SIZE):
            if b'\0' in line:
                raise BinarySuppressedError()
            yield self._process_body(line) + lf

    def _process_body(self, chunk):
        return (self.processor
                .process_body(
                    content=chunk.decode(self.msg.encoding, 'replace'),
                    content_type=self.msg.content_type,
                    encoding=self.msg.encoding)
                .encode(self.output_encoding, 'replace'))


class BufferedPrettyStream(PrettyStream):
    """The same as :class:`PrettyStream` except that the body is fully
    fetched before it's processed.

    Suitable regular HTTP responses.

    """

    CHUNK_SIZE = 1024 * 10

    def _iter_body(self):

        # Read the whole body before prettifying it,
        # but bail out immediately if the body is binary.
        body = bytearray()
        for chunk in self.msg.iter_body(self.CHUNK_SIZE):
            if b'\0' in chunk:
                raise BinarySuppressedError()
            body.extend(chunk)

        yield self._process_body(body)


###############################################################################
# Processing
###############################################################################

class HTTPLexer(lexer.RegexLexer):
    """Simplified HTTP lexer for Pygments.

    It only operates on headers and provides a stronger contrast between
    their names and values than the original one bundled with Pygments
    (:class:`pygments.lexers.text import HttpLexer`), especially when
    Solarized color scheme is used.

    """
    name = 'HTTP'
    aliases = ['http']
    filenames = ['*.http']
    tokens = {
        'root': [
            # Request-Line
            (r'([A-Z]+)( +)([^ ]+)( +)(HTTP)(/)(\d+\.\d+)',
             lexer.bygroups(
                 token.Name.Function,
                 token.Text,
                 token.Name.Namespace,
                 token.Text,
                 token.Keyword.Reserved,
                 token.Operator,
                 token.Number
             )),
            # Response Status-Line
            (r'(HTTP)(/)(\d+\.\d+)( +)(\d{3})( +)(.+)',
             lexer.bygroups(
                 token.Keyword.Reserved,  # 'HTTP'
                 token.Operator,  # '/'
                 token.Number,  # Version
                 token.Text,
                 token.Number,  # Status code
                 token.Text,
                 token.Name.Exception,  # Reason
             )),
            # Header
            (r'(.*?)( *)(:)( *)(.+)', lexer.bygroups(
                token.Name.Attribute,  # Name
                token.Text,
                token.Operator,  # Colon
                token.Text,
                token.String  # Value
            ))
        ]
    }


class BaseProcessor(object):
    """Base, noop output processor class."""

    enabled = True

    def __init__(self, env=Environment(), **kwargs):
        """
        :param env: an class:`Environment` instance
        :param kwargs: additional keyword argument that some
                       processor might require.

        """
        self.env = env
        self.kwargs = kwargs

    def process_headers(self, headers):
        """Return processed `headers`

        :param headers: The headers as text.

        """
        return headers

    def process_body(self, content, content_type, subtype, encoding):
        """Return processed `content`.

        :param content: The body content as text
        :param content_type: Full content type, e.g., 'application/atom+xml'.
        :param subtype: E.g. 'xml'.
        :param encoding: The original content encoding.

        """
        return content


class JSONProcessor(BaseProcessor):
    """JSON body processor."""

    def process_body(self, content, content_type, subtype, encoding):
        if subtype == 'json':
            try:
                # Indent the JSON data, sort keys by name, and
                # avoid unicode escapes to improve readability.
                content = json.dumps(json.loads(content),
                                     sort_keys=True,
                                     ensure_ascii=False,
                                     indent=DEFAULT_INDENT)
            except ValueError:
                # Invalid JSON but we don't care.
                pass
        return content


class XMLProcessor(BaseProcessor):
    """XML body processor."""
    # TODO: tests

    # in-place prettyprint formatter
    # c.f. http://effbot.org/zone/element-lib.htm#prettyprint
    @staticmethod
    def indent(elem, indent_text=' ' * DEFAULT_INDENT):
        def _indent(elem, level=0):
            i = "\n" + level * indent_text
            if len(elem):
                if not elem.text or not elem.text.strip():
                    elem.text = i + indent_text
                if not elem.tail or not elem.tail.strip():
                    elem.tail = i
                for elem in elem:
                    _indent(elem, level + 1)
                if not elem.tail or not elem.tail.strip():
                    elem.tail = i
            else:
                if level and (not elem.tail or not elem.tail.strip()):
                    elem.tail = i
        return _indent(elem)

    def process_body(self, content, content_type, subtype, encoding):
        if subtype == 'xml':
            try:
                root = ElementTree.fromstring(content.encode(encoding))
                self.indent(root)
                content = ElementTree.tostring(root)
            except ElementTree.ParseError:
                # Ignore invalid XML errors (skips attempting to pretty print)
                pass
        return content


class PygmentsProcessor(BaseProcessor):
    """A processor that applies syntax-highlighting using Pygments
    to the headers, and to the body as well if its content type is recognized.

    """
    def __init__(self, *args, **kwargs):
        super(PygmentsProcessor, self).__init__(*args, **kwargs)

        # Cache that speeds up when we process streamed body by line.
        self.lexers_by_type = {}

        if not self.env.colors:
            self.enabled = False
            return

        try:
            style = get_style_by_name(
                self.kwargs.get('pygments_style', DEFAULT_STYLE))
        except ClassNotFound:
            style = Solarized256Style

        if self.env.is_windows or self.env.colors == 256:
            fmt_class = Terminal256Formatter
        else:
            fmt_class = TerminalFormatter
        self.formatter = fmt_class(style=style)

    def process_headers(self, headers):
        return pygments.highlight(
            headers, HTTPLexer(), self.formatter).strip()

    def process_body(self, content, content_type, subtype, encoding):
        try:
            lexer = self.lexers_by_type.get(content_type)
            if not lexer:
                try:
                    lexer = get_lexer_for_mimetype(content_type)
                except ClassNotFound:
                    lexer = get_lexer_by_name(subtype)
                self.lexers_by_type[content_type] = lexer
        except ClassNotFound:
            pass
        else:
            content = pygments.highlight(content, lexer, self.formatter)
        return content.strip()


class HeadersProcessor(BaseProcessor):
    """Sorts headers by name retaining relative order of multiple headers
    with the same name.

    """
    def process_headers(self, headers):
        lines = headers.splitlines()
        headers = sorted(lines[1:], key=lambda h: h.split(':')[0])
        return '\r\n'.join(lines[:1] + headers)


class OutputProcessor(object):
    """A delegate class that invokes the actual processors."""

    installed_processors = {
        'format': [
            HeadersProcessor,
            JSONProcessor,
            XMLProcessor
        ],
        'colors': [
            PygmentsProcessor
        ]
    }

    def __init__(self, groups, env=Environment(), **kwargs):
        """
        :param env: a :class:`models.Environment` instance
        :param groups: the groups of processors to be applied
        :param kwargs: additional keyword arguments for processors

        """
        self.processors = []
        for group in groups:
            for cls in self.installed_processors[group]:
                processor = cls(env, **kwargs)
                if processor.enabled:
                    self.processors.append(processor)

    def process_headers(self, headers):
        for processor in self.processors:
            headers = processor.process_headers(headers)
        return headers

    def process_body(self, content, content_type, encoding):
        # e.g., 'application/atom+xml'
        content_type = content_type.split(';')[0]
        # e.g., 'xml'
        subtype = content_type.split('/')[-1].split('+')[-1]

        for processor in self.processors:
            content = processor.process_body(
                content,
                content_type,
                subtype,
                encoding
            )

        return content
httpie/output/__init__.py (new, empty file)
httpie/output/processors/__init__.py (new file, 44 lines)
@@ -0,0 +1,44 @@
from httpie.context import Environment
from .headers import HeadersProcessor
from .json import JSONProcessor
from .xml import XMLProcessor
from .colors import PygmentsProcessor


class ProcessorManager(object):
    """A delegate class that invokes the actual processors."""

    available = {
        'format': [
            HeadersProcessor,
            JSONProcessor,
            XMLProcessor
        ],
        'colors': [
            PygmentsProcessor
        ]
    }

    def __init__(self, groups, env=Environment(), **kwargs):
        """
        :param groups: names of processor groups to be applied
        :param env: Environment
        :param kwargs: additional keyword arguments for processors

        """
        self.enabled = []
        for group in groups:
            for cls in self.available[group]:
                p = cls(env, **kwargs)
                if p.enabled:
                    self.enabled.append(p)

    def process_headers(self, headers):
        for p in self.enabled:
            headers = p.process_headers(headers)
        return headers

    def process_body(self, body, mime):
        for p in self.enabled:
            body = p.process_body(body, mime)
        return body
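For orientation, this is roughly how the new ProcessorManager gets driven by get_stream_type() in httpie/output/streams.py, added further down in this diff (there it is built from args.prettify and args.style). A minimal standalone sketch, with made-up header and body values:

# Minimal usage sketch; the literal values are invented for the example.
from httpie.context import Environment
from httpie.output.processors import ProcessorManager

manager = ProcessorManager(groups=['format', 'colors'],
                           env=Environment(),
                           pygments_style='solarized')
# Sorts and (if the terminal supports colors) highlights the headers.
print(manager.process_headers('HTTP/1.1 200 OK\r\nServer: nginx\r\nAge: 0'))
# Re-indents, sorts keys, and highlights the JSON body.
print(manager.process_body('{"b": 1, "a": 2}', mime='application/json'))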
httpie/output/processors/base.py (new file, 37 lines)
@@ -0,0 +1,37 @@
from httpie.context import Environment


# The default number of spaces to indent when pretty printing
DEFAULT_INDENT = 4


class BaseProcessor(object):
    """Base output processor class."""

    def __init__(self, env=Environment(), **kwargs):
        """
        :param env: an class:`Environment` instance
        :param kwargs: additional keyword argument that some
                       processor might require.

        """
        self.enabled = True
        self.env = env
        self.kwargs = kwargs

    def process_headers(self, headers):
        """Return processed `headers`

        :param headers: The headers as text.

        """
        return headers

    def process_body(self, content, mime):
        """Return processed `content`.

        :param content: The body content as text
        :param mime: E.g., 'application/atom+xml'.

        """
        return content
httpie/output/processors/colors.py (new file, 194 lines)
@@ -0,0 +1,194 @@
import pygments
from pygments import token, lexer
from pygments.styles import get_style_by_name, STYLE_MAP
from pygments.lexers import get_lexer_for_mimetype, get_lexer_by_name
from pygments.formatters.terminal import TerminalFormatter
from pygments.formatters.terminal256 import Terminal256Formatter
from pygments.util import ClassNotFound
from pygments.style import Style

from httpie.compat import is_windows
from .base import BaseProcessor


# Colors on Windows via colorama don't look that
# great and fruity seems to give the best result there.
AVAILABLE_STYLES = set(STYLE_MAP.keys())
AVAILABLE_STYLES.add('solarized')
DEFAULT_STYLE = 'solarized' if not is_windows else 'fruity'


class PygmentsProcessor(BaseProcessor):
    """
    Colorize using Pygments

    This processor that applies syntax highlighting to the headers,
    and also to the body if its content type is recognized.

    """
    def __init__(self, *args, **kwargs):
        super(PygmentsProcessor, self).__init__(*args, **kwargs)

        if not self.env.colors:
            self.enabled = False
            return

        # Cache to speed things up when we process streamed body by line.
        self.lexers_by_type = {}

        try:
            style = get_style_by_name(
                self.kwargs.get('pygments_style', DEFAULT_STYLE))
        except ClassNotFound:
            style = Solarized256Style

        if self.env.is_windows or self.env.colors == 256:
            fmt_class = Terminal256Formatter
        else:
            fmt_class = TerminalFormatter
        self.formatter = fmt_class(style=style)

    def process_headers(self, headers):
        return pygments.highlight(headers, HTTPLexer(), self.formatter).strip()

    def process_body(self, body, mime):
        lexer = self.get_lexer(mime)
        if lexer:
            body = pygments.highlight(body, lexer, self.formatter)
        return body.strip()

    def get_lexer(self, mime):
        lexer = self.lexers_by_type.get(mime)
        if not lexer:
            try:
                lexer = get_lexer_for_mimetype(mime)
            except ClassNotFound:
                if '+' in mime:
                    # 'application/atom+xml' => 'xml'
                    subtype = mime.split('+')[-1]
                    try:
                        lexer = get_lexer_by_name(subtype)
                    except ClassNotFound:
                        pass
            self.lexers_by_type[mime] = lexer
        return lexer


class HTTPLexer(lexer.RegexLexer):
    """Simplified HTTP lexer for Pygments.

    It only operates on headers and provides a stronger contrast between
    their names and values than the original one bundled with Pygments
    (:class:`pygments.lexers.text import HttpLexer`), especially when
    Solarized color scheme is used.

    """
    name = 'HTTP'
    aliases = ['http']
    filenames = ['*.http']
    tokens = {
        'root': [
            # Request-Line
            (r'([A-Z]+)( +)([^ ]+)( +)(HTTP)(/)(\d+\.\d+)',
             lexer.bygroups(
                 token.Name.Function,
                 token.Text,
                 token.Name.Namespace,
                 token.Text,
                 token.Keyword.Reserved,
                 token.Operator,
                 token.Number
             )),
            # Response Status-Line
            (r'(HTTP)(/)(\d+\.\d+)( +)(\d{3})( +)(.+)',
             lexer.bygroups(
                 token.Keyword.Reserved,  # 'HTTP'
                 token.Operator,  # '/'
                 token.Number,  # Version
                 token.Text,
                 token.Number,  # Status code
                 token.Text,
                 token.Name.Exception,  # Reason
             )),
            # Header
            (r'(.*?)( *)(:)( *)(.+)', lexer.bygroups(
                token.Name.Attribute,  # Name
                token.Text,
                token.Operator,  # Colon
                token.Text,
                token.String  # Value
            ))
        ]
    }


class Solarized256Style(Style):
    """
    solarized256
    ------------

    A Pygments style inspired by Solarized's 256 color mode.

    :copyright: (c) 2011 by Hank Gay, (c) 2012 by John Mastro.
    :license: BSD, see LICENSE for more details.

    """
    BASE03 = "#1c1c1c"
    BASE02 = "#262626"
    BASE01 = "#4e4e4e"
    BASE00 = "#585858"
    BASE0 = "#808080"
    BASE1 = "#8a8a8a"
    BASE2 = "#d7d7af"
    BASE3 = "#ffffd7"
    YELLOW = "#af8700"
    ORANGE = "#d75f00"
    RED = "#af0000"
    MAGENTA = "#af005f"
    VIOLET = "#5f5faf"
    BLUE = "#0087ff"
    CYAN = "#00afaf"
    GREEN = "#5f8700"

    background_color = BASE03
    styles = {
        token.Keyword: GREEN,
        token.Keyword.Constant: ORANGE,
        token.Keyword.Declaration: BLUE,
        token.Keyword.Namespace: ORANGE,
        token.Keyword.Reserved: BLUE,
        token.Keyword.Type: RED,
        token.Name.Attribute: BASE1,
        token.Name.Builtin: BLUE,
        token.Name.Builtin.Pseudo: BLUE,
        token.Name.Class: BLUE,
        token.Name.Constant: ORANGE,
        token.Name.Decorator: BLUE,
        token.Name.Entity: ORANGE,
        token.Name.Exception: YELLOW,
        token.Name.Function: BLUE,
        token.Name.Tag: BLUE,
        token.Name.Variable: BLUE,
        token.String: CYAN,
        token.String.Backtick: BASE01,
        token.String.Char: CYAN,
        token.String.Doc: CYAN,
        token.String.Escape: RED,
        token.String.Heredoc: CYAN,
        token.String.Regex: RED,
        token.Number: CYAN,
        token.Operator: BASE1,
        token.Operator.Word: GREEN,
        token.Comment: BASE01,
        token.Comment.Preproc: GREEN,
        token.Comment.Special: GREEN,
        token.Generic.Deleted: CYAN,
        token.Generic.Emph: 'italic',
        token.Generic.Error: RED,
        token.Generic.Heading: ORANGE,
        token.Generic.Inserted: GREEN,
        token.Generic.Strong: 'bold',
        token.Generic.Subheading: ORANGE,
        token.Token: BASE1,
        token.Token.Other: ORANGE,
    }
httpie/output/processors/headers.py (new file, 14 lines)
@@ -0,0 +1,14 @@
from .base import BaseProcessor


class HeadersProcessor(BaseProcessor):

    def process_headers(self, headers):
        """
        Sorts headers by name while retaining relative
        order of multiple headers with the same name.

        """
        lines = headers.splitlines()
        headers = sorted(lines[1:], key=lambda h: h.split(':')[0])
        return '\r\n'.join(lines[:1] + headers)
httpie/output/processors/json.py (new file, 23 lines)
@@ -0,0 +1,23 @@
from __future__ import absolute_import
import json

from .base import BaseProcessor, DEFAULT_INDENT


class JSONProcessor(BaseProcessor):

    def process_body(self, body, mime):
        if 'json' in mime:
            try:
                obj = json.loads(body)
            except ValueError:
                # Invalid JSON, ignore.
                pass
            else:
                # Indent, sort keys by name, and avoid
                # unicode escapes to improve readability.
                body = json.dumps(obj,
                                  sort_keys=True,
                                  ensure_ascii=False,
                                  indent=DEFAULT_INDENT)
        return body
httpie/output/processors/xml.py (new file, 58 lines)
@@ -0,0 +1,58 @@
from __future__ import absolute_import
import re
from xml.etree import ElementTree

from .base import BaseProcessor, DEFAULT_INDENT


DECLARATION_RE = re.compile('<\?xml[^\n]+?\?>', flags=re.I)
DOCTYPE_RE = re.compile('<!DOCTYPE[^\n]+?>', flags=re.I)


def indent(elem, indent_text=' ' * DEFAULT_INDENT):
    """
    In-place prettyprint formatter
    C.f. http://effbot.org/zone/element-lib.htm#prettyprint

    """
    def _indent(elem, level=0):
        i = "\n" + level * indent_text
        if len(elem):
            if not elem.text or not elem.text.strip():
                elem.text = i + indent_text
            if not elem.tail or not elem.tail.strip():
                elem.tail = i
            for elem in elem:
                _indent(elem, level + 1)
            if not elem.tail or not elem.tail.strip():
                elem.tail = i
        else:
            if level and (not elem.tail or not elem.tail.strip()):
                elem.tail = i

    return _indent(elem)


class XMLProcessor(BaseProcessor):
    # TODO: tests

    def process_body(self, body, mime):
        if 'xml' in mime:
            # FIXME: orig NS names get forgotten during the conversion, etc.
            try:
                root = ElementTree.fromstring(body.encode('utf8'))
            except ElementTree.ParseError:
                # Ignore invalid XML errors (skips attempting to pretty print)
                pass
            else:
                indent(root)
                # Use the original declaration
                declaration = DECLARATION_RE.match(body)
                doctype = DOCTYPE_RE.match(body)
                body = ElementTree.tostring(root, encoding='utf-8')\
                                  .decode('utf8')
                if doctype:
                    body = '%s\n%s' % (doctype.group(0), body)
                if declaration:
                    body = '%s\n%s' % (declaration.group(0), body)
        return body
httpie/output/streams.py (new file, 270 lines)
@@ -0,0 +1,270 @@
from itertools import chain
from functools import partial

from httpie.context import Environment
from httpie.models import HTTPRequest, HTTPResponse
from httpie.input import (OUT_REQ_BODY, OUT_REQ_HEAD,
                          OUT_RESP_HEAD, OUT_RESP_BODY)
from httpie.output.processors import ProcessorManager


BINARY_SUPPRESSED_NOTICE = (
    b'\n'
    b'+-----------------------------------------+\n'
    b'| NOTE: binary data not shown in terminal |\n'
    b'+-----------------------------------------+'
)


class BinarySuppressedError(Exception):
    """An error indicating that the body is binary and won't be written,
    e.g., for terminal output)."""

    message = BINARY_SUPPRESSED_NOTICE


def write(stream, outfile, flush):
    """Write the output stream."""
    try:
        # Writing bytes so we use the buffer interface (Python 3).
        buf = outfile.buffer
    except AttributeError:
        buf = outfile

    for chunk in stream:
        buf.write(chunk)
        if flush:
            outfile.flush()


def write_with_colors_win_py3(stream, outfile, flush):
    """Like `write`, but colorized chunks are written as text
    directly to `outfile` to ensure it gets processed by colorama.
    Applies only to Windows with Python 3 and colorized terminal output.

    """
    color = b'\x1b['
    encoding = outfile.encoding
    for chunk in stream:
        if color in chunk:
            outfile.write(chunk.decode(encoding))
        else:
            outfile.buffer.write(chunk)
        if flush:
            outfile.flush()


def build_output_stream(args, env, request, response):
    """Build and return a chain of iterators over the `request`-`response`
    exchange each of which yields `bytes` chunks.

    """

    req_h = OUT_REQ_HEAD in args.output_options
    req_b = OUT_REQ_BODY in args.output_options
    resp_h = OUT_RESP_HEAD in args.output_options
    resp_b = OUT_RESP_BODY in args.output_options
    req = req_h or req_b
    resp = resp_h or resp_b

    output = []
    Stream = get_stream_type(env, args)

    if req:
        output.append(Stream(
            msg=HTTPRequest(request),
            with_headers=req_h,
            with_body=req_b))

    if req_b and resp:
        # Request/Response separator.
        output.append([b'\n\n'])

    if resp:
        output.append(Stream(
            msg=HTTPResponse(response),
            with_headers=resp_h,
            with_body=resp_b))

    if env.stdout_isatty and resp_b:
        # Ensure a blank line after the response body.
        # For terminal output only.
        output.append([b'\n\n'])

    return chain(*output)


def get_stream_type(env, args):
    """Pick the right stream type based on `env` and `args`.
    Wrap it in a partial with the type-specific args so that
    we don't need to think what stream we are dealing with.

    """
    if not env.stdout_isatty and not args.prettify:
        Stream = partial(
            RawStream,
            chunk_size=RawStream.CHUNK_SIZE_BY_LINE
            if args.stream
            else RawStream.CHUNK_SIZE
        )
    elif args.prettify:
        Stream = partial(
            PrettyStream if args.stream else BufferedPrettyStream,
            env=env,
            processor=ProcessorManager(
                env=env,
                groups=args.prettify,
                pygments_style=args.style
            ),
        )
    else:
        Stream = partial(EncodedStream, env=env)

    return Stream


class BaseStream(object):
    """Base HTTP message output stream class."""

    def __init__(self, msg, with_headers=True, with_body=True,
                 on_body_chunk_downloaded=None):
        """
        :param msg: a :class:`models.HTTPMessage` subclass
        :param with_headers: if `True`, headers will be included
        :param with_body: if `True`, body will be included

        """
        assert with_headers or with_body
        self.msg = msg
        self.with_headers = with_headers
        self.with_body = with_body
        self.on_body_chunk_downloaded = on_body_chunk_downloaded

    def _get_headers(self):
        """Return the headers' bytes."""
        return self.msg.headers.encode('utf8')

    def _iter_body(self):
        """Return an iterator over the message body."""
        raise NotImplementedError()

    def __iter__(self):
        """Return an iterator over `self.msg`."""
        if self.with_headers:
            yield self._get_headers()
            yield b'\r\n\r\n'

        if self.with_body:
            try:
                for chunk in self._iter_body():
                    yield chunk
                    if self.on_body_chunk_downloaded:
                        self.on_body_chunk_downloaded(chunk)
            except BinarySuppressedError as e:
                if self.with_headers:
                    yield b'\n'
                yield e.message


class RawStream(BaseStream):
    """The message is streamed in chunks with no processing."""

    CHUNK_SIZE = 1024 * 100
    CHUNK_SIZE_BY_LINE = 1

    def __init__(self, chunk_size=CHUNK_SIZE, **kwargs):
        super(RawStream, self).__init__(**kwargs)
        self.chunk_size = chunk_size

    def _iter_body(self):
        return self.msg.iter_body(self.chunk_size)


class EncodedStream(BaseStream):
    """Encoded HTTP message stream.

    The message bytes are converted to an encoding suitable for
    `self.env.stdout`. Unicode errors are replaced and binary data
    is suppressed. The body is always streamed by line.

    """
    CHUNK_SIZE = 1

    def __init__(self, env=Environment(), **kwargs):

        super(EncodedStream, self).__init__(**kwargs)

        if env.stdout_isatty:
            # Use the encoding supported by the terminal.
            output_encoding = env.stdout_encoding
        else:
            # Preserve the message encoding.
            output_encoding = self.msg.encoding

        # Default to utf8 when unsure.
        self.output_encoding = output_encoding or 'utf8'

    def _iter_body(self):

        for line, lf in self.msg.iter_lines(self.CHUNK_SIZE):

            if b'\0' in line:
                raise BinarySuppressedError()

            yield line.decode(self.msg.encoding) \
                      .encode(self.output_encoding, 'replace') + lf


class PrettyStream(EncodedStream):
    """In addition to :class:`EncodedStream` behaviour, this stream applies
    content processing.

    Useful for long-lived HTTP responses that stream by lines
    such as the Twitter streaming API.

    """

    CHUNK_SIZE = 1

    def __init__(self, processor, **kwargs):
        super(PrettyStream, self).__init__(**kwargs)
        self.processor = processor

    def _get_headers(self):
        return self.processor.process_headers(
            self.msg.headers).encode(self.output_encoding)

    def _iter_body(self):
        for line, lf in self.msg.iter_lines(self.CHUNK_SIZE):
            if b'\0' in line:
                raise BinarySuppressedError()
            yield self._process_body(line) + lf

    def _process_body(self, chunk):
        return self.processor.process_body(
            body=chunk.decode(self.msg.encoding, 'replace'),
            mime=self.msg.content_type.split(';')[0]
        ).encode(self.output_encoding, 'replace')


class BufferedPrettyStream(PrettyStream):
    """The same as :class:`PrettyStream` except that the body is fully
    fetched before it's processed.

    Suitable regular HTTP responses.

    """

    CHUNK_SIZE = 1024 * 10

    def _iter_body(self):

        # Read the whole body before prettifying it,
        # but bail out immediately if the body is binary.
        body = bytearray()
        for chunk in self.msg.iter_body(self.CHUNK_SIZE):
            if b'\0' in chunk:
                raise BinarySuppressedError()
            body.extend(chunk)

        yield self._process_body(body)
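core.py, whose import changes appear earlier in this diff, is the consumer of this module. A rough sketch of that flow, with the argument objects stubbed out (the helper name stream_exchange is invented for illustration):

# Rough sketch of how core.py drives this module; `args` is the parsed
# argparse namespace and `response` a requests.Response -- both assumed here.
from httpie.output.streams import build_output_stream, write


def stream_exchange(args, env, response):
    stream = build_output_stream(args=args, env=env,
                                 request=response.request,
                                 response=response)
    # Flushing per chunk matters for long-lived streaming responses.
    write(stream=stream, outfile=env.stdout, flush=env.stdout_isatty)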
@@ -74,7 +74,7 @@ def get_response(session_name, requests_kwargs, config_dir, args,
             raise
     else:
         # Existing sessions with `read_only=True` don't get updated.
-        if session.is_new or not read_only:
+        if session.is_new() or not read_only:
             session.cookies = requests_session.cookies
             session.save()
     return response
Deleted file (111 lines)
@@ -1,111 +0,0 @@
# -*- coding: utf-8 -*-
"""
solarized256
------------

A Pygments style inspired by Solarized's 256 color mode.

:copyright: (c) 2011 by Hank Gay, (c) 2012 by John Mastro.
:license: BSD, see LICENSE for more details.
"""

from pygments.style import Style
from pygments.token import Token, Comment, Name, Keyword, Generic, Number, \
    Operator, String

BASE03 = "#1c1c1c"
BASE02 = "#262626"
BASE01 = "#4e4e4e"
BASE00 = "#585858"
BASE0 = "#808080"
BASE1 = "#8a8a8a"
BASE2 = "#d7d7af"
BASE3 = "#ffffd7"
YELLOW = "#af8700"
ORANGE = "#d75f00"
RED = "#af0000"
MAGENTA = "#af005f"
VIOLET = "#5f5faf"
BLUE = "#0087ff"
CYAN = "#00afaf"
GREEN = "#5f8700"


class Solarized256Style(Style):
    background_color = BASE03
    styles = {
        Keyword: GREEN,
        Keyword.Constant: ORANGE,
        Keyword.Declaration: BLUE,
        Keyword.Namespace: ORANGE,
        #Keyword.Pseudo
        Keyword.Reserved: BLUE,
        Keyword.Type: RED,

        #Name
        Name.Attribute: BASE1,
        Name.Builtin: BLUE,
        Name.Builtin.Pseudo: BLUE,
        Name.Class: BLUE,
        Name.Constant: ORANGE,
        Name.Decorator: BLUE,
        Name.Entity: ORANGE,
        Name.Exception: YELLOW,
        Name.Function: BLUE,
        #Name.Label
        #Name.Namespace
        #Name.Other
        Name.Tag: BLUE,
        Name.Variable: BLUE,
        #Name.Variable.Class
        #Name.Variable.Global
        #Name.Variable.Instance

        #Literal
        #Literal.Date
        String: CYAN,
        String.Backtick: BASE01,
        String.Char: CYAN,
        String.Doc: CYAN,
        #String.Double
        String.Escape: RED,
        String.Heredoc: CYAN,
        #String.Interpol
        #String.Other
        String.Regex: RED,
        #String.Single
        #String.Symbol
        Number: CYAN,
        #Number.Float
        #Number.Hex
        #Number.Integer
        #Number.Integer.Long
        #Number.Oct

        Operator: BASE1,
        Operator.Word: GREEN,

        #Punctuation: ORANGE,

        Comment: BASE01,
        #Comment.Multiline
        Comment.Preproc: GREEN,
        #Comment.Single
        Comment.Special: GREEN,

        #Generic
        Generic.Deleted: CYAN,
        Generic.Emph: 'italic',
        Generic.Error: RED,
        Generic.Heading: ORANGE,
        Generic.Inserted: GREEN,
        #Generic.Output
        #Generic.Prompt
        Generic.Strong: 'bold',
        Generic.Subheading: ORANGE,
        #Generic.Traceback

        Token: BASE1,
        Token.Other: ORANGE,
    }
@@ -43,5 +43,6 @@ def humanize_bytes(n, precision=2):
         if n >= factor:
             break
 
+    # noinspection PyUnboundLocalVariable
     return '%.*f %s' % (precision, n / factor, suffix)
 
@@ -10,7 +10,7 @@ import shutil
 import tempfile
 
 import httpie
-from httpie.models import Environment
+from httpie.context import Environment
 from httpie.core import main
 from httpie.compat import bytes, str
 
@@ -1,6 +1,6 @@
 """Tests for dealing with binary request and response data."""
 from httpie.compat import urlopen
-from httpie.output import BINARY_SUPPRESSED_NOTICE
+from httpie.output.streams import BINARY_SUPPRESSED_NOTICE
 from tests import TestEnvironment, http, httpbin
 from tests.fixtures import BIN_FILE_PATH, BIN_FILE_CONTENT, BIN_FILE_PATH_ARG
 
@@ -1,7 +1,7 @@
 import pytest
 
 from httpie.compat import is_windows
-from httpie.output import BINARY_SUPPRESSED_NOTICE
+from httpie.output.streams import BINARY_SUPPRESSED_NOTICE
 from tests import http, httpbin, TestEnvironment
 from tests.fixtures import BIN_FILE_CONTENT, BIN_FILE_PATH
 
@@ -2,8 +2,9 @@ import os
 import tempfile
 
 import pytest
+from httpie.context import Environment
 
-from tests import TestEnvironment, http, httpbin, Environment
+from tests import TestEnvironment, http, httpbin
 from httpie.compat import is_windows
 
 