forked from extern/httpie-cli

Cleanup

This commit is contained in:
parent dc7d03e6b8
commit 9338aadd75
@@ -3,7 +3,7 @@
 NOTE: the CLI interface may change before reaching v1.0.

 """
-import argparse
+from argparse import FileType, OPTIONAL, SUPPRESS

 from requests.compat import is_windows

@@ -45,7 +45,7 @@ positional = parser.add_argument_group(
 )
 positional.add_argument(
     'method', metavar='METHOD',
-    nargs='?',
+    nargs=OPTIONAL,
     default=None,
     help=_('''
     The HTTP method to be used for the request
@@ -114,9 +114,9 @@ content_type.add_argument(
 output_processing = parser.add_argument_group(title='Output processing')

 output_processing.add_argument(
-    '--output', '-o', type=argparse.FileType('w+b'),
+    '--output', '-o', type=FileType('w+b'),
     metavar='FILE',
-    help= argparse.SUPPRESS if not is_windows else _(
+    help= SUPPRESS if not is_windows else _(
     '''
     Save output to FILE.
     This option is a replacement for piping output to FILE,
@@ -322,15 +322,13 @@ network.add_argument(
 troubleshooting = parser.add_argument_group(title='Troubleshooting')
 troubleshooting.add_argument(
     '--help',
-    action='help', default=argparse.SUPPRESS,
-    help=argparse._('Show this help message and exit')
+    action='help', default=SUPPRESS,
+    help='Show this help message and exit'
 )
 troubleshooting.add_argument('--version', action='version', version=__version__)
 troubleshooting.add_argument(
     '--traceback', action='store_true', default=False,
-    help=_('''
-    Prints exception traceback should one occur.
-    ''')
+    help='Prints exception traceback should one occur.'
 )
 troubleshooting.add_argument(
     '--debug', action='store_true', default=False,
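
As an aside, a minimal, self-contained sketch (not part of this commit) of the argparse names the cleanup above imports directly (OPTIONAL, SUPPRESS, FileType):

# Hypothetical illustration; the argument names and values are not from the diff.
from argparse import ArgumentParser, FileType, OPTIONAL, SUPPRESS

parser = ArgumentParser()
# OPTIONAL is the named constant behind nargs='?' (zero or one value).
parser.add_argument('method', nargs=OPTIONAL, default=None)
# SUPPRESS hides the option from the generated --help text; FileType opens
# the argument as a file object in the given mode at parse time.
parser.add_argument('--output', '-o', type=FileType('w+b'), help=SUPPRESS)

args = parser.parse_args(['GET'])
print(args.method, args.output)  # -> GET None
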
@@ -67,7 +67,7 @@ def get_requests_kwargs(args):
         'verify': {
             'yes': True,
             'no': False
-        }.get(args.verify,args.verify),
+        }.get(args.verify, args.verify),
         'timeout': args.timeout,
         'auth': credentials,
         'proxies': dict((p.key, p.value) for p in args.proxy),
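
A quick sketch (again not from the commit) of the dict-lookup pattern in the 'verify' hunk above: map 'yes'/'no' to booleans and fall back to the raw value, e.g. a CA bundle path, for anything else:

# Hypothetical helper; the function name is illustrative only.
def verify_value(arg):
    return {'yes': True, 'no': False}.get(arg, arg)

print(verify_value('yes'))              # -> True
print(verify_value('no'))               # -> False
print(verify_value('/etc/ssl/ca.pem'))  # -> /etc/ssl/ca.pem
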
@@ -5,10 +5,10 @@ import os
 import sys
 import re
 import json
-import argparse
 import mimetypes
 import getpass
 from io import BytesIO
+from argparse import ArgumentParser, ArgumentTypeError

 try:
     from collections import OrderedDict
@@ -79,7 +79,7 @@ OUTPUT_OPTIONS_DEFAULT = OUT_RESP_HEAD + OUT_RESP_BODY
 OUTPUT_OPTIONS_DEFAULT_STDOUT_REDIRECTED = OUT_RESP_BODY


-class Parser(argparse.ArgumentParser):
+class Parser(ArgumentParser):
     """Adds additional logic to `argparse.ArgumentParser`.

     Handles all input (CLI args, file args, stdin), applies defaults,
@@ -125,7 +125,6 @@ class Parser(argparse.ArgumentParser):
             # Stdin already read (if not a tty) so it's save to prompt.
             args.auth.prompt_password(urlparse(args.url).netloc)

-
         return args

     def _print_message(self, message, file=None):
@@ -171,7 +170,7 @@ class Parser(argparse.ArgumentParser):
             args.items.insert(
                 0, KeyValueArgType(*SEP_GROUP_ITEMS).__call__(args.url))

-        except argparse.ArgumentTypeError as e:
+        except ArgumentTypeError as e:
             if args.traceback:
                 raise
             self.error(e.message)
@@ -344,7 +343,7 @@ class KeyValueArgType(object):
                 break

         else:
-            raise argparse.ArgumentTypeError(
+            raise ArgumentTypeError(
                 '"%s" is not a valid value' % string)

         return self.key_value_class(
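
The hunk above relies on Python's for/else construct; here is a small standalone sketch (not from the commit) of that pattern, with the else branch raising ArgumentTypeError only when the loop never hits break:

# Hypothetical example; the separators and function name are made up.
from argparse import ArgumentTypeError

def split_item(string, separators=('==', '=', ':')):
    for sep in separators:
        if sep in string:
            break
    else:
        raise ArgumentTypeError('"%s" is not a valid value' % string)
    return string.split(sep, 1)

print(split_item('name=value'))  # -> ['name', 'value']
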
@@ -383,7 +382,7 @@ class AuthCredentialsArgType(KeyValueArgType):
         """
         try:
             return super(AuthCredentialsArgType, self).__call__(string)
-        except argparse.ArgumentTypeError:
+        except ArgumentTypeError:
             # No password provided, will prompt for it later.
             return self.key_value_class(
                 key=string,
@@ -90,9 +90,9 @@ class HTTPResponse(HTTPMessage):
     def headers(self):
         original = self._orig.raw._original_response
         status_line = 'HTTP/{version} {status} {reason}'.format(
-            version='.'.join(str(original.version)),
-            status=original.status,
-            reason=original.reason
+            version='.'.join(str(original.version)),
+            status=original.status,
+            reason=original.reason
         )
         headers = [status_line]
         try:
@@ -89,7 +89,7 @@ def output_stream(args, env, request, response):
     req_h = OUT_REQ_HEAD in args.output_options
     req_b = OUT_REQ_BODY in args.output_options
     resp_h = OUT_RESP_HEAD in args.output_options
-    resp_b = OUT_RESP_BODY in args.output_options
+    resp_b = OUT_RESP_BODY in args.output_options

     req = req_h or req_b
     resp = resp_h or resp_b
@@ -131,13 +131,15 @@ def make_stream(env, args):
             RawStream,
             chunk_size=RawStream.CHUNK_SIZE_BY_LINE
             if args.stream
-            else RawStream.CHUNK_SIZE)
+            else RawStream.CHUNK_SIZE
+        )
     elif args.prettify:
         Stream = partial(
             PrettyStream if args.stream else BufferedPrettyStream,
-            processor=OutputProcessor(env=env, groups=args.prettify,
-                                      pygments_style=args.style),
-            env=env)
+            env=env,
+            processor=OutputProcessor(
+                env=env, groups=args.prettify, pygments_style=args.style),
+        )
     else:
         Stream = partial(EncodedStream, env=env)

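
A brief sketch (not from the commit) of the functools.partial pattern make_stream() uses above: pre-bind keyword arguments to a stream class, then instantiate the chosen class later with the remaining arguments:

# Hypothetical classes; only the partial() usage mirrors the hunk above.
from functools import partial

class FakeStream(object):
    def __init__(self, env=None, msg=None):
        self.env = env
        self.msg = msg

Stream = partial(FakeStream, env='fake-env')
stream = Stream(msg='hello')   # env is already bound by partial()
print(stream.env, stream.msg)  # -> fake-env hello
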
@@ -205,6 +207,7 @@ class EncodedStream(BaseStream):

     """
+    CHUNK_SIZE = 1024 * 5

     def __init__(self, env=Environment(), **kwargs):

         super(EncodedStream, self).__init__(**kwargs)
@@ -308,13 +311,13 @@ class HTTPLexer(lexer.RegexLexer):
             # Request-Line
             (r'([A-Z]+)( +)([^ ]+)( +)(HTTP)(/)(\d+\.\d+)',
              lexer.bygroups(
-                token.Name.Function,
-                token.Text,
-                token.Name.Namespace,
-                token.Text,
-                token.Keyword.Reserved,
-                token.Operator,
-                token.Number
+                token.Name.Function,
+                token.Text,
+                token.Name.Namespace,
+                token.Text,
+                token.Keyword.Reserved,
+                token.Operator,
+                token.Number
             )),
             # Response Status-Line
             (r'(HTTP)(/)(\d+\.\d+)( +)(\d{3})( +)(.+)',
@@ -329,13 +332,14 @@ class HTTPLexer(lexer.RegexLexer):
             )),
             # Header
             (r'(.*?)( *)(:)( *)(.+)', lexer.bygroups(
-                token.Name.Attribute, # Name
+                token.Name.Attribute, # Name
                 token.Text,
                 token.Operator, # Colon
                 token.Text,
                 token.String # Value
             ))
-        ]}
+        ]
+    }


 class BaseProcessor(object):
@@ -1,13 +1,13 @@
 """Persistent, JSON-serialized sessions.

 """
-import shutil
 import os
 import sys
 import json
 import glob
 import errno
 import codecs
+import shutil
 import subprocess

 from requests.compat import urlparse
@@ -15,18 +15,17 @@ from requests import Session as RSession
 from requests.cookies import RequestsCookieJar, create_cookie
 from requests.auth import HTTPBasicAuth, HTTPDigestAuth

-from . import __version__
-from .config import CONFIG_DIR
-from .output import PygmentsProcessor
+from . import __version__
+from .config import CONFIG_DIR
+from .output import PygmentsProcessor


 SESSIONS_DIR = os.path.join(CONFIG_DIR, 'sessions')


 def get_response(name, request_kwargs):
-
     host = Host(request_kwargs['headers'].get('Host', None)
-                or urlparse(request_kwargs['url']).netloc.split('@')[-1])
+                or urlparse(request_kwargs['url']).netloc.split('@')[-1])

     session = Session(host, name)
     session.load()
@@ -54,7 +53,6 @@ def get_response(name, request_kwargs):


 class Host(object):
-
     def __init__(self, name):
         self.name = name

@@ -75,7 +73,7 @@ class Host(object):
             os.makedirs(path, mode=0o700)
         except OSError as e:
             if e.errno != errno.EEXIST:
-                raise
+                raise
         return path

     @classmethod
@@ -86,7 +84,6 @@ class Host(object):


 class Session(dict):
-
     def __init__(self, host, name, *args, **kwargs):
         super(Session, self).__init__(*args, **kwargs)
         self.host = host
@@ -128,9 +125,8 @@ class Session(dict):
     def cookies(self):
         jar = RequestsCookieJar()
         for name, cookie_dict in self['cookies'].items():
-            cookie = create_cookie(
-                name, cookie_dict.pop('value'), **cookie_dict)
-            jar.set_cookie(cookie)
+            jar.set_cookie(create_cookie(
+                name, cookie_dict.pop('value'), **cookie_dict))
         jar.clear_expired_cookies()
         return jar

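
For context, a small sketch (not part of the commit) of rebuilding a cookie jar from plain dicts with requests' create_cookie(), which the cookies() property above now does in a single set_cookie() call:

# Hypothetical stored data; the dict contents are illustrative only.
from requests.cookies import RequestsCookieJar, create_cookie

stored = {'sessionid': {'value': 'abc123', 'domain': 'example.org', 'path': '/'}}
jar = RequestsCookieJar()
for name, cookie_dict in stored.items():
    jar.set_cookie(create_cookie(name, cookie_dict.pop('value'), **cookie_dict))
print(jar.get('sessionid'))  # -> abc123
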
@@ -167,7 +163,7 @@ class Session(dict):
                 HTTPDigestAuth: 'digest'}[type(cred)],
             'username': cred.username,
             'password': cred.password,
-        }
+        }


 def list_command(args):
@@ -185,7 +181,7 @@ def show_command(args):
     path = Session(Host(args.host), args.name).path
     if not os.path.exists(path):
         sys.stderr.write('Session "%s" does not exist [%s].\n'
-                         % (args.name, path))
+                         % (args.name, path))
         sys.exit(1)

     with codecs.open(path, encoding='utf8') as f:
@@ -200,12 +196,7 @@ def delete_command(args):
     if not args.name:
         host.delete()
     else:
-        session = Session(host, args.name)
-        try:
-            session.delete()
-        except OSError as e:
-            if e.errno != errno.ENOENT:
-                raise
+        Session(host, args.name).delete()


 def edit_command(args):
@@ -220,7 +211,6 @@ def edit_command(args):


 def add_commands(subparsers):
-
     # List
     list_ = subparsers.add_parser('session-list', help='list sessions')
     list_.set_defaults(command=list_command)