Mirror of https://github.com/httpie/cli.git (synced 2025-01-23 13:58:45 +01:00)
Modernize the code base with f-strings (#1068)
Commit: 0ff0874fa3
Parent: 39314887c4
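This commit replaces printf-style '%' interpolation and str.format() calls throughout the code base with f-strings (formatted string literals, PEP 498, available since Python 3.6). As a minimal sketch of the equivalence, using the unrecognized-arguments message from the first hunk with made-up values:

    # Illustrative values only; all three spellings build the same string.
    invalid = ['--foo', '--bar']

    old_percent = 'unrecognized arguments: %s' % ' '.join(invalid)
    old_format = 'unrecognized arguments: {0}'.format(' '.join(invalid))
    new_fstring = f'unrecognized arguments: {" ".join(invalid)}'

    assert old_percent == old_format == new_fstring

The hunks below apply this same mechanical rewrite to individual messages, headers, and help strings.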
@@ -284,8 +284,7 @@ class HTTPieArgumentParser(argparse.ArgumentParser):
                 invalid.append(option)

         if invalid:
-            msg = 'unrecognized arguments: %s'
-            self.error(msg % ' '.join(invalid))
+            self.error(f'unrecognized arguments: {" ".join(invalid)}')

     def _body_from_file(self, fd):
         """Read the data from a file-like object.
@@ -381,8 +380,8 @@ class HTTPieArgumentParser(argparse.ArgumentParser):
             for key, file in self.args.files.items():
                 if key != '':
                     self.error(
-                        'Invalid file fields (perhaps you meant --form?): %s'
-                        % ','.join(self.args.files.keys()))
+                        'Invalid file fields (perhaps you meant --form?):'
+                        f' {",".join(self.args.files.keys())}')
                 if request_file is not None:
                     self.error("Can't read request from multiple files")
                 request_file = file
@@ -407,10 +406,7 @@ class HTTPieArgumentParser(argparse.ArgumentParser):
         def check_options(value, option):
             unknown = set(value) - OUTPUT_OPTIONS
             if unknown:
-                self.error('Unknown output options: {0}={1}'.format(
-                    option,
-                    ','.join(unknown)
-                ))
+                self.error(f'Unknown output options: {option}={",".join(unknown)}')

         if self.args.verbose:
             self.args.all = True
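Note on quoting: several of the converted lines embed a string literal inside the replacement field, e.g. {" ".join(invalid)} and {",".join(unknown)} above. Before Python 3.12, the expression inside an f-string cannot reuse the f-string's own quote character, which is why the inner literals switch to double quotes. A small sketch with made-up values:

    unknown = {'json', 'colors'}
    option = 'format_options'

    # The inner "," uses double quotes because the f-string itself is
    # single-quoted; reusing the same quote was a SyntaxError before 3.12.
    message = f'Unknown output options: {option}={",".join(sorted(unknown))}'
    assert message == 'Unknown output options: format_options=colors,json'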
@@ -30,7 +30,7 @@ from ..ssl import AVAILABLE_SSL_VERSION_ARG_MAPPING, DEFAULT_SSL_CIPHERS

 parser = HTTPieArgumentParser(
     prog='http',
-    description='%s <https://httpie.org>' % __doc__.strip(),
+    description=f'{__doc__.strip()} <https://httpie.org>',
     epilog=dedent('''
     For every --OPTION there is also a --no-OPTION that reverts OPTION
     to its default value.
@@ -267,7 +267,7 @@ output_processing.add_argument(
     '''.format(
         default=DEFAULT_STYLE,
         available_styles='\n'.join(
-            '{0}{1}'.format(8 * ' ', line.strip())
+            f'        {line.strip()}'
             for line in wrap(', '.join(sorted(AVAILABLE_STYLES)), 60)
         ).strip(),
         auto_style=AUTO_STYLE,
@@ -330,7 +330,7 @@ output_processing.add_argument(

     '''.format(
         option_list='\n'.join(
-            (8 * ' ') + option for option in DEFAULT_FORMAT_OPTIONS).strip()
+            f'        {option}' for option in DEFAULT_FORMAT_OPTIONS).strip()
     )
 )

@@ -383,12 +383,12 @@ output_options.add_argument(
     '--verbose', '-v',
     dest='verbose',
     action='store_true',
-    help='''
+    help=f'''
     Verbose output. Print the whole request as well as the response. Also print
     any intermediary requests/responses (such as redirects).
-    It's a shortcut for: --all --print={0}
+    It's a shortcut for: --all --print={''.join(OUTPUT_OPTIONS)}

-    '''.format(''.join(OUTPUT_OPTIONS))
+    '''
 )
 output_options.add_argument(
     '--all',
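The --verbose hunk above shows that the f prefix also applies to triple-quoted strings: help=f''' ... ''' interpolates {''.join(OUTPUT_OPTIONS)} directly inside the help text, and the value is computed once when the module-level argument definitions run, just as the old trailing .format(...) call was. A rough sketch, with a hypothetical stand-in for httpie's real OUTPUT_OPTIONS constant:

    # Hypothetical stand-in values for the OUTPUT_OPTIONS constant.
    OUTPUT_OPTIONS = ('H', 'B', 'h', 'b')

    help_text = f'''
    It's a shortcut for: --all --print={''.join(OUTPUT_OPTIONS)}
    '''
    assert '--print=HBhb' in help_text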
@@ -562,7 +562,7 @@ auth.add_argument(
             name=plugin.name,
             package=(
                 '' if issubclass(plugin, BuiltinAuthPlugin)
-                else ' (provided by %s)' % plugin.package_name
+                else f' (provided by {plugin.package_name})'
             ),
             description=(
                 '' if not plugin.description else
@@ -89,13 +89,11 @@ def process_header_arg(arg: KeyValueArg) -> Optional[str]:


 def process_empty_header_arg(arg: KeyValueArg) -> str:
-    if arg.value:
-        raise ParseError(
-            'Invalid item "%s" '
-            '(to specify an empty header use `Header;`)'
-            % arg.orig
-        )
-    return arg.value
+    if not arg.value:
+        return arg.value
+    raise ParseError(
+        f'Invalid item {arg.orig!r} (to specify an empty header use `Header;`)'
+    )


 def process_query_param_arg(arg: KeyValueArg) -> str:
@@ -109,7 +107,7 @@ def process_file_upload_arg(arg: KeyValueArg) -> Tuple[str, IO, str]:
     try:
         f = open(os.path.expanduser(filename), 'rb')
     except IOError as e:
-        raise ParseError('"%s": %s' % (arg.orig, e))
+        raise ParseError(f'{arg.orig!r}: {e}')
     return (
         os.path.basename(filename),
         f,
@@ -142,12 +140,11 @@ def load_text_file(item: KeyValueArg) -> str:
         with open(os.path.expanduser(path), 'rb') as f:
             return f.read().decode()
     except IOError as e:
-        raise ParseError('"%s": %s' % (item.orig, e))
+        raise ParseError(f'{item.orig!r}: {e}')
     except UnicodeDecodeError:
         raise ParseError(
-            '"%s": cannot embed the content of "%s",'
+            f'{item.orig!r}: cannot embed the content of {item.value!r},'
             ' not a UTF8 or ASCII-encoded text file'
-            % (item.orig, item.value)
         )


@@ -155,4 +152,4 @@ def load_json(arg: KeyValueArg, contents: str) -> JSONType:
     try:
         return load_json_preserve_order(contents)
     except ValueError as e:
-        raise ParseError('"%s": %s' % (arg.orig, e))
+        raise ParseError(f'{arg.orig!r}: {e}')
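The ParseError hunks above also change how the offending item is quoted: the old messages wrapped it in literal double quotes ('"%s": %s'), while the f-string versions use the !r conversion, which formats the value with repr() and so typically renders strings with single quotes. A minimal sketch with made-up values:

    orig = 'field@missing.txt'
    error = FileNotFoundError('No such file or directory')

    old_message = '"%s": %s' % (orig, error)
    new_message = f'{orig!r}: {error}'

    # repr() quotes the item with single quotes instead of literal "...".
    assert old_message == '"field@missing.txt": No such file or directory'
    assert new_message == "'field@missing.txt': No such file or directory"

The information carried by the message is the same; only the quoting style of the reported item differs.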
@@ -205,10 +205,9 @@ def program(args: argparse.Namespace, env: Environment) -> ExitStatus:
         if downloader.interrupted:
             exit_status = ExitStatus.ERROR
             env.log_error(
-                'Incomplete download: size=%d; downloaded=%d' % (
-                    downloader.status.total_size,
-                    downloader.status.downloaded
-                ))
+                f'Incomplete download: size={downloader.status.total_size};'
+                f' downloaded={downloader.status.downloaded}'
+            )
         return exit_status

     finally:
@@ -64,7 +64,7 @@ def parse_content_range(content_range: str, resumed_from: int) -> int:

     if not match:
         raise ContentRangeError(
-            'Invalid Content-Range format %r' % content_range)
+            f'Invalid Content-Range format {content_range!r}')

     content_range_dict = match.groupdict()
     first_byte_pos = int(content_range_dict['first_byte_pos'])
@@ -85,16 +85,15 @@ def parse_content_range(content_range: str, resumed_from: int) -> int:
             or (instance_length is not None
                 and instance_length <= last_byte_pos)):
         raise ContentRangeError(
-            'Invalid Content-Range returned: %r' % content_range)
+            f'Invalid Content-Range returned: {content_range!r}')

     if (first_byte_pos != resumed_from
             or (instance_length is not None
                 and last_byte_pos + 1 != instance_length)):
         # Not what we asked for.
         raise ContentRangeError(
-            'Unexpected Content-Range returned (%r)'
-            ' for the requested Range ("bytes=%d-")'
-            % (content_range, resumed_from)
+            f'Unexpected Content-Range returned ({content_range!r})'
+            f' for the requested Range ("bytes={resumed_from}-")'
         )

     return last_byte_pos + 1
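Both parse_content_range messages (and the UTF-8 error in load_text_file earlier) rely on implicit concatenation of adjacent string literals: a long message can be split across lines, and only the pieces that contain placeholders need the f prefix. A short sketch with made-up values:

    content_range = 'bytes 0-499/1234'
    resumed_from = 500

    # Adjacent literals are joined into one string; the f prefix is applied
    # per piece, so an f-string can sit next to a plain literal.
    message = (
        f'Unexpected Content-Range returned ({content_range!r})'
        f' for the requested Range ("bytes={resumed_from}-")'
    )
    assert message == (
        "Unexpected Content-Range returned ('bytes 0-499/1234')"
        ' for the requested Range ("bytes=500-")'
    )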
@@ -112,7 +111,7 @@ def filename_from_content_disposition(
     """
     # attachment; filename=jakubroztocil-httpie-0.4.1-20-g40bd8f6.tar.gz

-    msg = Message('Content-Disposition: %s' % content_disposition)
+    msg = Message(f'Content-Disposition: {content_disposition}')
     filename = msg.get_filename()
     if filename:
         # Basic sanitation.
@@ -177,7 +176,7 @@ def trim_filename_if_needed(filename: str, directory='.', extra=0) -> str:
 def get_unique_filename(filename: str, exists=os.path.exists) -> str:
     attempt = 0
     while True:
-        suffix = '-' + str(attempt) if attempt > 0 else ''
+        suffix = f'-{attempt}' if attempt > 0 else ''
         try_filename = trim_filename_if_needed(filename, extra=len(suffix))
         try_filename += suffix
         if not exists(try_filename):
@@ -226,7 +225,7 @@ class Downloader:
         if bytes_have:
             # Set ``Range`` header to resume the download
             # TODO: Use "If-Range: mtime" to make sure it's fresh?
-            request_headers['Range'] = 'bytes=%d-' % bytes_have
+            request_headers['Range'] = f'bytes={bytes_have}-'
             self._resumed_from = bytes_have

     def start(
@@ -288,12 +287,8 @@ class Downloader:
         )

         self._progress_reporter.output.write(
-            'Downloading %sto "%s"\n' % (
-                (humanize_bytes(total_size) + ' '
-                 if total_size is not None
-                 else ''),
-                self._output_file.name
-            )
+            f'Downloading {humanize_bytes(total_size) + " " if total_size is not None else ""}'
+            f'to "{self._output_file.name}"\n'
         )
         self._progress_reporter.start()

@@ -442,7 +437,7 @@ class ProgressReporterThread(threading.Thread):
                 s = int((self.status.total_size - downloaded) / speed)
                 h, s = divmod(s, 60 * 60)
                 m, s = divmod(s, 60)
-                eta = '{0}:{1:0>2}:{2:0>2}'.format(h, m, s)
+                eta = f'{h}:{m:0>2}:{s:0>2}'

             self._status_line = PROGRESS.format(
                 percentage=percentage,
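In the ETA hunk just above, the format specification carries over verbatim: {1:0>2} in str.format() and {m:0>2} in the f-string both mean "pad on the left with 0 to a width of 2". A quick sketch with made-up values:

    h, m, s = 1, 7, 3

    old_eta = '{0}:{1:0>2}:{2:0>2}'.format(h, m, s)
    new_eta = f'{h}:{m:0>2}:{s:0>2}'

    # Only the way the values are referenced changes, not the padding.
    assert old_eta == new_eta == '1:07:03'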
@@ -455,11 +450,7 @@ class ProgressReporterThread(threading.Thread):
         self._prev_bytes = downloaded

         self.output.write(
-            CLEAR_LINE
-            + ' '
-            + SPINNER[self._spinner_pos]
-            + ' '
-            + self._status_line
+            f'{CLEAR_LINE} {SPINNER[self._spinner_pos]} {self._status_line}'
         )
         self.output.flush()

@@ -102,7 +102,7 @@ class HTTPRequest(HTTPMessage):
         request_line = '{method} {path}{query} HTTP/1.1'.format(
             method=self._orig.method,
             path=url.path or '/',
-            query='?' + url.query if url.query else ''
+            query=f'?{url.query}' if url.query else ''
         )

         headers = dict(self._orig.headers)
@@ -110,10 +110,7 @@ class HTTPRequest(HTTPMessage):
             headers['Host'] = url.netloc.split('@')[-1]

         headers = [
-            '%s: %s' % (
-                name,
-                value if isinstance(value, str) else value.decode('utf8')
-            )
+            f'{name}: {value if isinstance(value, str) else value.decode("utf-8")}'
             for name, value in headers.items()
         ]

@@ -120,8 +120,8 @@ def get_lexer(
             subtype_name, subtype_suffix = subtype.split('+', 1)
             lexer_names.extend([subtype_name, subtype_suffix])
             mime_types.extend([
-                '%s/%s' % (type_, subtype_name),
-                '%s/%s' % (type_, subtype_suffix)
+                f'{type_}/{subtype_name}',
+                f'{type_}/{subtype_suffix}',
             ])

     # As a last resort, if no lexer feels responsible, and
@@ -29,9 +29,9 @@ class HTTPBasicAuth(requests.auth.HTTPBasicAuth):

     @staticmethod
     def make_header(username: str, password: str) -> str:
-        credentials = u'%s:%s' % (username, password)
-        token = b64encode(credentials.encode('utf8')).strip().decode('latin1')
-        return 'Basic %s' % token
+        credentials = f'{username}:{password}'
+        token = b64encode(credentials.encode('utf-8')).strip().decode('latin1')
+        return f'Basic {token}'


 class BasicAuthPlugin(BuiltinAuthPlugin):
@@ -62,7 +62,7 @@ def humanize_bytes(n, precision=2):
             break

     # noinspection PyUnboundLocalVariable
-    return '%.*f %s' % (precision, n / factor, suffix)
+    return f'{n / factor:.{precision}f} {suffix}'


 class ExplicitNullAuth(requests.auth.AuthBase):
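The humanize_bytes hunk above is the one place where the precision itself is a runtime value: '%.*f' takes the precision from the argument list, and the f-string equivalent nests it into the format spec as {n / factor:.{precision}f}. A minimal sketch with made-up values:

    n, factor, suffix, precision = 1536, 1024, 'kB', 2

    old_text = '%.*f %s' % (precision, n / factor, suffix)
    new_text = f'{n / factor:.{precision}f} {suffix}'

    # The nested {precision} field is evaluated first and becomes part of
    # the format spec, so both spellings round to two decimal places here.
    assert old_text == new_text == '1.50 kB'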
@@ -85,7 +85,7 @@ def get_content_type(filename):
     if mime:
         content_type = mime
         if encoding:
-            content_type = '%s; charset=%s' % (mime, encoding)
+            content_type = f'{mime}; charset={encoding}'
     return content_type

