mirror of https://github.com/httpie/cli.git
Add basic file parsing and loop for http file
Co-authored-by: Jakub Rybak <laykos0@protonmail.com>
parent: c37e5aefe6
commit: 51e61eb565
httpie/core.py
@@ -15,6 +15,7 @@ from .cli.nested_json import NestedJSONSyntaxError
 from .client import collect_messages
 from .context import Environment, LogLevel
 from .downloads import Downloader
+from .http_parser import http_parser
 from .models import (
     RequestsMessageKind,
     OutputOptions
@@ -172,103 +173,118 @@ def program(args: argparse.Namespace, env: Environment) -> ExitStatus:
     The main program without error handling.
 
     """
-    # TODO: Refactor and drastically simplify, especially so that the separator logic is elsewhere.
-    exit_status = ExitStatus.SUCCESS
-    downloader = None
-    initial_request: Optional[requests.PreparedRequest] = None
-    final_response: Optional[requests.Response] = None
-    processing_options = ProcessingOptions.from_raw_args(args)
-
-    def separate():
-        getattr(env.stdout, 'buffer', env.stdout).write(MESSAGE_SEPARATOR_BYTES)
-
-    def request_body_read_callback(chunk: bytes):
-        should_pipe_to_stdout = bool(
-            # Request body output desired
-            OUT_REQ_BODY in args.output_options
-            # & not `.read()` already pre-request (e.g., for compression)
-            and initial_request
-            # & non-EOF chunk
-            and chunk
-        )
-        if should_pipe_to_stdout:
-            return write_raw_data(
-                env,
-                chunk,
-                processing_options=processing_options,
-                headers=initial_request.headers
-            )
-
-    try:
-        if args.download:
-            args.follow = True  # --download implies --follow.
-            downloader = Downloader(env, output_file=args.output_file, resume=args.download_resume)
-            downloader.pre_request(args.headers)
-
-        messages = collect_messages(env, args=args,
-                                    request_body_read_callback=request_body_read_callback)
-        force_separator = False
-        prev_with_body = False
-
-        # Process messages as they’re generated
-        for message in messages:
-            output_options = OutputOptions.from_message(message, args.output_options)
-
-            do_write_body = output_options.body
-            if prev_with_body and output_options.any() and (force_separator or not env.stdout_isatty):
-                # Separate after a previous message with body, if needed. See test_tokens.py.
-                separate()
-            force_separator = False
-            if output_options.kind is RequestsMessageKind.REQUEST:
-                if not initial_request:
-                    initial_request = message
-                if output_options.body:
-                    is_streamed_upload = not isinstance(message.body, (str, bytes))
-                    do_write_body = not is_streamed_upload
-                    force_separator = is_streamed_upload and env.stdout_isatty
-            else:
-                final_response = message
-                if args.check_status or downloader:
-                    exit_status = http_status_to_exit_status(http_status=message.status_code, follow=args.follow)
-                    if exit_status != ExitStatus.SUCCESS and (not env.stdout_isatty or args.quiet == 1):
-                        env.log_error(f'HTTP {message.raw.status} {message.raw.reason}', level=LogLevel.WARNING)
-            write_message(
-                requests_message=message,
-                env=env,
-                output_options=output_options._replace(
-                    body=do_write_body
-                ),
-                processing_options=processing_options
-            )
-            prev_with_body = output_options.body
-
-        # Cleanup
-        if force_separator:
-            separate()
-        if downloader and exit_status == ExitStatus.SUCCESS:
-            # Last response body download.
-            download_stream, download_to = downloader.start(
-                initial_url=initial_request.url,
-                final_response=final_response,
-            )
-            write_stream(stream=download_stream, outfile=download_to, flush=False)
-            downloader.finish()
-            if downloader.interrupted:
-                exit_status = ExitStatus.ERROR
-                env.log_error(
-                    f'Incomplete download: size={downloader.status.total_size};'
-                    f' downloaded={downloader.status.downloaded}'
-                )
-        return exit_status
-
-    finally:
-        if downloader and not downloader.finished:
-            downloader.failed()
-        if args.output_file and args.output_file_specified:
-            args.output_file.close()
+    def actual_program(args: argparse.Namespace, env: Environment) -> ExitStatus:
+        # TODO: Refactor and drastically simplify, especially so that the separator logic is elsewhere.
+        exit_status = ExitStatus.SUCCESS
+        downloader = None
+        initial_request: Optional[requests.PreparedRequest] = None
+        final_response: Optional[requests.Response] = None
+        processing_options = ProcessingOptions.from_raw_args(args)
+
+        def separate():
+            getattr(env.stdout, 'buffer', env.stdout).write(MESSAGE_SEPARATOR_BYTES)
+
+        def request_body_read_callback(chunk: bytes):
+            should_pipe_to_stdout = bool(
+                # Request body output desired
+                OUT_REQ_BODY in args.output_options
+                # & not `.read()` already pre-request (e.g., for compression)
+                and initial_request
+                # & non-EOF chunk
+                and chunk
+            )
+            if should_pipe_to_stdout:
+                return write_raw_data(
+                    env,
+                    chunk,
+                    processing_options=processing_options,
+                    headers=initial_request.headers
+                )
+
+        try:
+            if args.download:
+                args.follow = True  # --download implies --follow.
+                downloader = Downloader(env, output_file=args.output_file, resume=args.download_resume)
+                downloader.pre_request(args.headers)
+
+            if args.http_file:
+                print("################# Reading from HTTP file:", args.url)
+
+            messages = collect_messages(env, args=args,
+                                        request_body_read_callback=request_body_read_callback)
+            force_separator = False
+            prev_with_body = False
+
+            # Process messages as they’re generated
+            for message in messages:
+                output_options = OutputOptions.from_message(message, args.output_options)
+
+                do_write_body = output_options.body
+                if prev_with_body and output_options.any() and (force_separator or not env.stdout_isatty):
+                    # Separate after a previous message with body, if needed. See test_tokens.py.
+                    separate()
+                force_separator = False
+                if output_options.kind is RequestsMessageKind.REQUEST:
+                    if not initial_request:
+                        initial_request = message
+                    if output_options.body:
+                        is_streamed_upload = not isinstance(message.body, (str, bytes))
+                        do_write_body = not is_streamed_upload
+                        force_separator = is_streamed_upload and env.stdout_isatty
+                else:
+                    final_response = message
+                    if args.check_status or downloader:
+                        exit_status = http_status_to_exit_status(http_status=message.status_code, follow=args.follow)
+                        if exit_status != ExitStatus.SUCCESS and (not env.stdout_isatty or args.quiet == 1):
+                            env.log_error(f'HTTP {message.raw.status} {message.raw.reason}', level=LogLevel.WARNING)
+                write_message(
+                    requests_message=message,
+                    env=env,
+                    output_options=output_options._replace(
+                        body=do_write_body
+                    ),
+                    processing_options=processing_options
+                )
+                prev_with_body = output_options.body
+
+            # Cleanup
+            if force_separator:
+                separate()
+            if downloader and exit_status == ExitStatus.SUCCESS:
+                # Last response body download.
+                download_stream, download_to = downloader.start(
+                    initial_url=initial_request.url,
+                    final_response=final_response,
+                )
+                write_stream(stream=download_stream, outfile=download_to, flush=False)
+                downloader.finish()
+                if downloader.interrupted:
+                    exit_status = ExitStatus.ERROR
+                    env.log_error(
+                        f'Incomplete download: size={downloader.status.total_size};'
+                        f' downloaded={downloader.status.downloaded}'
+                    )
+            return exit_status
+
+        finally:
+            if downloader and not downloader.finished:
+                downloader.failed()
+            if args.output_file and args.output_file_specified:
+                args.output_file.close()
+
+    if args.http_file:
+        # TODO: FILE PARSING TO REQUESTS ARRAY
+        requests_list = http_parser(args.url)
+        returns = []
+        for req in requests_list:
+            args.url = req.url
+            args.method = req.method
+            # args.headers = req.headers
+            # args.body = req.body
+            returns.append(actual_program(args, env))
+
+        return ExitStatus.SUCCESS if all(r is ExitStatus.SUCCESS for r in returns) else ExitStatus.ERROR
+
+    return actual_program(args, env)
 
 
 def print_debug_info(env: Environment):
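In the new dispatch at the bottom of program(), args.url is carrying the path of the .http file, and actual_program() runs once per parsed request; the per-request exit codes collected in returns are then collapsed into a single status. A minimal sketch of that aggregation rule (the ExitStatus class below is a simplified stand-in for httpie's real enum in httpie.status):

from enum import IntEnum


class ExitStatus(IntEnum):
    # Simplified stand-in for httpie.status.ExitStatus.
    SUCCESS = 0
    ERROR = 1


def aggregate(returns):
    # The commit's rule: the run as a whole succeeds only if every
    # request parsed from the .http file succeeded.
    return ExitStatus.SUCCESS if all(r is ExitStatus.SUCCESS for r in returns) else ExitStatus.ERROR


assert aggregate([ExitStatus.SUCCESS, ExitStatus.SUCCESS]) is ExitStatus.SUCCESS
assert aggregate([ExitStatus.SUCCESS, ExitStatus.ERROR]) is ExitStatus.ERROR

Note that only req.url and req.method are copied back onto args so far; the headers and body assignments are still commented out in the loop.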
httpie/http_parser.py
@@ -1,136 +1,34 @@
-import argparse
-from time import monotonic
-from typing import Callable, Iterable
-
-import requests
-# noinspection PyPackageRequirements
-import urllib3
-
-from httpie.client import (
-    build_requests_session,
-    dump_request,
-    ensure_path_as_is,
-    make_request_kwargs, make_send_kwargs,
-    make_send_kwargs_mergeable_from_env,
-    max_headers,
-    transform_headers
-)
-from . import __version__
-from .cli.constants import HTTP_OPTIONS
-from .context import Environment
-from .encoding import UTF8
-from .models import RequestsMessage
-from .sessions import get_httpie_session
-from .uploads import (
-    compress_request,
-)
-from .utils import get_expired_cookies
+from dataclasses import dataclass
+from pathlib import Path
 
 
-urllib3.disable_warnings()
-
-FORM_CONTENT_TYPE = f'application/x-www-form-urlencoded; charset={UTF8}'
-JSON_CONTENT_TYPE = 'application/json'
-JSON_ACCEPT = f'{JSON_CONTENT_TYPE}, */*;q=0.5'
-DEFAULT_UA = f'HTTPie/{__version__}'
-
-IGNORE_CONTENT_LENGTH_METHODS = frozenset([HTTP_OPTIONS])
+@dataclass
+class HttpFileRequest:
+    method: str
+    url: str
+    headers: dict
+    body: bytes
 
 
-def collect_messages(
-    env: Environment,
-    args: argparse.Namespace,
-    request_body_read_callback: Callable[[bytes], None] = None,
-) -> Iterable[RequestsMessage]:
-    httpie_session = None
-    httpie_session_headers = None
-    if args.session or args.session_read_only:
-        httpie_session = get_httpie_session(
-            env=env,
-            config_dir=env.config.directory,
-            session_name=args.session or args.session_read_only,
-            host=args.headers.get('Host'),
-            url=args.url,
-        )
-        httpie_session_headers = httpie_session.headers
-
-    request_kwargs = make_request_kwargs(
-        env,
-        args=args,
-        base_headers=httpie_session_headers,
-        request_body_read_callback=request_body_read_callback
-    )
-    send_kwargs = make_send_kwargs(args)
-    send_kwargs_mergeable_from_env = make_send_kwargs_mergeable_from_env(args)
-    requests_session = build_requests_session(
-        ssl_version=args.ssl_version,
-        ciphers=args.ciphers,
-        verify=bool(send_kwargs_mergeable_from_env['verify'])
-    )
-
-    if httpie_session:
-        httpie_session.update_headers(request_kwargs['headers'])
-        requests_session.cookies = httpie_session.cookies
-        if args.auth_plugin:
-            # Save auth from CLI to HTTPie session.
-            httpie_session.auth = {
-                'type': args.auth_plugin.auth_type,
-                'raw_auth': args.auth_plugin.raw_auth,
-            }
-        elif httpie_session.auth:
-            # Apply auth from HTTPie session
-            request_kwargs['auth'] = httpie_session.auth
-
-    if args.debug:
-        # TODO: reflect the split between request and send kwargs.
-        dump_request(request_kwargs)
-
-    request = requests.Request(**request_kwargs)
-    prepared_request = requests_session.prepare_request(request)
-    transform_headers(request, prepared_request)
-    if args.path_as_is:
-        prepared_request.url = ensure_path_as_is(
-            orig_url=args.url,
-            prepped_url=prepared_request.url,
-        )
-    if args.compress and prepared_request.body:
-        compress_request(
-            request=prepared_request,
-            always=args.compress > 1,
-        )
-    response_count = 0
-    expired_cookies = []
-    while prepared_request:
-        yield prepared_request
-        if not args.offline:
-            send_kwargs_merged = requests_session.merge_environment_settings(
-                url=prepared_request.url,
-                **send_kwargs_mergeable_from_env,
-            )
-            with max_headers(args.max_headers):
-                response = requests_session.send(
-                    request=prepared_request,
-                    **send_kwargs_merged,
-                    **send_kwargs,
-                )
-            response._httpie_headers_parsed_at = monotonic()
-            expired_cookies += get_expired_cookies(
-                response.headers.get('Set-Cookie', '')
-            )
-
-            response_count += 1
-            if response.next:
-                if args.max_redirects and response_count == args.max_redirects:
-                    raise requests.TooManyRedirects
-                if args.follow:
-                    prepared_request = response.next
-                    if args.all:
-                        yield response
-                    continue
-            yield response
-        break
-
-    if httpie_session:
-        if httpie_session.is_new() or not args.session_read_only:
-            httpie_session.cookies = requests_session.cookies
-            httpie_session.remove_cookies(expired_cookies)
-            httpie_session.save()
+def http_parser(filename: str) -> list[HttpFileRequest]:
+    http_file = Path(filename)
+    if not http_file.exists():
+        raise FileNotFoundError(f"File not found: {filename}")
+    if not http_file.is_file():
+        raise IsADirectoryError(f"Path is not a file: {filename}")
+    http_contents = http_file.read_text()
+    http_lines = [
+        line for line in http_contents.splitlines() if not line.startswith("#")
+    ]
+    http_lines = [line for line in http_lines if line.strip()]
+    first_line = http_lines[0]
+    method, url = first_line.split(" ")
+
+    return [
+        HttpFileRequest(
+            method=method,
+            url=url,
+            headers={},
+            body=b"",
+        )
+    ]
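As committed, http_parser() drops comment lines starting with "#" and blank lines, splits the first remaining line on a single space into method and URL, and always returns exactly one HttpFileRequest with empty headers and body. A small usage sketch under those assumptions (the file name and URL are made up):

from pathlib import Path

from httpie.http_parser import http_parser

# Hypothetical .http file; "#" lines and blank lines are ignored.
Path('example.http').write_text(
    '# fetch the thing\n'
    '\n'
    'GET https://pie.dev/get\n'
)

req = http_parser('example.http')[0]
print(req.method, req.url)    # GET https://pie.dev/get
print(req.headers, req.body)  # {} b''

Two caveats follow directly from the code: a request line that carries a version token, e.g. "GET https://pie.dev/get HTTP/1.1", makes first_line.split(" ") raise ValueError (three values, two targets), and the list[HttpFileRequest] annotation requires Python 3.9+ unless from __future__ import annotations is added.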