Add basic file parsing and request loop for .http files

Co-authored-by: Jakub Rybak <laykos0@protonmail.com>
This commit is contained in:
Elias Floreteng
2025-02-26 15:04:00 +00:00
parent c37e5aefe6
commit 51e61eb565
2 changed files with 136 additions and 222 deletions

View File

@ -15,6 +15,7 @@ from .cli.nested_json import NestedJSONSyntaxError
from .client import collect_messages
from .context import Environment, LogLevel
from .downloads import Downloader
from .http_parser import http_parser
from .models import (
RequestsMessageKind,
OutputOptions
@ -172,6 +173,8 @@ def program(args: argparse.Namespace, env: Environment) -> ExitStatus:
The main program without error handling.
"""
def actual_program(args: argparse.Namespace, env: Environment) -> ExitStatus:
# TODO: Refactor and drastically simplify, especially so that the separator logic is elsewhere.
exit_status = ExitStatus.SUCCESS
downloader = None
@ -205,8 +208,6 @@ def program(args: argparse.Namespace, env: Environment) -> ExitStatus:
downloader = Downloader(env, output_file=args.output_file, resume=args.download_resume)
downloader.pre_request(args.headers)
if args.http_file:
print("################# Reading from HTTP file:", args.url)
messages = collect_messages(env, args=args,
request_body_read_callback=request_body_read_callback)
@ -270,6 +271,21 @@ def program(args: argparse.Namespace, env: Environment) -> ExitStatus:
if args.output_file and args.output_file_specified:
args.output_file.close()
if args.http_file:
# TODO: FILE PARSING TO REQUESTS ARRAY
requests_list = http_parser(args.url)
returns = []
for req in requests_list:
args.url = req.url
args.method = req.method
# args.headers = req.headers
# args.body = req.body
returns.append(actual_program(args, env))
return ExitStatus.SUCCESS if all(r is ExitStatus.SUCCESS for r in returns) else ExitStatus.ERROR
return actual_program(args, env)
def print_debug_info(env: Environment):
env.stderr.writelines([

View File

@ -1,136 +1,34 @@
import argparse
from time import monotonic
from typing import Callable, Iterable
from dataclasses import dataclass
from pathlib import Path
import requests
# noinspection PyPackageRequirements
import urllib3
from httpie.client import (
build_requests_session,
dump_request,
ensure_path_as_is,
make_request_kwargs, make_send_kwargs,
make_send_kwargs_mergeable_from_env,
max_headers,
transform_headers
@dataclass
class HttpFileRequest:
    """A single request parsed from a .http file."""
    method: str   # HTTP verb, e.g. 'GET'
    url: str      # request URL from the request line
    headers: dict  # header name -> value (not yet populated by the parser)
    body: bytes    # raw request body (not yet populated by the parser)


def http_parser(filename: str) -> list[HttpFileRequest]:
    """Parse a .http file into a list of ``HttpFileRequest`` objects.

    Currently only the request line (method + URL) of the first request
    is extracted; header and body parsing are TODO.

    :param filename: path to the .http file
    :raises FileNotFoundError: if the path does not exist
    :raises IsADirectoryError: if the path exists but is not a regular file
    :raises ValueError: if the file contains no parsable request line
    """
    http_file = Path(filename)
    if not http_file.exists():
        raise FileNotFoundError(f"File not found: {filename}")
    if not http_file.is_file():
        raise IsADirectoryError(f"Path is not a file: {filename}")

    http_contents = http_file.read_text()
    # Drop comment lines ('#' in column 0) and blank lines.
    http_lines = [
        line for line in http_contents.splitlines()
        if line.strip() and not line.startswith("#")
    ]
    if not http_lines:
        raise ValueError(f"No request found in file: {filename}")

    # A request line is 'METHOD URL', optionally followed by an HTTP
    # version token (e.g. 'HTTP/1.1'); split() tolerates both the extra
    # token and repeated whitespace, which split(" ") did not.
    parts = http_lines[0].split()
    if len(parts) < 2:
        raise ValueError(f"Malformed request line: {http_lines[0]!r}")
    method, url = parts[0], parts[1]

    return [
        HttpFileRequest(
            method=method,
            url=url,
            headers={},
            body=b"",
        )
    ]
from . import __version__
from .cli.constants import HTTP_OPTIONS
from .context import Environment
from .encoding import UTF8
from .models import RequestsMessage
from .sessions import get_httpie_session
from .uploads import (
compress_request,
)
from .utils import get_expired_cookies
# Silence urllib3 warnings (e.g. about unverified TLS requests); HTTPie
# reports such conditions through its own messaging instead.
urllib3.disable_warnings()

# Default content-type / accept headers used when building requests.
FORM_CONTENT_TYPE = f'application/x-www-form-urlencoded; charset={UTF8}'
JSON_CONTENT_TYPE = 'application/json'
JSON_ACCEPT = f'{JSON_CONTENT_TYPE}, */*;q=0.5'
DEFAULT_UA = f'HTTPie/{__version__}'

# Methods for which a Content-Length header is suppressed.
IGNORE_CONTENT_LENGTH_METHODS = frozenset([HTTP_OPTIONS])
def collect_messages(
    env: Environment,
    args: argparse.Namespace,
    request_body_read_callback: Callable[[bytes], None] = None,
) -> Iterable[RequestsMessage]:
    """Build, send, and yield the request/response message exchange.

    Yields the prepared request first, then (unless ``--offline``) each
    response; when redirects are followed, intermediate requests and
    (with ``--all``) intermediate responses are yielded as well.
    Session cookies/auth are loaded before sending and persisted after.

    :param env: runtime environment (config, streams).
    :param args: parsed CLI arguments driving every option below.
    :param request_body_read_callback: invoked with body chunks as the
        request body is read (used for upload progress reporting).
    """
    httpie_session = None
    httpie_session_headers = None
    if args.session or args.session_read_only:
        # Load a named (or read-only) HTTPie session; its headers are
        # merged in as the base for this request.
        httpie_session = get_httpie_session(
            env=env,
            config_dir=env.config.directory,
            session_name=args.session or args.session_read_only,
            host=args.headers.get('Host'),
            url=args.url,
        )
        httpie_session_headers = httpie_session.headers

    request_kwargs = make_request_kwargs(
        env,
        args=args,
        base_headers=httpie_session_headers,
        request_body_read_callback=request_body_read_callback
    )
    send_kwargs = make_send_kwargs(args)
    send_kwargs_mergeable_from_env = make_send_kwargs_mergeable_from_env(args)
    requests_session = build_requests_session(
        ssl_version=args.ssl_version,
        ciphers=args.ciphers,
        verify=bool(send_kwargs_mergeable_from_env['verify'])
    )

    if httpie_session:
        # Session headers/cookies flow into this request; CLI auth flows
        # back into the session.
        httpie_session.update_headers(request_kwargs['headers'])
        requests_session.cookies = httpie_session.cookies
        if args.auth_plugin:
            # Save auth from CLI to HTTPie session.
            httpie_session.auth = {
                'type': args.auth_plugin.auth_type,
                'raw_auth': args.auth_plugin.raw_auth,
            }
        elif httpie_session.auth:
            # Apply auth from HTTPie session
            request_kwargs['auth'] = httpie_session.auth

    if args.debug:
        # TODO: reflect the split between request and send kwargs.
        dump_request(request_kwargs)

    request = requests.Request(**request_kwargs)
    prepared_request = requests_session.prepare_request(request)
    # Restore any header casing/ordering requirements after preparation.
    transform_headers(request, prepared_request)
    if args.path_as_is:
        # Undo requests' path normalization so the URL path is sent verbatim.
        prepared_request.url = ensure_path_as_is(
            orig_url=args.url,
            prepped_url=prepared_request.url,
        )
    if args.compress and prepared_request.body:
        # --compress once = compress when it helps; twice = always.
        compress_request(
            request=prepared_request,
            always=args.compress > 1,
        )
    response_count = 0
    expired_cookies = []
    # Loop only continues past the first iteration when following redirects
    # (`continue` below); otherwise the trailing `break` ends it.
    while prepared_request:
        yield prepared_request
        if not args.offline:
            send_kwargs_merged = requests_session.merge_environment_settings(
                url=prepared_request.url,
                **send_kwargs_mergeable_from_env,
            )
            with max_headers(args.max_headers):
                response = requests_session.send(
                    request=prepared_request,
                    **send_kwargs_merged,
                    **send_kwargs,
                )
            # Timestamp used downstream for --meta timing output.
            response._httpie_headers_parsed_at = monotonic()
            expired_cookies += get_expired_cookies(
                response.headers.get('Set-Cookie', '')
            )

            response_count += 1
            if response.next:
                if args.max_redirects and response_count == args.max_redirects:
                    raise requests.TooManyRedirects
                if args.follow:
                    prepared_request = response.next
                    if args.all:
                        # --all: also emit each intermediate response.
                        yield response
                    continue
            yield response
        break

    if httpie_session:
        if httpie_session.is_new() or not args.session_read_only:
            # Persist cookies back to the session, minus any that expired
            # during this exchange.
            httpie_session.cookies = requests_session.cookies
            httpie_session.remove_cookies(expired_cookies)
            httpie_session.save()
]