Mirror of https://github.com/httpie/cli.git (synced 2025-08-10 10:07:44 +02:00)

Compare commits (135 commits)
.github/FUNDING.yml (vendored): 4 lines changed
@@ -1,6 +1,6 @@
# These are supported funding model platforms

github: # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2]
github: jakubroztocil # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2]
patreon: # Replace with a single Patreon username
open_collective: # Replace with a single Open Collective username
ko_fi: # Replace with a single Ko-fi username
@@ -9,4 +9,4 @@ community_bridge: # Replace with a single Community Bridge project-name e.g., cl
liberapay: # Replace with a single Liberapay username
issuehunt: # Replace with a single IssueHunt username
otechie: # Replace with a single Otechie username
custom: https://paypal.me/roztocil
custom:
.github/workflows/build.yml (vendored): 8 lines changed
@@ -1,5 +1,5 @@
name: Build
on: [push]
on: [push, pull_request]
jobs:
extras:
# Run coverage and extra tests only once
@@ -9,7 +9,7 @@ jobs:
- uses: actions/setup-python@v1
with:
python-version: 3.8
- run: pip install --upgrade pip
- run: python -m pip install --upgrade pip setuptools wheel
- run: make install
- run: make pycodestyle
- run: make test-cover
@@ -32,6 +32,6 @@ jobs:
- uses: actions/setup-python@v1
with:
python-version: ${{ matrix.python-version }}
- run: python -m pip install --upgrade pip
- run: pip install --upgrade --editable .
- run: python -m pip install --upgrade pip setuptools wheel
- run: python -m pip install --upgrade --editable .
- run: python setup.py test
.gitignore (vendored): 3 lines changed
@@ -7,7 +7,7 @@ htmlcov

##############################################################################
# The bellow is GitHub template for Python project. gitignore.
# The below is GitHub template for Python project. gitignore.
# <https://github.com/github/gitignore/blob/master/Python.gitignore>
##############################################################################

@@ -52,7 +52,6 @@ pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
@@ -6,6 +6,47 @@ This document records all notable changes to `HTTPie <https://httpie.org>`_.
This project adheres to `Semantic Versioning <https://semver.org/>`_.


`2.3.0`_ (2020-10-25)
-------------------------

* Added support for multipart upload streaming (`#684`_).
* Added support for body-from-file upload streaming (``http httpbin.org/post @file``).
* Added ``--chunked`` to allow chunked transfer encoding.
* Added ``--multipart`` to allow ``multipart/form-data`` encoding for non-file ``--form`` requests as well.
* Added ``--boundary`` to allow a custom boundary string for ``multipart/form-data`` requests (see the sketch after this list).
* Added support for combining cookies specified on the CLI and in a session file (`#932`_).
* Added out of the box SOCKS support with no extra installation (`#904`_).
* Added ``--quiet, -q`` flag to enforce silent behaviour.
* Fixed the handling of invalid ``expires`` dates in ``Set-Cookie`` headers (`#963`_).
* Removed Tox testing entirely (`#943`_).

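The upload-related options above sit on top of HTTPie's ``requests``/``requests-toolbelt``
stack (both are listed as dependencies in this change set). A rough, illustrative sketch
of the equivalent calls, not HTTPie's internal code; the URL and file names are placeholders:

.. code-block:: python

    import requests
    from requests_toolbelt import MultipartEncoder

    # --multipart / --boundary: force multipart/form-data, pin the boundary string,
    # and stream the encoded body instead of buffering it all in memory.
    encoder = MultipartEncoder(
        fields={'name': 'John', 'cv': ('cv.pdf', open('cv.pdf', 'rb'), 'application/pdf')},
        boundary='xoxo',
    )
    requests.post(
        'https://httpbin.org/post',
        data=encoder,
        headers={'Content-Type': encoder.content_type},
    )

    # --chunked: requests switches to chunked transfer encoding when the body
    # is an iterator rather than a fixed-length object.
    def chunks(path, size=8192):
        with open(path, 'rb') as f:
            yield from iter(lambda: f.read(size), b'')

    requests.post('https://httpbin.org/post', data=chunks('file.bin'))
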
`2.2.0`_ (2020-06-18)
-------------------------

* Added support for custom content types for uploaded files (`#668`_).
* Added support for ``$XDG_CONFIG_HOME`` (`#920`_).
* Added support for ``Set-Cookie``-triggered cookie expiration (`#853`_).
* Added ``--format-options`` to allow disabling sorting, etc. (`#128`_)
* Added ``--sorted`` and ``--unsorted`` shortcuts for (un)setting all sorting-related ``--format-options``. (`#128`_)
* Added ``--ciphers`` to allow configuring OpenSSL ciphers (`#870`_).
* Added ``netrc`` support for auth plugins. Enabled for ``--auth-type=basic``
  and ``digest``, 3rd parties may opt in (`#718`_, `#719`_, `#852`_, `#934`_);
  see the sketch after this list.
* Fixed built-in plugins-related circular imports (`#925`_).

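For the ``netrc`` lookup, HTTPie calls the helper that ``requests`` ships with
(``requests.utils.get_netrc_auth``, imported in the parser changes further below).
A minimal illustration; the host and credentials are made up:

.. code-block:: python

    from requests.utils import get_netrc_auth

    # Assuming a ~/.netrc along the lines of:
    #   machine api.example.org
    #   login demo
    #   password secret
    credentials = get_netrc_auth('https://api.example.org/endpoint')
    if credentials:
        username, password = credentials  # ('demo', 'secret')
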
`2.1.0`_ (2020-04-18)
---------------------

* Added ``--path-as-is`` to bypass dot segment (``/../`` or ``/./``)
  URL squashing (`#895`_).
* Changed the default ``Accept`` header value for JSON requests from
  ``application/json, */*`` to ``application/json, */*;q=0.5``
  to clearly indicate preference (`#488`_).
* Fixed ``--form`` file upload mixed with redirected ``stdin`` error handling
  (`#840`_).


`2.0.0`_ (2020-01-12)
-------------------------
* Removed Python 2.7 support (`EOL Jan 2020 <https://www.python.org/doc/sunset-python-2/>`_).
@@ -14,7 +55,7 @@ This project adheres to `Semantic Versioning <https://semver.org/>`_.
* Replaced the old collect-all-then-process handling of HTTP communication
  with one-by-one processing of each HTTP request or response as they become
  available. This means that you can see headers immediately,
  see what is being send even when the request fails, etc.
  see what is being sent even if the request fails, etc.
* Removed automatic config file creation to avoid concurrency issues.
* Removed the default 30-second connection ``--timeout`` limit.
* Removed Python’s default limit of 100 response headers.
@@ -113,7 +154,7 @@ This project adheres to `Semantic Versioning <https://semver.org/>`_.
  ``$ alias https='http --default-scheme=https``.
* Added ``-I`` as a shortcut for ``--ignore-stdin``.
* Added fish shell completion (located in ``extras/httpie-completion.fish``
  in the Github repo).
  in the GitHub repo).
* Updated ``requests`` to 2.10.0 so that SOCKS support can be added via
  ``pip install requests[socks]``.
* Changed the default JSON ``Accept`` header from ``application/json``
@@ -409,4 +450,26 @@ This project adheres to `Semantic Versioning <https://semver.org/>`_.
.. _1.0.2: https://github.com/jakubroztocil/httpie/compare/1.0.1...1.0.2
.. _1.0.3: https://github.com/jakubroztocil/httpie/compare/1.0.2...1.0.3
.. _2.0.0: https://github.com/jakubroztocil/httpie/compare/1.0.3...2.0.0
.. _2.1.0-dev: https://github.com/jakubroztocil/httpie/compare/2.0.0...master
.. _2.1.0: https://github.com/jakubroztocil/httpie/compare/2.0.0...2.1.0
.. _2.2.0: https://github.com/jakubroztocil/httpie/compare/2.1.0...2.2.0
.. _2.3.0: https://github.com/jakubroztocil/httpie/compare/2.2.0...2.3.0

.. _#128: https://github.com/jakubroztocil/httpie/issues/128
.. _#488: https://github.com/jakubroztocil/httpie/issues/488
.. _#668: https://github.com/jakubroztocil/httpie/issues/668
.. _#684: https://github.com/jakubroztocil/httpie/issues/684
.. _#718: https://github.com/jakubroztocil/httpie/issues/718
.. _#719: https://github.com/jakubroztocil/httpie/issues/719
.. _#840: https://github.com/jakubroztocil/httpie/issues/840
.. _#853: https://github.com/jakubroztocil/httpie/issues/853
.. _#852: https://github.com/jakubroztocil/httpie/issues/852
.. _#870: https://github.com/jakubroztocil/httpie/issues/870
.. _#895: https://github.com/jakubroztocil/httpie/issues/895
.. _#920: https://github.com/jakubroztocil/httpie/issues/920
.. _#904: https://github.com/jakubroztocil/httpie/issues/904
.. _#925: https://github.com/jakubroztocil/httpie/issues/925
.. _#932: https://github.com/jakubroztocil/httpie/issues/932
.. _#934: https://github.com/jakubroztocil/httpie/issues/934
.. _#943: https://github.com/jakubroztocil/httpie/issues/943
.. _#963: https://github.com/jakubroztocil/httpie/issues/963
@@ -16,17 +16,34 @@ to your bug report, e.g.:

.. code-block:: bash

    $ http --debug [COMPLETE ARGUMENT LIST THAT TRIGGERS THE ERROR]
    [COMPLETE OUTPUT]
    $ http --debug <COMPLETE ARGUMENT LIST THAT TRIGGERS THE ERROR>

    <COMPLETE OUTPUT>


2. Contributing Code and Docs
=============================

Before working on a new feature or a bug, please browse `existing issues`_
to see whether it has been previously discussed. If the change in question
is a bigger one, it's always good to discuss before you start working on
it.
to see whether it has previously been discussed.

If your change alters HTTPie’s behaviour or interface, it's a good idea to
discuss it before you start working on it.

If you are fixing an issue, the first step should be to create a test case that
reproduces the incorrect behaviour. That will also help you to build an
understanding of the issue at hand.
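A hypothetical shape for such a regression test (the flag, URL, and assertions here
are placeholders rather than something taken from the real suite; HTTPie's own tests
under ``tests/`` use richer helpers):

.. code-block:: python

    import subprocess

    def test_verbose_prints_request_headers():
        # First make this fail by reproducing the reported behaviour,
        # then fix the code and keep the test as a regression guard.
        result = subprocess.run(
            ['http', '--verbose', '--ignore-stdin', 'https://httpbin.org/get'],
            capture_output=True, text=True,
        )
        assert result.returncode == 0
        assert 'GET /get HTTP/1.1' in result.stdout
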
**Pull requests introducing code changes without tests
will generally not get merged. The same goes for PRs changing HTTPie’s
behaviour and not providing documentation.**

Conversely, PRs consisting of documentation improvements or tests
for existing-yet-previously-untested behavior will very likely be merged.
Therefore, docs and tests improvements are a great candidate for your first
contribution.

Consider also adding a ``CHANGELOG`` entry for your changes.


Development Environment
@@ -87,7 +104,7 @@ activate the environment — we have created a symlink for you. It’s a bit of
a hack but it works™.)

You should now see ``(httpie)`` next to your shell prompt, and
the ``http`` should point to you development copy:
the ``http`` command should point to your development copy:

.. code-block::

@@ -115,7 +132,7 @@ Testing & CI
Please add tests for any new features and bug fixes.

When you open a pull request,
`Github Actions <https://github.com/jakubroztocil/httpie/actions>`_
`GitHub Actions <https://github.com/jakubroztocil/httpie/actions>`_
will automatically run HTTPie’s `test suite`_ against your code
so please make sure all checks pass.

@@ -123,8 +140,7 @@ so please make sure all checks pass.
Running tests locally
*********************

HTTPie uses the `pytest`_ runner. It also uses `Tox`_ which allows you to run
tests on multiple Python versions even when testing locally.
HTTPie uses the `pytest`_ runner.


.. code-block:: bash
@@ -135,9 +151,6 @@ tests on multiple Python versions even when testing locally.
    # Run tests with coverage
    make test-cover

    # Run all tests in all of the supported and available Pythons via Tox
    make test-tox

    # Test PEP8 compliance
    make pycodestyle

@@ -158,22 +171,65 @@ can run specific tests from the terminal:
    py.test tests/test_uploads.py::TestMultipartFormDataFileUpload
    py.test tests/test_uploads.py::TestMultipartFormDataFileUpload::test_upload_ok

    # Run specific tests on the on all Pythons via Tox
    # (change to `tox -e py37' to limit Python version)
    tox -- tests/test_uploads.py --verbose
    tox -- tests/test_uploads.py::TestMultipartFormDataFileUpload --verbose
    tox -- tests/test_uploads.py::TestMultipartFormDataFileUpload::test_upload_ok --verbose

-----

See `Makefile`_ for additional development utilities.

Windows
*******

Finally, don't forget to add yourself to `AUTHORS`_!
If you are on a Windows machine and not able to run ``make``,
follow the next steps for a basic setup. As a prerequisite, you need to have
Python 3.6+ installed.

Create a virtual environment and activate it:

.. code-block:: powershell

    python -m venv --prompt httpie venv
    venv\Scripts\activate

Install HTTPie in editable mode with all the dependencies:

.. code-block:: powershell

    pip install --upgrade -e . -r requirements-dev.txt

You should now see ``(httpie)`` next to your shell prompt, and
the ``http`` command should point to your development copy:

.. code-block:: powershell

    # In PowerShell:
    (httpie) PS C:\Users\ovezovs\httpie> Get-Command http
    CommandType Name Version Source
    ----------- ---- ------- ------
    Application http.exe 0.0.0.0 C:\Users\ovezovs\httpie\venv\Scripts\http.exe

.. code-block:: bash

    # In CMD:
    (httpie) C:\Users\ovezovs\httpie> where http
    C:\Users\ovezovs\httpie\venv\Scripts\http.exe
    C:\Users\ovezovs\AppData\Local\Programs\Python\Python38-32\Scripts\http.exe

    (httpie) C:\Users\ovezovs\httpie> http --version
    2.3.0-dev

Use ``pytest`` to run tests locally with an active virtual environment:

.. code-block:: bash

    # Run all tests
    py.test


-----


Finally, feel free to add yourself to `AUTHORS`_!


.. _Tox: http://tox.testrun.org
.. _supported Python environments: https://github.com/jakubroztocil/httpie/blob/master/tox.ini
.. _existing issues: https://github.com/jakubroztocil/httpie/issues?state=open
.. _AUTHORS: https://github.com/jakubroztocil/httpie/blob/master/AUTHORS.rst
.. _Makefile: https://github.com/jakubroztocil/httpie/blob/master/Makefile
LICENSE: 2 lines changed
@@ -1,4 +1,4 @@
Copyright © 2012-2019 Jakub Roztocil <jakub@roztocil.co>
Copyright © 2012-2020 Jakub Roztocil <jakub@roztocil.co>

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
Makefile: 11 lines changed
@@ -38,7 +38,7 @@ clean:
rm -rf $(VENV_ROOT)
# Remove symlink for virtualenvwrapper, if we’ve created one.
[ -n "$(WORKON_HOME)" -a -L "$(WORKON_HOME)/httpie" -a -f "$(WORKON_HOME)/httpie" ] && rm $(WORKON_HOME)/httpie || true
rm -rf .tox *.egg dist build .coverage .cache .pytest_cache httpie.egg-info
rm -rf *.egg dist build .coverage .cache .pytest_cache httpie.egg-info
find . -name '__pycache__' -delete -o -name '*.pyc' -delete
@echo

@@ -86,7 +86,7 @@ test-cover: test

# test-all is meant to test everything — even this Makefile
test-all: clean install test test-tox test-dist pycodestyle
test-all: clean install test test-dist pycodestyle
@echo

@@ -94,12 +94,6 @@ test-dist: test-sdist test-bdist-wheel
@echo

test-tox: uninstall-httpie install
@echo $(H1)Running tests on all Pythons via Tox$(H1END)
$(VENV_BIN)/tox
@echo

test-sdist: clean venv
@echo $(H1)Testing sdist build an installation$(H1END)
$(VENV_PYTHON) setup.py sdist
@@ -170,7 +164,6 @@ uninstall-httpie:
###############################################################################

pdf:
# NOTE: rst2pdf needs to be installed manually and against a Python 2
@echo "Converting README.rst to PDF…"
rst2pdf \
--strip-elements-with-class=no-pdf \
README.rst: 810 lines changed (diff suppressed because it is too large)
@@ -16,6 +16,7 @@ PACKAGES = [
'httpie',
'Pygments',
'requests',
'requests-toolbelt',
'certifi',
'urllib3',
'idna',
@ -9,42 +9,42 @@ class Httpie < Formula
|
||||
|
||||
desc "User-friendly cURL replacement (command-line HTTP client)"
|
||||
homepage "https://httpie.org/"
|
||||
url "https://files.pythonhosted.org/packages/d5/a4/ab61c1dbfdef33c7b7f5f7df0d79eb5cd55a106601a4acc17f983f320b4a/httpie-1.0.3.tar.gz"
|
||||
sha256 "6d1b6e21da7d3ec030ae95536d4032c1129bdaf9de4adc72c596b87e5f646e80"
|
||||
url "https://files.pythonhosted.org/packages/37/6c/0d050f49e3b2bac589367d0c3aee9c078e23c6914b0210ffc0117218bdaf/httpie-2.2.0.tar.gz"
|
||||
sha256 "31ac28088ee6a0b6f3ba7a53379000c4d1910c1708c9ff768f84b111c14405a0"
|
||||
head "https://github.com/jakubroztocil/httpie.git"
|
||||
|
||||
bottle do
|
||||
cellar :any_skip_relocation
|
||||
sha256 "158258be68ac93de13860be2bef02da6fd8b68aa24b2e6609bcff1ec3f93e7a0" => :mojave
|
||||
sha256 "54352116b6fa2c3bd65f26136fdcb57986dbff8a52de5febf7aea59c126d29e1" => :high_sierra
|
||||
sha256 "9cce71768fe388808e11b26d651b44a6b54219f5406845b4273b5099f5c1f76f" => :sierra
|
||||
sha256 "25f0e58f81a2cdd9cba772f07d67591533b4b31a2b970a356701aa046d4d9638" => :catalina
|
||||
sha256 "be158ebb4cfd327ebea02f7b8b8d63d093e474cd303eafff4a2b56b0611983a2" => :mojave
|
||||
sha256 "f331edb94183bfc5fa9de4b4abf148cc91a3a8b3c0e24cc1f5e6b0a4172dd34d" => :high_sierra
|
||||
end
|
||||
|
||||
depends_on "python"
|
||||
depends_on "python@3.8"
|
||||
|
||||
resource "Pygments" do
|
||||
url "https://files.pythonhosted.org/packages/7e/ae/26808275fc76bf2832deb10d3a3ed3107bc4de01b85dcccbe525f2cd6d1e/Pygments-2.4.2.tar.gz"
|
||||
sha256 "881c4c157e45f30af185c1ffe8d549d48ac9127433f2c380c24b84572ad66297"
|
||||
url "https://files.pythonhosted.org/packages/6e/4d/4d2fe93a35dfba417311a4ff627489a947b01dc0cc377a3673c00cf7e4b2/Pygments-2.6.1.tar.gz"
|
||||
sha256 "647344a061c249a3b74e230c739f434d7ea4d8b1d5f3721bc0f3558049b38f44"
|
||||
end
|
||||
|
||||
resource "requests" do
|
||||
url "https://files.pythonhosted.org/packages/01/62/ddcf76d1d19885e8579acb1b1df26a852b03472c0e46d2b959a714c90608/requests-2.22.0.tar.gz"
|
||||
sha256 "11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4"
|
||||
url "https://files.pythonhosted.org/packages/da/67/672b422d9daf07365259958912ba533a0ecab839d4084c487a5fe9a5405f/requests-2.24.0.tar.gz"
|
||||
sha256 "b3559a131db72c33ee969480840fff4bb6dd111de7dd27c8ee1f820f4f00231b"
|
||||
end
|
||||
|
||||
resource "certifi" do
|
||||
url "https://files.pythonhosted.org/packages/c5/67/5d0548226bcc34468e23a0333978f0e23d28d0b3f0c71a151aef9c3f7680/certifi-2019.6.16.tar.gz"
|
||||
sha256 "945e3ba63a0b9f577b1395204e13c3a231f9bc0223888be653286534e5873695"
|
||||
url "https://files.pythonhosted.org/packages/b4/19/53433f37a31543364c8676f30b291d128cdf4cd5b31b755b7890f8e89ac8/certifi-2020.4.5.2.tar.gz"
|
||||
sha256 "5ad7e9a056d25ffa5082862e36f119f7f7cec6457fa07ee2f8c339814b80c9b1"
|
||||
end
|
||||
|
||||
resource "urllib3" do
|
||||
url "https://files.pythonhosted.org/packages/4c/13/2386233f7ee40aa8444b47f7463338f3cbdf00c316627558784e3f542f07/urllib3-1.25.3.tar.gz"
|
||||
sha256 "dbe59173209418ae49d485b87d1681aefa36252ee85884c31346debd19463232"
|
||||
url "https://files.pythonhosted.org/packages/05/8c/40cd6949373e23081b3ea20d5594ae523e681b6f472e600fbc95ed046a36/urllib3-1.25.9.tar.gz"
|
||||
sha256 "3018294ebefce6572a474f0604c2021e33b3fd8006ecd11d62107a5d2a963527"
|
||||
end
|
||||
|
||||
resource "idna" do
|
||||
url "https://files.pythonhosted.org/packages/ad/13/eb56951b6f7950cadb579ca166e448ba77f9d24efc03edd7e55fa57d04b7/idna-2.8.tar.gz"
|
||||
sha256 "c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407"
|
||||
url "https://files.pythonhosted.org/packages/cb/19/57503b5de719ee45e83472f339f617b0c01ad75cba44aba1e4c97c2b0abd/idna-2.9.tar.gz"
|
||||
sha256 "7588d1c14ae4c77d74036e8c22ff447b26d0fde8f007354fd48a7814db15b7cb"
|
||||
end
|
||||
|
||||
resource "chardet" do
|
||||
@ -53,8 +53,8 @@ class Httpie < Formula
|
||||
end
|
||||
|
||||
resource "PySocks" do
|
||||
url "https://files.pythonhosted.org/packages/15/ab/35824cfdee1aac662e3298275fa1e6cbedb52126d1785f8977959b769ccf/PySocks-1.7.0.tar.gz"
|
||||
sha256 "d9031ea45fdfacbe59a99273e9f0448ddb33c1580fe3831c1b09557c5718977c"
|
||||
url "https://files.pythonhosted.org/packages/bd/11/293dd436aea955d45fc4e8a35b6ae7270f5b8e00b53cf6c024c83b657a11/PySocks-1.7.1.tar.gz"
|
||||
sha256 "3f8804571ebe159c380ac6de37643bb4685970655d3bba243530d6558b799aa0"
|
||||
end
|
||||
|
||||
def install
|
||||
|
@@ -1,8 +1,8 @@
"""
HTTPie - a CLI, cURL-like tool for humans.
HTTPie: command-line HTTP client for the API era.

"""

__version__ = '2.0.0'
__version__ = '2.3.0'
__author__ = 'Jakub Roztocil'
__licence__ = 'BSD'
@ -7,18 +7,24 @@ from argparse import RawDescriptionHelpFormatter
|
||||
from textwrap import dedent
|
||||
from urllib.parse import urlsplit
|
||||
|
||||
from httpie.cli.argtypes import AuthCredentials, KeyValueArgType, parse_auth
|
||||
from requests.utils import get_netrc_auth
|
||||
|
||||
from httpie.cli.argtypes import (
|
||||
AuthCredentials, KeyValueArgType, PARSED_DEFAULT_FORMAT_OPTIONS,
|
||||
parse_auth,
|
||||
parse_format_options,
|
||||
)
|
||||
from httpie.cli.constants import (
|
||||
HTTP_GET, HTTP_POST, OUTPUT_OPTIONS, OUTPUT_OPTIONS_DEFAULT,
|
||||
OUTPUT_OPTIONS_DEFAULT_STDOUT_REDIRECTED, OUT_RESP_BODY, PRETTY_MAP,
|
||||
PRETTY_STDOUT_TTY_ONLY, SEPARATOR_CREDENTIALS, SEPARATOR_GROUP_ALL_ITEMS,
|
||||
SEPARATOR_GROUP_DATA_ITEMS, URL_SCHEME_RE,
|
||||
OUTPUT_OPTIONS_DEFAULT_OFFLINE,
|
||||
OUTPUT_OPTIONS_DEFAULT_OFFLINE, OUTPUT_OPTIONS_DEFAULT_STDOUT_REDIRECTED,
|
||||
OUT_RESP_BODY, PRETTY_MAP, PRETTY_STDOUT_TTY_ONLY, RequestType,
|
||||
SEPARATOR_CREDENTIALS,
|
||||
SEPARATOR_GROUP_ALL_ITEMS, SEPARATOR_GROUP_DATA_ITEMS, URL_SCHEME_RE,
|
||||
)
|
||||
from httpie.cli.exceptions import ParseError
|
||||
from httpie.cli.requestitems import RequestItems
|
||||
from httpie.context import Environment
|
||||
from httpie.plugins import plugin_manager
|
||||
from httpie.plugins.registry import plugin_manager
|
||||
from httpie.utils import ExplicitNullAuth, get_content_type
|
||||
|
||||
|
||||
@ -42,6 +48,8 @@ class HTTPieHelpFormatter(RawDescriptionHelpFormatter):
|
||||
return text.splitlines()
|
||||
|
||||
|
||||
# TODO: refactor and design type-annotated data structures
|
||||
# for raw args + parsed args and keep things immutable.
|
||||
class HTTPieArgumentParser(argparse.ArgumentParser):
|
||||
"""Adds additional logic to `argparse.ArgumentParser`.
|
||||
|
||||
@ -66,32 +74,52 @@ class HTTPieArgumentParser(argparse.ArgumentParser):
|
||||
) -> argparse.Namespace:
|
||||
self.env = env
|
||||
self.args, no_options = super().parse_known_args(args, namespace)
|
||||
|
||||
if self.args.debug:
|
||||
self.args.traceback = True
|
||||
|
||||
self.has_stdin_data = (
|
||||
self.env.stdin
|
||||
and not self.args.ignore_stdin
|
||||
and not self.env.stdin_isatty
|
||||
)
|
||||
|
||||
# Arguments processing and environment setup.
|
||||
self._apply_no_options(no_options)
|
||||
self._validate_download_options()
|
||||
self._process_request_type()
|
||||
self._process_download_options()
|
||||
self._setup_standard_streams()
|
||||
self._process_output_options()
|
||||
self._process_pretty_options()
|
||||
self._process_format_options()
|
||||
self._guess_method()
|
||||
self._parse_items()
|
||||
|
||||
if self.has_stdin_data:
|
||||
self._body_from_file(self.env.stdin)
|
||||
self._process_url()
|
||||
self._process_auth()
|
||||
|
||||
if self.args.compress:
|
||||
# TODO: allow --compress with --chunked / --multipart
|
||||
if self.args.chunked:
|
||||
self.error('cannot combine --compress and --chunked')
|
||||
if self.args.multipart:
|
||||
self.error('cannot combine --compress and --multipart')
|
||||
|
||||
return self.args
|
||||
|
||||
def _process_request_type(self):
|
||||
request_type = self.args.request_type
|
||||
self.args.json = request_type is RequestType.JSON
|
||||
self.args.multipart = request_type is RequestType.MULTIPART
|
||||
self.args.form = request_type in {
|
||||
RequestType.FORM,
|
||||
RequestType.MULTIPART,
|
||||
}
|
||||
|
||||
def _process_url(self):
|
||||
if not URL_SCHEME_RE.match(self.args.url):
|
||||
if os.path.basename(env.program_name) == 'https':
|
||||
if os.path.basename(self.env.program_name) == 'https':
|
||||
scheme = 'https://'
|
||||
else:
|
||||
scheme = self.args.default_scheme + "://"
|
||||
scheme = self.args.default_scheme + '://'
|
||||
|
||||
# See if we're using curl style shorthand for localhost (:3000/foo)
|
||||
shorthand = re.match(r'^:(?!:)(\d*)(/?.*)$', self.args.url)
|
||||
@ -104,9 +132,6 @@ class HTTPieArgumentParser(argparse.ArgumentParser):
|
||||
self.args.url += rest
|
||||
else:
|
||||
self.args.url = scheme + self.args.url
|
||||
self._process_auth()
|
||||
|
||||
return self.args
|
||||
|
||||
# noinspection PyShadowingBuiltins
|
||||
def _print_message(self, message, file=None):
|
||||
@ -125,6 +150,7 @@ class HTTPieArgumentParser(argparse.ArgumentParser):
|
||||
Modify `env.stdout` and `env.stdout_isatty` based on args, if needed.
|
||||
|
||||
"""
|
||||
|
||||
self.args.output_file_specified = bool(self.args.output_file)
|
||||
if self.args.download:
|
||||
# FIXME: Come up with a cleaner solution.
|
||||
@ -137,6 +163,7 @@ class HTTPieArgumentParser(argparse.ArgumentParser):
|
||||
# The response body will be treated separately.
|
||||
self.env.stdout = self.env.stderr
|
||||
self.env.stdout_isatty = self.env.stderr_isatty
|
||||
|
||||
elif self.args.output_file:
|
||||
# When not `--download`ing, then `--output` simply replaces
|
||||
# `stdout`. The file is opened for appending, which isn't what
|
||||
@ -153,8 +180,13 @@ class HTTPieArgumentParser(argparse.ArgumentParser):
|
||||
self.env.stdout = self.args.output_file
|
||||
self.env.stdout_isatty = False
|
||||
|
||||
if self.args.quiet:
|
||||
self.env.stderr = self.env.devnull
|
||||
if not (self.args.output_file_specified and not self.args.download):
|
||||
self.env.stdout = self.env.devnull
|
||||
|
||||
def _process_auth(self):
|
||||
# TODO: refactor
|
||||
# TODO: refactor & simplify this method.
|
||||
self.args.auth_plugin = None
|
||||
default_auth_plugin = plugin_manager.get_auth_plugins()[0]
|
||||
auth_type_set = self.args.auth_type is not None
|
||||
@ -177,6 +209,19 @@ class HTTPieArgumentParser(argparse.ArgumentParser):
|
||||
self.args.auth_type = default_auth_plugin.auth_type
|
||||
plugin = plugin_manager.get_auth_plugin(self.args.auth_type)()
|
||||
|
||||
if (not self.args.ignore_netrc
|
||||
and self.args.auth is None
|
||||
and plugin.netrc_parse):
|
||||
# Only host needed, so it’s OK URL not finalized.
|
||||
netrc_credentials = get_netrc_auth(self.args.url)
|
||||
if netrc_credentials:
|
||||
self.args.auth = AuthCredentials(
|
||||
key=netrc_credentials[0],
|
||||
value=netrc_credentials[1],
|
||||
sep=SEPARATOR_CREDENTIALS,
|
||||
orig=SEPARATOR_CREDENTIALS.join(netrc_credentials)
|
||||
)
|
||||
|
||||
if plugin.auth_require and self.args.auth is None:
|
||||
self.error('--auth required')
|
||||
|
||||
@ -243,11 +288,12 @@ class HTTPieArgumentParser(argparse.ArgumentParser):
|
||||
Bytes are always read.
|
||||
|
||||
"""
|
||||
if self.args.data:
|
||||
if self.args.data or self.args.files:
|
||||
self.error('Request body (from stdin or a file) and request '
|
||||
'data (key=value) cannot be mixed. Pass '
|
||||
'--ignore-stdin to let key/value take priority.')
|
||||
self.args.data = getattr(fd, 'buffer', fd).read()
|
||||
'--ignore-stdin to let key/value take priority. '
|
||||
'See https://httpie.org/doc#scripting for details.')
|
||||
self.args.data = getattr(fd, 'buffer', fd)
|
||||
|
||||
def _guess_method(self):
|
||||
"""Set `args.method` if not specified to either POST or GET
|
||||
@ -308,6 +354,7 @@ class HTTPieArgumentParser(argparse.ArgumentParser):
|
||||
self.args.data = request_items.data
|
||||
self.args.files = request_items.files
|
||||
self.args.params = request_items.params
|
||||
self.args.multipart_data = request_items.multipart_data
|
||||
|
||||
if self.args.files and not self.args.form:
|
||||
# `http url @/path/to/file`
|
||||
@ -378,10 +425,20 @@ class HTTPieArgumentParser(argparse.ArgumentParser):
|
||||
# noinspection PyTypeChecker
|
||||
self.args.prettify = PRETTY_MAP[self.args.prettify]
|
||||
|
||||
def _validate_download_options(self):
|
||||
def _process_download_options(self):
|
||||
if self.args.offline:
|
||||
self.args.download = False
|
||||
self.args.download_resume = False
|
||||
return
|
||||
if not self.args.download:
|
||||
if self.args.download_resume:
|
||||
self.error('--continue only works with --download')
|
||||
if self.args.download_resume and not (
|
||||
self.args.download and self.args.output_file):
|
||||
self.error('--continue requires --output to be specified')
|
||||
|
||||
def _process_format_options(self):
|
||||
parsed_options = PARSED_DEFAULT_FORMAT_OPTIONS
|
||||
for options_group in self.args.format_options or []:
|
||||
parsed_options = parse_format_options(options_group, defaults=parsed_options)
|
||||
self.args.format_options = parsed_options
|
||||
|
@ -2,9 +2,10 @@ import argparse
|
||||
import getpass
|
||||
import os
|
||||
import sys
|
||||
from typing import Union, List, Optional
|
||||
from copy import deepcopy
|
||||
from typing import List, Optional, Union
|
||||
|
||||
from httpie.cli.constants import SEPARATOR_CREDENTIALS
|
||||
from httpie.cli.constants import DEFAULT_FORMAT_OPTIONS, SEPARATOR_CREDENTIALS
|
||||
from httpie.sessions import VALID_SESSION_NAME_PATTERN
|
||||
|
||||
|
||||
@ -181,3 +182,63 @@ def readable_file_arg(filename):
|
||||
return filename
|
||||
except IOError as ex:
|
||||
raise argparse.ArgumentTypeError(f'{filename}: {ex.args[1]}')
|
||||
|
||||
|
||||
def parse_format_options(s: str, defaults: Optional[dict]) -> dict:
|
||||
"""
|
||||
Parse `s` and update `defaults` with the parsed values.
|
||||
|
||||
>>> parse_format_options(
|
||||
... defaults={'json': {'indent': 4, 'sort_keys': True}},
|
||||
... s='json.indent:2,json.sort_keys:False',
|
||||
... )
|
||||
{'json': {'indent': 2, 'sort_keys': False}}
|
||||
|
||||
"""
|
||||
value_map = {
|
||||
'true': True,
|
||||
'false': False,
|
||||
}
|
||||
options = deepcopy(defaults or {})
|
||||
for option in s.split(','):
|
||||
try:
|
||||
path, value = option.lower().split(':')
|
||||
section, key = path.split('.')
|
||||
except ValueError:
|
||||
raise argparse.ArgumentTypeError(f'invalid option {option!r}')
|
||||
|
||||
if value in value_map:
|
||||
parsed_value = value_map[value]
|
||||
else:
|
||||
if value.isnumeric():
|
||||
parsed_value = int(value)
|
||||
else:
|
||||
parsed_value = value
|
||||
|
||||
if defaults is None:
|
||||
options.setdefault(section, {})
|
||||
else:
|
||||
try:
|
||||
default_value = defaults[section][key]
|
||||
except KeyError:
|
||||
raise argparse.ArgumentTypeError(
|
||||
f'invalid key {path!r}')
|
||||
|
||||
default_type, parsed_type = type(default_value), type(parsed_value)
|
||||
if parsed_type is not default_type:
|
||||
raise argparse.ArgumentTypeError(
|
||||
'invalid value'
|
||||
f' {value!r} in {option!r}'
|
||||
f' (expected {default_type.__name__}'
|
||||
f' got {parsed_type.__name__})'
|
||||
)
|
||||
|
||||
options[section][key] = parsed_value
|
||||
|
||||
return options
|
||||
|
||||
|
||||
PARSED_DEFAULT_FORMAT_OPTIONS = parse_format_options(
|
||||
s=','.join(DEFAULT_FORMAT_OPTIONS),
|
||||
defaults=None,
|
||||
)
|
||||
|
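A short usage sketch of the parsing above (assuming HTTPie 2.3.x is importable):
repeated ``--format-options`` groups are parsed one by one, each layered on top of
the previously parsed defaults, which is what ``_process_format_options`` in the
argument parser does.

.. code-block:: python

    from httpie.cli.argtypes import (
        PARSED_DEFAULT_FORMAT_OPTIONS, parse_format_options,
    )

    options = PARSED_DEFAULT_FORMAT_OPTIONS
    for group in ['json.indent:2', 'headers.sort:false,json.sort_keys:false']:
        options = parse_format_options(group, defaults=options)

    assert options == {
        'headers': {'sort': False},
        'json': {'format': True, 'indent': 2, 'sort_keys': False},
    }
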
@ -1,8 +1,8 @@
|
||||
"""Parsing and processing of CLI input (args, auth credentials, files, stdin).
|
||||
|
||||
"""
|
||||
import enum
|
||||
import re
|
||||
import ssl
|
||||
|
||||
|
||||
# TODO: Use MultiDict for headers once added to `requests`.
|
||||
@ -11,6 +11,9 @@ import ssl
|
||||
|
||||
# ALPHA *( ALPHA / DIGIT / "+" / "-" / "." )
|
||||
# <https://tools.ietf.org/html/rfc3986#section-3.1>
|
||||
from enum import Enum
|
||||
|
||||
|
||||
URL_SCHEME_RE = re.compile(r'^[a-z][a-z0-9.+-]*://', re.IGNORECASE)
|
||||
|
||||
HTTP_POST = 'POST'
|
||||
@ -24,6 +27,7 @@ SEPARATOR_PROXY = ':'
|
||||
SEPARATOR_DATA_STRING = '='
|
||||
SEPARATOR_DATA_RAW_JSON = ':='
|
||||
SEPARATOR_FILE_UPLOAD = '@'
|
||||
SEPARATOR_FILE_UPLOAD_TYPE = ';type=' # in already parsed file upload path only
|
||||
SEPARATOR_DATA_EMBED_FILE_CONTENTS = '=@'
|
||||
SEPARATOR_DATA_EMBED_RAW_JSON_FILE = ':=@'
|
||||
SEPARATOR_QUERY_PARAM = '=='
|
||||
@ -37,6 +41,12 @@ SEPARATOR_GROUP_DATA_ITEMS = frozenset({
|
||||
SEPARATOR_DATA_EMBED_RAW_JSON_FILE
|
||||
})
|
||||
|
||||
SEPARATORS_GROUP_MULTIPART = frozenset({
|
||||
SEPARATOR_DATA_STRING,
|
||||
SEPARATOR_DATA_EMBED_FILE_CONTENTS,
|
||||
SEPARATOR_FILE_UPLOAD,
|
||||
})
|
||||
|
||||
# Separators for items whose value is a filename to be embedded
|
||||
SEPARATOR_GROUP_DATA_EMBED_ITEMS = frozenset({
|
||||
SEPARATOR_DATA_EMBED_FILE_CONTENTS,
|
||||
@ -83,21 +93,28 @@ PRETTY_MAP = {
|
||||
}
|
||||
PRETTY_STDOUT_TTY_ONLY = object()
|
||||
|
||||
|
||||
DEFAULT_FORMAT_OPTIONS = [
|
||||
'headers.sort:true',
|
||||
'json.format:true',
|
||||
'json.indent:4',
|
||||
'json.sort_keys:true',
|
||||
]
|
||||
SORTED_FORMAT_OPTIONS = [
|
||||
'headers.sort:true',
|
||||
'json.sort_keys:true',
|
||||
]
|
||||
SORTED_FORMAT_OPTIONS_STRING = ','.join(SORTED_FORMAT_OPTIONS)
|
||||
UNSORTED_FORMAT_OPTIONS_STRING = ','.join(
|
||||
option.replace('true', 'false') for option in SORTED_FORMAT_OPTIONS)
|
||||
|
||||
# Defaults
|
||||
OUTPUT_OPTIONS_DEFAULT = OUT_RESP_HEAD + OUT_RESP_BODY
|
||||
OUTPUT_OPTIONS_DEFAULT_STDOUT_REDIRECTED = OUT_RESP_BODY
|
||||
OUTPUT_OPTIONS_DEFAULT_OFFLINE = OUT_REQ_HEAD + OUT_REQ_BODY
|
||||
|
||||
SSL_VERSION_ARG_MAPPING = {
|
||||
'ssl2.3': 'PROTOCOL_SSLv23',
|
||||
'ssl3': 'PROTOCOL_SSLv3',
|
||||
'tls1': 'PROTOCOL_TLSv1',
|
||||
'tls1.1': 'PROTOCOL_TLSv1_1',
|
||||
'tls1.2': 'PROTOCOL_TLSv1_2',
|
||||
'tls1.3': 'PROTOCOL_TLSv1_3',
|
||||
}
|
||||
SSL_VERSION_ARG_MAPPING = {
|
||||
cli_arg: getattr(ssl, ssl_constant)
|
||||
for cli_arg, ssl_constant in SSL_VERSION_ARG_MAPPING.items()
|
||||
if hasattr(ssl, ssl_constant)
|
||||
}
|
||||
|
||||
class RequestType(enum.Enum):
|
||||
FORM = enum.auto()
|
||||
MULTIPART = enum.auto()
|
||||
JSON = enum.auto()
|
||||
|
@ -8,25 +8,30 @@ from textwrap import dedent, wrap
|
||||
from httpie import __doc__, __version__
|
||||
from httpie.cli.argparser import HTTPieArgumentParser
|
||||
from httpie.cli.argtypes import (
|
||||
KeyValueArgType, SessionNameValidator, readable_file_arg,
|
||||
KeyValueArgType, SessionNameValidator,
|
||||
readable_file_arg,
|
||||
)
|
||||
from httpie.cli.constants import (
|
||||
OUTPUT_OPTIONS, OUTPUT_OPTIONS_DEFAULT, OUT_REQ_BODY, OUT_REQ_HEAD,
|
||||
DEFAULT_FORMAT_OPTIONS, OUTPUT_OPTIONS,
|
||||
OUTPUT_OPTIONS_DEFAULT, OUT_REQ_BODY, OUT_REQ_HEAD,
|
||||
OUT_RESP_BODY, OUT_RESP_HEAD, PRETTY_MAP, PRETTY_STDOUT_TTY_ONLY,
|
||||
SEPARATOR_GROUP_ALL_ITEMS, SEPARATOR_PROXY, SSL_VERSION_ARG_MAPPING,
|
||||
RequestType, SEPARATOR_GROUP_ALL_ITEMS, SEPARATOR_PROXY,
|
||||
SORTED_FORMAT_OPTIONS_STRING,
|
||||
UNSORTED_FORMAT_OPTIONS_STRING,
|
||||
)
|
||||
from httpie.output.formatters.colors import (
|
||||
AUTO_STYLE, AVAILABLE_STYLES, DEFAULT_STYLE,
|
||||
)
|
||||
from httpie.plugins import plugin_manager
|
||||
from httpie.plugins.builtin import BuiltinAuthPlugin
|
||||
from httpie.plugins.registry import plugin_manager
|
||||
from httpie.sessions import DEFAULT_SESSIONS_DIR
|
||||
from httpie.ssl import AVAILABLE_SSL_VERSION_ARG_MAPPING, DEFAULT_SSL_CIPHERS
|
||||
|
||||
|
||||
parser = HTTPieArgumentParser(
|
||||
prog='http',
|
||||
description='%s <https://httpie.org>' % __doc__.strip(),
|
||||
epilog=dedent("""
|
||||
epilog=dedent('''
|
||||
For every --OPTION there is also a --no-OPTION that reverts OPTION
|
||||
to its default value.
|
||||
|
||||
@ -34,7 +39,7 @@ parser = HTTPieArgumentParser(
|
||||
|
||||
https://github.com/jakubroztocil/httpie/issues
|
||||
|
||||
"""),
|
||||
'''),
|
||||
)
|
||||
|
||||
#######################################################################
|
||||
@ -43,18 +48,18 @@ parser = HTTPieArgumentParser(
|
||||
|
||||
positional = parser.add_argument_group(
|
||||
title='Positional Arguments',
|
||||
description=dedent("""
|
||||
description=dedent('''
|
||||
These arguments come after any flags and in the order they are listed here.
|
||||
Only URL is required.
|
||||
|
||||
""")
|
||||
''')
|
||||
)
|
||||
positional.add_argument(
|
||||
dest='method',
|
||||
metavar='METHOD',
|
||||
nargs=OPTIONAL,
|
||||
default=None,
|
||||
help="""
|
||||
help='''
|
||||
The HTTP method to be used for the request (GET, POST, PUT, DELETE, ...).
|
||||
|
||||
This argument can be omitted in which case HTTPie will use POST if there
|
||||
@ -63,12 +68,12 @@ positional.add_argument(
|
||||
$ http example.org # => GET
|
||||
$ http example.org hello=world # => POST
|
||||
|
||||
"""
|
||||
'''
|
||||
)
|
||||
positional.add_argument(
|
||||
dest='url',
|
||||
metavar='URL',
|
||||
help="""
|
||||
help='''
|
||||
The scheme defaults to 'http://' if the URL does not include one.
|
||||
(You can override this with: --default-scheme=https)
|
||||
|
||||
@ -77,7 +82,7 @@ positional.add_argument(
|
||||
$ http :3000 # => http://localhost:3000
|
||||
$ http :/foo # => http://localhost/foo
|
||||
|
||||
"""
|
||||
'''
|
||||
)
|
||||
positional.add_argument(
|
||||
dest='request_items',
|
||||
@ -85,7 +90,7 @@ positional.add_argument(
|
||||
nargs=ZERO_OR_MORE,
|
||||
default=None,
|
||||
type=KeyValueArgType(*SEPARATOR_GROUP_ALL_ITEMS),
|
||||
help=r"""
|
||||
help=r'''
|
||||
Optional key-value pairs to be included in the request. The separator used
|
||||
determines the type:
|
||||
|
||||
@ -106,9 +111,10 @@ positional.add_argument(
|
||||
|
||||
awesome:=true amount:=42 colors:='["red", "green", "blue"]'
|
||||
|
||||
'@' Form file fields (only with --form, -f):
|
||||
'@' Form file fields (only with --form or --multipart):
|
||||
|
||||
cs@~/Documents/CV.pdf
|
||||
cv@~/Documents/CV.pdf
|
||||
cv@'~/Documents/CV.pdf;type=application/pdf'
|
||||
|
||||
'=@' A data field like '=', but takes a file path and embeds its content:
|
||||
|
||||
@ -122,7 +128,7 @@ positional.add_argument(
|
||||
|
||||
field-name-with\:colon=value
|
||||
|
||||
"""
|
||||
'''
|
||||
)
|
||||
|
||||
#######################################################################
|
||||
@ -136,26 +142,50 @@ content_type = parser.add_argument_group(
|
||||
|
||||
content_type.add_argument(
|
||||
'--json', '-j',
|
||||
action='store_true',
|
||||
help="""
|
||||
action='store_const',
|
||||
const=RequestType.JSON,
|
||||
dest='request_type',
|
||||
help='''
|
||||
(default) Data items from the command line are serialized as a JSON object.
|
||||
The Content-Type and Accept headers are set to application/json
|
||||
(if not specified).
|
||||
|
||||
"""
|
||||
'''
|
||||
)
|
||||
content_type.add_argument(
|
||||
'--form', '-f',
|
||||
action='store_true',
|
||||
help="""
|
||||
action='store_const',
|
||||
const=RequestType.FORM,
|
||||
dest='request_type',
|
||||
help='''
|
||||
Data items from the command line are serialized as form fields.
|
||||
|
||||
The Content-Type is set to application/x-www-form-urlencoded (if not
|
||||
specified). The presence of any file fields results in a
|
||||
multipart/form-data request.
|
||||
|
||||
"""
|
||||
'''
|
||||
)
|
||||
content_type.add_argument(
|
||||
'--multipart',
|
||||
action='store_const',
|
||||
const=RequestType.MULTIPART,
|
||||
dest='request_type',
|
||||
help='''
|
||||
Similar to --form, but always sends a multipart/form-data
|
||||
request (i.e., even without files).
|
||||
|
||||
'''
|
||||
)
|
||||
content_type.add_argument(
|
||||
'--boundary',
|
||||
help='''
|
||||
Specify a custom boundary string for multipart/form-data requests.
|
||||
Only has effect only together with --form.
|
||||
|
||||
'''
|
||||
)
|
||||
|
||||
|
||||
#######################################################################
|
||||
# Content processing.
|
||||
@ -170,14 +200,14 @@ content_processing.add_argument(
|
||||
'--compress', '-x',
|
||||
action='count',
|
||||
default=0,
|
||||
help="""
|
||||
help='''
|
||||
Content compressed (encoded) with Deflate algorithm.
|
||||
The Content-Encoding header is set to deflate.
|
||||
|
||||
Compression is skipped if it appears that compression ratio is
|
||||
negative. Compression can be forced by repeating the argument.
|
||||
|
||||
"""
|
||||
'''
|
||||
)
|
||||
|
||||
#######################################################################
|
||||
@ -191,12 +221,12 @@ output_processing.add_argument(
|
||||
dest='prettify',
|
||||
default=PRETTY_STDOUT_TTY_ONLY,
|
||||
choices=sorted(PRETTY_MAP.keys()),
|
||||
help="""
|
||||
help='''
|
||||
Controls output processing. The value can be "none" to not prettify
|
||||
the output (default for redirected output), "all" to apply both colors
|
||||
and formatting (default for terminal output), "colors", or "format".
|
||||
|
||||
"""
|
||||
'''
|
||||
)
|
||||
output_processing.add_argument(
|
||||
'--style', '-s',
|
||||
@ -204,10 +234,10 @@ output_processing.add_argument(
|
||||
metavar='STYLE',
|
||||
default=DEFAULT_STYLE,
|
||||
choices=AVAILABLE_STYLES,
|
||||
help="""
|
||||
Output coloring style (default is "{default}"). One of:
|
||||
help='''
|
||||
Output coloring style (default is "{default}"). It can be One of:
|
||||
|
||||
{available_styles}
|
||||
{available_styles}
|
||||
|
||||
The "{auto_style}" style follows your terminal's ANSI color styles.
|
||||
|
||||
@ -215,15 +245,75 @@ output_processing.add_argument(
|
||||
$TERM environment variable is set to "xterm-256color" or similar
|
||||
(e.g., via `export TERM=xterm-256color' in your ~/.bashrc).
|
||||
|
||||
""".format(
|
||||
'''.format(
|
||||
default=DEFAULT_STYLE,
|
||||
available_styles='\n'.join(
|
||||
'{0}{1}'.format(8 * ' ', line.strip())
|
||||
for line in wrap(', '.join(sorted(AVAILABLE_STYLES)), 60)
|
||||
).rstrip(),
|
||||
).strip(),
|
||||
auto_style=AUTO_STYLE,
|
||||
)
|
||||
)
|
||||
_sorted_kwargs = {
|
||||
'action': 'append_const',
|
||||
'const': SORTED_FORMAT_OPTIONS_STRING,
|
||||
'dest': 'format_options'
|
||||
}
|
||||
_unsorted_kwargs = {
|
||||
'action': 'append_const',
|
||||
'const': UNSORTED_FORMAT_OPTIONS_STRING,
|
||||
'dest': 'format_options'
|
||||
}
|
||||
# The closest approx. of the documented resetting to default via --no-<option>.
|
||||
# We hide them from the doc because they act only as low-level aliases here.
|
||||
output_processing.add_argument('--no-unsorted', **_sorted_kwargs, help=SUPPRESS)
|
||||
output_processing.add_argument('--no-sorted', **_unsorted_kwargs, help=SUPPRESS)
|
||||
|
||||
output_processing.add_argument(
|
||||
'--unsorted',
|
||||
**_unsorted_kwargs,
|
||||
help=f'''
|
||||
Disables all sorting while formatting output. It is a shortcut for:
|
||||
|
||||
--format-options={UNSORTED_FORMAT_OPTIONS_STRING}
|
||||
|
||||
'''
|
||||
)
|
||||
output_processing.add_argument(
|
||||
'--sorted',
|
||||
**_sorted_kwargs,
|
||||
help=f'''
|
||||
Re-enables all sorting options while formatting output. It is a shortcut for:
|
||||
|
||||
--format-options={SORTED_FORMAT_OPTIONS_STRING}
|
||||
|
||||
'''
|
||||
)
|
||||
|
||||
|
||||
output_processing.add_argument(
|
||||
'--format-options',
|
||||
action='append',
|
||||
help='''
|
||||
Controls output formatting. Only relevant when formatting is enabled
|
||||
through (explicit or implied) --pretty=all or --pretty=format.
|
||||
The following are the default options:
|
||||
|
||||
{option_list}
|
||||
|
||||
You may use this option multiple times, as well as specify multiple
|
||||
comma-separated options at the same time. For example, this modifies the
|
||||
settings to disable the sorting of JSON keys, and sets the indent size to 2:
|
||||
|
||||
--format-options json.sort_keys:false,json.indent:2
|
||||
|
||||
This is something you will typically put into your config file.
|
||||
|
||||
'''.format(
|
||||
option_list='\n'.join(
|
||||
(8 * ' ') + option for option in DEFAULT_FORMAT_OPTIONS).strip()
|
||||
)
|
||||
)
|
||||
|
||||
#######################################################################
|
||||
# Output options
|
||||
@ -234,7 +324,7 @@ output_options.add_argument(
|
||||
'--print', '-p',
|
||||
dest='output_options',
|
||||
metavar='WHAT',
|
||||
help=f"""
|
||||
help=f'''
|
||||
String specifying what the output should contain:
|
||||
|
||||
'{OUT_REQ_HEAD}' request headers
|
||||
@ -247,70 +337,70 @@ output_options.add_argument(
|
||||
If the output is piped to another program or to a file, then only the
|
||||
response body is printed by default.
|
||||
|
||||
"""
|
||||
'''
|
||||
)
|
||||
output_options.add_argument(
|
||||
'--headers', '-h',
|
||||
dest='output_options',
|
||||
action='store_const',
|
||||
const=OUT_RESP_HEAD,
|
||||
help=f"""
|
||||
help=f'''
|
||||
Print only the response headers. Shortcut for --print={OUT_RESP_HEAD}.
|
||||
|
||||
"""
|
||||
'''
|
||||
)
|
||||
output_options.add_argument(
|
||||
'--body', '-b',
|
||||
dest='output_options',
|
||||
action='store_const',
|
||||
const=OUT_RESP_BODY,
|
||||
help=f"""
|
||||
help=f'''
|
||||
Print only the response body. Shortcut for --print={OUT_RESP_BODY}.
|
||||
|
||||
"""
|
||||
'''
|
||||
)
|
||||
|
||||
output_options.add_argument(
|
||||
'--verbose', '-v',
|
||||
dest='verbose',
|
||||
action='store_true',
|
||||
help="""
|
||||
help='''
|
||||
Verbose output. Print the whole request as well as the response. Also print
|
||||
any intermediary requests/responses (such as redirects).
|
||||
It's a shortcut for: --all --print={0}
|
||||
|
||||
""".format(''.join(OUTPUT_OPTIONS))
|
||||
'''.format(''.join(OUTPUT_OPTIONS))
|
||||
)
|
||||
output_options.add_argument(
|
||||
'--all',
|
||||
default=False,
|
||||
action='store_true',
|
||||
help="""
|
||||
help='''
|
||||
By default, only the final request/response is shown. Use this flag to show
|
||||
any intermediary requests/responses as well. Intermediary requests include
|
||||
followed redirects (with --follow), the first unauthorized request when
|
||||
Digest auth is used (--auth=digest), etc.
|
||||
|
||||
"""
|
||||
'''
|
||||
)
|
||||
output_options.add_argument(
|
||||
'--history-print', '-P',
|
||||
dest='output_options_history',
|
||||
metavar='WHAT',
|
||||
help="""
|
||||
help='''
|
||||
The same as --print, -p but applies only to intermediary requests/responses
|
||||
(such as redirects) when their inclusion is enabled with --all. If this
|
||||
options is not specified, then they are formatted the same way as the final
|
||||
response.
|
||||
|
||||
"""
|
||||
'''
|
||||
)
|
||||
output_options.add_argument(
|
||||
'--stream', '-S',
|
||||
action='store_true',
|
||||
default=False,
|
||||
help="""
|
||||
Always stream the output by line, i.e., behave like `tail -f'.
|
||||
help='''
|
||||
Always stream the response body by line, i.e., behave like `tail -f'.
|
||||
|
||||
Without --stream and with --pretty (either set or implied),
|
||||
HTTPie fetches the whole response before it outputs the processed data.
|
||||
@ -321,19 +411,19 @@ output_options.add_argument(
|
||||
It is useful also without --pretty: It ensures that the output is flushed
|
||||
more often and in smaller chunks.
|
||||
|
||||
"""
|
||||
'''
|
||||
)
|
||||
output_options.add_argument(
|
||||
'--output', '-o',
|
||||
type=FileType('a+b'),
|
||||
dest='output_file',
|
||||
metavar='FILE',
|
||||
help="""
|
||||
help='''
|
||||
Save output to FILE instead of stdout. If --download is also set, then only
|
||||
the response body is saved to FILE. Other parts of the HTTP exchange are
|
||||
printed to stderr.
|
||||
|
||||
"""
|
||||
'''
|
||||
|
||||
)
|
||||
|
||||
@ -341,12 +431,12 @@ output_options.add_argument(
|
||||
'--download', '-d',
|
||||
action='store_true',
|
||||
default=False,
|
||||
help="""
|
||||
help='''
|
||||
Do not print the response body to stdout. Rather, download it and store it
|
||||
in a file. The filename is guessed unless specified with --output
|
||||
[filename]. This action is similar to the default behaviour of wget.
|
||||
|
||||
"""
|
||||
'''
|
||||
)
|
||||
|
||||
output_options.add_argument(
|
||||
@ -354,11 +444,22 @@ output_options.add_argument(
|
||||
dest='download_resume',
|
||||
action='store_true',
|
||||
default=False,
|
||||
help="""
|
||||
help='''
|
||||
Resume an interrupted download. Note that the --output option needs to be
|
||||
specified as well.
|
||||
|
||||
"""
|
||||
'''
|
||||
)
|
||||
|
||||
output_options.add_argument(
|
||||
'--quiet', '-q',
|
||||
action='store_true',
|
||||
default=False,
|
||||
help='''
|
||||
Do not print to stdout or stderr.
|
||||
stdout is still redirected if --output is specified.
|
||||
Flag doesn't affect behaviour of download beyond not printing to terminal.
|
||||
'''
|
||||
)
|
||||
|
||||
#######################################################################
|
||||
@ -376,7 +477,7 @@ sessions.add_argument(
|
||||
'--session',
|
||||
metavar='SESSION_NAME_OR_PATH',
|
||||
type=session_name_validator,
|
||||
help=f"""
|
||||
help=f'''
|
||||
Create, or reuse and update a session. Within a session, custom headers,
|
||||
auth credential, as well as any cookies sent by the server persist between
|
||||
requests.
|
||||
@ -385,17 +486,17 @@ sessions.add_argument(
|
||||
|
||||
{DEFAULT_SESSIONS_DIR}/<HOST>/<SESSION_NAME>.json.
|
||||
|
||||
"""
|
||||
'''
|
||||
)
|
||||
sessions.add_argument(
|
||||
'--session-read-only',
|
||||
metavar='SESSION_NAME_OR_PATH',
|
||||
type=session_name_validator,
|
||||
help="""
|
||||
help='''
|
||||
Create or read a session without updating it form the request/response
|
||||
exchange.
|
||||
|
||||
"""
|
||||
'''
|
||||
)
|
||||
|
||||
#######################################################################
|
||||
@ -408,11 +509,11 @@ auth.add_argument(
|
||||
'--auth', '-a',
|
||||
default=None,
|
||||
metavar='USER[:PASS]',
|
||||
help="""
|
||||
help='''
|
||||
If only the username is provided (-a username), HTTPie will prompt
|
||||
for the password.
|
||||
|
||||
""",
|
||||
''',
|
||||
)
|
||||
|
||||
|
||||
@ -431,12 +532,12 @@ auth.add_argument(
|
||||
'--auth-type', '-A',
|
||||
choices=_AuthTypeLazyChoices(),
|
||||
default=None,
|
||||
help="""
|
||||
help='''
|
||||
The authentication mechanism to be used. Defaults to "{default}".
|
||||
|
||||
{types}
|
||||
|
||||
""".format(default=_auth_plugins[0].auth_type, types='\n '.join(
|
||||
'''.format(default=_auth_plugins[0].auth_type, types='\n '.join(
|
||||
'"{type}": {name}{package}{description}'.format(
|
||||
type=plugin.auth_type,
|
||||
name=plugin.name,
|
||||
@ -456,10 +557,10 @@ auth.add_argument(
|
||||
'--ignore-netrc',
|
||||
default=False,
|
||||
action='store_true',
|
||||
help="""
|
||||
help='''
|
||||
Ignore credentials from .netrc.
|
||||
|
||||
""",
|
||||
''',
|
||||
)
|
||||
|
||||
#######################################################################
|
||||
@ -472,9 +573,9 @@ network.add_argument(
|
||||
'--offline',
|
||||
default=False,
|
||||
action='store_true',
|
||||
help="""
|
||||
help='''
|
||||
Build the request and print it but don’t actually send it.
|
||||
"""
|
||||
'''
|
||||
)
|
||||
network.add_argument(
|
||||
'--proxy',
|
||||
@ -482,43 +583,43 @@ network.add_argument(
|
||||
action='append',
|
||||
metavar='PROTOCOL:PROXY_URL',
|
||||
type=KeyValueArgType(SEPARATOR_PROXY),
|
||||
help="""
|
||||
help='''
|
||||
String mapping protocol to the URL of the proxy
|
||||
(e.g. http:http://foo.bar:3128). You can specify multiple proxies with
|
||||
different protocols. The environment variables $ALL_PROXY, $HTTP_PROXY,
|
||||
and $HTTPS_proxy are supported as well.
|
||||
|
||||
"""
|
||||
'''
|
||||
)
|
||||
network.add_argument(
|
||||
'--follow', '-F',
|
||||
default=False,
|
||||
action='store_true',
|
||||
help="""
|
||||
help='''
|
||||
Follow 30x Location redirects.
|
||||
|
||||
"""
|
||||
'''
|
||||
)
|
||||
|
||||
network.add_argument(
|
||||
'--max-redirects',
|
||||
type=int,
|
||||
default=30,
|
||||
help="""
|
||||
help='''
|
||||
By default, requests have a limit of 30 redirects (works with --follow).
|
||||
|
||||
"""
|
||||
'''
|
||||
)
|
||||
|
||||
network.add_argument(
|
||||
'--max-headers',
|
||||
type=int,
|
||||
default=0,
|
||||
help="""
|
||||
help='''
|
||||
The maximum number of response headers to be read before giving up
|
||||
(default 0, i.e., no limit).
|
||||
|
||||
"""
|
||||
'''
|
||||
)
|
||||
|
||||
network.add_argument(
|
||||
@ -526,7 +627,7 @@ network.add_argument(
|
||||
type=float,
|
||||
default=0,
|
||||
metavar='SECONDS',
|
||||
help="""
|
||||
help='''
|
||||
The connection timeout of the request in seconds.
|
||||
The default value is 0, i.e., there is no timeout limit.
|
||||
This is not a time limit on the entire response download;
|
||||
@ -534,13 +635,13 @@ network.add_argument(
|
||||
timeout seconds (more precisely, if no bytes have been received on
|
||||
the underlying socket for timeout seconds).
|
||||
|
||||
"""
|
||||
'''
|
||||
)
|
||||
network.add_argument(
|
||||
'--check-status',
|
||||
default=False,
|
||||
action='store_true',
|
||||
help="""
|
||||
help='''
|
||||
By default, HTTPie exits with 0 when no network or other fatal errors
|
||||
occur. This flag instructs HTTPie to also check the HTTP status code and
|
||||
exit with an error if the status indicates one.
|
||||
@ -550,6 +651,24 @@ network.add_argument(
|
||||
3xx (Redirect) and --follow hasn't been set, then the exit status is 3.
|
||||
Also an error message is written to stderr if stdout is redirected.
|
||||
|
||||
'''
|
||||
)
|
||||
network.add_argument(
|
||||
'--path-as-is',
|
||||
default=False,
|
||||
action='store_true',
|
||||
help='''
|
||||
Bypass dot segment (/../ or /./) URL squashing.
|
||||
|
||||
'''
|
||||
)
|
||||
|
||||
network.add_argument(
|
||||
'--chunked',
|
||||
default=False,
|
||||
action='store_true',
|
||||
help="""
|
||||
|
||||
"""
|
||||
)
|
||||
|
||||
@ -561,47 +680,58 @@ ssl = parser.add_argument_group(title='SSL')
|
||||
ssl.add_argument(
|
||||
'--verify',
|
||||
default='yes',
|
||||
help="""
|
||||
help='''
|
||||
Set to "no" (or "false") to skip checking the host's SSL certificate.
|
||||
Defaults to "yes" ("true"). You can also pass the path to a CA_BUNDLE file
|
||||
for private certs. (Or you can set the REQUESTS_CA_BUNDLE environment
|
||||
variable instead.)
|
||||
"""
|
||||
'''
|
||||
)
|
||||
ssl.add_argument(
|
||||
'--ssl', # TODO: Maybe something more general, such as --secure-protocol?
|
||||
'--ssl',
|
||||
dest='ssl_version',
|
||||
choices=list(sorted(SSL_VERSION_ARG_MAPPING.keys())),
|
||||
help="""
|
||||
choices=list(sorted(AVAILABLE_SSL_VERSION_ARG_MAPPING.keys())),
|
||||
help='''
|
||||
The desired protocol version to use. This will default to
|
||||
SSL v2.3 which will negotiate the highest protocol that both
|
||||
the server and your installation of OpenSSL support. Available protocols
|
||||
may vary depending on OpenSSL installation (only the supported ones
|
||||
are shown here).
|
||||
|
||||
"""
|
||||
'''
|
||||
)
|
||||
ssl.add_argument(
|
||||
'--ciphers',
|
||||
help=f'''
|
||||
|
||||
A string in the OpenSSL cipher list format. By default, the following
|
||||
is used:
|
||||
|
||||
{DEFAULT_SSL_CIPHERS}
|
||||
|
||||
'''
|
||||
)
|
||||
ssl.add_argument(
|
||||
'--cert',
|
||||
default=None,
|
||||
type=readable_file_arg,
|
||||
help="""
|
||||
help='''
|
||||
You can specify a local cert to use as client side SSL certificate.
|
||||
This file may either contain both private key and certificate or you may
|
||||
specify --cert-key separately.
|
||||
|
||||
"""
|
||||
'''
|
||||
)
|
||||
|
||||
ssl.add_argument(
|
||||
'--cert-key',
|
||||
default=None,
|
||||
type=readable_file_arg,
|
||||
help="""
|
||||
help='''
|
||||
The private key to use with SSL. Only needed if --cert is given and the
|
||||
certificate file does not contain the private key.
|
||||
|
||||
"""
|
||||
'''
|
||||
)
|
||||
|
||||
#######################################################################
|
||||
@ -614,53 +744,53 @@ troubleshooting.add_argument(
|
||||
'--ignore-stdin', '-I',
|
||||
action='store_true',
|
||||
default=False,
|
||||
help="""
|
||||
help='''
|
||||
Do not attempt to read stdin.
|
||||
|
||||
"""
|
||||
'''
|
||||
)
|
||||
troubleshooting.add_argument(
|
||||
'--help',
|
||||
action='help',
|
||||
default=SUPPRESS,
|
||||
help="""
|
||||
help='''
|
||||
Show this help message and exit.
|
||||
|
||||
"""
|
||||
'''
|
||||
)
|
||||
troubleshooting.add_argument(
|
||||
'--version',
|
||||
action='version',
|
||||
version=__version__,
|
||||
help="""
|
||||
help='''
|
||||
Show version and exit.
|
||||
|
||||
"""
|
||||
'''
|
||||
)
|
||||
troubleshooting.add_argument(
|
||||
'--traceback',
|
||||
action='store_true',
|
||||
default=False,
|
||||
help="""
|
||||
help='''
|
||||
Prints the exception traceback should one occur.
|
||||
|
||||
"""
|
||||
'''
|
||||
)
|
||||
troubleshooting.add_argument(
|
||||
'--default-scheme',
|
||||
default="http",
|
||||
help="""
|
||||
help='''
|
||||
The default scheme to use if not specified in the URL.
|
||||
|
||||
"""
|
||||
'''
|
||||
)
|
||||
troubleshooting.add_argument(
|
||||
'--debug',
|
||||
action='store_true',
|
||||
default=False,
|
||||
help="""
|
||||
help='''
|
||||
Prints the exception traceback should one occur, as well as other
|
||||
information useful for debugging HTTPie itself and for reporting bugs.
|
||||
|
||||
"""
|
||||
'''
|
||||
)
|
||||
|
@ -34,20 +34,25 @@ class MultiValueOrderedDict(OrderedDict):
|
||||
super().__setitem__(key, [self[key]])
|
||||
self[key].append(value)
|
||||
|
||||
|
||||
class RequestQueryParamsDict(MultiValueOrderedDict):
|
||||
pass
|
||||
|
||||
|
||||
class RequestDataDict(MultiValueOrderedDict):
|
||||
|
||||
def items(self):
|
||||
for key, values in super(MultiValueOrderedDict, self).items():
|
||||
for key, values in super().items():
|
||||
if not isinstance(values, list):
|
||||
values = [values]
|
||||
for value in values:
|
||||
yield key, value
|
||||
|
||||
|
||||
class RequestQueryParamsDict(MultiValueOrderedDict):
|
||||
pass
|
||||
|
||||
|
||||
class RequestDataDict(MultiValueOrderedDict):
|
||||
pass
|
||||
|
||||
|
||||
class MultipartRequestDataDict(MultiValueOrderedDict):
|
||||
pass
|
||||
|
||||
|
||||
class RequestFilesDict(RequestDataDict):
|
||||
pass
|
||||
|
@ -4,12 +4,15 @@ from typing import Callable, Dict, IO, List, Optional, Tuple, Union
|
||||
|
||||
from httpie.cli.argtypes import KeyValueArg
|
||||
from httpie.cli.constants import (
|
||||
SEPARATOR_DATA_EMBED_FILE_CONTENTS, SEPARATOR_DATA_EMBED_RAW_JSON_FILE,
|
||||
SEPARATORS_GROUP_MULTIPART, SEPARATOR_DATA_EMBED_FILE_CONTENTS,
|
||||
SEPARATOR_DATA_EMBED_RAW_JSON_FILE,
|
||||
SEPARATOR_DATA_RAW_JSON, SEPARATOR_DATA_STRING, SEPARATOR_FILE_UPLOAD,
|
||||
SEPARATOR_HEADER, SEPARATOR_HEADER_EMPTY, SEPARATOR_QUERY_PARAM,
|
||||
SEPARATOR_FILE_UPLOAD_TYPE, SEPARATOR_HEADER, SEPARATOR_HEADER_EMPTY,
|
||||
SEPARATOR_QUERY_PARAM,
|
||||
)
|
||||
from httpie.cli.dicts import (
|
||||
RequestDataDict, RequestFilesDict, RequestHeadersDict, RequestJSONDataDict,
|
||||
MultipartRequestDataDict, RequestDataDict, RequestFilesDict,
|
||||
RequestHeadersDict, RequestJSONDataDict,
|
||||
RequestQueryParamsDict,
|
||||
)
|
||||
from httpie.cli.exceptions import ParseError
|
||||
@ -23,6 +26,8 @@ class RequestItems:
|
||||
self.data = RequestDataDict() if as_form else RequestJSONDataDict()
|
||||
self.files = RequestFilesDict()
|
||||
self.params = RequestQueryParamsDict()
|
||||
# To preserve the order of fields in file upload multipart requests.
|
||||
self.multipart_data = MultipartRequestDataDict()
|
||||
|
||||
@classmethod
|
||||
def from_args(
|
||||
@ -68,7 +73,11 @@ class RequestItems:
|
||||
|
||||
for arg in request_item_args:
|
||||
processor_func, target_dict = rules[arg.sep]
|
||||
target_dict[arg.key] = processor_func(arg)
|
||||
value = processor_func(arg)
|
||||
target_dict[arg.key] = value
|
||||
|
||||
if arg.sep in SEPARATORS_GROUP_MULTIPART:
|
||||
instance.multipart_data[arg.key] = value
|
||||
|
||||
return instance
|
||||
|
||||
@ -95,16 +104,17 @@ def process_query_param_arg(arg: KeyValueArg) -> str:
|
||||
|
||||
|
||||
def process_file_upload_arg(arg: KeyValueArg) -> Tuple[str, IO, str]:
|
||||
filename = arg.value
|
||||
parts = arg.value.split(SEPARATOR_FILE_UPLOAD_TYPE)
|
||||
filename = parts[0]
|
||||
mime_type = parts[1] if len(parts) > 1 else None
|
||||
try:
|
||||
with open(os.path.expanduser(filename), 'rb') as f:
|
||||
contents = f.read()
|
||||
f = open(os.path.expanduser(filename), 'rb')
|
||||
except IOError as e:
|
||||
raise ParseError('"%s": %s' % (arg.orig, e))
|
||||
return (
|
||||
os.path.basename(filename),
|
||||
BytesIO(contents),
|
||||
get_content_type(filename),
|
||||
f,
|
||||
mime_type or get_content_type(filename),
|
||||
)
|
||||
|
||||
|
||||
|
148
httpie/client.py
@ -2,40 +2,38 @@ import argparse
|
||||
import http.client
|
||||
import json
|
||||
import sys
|
||||
import zlib
|
||||
from contextlib import contextmanager
|
||||
from pathlib import Path
|
||||
from typing import Iterable, Union
|
||||
from typing import Callable, Iterable, Union
|
||||
from urllib.parse import urlparse, urlunparse
|
||||
|
||||
import requests
|
||||
from requests.adapters import HTTPAdapter
|
||||
|
||||
# noinspection PyPackageRequirements
|
||||
import urllib3
|
||||
from httpie import __version__
|
||||
from httpie.cli.constants import SSL_VERSION_ARG_MAPPING
|
||||
from httpie.cli.dicts import RequestHeadersDict
|
||||
from httpie.plugins import plugin_manager
|
||||
from httpie.plugins.registry import plugin_manager
|
||||
from httpie.sessions import get_httpie_session
|
||||
from httpie.utils import repr_dict
|
||||
from httpie.ssl import AVAILABLE_SSL_VERSION_ARG_MAPPING, HTTPieHTTPSAdapter
|
||||
from httpie.uploads import (
|
||||
compress_request, prepare_request_body,
|
||||
get_multipart_data_and_content_type,
|
||||
)
|
||||
from httpie.utils import get_expired_cookies, repr_dict
|
||||
|
||||
|
||||
try:
|
||||
# noinspection PyPackageRequirements
|
||||
import urllib3
|
||||
# <https://urllib3.readthedocs.io/en/latest/security.html>
|
||||
urllib3.disable_warnings()
|
||||
except (ImportError, AttributeError):
|
||||
pass
|
||||
|
||||
urllib3.disable_warnings()
|
||||
|
||||
FORM_CONTENT_TYPE = 'application/x-www-form-urlencoded; charset=utf-8'
|
||||
JSON_CONTENT_TYPE = 'application/json'
|
||||
JSON_ACCEPT = f'{JSON_CONTENT_TYPE}, */*'
|
||||
JSON_ACCEPT = f'{JSON_CONTENT_TYPE}, */*;q=0.5'
|
||||
DEFAULT_UA = f'HTTPie/{__version__}'
|
||||
|
||||
|
||||
def collect_messages(
|
||||
args: argparse.Namespace,
|
||||
config_dir: Path,
|
||||
request_body_read_callback: Callable[[bytes], None] = None,
|
||||
) -> Iterable[Union[requests.PreparedRequest, requests.Response]]:
|
||||
httpie_session = None
|
||||
httpie_session_headers = None
|
||||
@ -51,11 +49,14 @@ def collect_messages(
|
||||
request_kwargs = make_request_kwargs(
|
||||
args=args,
|
||||
base_headers=httpie_session_headers,
|
||||
request_body_read_callback=request_body_read_callback
|
||||
)
|
||||
send_kwargs = make_send_kwargs(args)
|
||||
send_kwargs_mergeable_from_env = make_send_kwargs_mergeable_from_env(args)
|
||||
requests_session = build_requests_session(
|
||||
ssl_version=args.ssl_version,
|
||||
ciphers=args.ciphers,
|
||||
verify=bool(send_kwargs_mergeable_from_env['verify'])
|
||||
)
|
||||
|
||||
if httpie_session:
|
||||
@ -77,9 +78,18 @@ def collect_messages(
|
||||
|
||||
request = requests.Request(**request_kwargs)
|
||||
prepared_request = requests_session.prepare_request(request)
|
||||
if args.path_as_is:
|
||||
prepared_request.url = ensure_path_as_is(
|
||||
orig_url=args.url,
|
||||
prepped_url=prepared_request.url,
|
||||
)
|
||||
if args.compress and prepared_request.body:
|
||||
compress_body(prepared_request, always=args.compress > 1)
|
||||
compress_request(
|
||||
request=prepared_request,
|
||||
always=args.compress > 1,
|
||||
)
|
||||
response_count = 0
|
||||
expired_cookies = []
|
||||
while prepared_request:
|
||||
yield prepared_request
|
||||
if not args.offline:
|
||||
@ -93,6 +103,12 @@ def collect_messages(
|
||||
**send_kwargs_merged,
|
||||
**send_kwargs,
|
||||
)
|
||||
|
||||
# noinspection PyProtectedMember
|
||||
expired_cookies += get_expired_cookies(
|
||||
headers=response.raw._original_response.msg._headers
|
||||
)
|
||||
|
||||
response_count += 1
|
||||
if response.next:
|
||||
if args.max_redirects and response_count == args.max_redirects:
|
||||
@ -108,6 +124,10 @@ def collect_messages(
|
||||
if httpie_session:
|
||||
if httpie_session.is_new() or not args.session_read_only:
|
||||
httpie_session.cookies = requests_session.cookies
|
||||
httpie_session.remove_cookies(
|
||||
# TODO: take path & domain into account?
|
||||
cookie['name'] for cookie in expired_cookies
|
||||
)
|
||||
httpie_session.save()
|
||||
|
||||
|
||||
@ -115,6 +135,7 @@ def collect_messages(
|
||||
@contextmanager
|
||||
def max_headers(limit):
|
||||
# <https://github.com/jakubroztocil/httpie/issues/802>
|
||||
# noinspection PyUnresolvedReferences
|
||||
orig = http.client._MAXHEADERS
|
||||
http.client._MAXHEADERS = limit or float('Inf')
|
||||
try:
|
||||
@ -123,45 +144,23 @@ def max_headers(limit):
|
||||
http.client._MAXHEADERS = orig
|
||||
|
||||
|
||||
def compress_body(request: requests.PreparedRequest, always: bool):
|
||||
deflater = zlib.compressobj()
|
||||
body_bytes = (
|
||||
request.body
|
||||
if isinstance(request.body, bytes)
|
||||
else request.body.encode()
|
||||
)
|
||||
deflated_data = deflater.compress(body_bytes)
|
||||
deflated_data += deflater.flush()
|
||||
is_economical = len(deflated_data) < len(body_bytes)
|
||||
if is_economical or always:
|
||||
request.body = deflated_data
|
||||
request.headers['Content-Encoding'] = 'deflate'
|
||||
request.headers['Content-Length'] = str(len(deflated_data))
|
||||
|
||||
|
||||
class HTTPieHTTPSAdapter(HTTPAdapter):
|
||||
|
||||
def __init__(self, ssl_version=None, **kwargs):
|
||||
self._ssl_version = ssl_version
|
||||
super().__init__(**kwargs)
|
||||
|
||||
def init_poolmanager(self, *args, **kwargs):
|
||||
kwargs['ssl_version'] = self._ssl_version
|
||||
super().init_poolmanager(*args, **kwargs)
|
||||
|
||||
|
||||
def build_requests_session(
|
||||
verify: bool,
|
||||
ssl_version: str = None,
|
||||
ciphers: str = None,
|
||||
) -> requests.Session:
|
||||
requests_session = requests.Session()
|
||||
|
||||
# Install our adapter.
|
||||
requests_session.mount('https://', HTTPieHTTPSAdapter(
|
||||
https_adapter = HTTPieHTTPSAdapter(
|
||||
ciphers=ciphers,
|
||||
verify=verify,
|
||||
ssl_version=(
|
||||
SSL_VERSION_ARG_MAPPING[ssl_version]
|
||||
AVAILABLE_SSL_VERSION_ARG_MAPPING[ssl_version]
|
||||
if ssl_version else None
|
||||
)
|
||||
))
|
||||
),
|
||||
)
|
||||
requests_session.mount('https://', https_adapter)
|
||||
|
||||
# Install adapters from plugins.
|
||||
for plugin_cls in plugin_manager.get_transport_plugins():
|
||||
@ -243,12 +242,14 @@ def make_send_kwargs_mergeable_from_env(args: argparse.Namespace) -> dict:
|
||||
|
||||
def make_request_kwargs(
|
||||
args: argparse.Namespace,
|
||||
base_headers: RequestHeadersDict = None
|
||||
base_headers: RequestHeadersDict = None,
|
||||
request_body_read_callback=lambda chunk: chunk
|
||||
) -> dict:
|
||||
"""
|
||||
Translate our `args` into `requests.Request` keyword arguments.
|
||||
|
||||
"""
|
||||
files = args.files
|
||||
# Serialize JSON data, if needed.
|
||||
data = args.data
|
||||
auto_json = data and not args.form
|
||||
@ -265,16 +266,59 @@ def make_request_kwargs(
|
||||
if base_headers:
|
||||
headers.update(base_headers)
|
||||
headers.update(args.headers)
|
||||
if args.offline and args.chunked and 'Transfer-Encoding' not in headers:
|
||||
# When online, we let requests set the header instead to be able more
|
||||
# easily verify chunking is taking place.
|
||||
headers['Transfer-Encoding'] = 'chunked'
|
||||
headers = finalize_headers(headers)
|
||||
|
||||
if (args.form and files) or args.multipart:
|
||||
data, headers['Content-Type'] = get_multipart_data_and_content_type(
|
||||
data=args.multipart_data,
|
||||
boundary=args.boundary,
|
||||
content_type=args.headers.get('Content-Type'),
|
||||
)
|
||||
|
||||
kwargs = {
|
||||
'method': args.method.lower(),
|
||||
'url': args.url,
|
||||
'headers': headers,
|
||||
'data': data,
|
||||
'data': prepare_request_body(
|
||||
body=data,
|
||||
body_read_callback=request_body_read_callback,
|
||||
chunked=args.chunked,
|
||||
offline=args.offline,
|
||||
content_length_header_value=headers.get('Content-Length'),
|
||||
),
|
||||
'auth': args.auth,
|
||||
'params': args.params,
|
||||
'files': args.files,
|
||||
'params': args.params.items(),
|
||||
}
|
||||
|
||||
return kwargs
|
||||
|
||||
|
||||
def ensure_path_as_is(orig_url: str, prepped_url: str) -> str:
|
||||
"""
|
||||
Handle `--path-as-is` by replacing the path component of the prepared
|
||||
URL with the path component from the original URL. Other parts stay
|
||||
untouched because other (welcome) processing on the URL might have
|
||||
taken place.
|
||||
|
||||
<https://github.com/jakubroztocil/httpie/issues/895>
|
||||
|
||||
|
||||
<https://ec.haxx.se/http/http-basics#path-as-is>
|
||||
<https://curl.haxx.se/libcurl/c/CURLOPT_PATH_AS_IS.html>
|
||||
|
||||
>>> ensure_path_as_is('http://foo/../', 'http://foo/?foo=bar')
|
||||
'http://foo/../?foo=bar'
|
||||
|
||||
"""
|
||||
parsed_orig, parsed_prepped = urlparse(orig_url), urlparse(prepped_url)
|
||||
final_dict = {
|
||||
# noinspection PyProtectedMember
|
||||
**parsed_prepped._asdict(),
|
||||
'path': parsed_orig.path,
|
||||
}
|
||||
final_url = urlunparse(tuple(final_dict.values()))
|
||||
return final_url
|
||||
|
@ -8,11 +8,54 @@ from httpie import __version__
|
||||
from httpie.compat import is_windows
|
||||
|
||||
|
||||
DEFAULT_CONFIG_DIR = Path(os.environ.get(
|
||||
'HTTPIE_CONFIG_DIR',
|
||||
os.path.expanduser('~/.httpie') if not is_windows else
|
||||
os.path.expandvars(r'%APPDATA%\\httpie')
|
||||
))
|
||||
ENV_XDG_CONFIG_HOME = 'XDG_CONFIG_HOME'
|
||||
ENV_HTTPIE_CONFIG_DIR = 'HTTPIE_CONFIG_DIR'
|
||||
DEFAULT_CONFIG_DIRNAME = 'httpie'
|
||||
DEFAULT_RELATIVE_XDG_CONFIG_HOME = Path('.config')
|
||||
DEFAULT_RELATIVE_LEGACY_CONFIG_DIR = Path('.httpie')
|
||||
DEFAULT_WINDOWS_CONFIG_DIR = Path(
|
||||
os.path.expandvars('%APPDATA%')) / DEFAULT_CONFIG_DIRNAME
|
||||
|
||||
|
||||
def get_default_config_dir() -> Path:
|
||||
"""
|
||||
Return the path to the httpie configuration directory.
|
||||
|
||||
This directory isn't guaranteed to exist, and nor are any of its
|
||||
ancestors (only the legacy ~/.httpie, if returned, is guaranteed to exist).
|
||||
|
||||
XDG Base Directory Specification support:
|
||||
|
||||
<https://wiki.archlinux.org/index.php/XDG_Base_Directory>
|
||||
|
||||
$XDG_CONFIG_HOME is supported; $XDG_CONFIG_DIRS is not
|
||||
|
||||
"""
|
||||
# 1. explicitly set through env
|
||||
env_config_dir = os.environ.get(ENV_HTTPIE_CONFIG_DIR)
|
||||
if env_config_dir:
|
||||
return Path(env_config_dir)
|
||||
|
||||
# 2. Windows
|
||||
if is_windows:
|
||||
return DEFAULT_WINDOWS_CONFIG_DIR
|
||||
|
||||
home_dir = Path.home()
|
||||
|
||||
# 3. legacy ~/.httpie
|
||||
legacy_config_dir = home_dir / DEFAULT_RELATIVE_LEGACY_CONFIG_DIR
|
||||
if legacy_config_dir.exists():
|
||||
return legacy_config_dir
|
||||
|
||||
# 4. XDG
|
||||
xdg_config_home_dir = os.environ.get(
|
||||
ENV_XDG_CONFIG_HOME, # 4.1. explicit
|
||||
home_dir / DEFAULT_RELATIVE_XDG_CONFIG_HOME # 4.2. default
|
||||
)
|
||||
return Path(xdg_config_home_dir) / DEFAULT_CONFIG_DIRNAME
|
||||
|
||||
|
||||
DEFAULT_CONFIG_DIR = get_default_config_dir()
|
||||
|
||||
|
||||
class ConfigFileError(Exception):
|
||||
@ -65,16 +108,14 @@ class BaseConfigDict(dict):
|
||||
|
||||
self.ensure_directory()
|
||||
|
||||
json_string = json.dumps(
|
||||
obj=self,
|
||||
indent=4,
|
||||
sort_keys=True,
|
||||
ensure_ascii=True,
|
||||
)
|
||||
try:
|
||||
with self.path.open('w') as f:
|
||||
json.dump(
|
||||
obj=self,
|
||||
fp=f,
|
||||
indent=4,
|
||||
sort_keys=True,
|
||||
ensure_ascii=True,
|
||||
)
|
||||
f.write('\n')
|
||||
self.path.write_text(json_string + '\n')
|
||||
except IOError:
|
||||
if not fail_silently:
|
||||
raise
|
||||
|
@ -1,7 +1,7 @@
|
||||
import os
|
||||
import sys
|
||||
import os
|
||||
from pathlib import Path
|
||||
from typing import Union, IO, Optional
|
||||
from typing import IO, Optional
|
||||
|
||||
|
||||
try:
|
||||
@ -57,7 +57,7 @@ class Environment:
|
||||
)
|
||||
del colorama
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
def __init__(self, devnull=None, **kwargs):
|
||||
"""
|
||||
Use keyword arguments to overwrite
|
||||
any of the class attributes for this instance.
|
||||
@ -66,6 +66,10 @@ class Environment:
|
||||
assert all(hasattr(type(self), attr) for attr in kwargs.keys())
|
||||
self.__dict__.update(**kwargs)
|
||||
|
||||
# The original STDERR unaffected by --quiet’ing.
|
||||
self._orig_stderr = self.stderr
|
||||
self._devnull = devnull
|
||||
|
||||
# Keyword arguments > stream.encoding > default utf8
|
||||
if self.stdin and self.stdin_encoding is None:
|
||||
self.stdin_encoding = getattr(
|
||||
@ -109,6 +113,16 @@ class Environment:
|
||||
self.log_error(e, level='warning')
|
||||
return config
|
||||
|
||||
@property
|
||||
def devnull(self) -> IO:
|
||||
if self._devnull is None:
|
||||
self._devnull = open(os.devnull, 'w+')
|
||||
return self._devnull
|
||||
|
||||
@devnull.setter
|
||||
def devnull(self, value):
|
||||
self._devnull = value
|
||||
|
||||
def log_error(self, msg, level='error'):
|
||||
assert level in ['error', 'warning']
|
||||
self.stderr.write(f'\n{self.program_name}: {level}: {msg}\n\n')
|
||||
self._orig_stderr.write(f'\n{self.program_name}: {level}: {msg}\n\n')
|
||||
|
@ -2,21 +2,29 @@ import argparse
|
||||
import os
|
||||
import platform
|
||||
import sys
|
||||
from typing import List, Union
|
||||
from typing import List, Optional, Tuple, Union
|
||||
|
||||
import requests
|
||||
from pygments import __version__ as pygments_version
|
||||
from requests import __version__ as requests_version
|
||||
|
||||
from httpie import __version__ as httpie_version
|
||||
from httpie.cli.constants import (
|
||||
OUT_REQ_BODY, OUT_REQ_HEAD, OUT_RESP_BODY,
|
||||
OUT_RESP_HEAD,
|
||||
)
|
||||
from httpie.client import collect_messages
|
||||
from httpie.context import Environment
|
||||
from httpie.downloads import Downloader
|
||||
from httpie.output.writer import write_message, write_stream
|
||||
from httpie.plugins import plugin_manager
|
||||
from httpie.output.writer import (
|
||||
write_message,
|
||||
write_stream,
|
||||
)
|
||||
from httpie.plugins.registry import plugin_manager
|
||||
from httpie.status import ExitStatus, http_status_to_exit_status
|
||||
|
||||
|
||||
# noinspection PyDefaultArgument
|
||||
def main(
|
||||
args: List[Union[str, bytes]] = sys.argv,
|
||||
env=Environment(),
|
||||
@ -90,7 +98,7 @@ def main(
|
||||
exit_status = ExitStatus.ERROR_TOO_MANY_REDIRECTS
|
||||
env.log_error(
|
||||
f'Too many redirects'
|
||||
f' (--max-redirects=parsed_args.max_redirects).'
|
||||
f' (--max-redirects={parsed_args.max_redirects}).'
|
||||
)
|
||||
except Exception as e:
|
||||
# TODO: Further distinction between expected and unexpected errors.
|
||||
@ -110,6 +118,22 @@ def main(
|
||||
return exit_status
|
||||
|
||||
|
||||
def get_output_options(
|
||||
args: argparse.Namespace,
|
||||
message: Union[requests.PreparedRequest, requests.Response]
|
||||
) -> Tuple[bool, bool]:
|
||||
return {
|
||||
requests.PreparedRequest: (
|
||||
OUT_REQ_HEAD in args.output_options,
|
||||
OUT_REQ_BODY in args.output_options,
|
||||
),
|
||||
requests.Response: (
|
||||
OUT_RESP_HEAD in args.output_options,
|
||||
OUT_RESP_BODY in args.output_options,
|
||||
),
|
||||
}[type(message)]
|
||||
|
||||
|
||||
def program(
|
||||
args: argparse.Namespace,
|
||||
env: Environment,
|
||||
@ -131,18 +155,57 @@ def program(
|
||||
)
|
||||
downloader.pre_request(args.headers)
|
||||
|
||||
initial_request = None
|
||||
final_response = None
|
||||
needs_separator = False
|
||||
|
||||
for message in collect_messages(args, env.config.directory):
|
||||
write_message(
|
||||
requests_message=message,
|
||||
env=env,
|
||||
args=args,
|
||||
def maybe_separate():
|
||||
nonlocal needs_separator
|
||||
if env.stdout.isatty() and needs_separator:
|
||||
needs_separator = False
|
||||
getattr(env.stdout, 'buffer', env.stdout).write(b'\n\n')
|
||||
|
||||
initial_request: Optional[requests.PreparedRequest] = None
|
||||
final_response: Optional[requests.Response] = None
|
||||
|
||||
def request_body_read_callback(chunk: bytes):
|
||||
should_pipe_to_stdout = (
|
||||
# Request body output desired
|
||||
OUT_REQ_BODY in args.output_options
|
||||
# & not `.read()` already pre-request (e.g., for compression)
|
||||
and initial_request
|
||||
# & non-EOF chunk
|
||||
and chunk
|
||||
)
|
||||
if isinstance(message, requests.PreparedRequest):
|
||||
if should_pipe_to_stdout:
|
||||
msg = requests.PreparedRequest()
|
||||
msg.is_body_upload_chunk = True
|
||||
msg.body = chunk
|
||||
msg.headers = initial_request.headers
|
||||
write_message(
|
||||
requests_message=msg,
|
||||
env=env,
|
||||
args=args,
|
||||
with_body=True,
|
||||
with_headers=False
|
||||
)
|
||||
|
||||
messages = collect_messages(
|
||||
args=args,
|
||||
config_dir=env.config.directory,
|
||||
request_body_read_callback=request_body_read_callback
|
||||
)
|
||||
for message in messages:
|
||||
maybe_separate()
|
||||
is_request = isinstance(message, requests.PreparedRequest)
|
||||
with_headers, with_body = get_output_options(
|
||||
args=args, message=message)
|
||||
if is_request:
|
||||
if not initial_request:
|
||||
initial_request = message
|
||||
is_streamed_upload = not isinstance(
|
||||
message.body, (str, bytes))
|
||||
if with_body:
|
||||
with_body = not is_streamed_upload
|
||||
needs_separator = is_streamed_upload
|
||||
else:
|
||||
final_response = message
|
||||
if args.check_status or downloader:
|
||||
@ -156,6 +219,15 @@ def program(
|
||||
f'HTTP {message.raw.status} {message.raw.reason}',
|
||||
level='warning'
|
||||
)
|
||||
write_message(
|
||||
requests_message=message,
|
||||
env=env,
|
||||
args=args,
|
||||
with_headers=with_headers,
|
||||
with_body=with_body,
|
||||
)
|
||||
|
||||
maybe_separate()
|
||||
|
||||
if downloader and exit_status == ExitStatus.SUCCESS:
|
||||
# Last response body download.
|
||||
|
@ -196,7 +196,6 @@ class Downloader:
|
||||
"""
|
||||
:param resume: Should the download resume if partial download
|
||||
already exists.
|
||||
:type resume: bool
|
||||
|
||||
:param output_file: The file to store response body in. If not
|
||||
provided, it will be guessed from the response.
|
||||
@ -322,7 +321,6 @@ class Downloader:
|
||||
|
||||
:param chunk: A chunk of response body data that has just
|
||||
been downloaded and written to the output.
|
||||
:type chunk: bytes
|
||||
|
||||
"""
|
||||
self.status.chunk_downloaded(len(chunk))
|
||||
|
@ -86,7 +86,7 @@ class ColorFormatter(FormatterPlugin):
|
||||
lexer=lexer,
|
||||
formatter=self.formatter,
|
||||
)
|
||||
return body.strip()
|
||||
return body
|
||||
|
||||
def get_lexer_for_body(
|
||||
self, mime: str,
|
||||
|
@ -3,6 +3,10 @@ from httpie.plugins import FormatterPlugin
|
||||
|
||||
class HeadersFormatter(FormatterPlugin):
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
super().__init__(**kwargs)
|
||||
self.enabled = self.format_options['headers']['sort']
|
||||
|
||||
def format_headers(self, headers: str) -> str:
|
||||
"""
|
||||
Sorts headers by name while retaining relative
|
||||
|
@ -4,11 +4,12 @@ import json
|
||||
from httpie.plugins import FormatterPlugin
|
||||
|
||||
|
||||
DEFAULT_INDENT = 4
|
||||
|
||||
|
||||
class JSONFormatter(FormatterPlugin):
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
super().__init__(**kwargs)
|
||||
self.enabled = self.format_options['json']['format']
|
||||
|
||||
def format_body(self, body: str, mime: str) -> str:
|
||||
maybe_json = [
|
||||
'json',
|
||||
@ -26,8 +27,8 @@ class JSONFormatter(FormatterPlugin):
|
||||
# unicode escapes to improve readability.
|
||||
body = json.dumps(
|
||||
obj=obj,
|
||||
sort_keys=True,
|
||||
sort_keys=self.format_options['json']['sort_keys'],
|
||||
ensure_ascii=False,
|
||||
indent=DEFAULT_INDENT
|
||||
indent=self.format_options['json']['indent']
|
||||
)
|
||||
return body
|
||||
|
@ -1,7 +1,8 @@
|
||||
import re
|
||||
from typing import Optional, List
|
||||
|
||||
from httpie.plugins import plugin_manager, ConverterPlugin
|
||||
from httpie.plugins import ConverterPlugin
|
||||
from httpie.plugins.registry import plugin_manager
|
||||
from httpie.context import Environment
|
||||
|
||||
|
||||
|
@ -14,10 +14,13 @@ BINARY_SUPPRESSED_NOTICE = (
|
||||
)
|
||||
|
||||
|
||||
class BinarySuppressedError(Exception):
|
||||
class DataSuppressedError(Exception):
|
||||
message = None
|
||||
|
||||
|
||||
class BinarySuppressedError(DataSuppressedError):
|
||||
"""An error indicating that the body is binary and won't be written,
|
||||
e.g., for terminal output)."""
|
||||
|
||||
message = BINARY_SUPPRESSED_NOTICE
|
||||
|
||||
|
||||
@ -63,7 +66,7 @@ class BaseStream:
|
||||
yield chunk
|
||||
if self.on_body_chunk_downloaded:
|
||||
self.on_body_chunk_downloaded(chunk)
|
||||
except BinarySuppressedError as e:
|
||||
except DataSuppressedError as e:
|
||||
if self.with_headers:
|
||||
yield b'\n'
|
||||
yield e.message
|
||||
|
@ -1,6 +1,6 @@
|
||||
import argparse
|
||||
import errno
|
||||
from typing import Union, IO, TextIO, Tuple, Type
|
||||
from typing import IO, TextIO, Tuple, Type, Union
|
||||
|
||||
import requests
|
||||
|
||||
@ -8,12 +8,7 @@ from httpie.context import Environment
|
||||
from httpie.models import HTTPRequest, HTTPResponse
|
||||
from httpie.output.processing import Conversion, Formatting
|
||||
from httpie.output.streams import (
|
||||
RawStream, PrettyStream,
|
||||
BufferedPrettyStream, EncodedStream,
|
||||
BaseStream,
|
||||
)
|
||||
from httpie.cli.constants import (
|
||||
OUT_REQ_BODY, OUT_REQ_HEAD, OUT_RESP_BODY, OUT_RESP_HEAD,
|
||||
BaseStream, BufferedPrettyStream, EncodedStream, PrettyStream, RawStream,
|
||||
)
|
||||
|
||||
|
||||
@ -21,26 +16,18 @@ def write_message(
|
||||
requests_message: Union[requests.PreparedRequest, requests.Response],
|
||||
env: Environment,
|
||||
args: argparse.Namespace,
|
||||
with_headers=False,
|
||||
with_body=False,
|
||||
):
|
||||
output_options_by_message_type = {
|
||||
requests.PreparedRequest: {
|
||||
'with_headers': OUT_REQ_HEAD in args.output_options,
|
||||
'with_body': OUT_REQ_BODY in args.output_options,
|
||||
},
|
||||
requests.Response: {
|
||||
'with_headers': OUT_RESP_HEAD in args.output_options,
|
||||
'with_body': OUT_RESP_BODY in args.output_options,
|
||||
},
|
||||
}
|
||||
output_options = output_options_by_message_type[type(requests_message)]
|
||||
if not any(output_options.values()):
|
||||
if not (with_body or with_headers):
|
||||
return
|
||||
write_stream_kwargs = {
|
||||
'stream': build_output_stream_for_message(
|
||||
args=args,
|
||||
env=env,
|
||||
requests_message=requests_message,
|
||||
**output_options,
|
||||
with_body=with_body,
|
||||
with_headers=with_headers,
|
||||
),
|
||||
# NOTE: `env.stdout` will in fact be `stderr` with `--download`
|
||||
'outfile': env.stdout,
|
||||
@ -120,7 +107,8 @@ def build_output_stream_for_message(
|
||||
with_body=with_body,
|
||||
**stream_kwargs,
|
||||
)
|
||||
if env.stdout_isatty and with_body:
|
||||
if (env.stdout_isatty and with_body
|
||||
and not getattr(requests_message, 'is_body_upload_chunk', False)):
|
||||
# Ensure a blank line after the response body.
|
||||
# For terminal output only.
|
||||
yield b'\n\n'
|
||||
@ -152,6 +140,7 @@ def get_stream_type_and_kwargs(
|
||||
groups=args.prettify,
|
||||
color_scheme=args.style,
|
||||
explicit_json=args.json,
|
||||
format_options=args.format_options,
|
||||
)
|
||||
}
|
||||
else:
|
||||
|
@ -7,18 +7,3 @@ from httpie.plugins.base import (
|
||||
AuthPlugin, FormatterPlugin,
|
||||
ConverterPlugin, TransportPlugin
|
||||
)
|
||||
from httpie.plugins.manager import PluginManager
|
||||
from httpie.plugins.builtin import BasicAuthPlugin, DigestAuthPlugin
|
||||
from httpie.output.formatters.headers import HeadersFormatter
|
||||
from httpie.output.formatters.json import JSONFormatter
|
||||
from httpie.output.formatters.colors import ColorFormatter
|
||||
|
||||
|
||||
plugin_manager = PluginManager()
|
||||
plugin_manager.register(
|
||||
BasicAuthPlugin,
|
||||
DigestAuthPlugin,
|
||||
HeadersFormatter,
|
||||
JSONFormatter,
|
||||
ColorFormatter,
|
||||
)
|
||||
|
@ -3,7 +3,7 @@ class BasePlugin:
|
||||
# The name of the plugin, eg. "My auth".
|
||||
name = None
|
||||
|
||||
# Optional short description. Will be be shown in the help
|
||||
# Optional short description. It will be shown in the help
|
||||
# under --auth-type.
|
||||
description = None
|
||||
|
||||
@ -15,7 +15,9 @@ class AuthPlugin(BasePlugin):
|
||||
"""
|
||||
Base auth plugin class.
|
||||
|
||||
See <https://github.com/httpie/httpie-ntlm> for an example auth plugin.
|
||||
See httpie-ntlm for an example auth plugin:
|
||||
|
||||
<https://github.com/httpie/httpie-ntlm>
|
||||
|
||||
See also `test_auth_plugins.py`
|
||||
|
||||
@ -33,13 +35,22 @@ class AuthPlugin(BasePlugin):
|
||||
# Set this to `False` to disable the parsing and error handling.
|
||||
auth_parse = True
|
||||
|
||||
# Set to `True` to make it possible for this auth
|
||||
# plugin to acquire credentials from the user’s netrc file(s).
|
||||
# It is used as a fallback when the credentials are not provided explicitly
|
||||
# through `--auth, -a`. Enabling this will allow skipping `--auth, -a`
|
||||
# even when `auth_require` is set `True` (provided that netrc provides
|
||||
# credential for a given host).
|
||||
netrc_parse = False
|
||||
|
||||
# If both `auth_parse` and `prompt_password` are set to `True`,
|
||||
# and the value of `-a` lacks the password part,
|
||||
# then the user will be prompted to type the password in.
|
||||
prompt_password = True
|
||||
|
||||
# Will be set to the raw value of `-a` (if provided) before
|
||||
# `get_auth()` gets called.
|
||||
# `get_auth()` gets called. If the credentials came from a netrc file,
|
||||
# then this is `None`.
|
||||
raw_auth = None
|
||||
|
||||
def get_auth(self, username=None, password=None):
|
||||
@ -58,8 +69,13 @@ class AuthPlugin(BasePlugin):
|
||||
|
||||
class TransportPlugin(BasePlugin):
|
||||
"""
|
||||
Requests transport adapter docs:
|
||||
|
||||
https://2.python-requests.org/en/latest/user/advanced/#transport-adapters
|
||||
<https://requests.readthedocs.io/en/latest/user/advanced/#transport-adapters>
|
||||
|
||||
See httpie-unixsocket for an example transport plugin:
|
||||
|
||||
<https://github.com/httpie/httpie-unixsocket>
|
||||
|
||||
"""
|
||||
|
||||
@ -76,6 +92,14 @@ class TransportPlugin(BasePlugin):
|
||||
|
||||
|
||||
class ConverterPlugin(BasePlugin):
|
||||
"""
|
||||
Possibly converts response data for prettified terminal display.
|
||||
|
||||
See httpie-msgpack for an example converter plugin:
|
||||
|
||||
<https://github.com/rasky/httpie-msgpack>.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, mime):
|
||||
self.mime = mime
|
||||
@ -89,17 +113,22 @@ class ConverterPlugin(BasePlugin):
|
||||
|
||||
|
||||
class FormatterPlugin(BasePlugin):
|
||||
"""
|
||||
Possibly formats response body & headers for prettified terminal display.
|
||||
|
||||
"""
|
||||
group_name = 'format'
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
"""
|
||||
:param env: an class:`Environment` instance
|
||||
:param kwargs: additional keyword argument that some
|
||||
processor might require.
|
||||
formatters might require.
|
||||
|
||||
"""
|
||||
self.enabled = True
|
||||
self.kwargs = kwargs
|
||||
self.format_options = kwargs['format_options']
|
||||
|
||||
def format_headers(self, headers: str) -> str:
|
||||
"""Return processed `headers`
|
||||
|
@ -22,6 +22,7 @@ class HTTPBasicAuth(requests.auth.HTTPBasicAuth):
|
||||
See https://github.com/jakubroztocil/httpie/issues/212
|
||||
|
||||
"""
|
||||
# noinspection PyTypeChecker
|
||||
request.headers['Authorization'] = type(self).make_header(
|
||||
self.username, self.password).encode('latin1')
|
||||
return request
|
||||
@ -36,6 +37,7 @@ class HTTPBasicAuth(requests.auth.HTTPBasicAuth):
|
||||
class BasicAuthPlugin(BuiltinAuthPlugin):
|
||||
name = 'Basic HTTP auth'
|
||||
auth_type = 'basic'
|
||||
netrc_parse = True
|
||||
|
||||
# noinspection PyMethodOverriding
|
||||
def get_auth(self, username: str, password: str) -> HTTPBasicAuth:
|
||||
@ -43,9 +45,9 @@ class BasicAuthPlugin(BuiltinAuthPlugin):
|
||||
|
||||
|
||||
class DigestAuthPlugin(BuiltinAuthPlugin):
|
||||
|
||||
name = 'Digest HTTP auth'
|
||||
auth_type = 'digest'
|
||||
netrc_parse = True
|
||||
|
||||
# noinspection PyMethodOverriding
|
||||
def get_auth(
|
||||
|
18
httpie/plugins/registry.py
Normal file
@ -0,0 +1,18 @@
from httpie.plugins.manager import PluginManager
from httpie.plugins.builtin import BasicAuthPlugin, DigestAuthPlugin
from httpie.output.formatters.headers import HeadersFormatter
from httpie.output.formatters.json import JSONFormatter
from httpie.output.formatters.colors import ColorFormatter


plugin_manager = PluginManager()


# Register all built-in plugins.
plugin_manager.register(
    BasicAuthPlugin,
    DigestAuthPlugin,
    HeadersFormatter,
    JSONFormatter,
    ColorFormatter,
)
@ -4,8 +4,10 @@ Persistent, JSON-serialized sessions.
|
||||
"""
|
||||
import os
|
||||
import re
|
||||
|
||||
from http.cookies import SimpleCookie
|
||||
from pathlib import Path
|
||||
from typing import Optional, Union
|
||||
from typing import Iterable, Optional, Union
|
||||
from urllib.parse import urlsplit
|
||||
|
||||
from requests.auth import AuthBase
|
||||
@ -13,7 +15,7 @@ from requests.cookies import RequestsCookieJar, create_cookie
|
||||
|
||||
from httpie.cli.dicts import RequestHeadersDict
|
||||
from httpie.config import BaseConfigDict, DEFAULT_CONFIG_DIR
|
||||
from httpie.plugins import plugin_manager
|
||||
from httpie.plugins.registry import plugin_manager
|
||||
|
||||
|
||||
SESSIONS_DIR_NAME = 'sessions'
|
||||
@ -76,7 +78,13 @@ class Session(BaseConfigDict):
|
||||
continue # Ignore explicitly unset headers
|
||||
|
||||
value = value.decode('utf8')
|
||||
if name == 'User-Agent' and value.startswith('HTTPie/'):
|
||||
if name.lower() == 'user-agent' and value.startswith('HTTPie/'):
|
||||
continue
|
||||
|
||||
if name.lower() == 'cookie':
|
||||
for cookie_name, morsel in SimpleCookie(value).items():
|
||||
self['cookies'][cookie_name] = {'value': morsel.value}
|
||||
del request_headers[name]
|
||||
continue
|
||||
|
||||
for prefix in SESSION_IGNORED_HEADER_PREFIXES:
|
||||
@ -144,3 +152,8 @@ class Session(BaseConfigDict):
|
||||
def auth(self, auth: dict):
|
||||
assert {'type', 'raw_auth'} == auth.keys()
|
||||
self['auth'] = auth
|
||||
|
||||
def remove_cookies(self, names: Iterable[str]):
|
||||
for name in names:
|
||||
if name in self['cookies']:
|
||||
del self['cookies'][name]
|
||||
|
63
httpie/ssl.py
Normal file
@ -0,0 +1,63 @@
import ssl

from requests.adapters import HTTPAdapter
# noinspection PyPackageRequirements
from urllib3.util.ssl_ import (
    DEFAULT_CIPHERS, create_urllib3_context,
    resolve_ssl_version,
)


DEFAULT_SSL_CIPHERS = DEFAULT_CIPHERS
SSL_VERSION_ARG_MAPPING = {
    'ssl2.3': 'PROTOCOL_SSLv23',
    'ssl3': 'PROTOCOL_SSLv3',
    'tls1': 'PROTOCOL_TLSv1',
    'tls1.1': 'PROTOCOL_TLSv1_1',
    'tls1.2': 'PROTOCOL_TLSv1_2',
    'tls1.3': 'PROTOCOL_TLSv1_3',
}
AVAILABLE_SSL_VERSION_ARG_MAPPING = {
    arg: getattr(ssl, constant_name)
    for arg, constant_name in SSL_VERSION_ARG_MAPPING.items()
    if hasattr(ssl, constant_name)
}


class HTTPieHTTPSAdapter(HTTPAdapter):
    def __init__(
        self,
        verify: bool,
        ssl_version: str = None,
        ciphers: str = None,
        **kwargs
    ):
        self._ssl_context = self._create_ssl_context(
            verify=verify,
            ssl_version=ssl_version,
            ciphers=ciphers,
        )
        super().__init__(**kwargs)

    def init_poolmanager(self, *args, **kwargs):
        kwargs['ssl_context'] = self._ssl_context
        return super().init_poolmanager(*args, **kwargs)

    def proxy_manager_for(self, *args, **kwargs):
        kwargs['ssl_context'] = self._ssl_context
        return super().proxy_manager_for(*args, **kwargs)

    @staticmethod
    def _create_ssl_context(
        verify: bool,
        ssl_version: str = None,
        ciphers: str = None,
    ) -> 'ssl.SSLContext':
        return create_urllib3_context(
            ciphers=ciphers,
            ssl_version=resolve_ssl_version(ssl_version),
            # Since we are using a custom SSL context, we need to pass this
            # here manually, even though it’s also passed to the connection
            # in `super().cert_verify()`.
            cert_reqs=ssl.CERT_REQUIRED if verify else ssl.CERT_NONE
        )
138
httpie/uploads.py
Normal file
@ -0,0 +1,138 @@
|
||||
import zlib
|
||||
from typing import Callable, IO, Iterable, Tuple, Union
|
||||
from urllib.parse import urlencode
|
||||
|
||||
import requests
|
||||
from requests.utils import super_len
|
||||
from requests_toolbelt import MultipartEncoder
|
||||
|
||||
from httpie.cli.dicts import MultipartRequestDataDict, RequestDataDict
|
||||
|
||||
|
||||
class ChunkedUploadStream:
|
||||
def __init__(self, stream: Iterable, callback: Callable):
|
||||
self.callback = callback
|
||||
self.stream = stream
|
||||
|
||||
def __iter__(self) -> Iterable[Union[str, bytes]]:
|
||||
for chunk in self.stream:
|
||||
self.callback(chunk)
|
||||
yield chunk
|
||||
|
||||
|
||||
class ChunkedMultipartUploadStream:
|
||||
chunk_size = 100 * 1024
|
||||
|
||||
def __init__(self, encoder: MultipartEncoder):
|
||||
self.encoder = encoder
|
||||
|
||||
def __iter__(self) -> Iterable[Union[str, bytes]]:
|
||||
while True:
|
||||
chunk = self.encoder.read(self.chunk_size)
|
||||
if not chunk:
|
||||
break
|
||||
yield chunk
|
||||
|
||||
|
||||
def prepare_request_body(
|
||||
body: Union[str, bytes, IO, MultipartEncoder, RequestDataDict],
|
||||
body_read_callback: Callable[[bytes], bytes],
|
||||
content_length_header_value: int = None,
|
||||
chunked=False,
|
||||
offline=False,
|
||||
) -> Union[str, bytes, IO, MultipartEncoder, ChunkedUploadStream]:
|
||||
|
||||
is_file_like = hasattr(body, 'read')
|
||||
|
||||
if isinstance(body, RequestDataDict):
|
||||
body = urlencode(body, doseq=True)
|
||||
|
||||
if offline:
|
||||
if is_file_like:
|
||||
return body.read()
|
||||
return body
|
||||
|
||||
if not is_file_like:
|
||||
if chunked:
|
||||
body = ChunkedUploadStream(
|
||||
# Pass the entire body as one chunk.
|
||||
stream=(chunk.encode() for chunk in [body]),
|
||||
callback=body_read_callback,
|
||||
)
|
||||
else:
|
||||
# File-like object.
|
||||
|
||||
if not super_len(body):
|
||||
# Zero-length -> assume stdin.
|
||||
if content_length_header_value is None and not chunked:
|
||||
#
|
||||
# Read the whole stdin to determine `Content-Length`.
|
||||
#
|
||||
# TODO: Instead of opt-in --chunked, consider making
|
||||
# `Transfer-Encoding: chunked` for STDIN opt-out via
|
||||
# something like --no-chunked.
|
||||
# This would be backwards-incompatible so wait until v3.0.0.
|
||||
#
|
||||
body = body.read()
|
||||
else:
|
||||
orig_read = body.read
|
||||
|
||||
def new_read(*args):
|
||||
chunk = orig_read(*args)
|
||||
body_read_callback(chunk)
|
||||
return chunk
|
||||
|
||||
body.read = new_read
|
||||
|
||||
if chunked:
|
||||
if isinstance(body, MultipartEncoder):
|
||||
body = ChunkedMultipartUploadStream(
|
||||
encoder=body,
|
||||
)
|
||||
else:
|
||||
body = ChunkedUploadStream(
|
||||
stream=body,
|
||||
callback=body_read_callback,
|
||||
)
|
||||
|
||||
return body
|
||||
|
||||
|
||||
def get_multipart_data_and_content_type(
|
||||
data: MultipartRequestDataDict,
|
||||
boundary: str = None,
|
||||
content_type: str = None,
|
||||
) -> Tuple[MultipartEncoder, str]:
|
||||
encoder = MultipartEncoder(
|
||||
fields=data.items(),
|
||||
boundary=boundary,
|
||||
)
|
||||
if content_type:
|
||||
content_type = content_type.strip()
|
||||
if 'boundary=' not in content_type:
|
||||
content_type = f'{content_type}; boundary={encoder.boundary_value}'
|
||||
else:
|
||||
content_type = encoder.content_type
|
||||
|
||||
data = encoder
|
||||
return data, content_type
|
||||
|
||||
|
||||
def compress_request(
|
||||
request: requests.PreparedRequest,
|
||||
always: bool,
|
||||
):
|
||||
deflater = zlib.compressobj()
|
||||
if isinstance(request.body, str):
|
||||
body_bytes = request.body.encode()
|
||||
elif hasattr(request.body, 'read'):
|
||||
body_bytes = request.body.read()
|
||||
else:
|
||||
body_bytes = request.body
|
||||
deflated_data = deflater.compress(body_bytes)
|
||||
deflated_data += deflater.flush()
|
||||
is_economical = len(deflated_data) < len(body_bytes)
|
||||
if is_economical or always:
|
||||
request.body = deflated_data
|
||||
request.headers['Content-Encoding'] = 'deflate'
|
||||
request.headers['Content-Length'] = str(len(deflated_data))
|
@ -1,8 +1,12 @@
|
||||
from __future__ import division
|
||||
|
||||
import json
|
||||
import mimetypes
|
||||
import time
|
||||
from collections import OrderedDict
|
||||
from http.cookiejar import parse_ns_headers
|
||||
from pprint import pformat
|
||||
from typing import List, Optional, Tuple
|
||||
|
||||
import requests.auth
|
||||
|
||||
@ -83,3 +87,33 @@ def get_content_type(filename):
|
||||
if encoding:
|
||||
content_type = '%s; charset=%s' % (mime, encoding)
|
||||
return content_type
|
||||
|
||||
|
||||
def get_expired_cookies(
|
||||
headers: List[Tuple[str, str]],
|
||||
now: float = None
|
||||
) -> List[dict]:
|
||||
|
||||
now = now or time.time()
|
||||
|
||||
def is_expired(expires: Optional[float]) -> bool:
|
||||
return expires is not None and expires <= now
|
||||
|
||||
attr_sets: List[Tuple[str, str]] = parse_ns_headers(
|
||||
value for name, value in headers
|
||||
if name.lower() == 'set-cookie'
|
||||
)
|
||||
cookies = [
|
||||
# The first attr name is the cookie name.
|
||||
dict(attrs[1:], name=attrs[0][0])
|
||||
for attrs in attr_sets
|
||||
]
|
||||
|
||||
return [
|
||||
{
|
||||
'name': cookie['name'],
|
||||
'path': cookie.get('path', '/')
|
||||
}
|
||||
for cookie in cookies
|
||||
if is_expired(expires=cookie.get('expires'))
|
||||
]
|
||||
|
@ -1,4 +1,3 @@
tox
mock
pytest
pytest-cov
@ -1,16 +1,16 @@
[wheel]
universal = 1


[tool:pytest]
# <https://docs.pytest.org/en/latest/customize.html>
norecursedirs = tests/fixtures
addopts = --tb=native


[pycodestyle]
# <http://pycodestyle.pycqa.org/en/latest/intro.html#configuration>

exclude = .git,.idea,__pycache__,build,dist,.tox,.pytest_cache,*.egg-info
exclude = .git,.idea,__pycache__,build,dist,.pytest_cache,*.egg-info

# <http://pycodestyle.pycqa.org/en/latest/intro.html#error-codes>
# E241 - multiple spaces after ‘,’
46
setup.py
@ -10,13 +10,18 @@ import httpie
|
||||
|
||||
|
||||
class PyTest(TestCommand):
|
||||
# `$ python setup.py test' simply installs minimal requirements
|
||||
# and runs the tests with no fancy stuff like parallel execution.
|
||||
"""
|
||||
Running `$ python setup.py test' simply installs minimal requirements
|
||||
and runs the tests with no fancy stuff like parallel execution.
|
||||
|
||||
"""
|
||||
def finalize_options(self):
|
||||
TestCommand.finalize_options(self)
|
||||
self.test_args = [
|
||||
'--doctest-modules', '--verbose',
|
||||
'./httpie', './tests'
|
||||
'--doctest-modules',
|
||||
'--verbose',
|
||||
'./httpie',
|
||||
'./tests',
|
||||
]
|
||||
self.test_suite = True
|
||||
|
||||
@ -26,8 +31,6 @@ class PyTest(TestCommand):
|
||||
|
||||
|
||||
tests_require = [
|
||||
# Pytest needs to come last.
|
||||
# https://bitbucket.org/pypa/setuptools/issue/196/
|
||||
'pytest-httpbin',
|
||||
'pytest',
|
||||
'mock',
|
||||
@ -35,31 +38,28 @@ tests_require = [
|
||||
|
||||
|
||||
install_requires = [
|
||||
'requests>=2.22.0',
|
||||
'requests[socks]>=2.22.0',
|
||||
'Pygments>=2.5.2',
|
||||
'requests-toolbelt>=0.9.1',
|
||||
]
|
||||
install_requires_win_only = [
|
||||
'colorama>=0.2.4',
|
||||
]
|
||||
|
||||
|
||||
# Conditional dependencies:
|
||||
|
||||
# sdist
|
||||
if 'bdist_wheel' not in sys.argv:
|
||||
try:
|
||||
# noinspection PyUnresolvedReferences
|
||||
import argparse
|
||||
except ImportError:
|
||||
install_requires.append('argparse>=1.2.1')
|
||||
|
||||
if 'win32' in str(sys.platform).lower():
|
||||
# Terminal colors for Windows
|
||||
install_requires.append('colorama>=0.2.4')
|
||||
install_requires.extend(install_requires_win_only)
|
||||
|
||||
|
||||
# bdist_wheel
|
||||
extras_require = {
|
||||
# https://wheel.readthedocs.io/en/latest/#defining-conditional-dependencies
|
||||
'python_version == "3.0" or python_version == "3.1"': ['argparse>=1.2.1'],
|
||||
':sys_platform == "win32"': ['colorama>=0.2.4'],
|
||||
':sys_platform == "win32"': install_requires_win_only,
|
||||
}
|
||||
|
||||
|
||||
@ -74,7 +74,7 @@ setup(
|
||||
description=httpie.__doc__.strip(),
|
||||
long_description=long_description(),
|
||||
url='https://httpie.org/',
|
||||
download_url='https://github.com/jakubroztocil/httpie',
|
||||
download_url=f'https://github.com/httpie/httpie/archive/{httpie.__version__}.tar.gz',
|
||||
author=httpie.__author__,
|
||||
author_email='jakub@roztocil.co',
|
||||
license=httpie.__licence__,
|
||||
@ -85,6 +85,7 @@ setup(
|
||||
'https = httpie.__main__:main',
|
||||
],
|
||||
},
|
||||
python_requires='>=3.6',
|
||||
extras_require=extras_require,
|
||||
install_requires=install_requires,
|
||||
tests_require=tests_require,
|
||||
@ -92,9 +93,7 @@ setup(
|
||||
classifiers=[
|
||||
'Development Status :: 5 - Production/Stable',
|
||||
'Programming Language :: Python',
|
||||
'Programming Language :: Python :: 3.5',
|
||||
'Programming Language :: Python :: 3.6',
|
||||
'Programming Language :: Python :: 3.7',
|
||||
'Programming Language :: Python :: 3 :: Only',
|
||||
'Environment :: Console',
|
||||
'Intended Audience :: Developers',
|
||||
'Intended Audience :: System Administrators',
|
||||
@ -106,4 +105,11 @@ setup(
|
||||
'Topic :: Text Processing',
|
||||
'Topic :: Utilities'
|
||||
],
|
||||
project_urls={
|
||||
'GitHub': 'https://github.com/httpie/httpie',
|
||||
'Twitter': 'https://twitter.com/httpie',
|
||||
'Documentation': 'https://httpie.org/docs',
|
||||
'Online Demo': 'https://httpie.org/run',
|
||||
'Donate': 'https://httpie.org/donate',
|
||||
},
|
||||
)
|
||||
|
@ -1,29 +1,31 @@
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIFAjCCAuoCAQEwDQYJKoZIhvcNAQEFBQAwSTELMAkGA1UEBhMCVVMxCzAJBgNV
|
||||
BAgTAkNBMQswCQYDVQQHEwJTRjEPMA0GA1UEChMGSFRUUGllMQ8wDQYDVQQDEwZI
|
||||
VFRQaWUwHhcNMTUwMTIzMjIyNTM2WhcNMTYwMTIzMjIyNTM2WjBFMQswCQYDVQQG
|
||||
EwJBVTETMBEGA1UECBMKU29tZS1TdGF0ZTEhMB8GA1UEChMYSW50ZXJuZXQgV2lk
|
||||
Z2l0cyBQdHkgTHRkMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAu6aP
|
||||
iR3TpPESWKTS969fxNRoSxl8P4osjhIaUuwblFNZc8/Rn5mCMKmD506JrFV8fktQ
|
||||
M6JRL7QuDC9vCw0ycr2HCV1sYX/ICgPCXYgmyigH535lb9V9hHjAgy60QgJBgSE7
|
||||
lmMYaPpX6OKbT7UlzSwYtfHomXEBFA18Rlc9GwMXH8Et0RQiWIi7S6vpDRpZFxRi
|
||||
gtXMceK1X8kut2ODv9B5ZwiuXh7+AMSCUkO58bXJTewQI6JadczU0JyVVjJVTny3
|
||||
ta0x4SyXn8/ibylOalIsmTd/CAXJRfhV0Umb34LwaWrZ2nc+OrJwYLvOp1cG/zYl
|
||||
GHkFCViRfuwwSkL4iKjVeHx2o0DxJ4bc2Z7k1ig2fTJK3gEbMz3y+YTlVNPo108H
|
||||
JI77DPbkBUqLPeF7PMaN/zDqmdH0yNCW+WiHZlf6h7kZdVH3feAhTfDZbpSxhpRo
|
||||
Ja84OAVCNqAuNjnZs8pMIW/iRixwP8p84At7VsS4yQQFTCjN22UhPP0PrqY3ngEj
|
||||
1lbfhHC1FNZvCMxrkUAUQbeYRqLrIwB4KdDMkRJixv5Vr89NO08QtnLwQduusVkc
|
||||
4Zg9HXtJTKjgQTHxHtn+OrTbpx0ogaUuYpVcQOsBT3b0EyV2z6pZiH6HK1r5Xwaq
|
||||
0+nvFwpCHe58PlaI3Geihxejkv+85ZgDqXSGt7ECAwEAATANBgkqhkiG9w0BAQUF
|
||||
AAOCAgEAQgIicN/uWtaYYBVEVeMGMdxzpp2pv3AaCfQMoVGaQu9VLydK/GBlYOqj
|
||||
AGPjdmQ7p4ISlduXqslu646+RxZ+H6TSSj0NTF4FyR8LPckRPiePNlsGp3u6ffix
|
||||
PX0554Ks+JYyFJ7qyMhsilqCYtw8prX9lj8fjzbWWXlgJFH/SRZw4xdcJ1yYA9sQ
|
||||
fBHxveCWFS1ibX5+QGy/+7jPb99MP38HEIt9vTMW5aiwXeIbipXohWqcJhxL9GXz
|
||||
KPsrt9a++rLjqsquhZL4uCksGmI4Gv0FQQswgSyHSSQzagee5VRB68WYSAyYdvzi
|
||||
YCfkNcbQtOOQWGx4rsEdENViPs1GEZkWJJ1h9pmWzZl0U9c3cnABffK7o9v6ap2F
|
||||
NrnU5H/7jLuBiUJFzqwkgAjANLRZ6hLj6h/grcnIIThJwg6KaXvpEh4UkHuqHYBF
|
||||
Fq1BWZIWU25ASggEVIsCPXC2+I1oGhxK1DN/J+wIht9MBWWlQWVMZAQsBkszNZrh
|
||||
nzdfMoQZTG5bT4Bf0bI5LmPaY0xBxXA1f4TLuqrEAziOjRX3vIQV4i33nZZJvPcC
|
||||
mCoyhAUpTJm+OI90ePll+vBO1ENAx7EMHqNe6eCChZ/9DUsVxxtaorVq1l0xWons
|
||||
ynOCgx46hGE12/oiRIKq/wGMpv6ClfJhW1N5nJahDqoIMEvnNaQ=
|
||||
MIIFazCCA1OgAwIBAgIUNMIIO7cG2Lkx+qo0Z43k4+voT4swDQYJKoZIhvcNAQEN
|
||||
BQAwRTELMAkGA1UEBhMCQVUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoM
|
||||
GEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZDAeFw0yMDA3MDQxMDE5NDBaFw0yMTA3
|
||||
MDQxMDE5NDBaMEUxCzAJBgNVBAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEw
|
||||
HwYDVQQKDBhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGQwggIiMA0GCSqGSIb3DQEB
|
||||
AQUAA4ICDwAwggIKAoICAQCpnv/bnF8qkRoFu2M/+btxR5kRDAMqvbBivG2F4Uop
|
||||
37mxwW0YJFOiMtzCN76w8JgEZrTeH3zG0fNNdIKIKjjwf+8j3KSbQi60oDOelkL5
|
||||
34Yt1o+lW9ricKQCsVl/XkYHh4RPnzNE8XRZmcZtL/6+1vVjTlxe6iW1Q0tzU2l3
|
||||
RHPhHbmsVclwFOd/eE+D6WB5tb6SVvhDyOfLdZwxWWpgARx6aboR/+/CKazE0wt4
|
||||
IJtTpe3M7IHt3i/8EkCZyFNdV+pQ8qz3PIOKBQws8aCpuQ+IHnvq4wSiyUV6eEaU
|
||||
bfOguWHGKlyVuN9AIiNl8A4xlU6QHKwzisTuRywschlvT8LaK1WGk+BNGBcidogh
|
||||
yp73KrDpiUd+Udv3TPDg5Q7pE6LT+sZxFrCidvZEZ1YdBDfXUhOaCTmtlFFYJiMT
|
||||
2+FnPQCfFv53D79llGaovE7t6KBf+qYRpIkSDoYhSSZ5GhFGTVsgQERYG39MSnbz
|
||||
4b1CQtg7Q8e9DJq8d/ChKUCfymJ+HSQIXEMu1FXrlEbEoyGvRyvA5cnUHjvY7GPY
|
||||
2HGHHaTFhiB9qRQhD3TdK4G6iIHF9tuxi2V+7waYp7q9N8KHfZRhIZbGSWQlaM9f
|
||||
njAUy8NAX6W4cL/ZpDf8PpVeMhLolvO8D8qCNZyWD+x5HtqDfqFkFPvr2vOSxZ+v
|
||||
6wIDAQABo1MwUTAdBgNVHQ4EFgQUkJwSpoGIxHUaArfJrX602HdHUWcwHwYDVR0j
|
||||
BBgwFoAUkJwSpoGIxHUaArfJrX602HdHUWcwDwYDVR0TAQH/BAUwAwEB/zANBgkq
|
||||
hkiG9w0BAQ0FAAOCAgEAqDuULnNBNJsydUXDyGTzCrXjJuqhuOhi1eALyCLwuT+F
|
||||
+/l7hOgdKWn4KJF4vcfNObcWh7sJ+iIcXEOYKaL3dPW9nqj+oCoPBKNAX+u3ZKXy
|
||||
I4O5wVAd3X0beh1ba69nOfdn9PMlVEB80TzTda0My9+tI5SD84OXUc7AWQXnh5Sb
|
||||
tHkul7cKcBA7/phnlC83qa6WoMlmNfqo8s2u+quDkhshKdrLFGGBI17gUQH3GbHN
|
||||
WBymHi/BCCIKYJB9+vt+M5L5C8FtNCMrCwTGtIOgC9IMre4wF2gODbjuRtkO2w6k
|
||||
sXOtKweCdgMd2H3SwE4txEU2hUHE1IYPYnG1fg0YwYfKfbTLZQtn7xgEK93+nkp8
|
||||
ufnnHgUxd//+pFPkbEOTnShuepl7g45qOBGUX4fBh78EVeL7NIZ9F8dHGsawD/CT
|
||||
/tATlH9gQ+JRvXCNCKO8jNgeu3v2gVw+haXP1d4F7NysVIr4A5LiFufJk5Zyizcm
|
||||
WyjgfI99CnEwvqzv4yMQCoHAOK3awhH7uR+QHhCpG9D91PlzdJu7yP7O7zQaKobg
|
||||
YTqMoMkYr63WbMrH21Tokoc/6CBPAAp3g8rC/E024SquJE7OUG0If5JkvlfJU5EP
|
||||
K+e7hFNoD4uc+0cgAccpEb9hCc0oPfC+3WM5poVBKSnukfs4KyqcVIt4ZaNoYic=
|
||||
-----END CERTIFICATE-----
|
||||
|
@ -1,51 +1,51 @@
|
||||
-----BEGIN RSA PRIVATE KEY-----
|
||||
MIIJKAIBAAKCAgEAu6aPiR3TpPESWKTS969fxNRoSxl8P4osjhIaUuwblFNZc8/R
|
||||
n5mCMKmD506JrFV8fktQM6JRL7QuDC9vCw0ycr2HCV1sYX/ICgPCXYgmyigH535l
|
||||
b9V9hHjAgy60QgJBgSE7lmMYaPpX6OKbT7UlzSwYtfHomXEBFA18Rlc9GwMXH8Et
|
||||
0RQiWIi7S6vpDRpZFxRigtXMceK1X8kut2ODv9B5ZwiuXh7+AMSCUkO58bXJTewQ
|
||||
I6JadczU0JyVVjJVTny3ta0x4SyXn8/ibylOalIsmTd/CAXJRfhV0Umb34LwaWrZ
|
||||
2nc+OrJwYLvOp1cG/zYlGHkFCViRfuwwSkL4iKjVeHx2o0DxJ4bc2Z7k1ig2fTJK
|
||||
3gEbMz3y+YTlVNPo108HJI77DPbkBUqLPeF7PMaN/zDqmdH0yNCW+WiHZlf6h7kZ
|
||||
dVH3feAhTfDZbpSxhpRoJa84OAVCNqAuNjnZs8pMIW/iRixwP8p84At7VsS4yQQF
|
||||
TCjN22UhPP0PrqY3ngEj1lbfhHC1FNZvCMxrkUAUQbeYRqLrIwB4KdDMkRJixv5V
|
||||
r89NO08QtnLwQduusVkc4Zg9HXtJTKjgQTHxHtn+OrTbpx0ogaUuYpVcQOsBT3b0
|
||||
EyV2z6pZiH6HK1r5Xwaq0+nvFwpCHe58PlaI3Geihxejkv+85ZgDqXSGt7ECAwEA
|
||||
AQKCAgBOY1DYlZYg8/eXAhuDDkayYYzDuny1ylG8c4F9nFYVCxB2GZ1Wz3icPWP1
|
||||
j1BhpkBgPbPeLfM+O0V1H6eCdVvapKOxXM52mDuHO3TJP6P8lOZgZOOY6RUK7qp0
|
||||
4mC4plqYx7oto23CBLoOdgMtM937rG0SLGDfIF6z8sI0XCMRkqPpRviNu5xxYYTk
|
||||
IoczSwtmYcSZJRjHhk4AGnmicDbMPRlJ2k2E0euHhI9wMAyQFUFnhLJlQGALj6pj
|
||||
DtYvcM1EAUN46EXK66bXQq8zgozYS0WIJ6+wOUKQMSIgUGCF6Rvm3ZTt9xwOxxW8
|
||||
wxebvfYVTJgIdh2Nfusgmye9Debl73f+k9/O4RsvYc5J5w2n4IxKqQrfCZrZqevZ
|
||||
s+KvARkuQbXrHPanvEd8MPrRZ6FOAdiZYAbB9OvzuKCbEkgag8GPjMMAvrjT49N2
|
||||
qp9gwGgnzczQYn+vLblJuRzofcblvLE+sxKKDE8qrfcOjN1murZP7714y5E3NmEZ
|
||||
NB2NTHveTflYI1HJ1tznI1C40GdBYH4GwT/0he53rBcjNaPhyP7j3cTR1doRfZap
|
||||
2oz8KE/Sij3Zb6b8r7hi+Lcwpa9txZftro7XNOJIX7ZT5B4KMiXowtCHbkMMnL6k
|
||||
48tRBpyX20MqDFezBRCK7lfGhU1Coms8UcDHoFXLuGY/sAYEcQKCAQEA9D9/PD1t
|
||||
e90haG6nLl4LKP5wH2wB2BK1RRBERqOVqwSmdSgn3/+GkpeYWKdhN2jyYn6qnpJQ
|
||||
hXXYGtHAAHuw0dGXnOmgcsyZSlAWPzpMYRYrSh3ds8JVJdV2d58yS0ty3Ae3W6aW
|
||||
p4SRuhf8yIMgOmE+TARCU1rJdke9jIIl2TQmnpJahlsZeGLEmEXE99EhB5VoshRJ
|
||||
hLXNn3xTtkQz3tNR0rMAtXI6SIMB00FFEG1+jClza6PYriT9dkORI5LSVqXDEpxR
|
||||
C41PvYMKTAloWd0hZ2gdfwAcJScoAv75L10uR7O1IeQI+Et5h2tj4a/OfzILa0d5
|
||||
BYMmVsTa3NZXLQKCAQEAxK3uJKmoN2uswJQSKpX4WziVBgbaZdpCNnAWhfJZYIcP
|
||||
zlIuv9qOc/DIPiy9Sa9XtITSkcONAwRowtI783AtXeAelyo3W7A2aLIfBBZAXDzJ
|
||||
8KMc9xMDPebvUhlPSzg4bNwvduukAnktlzCjrRWPXRoSfloSpFkFPP4GwTdVcf17
|
||||
1mkki6rK4rbHmIoCITlZkNbUBCcu20ivK6N3pvH1wN123bxnF7lwvB5qizdFO5P7
|
||||
xRVIoCdCXQ0+WK2ZokCa/r44rcp4ffgrLoO/WRlo4yERIa9NwaucIrXmotKX8kYc
|
||||
YYpFzrGs72DljS7TBZCOqek5fNQBNK79BA2hNcJ1FQKCAQBC+M44hFdq6T1p1z18
|
||||
F0lUGkBAPWtcBfUyVL2D6QL2+7Vw1mvonbYWp/6cAHlFqj8cBsNd65ysm51/7ReK
|
||||
il/3iFLcMatPDw7RM5iGCcQ7ssp37iyGR7j1QMzVDA/MWYnLD0qVlN4mXNFgh4dG
|
||||
q73AhD2CtoBBPtmS1yUATAd4wTX9sP+la4FWYy6o2iiiEvPNkog8nBd0ji0tl/eU
|
||||
OKtIZAVBkteU6RdWHqX3eSQo1v0mDY+aajjVt0rQjMJVUMLgA1+z0KzgUAUXX8EJ
|
||||
DGNSkLHCGuhLlIojHdN4ztUgyZoRCxOVkWNsQbW3Dhk7HuuuMNi0t8pVWpq+nAev
|
||||
Gg6ZAoIBAQC0mMk9nRO7oAGG6/Aqbn8YtEISwKQ2Nk3qUs47vKdZPWvEFi6bOILp
|
||||
70TP4qEFUh6EwhngguGuzZOsoQMvq+fcdXlhcQBYDtxHEpfsVspOZ/s+HWjxbuHh
|
||||
K3bBuj/XYA5f12c2GXYGV2MHm0AQJOX5pYEpyGepxZxLvy5QqRCqlQnrfaxzGycl
|
||||
OpTYepEuFM0rdDhGf/xEmt9OgNHT2AXDTRhizycS39Kmyn8myl+mL2JWPA7uEF6d
|
||||
txVytCWImS45kE3XNz2g3go4sf04QV7QgIKMnb4Wgg/ix4i6JgokC0DwR9mFzBxx
|
||||
ylW+aCqYx35YgrGo77sTt0LZP/KxvJdpAoIBAF7YfhR1wFbW2L8sJ4gAbjPUWOMu
|
||||
JUfE4FhdLcSdqCo+N8uN0qawJxXltBKfjeeoH0CDh9Yv0qqalVdSOKS9BPAa1zJc
|
||||
o2kBcT8AVwoPS5oxa9eDT+7iHPMF4BErB2IGv3yYwpjqSZBJ9TsTu1B6iTf5hOL5
|
||||
9pqcv/LjfcwtWu2XMCVoZj2Q8iYv55l3jJ1ByF/UDVezWajE69avvJkQZrMZmuBw
|
||||
UuHelP/7anRyyelh7RkndZpPCExGmuO7pd5aG25/mBs0i34R1PElAtt8AN36f5Tk
|
||||
1GxIltTNtLk4Mivwp9aZ1vf9s5FAhgPDvfGV5yFoKYmA/65ZlrKx0zlFNng=
|
||||
MIIJKQIBAAKCAgEAqZ7/25xfKpEaBbtjP/m7cUeZEQwDKr2wYrxtheFKKd+5scFt
|
||||
GCRTojLcwje+sPCYBGa03h98xtHzTXSCiCo48H/vI9ykm0IutKAznpZC+d+GLdaP
|
||||
pVva4nCkArFZf15GB4eET58zRPF0WZnGbS/+vtb1Y05cXuoltUNLc1Npd0Rz4R25
|
||||
rFXJcBTnf3hPg+lgebW+klb4Q8jny3WcMVlqYAEcemm6Ef/vwimsxNMLeCCbU6Xt
|
||||
zOyB7d4v/BJAmchTXVfqUPKs9zyDigUMLPGgqbkPiB576uMEoslFenhGlG3zoLlh
|
||||
xipclbjfQCIjZfAOMZVOkBysM4rE7kcsLHIZb0/C2itVhpPgTRgXInaIIcqe9yqw
|
||||
6YlHflHb90zw4OUO6ROi0/rGcRawonb2RGdWHQQ311ITmgk5rZRRWCYjE9vhZz0A
|
||||
nxb+dw+/ZZRmqLxO7eigX/qmEaSJEg6GIUkmeRoRRk1bIEBEWBt/TEp28+G9QkLY
|
||||
O0PHvQyavHfwoSlAn8pifh0kCFxDLtRV65RGxKMhr0crwOXJ1B472Oxj2Nhxhx2k
|
||||
xYYgfakUIQ903SuBuoiBxfbbsYtlfu8GmKe6vTfCh32UYSGWxklkJWjPX54wFMvD
|
||||
QF+luHC/2aQ3/D6VXjIS6JbzvA/KgjWclg/seR7ag36hZBT769rzksWfr+sCAwEA
|
||||
AQKCAgBmZ1W0si1KN5vsRftfjle5xi4E+qmWzjqFAZllsGPj7+veAxbn8laDoA1j
|
||||
O+BmVnqQfalISN498lbfNi3wIv2JRNONZRIDoesspWNEpRb+YBJT7it++3ukJbj+
|
||||
3y9XFAVXWlto7oY3Y0aJKauAE+/KK2CueYqOyvHFA0Gz+HG9zZfgGuATyR76CcTR
|
||||
UkM/MlBKao0JMHRmCA7Y6MJJkOAF4eXdiaMKZufK4vopQfi0p4re71gn1cmDYBa8
|
||||
KhDSRvz9Z6xQ/pGqGeCYHQACykXi8ZUM6sqJPlF4LedCTwbdaZwiNolu5/hJc/lk
|
||||
cLfKPSl0id2KZ6UW4PqPmGx00NXFP/XcCxzzht8ejrI1GY9LXR6fKpmoYZvUoXba
|
||||
SK58l+OcAaxJ7JoTCvH2adas5mhNGyHTTghceNlFPuT+LC7nNq6rJD0QLouDQMr5
|
||||
0my2lJtDiafa+Z3aGt759vkTT7k4wnfWNkjZJDIVf6UkAoMFtN5nOgR36OaDLegA
|
||||
7udascC3hKRUi2BIlc713hl2dlcPVMcCQArpvbwgwPFXiZO9PW+Qc7IWogHqWNWY
|
||||
Ms9JsDcAE5Q5PRlAA8QSveSyl3QNJpeHT9PVx159a28E8xEWCs9nfpI/jXfYxFnr
|
||||
dfS7gn8XW1WNUJvtHsKIhdSRD/4ks6VRPm6KMskR+j+zpTbmcQKCAQEA3CvDiT/E
|
||||
oD2VK9rE0KNDZBljED2p7IVE+zED5olGPUGC3F+WiEl9ldd6DKL6K0Xv/zAEv7Nt
|
||||
hHJ4m3B8siOQf2wzrX6JTvqDhBnrYjsD3VU7Zpys4ZjMOAp/aIM124ZRDECe2do3
|
||||
yzfV+oR0qw9KmyywjMwPa/8LL9d+kwYSQX6Y2hy+5TquDghKCmQzBw0iCDlmWfNP
|
||||
jqfztSc1oBPcij+X98h3EI3Ai7R+hlolWlowXy0qBY8qCWegbguRDFkDhTXDCPwW
|
||||
RMiQobI3xWfhZybSohx42/HUYMi5Uis++CV3XeE/aRdLw/O3gHTz5n9Z3v0i0Xnd
|
||||
KIWxpCKzLzLAVwKCAQEAxTlZHVlNaVz8fsSajAyq3n4LnOxGEwhYspzY7U2tHnbr
|
||||
U1QXTlvGN97u9hMdHgvvPu7OULfeJM0EPNBdQC2B2Y2vkAZBcdw2cgXdzVksv+gO
|
||||
//ryo37xBZXY46prGyPZCrfrrBXHNOHlxY1AklQUu8PnNKU+Z02hirMtY6pm/WyI
|
||||
2fbUJRqQu3nTMiuqFeee+5vaKbWXPRWKjpF/KZxoA4YSymGhG+fVIJVKxWjz1ns/
|
||||
0Kkx/a4D3xWZO+vY9LE24PZzygUfr3/ZsCe8N+UpvZ60h7eJT9DJB1ETgqPFL8zr
|
||||
EhGxoNDLRpm0b1JELAuclCHuHdqQ/uTJB2DjSFpAjQKCAQEAxmNU3R4toan7+Tk2
|
||||
cT07oz3Q6rh1nd70KlefSSLWvKmELeif7owx8kvn+Oz9+PIa8FmnXcli3J59GKsC
|
||||
YU30jSzFYAaN2TGYQfdNBwVgVRbQ4IQ6r0kMc07aQSVB6V4dN6oeuPSNo7rbP9IM
|
||||
gnrT4gEh0KyrFMgKn4BQ2E/3MTbOqnKOfGUkoxZLCRQCes8VpE18cX7xZ/zkd44u
|
||||
HuDmr1fgKnBjAPKJ1hi8jXk7ATAVOB2tKLc4zKKoh6A6geLPbj/kTvs/YZlL4beB
|
||||
04noLBdqYpK/QIimstMLUgQPyG+SIHCvv5UzOw0ng0Ne5opIQ8rajeB+LF5TlC+E
|
||||
P/o+HwKCAQAurZcI2jT3JfngqvmFAg6C4EQxXL5tDMGpbHPvHj5GApFJxJJLim8M
|
||||
lCfsd7Ohg+OY+n48HnhmL1u8ZPhdEygzbFRL+x8MKrl8HSVUz7FGrk62iRdaWNYE
|
||||
o2WU5KW6464f2k3eCb1/J6PxMLBCscHCeuhCzoVJf9cm86dfeloryr9NDx1Attvg
|
||||
c0HoEuuLialYFZf53S+xVmLXwVneaFU52EakPZ0a9LC9qHfs5x0m+z6sTQ824jOq
|
||||
XftJclWD/FlnvwzCmJnaOKE2DwF+HS/W4DQMFwVZramWoLrEZaxq1s4gFa37yM8D
|
||||
o6dP3aGi5xClAq7PxAYjPdTSeTzxx+KVAoIBAQDGwk1/sJW99Oif+7RXvV99l+BL
|
||||
1R0BI1Dgc+aXkXSX4OeWJdLdiGLztrJ/lEzesKEdVHmG+wamexaxWzYgUeKklcAA
|
||||
IPrEawh3qB9gmlWei4BrK+e0cGjPZwq5bQi7gkpsMdxlHYkCmO12DzZ7/4CaGqET
|
||||
+Az0Xa7wjlRbSv62HvKbCm1yMizs8l9k3E8vMo9vU1soyEvR3r/aHzo7KyiXJaio
|
||||
ioppLcx/FVQCkaFQ1/H4dBZCSxviJxQmnOWlTkJT1mH44GLQnv21UsEWUrpz13VK
|
||||
8Dp0zWwNtSKoEQ6YJYl1Nwt04OhUrxG5fStSOpRiQ2r8bUAM0d4qDSjV92Yf
|
||||
-----END RSA PRIVATE KEY-----
|
||||
|
@ -1,87 +1,82 @@
|
||||
Bag Attributes
|
||||
localKeyID: 93 0C 3E A7 82 62 36 37 5E 73 9B 05 C4 98 DF DC 04 5C B4 C9
|
||||
subject=/C=AU/ST=Some-State/O=Internet Widgits Pty Ltd
|
||||
issuer=/C=US/ST=CA/L=SF/O=HTTPie/CN=HTTPie
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIFAjCCAuoCAQEwDQYJKoZIhvcNAQEFBQAwSTELMAkGA1UEBhMCVVMxCzAJBgNV
|
||||
BAgTAkNBMQswCQYDVQQHEwJTRjEPMA0GA1UEChMGSFRUUGllMQ8wDQYDVQQDEwZI
|
||||
VFRQaWUwHhcNMTUwMTIzMjIyNTM2WhcNMTYwMTIzMjIyNTM2WjBFMQswCQYDVQQG
|
||||
EwJBVTETMBEGA1UECBMKU29tZS1TdGF0ZTEhMB8GA1UEChMYSW50ZXJuZXQgV2lk
|
||||
Z2l0cyBQdHkgTHRkMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAu6aP
|
||||
iR3TpPESWKTS969fxNRoSxl8P4osjhIaUuwblFNZc8/Rn5mCMKmD506JrFV8fktQ
|
||||
M6JRL7QuDC9vCw0ycr2HCV1sYX/ICgPCXYgmyigH535lb9V9hHjAgy60QgJBgSE7
|
||||
lmMYaPpX6OKbT7UlzSwYtfHomXEBFA18Rlc9GwMXH8Et0RQiWIi7S6vpDRpZFxRi
|
||||
gtXMceK1X8kut2ODv9B5ZwiuXh7+AMSCUkO58bXJTewQI6JadczU0JyVVjJVTny3
|
||||
ta0x4SyXn8/ibylOalIsmTd/CAXJRfhV0Umb34LwaWrZ2nc+OrJwYLvOp1cG/zYl
|
||||
GHkFCViRfuwwSkL4iKjVeHx2o0DxJ4bc2Z7k1ig2fTJK3gEbMz3y+YTlVNPo108H
|
||||
JI77DPbkBUqLPeF7PMaN/zDqmdH0yNCW+WiHZlf6h7kZdVH3feAhTfDZbpSxhpRo
|
||||
Ja84OAVCNqAuNjnZs8pMIW/iRixwP8p84At7VsS4yQQFTCjN22UhPP0PrqY3ngEj
|
||||
1lbfhHC1FNZvCMxrkUAUQbeYRqLrIwB4KdDMkRJixv5Vr89NO08QtnLwQduusVkc
|
||||
4Zg9HXtJTKjgQTHxHtn+OrTbpx0ogaUuYpVcQOsBT3b0EyV2z6pZiH6HK1r5Xwaq
|
||||
0+nvFwpCHe58PlaI3Geihxejkv+85ZgDqXSGt7ECAwEAATANBgkqhkiG9w0BAQUF
|
||||
AAOCAgEAQgIicN/uWtaYYBVEVeMGMdxzpp2pv3AaCfQMoVGaQu9VLydK/GBlYOqj
|
||||
AGPjdmQ7p4ISlduXqslu646+RxZ+H6TSSj0NTF4FyR8LPckRPiePNlsGp3u6ffix
|
||||
PX0554Ks+JYyFJ7qyMhsilqCYtw8prX9lj8fjzbWWXlgJFH/SRZw4xdcJ1yYA9sQ
|
||||
fBHxveCWFS1ibX5+QGy/+7jPb99MP38HEIt9vTMW5aiwXeIbipXohWqcJhxL9GXz
|
||||
KPsrt9a++rLjqsquhZL4uCksGmI4Gv0FQQswgSyHSSQzagee5VRB68WYSAyYdvzi
|
||||
YCfkNcbQtOOQWGx4rsEdENViPs1GEZkWJJ1h9pmWzZl0U9c3cnABffK7o9v6ap2F
|
||||
NrnU5H/7jLuBiUJFzqwkgAjANLRZ6hLj6h/grcnIIThJwg6KaXvpEh4UkHuqHYBF
|
||||
Fq1BWZIWU25ASggEVIsCPXC2+I1oGhxK1DN/J+wIht9MBWWlQWVMZAQsBkszNZrh
|
||||
nzdfMoQZTG5bT4Bf0bI5LmPaY0xBxXA1f4TLuqrEAziOjRX3vIQV4i33nZZJvPcC
|
||||
mCoyhAUpTJm+OI90ePll+vBO1ENAx7EMHqNe6eCChZ/9DUsVxxtaorVq1l0xWons
|
||||
ynOCgx46hGE12/oiRIKq/wGMpv6ClfJhW1N5nJahDqoIMEvnNaQ=
|
||||
MIIFazCCA1OgAwIBAgIUNMIIO7cG2Lkx+qo0Z43k4+voT4swDQYJKoZIhvcNAQEN
|
||||
BQAwRTELMAkGA1UEBhMCQVUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoM
|
||||
GEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZDAeFw0yMDA3MDQxMDE5NDBaFw0yMTA3
|
||||
MDQxMDE5NDBaMEUxCzAJBgNVBAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEw
|
||||
HwYDVQQKDBhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGQwggIiMA0GCSqGSIb3DQEB
|
||||
AQUAA4ICDwAwggIKAoICAQCpnv/bnF8qkRoFu2M/+btxR5kRDAMqvbBivG2F4Uop
|
||||
37mxwW0YJFOiMtzCN76w8JgEZrTeH3zG0fNNdIKIKjjwf+8j3KSbQi60oDOelkL5
|
||||
34Yt1o+lW9ricKQCsVl/XkYHh4RPnzNE8XRZmcZtL/6+1vVjTlxe6iW1Q0tzU2l3
|
||||
RHPhHbmsVclwFOd/eE+D6WB5tb6SVvhDyOfLdZwxWWpgARx6aboR/+/CKazE0wt4
|
||||
IJtTpe3M7IHt3i/8EkCZyFNdV+pQ8qz3PIOKBQws8aCpuQ+IHnvq4wSiyUV6eEaU
|
||||
bfOguWHGKlyVuN9AIiNl8A4xlU6QHKwzisTuRywschlvT8LaK1WGk+BNGBcidogh
|
||||
yp73KrDpiUd+Udv3TPDg5Q7pE6LT+sZxFrCidvZEZ1YdBDfXUhOaCTmtlFFYJiMT
|
||||
2+FnPQCfFv53D79llGaovE7t6KBf+qYRpIkSDoYhSSZ5GhFGTVsgQERYG39MSnbz
|
||||
4b1CQtg7Q8e9DJq8d/ChKUCfymJ+HSQIXEMu1FXrlEbEoyGvRyvA5cnUHjvY7GPY
|
||||
2HGHHaTFhiB9qRQhD3TdK4G6iIHF9tuxi2V+7waYp7q9N8KHfZRhIZbGSWQlaM9f
|
||||
njAUy8NAX6W4cL/ZpDf8PpVeMhLolvO8D8qCNZyWD+x5HtqDfqFkFPvr2vOSxZ+v
|
||||
6wIDAQABo1MwUTAdBgNVHQ4EFgQUkJwSpoGIxHUaArfJrX602HdHUWcwHwYDVR0j
|
||||
BBgwFoAUkJwSpoGIxHUaArfJrX602HdHUWcwDwYDVR0TAQH/BAUwAwEB/zANBgkq
|
||||
hkiG9w0BAQ0FAAOCAgEAqDuULnNBNJsydUXDyGTzCrXjJuqhuOhi1eALyCLwuT+F
|
||||
+/l7hOgdKWn4KJF4vcfNObcWh7sJ+iIcXEOYKaL3dPW9nqj+oCoPBKNAX+u3ZKXy
|
||||
I4O5wVAd3X0beh1ba69nOfdn9PMlVEB80TzTda0My9+tI5SD84OXUc7AWQXnh5Sb
|
||||
tHkul7cKcBA7/phnlC83qa6WoMlmNfqo8s2u+quDkhshKdrLFGGBI17gUQH3GbHN
|
||||
WBymHi/BCCIKYJB9+vt+M5L5C8FtNCMrCwTGtIOgC9IMre4wF2gODbjuRtkO2w6k
|
||||
sXOtKweCdgMd2H3SwE4txEU2hUHE1IYPYnG1fg0YwYfKfbTLZQtn7xgEK93+nkp8
|
||||
ufnnHgUxd//+pFPkbEOTnShuepl7g45qOBGUX4fBh78EVeL7NIZ9F8dHGsawD/CT
|
||||
/tATlH9gQ+JRvXCNCKO8jNgeu3v2gVw+haXP1d4F7NysVIr4A5LiFufJk5Zyizcm
|
||||
WyjgfI99CnEwvqzv4yMQCoHAOK3awhH7uR+QHhCpG9D91PlzdJu7yP7O7zQaKobg
|
||||
YTqMoMkYr63WbMrH21Tokoc/6CBPAAp3g8rC/E024SquJE7OUG0If5JkvlfJU5EP
|
||||
K+e7hFNoD4uc+0cgAccpEb9hCc0oPfC+3WM5poVBKSnukfs4KyqcVIt4ZaNoYic=
|
||||
-----END CERTIFICATE-----
|
||||
Bag Attributes
|
||||
localKeyID: 93 0C 3E A7 82 62 36 37 5E 73 9B 05 C4 98 DF DC 04 5C B4 C9
|
||||
Key Attributes: <No Attributes>
|
||||
-----BEGIN RSA PRIVATE KEY-----
|
||||
MIIJKAIBAAKCAgEAu6aPiR3TpPESWKTS969fxNRoSxl8P4osjhIaUuwblFNZc8/R
|
||||
n5mCMKmD506JrFV8fktQM6JRL7QuDC9vCw0ycr2HCV1sYX/ICgPCXYgmyigH535l
|
||||
b9V9hHjAgy60QgJBgSE7lmMYaPpX6OKbT7UlzSwYtfHomXEBFA18Rlc9GwMXH8Et
|
||||
0RQiWIi7S6vpDRpZFxRigtXMceK1X8kut2ODv9B5ZwiuXh7+AMSCUkO58bXJTewQ
|
||||
I6JadczU0JyVVjJVTny3ta0x4SyXn8/ibylOalIsmTd/CAXJRfhV0Umb34LwaWrZ
|
||||
2nc+OrJwYLvOp1cG/zYlGHkFCViRfuwwSkL4iKjVeHx2o0DxJ4bc2Z7k1ig2fTJK
|
||||
3gEbMz3y+YTlVNPo108HJI77DPbkBUqLPeF7PMaN/zDqmdH0yNCW+WiHZlf6h7kZ
|
||||
dVH3feAhTfDZbpSxhpRoJa84OAVCNqAuNjnZs8pMIW/iRixwP8p84At7VsS4yQQF
|
||||
TCjN22UhPP0PrqY3ngEj1lbfhHC1FNZvCMxrkUAUQbeYRqLrIwB4KdDMkRJixv5V
|
||||
r89NO08QtnLwQduusVkc4Zg9HXtJTKjgQTHxHtn+OrTbpx0ogaUuYpVcQOsBT3b0
|
||||
EyV2z6pZiH6HK1r5Xwaq0+nvFwpCHe58PlaI3Geihxejkv+85ZgDqXSGt7ECAwEA
|
||||
AQKCAgBOY1DYlZYg8/eXAhuDDkayYYzDuny1ylG8c4F9nFYVCxB2GZ1Wz3icPWP1
|
||||
j1BhpkBgPbPeLfM+O0V1H6eCdVvapKOxXM52mDuHO3TJP6P8lOZgZOOY6RUK7qp0
|
||||
4mC4plqYx7oto23CBLoOdgMtM937rG0SLGDfIF6z8sI0XCMRkqPpRviNu5xxYYTk
|
||||
IoczSwtmYcSZJRjHhk4AGnmicDbMPRlJ2k2E0euHhI9wMAyQFUFnhLJlQGALj6pj
|
||||
DtYvcM1EAUN46EXK66bXQq8zgozYS0WIJ6+wOUKQMSIgUGCF6Rvm3ZTt9xwOxxW8
|
||||
wxebvfYVTJgIdh2Nfusgmye9Debl73f+k9/O4RsvYc5J5w2n4IxKqQrfCZrZqevZ
|
||||
s+KvARkuQbXrHPanvEd8MPrRZ6FOAdiZYAbB9OvzuKCbEkgag8GPjMMAvrjT49N2
|
||||
qp9gwGgnzczQYn+vLblJuRzofcblvLE+sxKKDE8qrfcOjN1murZP7714y5E3NmEZ
|
||||
NB2NTHveTflYI1HJ1tznI1C40GdBYH4GwT/0he53rBcjNaPhyP7j3cTR1doRfZap
|
||||
2oz8KE/Sij3Zb6b8r7hi+Lcwpa9txZftro7XNOJIX7ZT5B4KMiXowtCHbkMMnL6k
|
||||
48tRBpyX20MqDFezBRCK7lfGhU1Coms8UcDHoFXLuGY/sAYEcQKCAQEA9D9/PD1t
|
||||
e90haG6nLl4LKP5wH2wB2BK1RRBERqOVqwSmdSgn3/+GkpeYWKdhN2jyYn6qnpJQ
|
||||
hXXYGtHAAHuw0dGXnOmgcsyZSlAWPzpMYRYrSh3ds8JVJdV2d58yS0ty3Ae3W6aW
|
||||
p4SRuhf8yIMgOmE+TARCU1rJdke9jIIl2TQmnpJahlsZeGLEmEXE99EhB5VoshRJ
|
||||
hLXNn3xTtkQz3tNR0rMAtXI6SIMB00FFEG1+jClza6PYriT9dkORI5LSVqXDEpxR
|
||||
C41PvYMKTAloWd0hZ2gdfwAcJScoAv75L10uR7O1IeQI+Et5h2tj4a/OfzILa0d5
|
||||
BYMmVsTa3NZXLQKCAQEAxK3uJKmoN2uswJQSKpX4WziVBgbaZdpCNnAWhfJZYIcP
|
||||
zlIuv9qOc/DIPiy9Sa9XtITSkcONAwRowtI783AtXeAelyo3W7A2aLIfBBZAXDzJ
|
||||
8KMc9xMDPebvUhlPSzg4bNwvduukAnktlzCjrRWPXRoSfloSpFkFPP4GwTdVcf17
|
||||
1mkki6rK4rbHmIoCITlZkNbUBCcu20ivK6N3pvH1wN123bxnF7lwvB5qizdFO5P7
|
||||
xRVIoCdCXQ0+WK2ZokCa/r44rcp4ffgrLoO/WRlo4yERIa9NwaucIrXmotKX8kYc
|
||||
YYpFzrGs72DljS7TBZCOqek5fNQBNK79BA2hNcJ1FQKCAQBC+M44hFdq6T1p1z18
|
||||
F0lUGkBAPWtcBfUyVL2D6QL2+7Vw1mvonbYWp/6cAHlFqj8cBsNd65ysm51/7ReK
|
||||
il/3iFLcMatPDw7RM5iGCcQ7ssp37iyGR7j1QMzVDA/MWYnLD0qVlN4mXNFgh4dG
|
||||
q73AhD2CtoBBPtmS1yUATAd4wTX9sP+la4FWYy6o2iiiEvPNkog8nBd0ji0tl/eU
|
||||
OKtIZAVBkteU6RdWHqX3eSQo1v0mDY+aajjVt0rQjMJVUMLgA1+z0KzgUAUXX8EJ
|
||||
DGNSkLHCGuhLlIojHdN4ztUgyZoRCxOVkWNsQbW3Dhk7HuuuMNi0t8pVWpq+nAev
|
||||
Gg6ZAoIBAQC0mMk9nRO7oAGG6/Aqbn8YtEISwKQ2Nk3qUs47vKdZPWvEFi6bOILp
|
||||
70TP4qEFUh6EwhngguGuzZOsoQMvq+fcdXlhcQBYDtxHEpfsVspOZ/s+HWjxbuHh
|
||||
K3bBuj/XYA5f12c2GXYGV2MHm0AQJOX5pYEpyGepxZxLvy5QqRCqlQnrfaxzGycl
|
||||
OpTYepEuFM0rdDhGf/xEmt9OgNHT2AXDTRhizycS39Kmyn8myl+mL2JWPA7uEF6d
|
||||
txVytCWImS45kE3XNz2g3go4sf04QV7QgIKMnb4Wgg/ix4i6JgokC0DwR9mFzBxx
|
||||
ylW+aCqYx35YgrGo77sTt0LZP/KxvJdpAoIBAF7YfhR1wFbW2L8sJ4gAbjPUWOMu
|
||||
JUfE4FhdLcSdqCo+N8uN0qawJxXltBKfjeeoH0CDh9Yv0qqalVdSOKS9BPAa1zJc
|
||||
o2kBcT8AVwoPS5oxa9eDT+7iHPMF4BErB2IGv3yYwpjqSZBJ9TsTu1B6iTf5hOL5
|
||||
9pqcv/LjfcwtWu2XMCVoZj2Q8iYv55l3jJ1ByF/UDVezWajE69avvJkQZrMZmuBw
|
||||
UuHelP/7anRyyelh7RkndZpPCExGmuO7pd5aG25/mBs0i34R1PElAtt8AN36f5Tk
|
||||
1GxIltTNtLk4Mivwp9aZ1vf9s5FAhgPDvfGV5yFoKYmA/65ZlrKx0zlFNng=
|
||||
MIIJKQIBAAKCAgEAqZ7/25xfKpEaBbtjP/m7cUeZEQwDKr2wYrxtheFKKd+5scFt
|
||||
GCRTojLcwje+sPCYBGa03h98xtHzTXSCiCo48H/vI9ykm0IutKAznpZC+d+GLdaP
|
||||
pVva4nCkArFZf15GB4eET58zRPF0WZnGbS/+vtb1Y05cXuoltUNLc1Npd0Rz4R25
|
||||
rFXJcBTnf3hPg+lgebW+klb4Q8jny3WcMVlqYAEcemm6Ef/vwimsxNMLeCCbU6Xt
|
||||
zOyB7d4v/BJAmchTXVfqUPKs9zyDigUMLPGgqbkPiB576uMEoslFenhGlG3zoLlh
|
||||
xipclbjfQCIjZfAOMZVOkBysM4rE7kcsLHIZb0/C2itVhpPgTRgXInaIIcqe9yqw
|
||||
6YlHflHb90zw4OUO6ROi0/rGcRawonb2RGdWHQQ311ITmgk5rZRRWCYjE9vhZz0A
|
||||
nxb+dw+/ZZRmqLxO7eigX/qmEaSJEg6GIUkmeRoRRk1bIEBEWBt/TEp28+G9QkLY
|
||||
O0PHvQyavHfwoSlAn8pifh0kCFxDLtRV65RGxKMhr0crwOXJ1B472Oxj2Nhxhx2k
|
||||
xYYgfakUIQ903SuBuoiBxfbbsYtlfu8GmKe6vTfCh32UYSGWxklkJWjPX54wFMvD
|
||||
QF+luHC/2aQ3/D6VXjIS6JbzvA/KgjWclg/seR7ag36hZBT769rzksWfr+sCAwEA
|
||||
AQKCAgBmZ1W0si1KN5vsRftfjle5xi4E+qmWzjqFAZllsGPj7+veAxbn8laDoA1j
|
||||
O+BmVnqQfalISN498lbfNi3wIv2JRNONZRIDoesspWNEpRb+YBJT7it++3ukJbj+
|
||||
3y9XFAVXWlto7oY3Y0aJKauAE+/KK2CueYqOyvHFA0Gz+HG9zZfgGuATyR76CcTR
|
||||
UkM/MlBKao0JMHRmCA7Y6MJJkOAF4eXdiaMKZufK4vopQfi0p4re71gn1cmDYBa8
|
||||
KhDSRvz9Z6xQ/pGqGeCYHQACykXi8ZUM6sqJPlF4LedCTwbdaZwiNolu5/hJc/lk
|
||||
cLfKPSl0id2KZ6UW4PqPmGx00NXFP/XcCxzzht8ejrI1GY9LXR6fKpmoYZvUoXba
|
||||
SK58l+OcAaxJ7JoTCvH2adas5mhNGyHTTghceNlFPuT+LC7nNq6rJD0QLouDQMr5
|
||||
0my2lJtDiafa+Z3aGt759vkTT7k4wnfWNkjZJDIVf6UkAoMFtN5nOgR36OaDLegA
|
||||
7udascC3hKRUi2BIlc713hl2dlcPVMcCQArpvbwgwPFXiZO9PW+Qc7IWogHqWNWY
|
||||
Ms9JsDcAE5Q5PRlAA8QSveSyl3QNJpeHT9PVx159a28E8xEWCs9nfpI/jXfYxFnr
|
||||
dfS7gn8XW1WNUJvtHsKIhdSRD/4ks6VRPm6KMskR+j+zpTbmcQKCAQEA3CvDiT/E
|
||||
oD2VK9rE0KNDZBljED2p7IVE+zED5olGPUGC3F+WiEl9ldd6DKL6K0Xv/zAEv7Nt
|
||||
hHJ4m3B8siOQf2wzrX6JTvqDhBnrYjsD3VU7Zpys4ZjMOAp/aIM124ZRDECe2do3
|
||||
yzfV+oR0qw9KmyywjMwPa/8LL9d+kwYSQX6Y2hy+5TquDghKCmQzBw0iCDlmWfNP
|
||||
jqfztSc1oBPcij+X98h3EI3Ai7R+hlolWlowXy0qBY8qCWegbguRDFkDhTXDCPwW
|
||||
RMiQobI3xWfhZybSohx42/HUYMi5Uis++CV3XeE/aRdLw/O3gHTz5n9Z3v0i0Xnd
|
||||
KIWxpCKzLzLAVwKCAQEAxTlZHVlNaVz8fsSajAyq3n4LnOxGEwhYspzY7U2tHnbr
|
||||
U1QXTlvGN97u9hMdHgvvPu7OULfeJM0EPNBdQC2B2Y2vkAZBcdw2cgXdzVksv+gO
|
||||
//ryo37xBZXY46prGyPZCrfrrBXHNOHlxY1AklQUu8PnNKU+Z02hirMtY6pm/WyI
|
||||
2fbUJRqQu3nTMiuqFeee+5vaKbWXPRWKjpF/KZxoA4YSymGhG+fVIJVKxWjz1ns/
|
||||
0Kkx/a4D3xWZO+vY9LE24PZzygUfr3/ZsCe8N+UpvZ60h7eJT9DJB1ETgqPFL8zr
|
||||
EhGxoNDLRpm0b1JELAuclCHuHdqQ/uTJB2DjSFpAjQKCAQEAxmNU3R4toan7+Tk2
|
||||
cT07oz3Q6rh1nd70KlefSSLWvKmELeif7owx8kvn+Oz9+PIa8FmnXcli3J59GKsC
|
||||
YU30jSzFYAaN2TGYQfdNBwVgVRbQ4IQ6r0kMc07aQSVB6V4dN6oeuPSNo7rbP9IM
|
||||
gnrT4gEh0KyrFMgKn4BQ2E/3MTbOqnKOfGUkoxZLCRQCes8VpE18cX7xZ/zkd44u
|
||||
HuDmr1fgKnBjAPKJ1hi8jXk7ATAVOB2tKLc4zKKoh6A6geLPbj/kTvs/YZlL4beB
|
||||
04noLBdqYpK/QIimstMLUgQPyG+SIHCvv5UzOw0ng0Ne5opIQ8rajeB+LF5TlC+E
|
||||
P/o+HwKCAQAurZcI2jT3JfngqvmFAg6C4EQxXL5tDMGpbHPvHj5GApFJxJJLim8M
|
||||
lCfsd7Ohg+OY+n48HnhmL1u8ZPhdEygzbFRL+x8MKrl8HSVUz7FGrk62iRdaWNYE
|
||||
o2WU5KW6464f2k3eCb1/J6PxMLBCscHCeuhCzoVJf9cm86dfeloryr9NDx1Attvg
|
||||
c0HoEuuLialYFZf53S+xVmLXwVneaFU52EakPZ0a9LC9qHfs5x0m+z6sTQ824jOq
|
||||
XftJclWD/FlnvwzCmJnaOKE2DwF+HS/W4DQMFwVZramWoLrEZaxq1s4gFa37yM8D
|
||||
o6dP3aGi5xClAq7PxAYjPdTSeTzxx+KVAoIBAQDGwk1/sJW99Oif+7RXvV99l+BL
|
||||
1R0BI1Dgc+aXkXSX4OeWJdLdiGLztrJ/lEzesKEdVHmG+wamexaxWzYgUeKklcAA
|
||||
IPrEawh3qB9gmlWei4BrK+e0cGjPZwq5bQi7gkpsMdxlHYkCmO12DzZ7/4CaGqET
|
||||
+Az0Xa7wjlRbSv62HvKbCm1yMizs8l9k3E8vMo9vU1soyEvR3r/aHzo7KyiXJaio
|
||||
ioppLcx/FVQCkaFQ1/H4dBZCSxviJxQmnOWlTkJT1mH44GLQnv21UsEWUrpz13VK
|
||||
8Dp0zWwNtSKoEQ6YJYl1Nwt04OhUrxG5fStSOpRiQ2r8bUAM0d4qDSjV92Yf
|
||||
-----END RSA PRIVATE KEY-----
|
||||
|
33
tests/fixtures/__init__.py
vendored
@ -1,6 +1,5 @@
|
||||
"""Test data"""
|
||||
from os import path
|
||||
import codecs
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
def patharg(path):
|
||||
@ -9,32 +8,24 @@ def patharg(path):
|
||||
even in Windows paths.
|
||||
|
||||
"""
|
||||
return path.replace('\\', '\\\\\\')
|
||||
return str(path).replace('\\', '\\\\\\')
|
||||
|
||||
|
||||
FIXTURES_ROOT = path.join(path.abspath(path.dirname(__file__)))
|
||||
FILE_PATH = path.join(FIXTURES_ROOT, 'test.txt')
|
||||
JSON_FILE_PATH = path.join(FIXTURES_ROOT, 'test.json')
|
||||
BIN_FILE_PATH = path.join(FIXTURES_ROOT, 'test.bin')
|
||||
|
||||
FIXTURES_ROOT = Path(__file__).parent
|
||||
FILE_PATH = FIXTURES_ROOT / 'test.txt'
|
||||
JSON_FILE_PATH = FIXTURES_ROOT / 'test.json'
|
||||
BIN_FILE_PATH = FIXTURES_ROOT / 'test.bin'
|
||||
|
||||
FILE_PATH_ARG = patharg(FILE_PATH)
|
||||
BIN_FILE_PATH_ARG = patharg(BIN_FILE_PATH)
|
||||
JSON_FILE_PATH_ARG = patharg(JSON_FILE_PATH)
|
||||
|
||||
|
||||
with codecs.open(FILE_PATH, encoding='utf8') as f:
|
||||
# Strip because we don't want new lines in the data so that we can
|
||||
# easily count occurrences also when embedded in JSON (where the new
|
||||
# line would be escaped).
|
||||
FILE_CONTENT = f.read().strip()
|
||||
# Strip because we don't want new lines in the data so that we can
|
||||
# easily count occurrences also when embedded in JSON (where the new
|
||||
# line would be escaped).
|
||||
FILE_CONTENT = FILE_PATH.read_text('utf8').strip()
|
||||
|
||||
|
||||
with codecs.open(JSON_FILE_PATH, encoding='utf8') as f:
|
||||
JSON_FILE_CONTENT = f.read()
|
||||
|
||||
|
||||
with open(BIN_FILE_PATH, 'rb') as f:
|
||||
BIN_FILE_CONTENT = f.read()
|
||||
|
||||
JSON_FILE_CONTENT = JSON_FILE_PATH.read_text('utf8')
|
||||
BIN_FILE_CONTENT = BIN_FILE_PATH.read_bytes()
|
||||
UNICODE = FILE_CONTENT
|
||||
|
@ -3,6 +3,7 @@ import mock
|
||||
import pytest
|
||||
|
||||
from httpie.plugins.builtin import HTTPBasicAuth
|
||||
from httpie.status import ExitStatus
|
||||
from httpie.utils import ExplicitNullAuth
|
||||
from utils import http, add_auth, HTTP_OK, MockEnvironment
|
||||
import httpie.cli.constants
|
||||
@ -78,6 +79,8 @@ def test_missing_auth(httpbin):
|
||||
|
||||
|
||||
def test_netrc(httpbin_both):
|
||||
# This one is handled by requests itself (no --auth or --auth-type present),
|
||||
# that’s why we patch inside `requests.sessions`.
|
||||
with mock.patch('requests.sessions.get_netrc_auth') as get_netrc_auth:
|
||||
get_netrc_auth.return_value = ('httpie', 'password')
|
||||
r = http(httpbin_both + '/basic-auth/httpie/password')
|
||||
@ -86,24 +89,54 @@ def test_netrc(httpbin_both):
|
||||
|
||||
|
||||
def test_ignore_netrc(httpbin_both):
|
||||
with mock.patch('requests.sessions.get_netrc_auth') as get_netrc_auth:
|
||||
with mock.patch('httpie.cli.argparser.get_netrc_auth') as get_netrc_auth:
|
||||
get_netrc_auth.return_value = ('httpie', 'password')
|
||||
r = http('--ignore-netrc', httpbin_both + '/basic-auth/httpie/password')
|
||||
assert get_netrc_auth.call_count == 0
|
||||
assert 'HTTP/1.1 401 UNAUTHORIZED' in r
|
||||
|
||||
|
||||
def test_ignore_netrc_null_auth():
|
||||
args = httpie.cli.definition.parser.parse_args(
|
||||
args=['--ignore-netrc', 'example.org'],
|
||||
env=MockEnvironment(),
|
||||
)
|
||||
assert isinstance(args.auth, ExplicitNullAuth)
|
||||
|
||||
|
||||
def test_ignore_netrc_together_with_auth():
|
||||
args = httpie.cli.definition.parser.parse_args(
|
||||
args=['--ignore-netrc', '--auth=username:password', 'example.org'],
|
||||
env=MockEnvironment(),
|
||||
)
|
||||
assert isinstance(args.auth, HTTPBasicAuth)
|
||||
|
||||
|
||||
def test_ignore_netrc_with_auth_type_resulting_in_missing_auth(httpbin):
|
||||
with mock.patch('httpie.cli.argparser.get_netrc_auth') as get_netrc_auth:
|
||||
get_netrc_auth.return_value = ('httpie', 'password')
|
||||
r = http(
|
||||
'--ignore-netrc',
|
||||
'--auth-type=basic',
|
||||
httpbin + '/basic-auth/httpie/password',
|
||||
tolerate_error_exit_status=True,
|
||||
)
|
||||
assert get_netrc_auth.call_count == 0
|
||||
assert r.exit_status == ExitStatus.ERROR
|
||||
assert '--auth required' in r.stderr
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
argnames=['auth_type', 'endpoint'],
|
||||
argvalues=[
|
||||
('basic', '/basic-auth/httpie/password'),
|
||||
('digest', '/digest-auth/auth/httpie/password'),
|
||||
],
|
||||
)
|
||||
def test_auth_plugin_netrc_parse(auth_type, endpoint, httpbin):
|
||||
# Credentials from ~/.netrc should be picked up and passed to the selected auth plugin.
|
||||
with mock.patch('httpie.cli.argparser.get_netrc_auth') as get_netrc_auth:
|
||||
get_netrc_auth.return_value = ('httpie', 'password')
|
||||
r = http('--auth-type', auth_type, httpbin + endpoint)
|
||||
assert get_netrc_auth.call_count == 1
|
||||
assert HTTP_OK in r
|
||||
|
||||
|
||||
def test_ignore_netrc_null_auth():
|
||||
args = httpie.cli.definition.parser.parse_args(
|
||||
args=['--ignore-netrc', 'example.org'],
|
||||
env=MockEnvironment(),
|
||||
)
|
||||
assert isinstance(args.auth, ExplicitNullAuth)
|
||||
|
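Editor's note: the --ignore-netrc tests above, taken together, describe how credentials end up being resolved: with no auth-related flags the lookup is left to requests itself, --ignore-netrc alone forces an explicit "no auth" sentinel, an explicit --auth always wins, and --auth-type without --auth falls back to ~/.netrc unless netrc is ignored, in which case it is an error. The following is only a minimal sketch of that decision logic in plain Python; resolve_auth and netrc_lookup are made-up names for illustration, not HTTPie's actual API.

from typing import Optional, Tuple


def resolve_auth(
        ignore_netrc: bool,
        auth: Optional[Tuple[str, str]],   # parsed --auth USER:PASS, if any
        auth_type: Optional[str],          # --auth-type, if any
        netrc_lookup=lambda: None,         # stand-in for get_netrc_auth()
):
    """Return a (mode, payload) pair mirroring the outcomes the tests assert."""
    if auth is None and auth_type is not None and not ignore_netrc:
        # No explicit credentials: fall back to ~/.netrc for the chosen plugin.
        auth = netrc_lookup()
    if auth is not None:
        return 'credentials', (auth_type or 'basic', auth)
    if auth_type is not None:
        raise SystemExit('--auth required')
    if ignore_netrc:
        # Like ExplicitNullAuth: make sure requests does not pick up ~/.netrc either.
        return 'explicit-null', None
    return 'requests-netrc', None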
@ -1,9 +1,11 @@
|
||||
from mock import mock
|
||||
|
||||
from httpie.cli.constants import SEPARATOR_CREDENTIALS
|
||||
from httpie.plugins import AuthPlugin, plugin_manager
|
||||
from httpie.plugins import AuthPlugin
|
||||
from httpie.plugins.registry import plugin_manager
|
||||
from utils import http, HTTP_OK
|
||||
|
||||
|
||||
# TODO: run all these tests in session mode as well
|
||||
|
||||
USERNAME = 'user'
|
||||
|
@ -15,7 +15,7 @@ from httpie.cli import constants
|
||||
from httpie.cli.definition import parser
|
||||
from httpie.cli.argtypes import KeyValueArg, KeyValueArgType
|
||||
from httpie.cli.requestitems import RequestItems
|
||||
from utils import HTTP_OK, MockEnvironment, http
|
||||
from utils import HTTP_OK, MockEnvironment, StdinBytesIO, http
|
||||
|
||||
|
||||
class TestItemParsing:
|
||||
@ -312,10 +312,11 @@ class TestNoOptions:
|
||||
class TestStdin:
|
||||
|
||||
def test_ignore_stdin(self, httpbin):
|
||||
with open(FILE_PATH) as f:
|
||||
env = MockEnvironment(stdin=f, stdin_isatty=False)
|
||||
r = http('--ignore-stdin', '--verbose', httpbin.url + '/get',
|
||||
env=env)
|
||||
env = MockEnvironment(
|
||||
stdin=StdinBytesIO(FILE_PATH.read_bytes()),
|
||||
stdin_isatty=False,
|
||||
)
|
||||
r = http('--ignore-stdin', '--verbose', httpbin.url + '/get', env=env)
|
||||
assert HTTP_OK in r
|
||||
assert 'GET /get HTTP' in r, "Don't default to POST."
|
||||
assert FILE_CONTENT not in r, "Don't send stdin data."
|
||||
|
@ -12,7 +12,8 @@ import base64
|
||||
import zlib
|
||||
|
||||
from fixtures import FILE_PATH, FILE_CONTENT
|
||||
from utils import http, HTTP_OK, MockEnvironment
|
||||
from httpie.status import ExitStatus
|
||||
from utils import StdinBytesIO, http, HTTP_OK, MockEnvironment
|
||||
|
||||
|
||||
def assert_decompressed_equal(base64_compressed_data, expected_str):
|
||||
@ -27,6 +28,20 @@ def assert_decompressed_equal(base64_compressed_data, expected_str):
|
||||
assert actual_str == expected_str
|
||||
|
||||
|
||||
def test_cannot_combine_compress_with_chunked(httpbin):
|
||||
r = http('--compress', '--chunked', httpbin.url + '/get',
|
||||
tolerate_error_exit_status=True)
|
||||
assert r.exit_status == ExitStatus.ERROR
|
||||
assert 'cannot combine --compress and --chunked' in r.stderr
|
||||
|
||||
|
||||
def test_cannot_combine_compress_with_multipart(httpbin):
|
||||
r = http('--compress', '--multipart', httpbin.url + '/get',
|
||||
tolerate_error_exit_status=True)
|
||||
assert r.exit_status == ExitStatus.ERROR
|
||||
assert 'cannot combine --compress and --multipart' in r.stderr
|
||||
|
||||
|
||||
def test_compress_skip_negative_ratio(httpbin_both):
|
||||
r = http(
|
||||
'--compress',
|
||||
@ -78,15 +93,17 @@ def test_compress_form(httpbin_both):
|
||||
|
||||
|
||||
def test_compress_stdin(httpbin_both):
|
||||
with open(FILE_PATH) as f:
|
||||
env = MockEnvironment(stdin=f, stdin_isatty=False)
|
||||
r = http(
|
||||
'--compress',
|
||||
'--compress',
|
||||
'PATCH',
|
||||
httpbin_both + '/patch',
|
||||
env=env,
|
||||
)
|
||||
env = MockEnvironment(
|
||||
stdin=StdinBytesIO(FILE_PATH.read_bytes()),
|
||||
stdin_isatty=False,
|
||||
)
|
||||
r = http(
|
||||
'--compress',
|
||||
'--compress',
|
||||
'PATCH',
|
||||
httpbin_both + '/patch',
|
||||
env=env,
|
||||
)
|
||||
assert HTTP_OK in r
|
||||
assert r.json['headers']['Content-Encoding'] == 'deflate'
|
||||
assert_decompressed_equal(r.json['data'], FILE_CONTENT.strip())
|
||||
@ -100,7 +117,7 @@ def test_compress_file(httpbin_both):
|
||||
'--compress',
|
||||
'PUT',
|
||||
httpbin_both + '/put',
|
||||
'file@' + FILE_PATH,
|
||||
f'file@{FILE_PATH}',
|
||||
)
|
||||
assert HTTP_OK in r
|
||||
assert r.json['headers']['Content-Encoding'] == 'deflate'
|
||||
|
@ -1,7 +1,14 @@
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
from _pytest.monkeypatch import MonkeyPatch
|
||||
|
||||
from httpie.compat import is_windows
|
||||
from httpie.config import Config
|
||||
from httpie.config import (
|
||||
Config, DEFAULT_CONFIG_DIRNAME, DEFAULT_RELATIVE_LEGACY_CONFIG_DIR,
|
||||
DEFAULT_RELATIVE_XDG_CONFIG_HOME, DEFAULT_WINDOWS_CONFIG_DIR,
|
||||
ENV_HTTPIE_CONFIG_DIR, ENV_XDG_CONFIG_HOME, get_default_config_dir,
|
||||
)
|
||||
from utils import HTTP_OK, MockEnvironment, http
|
||||
|
||||
|
||||
@ -48,3 +55,50 @@ def test_default_options_overwrite(httpbin):
|
||||
assert r.json['json'] == {
|
||||
"foo": "bar"
|
||||
}
|
||||
|
||||
|
||||
@pytest.mark.skipif(is_windows, reason='XDG_CONFIG_HOME needs *nix')
|
||||
def test_explicit_xdg_config_home(monkeypatch: MonkeyPatch, tmp_path: Path):
|
||||
home_dir = tmp_path
|
||||
monkeypatch.delenv(ENV_HTTPIE_CONFIG_DIR, raising=False)
|
||||
monkeypatch.setenv('HOME', str(home_dir))
|
||||
custom_xdg_config_home = home_dir / 'custom_xdg_config_home'
|
||||
monkeypatch.setenv(ENV_XDG_CONFIG_HOME, str(custom_xdg_config_home))
|
||||
expected_config_dir = custom_xdg_config_home / DEFAULT_CONFIG_DIRNAME
|
||||
assert get_default_config_dir() == expected_config_dir
|
||||
|
||||
|
||||
@pytest.mark.skipif(is_windows, reason='XDG_CONFIG_HOME needs *nix')
|
||||
def test_default_xdg_config_home(monkeypatch: MonkeyPatch, tmp_path: Path):
|
||||
home_dir = tmp_path
|
||||
monkeypatch.delenv(ENV_HTTPIE_CONFIG_DIR, raising=False)
|
||||
monkeypatch.delenv(ENV_XDG_CONFIG_HOME, raising=False)
|
||||
monkeypatch.setenv('HOME', str(home_dir))
|
||||
expected_config_dir = (
|
||||
home_dir
|
||||
/ DEFAULT_RELATIVE_XDG_CONFIG_HOME
|
||||
/ DEFAULT_CONFIG_DIRNAME
|
||||
)
|
||||
assert get_default_config_dir() == expected_config_dir
|
||||
|
||||
|
||||
@pytest.mark.skipif(is_windows, reason='legacy config dir needs *nix')
|
||||
def test_legacy_config_dir(monkeypatch: MonkeyPatch, tmp_path: Path):
|
||||
home_dir = tmp_path
|
||||
monkeypatch.delenv(ENV_HTTPIE_CONFIG_DIR, raising=False)
|
||||
monkeypatch.setenv('HOME', str(home_dir))
|
||||
legacy_config_dir = home_dir / DEFAULT_RELATIVE_LEGACY_CONFIG_DIR
|
||||
legacy_config_dir.mkdir()
|
||||
assert get_default_config_dir() == legacy_config_dir
|
||||
|
||||
|
||||
def test_custom_config_dir(monkeypatch: MonkeyPatch, tmp_path: Path):
|
||||
httpie_config_dir = tmp_path / 'custom/directory'
|
||||
monkeypatch.setenv(ENV_HTTPIE_CONFIG_DIR, str(httpie_config_dir))
|
||||
assert get_default_config_dir() == httpie_config_dir
|
||||
|
||||
|
||||
@pytest.mark.skipif(not is_windows, reason='windows-only')
|
||||
def test_windows_config_dir(monkeypatch: MonkeyPatch):
|
||||
monkeypatch.delenv(ENV_HTTPIE_CONFIG_DIR, raising=False)
|
||||
assert get_default_config_dir() == DEFAULT_WINDOWS_CONFIG_DIR
|
||||
|
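Editor's note: read together, the config-dir tests above pin down a lookup order for the configuration directory: an explicit $HTTPIE_CONFIG_DIR always wins, an already existing legacy ~/.httpie is kept, Windows gets a fixed default, and everything else follows $XDG_CONFIG_HOME (defaulting to ~/.config). A rough, self-contained sketch of that order follows; the constant values are assumptions standing in for the real definitions in httpie/config.py, which this diff does not show.

import os
from pathlib import Path

ENV_HTTPIE_CONFIG_DIR = 'HTTPIE_CONFIG_DIR'
ENV_XDG_CONFIG_HOME = 'XDG_CONFIG_HOME'
DEFAULT_CONFIG_DIRNAME = 'httpie'
DEFAULT_RELATIVE_LEGACY_CONFIG_DIR = Path('.httpie')
DEFAULT_RELATIVE_XDG_CONFIG_HOME = Path('.config')
# Assumed location; the real Windows default may differ.
DEFAULT_WINDOWS_CONFIG_DIR = Path(os.path.expandvars('%APPDATA%')) / DEFAULT_CONFIG_DIRNAME
is_windows = os.name == 'nt'


def get_default_config_dir() -> Path:
    # 1. An explicit override always wins.
    env_config_dir = os.environ.get(ENV_HTTPIE_CONFIG_DIR)
    if env_config_dir:
        return Path(env_config_dir)
    # 2. Keep using a pre-existing legacy ~/.httpie directory.
    legacy_config_dir = Path.home() / DEFAULT_RELATIVE_LEGACY_CONFIG_DIR
    if legacy_config_dir.exists():
        return legacy_config_dir
    # 3. Windows has its own fixed default.
    if is_windows:
        return DEFAULT_WINDOWS_CONFIG_DIR
    # 4. Otherwise follow XDG: $XDG_CONFIG_HOME/httpie, defaulting to ~/.config.
    xdg_config_home = os.environ.get(
        ENV_XDG_CONFIG_HOME,
        str(Path.home() / DEFAULT_RELATIVE_XDG_CONFIG_HOME),
    )
    return Path(xdg_config_home) / DEFAULT_CONFIG_DIRNAME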
@ -2,6 +2,8 @@
|
||||
Tests for the provided defaults regarding HTTP method, and --json vs. --form.
|
||||
|
||||
"""
|
||||
from io import BytesIO
|
||||
|
||||
from httpie.client import JSON_ACCEPT
|
||||
from utils import MockEnvironment, http, HTTP_OK
|
||||
from fixtures import FILE_PATH
|
||||
@ -22,6 +24,7 @@ def test_default_headers_case_insensitive(httpbin):
|
||||
assert 'Content-Type' not in r
|
||||
|
||||
|
||||
# noinspection PyPep8Naming
|
||||
class TestImplicitHTTPMethod:
|
||||
def test_implicit_GET(self, httpbin):
|
||||
r = http(httpbin.url + '/get')
|
||||
@ -43,17 +46,19 @@ class TestImplicitHTTPMethod:
|
||||
assert r.json['form'] == {'foo': 'bar'}
|
||||
|
||||
def test_implicit_POST_stdin(self, httpbin):
|
||||
with open(FILE_PATH) as f:
|
||||
env = MockEnvironment(stdin_isatty=False, stdin=f)
|
||||
r = http('--form', httpbin.url + '/post', env=env)
|
||||
env = MockEnvironment(
|
||||
stdin_isatty=False,
|
||||
stdin=BytesIO(FILE_PATH.read_bytes())
|
||||
)
|
||||
r = http('--form', httpbin.url + '/post', env=env)
|
||||
assert HTTP_OK in r
|
||||
|
||||
|
||||
class TestAutoContentTypeAndAcceptHeaders:
|
||||
"""
|
||||
Test that Accept and Content-Type correctly defaults to JSON,
|
||||
but can still be overridden. The same with Content-Type when --form
|
||||
-f is used.
|
||||
Test that `Accept` and `Content-Type` correctly default to JSON,
|
||||
but can still be overridden. The same with Content-Type when `--form`
|
||||
`-f` is used.
|
||||
|
||||
"""
|
||||
|
||||
@ -84,7 +89,7 @@ class TestAutoContentTypeAndAcceptHeaders:
|
||||
assert r.json['headers']['Accept'] == JSON_ACCEPT
|
||||
assert r.json['headers']['Content-Type'] == 'application/json'
|
||||
|
||||
def test_POST_explicit_JSON_auto_JSON_accept(self, httpbin):
|
||||
def test_POST_explicit_JSON_JSON_ACCEPT(self, httpbin):
|
||||
r = http('--json', 'POST', httpbin.url + '/post')
|
||||
assert HTTP_OK in r
|
||||
assert r.json['headers']['Accept'] == JSON_ACCEPT
|
||||
|
@ -40,13 +40,17 @@ assert filenames
|
||||
|
||||
|
||||
# HACK: hardcoded paths, venv should be irrelevant, etc.
|
||||
# TODO: replaces the process with Python code
|
||||
# TODO: simplify by using the Python API instead of a subprocess
|
||||
# then we won’t need the paths.
|
||||
VENV_BIN = Path(__file__).parent.parent / 'venv/bin'
|
||||
VENV_PYTHON = VENV_BIN / 'python'
|
||||
VENV_RST2PSEUDOXML = VENV_BIN / 'rst2pseudoxml.py'
|
||||
|
||||
|
||||
@pytest.mark.skipif(not os.path.exists(VENV_RST2PSEUDOXML), reason='docutils not installed')
|
||||
@pytest.mark.skipif(
|
||||
not VENV_RST2PSEUDOXML.exists(),
|
||||
reason='docutils not installed',
|
||||
)
|
||||
@pytest.mark.parametrize('filename', filenames)
|
||||
def test_rst_file_syntax(filename):
|
||||
p = subprocess.Popen(
|
||||
|
@ -5,6 +5,7 @@ from urllib.request import urlopen
|
||||
|
||||
import pytest
|
||||
import mock
|
||||
import requests
|
||||
from requests.structures import CaseInsensitiveDict
|
||||
|
||||
from httpie.downloads import (
|
||||
@ -182,10 +183,11 @@ class TestDownloads:
|
||||
# Redirect from `/redirect/1` to `/get`.
|
||||
expected_filename = '1.json'
|
||||
orig_cwd = os.getcwd()
|
||||
os.chdir(tempfile.mkdtemp(prefix='httpie_download_test_'))
|
||||
try:
|
||||
assert os.listdir('.') == []
|
||||
http('--download', httpbin.url + '/redirect/1')
|
||||
assert os.listdir('.') == [expected_filename]
|
||||
finally:
|
||||
os.chdir(orig_cwd)
|
||||
with tempfile.TemporaryDirectory() as tmp_dirname:
|
||||
os.chdir(tmp_dirname)
|
||||
try:
|
||||
assert os.listdir('.') == []
|
||||
http('--download', httpbin.url + '/redirect/1')
|
||||
assert os.listdir('.') == [expected_filename]
|
||||
finally:
|
||||
os.chdir(orig_cwd)
|
||||
|
@ -4,14 +4,13 @@ from unittest import mock
|
||||
|
||||
import pytest
|
||||
|
||||
import httpie
|
||||
import httpie.__main__
|
||||
from fixtures import FILE_CONTENT, FILE_PATH
|
||||
from httpie.cli.exceptions import ParseError
|
||||
from httpie.context import Environment
|
||||
from httpie.status import ExitStatus
|
||||
from httpie.cli.exceptions import ParseError
|
||||
from utils import MockEnvironment, http, HTTP_OK
|
||||
from fixtures import FILE_PATH, FILE_CONTENT
|
||||
|
||||
import httpie
|
||||
from utils import HTTP_OK, MockEnvironment, StdinBytesIO, http
|
||||
|
||||
|
||||
def test_main_entry_point():
|
||||
@ -46,7 +45,6 @@ def test_help():
|
||||
def test_version():
|
||||
r = http('--version', tolerate_error_exit_status=True)
|
||||
assert r.exit_status == ExitStatus.SUCCESS
|
||||
# FIXME: py3 has version in stdout, py2 in stderr
|
||||
assert httpie.__version__ == r.strip()
|
||||
|
||||
|
||||
@ -55,6 +53,25 @@ def test_GET(httpbin_both):
|
||||
assert HTTP_OK in r
|
||||
|
||||
|
||||
def test_path_dot_normalization():
|
||||
r = http(
|
||||
'--offline',
|
||||
'example.org/../../etc/password',
|
||||
'param==value'
|
||||
)
|
||||
assert 'GET /etc/password?param=value' in r
|
||||
|
||||
|
||||
def test_path_as_is():
|
||||
r = http(
|
||||
'--offline',
|
||||
'--path-as-is',
|
||||
'example.org/../../etc/password',
|
||||
'param==value'
|
||||
)
|
||||
assert 'GET /../../etc/password?param=value' in r
|
||||
|
||||
|
||||
def test_DELETE(httpbin_both):
|
||||
r = http('DELETE', httpbin_both + '/delete')
|
||||
assert HTTP_OK in r
|
||||
@ -81,23 +98,48 @@ def test_POST_form(httpbin_both):
|
||||
def test_POST_form_multiple_values(httpbin_both):
|
||||
r = http('--form', 'POST', httpbin_both + '/post', 'foo=bar', 'foo=baz')
|
||||
assert HTTP_OK in r
|
||||
assert r.json['form'] == {'foo': ['bar', 'baz']}
|
||||
assert r.json['form'] == {
|
||||
'foo': ['bar', 'baz']
|
||||
}
|
||||
|
||||
|
||||
def test_POST_stdin(httpbin_both):
|
||||
with open(FILE_PATH) as f:
|
||||
env = MockEnvironment(stdin=f, stdin_isatty=False)
|
||||
r = http('--form', 'POST', httpbin_both + '/post', env=env)
|
||||
env = MockEnvironment(
|
||||
stdin=StdinBytesIO(FILE_PATH.read_bytes()),
|
||||
stdin_isatty=False,
|
||||
)
|
||||
r = http('--form', 'POST', httpbin_both + '/post', env=env)
|
||||
assert HTTP_OK in r
|
||||
assert FILE_CONTENT in r
|
||||
|
||||
|
||||
def test_POST_file(httpbin_both):
|
||||
r = http('--form', 'POST', httpbin_both + '/post', 'file@' + FILE_PATH)
|
||||
r = http('--form', 'POST', httpbin_both + '/post', f'file@{FILE_PATH}')
|
||||
assert HTTP_OK in r
|
||||
assert FILE_CONTENT in r
|
||||
|
||||
|
||||
def test_form_POST_file_redirected_stdin(httpbin):
|
||||
"""
|
||||
<https://github.com/jakubroztocil/httpie/issues/840>
|
||||
|
||||
"""
|
||||
with open(FILE_PATH) as f:
|
||||
r = http(
|
||||
'--form',
|
||||
'POST',
|
||||
httpbin + '/post',
|
||||
f'file@{FILE_PATH}',
|
||||
tolerate_error_exit_status=True,
|
||||
env=MockEnvironment(
|
||||
stdin=StdinBytesIO(FILE_PATH.read_bytes()),
|
||||
stdin_isatty=False,
|
||||
),
|
||||
)
|
||||
assert r.exit_status == ExitStatus.ERROR
|
||||
assert 'cannot be mixed' in r.stderr
|
||||
|
||||
|
||||
def test_headers(httpbin_both):
|
||||
r = http('GET', httpbin_both + '/headers', 'Foo:bar')
|
||||
assert HTTP_OK in r
|
||||
@ -110,7 +152,7 @@ def test_headers_unset(httpbin_both):
|
||||
assert 'Accept' in r.json['headers'] # default Accept present
|
||||
|
||||
r = http('GET', httpbin_both + '/headers', 'Accept:')
|
||||
assert 'Accept' not in r.json['headers'] # default Accept unset
|
||||
assert 'Accept' not in r.json['headers'] # default Accept unset
|
||||
|
||||
|
||||
@pytest.mark.skip('unimplemented')
|
||||
@ -119,7 +161,7 @@ def test_unset_host_header(httpbin_both):
|
||||
assert 'Host' in r.json['headers'] # default Host present
|
||||
|
||||
r = http('GET', httpbin_both + '/headers', 'Host:')
|
||||
assert 'Host' not in r.json['headers'] # default Host unset
|
||||
assert 'Host' not in r.json['headers'] # default Host unset
|
||||
|
||||
|
||||
def test_headers_empty_value(httpbin_both):
|
||||
@ -127,7 +169,7 @@ def test_headers_empty_value(httpbin_both):
|
||||
assert r.json['headers']['Accept'] # default Accept has value
|
||||
|
||||
r = http('GET', httpbin_both + '/headers', 'Accept;')
|
||||
assert r.json['headers']['Accept'] == '' # Accept has no value
|
||||
assert r.json['headers']['Accept'] == '' # Accept has no value
|
||||
|
||||
|
||||
def test_headers_empty_value_with_value_gives_error(httpbin):
|
||||
@ -140,4 +182,4 @@ def test_json_input_preserve_order(httpbin_both):
|
||||
'order:={"map":{"1":"first","2":"second"}}')
|
||||
assert HTTP_OK in r
|
||||
assert r.json['data'] == \
|
||||
'{"order": {"map": {"1": "first", "2": "second"}}}'
|
||||
'{"order": {"map": {"1": "first", "2": "second"}}}'
|
||||
|
75
tests/test_offline.py
Normal file
@ -0,0 +1,75 @@
|
||||
from fixtures import FILE_CONTENT, FILE_PATH_ARG
|
||||
from utils import http
|
||||
|
||||
|
||||
def test_offline():
|
||||
r = http(
|
||||
'--offline',
|
||||
'https://this-should.never-resolve/foo',
|
||||
)
|
||||
assert 'GET /foo' in r
|
||||
|
||||
|
||||
def test_offline_form():
|
||||
r = http(
|
||||
'--offline',
|
||||
'--form',
|
||||
'https://this-should.never-resolve/foo',
|
||||
'foo=bar'
|
||||
)
|
||||
assert 'POST /foo' in r
|
||||
assert 'foo=bar' in r
|
||||
|
||||
|
||||
def test_offline_json():
|
||||
r = http(
|
||||
'--offline',
|
||||
'https://this-should.never-resolve/foo',
|
||||
'foo=bar'
|
||||
)
|
||||
assert 'POST /foo' in r
|
||||
assert r.json == {'foo': 'bar'}
|
||||
|
||||
|
||||
def test_offline_multipart():
|
||||
r = http(
|
||||
'--offline',
|
||||
'--multipart',
|
||||
'https://this-should.never-resolve/foo',
|
||||
'foo=bar'
|
||||
)
|
||||
assert 'POST /foo' in r
|
||||
assert 'name="foo"' in r
|
||||
|
||||
|
||||
def test_offline_from_file():
|
||||
r = http(
|
||||
'--offline',
|
||||
'https://this-should.never-resolve/foo',
|
||||
f'@{FILE_PATH_ARG}'
|
||||
)
|
||||
assert 'POST /foo' in r
|
||||
assert FILE_CONTENT in r
|
||||
|
||||
|
||||
def test_offline_chunked():
|
||||
r = http(
|
||||
'--offline',
|
||||
'--chunked',
|
||||
'--form',
|
||||
'https://this-should.never-resolve/foo',
|
||||
'hello=world'
|
||||
)
|
||||
assert 'POST /foo' in r
|
||||
assert 'Transfer-Encoding: chunked' in r, r
|
||||
assert 'hello=world' in r
|
||||
|
||||
|
||||
def test_offline_download():
|
||||
"""Absence of response should be handled gracefully with --download"""
|
||||
r = http(
|
||||
'--offline',
|
||||
'--download',
|
||||
'https://this-should.never-resolve/foo',
|
||||
)
|
||||
assert 'GET /foo' in r
|
@ -1,12 +1,25 @@
|
||||
import argparse
|
||||
from pathlib import Path
|
||||
|
||||
import mock
|
||||
import json
|
||||
import os
|
||||
import tempfile
|
||||
import io
|
||||
from tempfile import gettempdir
|
||||
from urllib.request import urlopen
|
||||
|
||||
import pytest
|
||||
import requests
|
||||
|
||||
from utils import MockEnvironment, http, HTTP_OK, COLOR, CRLF
|
||||
from httpie.status import ExitStatus
|
||||
from httpie.cli.argtypes import (
|
||||
PARSED_DEFAULT_FORMAT_OPTIONS,
|
||||
parse_format_options,
|
||||
)
|
||||
from httpie.cli.definition import parser
|
||||
from httpie.output.formatters.colors import get_lexer
|
||||
from httpie.status import ExitStatus
|
||||
from utils import COLOR, CRLF, HTTP_OK, MockEnvironment, http
|
||||
|
||||
|
||||
@pytest.mark.parametrize('stdout_isatty', [True, False])
|
||||
@ -25,6 +38,87 @@ def test_output_option(httpbin, stdout_isatty):
|
||||
assert actual_body == expected_body
|
||||
|
||||
|
||||
class TestQuietFlag:
|
||||
|
||||
@pytest.mark.parametrize('argument_name', ['--quiet', '-q'])
|
||||
def test_quiet(self, httpbin, argument_name):
|
||||
env = MockEnvironment(
|
||||
stdin_isatty=True,
|
||||
stdout_isatty=True,
|
||||
devnull=io.BytesIO()
|
||||
)
|
||||
r = http(argument_name, 'GET', httpbin.url + '/get', env=env)
|
||||
assert env.stdout is env.devnull
|
||||
assert env.stderr is env.devnull
|
||||
assert HTTP_OK in r.devnull
|
||||
assert r == ''
|
||||
assert r.stderr == ''
|
||||
|
||||
@mock.patch('httpie.cli.argtypes.AuthCredentials._getpass',
|
||||
new=lambda self, prompt: 'password')
|
||||
def test_quiet_with_password_prompt(self, httpbin):
|
||||
"""
|
||||
Tests whether HTTPie still prompts for a password when the request
|
||||
requires authentication and only a username is provided.
|
||||
|
||||
"""
|
||||
env = MockEnvironment(
|
||||
stdin_isatty=True,
|
||||
stdout_isatty=True,
|
||||
devnull=io.BytesIO()
|
||||
)
|
||||
r = http(
|
||||
'--quiet', '--auth', 'user', 'GET',
|
||||
httpbin.url + '/basic-auth/user/password',
|
||||
env=env
|
||||
)
|
||||
assert env.stdout is env.devnull
|
||||
assert env.stderr is env.devnull
|
||||
assert HTTP_OK in r.devnull
|
||||
assert r == ''
|
||||
assert r.stderr == ''
|
||||
|
||||
@pytest.mark.parametrize('argument_name', ['-h', '-b', '-v', '-p=hH'])
|
||||
def test_quiet_with_explicit_output_options(self, httpbin, argument_name):
|
||||
env = MockEnvironment(stdin_isatty=True, stdout_isatty=True)
|
||||
r = http('--quiet', argument_name, httpbin.url + '/get', env=env)
|
||||
assert env.stdout is env.devnull
|
||||
assert env.stderr is env.devnull
|
||||
assert r == ''
|
||||
assert r.stderr == ''
|
||||
|
||||
@pytest.mark.parametrize('with_download', [True, False])
|
||||
def test_quiet_with_output_redirection(self, httpbin, with_download):
|
||||
url = httpbin + '/robots.txt'
|
||||
output_path = Path('output.txt')
|
||||
env = MockEnvironment()
|
||||
orig_cwd = os.getcwd()
|
||||
output = requests.get(url).text
|
||||
extra_args = ['--download'] if with_download else []
|
||||
with tempfile.TemporaryDirectory() as tmp_dirname:
|
||||
os.chdir(tmp_dirname)
|
||||
try:
|
||||
assert os.listdir('.') == []
|
||||
r = http(
|
||||
'--quiet',
|
||||
'--output', str(output_path),
|
||||
*extra_args,
|
||||
url,
|
||||
env=env
|
||||
)
|
||||
assert os.listdir('.') == [str(output_path)]
|
||||
assert r == ''
|
||||
assert r.stderr == ''
|
||||
assert env.stderr is env.devnull
|
||||
if with_download:
|
||||
assert env.stdout is env.devnull
|
||||
else:
|
||||
assert env.stdout is not env.devnull # --output swaps stdout.
|
||||
assert output_path.read_text() == output
|
||||
finally:
|
||||
os.chdir(orig_cwd)
|
||||
|
||||
|
||||
class TestVerboseFlag:
|
||||
def test_verbose(self, httpbin):
|
||||
r = http('--verbose',
|
||||
@ -58,19 +152,19 @@ class TestColors:
|
||||
@pytest.mark.parametrize(
|
||||
argnames=['mime', 'explicit_json', 'body', 'expected_lexer_name'],
|
||||
argvalues=[
|
||||
('application/json', False, None, 'JSON'),
|
||||
('application/json', False, None, 'JSON'),
|
||||
('application/json+foo', False, None, 'JSON'),
|
||||
('application/foo+json', False, None, 'JSON'),
|
||||
('application/json-foo', False, None, 'JSON'),
|
||||
('application/x-json', False, None, 'JSON'),
|
||||
('foo/json', False, None, 'JSON'),
|
||||
('foo/json+bar', False, None, 'JSON'),
|
||||
('foo/bar+json', False, None, 'JSON'),
|
||||
('foo/json-foo', False, None, 'JSON'),
|
||||
('foo/x-json', False, None, 'JSON'),
|
||||
('application/x-json', False, None, 'JSON'),
|
||||
('foo/json', False, None, 'JSON'),
|
||||
('foo/json+bar', False, None, 'JSON'),
|
||||
('foo/bar+json', False, None, 'JSON'),
|
||||
('foo/json-foo', False, None, 'JSON'),
|
||||
('foo/x-json', False, None, 'JSON'),
|
||||
('application/vnd.comverge.grid+hal+json', False, None, 'JSON'),
|
||||
('text/plain', True, '{}', 'JSON'),
|
||||
('text/plain', True, 'foo', 'Text only'),
|
||||
('text/plain', True, '{}', 'JSON'),
|
||||
('text/plain', True, 'foo', 'Text only'),
|
||||
]
|
||||
)
|
||||
def test_get_lexer(self, mime, explicit_json, body, expected_lexer_name):
|
||||
@ -83,7 +177,7 @@ class TestColors:
|
||||
|
||||
|
||||
class TestPrettyOptions:
|
||||
"""Test the --pretty flag handling."""
|
||||
"""Test the --pretty handling."""
|
||||
|
||||
def test_pretty_enabled_by_default(self, httpbin):
|
||||
env = MockEnvironment(colors=256)
|
||||
@ -138,6 +232,7 @@ class TestLineEndings:
|
||||
and as the headers/body separator.
|
||||
|
||||
"""
|
||||
|
||||
def _validate_crlf(self, msg):
|
||||
lines = iter(msg.splitlines(True))
|
||||
for header in lines:
|
||||
@ -171,3 +266,199 @@ class TestLineEndings:
|
||||
def test_CRLF_formatted_request(self, httpbin):
|
||||
r = http('--pretty=format', '--print=HB', 'GET', httpbin.url + '/get')
|
||||
self._validate_crlf(r)
|
||||
|
||||
|
||||
class TestFormatOptions:
|
||||
def test_header_formatting_options(self):
|
||||
def get_headers(sort):
|
||||
return http(
|
||||
'--offline', '--print=H',
|
||||
'--format-options', 'headers.sort:' + sort,
|
||||
'example.org', 'ZZZ:foo', 'XXX:foo',
|
||||
)
|
||||
|
||||
r_sorted = get_headers('true')
|
||||
r_unsorted = get_headers('false')
|
||||
assert r_sorted != r_unsorted
|
||||
assert f'XXX: foo{CRLF}ZZZ: foo' in r_sorted
|
||||
assert f'ZZZ: foo{CRLF}XXX: foo' in r_unsorted
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
argnames=['options', 'expected_json'],
|
||||
argvalues=[
|
||||
# @formatter:off
|
||||
(
|
||||
'json.sort_keys:true,json.indent:4',
|
||||
json.dumps({'a': 0, 'b': 0}, indent=4),
|
||||
),
|
||||
(
|
||||
'json.sort_keys:false,json.indent:2',
|
||||
json.dumps({'b': 0, 'a': 0}, indent=2),
|
||||
),
|
||||
(
|
||||
'json.format:false',
|
||||
json.dumps({'b': 0, 'a': 0}),
|
||||
),
|
||||
# @formatter:on
|
||||
]
|
||||
)
|
||||
def test_json_formatting_options(self, options: str, expected_json: str):
|
||||
r = http(
|
||||
'--offline', '--print=B',
|
||||
'--format-options', options,
|
||||
'example.org', 'b:=0', 'a:=0',
|
||||
)
|
||||
assert expected_json in r
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
argnames=['defaults', 'options_string', 'expected'],
|
||||
argvalues=[
|
||||
# @formatter:off
|
||||
({'foo': {'bar': 1}}, 'foo.bar:2', {'foo': {'bar': 2}}),
|
||||
({'foo': {'bar': True}}, 'foo.bar:false', {'foo': {'bar': False}}),
|
||||
({'foo': {'bar': 'a'}}, 'foo.bar:b', {'foo': {'bar': 'b'}}),
|
||||
# @formatter:on
|
||||
]
|
||||
)
|
||||
def test_parse_format_options(self, defaults, options_string, expected):
|
||||
actual = parse_format_options(s=options_string, defaults=defaults)
|
||||
assert expected == actual
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
argnames=['options_string', 'expected_error'],
|
||||
argvalues=[
|
||||
('foo:2', 'invalid option'),
|
||||
('foo.baz:2', 'invalid key'),
|
||||
('foo.bar:false', 'expected int got bool'),
|
||||
]
|
||||
)
|
||||
def test_parse_format_options_errors(self, options_string, expected_error):
|
||||
defaults = {
|
||||
'foo': {
|
||||
'bar': 1
|
||||
}
|
||||
}
|
||||
with pytest.raises(argparse.ArgumentTypeError, match=expected_error):
|
||||
parse_format_options(s=options_string, defaults=defaults)
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
argnames=['args', 'expected_format_options'],
|
||||
argvalues=[
|
||||
(
|
||||
[
|
||||
'--format-options',
|
||||
'headers.sort:false,json.sort_keys:false',
|
||||
'--format-options=json.indent:10'
|
||||
],
|
||||
{
|
||||
'headers': {
|
||||
'sort': False
|
||||
},
|
||||
'json': {
|
||||
'sort_keys': False,
|
||||
'indent': 10,
|
||||
'format': True
|
||||
},
|
||||
}
|
||||
),
|
||||
(
|
||||
[
|
||||
'--unsorted'
|
||||
],
|
||||
{
|
||||
'headers': {
|
||||
'sort': False
|
||||
},
|
||||
'json': {
|
||||
'sort_keys': False,
|
||||
'indent': 4,
|
||||
'format': True
|
||||
},
|
||||
}
|
||||
),
|
||||
(
|
||||
[
|
||||
'--format-options=headers.sort:true',
|
||||
'--unsorted',
|
||||
'--format-options=headers.sort:true',
|
||||
],
|
||||
{
|
||||
'headers': {
|
||||
'sort': True
|
||||
},
|
||||
'json': {
|
||||
'sort_keys': False,
|
||||
'indent': 4,
|
||||
'format': True
|
||||
},
|
||||
}
|
||||
),
|
||||
(
|
||||
[
|
||||
'--no-format-options', # --no-<option> anywhere resets
|
||||
'--format-options=headers.sort:true',
|
||||
'--unsorted',
|
||||
'--format-options=headers.sort:true',
|
||||
],
|
||||
PARSED_DEFAULT_FORMAT_OPTIONS,
|
||||
),
|
||||
(
|
||||
[
|
||||
'--format-options=json.indent:2',
|
||||
'--unsorted',
|
||||
'--no-unsorted',
|
||||
],
|
||||
{
|
||||
'headers': {
|
||||
'sort': True
|
||||
},
|
||||
'json': {
|
||||
'sort_keys': True,
|
||||
'indent': 2,
|
||||
'format': True
|
||||
},
|
||||
}
|
||||
),
|
||||
(
|
||||
[
|
||||
'--format-options=json.indent:2',
|
||||
'--unsorted',
|
||||
'--sorted',
|
||||
],
|
||||
{
|
||||
'headers': {
|
||||
'sort': True
|
||||
},
|
||||
'json': {
|
||||
'sort_keys': True,
|
||||
'indent': 2,
|
||||
'format': True
|
||||
},
|
||||
}
|
||||
),
|
||||
(
|
||||
[
|
||||
'--format-options=json.indent:2',
|
||||
'--sorted',
|
||||
'--no-sorted',
|
||||
'--no-unsorted',
|
||||
],
|
||||
{
|
||||
'headers': {
|
||||
'sort': True
|
||||
},
|
||||
'json': {
|
||||
'sort_keys': True,
|
||||
'indent': 2,
|
||||
'format': True
|
||||
},
|
||||
}
|
||||
),
|
||||
],
|
||||
)
|
||||
def test_format_options_accumulation(self, args, expected_format_options):
|
||||
parsed_args = parser.parse_args(
|
||||
args=[*args, 'example.org'],
|
||||
env=MockEnvironment(),
|
||||
)
|
||||
assert parsed_args.format_options == expected_format_options
|
||||
|
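Editor's note: the parametrized cases above fully describe how a --format-options string is parsed and validated: each comma-separated item is section.key:value, the key must exist in the defaults, and the value is coerced to the type of the default it overrides, producing the error messages the tests match on. A simplified sketch consistent with those cases (the real parser lives in httpie/cli/argtypes.py and is not shown here):

import argparse


def parse_format_options(s: str, defaults: dict) -> dict:
    """Parse 'section.key:value[,...]' into a nested dict of format options."""
    options = {section: dict(values) for section, values in (defaults or {}).items()}
    for item in s.split(','):
        path, _, raw_value = item.partition(':')
        section, _, key = path.partition('.')
        if not key:
            raise argparse.ArgumentTypeError(f'invalid option {item!r}')
        if section not in options or key not in options[section]:
            raise argparse.ArgumentTypeError(f'invalid key {path!r}')
        default = options[section][key]
        # Coerce the textual value to the type of the default it overrides.
        if isinstance(default, bool):
            value = raw_value == 'true'
        elif isinstance(default, int):
            if raw_value in ('true', 'false'):
                raise argparse.ArgumentTypeError(f'expected int got bool for {path!r}')
            value = int(raw_value)
        else:
            value = raw_value
        options[section][key] = value
    return options

For example, parse_format_options('json.indent:2', {'json': {'indent': 4, 'sort_keys': True}}) returns {'json': {'indent': 2, 'sort_keys': True}}. Note this sketch covers only single-string parsing, not the flag accumulation exercised by test_format_options_accumulation.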
@ -1,14 +1,21 @@
|
||||
# coding=utf-8
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
from datetime import datetime
|
||||
from mock import mock
|
||||
from tempfile import gettempdir
|
||||
|
||||
import pytest
|
||||
|
||||
from httpie.plugins.builtin import HTTPBasicAuth
|
||||
from utils import MockEnvironment, mk_config_dir, http, HTTP_OK
|
||||
from fixtures import UNICODE
|
||||
from httpie.plugins import AuthPlugin
|
||||
from httpie.plugins.builtin import HTTPBasicAuth
|
||||
from httpie.plugins.registry import plugin_manager
|
||||
from httpie.sessions import Session
|
||||
from httpie.utils import get_expired_cookies
|
||||
from tests.test_auth_plugins import basic_auth
|
||||
from utils import HTTP_OK, MockEnvironment, http, mk_config_dir
|
||||
|
||||
|
||||
class SessionTestBase:
|
||||
@ -32,6 +39,27 @@ class SessionTestBase:
|
||||
return MockEnvironment(config_dir=self.config_dir)
|
||||
|
||||
|
||||
class CookieTestBase:
|
||||
def setup_method(self, method):
|
||||
self.config_dir = mk_config_dir()
|
||||
|
||||
orig_session = {
|
||||
'cookies': {
|
||||
'cookie1': {
|
||||
'value': 'foo',
|
||||
},
|
||||
'cookie2': {
|
||||
'value': 'foo',
|
||||
}
|
||||
}
|
||||
}
|
||||
self.session_path = self.config_dir / 'test-session.json'
|
||||
self.session_path.write_text(json.dumps(orig_session))
|
||||
|
||||
def teardown_method(self, method):
|
||||
shutil.rmtree(self.config_dir)
|
||||
|
||||
|
||||
class TestSessionFlow(SessionTestBase):
|
||||
"""
|
||||
These tests start with an existing session created in `setup_method()`.
|
||||
@ -186,3 +214,263 @@ class TestSession(SessionTestBase):
|
||||
httpbin.url + '/get', env=self.env())
|
||||
finally:
|
||||
os.chdir(cwd)
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
argnames=['auth_require_param', 'auth_parse_param'],
|
||||
argvalues=[
|
||||
(False, False),
|
||||
(False, True),
|
||||
(True, False)
|
||||
]
|
||||
)
|
||||
def test_auth_type_reused_in_session(self, auth_require_param, auth_parse_param, httpbin):
|
||||
self.start_session(httpbin)
|
||||
session_path = self.config_dir / 'test-session.json'
|
||||
|
||||
header = 'Custom dXNlcjpwYXNzd29yZA'
|
||||
|
||||
class Plugin(AuthPlugin):
|
||||
auth_type = 'test-reused'
|
||||
auth_require = auth_require_param
|
||||
auth_parse = auth_parse_param
|
||||
|
||||
def get_auth(self, username=None, password=None):
|
||||
return basic_auth(header=f'{header}==')
|
||||
|
||||
plugin_manager.register(Plugin)
|
||||
|
||||
r1 = http(
|
||||
'--session', str(session_path),
|
||||
httpbin + '/basic-auth/user/password',
|
||||
'--auth-type',
|
||||
Plugin.auth_type,
|
||||
'--auth', 'user:password',
|
||||
'--print=H',
|
||||
)
|
||||
|
||||
r2 = http(
|
||||
'--session', str(session_path),
|
||||
httpbin + '/basic-auth/user/password',
|
||||
'--print=H',
|
||||
)
|
||||
assert f'Authorization: {header}' in r1
|
||||
assert f'Authorization: {header}' in r2
|
||||
plugin_manager.unregister(Plugin)
|
||||
|
||||
@mock.patch('httpie.cli.argtypes.AuthCredentials._getpass',
|
||||
new=lambda self, prompt: 'password')
|
||||
def test_auth_plugin_prompt_password_in_session(self, httpbin):
|
||||
self.start_session(httpbin)
|
||||
session_path = self.config_dir / 'test-session.json'
|
||||
|
||||
class Plugin(AuthPlugin):
|
||||
auth_type = 'test-prompted'
|
||||
|
||||
def get_auth(self, username=None, password=None):
|
||||
return basic_auth()
|
||||
|
||||
plugin_manager.register(Plugin)
|
||||
|
||||
r1 = http(
|
||||
'--session', str(session_path),
|
||||
httpbin + '/basic-auth/user/password',
|
||||
'--auth-type',
|
||||
Plugin.auth_type,
|
||||
'--auth', 'user:',
|
||||
)
|
||||
|
||||
r2 = http(
|
||||
'--session', str(session_path),
|
||||
httpbin + '/basic-auth/user/password',
|
||||
)
|
||||
assert HTTP_OK in r1
|
||||
assert HTTP_OK in r2
|
||||
plugin_manager.unregister(Plugin)
|
||||
|
||||
def test_auth_type_stored_in_session_file(self, httpbin):
|
||||
self.config_dir = mk_config_dir()
|
||||
self.session_path = self.config_dir / 'test-session.json'
|
||||
|
||||
class Plugin(AuthPlugin):
|
||||
auth_type = 'test-saved'
|
||||
auth_require = True
|
||||
|
||||
def get_auth(self, username=None, password=None):
|
||||
return basic_auth()
|
||||
|
||||
plugin_manager.register(Plugin)
|
||||
http('--session', str(self.session_path),
|
||||
httpbin + '/basic-auth/user/password',
|
||||
'--auth-type',
|
||||
Plugin.auth_type,
|
||||
'--auth', 'user:password',
|
||||
)
|
||||
updated_session = json.loads(self.session_path.read_text())
|
||||
assert updated_session['auth']['type'] == 'test-saved'
|
||||
assert updated_session['auth']['raw_auth'] == "user:password"
|
||||
plugin_manager.unregister(Plugin)
|
||||
|
||||
|
||||
class TestExpiredCookies(CookieTestBase):
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
argnames=['initial_cookie', 'expired_cookie'],
|
||||
argvalues=[
|
||||
({'id': {'value': 123}}, 'id'),
|
||||
({'id': {'value': 123}}, 'token')
|
||||
]
|
||||
)
|
||||
def test_removes_expired_cookies_from_session_obj(self, initial_cookie, expired_cookie, httpbin):
|
||||
session = Session(self.config_dir)
|
||||
session['cookies'] = initial_cookie
|
||||
session.remove_cookies([expired_cookie])
|
||||
assert expired_cookie not in session.cookies
|
||||
|
||||
def test_expired_cookies(self, httpbin):
|
||||
r = http(
|
||||
'--session', str(self.session_path),
|
||||
'--print=H',
|
||||
httpbin.url + '/cookies/delete?cookie2',
|
||||
)
|
||||
assert 'Cookie: cookie1=foo; cookie2=foo' in r
|
||||
|
||||
updated_session = json.loads(self.session_path.read_text())
|
||||
assert 'cookie1' in updated_session['cookies']
|
||||
assert 'cookie2' not in updated_session['cookies']
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
argnames=['headers', 'now', 'expected_expired'],
|
||||
argvalues=[
|
||||
(
|
||||
[
|
||||
('Set-Cookie', 'hello=world; Path=/; Expires=Thu, 01-Jan-1970 00:00:00 GMT; HttpOnly'),
|
||||
('Connection', 'keep-alive')
|
||||
],
|
||||
None,
|
||||
[
|
||||
{
|
||||
'name': 'hello',
|
||||
'path': '/'
|
||||
}
|
||||
]
|
||||
),
|
||||
(
|
||||
[
|
||||
('Set-Cookie', 'hello=world; Path=/; Expires=Thu, 01-Jan-1970 00:00:00 GMT; HttpOnly'),
|
||||
('Set-Cookie', 'pea=pod; Path=/ab; Expires=Thu, 01-Jan-1970 00:00:00 GMT; HttpOnly'),
|
||||
('Connection', 'keep-alive')
|
||||
],
|
||||
None,
|
||||
[
|
||||
{'name': 'hello', 'path': '/'},
|
||||
{'name': 'pea', 'path': '/ab'}
|
||||
]
|
||||
),
|
||||
(
|
||||
# Check that an Expires date in an invalid format is gracefully ignored.
|
||||
# <https://github.com/httpie/httpie/issues/963>
|
||||
[
|
||||
('Set-Cookie', 'pfg=; Expires=Sat, 19-Sep-2020 06:58:14 GMT+0000; Max-Age=0; path=/; domain=.tumblr.com; secure; HttpOnly'),
|
||||
],
|
||||
None,
|
||||
[]
|
||||
),
|
||||
(
|
||||
[
|
||||
('Set-Cookie', 'hello=world; Path=/; Expires=Fri, 12 Jun 2020 12:28:55 GMT; HttpOnly'),
|
||||
('Connection', 'keep-alive')
|
||||
],
|
||||
datetime(2020, 6, 11).timestamp(),
|
||||
[]
|
||||
),
|
||||
]
|
||||
)
|
||||
def test_get_expired_cookies_manages_multiple_cookie_headers(self, headers, now, expected_expired):
|
||||
assert get_expired_cookies(headers, now=now) == expected_expired
|
||||
|
||||
|
||||
class TestCookieStorage(CookieTestBase):
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
argnames=['new_cookies', 'new_cookies_dict', 'expected'],
|
||||
argvalues=[(
|
||||
'new=bar',
|
||||
{'new': 'bar'},
|
||||
'cookie1=foo; cookie2=foo; new=bar'
|
||||
),
|
||||
(
|
||||
'new=bar;chocolate=milk',
|
||||
{'new': 'bar', 'chocolate': 'milk'},
|
||||
'chocolate=milk; cookie1=foo; cookie2=foo; new=bar'
|
||||
),
|
||||
(
|
||||
'new=bar; chocolate=milk',
|
||||
{'new': 'bar', 'chocolate': 'milk'},
|
||||
'chocolate=milk; cookie1=foo; cookie2=foo; new=bar'
|
||||
),
|
||||
(
|
||||
'new=bar;; chocolate=milk;;;',
|
||||
{'new': 'bar', 'chocolate': 'milk'},
|
||||
'cookie1=foo; cookie2=foo; new=bar'
|
||||
),
|
||||
(
|
||||
'new=bar; chocolate=milk;;;',
|
||||
{'new': 'bar', 'chocolate': 'milk'},
|
||||
'chocolate=milk; cookie1=foo; cookie2=foo; new=bar'
|
||||
)
|
||||
]
|
||||
)
|
||||
def test_existing_and_new_cookies_sent_in_request(self, new_cookies, new_cookies_dict, expected, httpbin):
|
||||
r = http(
|
||||
'--session', str(self.session_path),
|
||||
'--print=H',
|
||||
httpbin.url,
|
||||
'Cookie:' + new_cookies,
|
||||
)
|
||||
# Note: cookies in response are in alphabetical order
|
||||
assert 'Cookie: ' + expected in r
|
||||
|
||||
updated_session = json.loads(self.session_path.read_text())
|
||||
        for name, value in new_cookies_dict.items():
            assert updated_session['cookies'][name]['value'] == value
        assert 'Cookie' not in updated_session['headers']

    @pytest.mark.parametrize(
        argnames=['cli_cookie', 'set_cookie', 'expected'],
        argvalues=[
            (
                '',
                '/cookies/set/cookie1/bar',
                'bar'
            ),
            (
                'cookie1=not_foo',
                '/cookies/set/cookie1/bar',
                'bar'
            ),
            (
                'cookie1=not_foo',
                '',
                'not_foo'
            ),
            (
                '',
                '',
                'foo'
            )
        ]
    )
    def test_cookie_storage_priority(self, cli_cookie, set_cookie, expected, httpbin):
        """
        Expected order of priority for cookie storage in session file:
        1. set-cookie (from server)
        2. command line arg
        3. cookie already stored in session file
        """
        r = http(
            '--session', str(self.session_path),
            httpbin.url + set_cookie,
            'Cookie:' + cli_cookie,
        )
        updated_session = json.loads(self.session_path.read_text())

        assert updated_session['cookies']['cookie1']['value'] == expected
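As an aside for readers of this diff, the assertions above imply a very small contract for the session file. The sketch below is illustrative only: the helper name and example path are made up, and the only schema detail assumed beyond the assertions is that each cookie entry is a JSON object carrying at least a 'value' key.

    # Sketch: reading a stored cookie value back out of a session file.
    import json
    from pathlib import Path

    def stored_cookie_value(session_path: Path, name: str) -> str:
        # Session files are plain JSON; cookies live under the 'cookies' key.
        session = json.loads(session_path.read_text())
        return session['cookies'][name]['value']

    # Per the documented priority, a server Set-Cookie beats a CLI 'Cookie:' header,
    # which in turn beats whatever value the session file already held.
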
@@ -1,11 +1,11 @@
import os

import pytest
import pytest_httpbin.certs
import requests.exceptions
import ssl
import urllib3

from httpie.ssl import AVAILABLE_SSL_VERSION_ARG_MAPPING, DEFAULT_SSL_CIPHERS
from httpie.status import ExitStatus
from httpie.cli.constants import SSL_VERSION_ARG_MAPPING
from utils import HTTP_OK, TESTS_ROOT, http


@@ -23,18 +23,20 @@ except ImportError:
    requests.exceptions.SSLError,
)

CERTS_ROOT = TESTS_ROOT / 'client_certs'
CLIENT_CERT = str(CERTS_ROOT / 'client.crt')
CLIENT_KEY = str(CERTS_ROOT / 'client.key')
CLIENT_PEM = str(CERTS_ROOT / 'client.pem')


CLIENT_CERT = os.path.join(TESTS_ROOT, 'client_certs', 'client.crt')
CLIENT_KEY = os.path.join(TESTS_ROOT, 'client_certs', 'client.key')
CLIENT_PEM = os.path.join(TESTS_ROOT, 'client_certs', 'client.pem')
# FIXME:
# We test against a local httpbin instance which uses a self-signed cert.
# Requests without --verify=<CA_BUNDLE> will fail with a verification error.
# See: https://github.com/kevin1024/pytest-httpbin#https-support
CA_BUNDLE = pytest_httpbin.certs.where()
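To make the FIXME above concrete, a verification-enabled request in these tests has to point --verify at the pytest-httpbin bundle. The snippet below is a sketch under that assumption (the /get path and the test name are just examples), not a line from the diff:

    # Sketch: verifying the local self-signed httpbin server with the bundled CA.
    def test_verify_with_local_ca_bundle(httpbin_secure):
        r = http(httpbin_secure.url + '/get', '--verify', CA_BUNDLE)
        assert HTTP_OK in r
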
@pytest.mark.parametrize('ssl_version', SSL_VERSION_ARG_MAPPING.keys())
@pytest.mark.parametrize('ssl_version',
                         AVAILABLE_SSL_VERSION_ARG_MAPPING.keys())
def test_ssl_version(httpbin_secure, ssl_version):
    try:
        r = http(
@@ -46,6 +48,12 @@ def test_ssl_version(httpbin_secure, ssl_version):
        if ssl_version == 'ssl3':
            # pytest-httpbin doesn't support ssl3
            pass
        elif e.__context__ is not None:  # Check if root cause was an unsupported TLS version
            root = e.__context__
            while root.__context__ is not None:
                root = root.__context__
            if isinstance(root, ssl.SSLError) and root.reason == "TLSV1_ALERT_PROTOCOL_VERSION":
                pytest.skip("Unsupported TLS version: {}".format(ssl_version))
        else:
            raise

@@ -84,11 +92,14 @@ class TestClientCert:
class TestServerCert:

    def test_verify_no_OK(self, httpbin_secure):
        # Avoid warnings when explicitly testing insecure requests
        urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
        r = http(httpbin_secure.url + '/get', '--verify=no')
        assert HTTP_OK in r

    @pytest.mark.parametrize('verify_value', ['false', 'fALse'])
    def test_verify_false_OK(self, httpbin_secure, verify_value):
        urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
        r = http(httpbin_secure.url + '/get', '--verify', verify_value)
        assert HTTP_OK in r

@@ -113,3 +124,28 @@ class TestServerCert:
    def test_verify_custom_ca_bundle_invalid_bundle(self, httpbin_secure):
        with pytest.raises(ssl_errors):
            http(httpbin_secure.url + '/get', '--verify', __file__)


def test_ciphers(httpbin_secure):
    r = http(
        httpbin_secure.url + '/get',
        '--ciphers',
        DEFAULT_SSL_CIPHERS,
    )
    assert HTTP_OK in r


def test_ciphers_none_can_be_selected(httpbin_secure):
    r = http(
        httpbin_secure.url + '/get',
        '--ciphers',
        '__FOO__',
        tolerate_error_exit_status=True,
    )
    assert r.exit_status == ExitStatus.ERROR
    # Linux/macOS:
    # http: error: SSLError: ('No cipher can be selected.',)
    # OpenBSD:
    # <https://marc.info/?l=openbsd-ports&m=159251948515635&w=2>
    # http: error: Error: [('SSL routines', '(UNKNOWN)SSL_internal', 'no cipher match')]
    assert 'cipher' in r.stderr
@@ -2,7 +2,7 @@ import pytest

from httpie.compat import is_windows
from httpie.output.streams import BINARY_SUPPRESSED_NOTICE
from utils import http, MockEnvironment
from utils import StdinBytesIO, http, MockEnvironment
from fixtures import BIN_FILE_CONTENT, BIN_FILE_PATH


@@ -13,32 +13,37 @@ from fixtures import BIN_FILE_CONTENT, BIN_FILE_PATH
                    reason='Pretty redirect not supported under Windows')
def test_pretty_redirected_stream(httpbin):
    """Test that --stream works with prettified redirected output."""
    with open(BIN_FILE_PATH, 'rb') as f:
        env = MockEnvironment(colors=256, stdin=f,
                              stdin_isatty=False,
                              stdout_isatty=False)
        r = http('--verbose', '--pretty=all', '--stream', 'GET',
                 httpbin.url + '/get', env=env)
    env = MockEnvironment(
        colors=256,
        stdin=StdinBytesIO(BIN_FILE_PATH.read_bytes()),
        stdin_isatty=False,
        stdout_isatty=False,
    )
    r = http('--verbose', '--pretty=all', '--stream', 'GET',
             httpbin.url + '/get', env=env)
    assert BINARY_SUPPRESSED_NOTICE.decode() in r


def test_encoded_stream(httpbin):
    """Test that --stream works with non-prettified
    redirected terminal output."""
    with open(BIN_FILE_PATH, 'rb') as f:
        env = MockEnvironment(stdin=f, stdin_isatty=False)
        r = http('--pretty=none', '--stream', '--verbose', 'GET',
                 httpbin.url + '/get', env=env)
    env = MockEnvironment(
        stdin=StdinBytesIO(BIN_FILE_PATH.read_bytes()),
        stdin_isatty=False,
    )
    r = http('--pretty=none', '--stream', '--verbose', 'GET',
             httpbin.url + '/get', env=env)
    assert BINARY_SUPPRESSED_NOTICE.decode() in r


def test_redirected_stream(httpbin):
    """Test that --stream works with non-prettified
    redirected terminal output."""
    with open(BIN_FILE_PATH, 'rb') as f:
        env = MockEnvironment(stdout_isatty=False,
                              stdin_isatty=False,
                              stdin=f)
        r = http('--pretty=none', '--stream', '--verbose', 'GET',
                 httpbin.url + '/get', env=env)
    env = MockEnvironment(
        stdout_isatty=False,
        stdin_isatty=False,
        stdin=StdinBytesIO(BIN_FILE_PATH.read_bytes()),
    )
    r = http('--pretty=none', '--stream', '--verbose', 'GET',
             httpbin.url + '/get', env=env)
    assert BIN_FILE_CONTENT in r
@@ -3,10 +3,55 @@ import os
import pytest

from httpie.cli.exceptions import ParseError
from utils import MockEnvironment, http, HTTP_OK
from httpie.client import FORM_CONTENT_TYPE
from httpie.status import ExitStatus
from utils import (
    HTTPBIN_WITH_CHUNKED_SUPPORT, MockEnvironment, StdinBytesIO, http,
    HTTP_OK,
)
from fixtures import FILE_PATH_ARG, FILE_PATH, FILE_CONTENT


def test_chunked_json():
    r = http(
        '--verbose',
        '--chunked',
        HTTPBIN_WITH_CHUNKED_SUPPORT + '/post',
        'hello=world',
    )
    assert HTTP_OK in r
    assert 'Transfer-Encoding: chunked' in r
    assert r.count('hello') == 3


def test_chunked_form():
    r = http(
        '--verbose',
        '--chunked',
        '--form',
        HTTPBIN_WITH_CHUNKED_SUPPORT + '/post',
        'hello=world',
    )
    assert HTTP_OK in r
    assert 'Transfer-Encoding: chunked' in r
    assert r.count('hello') == 2


def test_chunked_stdin():
    r = http(
        '--verbose',
        '--chunked',
        HTTPBIN_WITH_CHUNKED_SUPPORT + '/post',
        env=MockEnvironment(
            stdin=StdinBytesIO(FILE_PATH.read_bytes()),
            stdin_isatty=False,
        )
    )
    assert HTTP_OK in r
    assert 'Transfer-Encoding: chunked' in r
    assert r.count(FILE_CONTENT) == 2


class TestMultipartFormDataFileUpload:

    def test_non_existent_file_raises_parse_error(self, httpbin):
@@ -16,27 +61,142 @@ class TestMultipartFormDataFileUpload:

    def test_upload_ok(self, httpbin):
        r = http('--form', '--verbose', 'POST', httpbin.url + '/post',
                 'test-file@%s' % FILE_PATH_ARG, 'foo=bar')
                 f'test-file@{FILE_PATH_ARG}', 'foo=bar')
        assert HTTP_OK in r
        assert 'Content-Disposition: form-data; name="foo"' in r
        assert 'Content-Disposition: form-data; name="test-file";' \
               ' filename="%s"' % os.path.basename(FILE_PATH) in r
               f' filename="{os.path.basename(FILE_PATH)}"' in r
        assert FILE_CONTENT in r
        assert '"foo": "bar"' in r
        assert 'Content-Type: text/plain' in r

    def test_upload_multiple_fields_with_the_same_name(self, httpbin):
        r = http('--form', '--verbose', 'POST', httpbin.url + '/post',
                 'test-file@%s' % FILE_PATH_ARG,
                 'test-file@%s' % FILE_PATH_ARG)
                 f'test-file@{FILE_PATH_ARG}',
                 f'test-file@{FILE_PATH_ARG}')
        assert HTTP_OK in r
        assert r.count('Content-Disposition: form-data; name="test-file";'
                       ' filename="%s"' % os.path.basename(FILE_PATH)) == 2
                       f' filename="{os.path.basename(FILE_PATH)}"') == 2
        # Should be 4, but is 3 because httpbin
        # doesn't seem to support file field lists
        assert r.count(FILE_CONTENT) in [3, 4]
        assert r.count('Content-Type: text/plain') == 2

    def test_upload_custom_content_type(self, httpbin):
        r = http(
            '--form',
            '--verbose',
            httpbin.url + '/post',
            f'test-file@{FILE_PATH_ARG};type=image/vnd.microsoft.icon'
        )
        assert HTTP_OK in r
        # Content type is stripped from the filename
        assert 'Content-Disposition: form-data; name="test-file";' \
               f' filename="{os.path.basename(FILE_PATH)}"' in r
        assert r.count(FILE_CONTENT) == 2
        assert 'Content-Type: image/vnd.microsoft.icon' in r

    def test_form_no_files_urlencoded(self, httpbin):
        r = http(
            '--form',
            '--verbose',
            httpbin.url + '/post',
            'AAAA=AAA',
            'BBB=BBB',
        )
        assert HTTP_OK in r
        assert FORM_CONTENT_TYPE in r

    def test_multipart(self, httpbin):
        r = http(
            '--verbose',
            '--multipart',
            httpbin.url + '/post',
            'AAAA=AAA',
            'BBB=BBB',
        )
        assert HTTP_OK in r
        assert FORM_CONTENT_TYPE not in r
        assert 'multipart/form-data' in r

    def test_form_multipart_custom_boundary(self, httpbin):
        boundary = 'HTTPIE_FTW'
        r = http(
            '--print=HB',
            '--check-status',
            '--multipart',
            f'--boundary={boundary}',
            httpbin.url + '/post',
            'AAAA=AAA',
            'BBB=BBB',
        )
        assert f'multipart/form-data; boundary={boundary}' in r
        assert r.count(boundary) == 4

    def test_multipart_custom_content_type_boundary_added(self, httpbin):
        boundary = 'HTTPIE_FTW'
        r = http(
            '--print=HB',
            '--check-status',
            '--multipart',
            f'--boundary={boundary}',
            httpbin.url + '/post',
            'Content-Type: multipart/magic',
            'AAAA=AAA',
            'BBB=BBB',
        )
        assert f'multipart/magic; boundary={boundary}' in r
        assert r.count(boundary) == 4

    def test_multipart_custom_content_type_boundary_preserved(self, httpbin):
        # Allow explicit nonsense requests.
        boundary_in_header = 'HEADER_BOUNDARY'
        boundary_in_body = 'BODY_BOUNDARY'
        r = http(
            '--print=HB',
            '--check-status',
            '--multipart',
            f'--boundary={boundary_in_body}',
            httpbin.url + '/post',
            f'Content-Type: multipart/magic; boundary={boundary_in_header}',
            'AAAA=AAA',
            'BBB=BBB',
        )
        assert f'multipart/magic; boundary={boundary_in_header}' in r
        assert r.count(boundary_in_body) == 3

    def test_multipart_chunked(self, httpbin):
        r = http(
            '--verbose',
            '--multipart',
            '--chunked',
            HTTPBIN_WITH_CHUNKED_SUPPORT + '/post',
            'AAA=AAA',
        )
        assert 'Transfer-Encoding: chunked' in r
        assert 'multipart/form-data' in r
        assert 'name="AAA"' in r  # in request
        assert '"AAA": "AAA"' in r  # in response

    def test_multipart_preserve_order(self, httpbin):
        r = http(
            '--form',
            '--offline',
            httpbin + '/post',
            'text_field=foo',
            f'file_field@{FILE_PATH_ARG}',
        )
        assert r.index('text_field') < r.index('file_field')

        r = http(
            '--form',
            '--offline',
            httpbin + '/post',
            f'file_field@{FILE_PATH_ARG}',
            'text_field=foo',
        )
        assert r.index('text_field') > r.index('file_field')


class TestRequestBodyFromFilePath:
    """
@@ -45,12 +205,26 @@ class TestRequestBodyFromFilePath:
    """

    def test_request_body_from_file_by_path(self, httpbin):
        r = http('--verbose',
                 'POST', httpbin.url + '/post', '@' + FILE_PATH_ARG)
        r = http(
            '--verbose',
            'POST', httpbin.url + '/post',
            '@' + FILE_PATH_ARG,
        )
        assert HTTP_OK in r
        assert FILE_CONTENT in r, r
        assert r.count(FILE_CONTENT) == 2
        assert '"Content-Type": "text/plain"' in r

    def test_request_body_from_file_by_path_chunked(self, httpbin):
        r = http(
            '--verbose', '--chunked',
            HTTPBIN_WITH_CHUNKED_SUPPORT + '/post',
            '@' + FILE_PATH_ARG,
        )
        assert HTTP_OK in r
        assert 'Transfer-Encoding: chunked' in r
        assert '"Content-Type": "text/plain"' in r
        assert r.count(FILE_CONTENT) == 2

    def test_request_body_from_file_by_path_with_explicit_content_type(
            self, httpbin):
        r = http('--verbose',
@@ -77,4 +251,5 @@ class TestRequestBodyFromFilePath:
            env=env,
            tolerate_error_exit_status=True,
        )
        assert r.exit_status == ExitStatus.ERROR
        assert 'cannot be mixed' in r.stderr
@@ -1,10 +1,10 @@
# coding=utf-8
"""Utilities for HTTPie test suite."""
import os
import sys
import time
import json
import tempfile
from io import BytesIO
from pathlib import Path
from typing import Optional, Union

@@ -14,6 +14,12 @@ from httpie.context import Environment
from httpie.core import main


# pytest-httpbin currently does not support chunked requests:
# <https://github.com/kevin1024/pytest-httpbin/issues/33>
# <https://github.com/kevin1024/pytest-httpbin/issues/28>
HTTPBIN_WITH_CHUNKED_SUPPORT = 'http://httpbin.org'


TESTS_ROOT = Path(__file__).parent
CRLF = '\r\n'
COLOR = '\x1b['
@@ -36,6 +42,11 @@ def add_auth(url, auth):
    return proto + '://' + auth + '@' + rest


class StdinBytesIO(BytesIO):
    """To be used for `MockEnvironment.stdin`"""
    len = 0  # See `prepare_request_body()`


class MockEnvironment(Environment):
    """Environment subclass with reasonable defaults for testing."""
    colors = 0
@@ -94,10 +105,12 @@ class BaseCLIResponse:

    - stdout output: print(self)
    - stderr output: print(self.stderr)
    - devnull output: print(self.devnull)
    - exit_status output: print(self.exit_status)

    """
    stderr: str = None
    devnull: str = None
    json: dict = None
    exit_status: ExitStatus = None

@@ -131,10 +144,9 @@ class StrCLIResponse(str, BaseCLIResponse):
        elif self.strip().startswith('{'):
            # Looks like JSON body.
            self._json = json.loads(self)
        elif (self.count('Content-Type:') == 1
                and 'application/json' in self):
            # Looks like a whole JSON HTTP message,
            # try to extract its body.
        elif self.count('Content-Type:') == 1:
            # Looks like an HTTP message,
            # try to extract JSON from its body.
            try:
                j = self.strip()[self.strip().rindex('\r\n\r\n'):]
            except ValueError:
@@ -161,17 +173,21 @@ def http(
    # noinspection PyUnresolvedReferences
    """
    Run HTTPie and capture stderr/out and exit status.
    Content written to devnull will be captured only if
    env.devnull is set manually.

    Invoke `httpie.core.main()` with `args` and `kwargs`,
    and return a `CLIResponse` subclass instance.

    The return value is either a `StrCLIResponse`, or `BytesCLIResponse`
    if unable to decode the output.
    if unable to decode the output. The devnull output is a string
    when possible, bytes otherwise.

    The response has the following attributes:

        `stdout` is represented by the instance itself (print r)
        `stderr`: text written to stderr
        `devnull`: text written to devnull
        `exit_status`: the exit status
        `json`: decoded JSON (if possible) or `None`

@@ -204,6 +220,7 @@

    stdout = env.stdout
    stderr = env.stderr
    devnull = env.devnull

    args = list(args)
    args_with_config_defaults = args + env.config.default_options
@@ -216,6 +233,7 @@
            add_to_args.append('--timeout=3')

    complete_args = [program_name, *add_to_args, *args]
    # print(' '.join(complete_args))

    def dump_stderr():
        stderr.seek(0)
@@ -248,13 +266,22 @@

        stdout.seek(0)
        stderr.seek(0)
        devnull.seek(0)
        output = stdout.read()
        devnull_output = devnull.read()
        try:
            output = output.decode('utf8')
        except UnicodeDecodeError:
            r = BytesCLIResponse(output)
        else:
            r = StrCLIResponse(output)

        try:
            devnull_output = devnull_output.decode('utf8')
        except Exception:
            pass

        r.devnull = devnull_output
        r.stderr = stderr.read()
        r.exit_status = exit_status

@@ -264,6 +291,7 @@
        return r

    finally:
        devnull.close()
        stdout.close()
        stderr.close()
        env.cleanup()
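The helper above is the entry point every test in this diff goes through. As a quick orientation, a call typically looks like the sketch below; this is illustrative only, with the /get endpoint and the --print flag chosen as example inputs:

    # Sketch: exercising the test helper directly inside a pytest test.
    def test_helper_usage_example(httpbin):
        r = http('--print=Hh', httpbin.url + '/get')
        assert HTTP_OK in r                         # stdout is the response text itself
        assert r.exit_status == ExitStatus.SUCCESS  # nonzero statuses surface here
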
23
tox.ini
@@ -1,23 +0,0 @@
# Tox (http://tox.testrun.org/) is a tool for running tests
# in multiple virtualenvs. See ./CONTRIBUTING.rst


[tox]
# pypy3 currently fails because of a Flask issue
envlist = py37


[testenv]
deps =
    mock
    pytest
    pytest-httpbin>=0.0.6


commands =
    # NOTE: the order of the directories in posargs seems to matter.
    # When changed, many ImportMismatchError exceptions occur.
    py.test \
        --verbose \
        --doctest-modules \
        {posargs:./httpie ./tests}