Mirror of https://github.com/httpie/cli.git (synced 2025-08-12 03:51:14 +02:00)
Compare commits
329 Commits
@@ -1 +0,0 @@
-; needs to exist otherwise `$ coveralls` fails
@@ -1,4 +1,4 @@
-# http://editorconfig.org
+# https://editorconfig.org
 root = true

 [*]
12  .github/FUNDING.yml  (vendored, new file)

@@ -0,0 +1,12 @@
+# These are supported funding model platforms
+
+github: # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2]
+patreon: # Replace with a single Patreon username
+open_collective: # Replace with a single Open Collective username
+ko_fi: # Replace with a single Ko-fi username
+tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
+community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
+liberapay: # Replace with a single Liberapay username
+issuehunt: # Replace with a single IssueHunt username
+otechie: # Replace with a single Otechie username
+custom: https://paypal.me/roztocil
37  .github/workflows/build.yml  (vendored, new file)

@@ -0,0 +1,37 @@
+name: Build
+on: [push]
+jobs:
+  extras:
+    # Run coverage and extra tests only once
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v1
+      - uses: actions/setup-python@v1
+        with:
+          python-version: 3.8
+      - run: pip install --upgrade pip
+      - run: make install
+      - run: make pycodestyle
+      - run: make test-cover
+      - run: make codecov-upload
+        env:
+          CODECOV_TOKEN: ${{ secrets.CODECOV_REPO_TOKEN }}
+      - run: make test-dist
+  test:
+    # Run core HTTPie tests everywhere
+    runs-on: ${{ matrix.os }}
+    strategy:
+      matrix:
+        os: [ubuntu-latest, macOS-latest, windows-latest]
+        python-version: [3.6, 3.7, 3.8]
+        exclude:
+          - os: windows-latest
+            python-version: 3.8
+    steps:
+      - uses: actions/checkout@v1
+      - uses: actions/setup-python@v1
+        with:
+          python-version: ${{ matrix.python-version }}
+      - run: python -m pip install --upgrade pip
+      - run: pip install --upgrade --editable .
+      - run: python setup.py test
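As a side note, the "extras" job above simply chains Makefile targets, so it can be approximated locally with the same commands (a sketch; it assumes GNU make and a Python 3 interpreter are available, and it skips the Codecov upload, which needs CODECOV_TOKEN set):

    make install       # create ./venv and install HTTPie plus dev requirements into it
    make pycodestyle   # PEP 8 checks
    make test-cover    # test suite with coverage
    make test-dist     # sdist/wheel build-and-install smoke test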
143
.gitignore
vendored
143
.gitignore
vendored
@ -1,13 +1,142 @@
|
|||||||
.DS_Store
|
.DS_Store
|
||||||
.idea/
|
.idea/
|
||||||
__pycache__/
|
|
||||||
dist/
|
|
||||||
httpie.egg-info/
|
|
||||||
build/
|
|
||||||
*.egg-info
|
*.egg-info
|
||||||
.cache/
|
.cache/
|
||||||
.tox
|
|
||||||
.coverage
|
|
||||||
*.pyc
|
*.pyc
|
||||||
*.egg
|
|
||||||
htmlcov
|
htmlcov
|
||||||
|
|
||||||
|
|
||||||
|
##############################################################################
|
||||||
|
# The bellow is GitHub template for Python project. gitignore.
|
||||||
|
# <https://github.com/github/gitignore/blob/master/Python.gitignore>
|
||||||
|
##############################################################################
|
||||||
|
|
||||||
|
# Byte-compiled / optimized / DLL files
|
||||||
|
__pycache__/
|
||||||
|
*.py[cod]
|
||||||
|
*$py.class
|
||||||
|
|
||||||
|
# C extensions
|
||||||
|
*.so
|
||||||
|
|
||||||
|
# Distribution / packaging
|
||||||
|
.Python
|
||||||
|
build/
|
||||||
|
develop-eggs/
|
||||||
|
dist/
|
||||||
|
downloads/
|
||||||
|
eggs/
|
||||||
|
.eggs/
|
||||||
|
lib/
|
||||||
|
lib64/
|
||||||
|
parts/
|
||||||
|
sdist/
|
||||||
|
var/
|
||||||
|
wheels/
|
||||||
|
pip-wheel-metadata/
|
||||||
|
share/python-wheels/
|
||||||
|
*.egg-info/
|
||||||
|
.installed.cfg
|
||||||
|
*.egg
|
||||||
|
MANIFEST
|
||||||
|
|
||||||
|
# PyInstaller
|
||||||
|
# Usually these files are written by a python script from a template
|
||||||
|
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||||
|
*.manifest
|
||||||
|
*.spec
|
||||||
|
|
||||||
|
# Installer logs
|
||||||
|
pip-log.txt
|
||||||
|
pip-delete-this-directory.txt
|
||||||
|
|
||||||
|
# Unit test / coverage reports
|
||||||
|
htmlcov/
|
||||||
|
.tox/
|
||||||
|
.nox/
|
||||||
|
.coverage
|
||||||
|
.coverage.*
|
||||||
|
.cache
|
||||||
|
nosetests.xml
|
||||||
|
coverage.xml
|
||||||
|
*.cover
|
||||||
|
*.py,cover
|
||||||
|
.hypothesis/
|
||||||
|
.pytest_cache/
|
||||||
|
|
||||||
|
# Translations
|
||||||
|
*.mo
|
||||||
|
*.pot
|
||||||
|
|
||||||
|
# Django stuff:
|
||||||
|
*.log
|
||||||
|
local_settings.py
|
||||||
|
db.sqlite3
|
||||||
|
db.sqlite3-journal
|
||||||
|
|
||||||
|
# Flask stuff:
|
||||||
|
instance/
|
||||||
|
.webassets-cache
|
||||||
|
|
||||||
|
# Scrapy stuff:
|
||||||
|
.scrapy
|
||||||
|
|
||||||
|
# Sphinx documentation
|
||||||
|
docs/_build/
|
||||||
|
|
||||||
|
# PyBuilder
|
||||||
|
target/
|
||||||
|
|
||||||
|
# Jupyter Notebook
|
||||||
|
.ipynb_checkpoints
|
||||||
|
|
||||||
|
# IPython
|
||||||
|
profile_default/
|
||||||
|
ipython_config.py
|
||||||
|
|
||||||
|
# pyenv
|
||||||
|
.python-version
|
||||||
|
|
||||||
|
# pipenv
|
||||||
|
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
||||||
|
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
||||||
|
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
||||||
|
# install all needed dependencies.
|
||||||
|
#Pipfile.lock
|
||||||
|
|
||||||
|
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
|
||||||
|
__pypackages__/
|
||||||
|
|
||||||
|
# Celery stuff
|
||||||
|
celerybeat-schedule
|
||||||
|
celerybeat.pid
|
||||||
|
|
||||||
|
# SageMath parsed files
|
||||||
|
*.sage.py
|
||||||
|
|
||||||
|
# Environments
|
||||||
|
.env
|
||||||
|
.venv
|
||||||
|
env/
|
||||||
|
venv/
|
||||||
|
ENV/
|
||||||
|
env.bak/
|
||||||
|
venv.bak/
|
||||||
|
|
||||||
|
# Spyder project settings
|
||||||
|
.spyderproject
|
||||||
|
.spyproject
|
||||||
|
|
||||||
|
# Rope project settings
|
||||||
|
.ropeproject
|
||||||
|
|
||||||
|
# mkdocs documentation
|
||||||
|
/site
|
||||||
|
|
||||||
|
# mypy
|
||||||
|
.mypy_cache/
|
||||||
|
.dmypy.json
|
||||||
|
dmypy.json
|
||||||
|
|
||||||
|
# Pyre type checker
|
||||||
|
.pyre/
|
||||||
|
79
.travis.yml
79
.travis.yml
@ -1,79 +0,0 @@
|
|||||||
# https://travis-ci.org/jkbrzt/httpie
|
|
||||||
sudo: false
|
|
||||||
|
|
||||||
language: python
|
|
||||||
os:
|
|
||||||
- linux
|
|
||||||
|
|
||||||
env:
|
|
||||||
global:
|
|
||||||
- NEWEST_PYTHON=3.5
|
|
||||||
|
|
||||||
python:
|
|
||||||
- 2.6
|
|
||||||
- 2.7
|
|
||||||
- pypy
|
|
||||||
- 3.4
|
|
||||||
- 3.5
|
|
||||||
- pypy3
|
|
||||||
|
|
||||||
matrix:
|
|
||||||
|
|
||||||
include:
|
|
||||||
|
|
||||||
# Manually defined OS X builds
|
|
||||||
# https://docs.travis-ci.com/user/multi-os/#Python-example-(unsupported-languages)
|
|
||||||
|
|
||||||
# Stock OSX Python
|
|
||||||
- os: osx
|
|
||||||
language: generic
|
|
||||||
env:
|
|
||||||
- TOXENV=py27
|
|
||||||
|
|
||||||
# Latest Python 2.x from Homebrew
|
|
||||||
- os: osx
|
|
||||||
language: generic
|
|
||||||
env:
|
|
||||||
- TOXENV=py27
|
|
||||||
- BREW_INSTALL=python
|
|
||||||
|
|
||||||
# Latest Python 3.x from Homebrew
|
|
||||||
- os: osx
|
|
||||||
language: generic
|
|
||||||
env:
|
|
||||||
- TOXENV=py35
|
|
||||||
- BREW_INSTALL=python3
|
|
||||||
|
|
||||||
install:
|
|
||||||
- |
|
|
||||||
if [[ $TRAVIS_OS_NAME == 'osx' ]]; then
|
|
||||||
if [[ -n "$BREW_INSTALL" ]]; then
|
|
||||||
brew update
|
|
||||||
brew install "$BREW_INSTALL"
|
|
||||||
fi
|
|
||||||
sudo pip install tox
|
|
||||||
fi
|
|
||||||
|
|
||||||
script:
|
|
||||||
- |
|
|
||||||
if [[ $TRAVIS_OS_NAME == 'linux' ]]; then
|
|
||||||
make
|
|
||||||
else
|
|
||||||
PATH="/usr/local/bin:$PATH" tox -e "$TOXENV"
|
|
||||||
fi
|
|
||||||
|
|
||||||
after_success:
|
|
||||||
- |
|
|
||||||
if [[ $TRAVIS_PYTHON_VERSION == $NEWEST_PYTHON && $TRAVIS_OS_NAME == 'linux' ]]; then
|
|
||||||
pip install python-coveralls && coveralls
|
|
||||||
fi
|
|
||||||
|
|
||||||
notifications:
|
|
||||||
|
|
||||||
webhooks:
|
|
||||||
urls:
|
|
||||||
# https://gitter.im/jkbrzt/httpie
|
|
||||||
- https://webhooks.gitter.im/e/c42fcd359a110d02830b
|
|
||||||
on_success: always # options: [always|never|change] default: always
|
|
||||||
on_failure: always # options: [always|never|change] default: always
|
|
||||||
on_start: always # options: [always|never|change] default: always
|
|
@@ -2,13 +2,13 @@
 HTTPie authors
 ==============

-* `Jakub Roztocil <https://github.com/jkbrzt>`_
+* `Jakub Roztocil <https://github.com/jakubroztocil>`_


 Patches and ideas
 -----------------

-`Complete list of contributors on GitHib <https://github.com/jkbrzt/httpie/graphs/contributors>`_
+`Complete list of contributors on GitHub <https://github.com/jakubroztocil/httpie/graphs/contributors>`_

 * `Cláudia T. Delgado <https://github.com/claudiatd>`_ (logo)
 * `Hank Gay <https://github.com/gthank>`_
@@ -36,5 +36,7 @@ Patches and ideas
 * `Dennis Brakhane <https://github.com/brakhane>`_
 * `Matt Layman <https://github.com/mblayman>`_
 * `Edward Yang <https://github.com/honorabrutroll>`_
+* `Aleksandr Vinokurov <https://github.com/aleksandr-vin>`_
+* `Jeff Byrnes <https://github.com/jeffbyrnes>`_

182  CHANGELOG.rst

@@ -2,12 +2,124 @@
 Change Log
 ==========

-This document records all notable changes to `HTTPie <http://httpie.org>`_.
-This project adheres to `Semantic Versioning <http://semver.org/>`_.
+This document records all notable changes to `HTTPie <https://httpie.org>`_.
+This project adheres to `Semantic Versioning <https://semver.org/>`_.


-`1.0.0-dev`_ (Unreleased)
+`2.0.0`_ (2020-01-12)
 -------------------------

+* Removed Python 2.7 support (`EOL Jan 2020 <https://www.python.org/doc/sunset-python-2/>`_).
+* Added ``--offline`` to allow building an HTTP request and printing it but not
+  actually sending it over the network.
+* Replaced the old collect-all-then-process handling of HTTP communication
+  with one-by-one processing of each HTTP request or response as they become
+  available. This means that you can see headers immediately,
+  see what is being send even when the request fails, etc.
+* Removed automatic config file creation to avoid concurrency issues.
+* Removed the default 30-second connection ``--timeout`` limit.
+* Removed Python’s default limit of 100 response headers.
+* Added ``--max-headers`` to allow setting the max header limit.
+* Added ``--compress`` to allow request body compression.
+* Added ``--ignore-netrc`` to allow bypassing credentials from ``.netrc``.
+* Added ``https`` alias command with ``https://`` as the default scheme.
+* Added ``$ALL_PROXY`` documentation.
+* Added type annotations throughout the codebase.
+* Added ``tests/`` to the PyPi package for the convenience of
+  downstream package maintainers.
+* Fixed an error when ``stdin`` was a closed fd.
+* Improved ``--debug`` output formatting.
+
+
+`1.0.3`_ (2019-08-26)
+---------------------
+
+* Fixed CVE-2019-10751 — the way the output filename is generated for
+  ``--download`` requests without ``--output`` resulting in a redirect has
+  been changed to only consider the initial URL as the base for the generated
+  filename, and not the final one. This fixes a potential security issue under
+  the following scenario:
+
+  1. A ``--download`` request with no explicit ``--output`` is made (e.g.,
+     ``$ http -d example.org/file.txt``), instructing httpie to
+     `generate the output filename <https://httpie.org/doc#downloaded-filename>`_
+     from the ``Content-Disposition`` response header, or from the URL if the header
+     is not provided.
+  2. The server handling the request has been modified by an attacker and
+     instead of the expected response the URL returns a redirect to another
+     URL, e.g., ``attacker.example.org/.bash_profile``, whose response does
+     not provide a ``Content-Disposition`` header (i.e., the base for the
+     generated filename becomes ``.bash_profile`` instead of ``file.txt``).
+  3. Your current directory doesn’t already contain ``.bash_profile``
+     (i.e., no unique suffix is added to the generated filename).
+  4. You don’t notice the potentially unexpected output filename
+     as reported by httpie in the console output
+     (e.g., ``Downloading 100.00 B to ".bash_profile"``).
+
+  Reported by Raul Onitza and Giulio Comi.
+
+
+`1.0.2`_ (2018-11-14)
+-------------------------
+
+* Fixed tests for installation with pyOpenSSL.
+
+
+`1.0.1`_ (2018-11-14)
+-------------------------
+
+* Removed external URL calls from tests.
+
+
+`1.0.0`_ (2018-11-02)
+-------------------------
+
+* Added ``--style=auto`` which follows the terminal ANSI color styles.
+* Added support for selecting TLS 1.3 via ``--ssl=tls1.3``
+  (available once implemented in upstream libraries).
+* Added ``true``/``false`` as valid values for ``--verify``
+  (in addition to ``yes``/``no``) and the boolean value is case-insensitive.
+* Changed the default ``--style`` from ``solarized`` to ``auto`` (on Windows it stays ``fruity``).
+* Fixed default headers being incorrectly case-sensitive.
+* Removed Python 2.6 support.
+
+
+
+`0.9.9`_ (2016-12-08)
+---------------------
+
+* Fixed README.
+
+
+`0.9.8`_ (2016-12-08)
+---------------------
+
+* Extended auth plugin API.
+* Added exit status code ``7`` for plugin errors.
+* Added support for ``curses``-less Python installations.
+* Fixed ``REQUEST_ITEM`` arg incorrectly being reported as required.
+* Improved ``CTRL-C`` interrupt handling.
+* Added the standard exit status code ``130`` for keyboard interrupts.
+
+
+`0.9.6`_ (2016-08-13)
+---------------------
+
+* Added Python 3 as a dependency for Homebrew installations
+  to ensure some of the newer HTTP features work out of the box
+  for macOS users (starting with HTTPie 0.9.4.).
+* Added the ability to unset a request header with ``Header:``, and send an
+  empty value with ``Header;``.
+* Added ``--default-scheme <URL_SCHEME>`` to enable things like
+  ``$ alias https='http --default-scheme=https``.
+* Added ``-I`` as a shortcut for ``--ignore-stdin``.
+* Added fish shell completion (located in ``extras/httpie-completion.fish``
+  in the Github repo).
+* Updated ``requests`` to 2.10.0 so that SOCKS support can be added via
+  ``pip install requests[socks]``.
+* Changed the default JSON ``Accept`` header from ``application/json``
+  to ``application/json, */*``.
+* Changed the pre-processing of request HTTP headers so that any leading
+  and trailing whitespace is removed.
+
+
 `0.9.4`_ (2016-07-01)
@@ -61,8 +173,8 @@ This project adheres to `Semantic Versioning <http://semver.org/>`_.
 ---------------------

 * Added support for Requests transport adapter plugins
-  (see `httpie-unixsocket <https://github.com/msabramo/httpie-unixsocket>`_
-  and `httpie-http2 <https://github.com/jkbrzt/httpie-http2>`_)
+  (see `httpie-unixsocket <https://github.com/httpie/httpie-unixsocket>`_
+  and `httpie-http2 <https://github.com/httpie/httpie-http2>`_)


 `0.9.0`_ (2015-01-31)
@@ -260,33 +372,41 @@ This project adheres to `Semantic Versioning <http://semver.org/>`_.
 * Many improvements and bug fixes


-`0.1`_ (2012-02-25)
--------------------
+`0.1.0`_ (2012-02-25)
+---------------------

 * Initial public release


-.. _`0.1`: https://github.com/jkbrzt/httpie/commit/b966efa
-.. _0.1.4: https://github.com/jkbrzt/httpie/compare/b966efa...0.1.4
-.. _0.1.5: https://github.com/jkbrzt/httpie/compare/0.1.4...0.1.5
-.. _0.1.6: https://github.com/jkbrzt/httpie/compare/0.1.5...0.1.6
-.. _0.2.0: https://github.com/jkbrzt/httpie/compare/0.1.6...0.2.0
-.. _0.2.1: https://github.com/jkbrzt/httpie/compare/0.2.0...0.2.1
-.. _0.2.2: https://github.com/jkbrzt/httpie/compare/0.2.1...0.2.2
-.. _0.2.5: https://github.com/jkbrzt/httpie/compare/0.2.2...0.2.5
-.. _0.2.6: https://github.com/jkbrzt/httpie/compare/0.2.5...0.2.6
-.. _0.2.7: https://github.com/jkbrzt/httpie/compare/0.2.5...0.2.7
-.. _0.3.0: https://github.com/jkbrzt/httpie/compare/0.2.7...0.3.0
-.. _0.4.0: https://github.com/jkbrzt/httpie/compare/0.3.0...0.4.0
-.. _0.4.1: https://github.com/jkbrzt/httpie/compare/0.4.0...0.4.1
-.. _0.5.0: https://github.com/jkbrzt/httpie/compare/0.4.1...0.5.0
-.. _0.5.1: https://github.com/jkbrzt/httpie/compare/0.5.0...0.5.1
-.. _0.6.0: https://github.com/jkbrzt/httpie/compare/0.5.1...0.6.0
-.. _0.7.1: https://github.com/jkbrzt/httpie/compare/0.6.0...0.7.1
-.. _0.8.0: https://github.com/jkbrzt/httpie/compare/0.7.1...0.8.0
-.. _0.9.0: https://github.com/jkbrzt/httpie/compare/0.8.0...0.9.0
-.. _0.9.1: https://github.com/jkbrzt/httpie/compare/0.9.0...0.9.1
-.. _0.9.2: https://github.com/jkbrzt/httpie/compare/0.9.1...0.9.2
-.. _0.9.3: https://github.com/jkbrzt/httpie/compare/0.9.2...0.9.3
-.. _0.9.4: https://github.com/jkbrzt/httpie/compare/0.9.3...0.9.4
-.. _1.0.0-dev: https://github.com/jkbrzt/httpie/compare/0.9.4...master
+.. _`0.1.0`: https://github.com/jakubroztocil/httpie/commit/b966efa
+.. _0.1.4: https://github.com/jakubroztocil/httpie/compare/b966efa...0.1.4
+.. _0.1.5: https://github.com/jakubroztocil/httpie/compare/0.1.4...0.1.5
+.. _0.1.6: https://github.com/jakubroztocil/httpie/compare/0.1.5...0.1.6
+.. _0.2.0: https://github.com/jakubroztocil/httpie/compare/0.1.6...0.2.0
+.. _0.2.1: https://github.com/jakubroztocil/httpie/compare/0.2.0...0.2.1
+.. _0.2.2: https://github.com/jakubroztocil/httpie/compare/0.2.1...0.2.2
+.. _0.2.5: https://github.com/jakubroztocil/httpie/compare/0.2.2...0.2.5
+.. _0.2.6: https://github.com/jakubroztocil/httpie/compare/0.2.5...0.2.6
+.. _0.2.7: https://github.com/jakubroztocil/httpie/compare/0.2.5...0.2.7
+.. _0.3.0: https://github.com/jakubroztocil/httpie/compare/0.2.7...0.3.0
+.. _0.4.0: https://github.com/jakubroztocil/httpie/compare/0.3.0...0.4.0
+.. _0.4.1: https://github.com/jakubroztocil/httpie/compare/0.4.0...0.4.1
+.. _0.5.0: https://github.com/jakubroztocil/httpie/compare/0.4.1...0.5.0
+.. _0.5.1: https://github.com/jakubroztocil/httpie/compare/0.5.0...0.5.1
+.. _0.6.0: https://github.com/jakubroztocil/httpie/compare/0.5.1...0.6.0
+.. _0.7.1: https://github.com/jakubroztocil/httpie/compare/0.6.0...0.7.1
+.. _0.8.0: https://github.com/jakubroztocil/httpie/compare/0.7.1...0.8.0
+.. _0.9.0: https://github.com/jakubroztocil/httpie/compare/0.8.0...0.9.0
+.. _0.9.1: https://github.com/jakubroztocil/httpie/compare/0.9.0...0.9.1
+.. _0.9.2: https://github.com/jakubroztocil/httpie/compare/0.9.1...0.9.2
+.. _0.9.3: https://github.com/jakubroztocil/httpie/compare/0.9.2...0.9.3
+.. _0.9.4: https://github.com/jakubroztocil/httpie/compare/0.9.3...0.9.4
+.. _0.9.6: https://github.com/jakubroztocil/httpie/compare/0.9.4...0.9.6
+.. _0.9.8: https://github.com/jakubroztocil/httpie/compare/0.9.6...0.9.8
+.. _0.9.9: https://github.com/jakubroztocil/httpie/compare/0.9.8...0.9.9
+.. _1.0.0: https://github.com/jakubroztocil/httpie/compare/0.9.9...1.0.0
+.. _1.0.1: https://github.com/jakubroztocil/httpie/compare/1.0.0...1.0.1
+.. _1.0.2: https://github.com/jakubroztocil/httpie/compare/1.0.1...1.0.2
+.. _1.0.3: https://github.com/jakubroztocil/httpie/compare/1.0.2...1.0.3
+.. _2.0.0: https://github.com/jakubroztocil/httpie/compare/1.0.3...2.0.0
+.. _2.1.0-dev: https://github.com/jakubroztocil/httpie/compare/2.0.0...master
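To make a couple of the entries above concrete, here is a brief usage sketch of the flags they describe (the host name is just a placeholder and responses are omitted):

    # 2.0.0: build and print a request without sending it over the network
    http --offline example.org hello=world

    # 2.0.0: the new `https' alias uses https:// as the default scheme
    https example.org

    # 0.9.6: unset a default header with `Header:', send an empty one with `Header;'
    http example.org Accept: X-Empty;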
76
CODE_OF_CONDUCT.md
Normal file
76
CODE_OF_CONDUCT.md
Normal file
@ -0,0 +1,76 @@
|
|||||||
|
# Contributor Covenant Code of Conduct
|
||||||
|
|
||||||
|
## Our Pledge
|
||||||
|
|
||||||
|
In the interest of fostering an open and welcoming environment, we as
|
||||||
|
contributors and maintainers pledge to making participation in our project and
|
||||||
|
our community a harassment-free experience for everyone, regardless of age, body
|
||||||
|
size, disability, ethnicity, sex characteristics, gender identity and expression,
|
||||||
|
level of experience, education, socio-economic status, nationality, personal
|
||||||
|
appearance, race, religion, or sexual identity and orientation.
|
||||||
|
|
||||||
|
## Our Standards
|
||||||
|
|
||||||
|
Examples of behavior that contributes to creating a positive environment
|
||||||
|
include:
|
||||||
|
|
||||||
|
* Using welcoming and inclusive language
|
||||||
|
* Being respectful of differing viewpoints and experiences
|
||||||
|
* Gracefully accepting constructive criticism
|
||||||
|
* Focusing on what is best for the community
|
||||||
|
* Showing empathy towards other community members
|
||||||
|
|
||||||
|
Examples of unacceptable behavior by participants include:
|
||||||
|
|
||||||
|
* The use of sexualized language or imagery and unwelcome sexual attention or
|
||||||
|
advances
|
||||||
|
* Trolling, insulting/derogatory comments, and personal or political attacks
|
||||||
|
* Public or private harassment
|
||||||
|
* Publishing others' private information, such as a physical or electronic
|
||||||
|
address, without explicit permission
|
||||||
|
* Other conduct which could reasonably be considered inappropriate in a
|
||||||
|
professional setting
|
||||||
|
|
||||||
|
## Our Responsibilities
|
||||||
|
|
||||||
|
Project maintainers are responsible for clarifying the standards of acceptable
|
||||||
|
behavior and are expected to take appropriate and fair corrective action in
|
||||||
|
response to any instances of unacceptable behavior.
|
||||||
|
|
||||||
|
Project maintainers have the right and responsibility to remove, edit, or
|
||||||
|
reject comments, commits, code, wiki edits, issues, and other contributions
|
||||||
|
that are not aligned to this Code of Conduct, or to ban temporarily or
|
||||||
|
permanently any contributor for other behaviors that they deem inappropriate,
|
||||||
|
threatening, offensive, or harmful.
|
||||||
|
|
||||||
|
## Scope
|
||||||
|
|
||||||
|
This Code of Conduct applies both within project spaces and in public spaces
|
||||||
|
when an individual is representing the project or its community. Examples of
|
||||||
|
representing a project or community include using an official project e-mail
|
||||||
|
address, posting via an official social media account, or acting as an appointed
|
||||||
|
representative at an online or offline event. Representation of a project may be
|
||||||
|
further defined and clarified by project maintainers.
|
||||||
|
|
||||||
|
## Enforcement
|
||||||
|
|
||||||
|
Instances of abusive, harassing, or otherwise unacceptable behavior may be
|
||||||
|
reported by contacting the project team at jakub@roztocil.co. All
|
||||||
|
complaints will be reviewed and investigated and will result in a response that
|
||||||
|
is deemed necessary and appropriate to the circumstances. The project team is
|
||||||
|
obligated to maintain confidentiality with regard to the reporter of an incident.
|
||||||
|
Further details of specific enforcement policies may be posted separately.
|
||||||
|
|
||||||
|
Project maintainers who do not follow or enforce the Code of Conduct in good
|
||||||
|
faith may face temporary or permanent repercussions as determined by other
|
||||||
|
members of the project's leadership.
|
||||||
|
|
||||||
|
## Attribution
|
||||||
|
|
||||||
|
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
|
||||||
|
available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
|
||||||
|
|
||||||
|
[homepage]: https://www.contributor-covenant.org
|
||||||
|
|
||||||
|
For answers to common questions about this code of conduct, see
|
||||||
|
https://www.contributor-covenant.org/faq
|
127  CONTRIBUTING.rst

@@ -25,67 +25,131 @@ to your bug report, e.g.:

 Before working on a new feature or a bug, please browse `existing issues`_
 to see whether it has been previously discussed. If the change in question
-is a bigger one, it's always good to discuss before your starting working on
+is a bigger one, it's always good to discuss before you start working on
 it.


-Creating Development Environment
+Development Environment
 --------------------------------

-Go to https://github.com/jkbrzt/httpie and fork the project repository.
+Getting the code
+****************
+
+Go to https://github.com/jakubroztocil/httpie and fork the project repository.
+

 .. code-block:: bash

-    git clone https://github.com/<YOU>/httpie
+    # Clone your fork
+    git clone git@github.com:<YOU>/httpie.git
+
+    # Enter the project directory
     cd httpie
+
+    # Create a branch for your changes
     git checkout -b my_topical_branch

-    # (Recommended: create a new virtualenv)
-
-    # Install dev. requirements and also HTTPie (in editable mode
-    # so that the `http' command will point to your working copy):
+
+Setup
+*****
+
+The `Makefile`_ contains a bunch of tasks to get you started. Just run
+the following command, which:
+
+
+* Creates an isolated Python virtual environment inside ``./venv``
+  (via the standard library `venv`_ tool);
+* installs all dependencies and also installs HTTPie
+  (in editable mode so that the ``http`` command will point to your
+  working copy).
+* and runs tests (It is the same as running ``make install test``).
+
+
+.. code-block:: bash
+
     make


+
+Python virtual environment
+**************************
+
+Activate the Python virtual environment—created via the ``make install``
+task during `setup`_—for your active shell session using the following command:
+
+.. code-block:: bash
+
+    source venv/bin/activate
+
+(If you use ``virtualenvwrapper``, you can also use ``workon httpie`` to
+activate the environment — we have created a symlink for you. It’s a bit of
+a hack but it works™.)
+
+You should now see ``(httpie)`` next to your shell prompt, and
+the ``http`` should point to you development copy:
+
+.. code-block::
+
+    (httpie) ~/Code/httpie $ which http
+    /Users/jakub/Code/httpie/venv/bin/http
+    (httpie) ~/Code/httpie $ http --version
+    2.0.0-dev
+
+(Btw, you don’t need to activate the virtual environment if you just want
+run some of the ``make`` tasks. You can also invoke the development
+version of HTTPie directly with ``./venv/bin/http`` without having to activate
+the environment first. The same goes for ``./venv/bin/py.test``, etc.).
+
+
 Making Changes
 --------------

-Please make sure your changes conform to `Style Guide for Python Code`_ (PEP8).
+Please make sure your changes conform to `Style Guide for Python Code`_ (PEP8)
+and that ``make pycodestyle`` passes.


-Testing
--------
+Testing & CI
+------------

-Before opening a pull requests, please make sure the `test suite`_ passes
-in all of the `supported Python environments`_. You should also add tests
-for any new features and bug fixes.
+Please add tests for any new features and bug fixes.

-HTTPie uses `pytest`_ and `Tox`_ for testing.
+When you open a pull request,
+`Github Actions <https://github.com/jakubroztocil/httpie/actions>`_
+will automatically run HTTPie’s `test suite`_ against your code
+so please make sure all checks pass.


-Running all tests:
-******************
+Running tests locally
+*********************
+
+HTTPie uses the `pytest`_ runner. It also uses `Tox`_ which allows you to run
+tests on multiple Python versions even when testing locally.


 .. code-block:: bash

-    # Run all tests on the current Python interpreter
+    # Run tests on the current Python interpreter with coverage.
     make test

-    # Run all tests on the current Python with coverage
+    # Run tests with coverage
     make test-cover

     # Run all tests in all of the supported and available Pythons via Tox
     make test-tox

-    # Run all tests for code as well as packaging, etc.
+    # Test PEP8 compliance
+    make pycodestyle
+
+    # Run extended tests — for code as well as .rst files syntax, packaging, etc.
     make test-all


-Running specific tests:
-***********************
+Running specific tests
+**********************

+After you have activated your virtual environment (see `setup`_), you
+can run specific tests from the terminal:

 .. code-block:: bash

@@ -95,22 +159,25 @@ Running specific tests:
     py.test tests/test_uploads.py::TestMultipartFormDataFileUpload::test_upload_ok

     # Run specific tests on the on all Pythons via Tox
+    # (change to `tox -e py37' to limit Python version)
     tox -- tests/test_uploads.py --verbose
     tox -- tests/test_uploads.py::TestMultipartFormDataFileUpload --verbose
     tox -- tests/test_uploads.py::TestMultipartFormDataFileUpload::test_upload_ok --verbose


 -----

 See `Makefile`_ for additional development utilities.
-Don't forget to add yourself to `AUTHORS`_!

+Finally, don't forget to add yourself to `AUTHORS`_!


 .. _Tox: http://tox.testrun.org
-.. _supported Python environments: https://github.com/jkbrzt/httpie/blob/master/tox.ini
-.. _existing issues: https://github.com/jkbrzt/httpie/issues?state=open
-.. _AUTHORS: https://github.com/jkbrzt/httpie/blob/master/AUTHORS.rst
-.. _Makefile: https://github.com/jkbrzt/httpie/blob/master/Makefile
-.. _pytest: http://pytest.org/
-.. _Style Guide for Python Code: http://python.org/dev/peps/pep-0008/
-.. _test suite: https://github.com/jkbrzt/httpie/tree/master/tests
+.. _supported Python environments: https://github.com/jakubroztocil/httpie/blob/master/tox.ini
+.. _existing issues: https://github.com/jakubroztocil/httpie/issues?state=open
+.. _AUTHORS: https://github.com/jakubroztocil/httpie/blob/master/AUTHORS.rst
+.. _Makefile: https://github.com/jakubroztocil/httpie/blob/master/Makefile
+.. _venv: https://docs.python.org/3/library/venv.html
+.. _pytest: https://pytest.org/
+.. _Style Guide for Python Code: https://python.org/dev/peps/pep-0008/
+.. _test suite: https://github.com/jakubroztocil/httpie/tree/master/tests
10  LICENSE

@@ -1,4 +1,4 @@
-Copyright © 2012-2016 Jakub Roztocil <jakub@roztocil.co>
+Copyright © 2012-2019 Jakub Roztocil <jakub@roztocil.co>

 Redistribution and use in source and binary forms, with or without
 modification, are permitted provided that the following conditions are met:
@@ -10,14 +10,14 @@ modification, are permitted provided that the following conditions are met:
 notice, this list of conditions and the following disclaimer in the
 documentation and/or other materials provided with the distribution.

-3. Neither the name of The author nor the names of its contributors may
-be used to endorse or promote products derived from this software
+3. Neither the name of the copyright holder nor the names of its contributors
+may be used to endorse or promote products derived from this software
 without specific prior written permission.

-THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS "AS IS" AND
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-DISCLAIMED. IN NO EVENT SHALL THE AUTHOR AND CONTRIBUTORS BE LIABLE FOR
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
 ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
@@ -2,3 +2,6 @@ include LICENSE
 include README.rst
 include CHANGELOG.rst
 include AUTHORS.rst
+
+# <https://github.com/jakubroztocil/httpie/issues/182>
+recursive-include tests/ *
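The new ``recursive-include tests/ *`` line is what ships the test suite inside the source distribution (per the 2.0.0 changelog entry about adding ``tests/`` to the PyPi package). One way to check the effect locally (a sketch, assuming the sdist lands in ./dist):

    python setup.py sdist
    tar -tzf dist/httpie-*.tar.gz | grep 'tests/'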
198
Makefile
198
Makefile
@ -1,35 +1,92 @@
|
|||||||
#
|
###############################################################################
|
||||||
# See ./CONTRIBUTING.rst
|
# See ./CONTRIBUTING.rst
|
||||||
#
|
###############################################################################
|
||||||
|
|
||||||
|
ROOT_DIR:=$(shell dirname $(realpath $(firstword $(MAKEFILE_LIST))))
|
||||||
VERSION=$(shell grep __version__ httpie/__init__.py)
|
VERSION=$(shell grep __version__ httpie/__init__.py)
|
||||||
REQUIREMENTS="requirements-dev.txt"
|
REQUIREMENTS=requirements-dev.txt
|
||||||
TAG="\n\n\033[0;32m\#\#\# "
|
H1="\n\n\033[0;32m\#\#\# "
|
||||||
END=" \#\#\# \033[0m\n"
|
H1END=" \#\#\# \033[0m\n"
|
||||||
|
|
||||||
|
|
||||||
all: test
|
# Only used to create our venv.
|
||||||
|
SYSTEM_PYTHON=python3
|
||||||
|
|
||||||
|
VENV_ROOT=venv
|
||||||
|
VENV_BIN=$(VENV_ROOT)/bin
|
||||||
|
VENV_PIP=$(VENV_BIN)/pip3
|
||||||
|
VENV_PYTHON=$(VENV_BIN)/python
|
||||||
|
|
||||||
|
|
||||||
init: uninstall-httpie
|
export PATH := $(VENV_BIN):$(PATH)
|
||||||
@echo $(TAG)Installing dev requirements$(END)
|
|
||||||
pip install --upgrade -r $(REQUIREMENTS)
|
|
||||||
|
|
||||||
@echo $(TAG)Installing HTTPie$(END)
|
|
||||||
pip install --upgrade --editable .
|
all: uninstall-httpie install test
|
||||||
|
|
||||||
|
|
||||||
|
install: venv
|
||||||
|
@echo $(H1)Installing dev requirements$(H1END)
|
||||||
|
$(VENV_PIP) install --upgrade -r $(REQUIREMENTS)
|
||||||
|
|
||||||
|
@echo $(H1)Installing HTTPie$(H1END)
|
||||||
|
$(VENV_PIP) install --upgrade --editable .
|
||||||
|
|
||||||
@echo
|
@echo
|
||||||
|
|
||||||
|
clean:
|
||||||
test: init
|
@echo $(H1)Cleaning up$(H1END)
|
||||||
@echo $(TAG)Running tests on the current Python interpreter with coverage $(END)
|
rm -rf $(VENV_ROOT)
|
||||||
py.test --cov ./httpie --cov ./tests --doctest-modules --verbose ./httpie ./tests
|
# Remove symlink for virtualenvwrapper, if we’ve created one.
|
||||||
|
[ -n "$(WORKON_HOME)" -a -L "$(WORKON_HOME)/httpie" -a -f "$(WORKON_HOME)/httpie" ] && rm $(WORKON_HOME)/httpie || true
|
||||||
|
rm -rf .tox *.egg dist build .coverage .cache .pytest_cache httpie.egg-info
|
||||||
|
find . -name '__pycache__' -delete -o -name '*.pyc' -delete
|
||||||
@echo
|
@echo
|
||||||
|
|
||||||
|
|
||||||
test-tox: init
|
venv:
|
||||||
@echo $(TAG)Running tests on all Pythons via Tox$(END)
|
@echo $(H1)Creating a Python environment $(VENV_ROOT) $(H1END)
|
||||||
tox
|
|
||||||
|
$(SYSTEM_PYTHON) -m venv --prompt httpie $(VENV_ROOT)
|
||||||
|
|
||||||
|
@echo
|
||||||
|
@echo done.
|
||||||
|
@echo
|
||||||
|
@echo To active it manually, run:
|
||||||
|
@echo
|
||||||
|
@echo " source $(VENV_BIN)/activate"
|
||||||
|
@echo
|
||||||
|
@echo '(learn more: https://docs.python.org/3/library/venv.html)'
|
||||||
|
@echo
|
||||||
|
@if [ -n "$(WORKON_HOME)" ]; then \
|
||||||
|
echo $(ROOT_DIR) > $(VENV_ROOT)/.project; \
|
||||||
|
if [ ! -d $(WORKON_HOME)/httpie -a ! -L $(WORKON_HOME)/httpie ]; then \
|
||||||
|
ln -s $(ROOT_DIR)/$(VENV_ROOT) $(WORKON_HOME)/httpie ; \
|
||||||
|
echo ''; \
|
||||||
|
echo 'Since you use virtualenvwrapper, we created a symlink'; \
|
||||||
|
echo 'so you can also use "workon httpie" to activate the venv.'; \
|
||||||
|
echo ''; \
|
||||||
|
fi; \
|
||||||
|
fi
|
||||||
|
|
||||||
|
|
||||||
|
###############################################################################
|
||||||
|
# Testing
|
||||||
|
###############################################################################
|
||||||
|
|
||||||
|
|
||||||
|
test:
|
||||||
|
@echo $(H1)Running tests$(HEADER_EXTRA)$(H1END)
|
||||||
|
$(VENV_BIN)/py.test $(COV) ./httpie $(COV) ./tests --doctest-modules --verbose ./httpie ./tests
|
||||||
|
@echo
|
||||||
|
|
||||||
|
|
||||||
|
test-cover: COV=--cov
|
||||||
|
test-cover: HEADER_EXTRA=' (with coverage)'
|
||||||
|
test-cover: test
|
||||||
|
|
||||||
|
|
||||||
|
# test-all is meant to test everything — even this Makefile
|
||||||
|
test-all: clean install test test-tox test-dist pycodestyle
|
||||||
@echo
|
@echo
|
||||||
|
|
||||||
|
|
||||||
@ -37,58 +94,101 @@ test-dist: test-sdist test-bdist-wheel
|
|||||||
@echo
|
@echo
|
||||||
|
|
||||||
|
|
||||||
test-sdist: clean uninstall-httpie
|
test-tox: uninstall-httpie install
|
||||||
@echo $(TAG)Testing sdist build an installation$(END)
|
@echo $(H1)Running tests on all Pythons via Tox$(H1END)
|
||||||
python setup.py sdist
|
$(VENV_BIN)/tox
|
||||||
pip install --force-reinstall --upgrade dist/*.gz
|
|
||||||
which http
|
|
||||||
@echo
|
@echo
|
||||||
|
|
||||||
|
|
||||||
test-bdist-wheel: clean uninstall-httpie
|
test-sdist: clean venv
|
||||||
@echo $(TAG)Testing wheel build an installation$(END)
|
@echo $(H1)Testing sdist build an installation$(H1END)
|
||||||
python setup.py bdist_wheel
|
$(VENV_PYTHON) setup.py sdist
|
||||||
pip install --force-reinstall --upgrade dist/*.whl
|
$(VENV_PIP) install --force-reinstall --upgrade dist/*.gz
|
||||||
which http
|
$(VENV_BIN)/http --version
|
||||||
@echo
|
@echo
|
||||||
|
|
||||||
|
|
||||||
# This tests everything, even this Makefile.
|
test-bdist-wheel: clean venv
|
||||||
test-all: uninstall-all clean init test test-tox test-dist
|
@echo $(H1)Testing wheel build an installation$(H1END)
|
||||||
|
$(VENV_PIP) install wheel
|
||||||
|
$(VENV_PYTHON) setup.py bdist_wheel
|
||||||
|
$(VENV_PIP) install --force-reinstall --upgrade dist/*.whl
|
||||||
|
$(VENV_BIN)/http --version
|
||||||
|
@echo
|
||||||
|
|
||||||
|
|
||||||
publish: test-all
|
pycodestyle:
|
||||||
@echo $(TAG)Testing wheel build an installation$(END)
|
@echo $(H1)Running pycodestyle$(H1END)
|
||||||
|
@[ -f $(VENV_BIN)/pycodestyle ] || $(VENV_PIP) install pycodestyle
|
||||||
|
$(VENV_BIN)/pycodestyle httpie/ tests/ extras/ *.py
|
||||||
|
@echo
|
||||||
|
|
||||||
|
|
||||||
|
codecov-upload:
|
||||||
|
@echo $(H1)Running codecov$(H1END)
|
||||||
|
@[ -f $(VENV_BIN)/codecov ] || $(VENV_PIP) install codecov
|
||||||
|
# $(VENV_BIN)/codecov --required
|
||||||
|
$(VENV_BIN)/codecov
|
||||||
|
@echo
|
||||||
|
|
||||||
|
|
||||||
|
###############################################################################
|
||||||
|
# Publishing to PyPi
|
||||||
|
###############################################################################
|
||||||
|
|
||||||
|
|
||||||
|
publish: test-all publish-no-test
|
||||||
|
|
||||||
|
|
||||||
|
publish-no-test:
|
||||||
|
@echo $(H1)Testing wheel build an installation$(H1END)
|
||||||
@echo "$(VERSION)"
|
@echo "$(VERSION)"
|
||||||
@echo "$(VERSION)" | grep -q "dev" && echo "!!!Not publishing dev version!!!" && exit 1
|
@echo "$(VERSION)" | grep -q "dev" && echo '!!!Not publishing dev version!!!' && exit 1 || echo ok
|
||||||
python setup.py register
|
$(VENV_PYTHON) setup.py sdist bdist_wheel
|
||||||
python setup.py sdist upload
|
$(VENV_BIN)/twine upload dist/*
|
||||||
python setup.py bdist_wheel upload
|
|
||||||
@echo
|
@echo
|
||||||
|
|
||||||
|
|
||||||
clean:
|
|
||||||
@echo $(TAG)Cleaning up$(END)
|
|
||||||
rm -rf .tox *.egg dist build .coverage
|
|
||||||
find . -name '__pycache__' -delete -print -o -name '*.pyc' -delete -print
|
|
||||||
@echo
|
|
||||||
|
|
||||||
|
###############################################################################
|
||||||
|
# Uninstalling
|
||||||
|
###############################################################################
|
||||||
|
|
||||||
uninstall-httpie:
|
uninstall-httpie:
|
||||||
@echo $(TAG)Uninstalling httpie$(END)
|
@echo $(H1)Uninstalling httpie$(H1END)
|
||||||
- pip uninstall --yes httpie &2>/dev/null
|
- $(VENV_PIP) uninstall --yes httpie &2>/dev/null
|
||||||
|
|
||||||
@echo "Verifying…"
|
@echo "Verifying…"
|
||||||
cd .. && ! python -m httpie --version &2>/dev/null
|
cd .. && ! $(VENV_PYTHON) -m httpie --version &2>/dev/null
|
||||||
|
|
||||||
@echo "Done"
|
@echo "Done"
|
||||||
@echo
|
@echo
|
||||||
|
|
||||||
|
|
||||||
uninstall-all: uninstall-httpie
|
###############################################################################
|
||||||
|
# Docs
|
||||||
|
###############################################################################
|
||||||
|
|
||||||
@echo $(TAG)Uninstalling httpie requirements$(END)
|
pdf:
|
||||||
- pip uninstall --yes pygments requests
|
# NOTE: rst2pdf needs to be installed manually and against a Python 2
|
||||||
|
@echo "Converting README.rst to PDF…"
|
||||||
|
rst2pdf \
|
||||||
|
--strip-elements-with-class=no-pdf \
|
||||||
|
README.rst \
|
||||||
|
-o README.pdf
|
||||||
|
@echo "Done"
|
||||||
|
@echo
|
||||||
|
|
||||||
@echo $(TAG)Uninstalling development requirements$(END)
|
|
||||||
- pip uninstall --yes -r $(REQUIREMENTS)
|
###############################################################################
|
||||||
|
# Homebrew
|
||||||
|
###############################################################################
|
||||||
|
|
||||||
|
brew-deps:
|
||||||
|
extras/brew-deps.py
|
||||||
|
|
||||||
|
brew-test:
|
||||||
|
- brew uninstall httpie
|
||||||
|
brew install --build-from-source ./extras/httpie.rb
|
||||||
|
brew test httpie
|
||||||
|
brew audit --strict httpie
|
||||||
|
1017  README.rst  (file diff suppressed because it is too large)
23  appveyor.yml  (deleted file)

@@ -1,23 +0,0 @@
-# https://ci.appveyor.com/project/jkbrzt/httpie
-build: false
-
-environment:
-  matrix:
-    - PYTHON: "C:/Python27"
-    # Python 3.4 has outdated pip
-    # - PYTHON: "C:/Python34"
-    - PYTHON: "C:/Python35"
-
-init:
-  - "ECHO %PYTHON%"
-  - ps: "ls C:/Python*"
-
-install:
-  # FIXME: updating pip fails with PermissionError
-  # - "%PYTHON%/Scripts/pip.exe install -U pip setuptools"
-  - "%PYTHON%/Scripts/pip.exe install -e ."
-
-test_script:
-  - "%PYTHON%/Scripts/pip.exe --version"
-  - "%PYTHON%/Scripts/http.exe --debug"
-  - "%PYTHON%/python.exe setup.py test"
63
extras/brew-deps.py
Executable file
63
extras/brew-deps.py
Executable file
@ -0,0 +1,63 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
"""
|
||||||
|
Generate Ruby code with URLs and file hashes for packages from PyPi
|
||||||
|
(i.e., httpie itself as well as its dependencies) to be included
|
||||||
|
in the Homebrew formula after a new release of HTTPie has been published
|
||||||
|
on PyPi.
|
||||||
|
|
||||||
|
<https://github.com/Homebrew/homebrew-core/blob/master/Formula/httpie.rb>
|
||||||
|
|
||||||
|
"""
|
||||||
|
import hashlib
|
||||||
|
import requests
|
||||||
|
|
||||||
|
|
||||||
|
PACKAGES = [
|
||||||
|
'httpie',
|
||||||
|
'Pygments',
|
||||||
|
'requests',
|
||||||
|
'certifi',
|
||||||
|
'urllib3',
|
||||||
|
'idna',
|
||||||
|
'chardet',
|
||||||
|
'PySocks',
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
def get_package_meta(package_name):
|
||||||
|
api_url = f'https://pypi.python.org/pypi/{package_name}/json'
|
||||||
|
resp = requests.get(api_url).json()
|
||||||
|
hasher = hashlib.sha256()
|
||||||
|
for release in resp['urls']:
|
||||||
|
download_url = release['url']
|
||||||
|
if download_url.endswith('.tar.gz'):
|
||||||
|
hasher.update(requests.get(download_url).content)
|
||||||
|
return {
|
||||||
|
'name': package_name,
|
||||||
|
'url': download_url,
|
||||||
|
'sha256': hasher.hexdigest(),
|
||||||
|
}
|
||||||
|
else:
|
||||||
|
raise RuntimeError(f'{package_name}: download not found: {resp}')
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
package_meta_map = {
|
||||||
|
package_name: get_package_meta(package_name)
|
||||||
|
for package_name in PACKAGES
|
||||||
|
}
|
||||||
|
httpie_meta = package_meta_map.pop('httpie')
|
||||||
|
print()
|
||||||
|
print(' url "{url}"'.format(url=httpie_meta['url']))
|
||||||
|
print(' sha256 "{sha256}"'.format(sha256=httpie_meta['sha256']))
|
||||||
|
print()
|
||||||
|
for dep_meta in package_meta_map.values():
|
||||||
|
print(' resource "{name}" do'.format(name=dep_meta['name']))
|
||||||
|
print(' url "{url}"'.format(url=dep_meta['url']))
|
||||||
|
print(' sha256 "{sha256}"'.format(sha256=dep_meta['sha256']))
|
||||||
|
print(' end')
|
||||||
|
print('')
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
main()
|
extras/httpie-completion.fish (new file, 59 lines)
@@ -0,0 +1,59 @@
function __fish_httpie_auth_types
    echo "basic"\t"Basic HTTP auth"
    echo "digest"\t"Digest HTTP auth"
end

function __fish_httpie_styles
    echo "autumn"
    echo "borland"
    echo "bw"
    echo "colorful"
    echo "default"
    echo "emacs"
    echo "friendly"
    echo "fruity"
    echo "igor"
    echo "manni"
    echo "monokai"
    echo "murphy"
    echo "native"
    echo "paraiso-dark"
    echo "paraiso-light"
    echo "pastie"
    echo "perldoc"
    echo "rrt"
    echo "solarized"
    echo "tango"
    echo "trac"
    echo "vim"
    echo "vs"
    echo "xcode"
end

complete -x -c http -s s -l style -d 'Output coloring style (default is "monokai")' -A -a '(__fish_httpie_styles)'
complete -c http -s f -l form -d 'Data items from the command line are serialized as form fields'
complete -c http -s j -l json -d '(default) Data items from the command line are serialized as a JSON object'
complete -x -c http -l pretty -d 'Controls output processing' -a "all colors format none" -A
complete -x -c http -s p -l print -d 'String specifying what the output should contain'
complete -c http -s v -l verbose -d 'Print the whole request as well as the response'
complete -c http -s h -l headers -d 'Print only the response headers'
complete -c http -s b -l body -d 'Print only the response body'
complete -c http -s S -l stream -d 'Always stream the output by line'
complete -c http -s o -l output -d 'Save output to FILE'
complete -c http -s d -l download -d 'Do not print the response body to stdout'
complete -c http -s c -l continue -d 'Resume an interrupted download'
complete -x -c http -l session -d 'Create, or reuse and update a session'
complete -x -c http -s a -l auth -d 'If only the username is provided (-a username), HTTPie will prompt for the password'
complete -x -c http -l auth-type -d 'The authentication mechanism to be used' -a '(__fish_httpie_auth_types)' -A
complete -x -c http -l proxy -d 'String mapping protocol to the URL of the proxy'
complete -c http -l follow -d 'Allow full redirects'
complete -x -c http -l verify -d 'SSL cert verification'
complete -c http -l cert -d 'SSL cert'
complete -c http -l cert-key -d 'Private SSL cert key'
complete -x -c http -l timeout -d 'Connection timeout in seconds'
complete -c http -l check-status -d 'Error with non-200 HTTP status code'
complete -c http -l ignore-stdin -d 'Do not attempt to read stdin'
complete -c http -l help -d 'Show help'
complete -c http -l version -d 'Show version'
complete -c http -l traceback -d 'Prints exception traceback should one occur'
complete -c http -l debug -d 'Show debugging information'
extras/httpie.rb (new file, 68 lines)
@@ -0,0 +1,68 @@
# The latest Homebrew formula as submitted to Homebrew/homebrew-core.
# Only useful for testing until it gets accepted by homebrew maintainers.
# (It will need to be updated from the repo version before next release.)
#
# https://github.com/Homebrew/homebrew-core/blob/master/Formula/httpie.rb
#
class Httpie < Formula
  include Language::Python::Virtualenv

  desc "User-friendly cURL replacement (command-line HTTP client)"
  homepage "https://httpie.org/"
  url "https://files.pythonhosted.org/packages/d5/a4/ab61c1dbfdef33c7b7f5f7df0d79eb5cd55a106601a4acc17f983f320b4a/httpie-1.0.3.tar.gz"
  sha256 "6d1b6e21da7d3ec030ae95536d4032c1129bdaf9de4adc72c596b87e5f646e80"
  head "https://github.com/jakubroztocil/httpie.git"

  bottle do
    cellar :any_skip_relocation
    sha256 "158258be68ac93de13860be2bef02da6fd8b68aa24b2e6609bcff1ec3f93e7a0" => :mojave
    sha256 "54352116b6fa2c3bd65f26136fdcb57986dbff8a52de5febf7aea59c126d29e1" => :high_sierra
    sha256 "9cce71768fe388808e11b26d651b44a6b54219f5406845b4273b5099f5c1f76f" => :sierra
  end

  depends_on "python"

  resource "Pygments" do
    url "https://files.pythonhosted.org/packages/7e/ae/26808275fc76bf2832deb10d3a3ed3107bc4de01b85dcccbe525f2cd6d1e/Pygments-2.4.2.tar.gz"
    sha256 "881c4c157e45f30af185c1ffe8d549d48ac9127433f2c380c24b84572ad66297"
  end

  resource "requests" do
    url "https://files.pythonhosted.org/packages/01/62/ddcf76d1d19885e8579acb1b1df26a852b03472c0e46d2b959a714c90608/requests-2.22.0.tar.gz"
    sha256 "11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4"
  end

  resource "certifi" do
    url "https://files.pythonhosted.org/packages/c5/67/5d0548226bcc34468e23a0333978f0e23d28d0b3f0c71a151aef9c3f7680/certifi-2019.6.16.tar.gz"
    sha256 "945e3ba63a0b9f577b1395204e13c3a231f9bc0223888be653286534e5873695"
  end

  resource "urllib3" do
    url "https://files.pythonhosted.org/packages/4c/13/2386233f7ee40aa8444b47f7463338f3cbdf00c316627558784e3f542f07/urllib3-1.25.3.tar.gz"
    sha256 "dbe59173209418ae49d485b87d1681aefa36252ee85884c31346debd19463232"
  end

  resource "idna" do
    url "https://files.pythonhosted.org/packages/ad/13/eb56951b6f7950cadb579ca166e448ba77f9d24efc03edd7e55fa57d04b7/idna-2.8.tar.gz"
    sha256 "c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407"
  end

  resource "chardet" do
    url "https://files.pythonhosted.org/packages/fc/bb/a5768c230f9ddb03acc9ef3f0d4a3cf93462473795d18e9535498c8f929d/chardet-3.0.4.tar.gz"
    sha256 "84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae"
  end

  resource "PySocks" do
    url "https://files.pythonhosted.org/packages/15/ab/35824cfdee1aac662e3298275fa1e6cbedb52126d1785f8977959b769ccf/PySocks-1.7.0.tar.gz"
    sha256 "d9031ea45fdfacbe59a99273e9f0448ddb33c1580fe3831c1b09557c5718977c"
  end

  def install
    virtualenv_install_with_resources
  end

  test do
    raw_url = "https://raw.githubusercontent.com/Homebrew/homebrew-core/master/Formula/httpie.rb"
    assert_match "PYTHONPATH", shell_output("#{bin}/http --ignore-stdin #{raw_url}")
  end
end
httpie.gif (new binary file, not shown; 1019 KiB)
httpie.png (binary file changed, not shown; 182 KiB -> 681 KiB)
httpie/__init__.py
@@ -2,26 +2,7 @@
 HTTPie - a CLI, cURL-like tool for humans.
 
 """
+__version__ = '2.0.0'
 __author__ = 'Jakub Roztocil'
-__version__ = '0.9.4'
 __licence__ = 'BSD'
-
-
-class ExitStatus:
-    """Exit status code constants."""
-    OK = 0
-    ERROR = 1
-    ERROR_TIMEOUT = 2
-    ERROR_TOO_MANY_REDIRECTS = 6
-
-    # Used only when requested with --check-status:
-    ERROR_HTTP_3XX = 3
-    ERROR_HTTP_4XX = 4
-    ERROR_HTTP_5XX = 5
-
-
-EXIT_STATUS_LABELS = dict(
-    (value, key)
-    for key, value in ExitStatus.__dict__.items()
-    if key.isupper()
-)
httpie/__main__.py
@@ -3,8 +3,18 @@
 
 """
 import sys
-from .core import main
+
+
+def main():
+    try:
+        from .core import main
+        exit_status = main()
+    except KeyboardInterrupt:
+        from httpie.status import ExitStatus
+        exit_status = ExitStatus.ERROR_CTRL_C
+
+    sys.exit(exit_status.value)
 
 
 if __name__ == '__main__':
-    sys.exit(main())
+    main()
httpie/cli/__init__.py (new file, 0 lines)

httpie/cli/argparser.py (new file, 387 lines)
@@ -0,0 +1,387 @@
import argparse
import errno
import os
import re
import sys
from argparse import RawDescriptionHelpFormatter
from textwrap import dedent
from urllib.parse import urlsplit

from httpie.cli.argtypes import AuthCredentials, KeyValueArgType, parse_auth
from httpie.cli.constants import (
    HTTP_GET, HTTP_POST, OUTPUT_OPTIONS, OUTPUT_OPTIONS_DEFAULT,
    OUTPUT_OPTIONS_DEFAULT_STDOUT_REDIRECTED, OUT_RESP_BODY, PRETTY_MAP,
    PRETTY_STDOUT_TTY_ONLY, SEPARATOR_CREDENTIALS, SEPARATOR_GROUP_ALL_ITEMS,
    SEPARATOR_GROUP_DATA_ITEMS, URL_SCHEME_RE,
    OUTPUT_OPTIONS_DEFAULT_OFFLINE,
)
from httpie.cli.exceptions import ParseError
from httpie.cli.requestitems import RequestItems
from httpie.context import Environment
from httpie.plugins import plugin_manager
from httpie.utils import ExplicitNullAuth, get_content_type


class HTTPieHelpFormatter(RawDescriptionHelpFormatter):
    """A nicer help formatter.

    Help for arguments can be indented and contain new lines.
    It will be de-dented and arguments in the help
    will be separated by a blank line for better readability.

    """

    def __init__(self, max_help_position=6, *args, **kwargs):
        # A smaller indent for args help.
        kwargs['max_help_position'] = max_help_position
        super().__init__(*args, **kwargs)

    def _split_lines(self, text, width):
        text = dedent(text).strip() + '\n\n'
        return text.splitlines()


class HTTPieArgumentParser(argparse.ArgumentParser):
    """Adds additional logic to `argparse.ArgumentParser`.

    Handles all input (CLI args, file args, stdin), applies defaults,
    and performs extra validation.

    """

    def __init__(self, *args, formatter_class=HTTPieHelpFormatter, **kwargs):
        kwargs['add_help'] = False
        super().__init__(*args, formatter_class=formatter_class, **kwargs)
        self.env = None
        self.args = None
        self.has_stdin_data = False

    # noinspection PyMethodOverriding
    def parse_args(
        self,
        env: Environment,
        args=None,
        namespace=None
    ) -> argparse.Namespace:
        self.env = env
        self.args, no_options = super().parse_known_args(args, namespace)

        if self.args.debug:
            self.args.traceback = True

        self.has_stdin_data = (
            self.env.stdin
            and not self.args.ignore_stdin
            and not self.env.stdin_isatty
        )

        # Arguments processing and environment setup.
        self._apply_no_options(no_options)
        self._validate_download_options()
        self._setup_standard_streams()
        self._process_output_options()
        self._process_pretty_options()
        self._guess_method()
        self._parse_items()

        if self.has_stdin_data:
            self._body_from_file(self.env.stdin)
        if not URL_SCHEME_RE.match(self.args.url):
            if os.path.basename(env.program_name) == 'https':
                scheme = 'https://'
            else:
                scheme = self.args.default_scheme + "://"

            # See if we're using curl style shorthand for localhost (:3000/foo)
            shorthand = re.match(r'^:(?!:)(\d*)(/?.*)$', self.args.url)
            if shorthand:
                port = shorthand.group(1)
                rest = shorthand.group(2)
                self.args.url = scheme + 'localhost'
                if port:
                    self.args.url += ':' + port
                self.args.url += rest
            else:
                self.args.url = scheme + self.args.url
        self._process_auth()

        return self.args

    # noinspection PyShadowingBuiltins
    def _print_message(self, message, file=None):
        # Sneak in our stderr/stdout.
        file = {
            sys.stdout: self.env.stdout,
            sys.stderr: self.env.stderr,
            None: self.env.stderr
        }.get(file, file)
        if not hasattr(file, 'buffer') and isinstance(message, str):
            message = message.encode(self.env.stdout_encoding)
        super()._print_message(message, file)

    def _setup_standard_streams(self):
        """
        Modify `env.stdout` and `env.stdout_isatty` based on args, if needed.

        """
        self.args.output_file_specified = bool(self.args.output_file)
        if self.args.download:
            # FIXME: Come up with a cleaner solution.
            if not self.args.output_file and not self.env.stdout_isatty:
                # Use stdout as the download output file.
                self.args.output_file = self.env.stdout
            # With `--download`, we write everything that would normally go to
            # `stdout` to `stderr` instead. Let's replace the stream so that
            # we don't have to use many `if`s throughout the codebase.
            # The response body will be treated separately.
            self.env.stdout = self.env.stderr
            self.env.stdout_isatty = self.env.stderr_isatty
        elif self.args.output_file:
            # When not `--download`ing, then `--output` simply replaces
            # `stdout`. The file is opened for appending, which isn't what
            # we want in this case.
            self.args.output_file.seek(0)
            try:
                self.args.output_file.truncate()
            except IOError as e:
                if e.errno == errno.EINVAL:
                    # E.g. /dev/null on Linux.
                    pass
                else:
                    raise
            self.env.stdout = self.args.output_file
            self.env.stdout_isatty = False

    def _process_auth(self):
        # TODO: refactor
        self.args.auth_plugin = None
        default_auth_plugin = plugin_manager.get_auth_plugins()[0]
        auth_type_set = self.args.auth_type is not None
        url = urlsplit(self.args.url)

        if self.args.auth is None and not auth_type_set:
            if url.username is not None:
                # Handle http://username:password@hostname/
                username = url.username
                password = url.password or ''
                self.args.auth = AuthCredentials(
                    key=username,
                    value=password,
                    sep=SEPARATOR_CREDENTIALS,
                    orig=SEPARATOR_CREDENTIALS.join([username, password])
                )

        if self.args.auth is not None or auth_type_set:
            if not self.args.auth_type:
                self.args.auth_type = default_auth_plugin.auth_type
            plugin = plugin_manager.get_auth_plugin(self.args.auth_type)()

            if plugin.auth_require and self.args.auth is None:
                self.error('--auth required')

            plugin.raw_auth = self.args.auth
            self.args.auth_plugin = plugin
            already_parsed = isinstance(self.args.auth, AuthCredentials)

            if self.args.auth is None or not plugin.auth_parse:
                self.args.auth = plugin.get_auth()
            else:
                if already_parsed:
                    # from the URL
                    credentials = self.args.auth
                else:
                    credentials = parse_auth(self.args.auth)

                if (not credentials.has_password()
                        and plugin.prompt_password):
                    if self.args.ignore_stdin:
                        # Non-tty stdin read by now
                        self.error(
                            'Unable to prompt for passwords because'
                            ' --ignore-stdin is set.'
                        )
                    credentials.prompt_password(url.netloc)
                self.args.auth = plugin.get_auth(
                    username=credentials.key,
                    password=credentials.value,
                )
        if not self.args.auth and self.args.ignore_netrc:
            # Set a no-op auth to force requests to ignore .netrc
            # <https://github.com/psf/requests/issues/2773#issuecomment-174312831>
            self.args.auth = ExplicitNullAuth()

    def _apply_no_options(self, no_options):
        """For every `--no-OPTION` in `no_options`, set `args.OPTION` to
        its default value. This allows for un-setting of options, e.g.,
        specified in config.

        """
        invalid = []

        for option in no_options:
            if not option.startswith('--no-'):
                invalid.append(option)
                continue

            # --no-option => --option
            inverted = '--' + option[5:]
            for action in self._actions:
                if inverted in action.option_strings:
                    setattr(self.args, action.dest, action.default)
                    break
            else:
                invalid.append(option)

        if invalid:
            msg = 'unrecognized arguments: %s'
            self.error(msg % ' '.join(invalid))

    def _body_from_file(self, fd):
        """There can only be one source of request data.

        Bytes are always read.

        """
        if self.args.data:
            self.error('Request body (from stdin or a file) and request '
                       'data (key=value) cannot be mixed. Pass '
                       '--ignore-stdin to let key/value take priority.')
        self.args.data = getattr(fd, 'buffer', fd).read()

    def _guess_method(self):
        """Set `args.method` if not specified to either POST or GET
        based on whether the request has data or not.

        """
        if self.args.method is None:
            # Invoked as `http URL'.
            assert not self.args.request_items
            if self.has_stdin_data:
                self.args.method = HTTP_POST
            else:
                self.args.method = HTTP_GET

        # FIXME: False positive, e.g., "localhost" matches but is a valid URL.
        elif not re.match('^[a-zA-Z]+$', self.args.method):
            # Invoked as `http URL item+'. The URL is now in `args.method`
            # and the first ITEM is now incorrectly in `args.url`.
            try:
                # Parse the URL as an ITEM and store it as the first ITEM arg.
                self.args.request_items.insert(0, KeyValueArgType(
                    *SEPARATOR_GROUP_ALL_ITEMS).__call__(self.args.url))

            except argparse.ArgumentTypeError as e:
                if self.args.traceback:
                    raise
                self.error(e.args[0])

            else:
                # Set the URL correctly
                self.args.url = self.args.method
                # Infer the method
                has_data = (
                    self.has_stdin_data
                    or any(
                        item.sep in SEPARATOR_GROUP_DATA_ITEMS
                        for item in self.args.request_items)
                )
                self.args.method = HTTP_POST if has_data else HTTP_GET

    def _parse_items(self):
        """
        Parse `args.request_items` into `args.headers`, `args.data`,
        `args.params`, and `args.files`.

        """
        try:
            request_items = RequestItems.from_args(
                request_item_args=self.args.request_items,
                as_form=self.args.form,
            )
        except ParseError as e:
            if self.args.traceback:
                raise
            self.error(e.args[0])
        else:
            self.args.headers = request_items.headers
            self.args.data = request_items.data
            self.args.files = request_items.files
            self.args.params = request_items.params

        if self.args.files and not self.args.form:
            # `http url @/path/to/file`
            file_fields = list(self.args.files.keys())
            if file_fields != ['']:
                self.error(
                    'Invalid file fields (perhaps you meant --form?): %s'
                    % ','.join(file_fields))

            fn, fd, ct = self.args.files['']
            self.args.files = {}

            self._body_from_file(fd)

            if 'Content-Type' not in self.args.headers:
                content_type = get_content_type(fn)
                if content_type:
                    self.args.headers['Content-Type'] = content_type

    def _process_output_options(self):
        """Apply defaults to output options, or validate the provided ones.

        The default output options are stdout-type-sensitive.

        """

        def check_options(value, option):
            unknown = set(value) - OUTPUT_OPTIONS
            if unknown:
                self.error('Unknown output options: {0}={1}'.format(
                    option,
                    ','.join(unknown)
                ))

        if self.args.verbose:
            self.args.all = True

        if self.args.output_options is None:
            if self.args.verbose:
                self.args.output_options = ''.join(OUTPUT_OPTIONS)
            elif self.args.offline:
                self.args.output_options = OUTPUT_OPTIONS_DEFAULT_OFFLINE
            elif not self.env.stdout_isatty:
                self.args.output_options = OUTPUT_OPTIONS_DEFAULT_STDOUT_REDIRECTED
            else:
                self.args.output_options = OUTPUT_OPTIONS_DEFAULT

        if self.args.output_options_history is None:
            self.args.output_options_history = self.args.output_options

        check_options(self.args.output_options, '--print')
        check_options(self.args.output_options_history, '--history-print')

        if self.args.download and OUT_RESP_BODY in self.args.output_options:
            # Response body is always downloaded with --download and it goes
            # through a different routine, so we remove it.
            self.args.output_options = str(
                set(self.args.output_options) - set(OUT_RESP_BODY))

    def _process_pretty_options(self):
        if self.args.prettify == PRETTY_STDOUT_TTY_ONLY:
            self.args.prettify = PRETTY_MAP[
                'all' if self.env.stdout_isatty else 'none']
        elif (self.args.prettify and self.env.is_windows
                and self.args.output_file):
            self.error('Only terminal output can be colorized on Windows.')
        else:
            # noinspection PyTypeChecker
            self.args.prettify = PRETTY_MAP[self.args.prettify]

    def _validate_download_options(self):
        if not self.args.download:
            if self.args.download_resume:
                self.error('--continue only works with --download')
        if self.args.download_resume and not (
                self.args.download and self.args.output_file):
            self.error('--continue requires --output to be specified')
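Not part of the diff: a minimal standalone sketch of the curl-style localhost shorthand that HTTPieArgumentParser.parse_args() applies above (the helper name expand_shorthand is made up purely for illustration):

    import re

    def expand_shorthand(url, scheme='http://'):
        # Mirrors the shorthand branch in parse_args(): ':3000/foo' -> 'http://localhost:3000/foo'
        shorthand = re.match(r'^:(?!:)(\d*)(/?.*)$', url)
        if not shorthand:
            return scheme + url
        port, rest = shorthand.group(1), shorthand.group(2)
        expanded = scheme + 'localhost'
        if port:
            expanded += ':' + port
        return expanded + rest

    assert expand_shorthand(':3000/foo') == 'http://localhost:3000/foo'
    assert expand_shorthand(':/foo') == 'http://localhost/foo'
    assert expand_shorthand('example.org') == 'http://example.org'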
httpie/cli/argtypes.py (new file, 183 lines)
@@ -0,0 +1,183 @@
import argparse
import getpass
import os
import sys
from typing import Union, List, Optional

from httpie.cli.constants import SEPARATOR_CREDENTIALS
from httpie.sessions import VALID_SESSION_NAME_PATTERN


class KeyValueArg:
    """Base key-value pair parsed from CLI."""

    def __init__(self, key: str, value: Optional[str], sep: str, orig: str):
        self.key = key
        self.value = value
        self.sep = sep
        self.orig = orig

    def __eq__(self, other: 'KeyValueArg'):
        return self.__dict__ == other.__dict__

    def __repr__(self):
        return repr(self.__dict__)


class SessionNameValidator:

    def __init__(self, error_message: str):
        self.error_message = error_message

    def __call__(self, value: str) -> str:
        # Session name can be a path or just a name.
        if (os.path.sep not in value
                and not VALID_SESSION_NAME_PATTERN.search(value)):
            raise argparse.ArgumentError(None, self.error_message)
        return value


class Escaped(str):
    """Represents an escaped character."""

    def __repr__(self):
        return f"Escaped({repr(str(self))})"


class KeyValueArgType:
    """A key-value pair argument type used with `argparse`.

    Parses a key-value arg and constructs a `KeyValueArg` instance.
    Used for headers, form data, and other key-value pair types.

    """

    key_value_class = KeyValueArg

    def __init__(self, *separators: str):
        self.separators = separators
        self.special_characters = set('\\')
        for separator in separators:
            self.special_characters.update(separator)

    def __call__(self, s: str) -> KeyValueArg:
        """Parse raw string arg and return `self.key_value_class` instance.

        The best of `self.separators` is determined (first found, longest).
        Back slash escaped characters aren't considered as separators
        (or parts thereof). Literal back slash characters have to be escaped
        as well (r'\\').

        """
        tokens = self.tokenize(s)

        # Sorting by length ensures that the longest one will be
        # chosen as it will overwrite any shorter ones starting
        # at the same position in the `found` dictionary.
        separators = sorted(self.separators, key=len)

        for i, token in enumerate(tokens):

            if isinstance(token, Escaped):
                continue

            found = {}
            for sep in separators:
                pos = token.find(sep)
                if pos != -1:
                    found[pos] = sep

            if found:
                # Starting first, longest separator found.
                sep = found[min(found.keys())]

                key, value = token.split(sep, 1)

                # Any preceding tokens are part of the key.
                key = ''.join(tokens[:i]) + key

                # Any following tokens are part of the value.
                value += ''.join(tokens[i + 1:])

                break

        else:
            raise argparse.ArgumentTypeError(f'{s!r} is not a valid value')

        return self.key_value_class(key=key, value=value, sep=sep, orig=s)

    def tokenize(self, s: str) -> List[Union[str, Escaped]]:
        r"""Tokenize the raw arg string

        There are only two token types - strings and escaped characters:

        >>> KeyValueArgType('=').tokenize(r'foo\=bar\\baz')
        ['foo', Escaped('='), 'bar', Escaped('\\'), 'baz']

        """
        tokens = ['']
        characters = iter(s)
        for char in characters:
            if char == '\\':
                char = next(characters, '')
                if char not in self.special_characters:
                    tokens[-1] += '\\' + char
                else:
                    tokens.extend([Escaped(char), ''])
            else:
                tokens[-1] += char
        return tokens


class AuthCredentials(KeyValueArg):
    """Represents parsed credentials."""

    def has_password(self) -> bool:
        return self.value is not None

    def prompt_password(self, host: str):
        prompt_text = f'http: password for {self.key}@{host}: '
        try:
            self.value = self._getpass(prompt_text)
        except (EOFError, KeyboardInterrupt):
            sys.stderr.write('\n')
            sys.exit(0)

    @staticmethod
    def _getpass(prompt):
        # To allow easy mocking.
        return getpass.getpass(str(prompt))


class AuthCredentialsArgType(KeyValueArgType):
    """A key-value arg type that parses credentials."""

    key_value_class = AuthCredentials

    def __call__(self, s):
        """Parse credentials from `s`.

        ("username" or "username:password").

        """
        try:
            return super().__call__(s)
        except argparse.ArgumentTypeError:
            # No password provided, will prompt for it later.
            return self.key_value_class(
                key=s,
                value=None,
                sep=SEPARATOR_CREDENTIALS,
                orig=s
            )


parse_auth = AuthCredentialsArgType(SEPARATOR_CREDENTIALS)


def readable_file_arg(filename):
    try:
        with open(filename, 'rb'):
            return filename
    except IOError as ex:
        raise argparse.ArgumentTypeError(f'{filename}: {ex.args[1]}')
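Not part of the diff: an illustrative sketch of how the KeyValueArgType defined above resolves separators (assuming the module is importable as httpie.cli.argtypes):

    from httpie.cli.argtypes import KeyValueArgType

    arg_type = KeyValueArgType(':', '=', ':=')

    header = arg_type('Accept:application/json')
    assert (header.key, header.sep, header.value) == ('Accept', ':', 'application/json')

    raw_json = arg_type('meta:={"tags": ["dev"]}')
    assert raw_json.sep == ':='  # the longest separator wins at the same position

    escaped = arg_type(r'path\=with\=equals=1')
    assert escaped.key == 'path=with=equals'  # backslash-escaped '=' is not a separator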
httpie/cli/constants.py (new file, 103 lines)
@@ -0,0 +1,103 @@
"""Parsing and processing of CLI input (args, auth credentials, files, stdin).

"""
import re
import ssl


# TODO: Use MultiDict for headers once added to `requests`.
# <https://github.com/jakubroztocil/httpie/issues/130>


# ALPHA *( ALPHA / DIGIT / "+" / "-" / "." )
# <https://tools.ietf.org/html/rfc3986#section-3.1>
URL_SCHEME_RE = re.compile(r'^[a-z][a-z0-9.+-]*://', re.IGNORECASE)

HTTP_POST = 'POST'
HTTP_GET = 'GET'

# Various separators used in args
SEPARATOR_HEADER = ':'
SEPARATOR_HEADER_EMPTY = ';'
SEPARATOR_CREDENTIALS = ':'
SEPARATOR_PROXY = ':'
SEPARATOR_DATA_STRING = '='
SEPARATOR_DATA_RAW_JSON = ':='
SEPARATOR_FILE_UPLOAD = '@'
SEPARATOR_DATA_EMBED_FILE_CONTENTS = '=@'
SEPARATOR_DATA_EMBED_RAW_JSON_FILE = ':=@'
SEPARATOR_QUERY_PARAM = '=='

# Separators that become request data
SEPARATOR_GROUP_DATA_ITEMS = frozenset({
    SEPARATOR_DATA_STRING,
    SEPARATOR_DATA_RAW_JSON,
    SEPARATOR_FILE_UPLOAD,
    SEPARATOR_DATA_EMBED_FILE_CONTENTS,
    SEPARATOR_DATA_EMBED_RAW_JSON_FILE
})

# Separators for items whose value is a filename to be embedded
SEPARATOR_GROUP_DATA_EMBED_ITEMS = frozenset({
    SEPARATOR_DATA_EMBED_FILE_CONTENTS,
    SEPARATOR_DATA_EMBED_RAW_JSON_FILE,
})

# Separators for raw JSON items
SEPARATOR_GROUP_RAW_JSON_ITEMS = frozenset([
    SEPARATOR_DATA_RAW_JSON,
    SEPARATOR_DATA_EMBED_RAW_JSON_FILE,
])

# Separators allowed in ITEM arguments
SEPARATOR_GROUP_ALL_ITEMS = frozenset({
    SEPARATOR_HEADER,
    SEPARATOR_HEADER_EMPTY,
    SEPARATOR_QUERY_PARAM,
    SEPARATOR_DATA_STRING,
    SEPARATOR_DATA_RAW_JSON,
    SEPARATOR_FILE_UPLOAD,
    SEPARATOR_DATA_EMBED_FILE_CONTENTS,
    SEPARATOR_DATA_EMBED_RAW_JSON_FILE,
})

# Output options
OUT_REQ_HEAD = 'H'
OUT_REQ_BODY = 'B'
OUT_RESP_HEAD = 'h'
OUT_RESP_BODY = 'b'

OUTPUT_OPTIONS = frozenset({
    OUT_REQ_HEAD,
    OUT_REQ_BODY,
    OUT_RESP_HEAD,
    OUT_RESP_BODY
})

# Pretty
PRETTY_MAP = {
    'all': ['format', 'colors'],
    'colors': ['colors'],
    'format': ['format'],
    'none': []
}
PRETTY_STDOUT_TTY_ONLY = object()

# Defaults
OUTPUT_OPTIONS_DEFAULT = OUT_RESP_HEAD + OUT_RESP_BODY
OUTPUT_OPTIONS_DEFAULT_STDOUT_REDIRECTED = OUT_RESP_BODY
OUTPUT_OPTIONS_DEFAULT_OFFLINE = OUT_REQ_HEAD + OUT_REQ_BODY

SSL_VERSION_ARG_MAPPING = {
    'ssl2.3': 'PROTOCOL_SSLv23',
    'ssl3': 'PROTOCOL_SSLv3',
    'tls1': 'PROTOCOL_TLSv1',
    'tls1.1': 'PROTOCOL_TLSv1_1',
    'tls1.2': 'PROTOCOL_TLSv1_2',
    'tls1.3': 'PROTOCOL_TLSv1_3',
}
SSL_VERSION_ARG_MAPPING = {
    cli_arg: getattr(ssl, ssl_constant)
    for cli_arg, ssl_constant in SSL_VERSION_ARG_MAPPING.items()
    if hasattr(ssl, ssl_constant)
}
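Not part of the diff: a quick sanity check of how the output-option defaults above combine (illustrative only; assumes the module is importable as httpie.cli.constants):

    from httpie.cli.constants import (
        OUTPUT_OPTIONS_DEFAULT, OUTPUT_OPTIONS_DEFAULT_OFFLINE,
        SEPARATOR_DATA_RAW_JSON, SEPARATOR_GROUP_ALL_ITEMS,
    )

    assert OUTPUT_OPTIONS_DEFAULT == 'hb'          # response headers + response body
    assert OUTPUT_OPTIONS_DEFAULT_OFFLINE == 'HB'  # request headers + request body (--offline)
    assert SEPARATOR_DATA_RAW_JSON in SEPARATOR_GROUP_ALL_ITEMS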
@ -1,61 +1,42 @@
|
|||||||
"""CLI arguments definition.
|
"""
|
||||||
|
CLI arguments definition.
|
||||||
NOTE: the CLI interface may change before reaching v1.0.
|
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
from argparse import (FileType, OPTIONAL, SUPPRESS, ZERO_OR_MORE)
|
||||||
from textwrap import dedent, wrap
|
from textwrap import dedent, wrap
|
||||||
# noinspection PyCompatibility
|
|
||||||
from argparse import (RawDescriptionHelpFormatter, FileType,
|
|
||||||
OPTIONAL, ZERO_OR_MORE, SUPPRESS)
|
|
||||||
|
|
||||||
from httpie import __doc__, __version__
|
from httpie import __doc__, __version__
|
||||||
from httpie.plugins.builtin import BuiltinAuthPlugin
|
from httpie.cli.argparser import HTTPieArgumentParser
|
||||||
|
from httpie.cli.argtypes import (
|
||||||
|
KeyValueArgType, SessionNameValidator, readable_file_arg,
|
||||||
|
)
|
||||||
|
from httpie.cli.constants import (
|
||||||
|
OUTPUT_OPTIONS, OUTPUT_OPTIONS_DEFAULT, OUT_REQ_BODY, OUT_REQ_HEAD,
|
||||||
|
OUT_RESP_BODY, OUT_RESP_HEAD, PRETTY_MAP, PRETTY_STDOUT_TTY_ONLY,
|
||||||
|
SEPARATOR_GROUP_ALL_ITEMS, SEPARATOR_PROXY, SSL_VERSION_ARG_MAPPING,
|
||||||
|
)
|
||||||
|
from httpie.output.formatters.colors import (
|
||||||
|
AUTO_STYLE, AVAILABLE_STYLES, DEFAULT_STYLE,
|
||||||
|
)
|
||||||
from httpie.plugins import plugin_manager
|
from httpie.plugins import plugin_manager
|
||||||
|
from httpie.plugins.builtin import BuiltinAuthPlugin
|
||||||
from httpie.sessions import DEFAULT_SESSIONS_DIR
|
from httpie.sessions import DEFAULT_SESSIONS_DIR
|
||||||
from httpie.output.formatters.colors import AVAILABLE_STYLES, DEFAULT_STYLE
|
|
||||||
from httpie.input import (HTTPieArgumentParser,
|
|
||||||
AuthCredentialsArgType, KeyValueArgType,
|
|
||||||
SEP_PROXY, SEP_CREDENTIALS, SEP_GROUP_ALL_ITEMS,
|
|
||||||
OUT_REQ_HEAD, OUT_REQ_BODY, OUT_RESP_HEAD,
|
|
||||||
OUT_RESP_BODY, OUTPUT_OPTIONS,
|
|
||||||
OUTPUT_OPTIONS_DEFAULT, PRETTY_MAP,
|
|
||||||
PRETTY_STDOUT_TTY_ONLY, SessionNameValidator,
|
|
||||||
readable_file_arg, SSL_VERSION_ARG_MAPPING)
|
|
||||||
|
|
||||||
|
|
||||||
class HTTPieHelpFormatter(RawDescriptionHelpFormatter):
|
|
||||||
"""A nicer help formatter.
|
|
||||||
|
|
||||||
Help for arguments can be indented and contain new lines.
|
|
||||||
It will be de-dented and arguments in the help
|
|
||||||
will be separated by a blank line for better readability.
|
|
||||||
|
|
||||||
|
|
||||||
"""
|
|
||||||
def __init__(self, max_help_position=6, *args, **kwargs):
|
|
||||||
# A smaller indent for args help.
|
|
||||||
kwargs['max_help_position'] = max_help_position
|
|
||||||
super(HTTPieHelpFormatter, self).__init__(*args, **kwargs)
|
|
||||||
|
|
||||||
def _split_lines(self, text, width):
|
|
||||||
text = dedent(text).strip() + '\n\n'
|
|
||||||
return text.splitlines()
|
|
||||||
|
|
||||||
parser = HTTPieArgumentParser(
|
parser = HTTPieArgumentParser(
|
||||||
formatter_class=HTTPieHelpFormatter,
|
prog='http',
|
||||||
description='%s <http://httpie.org>' % __doc__.strip(),
|
description='%s <https://httpie.org>' % __doc__.strip(),
|
||||||
epilog=dedent("""
|
epilog=dedent("""
|
||||||
For every --OPTION there is also a --no-OPTION that reverts OPTION
|
For every --OPTION there is also a --no-OPTION that reverts OPTION
|
||||||
to its default value.
|
to its default value.
|
||||||
|
|
||||||
Suggestions and bug reports are greatly appreciated:
|
Suggestions and bug reports are greatly appreciated:
|
||||||
|
|
||||||
https://github.com/jkbrzt/httpie/issues
|
https://github.com/jakubroztocil/httpie/issues
|
||||||
|
|
||||||
"""),
|
"""),
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
#######################################################################
|
#######################################################################
|
||||||
# Positional arguments.
|
# Positional arguments.
|
||||||
#######################################################################
|
#######################################################################
|
||||||
@ -69,7 +50,7 @@ positional = parser.add_argument_group(
|
|||||||
""")
|
""")
|
||||||
)
|
)
|
||||||
positional.add_argument(
|
positional.add_argument(
|
||||||
'method',
|
dest='method',
|
||||||
metavar='METHOD',
|
metavar='METHOD',
|
||||||
nargs=OPTIONAL,
|
nargs=OPTIONAL,
|
||||||
default=None,
|
default=None,
|
||||||
@ -85,10 +66,11 @@ positional.add_argument(
|
|||||||
"""
|
"""
|
||||||
)
|
)
|
||||||
positional.add_argument(
|
positional.add_argument(
|
||||||
'url',
|
dest='url',
|
||||||
metavar='URL',
|
metavar='URL',
|
||||||
help="""
|
help="""
|
||||||
The scheme defaults to 'http://' if the URL does not include one.
|
The scheme defaults to 'http://' if the URL does not include one.
|
||||||
|
(You can override this with: --default-scheme=https)
|
||||||
|
|
||||||
You can also use a shorthand for localhost
|
You can also use a shorthand for localhost
|
||||||
|
|
||||||
@ -98,10 +80,11 @@ positional.add_argument(
|
|||||||
"""
|
"""
|
||||||
)
|
)
|
||||||
positional.add_argument(
|
positional.add_argument(
|
||||||
'items',
|
dest='request_items',
|
||||||
metavar='REQUEST_ITEM',
|
metavar='REQUEST_ITEM',
|
||||||
nargs=ZERO_OR_MORE,
|
nargs=ZERO_OR_MORE,
|
||||||
type=KeyValueArgType(*SEP_GROUP_ALL_ITEMS),
|
default=None,
|
||||||
|
type=KeyValueArgType(*SEPARATOR_GROUP_ALL_ITEMS),
|
||||||
help=r"""
|
help=r"""
|
||||||
Optional key-value pairs to be included in the request. The separator used
|
Optional key-value pairs to be included in the request. The separator used
|
||||||
determines the type:
|
determines the type:
|
||||||
@ -142,7 +125,6 @@ positional.add_argument(
|
|||||||
"""
|
"""
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
#######################################################################
|
#######################################################################
|
||||||
# Content type.
|
# Content type.
|
||||||
#######################################################################
|
#######################################################################
|
||||||
@ -175,6 +157,28 @@ content_type.add_argument(
|
|||||||
"""
|
"""
|
||||||
)
|
)
|
||||||
|
|
||||||
|
#######################################################################
|
||||||
|
# Content processing.
|
||||||
|
#######################################################################
|
||||||
|
|
||||||
|
content_processing = parser.add_argument_group(
|
||||||
|
title='Content Processing Options',
|
||||||
|
description=None
|
||||||
|
)
|
||||||
|
|
||||||
|
content_processing.add_argument(
|
||||||
|
'--compress', '-x',
|
||||||
|
action='count',
|
||||||
|
default=0,
|
||||||
|
help="""
|
||||||
|
Content compressed (encoded) with Deflate algorithm.
|
||||||
|
The Content-Encoding header is set to deflate.
|
||||||
|
|
||||||
|
Compression is skipped if it appears that compression ratio is
|
||||||
|
negative. Compression can be forced by repeating the argument.
|
||||||
|
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
|
||||||
#######################################################################
|
#######################################################################
|
||||||
# Output processing
|
# Output processing
|
||||||
@ -203,22 +207,24 @@ output_processing.add_argument(
|
|||||||
help="""
|
help="""
|
||||||
Output coloring style (default is "{default}"). One of:
|
Output coloring style (default is "{default}"). One of:
|
||||||
|
|
||||||
{available}
|
{available_styles}
|
||||||
|
|
||||||
For this option to work properly, please make sure that the $TERM
|
The "{auto_style}" style follows your terminal's ANSI color styles.
|
||||||
environment variable is set to "xterm-256color" or similar
|
|
||||||
|
For non-{auto_style} styles to work properly, please make sure that the
|
||||||
|
$TERM environment variable is set to "xterm-256color" or similar
|
||||||
(e.g., via `export TERM=xterm-256color' in your ~/.bashrc).
|
(e.g., via `export TERM=xterm-256color' in your ~/.bashrc).
|
||||||
|
|
||||||
""".format(
|
""".format(
|
||||||
default=DEFAULT_STYLE,
|
default=DEFAULT_STYLE,
|
||||||
available='\n'.join(
|
available_styles='\n'.join(
|
||||||
'{0}{1}'.format(8*' ', line.strip())
|
'{0}{1}'.format(8 * ' ', line.strip())
|
||||||
for line in wrap(', '.join(sorted(AVAILABLE_STYLES)), 60)
|
for line in wrap(', '.join(sorted(AVAILABLE_STYLES)), 60)
|
||||||
).rstrip(),
|
).rstrip(),
|
||||||
|
auto_style=AUTO_STYLE,
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
#######################################################################
|
#######################################################################
|
||||||
# Output options
|
# Output options
|
||||||
#######################################################################
|
#######################################################################
|
||||||
@ -228,49 +234,40 @@ output_options.add_argument(
|
|||||||
'--print', '-p',
|
'--print', '-p',
|
||||||
dest='output_options',
|
dest='output_options',
|
||||||
metavar='WHAT',
|
metavar='WHAT',
|
||||||
help="""
|
help=f"""
|
||||||
String specifying what the output should contain:
|
String specifying what the output should contain:
|
||||||
|
|
||||||
'{req_head}' request headers
|
'{OUT_REQ_HEAD}' request headers
|
||||||
'{req_body}' request body
|
'{OUT_REQ_BODY}' request body
|
||||||
'{res_head}' response headers
|
'{OUT_RESP_HEAD}' response headers
|
||||||
'{res_body}' response body
|
'{OUT_RESP_BODY}' response body
|
||||||
|
|
||||||
The default behaviour is '{default}' (i.e., the response headers and body
|
The default behaviour is '{OUTPUT_OPTIONS_DEFAULT}' (i.e., the response
|
||||||
is printed), if standard output is not redirected. If the output is piped
|
headers and body is printed), if standard output is not redirected.
|
||||||
to another program or to a file, then only the response body is printed
|
If the output is piped to another program or to a file, then only the
|
||||||
by default.
|
response body is printed by default.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
.format(
|
|
||||||
req_head=OUT_REQ_HEAD,
|
|
||||||
req_body=OUT_REQ_BODY,
|
|
||||||
res_head=OUT_RESP_HEAD,
|
|
||||||
res_body=OUT_RESP_BODY,
|
|
||||||
default=OUTPUT_OPTIONS_DEFAULT,
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
output_options.add_argument(
|
output_options.add_argument(
|
||||||
'--headers', '-h',
|
'--headers', '-h',
|
||||||
dest='output_options',
|
dest='output_options',
|
||||||
action='store_const',
|
action='store_const',
|
||||||
const=OUT_RESP_HEAD,
|
const=OUT_RESP_HEAD,
|
||||||
help="""
|
help=f"""
|
||||||
Print only the response headers. Shortcut for --print={0}.
|
Print only the response headers. Shortcut for --print={OUT_RESP_HEAD}.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
.format(OUT_RESP_HEAD)
|
|
||||||
)
|
)
|
||||||
output_options.add_argument(
|
output_options.add_argument(
|
||||||
'--body', '-b',
|
'--body', '-b',
|
||||||
dest='output_options',
|
dest='output_options',
|
||||||
action='store_const',
|
action='store_const',
|
||||||
const=OUT_RESP_BODY,
|
const=OUT_RESP_BODY,
|
||||||
help="""
|
help=f"""
|
||||||
Print only the response body. Shortcut for --print={0}.
|
Print only the response body. Shortcut for --print={OUT_RESP_BODY}.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
.format(OUT_RESP_BODY)
|
|
||||||
)
|
)
|
||||||
|
|
||||||
output_options.add_argument(
|
output_options.add_argument(
|
||||||
@ -282,8 +279,7 @@ output_options.add_argument(
|
|||||||
any intermediary requests/responses (such as redirects).
|
any intermediary requests/responses (such as redirects).
|
||||||
It's a shortcut for: --all --print={0}
|
It's a shortcut for: --all --print={0}
|
||||||
|
|
||||||
"""
|
""".format(''.join(OUTPUT_OPTIONS))
|
||||||
.format(''.join(OUTPUT_OPTIONS))
|
|
||||||
)
|
)
|
||||||
output_options.add_argument(
|
output_options.add_argument(
|
||||||
'--all',
|
'--all',
|
||||||
@ -365,13 +361,12 @@ output_options.add_argument(
|
|||||||
"""
|
"""
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
#######################################################################
|
#######################################################################
|
||||||
# Sessions
|
# Sessions
|
||||||
#######################################################################
|
#######################################################################
|
||||||
|
|
||||||
sessions = parser.add_argument_group(title='Sessions')\
|
sessions = parser.add_argument_group(title='Sessions') \
|
||||||
.add_mutually_exclusive_group(required=False)
|
.add_mutually_exclusive_group(required=False)
|
||||||
|
|
||||||
session_name_validator = SessionNameValidator(
|
session_name_validator = SessionNameValidator(
|
||||||
'Session name contains invalid characters.'
|
'Session name contains invalid characters.'
|
||||||
@ -381,17 +376,16 @@ sessions.add_argument(
|
|||||||
'--session',
|
'--session',
|
||||||
metavar='SESSION_NAME_OR_PATH',
|
metavar='SESSION_NAME_OR_PATH',
|
||||||
type=session_name_validator,
|
type=session_name_validator,
|
||||||
help="""
|
help=f"""
|
||||||
Create, or reuse and update a session. Within a session, custom headers,
|
Create, or reuse and update a session. Within a session, custom headers,
|
||||||
auth credential, as well as any cookies sent by the server persist between
|
auth credential, as well as any cookies sent by the server persist between
|
||||||
requests.
|
requests.
|
||||||
|
|
||||||
Session files are stored in:
|
Session files are stored in:
|
||||||
|
|
||||||
{session_dir}/<HOST>/<SESSION_NAME>.json.
|
{DEFAULT_SESSIONS_DIR}/<HOST>/<SESSION_NAME>.json.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
.format(session_dir=DEFAULT_SESSIONS_DIR)
|
|
||||||
)
|
)
|
||||||
sessions.add_argument(
|
sessions.add_argument(
|
||||||
'--session-read-only',
|
'--session-read-only',
|
||||||
@ -412,8 +406,8 @@ sessions.add_argument(
|
|||||||
auth = parser.add_argument_group(title='Authentication')
|
auth = parser.add_argument_group(title='Authentication')
|
||||||
auth.add_argument(
|
auth.add_argument(
|
||||||
'--auth', '-a',
|
'--auth', '-a',
|
||||||
|
default=None,
|
||||||
metavar='USER[:PASS]',
|
metavar='USER[:PASS]',
|
||||||
type=AuthCredentialsArgType(SEP_CREDENTIALS),
|
|
||||||
help="""
|
help="""
|
||||||
If only the username is provided (-a username), HTTPie will prompt
|
If only the username is provided (-a username), HTTPie will prompt
|
||||||
for the password.
|
for the password.
|
||||||
@ -421,18 +415,28 @@ auth.add_argument(
|
|||||||
""",
|
""",
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class _AuthTypeLazyChoices:
|
||||||
|
# Needed for plugin testing
|
||||||
|
|
||||||
|
def __contains__(self, item):
|
||||||
|
return item in plugin_manager.get_auth_plugin_mapping()
|
||||||
|
|
||||||
|
def __iter__(self):
|
||||||
|
return iter(sorted(plugin_manager.get_auth_plugin_mapping().keys()))
|
||||||
|
|
||||||
|
|
||||||
_auth_plugins = plugin_manager.get_auth_plugins()
|
_auth_plugins = plugin_manager.get_auth_plugins()
|
||||||
auth.add_argument(
|
auth.add_argument(
|
||||||
'--auth-type', '-A',
|
'--auth-type', '-A',
|
||||||
choices=[plugin.auth_type for plugin in _auth_plugins],
|
choices=_AuthTypeLazyChoices(),
|
||||||
default=_auth_plugins[0].auth_type,
|
default=None,
|
||||||
help="""
|
help="""
|
||||||
The authentication mechanism to be used. Defaults to "{default}".
|
The authentication mechanism to be used. Defaults to "{default}".
|
||||||
|
|
||||||
{types}
|
{types}
|
||||||
|
|
||||||
"""
|
""".format(default=_auth_plugins[0].auth_type, types='\n '.join(
|
||||||
.format(default=_auth_plugins[0].auth_type, types='\n '.join(
|
|
||||||
'"{type}": {name}{package}{description}'.format(
|
'"{type}": {name}{package}{description}'.format(
|
||||||
type=plugin.auth_type,
|
type=plugin.auth_type,
|
||||||
name=plugin.name,
|
name=plugin.name,
|
||||||
@ -448,7 +452,15 @@ auth.add_argument(
|
|||||||
for plugin in _auth_plugins
|
for plugin in _auth_plugins
|
||||||
)),
|
)),
|
||||||
)
|
)
|
||||||
|
auth.add_argument(
|
||||||
|
'--ignore-netrc',
|
||||||
|
default=False,
|
||||||
|
action='store_true',
|
||||||
|
help="""
|
||||||
|
Ignore credentials from .netrc.
|
||||||
|
|
||||||
|
""",
|
||||||
|
)
|
||||||
|
|
||||||
#######################################################################
|
#######################################################################
|
||||||
# Network
|
# Network
|
||||||
@ -456,16 +468,25 @@ auth.add_argument(
|
|||||||
|
|
||||||
network = parser.add_argument_group(title='Network')
|
network = parser.add_argument_group(title='Network')
|
||||||
|
|
||||||
|
network.add_argument(
|
||||||
|
'--offline',
|
||||||
|
default=False,
|
||||||
|
action='store_true',
|
||||||
|
help="""
|
||||||
|
Build the request and print it but don’t actually send it.
|
||||||
|
"""
|
||||||
|
)
|
||||||
network.add_argument(
|
network.add_argument(
|
||||||
'--proxy',
|
'--proxy',
|
||||||
default=[],
|
default=[],
|
||||||
action='append',
|
action='append',
|
||||||
metavar='PROTOCOL:PROXY_URL',
|
metavar='PROTOCOL:PROXY_URL',
|
||||||
type=KeyValueArgType(SEP_PROXY),
|
type=KeyValueArgType(SEPARATOR_PROXY),
|
||||||
help="""
|
help="""
|
||||||
String mapping protocol to the URL of the proxy
|
String mapping protocol to the URL of the proxy
|
||||||
(e.g. http:http://foo.bar:3128). You can specify multiple proxies with
|
(e.g. http:http://foo.bar:3128). You can specify multiple proxies with
|
||||||
different protocols.
|
different protocols. The environment variables $ALL_PROXY, $HTTP_PROXY,
|
||||||
|
and $HTTPS_proxy are supported as well.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
)
|
)
|
||||||
@ -489,14 +510,29 @@ network.add_argument(
|
|||||||
"""
|
"""
|
||||||
)
|
)
|
||||||
|
|
||||||
|
network.add_argument(
|
||||||
|
'--max-headers',
|
||||||
|
type=int,
|
||||||
|
default=0,
|
||||||
|
help="""
|
||||||
|
The maximum number of response headers to be read before giving up
|
||||||
|
(default 0, i.e., no limit).
|
||||||
|
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
|
||||||
network.add_argument(
|
network.add_argument(
|
||||||
'--timeout',
|
'--timeout',
|
||||||
type=float,
|
type=float,
|
||||||
default=30,
|
default=0,
|
||||||
metavar='SECONDS',
|
metavar='SECONDS',
|
||||||
help="""
|
help="""
|
||||||
The connection timeout of the request in seconds. The default value is
|
The connection timeout of the request in seconds.
|
||||||
30 seconds.
|
The default value is 0, i.e., there is no timeout limit.
|
||||||
|
This is not a time limit on the entire response download;
|
||||||
|
rather, an error is reported if the server has not issued a response for
|
||||||
|
timeout seconds (more precisely, if no bytes have been received on
|
||||||
|
the underlying socket for timeout seconds).
|
||||||
|
|
||||||
"""
|
"""
|
||||||
)
|
)
|
||||||
@ -517,7 +553,6 @@ network.add_argument(
|
|||||||
"""
|
"""
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
#######################################################################
|
#######################################################################
|
||||||
# SSL
|
# SSL
|
||||||
#######################################################################
|
#######################################################################
|
||||||
@ -527,10 +562,10 @@ ssl.add_argument(
|
|||||||
'--verify',
|
'--verify',
|
||||||
default='yes',
|
default='yes',
|
||||||
help="""
|
help="""
|
||||||
Set to "no" to skip checking the host's SSL certificate. You can also pass
|
Set to "no" (or "false") to skip checking the host's SSL certificate.
|
||||||
the path to a CA_BUNDLE file for private certs. You can also set the
|
Defaults to "yes" ("true"). You can also pass the path to a CA_BUNDLE file
|
||||||
REQUESTS_CA_BUNDLE environment variable. Defaults to "yes".
|
for private certs. (Or you can set the REQUESTS_CA_BUNDLE environment
|
||||||
|
variable instead.)
|
||||||
"""
|
"""
|
||||||
)
|
)
|
||||||
ssl.add_argument(
|
ssl.add_argument(
|
||||||
@ -576,7 +611,7 @@ ssl.add_argument(
|
|||||||
troubleshooting = parser.add_argument_group(title='Troubleshooting')
|
troubleshooting = parser.add_argument_group(title='Troubleshooting')
|
||||||
|
|
||||||
troubleshooting.add_argument(
|
troubleshooting.add_argument(
|
||||||
'--ignore-stdin',
|
'--ignore-stdin', '-I',
|
||||||
action='store_true',
|
action='store_true',
|
||||||
default=False,
|
default=False,
|
||||||
help="""
|
help="""
|
||||||
@ -611,6 +646,14 @@ troubleshooting.add_argument(
|
|||||||
|
|
||||||
"""
|
"""
|
||||||
)
|
)
|
||||||
|
troubleshooting.add_argument(
|
||||||
|
'--default-scheme',
|
||||||
|
default="http",
|
||||||
|
help="""
|
||||||
|
The default scheme to use if not specified in the URL.
|
||||||
|
|
||||||
|
"""
|
||||||
|
)
|
||||||
troubleshooting.add_argument(
|
troubleshooting.add_argument(
|
||||||
'--debug',
|
'--debug',
|
||||||
action='store_true',
|
action='store_true',
|
httpie/cli/dicts.py (new file, 53 lines)
@@ -0,0 +1,53 @@
from collections import OrderedDict

from requests.structures import CaseInsensitiveDict


class RequestHeadersDict(CaseInsensitiveDict):
    """
    Headers are case-insensitive and multiple values are currently not supported.

    """


class RequestJSONDataDict(OrderedDict):
    pass


class MultiValueOrderedDict(OrderedDict):
    """Multi-value dict for URL parameters and form data."""

    def __setitem__(self, key, value):
        """
        If `key` is assigned more than once, `self[key]` holds a
        `list` of all the values.

        This allows having multiple fields with the same name in form
        data and URL params.

        """
        assert not isinstance(value, list)
        if key not in self:
            super().__setitem__(key, value)
        else:
            if not isinstance(self[key], list):
                super().__setitem__(key, [self[key]])
            self[key].append(value)


class RequestQueryParamsDict(MultiValueOrderedDict):
    pass


class RequestDataDict(MultiValueOrderedDict):

    def items(self):
        for key, values in super(MultiValueOrderedDict, self).items():
            if not isinstance(values, list):
                values = [values]
            for value in values:
                yield key, value


class RequestFilesDict(RequestDataDict):
    pass
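Not part of the diff: an illustrative example of the multi-value behaviour implemented above (assuming the module is importable as httpie.cli.dicts):

    from httpie.cli.dicts import RequestDataDict

    form = RequestDataDict()
    form['tag'] = 'a'
    form['tag'] = 'b'
    form['name'] = 'x'

    assert form['tag'] == ['a', 'b']  # repeated keys collapse into a list internally
    assert list(form.items()) == [('tag', 'a'), ('tag', 'b'), ('name', 'x')]  # ...and re-expand on items()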
httpie/cli/exceptions.py (new file, 2 lines)
@@ -0,0 +1,2 @@
class ParseError(Exception):
    pass
httpie/cli/requestitems.py (new file, 149 lines)
@@ -0,0 +1,149 @@
import os
from io import BytesIO
from typing import Callable, Dict, IO, List, Optional, Tuple, Union

from httpie.cli.argtypes import KeyValueArg
from httpie.cli.constants import (
    SEPARATOR_DATA_EMBED_FILE_CONTENTS, SEPARATOR_DATA_EMBED_RAW_JSON_FILE,
    SEPARATOR_DATA_RAW_JSON, SEPARATOR_DATA_STRING, SEPARATOR_FILE_UPLOAD,
    SEPARATOR_HEADER, SEPARATOR_HEADER_EMPTY, SEPARATOR_QUERY_PARAM,
)
from httpie.cli.dicts import (
    RequestDataDict, RequestFilesDict, RequestHeadersDict, RequestJSONDataDict,
    RequestQueryParamsDict,
)
from httpie.cli.exceptions import ParseError
from httpie.utils import (get_content_type, load_json_preserve_order)


class RequestItems:

    def __init__(self, as_form=False):
        self.headers = RequestHeadersDict()
        self.data = RequestDataDict() if as_form else RequestJSONDataDict()
        self.files = RequestFilesDict()
        self.params = RequestQueryParamsDict()

    @classmethod
    def from_args(
        cls,
        request_item_args: List[KeyValueArg],
        as_form=False,
    ) -> 'RequestItems':
        instance = cls(as_form=as_form)
        rules: Dict[str, Tuple[Callable, dict]] = {
            SEPARATOR_HEADER: (
                process_header_arg,
                instance.headers,
            ),
            SEPARATOR_HEADER_EMPTY: (
                process_empty_header_arg,
                instance.headers,
            ),
            SEPARATOR_QUERY_PARAM: (
                process_query_param_arg,
                instance.params,
            ),
            SEPARATOR_FILE_UPLOAD: (
                process_file_upload_arg,
                instance.files,
            ),
            SEPARATOR_DATA_STRING: (
                process_data_item_arg,
                instance.data,
            ),
            SEPARATOR_DATA_EMBED_FILE_CONTENTS: (
                process_data_embed_file_contents_arg,
                instance.data,
            ),
            SEPARATOR_DATA_RAW_JSON: (
                process_data_raw_json_embed_arg,
                instance.data,
            ),
            SEPARATOR_DATA_EMBED_RAW_JSON_FILE: (
                process_data_embed_raw_json_file_arg,
                instance.data,
            ),
        }

        for arg in request_item_args:
            processor_func, target_dict = rules[arg.sep]
            target_dict[arg.key] = processor_func(arg)

        return instance


JSONType = Union[str, bool, int, list, dict]


def process_header_arg(arg: KeyValueArg) -> Optional[str]:
    return arg.value or None


def process_empty_header_arg(arg: KeyValueArg) -> str:
    if arg.value:
        raise ParseError(
            'Invalid item "%s" '
            '(to specify an empty header use `Header;`)'
            % arg.orig
        )
    return arg.value


def process_query_param_arg(arg: KeyValueArg) -> str:
    return arg.value


def process_file_upload_arg(arg: KeyValueArg) -> Tuple[str, IO, str]:
    filename = arg.value
    try:
        with open(os.path.expanduser(filename), 'rb') as f:
            contents = f.read()
    except IOError as e:
        raise ParseError('"%s": %s' % (arg.orig, e))
    return (
        os.path.basename(filename),
        BytesIO(contents),
        get_content_type(filename),
    )


def process_data_item_arg(arg: KeyValueArg) -> str:
    return arg.value


def process_data_embed_file_contents_arg(arg: KeyValueArg) -> str:
    return load_text_file(arg)


def process_data_embed_raw_json_file_arg(arg: KeyValueArg) -> JSONType:
    contents = load_text_file(arg)
    value = load_json(arg, contents)
    return value


def process_data_raw_json_embed_arg(arg: KeyValueArg) -> JSONType:
    value = load_json(arg, arg.value)
    return value


def load_text_file(item: KeyValueArg) -> str:
    path = item.value
    try:
        with open(os.path.expanduser(path), 'rb') as f:
            return f.read().decode()
    except IOError as e:
        raise ParseError('"%s": %s' % (item.orig, e))
    except UnicodeDecodeError:
        raise ParseError(
            '"%s": cannot embed the content of "%s",'
            ' not a UTF8 or ASCII-encoded text file'
            % (item.orig, item.value)
        )


def load_json(arg: KeyValueArg, contents: str) -> JSONType:
    try:
        return load_json_preserve_order(contents)
    except ValueError as e:
        raise ParseError('"%s": %s' % (arg.orig, e))
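The `rules` table above is a small dispatch map: each request-item separator picks a processor function and the dict the processed value lands in. A stand-alone sketch of that pattern; `:` and `==` are HTTPie's header and query-param separators, while the processors and target dicts here are simplified stand-ins:

from typing import Callable, Dict, Tuple

headers: dict = {}
params: dict = {}

rules: Dict[str, Tuple[Callable, dict]] = {
    ':': (lambda value: value or None, headers),   # e.g. `X-API-Key:123`
    '==': (lambda value: value, params),           # e.g. `q==httpie`
}

# Each parsed item carries (key, separator, value); the separator selects the rule.
for key, sep, value in [('X-API-Key', ':', '123'), ('q', '==', 'httpie')]:
    processor, target = rules[sep]
    target[key] = processor(value)

print(headers, params)   # {'X-API-Key': '123'} {'q': 'httpie'}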
httpie/client.py (305 changed lines)
@@ -1,121 +1,252 @@
import argparse
import http.client
import json
import sys
import zlib
from contextlib import contextmanager
from pathlib import Path
from typing import Iterable, Union

import requests
from requests.adapters import HTTPAdapter

from httpie import __version__
from httpie.cli.constants import SSL_VERSION_ARG_MAPPING
from httpie.cli.dicts import RequestHeadersDict
from httpie.plugins import plugin_manager
from httpie.sessions import get_httpie_session
from httpie.utils import repr_dict

[removed imports: `from pprint import pformat`, `from requests.packages import urllib3`, `from httpie import sessions`, `from httpie.compat import str`, `from httpie.input import SSL_VERSION_ARG_MAPPING`, `from httpie.utils import repr_dict_nice`]

try:
    # noinspection PyPackageRequirements
    import urllib3
    # <https://urllib3.readthedocs.io/en/latest/security.html>
    urllib3.disable_warnings()
except (ImportError, AttributeError):
    pass


FORM_CONTENT_TYPE = 'application/x-www-form-urlencoded; charset=utf-8'
JSON_CONTENT_TYPE = 'application/json'
JSON_ACCEPT = f'{JSON_CONTENT_TYPE}, */*'
DEFAULT_UA = f'HTTPie/{__version__}'


def collect_messages(
    args: argparse.Namespace,
    config_dir: Path,
) -> Iterable[Union[requests.PreparedRequest, requests.Response]]:
    httpie_session = None
    httpie_session_headers = None
    if args.session or args.session_read_only:
        httpie_session = get_httpie_session(
            config_dir=config_dir,
            session_name=args.session or args.session_read_only,
            host=args.headers.get('Host'),
            url=args.url,
        )
        httpie_session_headers = httpie_session.headers

    request_kwargs = make_request_kwargs(
        args=args,
        base_headers=httpie_session_headers,
    )
    send_kwargs = make_send_kwargs(args)
    send_kwargs_mergeable_from_env = make_send_kwargs_mergeable_from_env(args)
    requests_session = build_requests_session(
        ssl_version=args.ssl_version,
    )

    if httpie_session:
        httpie_session.update_headers(request_kwargs['headers'])
        requests_session.cookies = httpie_session.cookies
        if args.auth_plugin:
            # Save auth from CLI to HTTPie session.
            httpie_session.auth = {
                'type': args.auth_plugin.auth_type,
                'raw_auth': args.auth_plugin.raw_auth,
            }
        elif httpie_session.auth:
            # Apply auth from HTTPie session
            request_kwargs['auth'] = httpie_session.auth

    if args.debug:
        # TODO: reflect the split between request and send kwargs.
        dump_request(request_kwargs)

    request = requests.Request(**request_kwargs)
    prepared_request = requests_session.prepare_request(request)
    if args.compress and prepared_request.body:
        compress_body(prepared_request, always=args.compress > 1)
    response_count = 0
    while prepared_request:
        yield prepared_request
        if not args.offline:
            send_kwargs_merged = requests_session.merge_environment_settings(
                url=prepared_request.url,
                **send_kwargs_mergeable_from_env,
            )
            with max_headers(args.max_headers):
                response = requests_session.send(
                    request=prepared_request,
                    **send_kwargs_merged,
                    **send_kwargs,
                )
            response_count += 1
            if response.next:
                if args.max_redirects and response_count == args.max_redirects:
                    raise requests.TooManyRedirects
                if args.follow:
                    prepared_request = response.next
                    if args.all:
                        yield response
                    continue
            yield response
        break

    if httpie_session:
        if httpie_session.is_new() or not args.session_read_only:
            httpie_session.cookies = requests_session.cookies
            httpie_session.save()


# noinspection PyProtectedMember
@contextmanager
def max_headers(limit):
    # <https://github.com/jakubroztocil/httpie/issues/802>
    orig = http.client._MAXHEADERS
    http.client._MAXHEADERS = limit or float('Inf')
    try:
        yield
    finally:
        http.client._MAXHEADERS = orig


def compress_body(request: requests.PreparedRequest, always: bool):
    deflater = zlib.compressobj()
    body_bytes = (
        request.body
        if isinstance(request.body, bytes)
        else request.body.encode()
    )
    deflated_data = deflater.compress(body_bytes)
    deflated_data += deflater.flush()
    is_economical = len(deflated_data) < len(body_bytes)
    if is_economical or always:
        request.body = deflated_data
        request.headers['Content-Encoding'] = 'deflate'
        request.headers['Content-Length'] = str(len(deflated_data))


class HTTPieHTTPSAdapter(HTTPAdapter):

    def __init__(self, ssl_version=None, **kwargs):
        self._ssl_version = ssl_version
        super().__init__(**kwargs)

    def init_poolmanager(self, *args, **kwargs):
        kwargs['ssl_version'] = self._ssl_version
        super().init_poolmanager(*args, **kwargs)


def build_requests_session(
    ssl_version: str = None,
) -> requests.Session:
    requests_session = requests.Session()

    # Install our adapter.
    requests_session.mount('https://', HTTPieHTTPSAdapter(
        ssl_version=(
            SSL_VERSION_ARG_MAPPING[ssl_version]
            if ssl_version else None
        )
    ))

    # Install adapters from plugins.
    for plugin_cls in plugin_manager.get_transport_plugins():
        transport_plugin = plugin_cls()
        requests_session.mount(
            prefix=transport_plugin.prefix,
            adapter=transport_plugin.get_adapter(),
        )

    return requests_session


def dump_request(kwargs: dict):
    sys.stderr.write(
        f'\n>>> requests.request(**{repr_dict(kwargs)})\n\n')


def finalize_headers(headers: RequestHeadersDict) -> RequestHeadersDict:
    final_headers = RequestHeadersDict()
    for name, value in headers.items():
        if value is not None:
            # “leading or trailing LWS MAY be removed without
            # changing the semantics of the field value”
            # <https://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html>
            # Also, requests raises `InvalidHeader` for leading spaces.
            value = value.strip()
            if isinstance(value, str):
                # See <https://github.com/jakubroztocil/httpie/issues/212>
                value = value.encode('utf8')
        final_headers[name] = value
    return final_headers


def make_default_headers(args: argparse.Namespace) -> RequestHeadersDict:
    default_headers = RequestHeadersDict({
        'User-Agent': DEFAULT_UA
    })

    auto_json = args.data and not args.form
    # FIXME: Accept is set to JSON with `http url @./file.txt`.
    if args.json or auto_json:
        default_headers['Accept'] = JSON_ACCEPT
        if args.json or (auto_json and args.data):
            default_headers['Content-Type'] = JSON_CONTENT_TYPE

    elif args.form and not args.files:
        # If sending files, `requests` will set
        # the `Content-Type` for us.
        default_headers['Content-Type'] = FORM_CONTENT_TYPE
    return default_headers


def make_send_kwargs(args: argparse.Namespace) -> dict:
    kwargs = {
        'timeout': args.timeout or None,
        'allow_redirects': False,
    }
    return kwargs


def make_send_kwargs_mergeable_from_env(args: argparse.Namespace) -> dict:
    cert = None
    if args.cert:
        cert = args.cert
        if args.cert_key:
            cert = cert, args.cert_key
    kwargs = {
        'proxies': {p.key: p.value for p in args.proxy},
        'stream': True,
        'verify': {
            'yes': True,
            'true': True,
            'no': False,
            'false': False,
        }.get(args.verify.lower(), args.verify),
        'cert': cert,
    }
    return kwargs


def make_request_kwargs(
    args: argparse.Namespace,
    base_headers: RequestHeadersDict = None
) -> dict:
    """
    Translate our `args` into `requests.Request` keyword arguments.

    """
    # Serialize JSON data, if needed.
@@ -130,40 +261,20 @@ def get_requests_kwargs(args, base_headers=None):
    data = ''

    # Finalize headers.
    headers = make_default_headers(args)
    if base_headers:
        headers.update(base_headers)
    headers.update(args.headers)
    headers = finalize_headers(headers)

    kwargs = {
        'method': args.method.lower(),
        'url': args.url,
        'headers': headers,
        'data': data,
        'auth': args.auth,
        'params': args.params,
        'files': args.files,
    }

    return kwargs

[removed: the old get_requests_session(), get_response(), encode_headers(), and get_default_headers() helpers, and the credentials/cert/verify/proxies handling inside the old get_requests_kwargs(), which the functions above replace]
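A usage sketch for the new `--compress` path: `compress_body()` deflates the prepared body and only keeps the result when it is actually smaller, unless forced, which `collect_messages()` does when `--compress` is given more than once (`always=args.compress > 1`). The URL and payload below are placeholders:

import requests

from httpie.client import compress_body

prepared = requests.Request(
    method='POST',
    url='http://example.org/upload',   # placeholder URL
    data='hello ' * 1000,              # highly compressible payload
).prepare()

compress_body(prepared, always=False)
print(prepared.headers.get('Content-Encoding'))   # 'deflate' when compression paid off
print(prepared.headers['Content-Length'])         # updated to the deflated size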
httpie/compat.py (169 changed lines)
@@ -1,173 +1,4 @@
import sys


is_windows = 'win32' in str(sys.platform).lower()

[removed: the module docstring, the is_py2/is_py26/is_py27/is_py3/is_pypy flags, the str/bytes aliases, the urllib/urlparse and urllib2 import fallbacks, and the bundled Python 2.6 OrderedDict backport with its MIT license header (~165 lines), none of which are needed on Python 3]
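After this cleanup the module effectively exposes a single flag; downstream code uses it like this (sketch):

from httpie.compat import is_windows

if is_windows:
    # e.g. wrap stdout with colorama, as httpie/context.py does below
    pass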
httpie/config.py
@@ -1,61 +1,59 @@
import errno
import json
import os
from pathlib import Path
from typing import Union

from httpie import __version__
from httpie.compat import is_windows


DEFAULT_CONFIG_DIR = Path(os.environ.get(
    'HTTPIE_CONFIG_DIR',
    os.path.expanduser('~/.httpie') if not is_windows else
    os.path.expandvars(r'%APPDATA%\\httpie')
))


class ConfigFileError(Exception):
    pass


class BaseConfigDict(dict):
    name = None
    helpurl = None
    about = None

    def __init__(self, path: Path):
        super().__init__()
        self.path = path

    def ensure_directory(self):
        try:
            self.path.parent.mkdir(mode=0o700, parents=True)
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise

    def is_new(self) -> bool:
        return not self.path.exists()

    def load(self):
        config_type = type(self).__name__.lower()
        try:
            with self.path.open('rt') as f:
                try:
                    data = json.load(f)
                except ValueError as e:
                    raise ConfigFileError(
                        f'invalid {config_type} file: {e} [{self.path}]'
                    )
                self.update(data)
        except IOError as e:
            if e.errno != errno.ENOENT:
                raise ConfigFileError(f'cannot read {config_type} file: {e}')

    def save(self, fail_silently=False):
        self['__meta__'] = {
            'httpie': __version__
        }
@@ -65,48 +63,41 @@ class BaseConfigDict(dict):
        if self.about:
            self['__meta__']['about'] = self.about

        self.ensure_directory()

        try:
            with self.path.open('w') as f:
                json.dump(
                    obj=self,
                    fp=f,
                    indent=4,
                    sort_keys=True,
                    ensure_ascii=True,
                )
                f.write('\n')
        except IOError:
            if not fail_silently:
                raise

    def delete(self):
        try:
            self.path.unlink()
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise


class Config(BaseConfigDict):
    FILENAME = 'config.json'
    DEFAULTS = {
        'default_options': []
    }

    def __init__(self, directory: Union[str, Path] = DEFAULT_CONFIG_DIR):
        self.directory = Path(directory)
        super().__init__(path=self.directory / self.FILENAME)
        self.update(self.DEFAULTS)

    @property
    def default_options(self) -> list:
        return self['default_options']

[removed: Config.name/helpurl/about, the _get_path() helper, and the _migrate_implicit_content_type() migration hook from the old load() override]
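A round-trip sketch of the new path-based config API; the directory is a throwaway example, not HTTPie's real default:

from httpie.config import Config

config = Config(directory='/tmp/httpie-demo')
config['default_options'] = ['--form']
config.save()          # ensure_directory() creates the 0o700 dir, then writes config.json

reloaded = Config(directory='/tmp/httpie-demo')
reloaded.load()        # raises ConfigFileError on unreadable or invalid JSON
print(reloaded.default_options)   # ['--form']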
httpie/context.py
@@ -1,12 +1,21 @@
import os
import sys
from pathlib import Path
from typing import Union, IO, Optional

try:
    import curses
except ImportError:
    curses = None  # Compiled w/o curses

from httpie.compat import is_windows
from httpie.config import DEFAULT_CONFIG_DIR, Config, ConfigFileError
from httpie.utils import repr_dict


class Environment:
    """
    Information about the execution context
    (standard streams, config directory, etc).
@@ -16,29 +25,25 @@ class Environment(object):
    is used by the test suite to simulate various scenarios.

    """
    is_windows: bool = is_windows
    config_dir: Path = DEFAULT_CONFIG_DIR
    stdin: Optional[IO] = sys.stdin  # `None` when closed fd (#791)
    stdin_isatty: bool = stdin.isatty() if stdin else False
    stdin_encoding: str = None
    stdout: IO = sys.stdout
    stdout_isatty: bool = stdout.isatty()
    stdout_encoding: str = None
    stderr: IO = sys.stderr
    stderr_isatty: bool = stderr.isatty()
    colors = 256
    program_name: str = 'http'
    if not is_windows:
        if curses:
            try:
                curses.setupterm()
                colors = curses.tigetnum('colors')
            except curses.error:
                pass
    else:
        # noinspection PyUnresolvedReferences
        import colorama.initialise
@@ -62,7 +67,7 @@ class Environment(object):
        self.__dict__.update(**kwargs)

        # Keyword arguments > stream.encoding > default utf8
        if self.stdin and self.stdin_encoding is None:
            self.stdin_encoding = getattr(
                self.stdin, 'encoding', None) or 'utf8'
        if self.stdout_encoding is None:
@@ -71,30 +76,39 @@ class Environment(object):
            # noinspection PyUnresolvedReferences
            from colorama import AnsiToWin32
            if isinstance(self.stdout, AnsiToWin32):
                # noinspection PyUnresolvedReferences
                actual_stdout = self.stdout.wrapped
            self.stdout_encoding = getattr(
                actual_stdout, 'encoding', None) or 'utf8'

    def __str__(self):
        defaults = dict(type(self).__dict__)
        actual = dict(defaults)
        actual.update(self.__dict__)
        actual['config'] = self.config
        return repr_dict({
            key: value
            for key, value in actual.items()
            if not key.startswith('_')
        })

    def __repr__(self):
        return f'<{type(self).__name__} {self}>'

    _config: Config = None

    @property
    def config(self) -> Config:
        config = self._config
        if not config:
            self._config = config = Config(directory=self.config_dir)
            if not config.is_new():
                try:
                    config.load()
                except ConfigFileError as e:
                    self.log_error(e, level='warning')
        return config

    def log_error(self, msg, level='error'):
        assert level in ['error', 'warning']
        self.stderr.write(f'\n{self.program_name}: {level}: {msg}\n\n')

[removed: the in-class `import curses` with the pypy3 TypeError workaround, and the old hasattr-based `config` property that the cached property above replaces]
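As the docstring says, keyword arguments override the class-level defaults, which is how the test suite swaps in fake streams; a minimal sketch (the message text is made up):

import io

from httpie.context import Environment

env = Environment(
    stdout=io.StringIO(),
    stderr=io.StringIO(),
    stdin=None,   # simulate a closed stdin (see #791 above)
)
env.log_error('something went wrong', level='warning')
print(env.stderr.getvalue())   # '\nhttp: warning: something went wrong\n\n'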
httpie/core.py (374 changed lines)
@@ -1,90 +1,125 @@
import argparse
import os
import platform
import sys
from typing import List, Union

import requests
from pygments import __version__ as pygments_version
from requests import __version__ as requests_version

from httpie import __version__ as httpie_version
from httpie.client import collect_messages
from httpie.context import Environment
from httpie.downloads import Downloader
from httpie.output.writer import write_message, write_stream
from httpie.plugins import plugin_manager
from httpie.status import ExitStatus, http_status_to_exit_status

[removed: the module docstring describing the invocation flow, the httpie.compat imports, and the old get_exit_status() helper, now replaced by httpie.status.http_status_to_exit_status]

def main(
    args: List[Union[str, bytes]] = sys.argv,
    env=Environment(),
) -> ExitStatus:
    """
    The main function.

    Pre-process args, handle some special types of invocations,
    and run the main program with error handling.

    Return exit status code.

    """
    program_name, *args = args
    env.program_name = os.path.basename(program_name)
    args = decode_raw_args(args, env.stdin_encoding)
    plugin_manager.load_installed_plugins()

    from httpie.cli.definition import parser

    if env.config.default_options:
        args = env.config.default_options + args

    include_debug_info = '--debug' in args
    include_traceback = include_debug_info or '--traceback' in args

    if include_debug_info:
        print_debug_info(env)
        if args == ['--debug']:
            return ExitStatus.SUCCESS

    exit_status = ExitStatus.SUCCESS

    try:
        parsed_args = parser.parse_args(
            args=args,
            env=env,
        )
    except KeyboardInterrupt:
        env.stderr.write('\n')
        if include_traceback:
            raise
        exit_status = ExitStatus.ERROR_CTRL_C
    except SystemExit as e:
        if e.code != ExitStatus.SUCCESS:
            env.stderr.write('\n')
            if include_traceback:
                raise
            exit_status = ExitStatus.ERROR
    else:
        try:
            exit_status = program(
                args=parsed_args,
                env=env,
            )
        except KeyboardInterrupt:
            env.stderr.write('\n')
            if include_traceback:
                raise
            exit_status = ExitStatus.ERROR_CTRL_C
        except SystemExit as e:
            if e.code != ExitStatus.SUCCESS:
                env.stderr.write('\n')
                if include_traceback:
                    raise
                exit_status = ExitStatus.ERROR
        except requests.Timeout:
            exit_status = ExitStatus.ERROR_TIMEOUT
            env.log_error(f'Request timed out ({parsed_args.timeout}s).')
        except requests.TooManyRedirects:
            exit_status = ExitStatus.ERROR_TOO_MANY_REDIRECTS
            env.log_error(
                f'Too many redirects'
                f' (--max-redirects={parsed_args.max_redirects}).'
            )
        except Exception as e:
            # TODO: Further distinction between expected and unexpected errors.
            msg = str(e)
            if hasattr(e, 'request'):
                request = e.request
                if hasattr(request, 'url'):
                    msg = (
                        f'{msg} while doing a {request.method}'
                        f' request to URL: {request.url}'
                    )
            env.log_error(f'{type(e).__name__}: {msg}')
            if include_traceback:
                raise
            exit_status = ExitStatus.ERROR

    return exit_status


def program(
    args: argparse.Namespace,
    env: Environment,
) -> ExitStatus:
    """
    The main program without error handling.

    """
    exit_status = ExitStatus.SUCCESS
    downloader = None

    try:
        if args.download:
@@ -96,56 +131,38 @@ def program(args, env, log_error):
            )
            downloader.pre_request(args.headers)

        initial_request = None
        final_response = None

        for message in collect_messages(args, env.config.directory):
            write_message(
                requests_message=message,
                env=env,
                args=args,
            )
            if isinstance(message, requests.PreparedRequest):
                if not initial_request:
                    initial_request = message
            else:
                final_response = message
                if args.check_status or downloader:
                    exit_status = http_status_to_exit_status(
                        http_status=message.status_code,
                        follow=args.follow
                    )
                    if (not env.stdout_isatty
                            and exit_status != ExitStatus.SUCCESS):
                        env.log_error(
                            f'HTTP {message.raw.status} {message.raw.reason}',
                            level='warning'
                        )

        if downloader and exit_status == ExitStatus.SUCCESS:
            # Last response body download.
            download_stream, download_to = downloader.start(
                initial_url=initial_request.url,
                final_response=final_response,
            )
            write_stream(
                stream=download_stream,
                outfile=download_to,
@@ -154,107 +171,46 @@ def program(args, env, log_error):
            downloader.finish()
            if downloader.interrupted:
                exit_status = ExitStatus.ERROR
                env.log_error(
                    'Incomplete download: size=%d; downloaded=%d' % (
                        downloader.status.total_size,
                        downloader.status.downloaded
                    ))
        return exit_status

    finally:
        if downloader and not downloader.finished:
            downloader.failed()

        if (not isinstance(args, list) and args.output_file
                and args.output_file_specified):
            args.output_file.close()


def print_debug_info(env: Environment):
    env.stderr.writelines([
        f'HTTPie {httpie_version}\n',
        f'Requests {requests_version}\n',
        f'Pygments {pygments_version}\n',
        f'Python {sys.version}\n{sys.executable}\n',
        f'{platform.system()} {platform.release()}',
    ])
    env.stderr.write('\n\n')
    env.stderr.write(repr(env))
    env.stderr.write('\n')


def decode_raw_args(
    args: List[Union[str, bytes]],
    stdin_encoding: str
) -> List[str]:
    """
    Convert all bytes args to str
    by decoding them using stdin encoding.

    """
    return [
        arg.decode(stdin_encoding)
        if type(arg) == bytes else arg
        for arg in args
    ]

[removed: the old per-response output loop (build_output_stream, write_stream_with_colors_win_py3, and the EPIPE handling, now behind httpie.output.writer.write_message) and the previous main()/log_error implementation that the rewritten main() above replaces]
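With the new signature, `main()` receives the full `argv` including the program name (used to set `env.program_name`). A hedged sketch of driving it programmatically, assuming the `--offline` flag from this change set so that nothing is actually sent:

from httpie.context import Environment
from httpie.core import main
from httpie.status import ExitStatus

env = Environment()
status = main(['http', '--offline', 'example.org', 'hello=world'], env=env)
assert status == ExitStatus.SUCCESS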
httpie/downloads.py
@@ -4,24 +4,27 @@ Download mode implementation.
"""
from __future__ import division

import errno
import mimetypes
import os
import re
import sys
import threading
from mailbox import Message
from time import sleep, time
from typing import IO, Optional, Tuple
from urllib.parse import urlsplit

import requests

from httpie.models import HTTPResponse
from httpie.output.streams import RawStream
from httpie.utils import humanize_bytes


PARTIAL_CONTENT = 206


CLEAR_LINE = '\r\033[K'
PROGRESS = (
    '{percentage: 6.2f} %'
@@ -38,11 +41,11 @@ class ContentRangeError(ValueError):
    pass


def parse_content_range(content_range: str, resumed_from: int) -> int:
    """
    Parse and validate Content-Range header.

    <https://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html>

    :param content_range: the value of a Content-Range response header
                          eg. "bytes 21010-47021/47022"
@@ -54,8 +57,8 @@ def parse_content_range(content_range, resumed_from):
        raise ContentRangeError('Missing Content-Range')

    pattern = (
        r'^bytes (?P<first_byte_pos>\d+)-(?P<last_byte_pos>\d+)'
        r'/(\*|(?P<instance_length>\d+))$'
    )
    match = re.match(pattern, content_range)
@@ -79,8 +82,8 @@ def parse_content_range(content_range, resumed_from):
    # byte-content-range- spec MUST ignore it and any content
    # transferred along with it."
    if (first_byte_pos >= last_byte_pos
            or (instance_length is not None
                and instance_length <= last_byte_pos)):
        raise ContentRangeError(
            'Invalid Content-Range returned: %r' % content_range)
@@ -97,7 +100,9 @@ def parse_content_range(content_range, resumed_from):
    return last_byte_pos + 1


def filename_from_content_disposition(
    content_disposition: str
) -> Optional[str]:
    """
    Extract and validate filename from a Content-Disposition header.
@@ -105,7 +110,7 @@ def filename_from_content_disposition(content_disposition):
    :return: the filename if present and valid, otherwise `None`

    """
    # attachment; filename=jakubroztocil-httpie-0.4.1-20-g40bd8f6.tar.gz
    msg = Message('Content-Disposition: %s' % content_disposition)
    filename = msg.get_filename()
@@ -116,7 +121,7 @@ def filename_from_content_disposition(content_disposition):
    return filename


def filename_from_url(url: str, content_type: Optional[str]) -> str:
    fn = urlsplit(url).path.rstrip('/')
    fn = os.path.basename(fn) if fn else 'index'
    if '.' not in fn and content_type:
@@ -136,7 +141,7 @@ def filename_from_url(url, content_type):
    return fn


def trim_filename(filename: str, max_len: int) -> str:
    if len(filename) > max_len:
        trim_by = len(filename) - max_len
        name, ext = os.path.splitext(filename)
@@ -147,7 +152,7 @@ def trim_filename(filename, max_len):
    return filename


def get_filename_max_length(directory: str) -> int:
    max_len = 255
    try:
        pathconf = os.pathconf
@@ -162,14 +167,14 @@ def get_filename_max_length(directory):
    return max_len


def trim_filename_if_needed(filename: str, directory='.', extra=0) -> str:
    max_len = get_filename_max_length(directory) - extra
    if len(filename) > max_len:
        filename = trim_filename(filename, max_len)
    return filename


def get_unique_filename(filename: str, exists=os.path.exists) -> str:
    attempt = 0
    while True:
        suffix = '-' + str(attempt) if attempt > 0 else ''
@@ -180,10 +185,14 @@ def get_unique_filename(filename, exists=os.path.exists):
        attempt += 1


class Downloader:

    def __init__(
        self,
        output_file: IO = None,
        resume: bool = False,
        progress_file: IO = sys.stderr
    ):
        """
        :param resume: Should the download resume if partial download
                       already exists.
@@ -195,24 +204,21 @@ class Downloader(object):
        :param progress_file: Where to report download progress.

        """
        self.finished = False
        self.status = DownloadStatus()
        self._output_file = output_file
        self._resume = resume
        self._resumed_from = 0

        self._progress_reporter = ProgressReporterThread(
            status=self.status,
            output=progress_file
        )

    def pre_request(self, request_headers: dict):
        """Called just before the HTTP request is sent.

        Might alter `request_headers`.

        """
        # Ask the server not to encode the content so that we can resume, etc.
        request_headers['Accept-Encoding'] = 'identity'
@@ -224,13 +230,17 @@ class Downloader(object):
        request_headers['Range'] = 'bytes=%d-' % bytes_have
        self._resumed_from = bytes_have

    def start(
        self,
        initial_url: str,
        final_response: requests.Response
    ) -> Tuple[RawStream, IO]:
        """
        Initiate and return a stream for `response` body with progress
        callback attached. Can be called only once.

        :param initial_url: The original requested URL
        :param final_response: Initiated response object with headers already fetched
|
||||||
|
|
||||||
:return: RawStream, output_file
|
:return: RawStream, output_file
|
||||||
|
|
||||||
@ -238,16 +248,22 @@ class Downloader(object):
|
|||||||
assert not self.status.time_started
|
assert not self.status.time_started
|
||||||
|
|
||||||
# FIXME: some servers still might sent Content-Encoding: gzip
|
# FIXME: some servers still might sent Content-Encoding: gzip
|
||||||
# <https://github.com/jkbrzt/httpie/issues/423>
|
# <https://github.com/jakubroztocil/httpie/issues/423>
|
||||||
try:
|
try:
|
||||||
total_size = int(response.headers['Content-Length'])
|
total_size = int(final_response.headers['Content-Length'])
|
||||||
except (KeyError, ValueError, TypeError):
|
except (KeyError, ValueError, TypeError):
|
||||||
total_size = None
|
total_size = None
|
||||||
|
|
||||||
if self._output_file:
|
if not self._output_file:
|
||||||
if self._resume and response.status_code == PARTIAL_CONTENT:
|
self._output_file = self._get_output_file_from_response(
|
||||||
|
initial_url=initial_url,
|
||||||
|
final_response=final_response,
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
# `--output, -o` provided
|
||||||
|
if self._resume and final_response.status_code == PARTIAL_CONTENT:
|
||||||
total_size = parse_content_range(
|
total_size = parse_content_range(
|
||||||
response.headers.get('Content-Range'),
|
final_response.headers.get('Content-Range'),
|
||||||
self._resumed_from
|
self._resumed_from
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -258,19 +274,6 @@ class Downloader(object):
|
|||||||
self._output_file.truncate()
|
self._output_file.truncate()
|
||||||
except IOError:
|
except IOError:
|
||||||
pass # stdout
|
pass # stdout
|
||||||
else:
|
|
||||||
# TODO: Should the filename be taken from response.history[0].url?
|
|
||||||
# Output file not specified. Pick a name that doesn't exist yet.
|
|
||||||
filename = None
|
|
||||||
if 'Content-Disposition' in response.headers:
|
|
||||||
filename = filename_from_content_disposition(
|
|
||||||
response.headers['Content-Disposition'])
|
|
||||||
if not filename:
|
|
||||||
filename = filename_from_url(
|
|
||||||
url=response.url,
|
|
||||||
content_type=response.headers.get('Content-Type'),
|
|
||||||
)
|
|
||||||
self._output_file = open(get_unique_filename(filename), mode='a+b')
|
|
||||||
|
|
||||||
self.status.started(
|
self.status.started(
|
||||||
resumed_from=self._resumed_from,
|
resumed_from=self._resumed_from,
|
||||||
@ -278,7 +281,7 @@ class Downloader(object):
|
|||||||
)
|
)
|
||||||
|
|
||||||
stream = RawStream(
|
stream = RawStream(
|
||||||
msg=HTTPResponse(response),
|
msg=HTTPResponse(final_response),
|
||||||
with_headers=False,
|
with_headers=False,
|
||||||
with_body=True,
|
with_body=True,
|
||||||
on_body_chunk_downloaded=self.chunk_downloaded,
|
on_body_chunk_downloaded=self.chunk_downloaded,
|
||||||
@ -306,14 +309,14 @@ class Downloader(object):
|
|||||||
self._progress_reporter.stop()
|
self._progress_reporter.stop()
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def interrupted(self):
|
def interrupted(self) -> bool:
|
||||||
return (
|
return (
|
||||||
self.finished
|
self.finished
|
||||||
and self.status.total_size
|
and self.status.total_size
|
||||||
and self.status.total_size != self.status.downloaded
|
and self.status.total_size != self.status.downloaded
|
||||||
)
|
)
|
||||||
|
|
||||||
def chunk_downloaded(self, chunk):
|
def chunk_downloaded(self, chunk: bytes):
|
||||||
"""
|
"""
|
||||||
A download progress callback.
|
A download progress callback.
|
||||||
|
|
||||||
@ -324,9 +327,27 @@ class Downloader(object):
|
|||||||
"""
|
"""
|
||||||
self.status.chunk_downloaded(len(chunk))
|
self.status.chunk_downloaded(len(chunk))
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _get_output_file_from_response(
|
||||||
|
initial_url: str,
|
||||||
|
final_response: requests.Response,
|
||||||
|
) -> IO:
|
||||||
|
# Output file not specified. Pick a name that doesn't exist yet.
|
||||||
|
filename = None
|
||||||
|
if 'Content-Disposition' in final_response.headers:
|
||||||
|
filename = filename_from_content_disposition(
|
||||||
|
final_response.headers['Content-Disposition'])
|
||||||
|
if not filename:
|
||||||
|
filename = filename_from_url(
|
||||||
|
url=initial_url,
|
||||||
|
content_type=final_response.headers.get('Content-Type'),
|
||||||
|
)
|
||||||
|
unique_filename = get_unique_filename(filename)
|
||||||
|
return open(unique_filename, mode='a+b')
|
||||||
|
|
||||||
class Status(object):
|
|
||||||
"""Holds details about the downland status."""
|
class DownloadStatus:
|
||||||
|
"""Holds details about the download status."""
|
||||||
|
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
self.downloaded = 0
|
self.downloaded = 0
|
||||||
@ -362,13 +383,15 @@ class ProgressReporterThread(threading.Thread):
|
|||||||
Uses threading to periodically update the status (speed, ETA, etc.).
|
Uses threading to periodically update the status (speed, ETA, etc.).
|
||||||
|
|
||||||
"""
|
"""
|
||||||
def __init__(self, status, output, tick=.1, update_interval=1):
|
|
||||||
"""
|
|
||||||
|
|
||||||
:type status: Status
|
def __init__(
|
||||||
:type output: file
|
self,
|
||||||
"""
|
status: DownloadStatus,
|
||||||
super(ProgressReporterThread, self).__init__()
|
output: IO,
|
||||||
|
tick=.1,
|
||||||
|
update_interval=1
|
||||||
|
):
|
||||||
|
super().__init__()
|
||||||
self.status = status
|
self.status = status
|
||||||
self.output = output
|
self.output = output
|
||||||
self._tick = tick
|
self._tick = tick
|
||||||
@ -447,8 +470,8 @@ class ProgressReporterThread(threading.Thread):
|
|||||||
else 0)
|
else 0)
|
||||||
|
|
||||||
def sum_up(self):
|
def sum_up(self):
|
||||||
actually_downloaded = (self.status.downloaded
|
actually_downloaded = (
|
||||||
- self.status.resumed_from)
|
self.status.downloaded - self.status.resumed_from)
|
||||||
time_taken = self.status.time_finished - self.status.time_started
|
time_taken = self.status.time_finished - self.status.time_started
|
||||||
|
|
||||||
self.output.write(CLEAR_LINE)
|
self.output.write(CLEAR_LINE)
|
||||||
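
For orientation, a minimal sketch of how the reworked Downloader API above might be driven. It is illustrative only: the URL, the plain requests call, and the final finish() step are assumptions inferred from the signatures visible in this diff, not code from the changeset itself.

import requests

from httpie.downloads import Downloader

url = 'https://example.org/file.bin'           # hypothetical URL
downloader = Downloader(resume=False)          # no --output: filename picked automatically
request_headers = {}
downloader.pre_request(request_headers)        # forces Accept-Encoding: identity, adds Range
final_response = requests.get(url, headers=request_headers, stream=True)
stream, output_file = downloader.start(
    initial_url=url,
    final_response=final_response,
)
for chunk in stream:                           # RawStream invokes chunk_downloaded() per chunk
    output_file.write(chunk)
downloader.finish()                            # assumed finalization step; not shown in this hunk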
714  httpie/input.py  (file removed)
@@ -1,714 +0,0 @@
-"""Parsing and processing of CLI input (args, auth credentials, files, stdin).
-
-"""
-import os
-import ssl
-import sys
-import re
-import errno
-import mimetypes
-import getpass
-from io import BytesIO
-from collections import namedtuple, Iterable
-# noinspection PyCompatibility
-from argparse import ArgumentParser, ArgumentTypeError, ArgumentError
-
-# TODO: Use MultiDict for headers once added to `requests`.
-# https://github.com/jkbrzt/httpie/issues/130
-from requests.structures import CaseInsensitiveDict
-
-from httpie.compat import OrderedDict, urlsplit, str, is_pypy, is_py27
-from httpie.sessions import VALID_SESSION_NAME_PATTERN
-from httpie.utils import load_json_preserve_order
-
-
-# ALPHA *( ALPHA / DIGIT / "+" / "-" / "." )
-# <http://tools.ietf.org/html/rfc3986#section-3.1>
-URL_SCHEME_RE = re.compile(r'^[a-z][a-z0-9.+-]*://', re.IGNORECASE)
-
-HTTP_POST = 'POST'
-HTTP_GET = 'GET'
-HTTP = 'http://'
-HTTPS = 'https://'
-
-
-# Various separators used in args
-SEP_HEADERS = ':'
-SEP_CREDENTIALS = ':'
-SEP_PROXY = ':'
-SEP_DATA = '='
-SEP_DATA_RAW_JSON = ':='
-SEP_FILES = '@'
-SEP_DATA_EMBED_FILE = '=@'
-SEP_DATA_EMBED_RAW_JSON_FILE = ':=@'
-SEP_QUERY = '=='
-
-# Separators that become request data
-SEP_GROUP_DATA_ITEMS = frozenset([
-    SEP_DATA,
-    SEP_DATA_RAW_JSON,
-    SEP_FILES,
-    SEP_DATA_EMBED_FILE,
-    SEP_DATA_EMBED_RAW_JSON_FILE
-])
-
-# Separators for items whose value is a filename to be embedded
-SEP_GROUP_DATA_EMBED_ITEMS = frozenset([
-    SEP_DATA_EMBED_FILE,
-    SEP_DATA_EMBED_RAW_JSON_FILE,
-])
-
-# Separators for raw JSON items
-SEP_GROUP_RAW_JSON_ITEMS = frozenset([
-    SEP_DATA_RAW_JSON,
-    SEP_DATA_EMBED_RAW_JSON_FILE,
-])
-
-# Separators allowed in ITEM arguments
-SEP_GROUP_ALL_ITEMS = frozenset([
-    SEP_HEADERS,
-    SEP_QUERY,
-    SEP_DATA,
-    SEP_DATA_RAW_JSON,
-    SEP_FILES,
-    SEP_DATA_EMBED_FILE,
-    SEP_DATA_EMBED_RAW_JSON_FILE,
-])
-
-
-# Output options
-OUT_REQ_HEAD = 'H'
-OUT_REQ_BODY = 'B'
-OUT_RESP_HEAD = 'h'
-OUT_RESP_BODY = 'b'
-
-OUTPUT_OPTIONS = frozenset([
-    OUT_REQ_HEAD,
-    OUT_REQ_BODY,
-    OUT_RESP_HEAD,
-    OUT_RESP_BODY
-])
-
-# Pretty
-PRETTY_MAP = {
-    'all': ['format', 'colors'],
-    'colors': ['colors'],
-    'format': ['format'],
-    'none': []
-}
-PRETTY_STDOUT_TTY_ONLY = object()
-
-
-# Defaults
-OUTPUT_OPTIONS_DEFAULT = OUT_RESP_HEAD + OUT_RESP_BODY
-OUTPUT_OPTIONS_DEFAULT_STDOUT_REDIRECTED = OUT_RESP_BODY
-
-
-SSL_VERSION_ARG_MAPPING = {
-    'ssl2.3': 'PROTOCOL_SSLv23',
-    'ssl3': 'PROTOCOL_SSLv3',
-    'tls1': 'PROTOCOL_TLSv1',
-    'tls1.1': 'PROTOCOL_TLSv1_1',
-    'tls1.2': 'PROTOCOL_TLSv1_2',
-}
-SSL_VERSION_ARG_MAPPING = dict(
-    (cli_arg, getattr(ssl, ssl_constant))
-    for cli_arg, ssl_constant in SSL_VERSION_ARG_MAPPING.items()
-    if hasattr(ssl, ssl_constant)
-)
-
-
-class HTTPieArgumentParser(ArgumentParser):
-    """Adds additional logic to `argparse.ArgumentParser`.
-
-    Handles all input (CLI args, file args, stdin), applies defaults,
-    and performs extra validation.
-
-    """
-
-    def __init__(self, *args, **kwargs):
-        kwargs['add_help'] = False
-        super(HTTPieArgumentParser, self).__init__(*args, **kwargs)
-
-    # noinspection PyMethodOverriding
-    def parse_args(self, env, args=None, namespace=None):
-
-        self.env = env
-        self.args, no_options = super(HTTPieArgumentParser, self)\
-            .parse_known_args(args, namespace)
-
-        if self.args.debug:
-            self.args.traceback = True
-
-        # Arguments processing and environment setup.
-        self._apply_no_options(no_options)
-        self._validate_download_options()
-        self._setup_standard_streams()
-        self._process_output_options()
-        self._process_pretty_options()
-        self._guess_method()
-        self._parse_items()
-        if not self.args.ignore_stdin and not env.stdin_isatty:
-            self._body_from_file(self.env.stdin)
-        if not URL_SCHEME_RE.match(self.args.url):
-            scheme = HTTP
-
-            # See if we're using curl style shorthand for localhost (:3000/foo)
-            shorthand = re.match(r'^:(?!:)(\d*)(/?.*)$', self.args.url)
-            if shorthand:
-                port = shorthand.group(1)
-                rest = shorthand.group(2)
-                self.args.url = scheme + 'localhost'
-                if port:
-                    self.args.url += ':' + port
-                self.args.url += rest
-            else:
-                self.args.url = scheme + self.args.url
-        self._process_auth()
-
-        return self.args
-
-    # noinspection PyShadowingBuiltins
-    def _print_message(self, message, file=None):
-        # Sneak in our stderr/stdout.
-        file = {
-            sys.stdout: self.env.stdout,
-            sys.stderr: self.env.stderr,
-            None: self.env.stderr
-        }.get(file, file)
-        if not hasattr(file, 'buffer') and isinstance(message, str):
-            message = message.encode(self.env.stdout_encoding)
-        super(HTTPieArgumentParser, self)._print_message(message, file)
-
-    def _setup_standard_streams(self):
-        """
-        Modify `env.stdout` and `env.stdout_isatty` based on args, if needed.
-
-        """
-        self.args.output_file_specified = bool(self.args.output_file)
-        if self.args.download:
-            # FIXME: Come up with a cleaner solution.
-            if not self.args.output_file and not self.env.stdout_isatty:
-                # Use stdout as the download output file.
-                self.args.output_file = self.env.stdout
-            # With `--download`, we write everything that would normally go to
-            # `stdout` to `stderr` instead. Let's replace the stream so that
-            # we don't have to use many `if`s throughout the codebase.
-            # The response body will be treated separately.
-            self.env.stdout = self.env.stderr
-            self.env.stdout_isatty = self.env.stderr_isatty
-        elif self.args.output_file:
-            # When not `--download`ing, then `--output` simply replaces
-            # `stdout`. The file is opened for appending, which isn't what
-            # we want in this case.
-            self.args.output_file.seek(0)
-            try:
-                self.args.output_file.truncate()
-            except IOError as e:
-                if e.errno == errno.EINVAL:
-                    # E.g. /dev/null on Linux.
-                    pass
-                else:
-                    raise
-            self.env.stdout = self.args.output_file
-            self.env.stdout_isatty = False
-
-    def _process_auth(self):
-        """
-        If only a username provided via --auth, then ask for a password.
-        Or, take credentials from the URL, if provided.
-
-        """
-        url = urlsplit(self.args.url)
-
-        if self.args.auth:
-            if not self.args.auth.has_password():
-                # Stdin already read (if not a tty) so it's save to prompt.
-                if self.args.ignore_stdin:
-                    self.error('Unable to prompt for passwords because'
-                               ' --ignore-stdin is set.')
-                self.args.auth.prompt_password(url.netloc)
-
-        elif url.username is not None:
-            # Handle http://username:password@hostname/
-            username = url.username
-            password = url.password or ''
-            self.args.auth = AuthCredentials(
-                key=username,
-                value=password,
-                sep=SEP_CREDENTIALS,
-                orig=SEP_CREDENTIALS.join([username, password])
-            )
-
-    def _apply_no_options(self, no_options):
-        """For every `--no-OPTION` in `no_options`, set `args.OPTION` to
-        its default value. This allows for un-setting of options, e.g.,
-        specified in config.
-
-        """
-        invalid = []
-
-        for option in no_options:
-            if not option.startswith('--no-'):
-                invalid.append(option)
-                continue
-
-            # --no-option => --option
-            inverted = '--' + option[5:]
-            for action in self._actions:
-                if inverted in action.option_strings:
-                    setattr(self.args, action.dest, action.default)
-                    break
-            else:
-                invalid.append(option)
-
-        if invalid:
-            msg = 'unrecognized arguments: %s'
-            self.error(msg % ' '.join(invalid))
-
-    def _body_from_file(self, fd):
-        """There can only be one source of request data.
-
-        Bytes are always read.
-
-        """
-        if self.args.data:
-            self.error('Request body (from stdin or a file) and request '
-                       'data (key=value) cannot be mixed.')
-        self.args.data = getattr(fd, 'buffer', fd).read()
-
-    def _guess_method(self):
-        """Set `args.method` if not specified to either POST or GET
-        based on whether the request has data or not.
-
-        """
-        if self.args.method is None:
-            # Invoked as `http URL'.
-            assert not self.args.items
-            if not self.args.ignore_stdin and not self.env.stdin_isatty:
-                self.args.method = HTTP_POST
-            else:
-                self.args.method = HTTP_GET
-
-        # FIXME: False positive, e.g., "localhost" matches but is a valid URL.
-        elif not re.match('^[a-zA-Z]+$', self.args.method):
-            # Invoked as `http URL item+'. The URL is now in `args.method`
-            # and the first ITEM is now incorrectly in `args.url`.
-            try:
-                # Parse the URL as an ITEM and store it as the first ITEM arg.
-                self.args.items.insert(0, KeyValueArgType(
-                    *SEP_GROUP_ALL_ITEMS).__call__(self.args.url))
-
-            except ArgumentTypeError as e:
-                if self.args.traceback:
-                    raise
-                self.error(e.args[0])
-
-            else:
-                # Set the URL correctly
-                self.args.url = self.args.method
-                # Infer the method
-                has_data = (
-                    (not self.args.ignore_stdin and not self.env.stdin_isatty)
-                    or any(item.sep in SEP_GROUP_DATA_ITEMS
-                           for item in self.args.items)
-                )
-                self.args.method = HTTP_POST if has_data else HTTP_GET
-
-    def _parse_items(self):
-        """Parse `args.items` into `args.headers`, `args.data`, `args.params`,
-        and `args.files`.
-
-        """
-        try:
-            items = parse_items(
-                items=self.args.items,
-                data_class=ParamsDict if self.args.form else OrderedDict
-            )
-        except ParseError as e:
-            if self.args.traceback:
-                raise
-            self.error(e.args[0])
-        else:
-            self.args.headers = items.headers
-            self.args.data = items.data
-            self.args.files = items.files
-            self.args.params = items.params
-
-        if self.args.files and not self.args.form:
-            # `http url @/path/to/file`
-            file_fields = list(self.args.files.keys())
-            if file_fields != ['']:
-                self.error(
-                    'Invalid file fields (perhaps you meant --form?): %s'
-                    % ','.join(file_fields))
-
-            fn, fd, ct = self.args.files['']
-            self.args.files = {}
-
-            self._body_from_file(fd)
-
-            if 'Content-Type' not in self.args.headers:
-                content_type = get_content_type(fn)
-                if content_type:
-                    self.args.headers['Content-Type'] = content_type
-
-    def _process_output_options(self):
-        """Apply defaults to output options, or validate the provided ones.
-
-        The default output options are stdout-type-sensitive.
-
-        """
-        def check_options(value, option):
-            unknown = set(value) - OUTPUT_OPTIONS
-            if unknown:
-                self.error('Unknown output options: {0}={1}'.format(
-                    option,
-                    ','.join(unknown)
-                ))
-
-        if self.args.verbose:
-            self.args.all = True
-
-        if self.args.output_options is None:
-            if self.args.verbose:
-                self.args.output_options = ''.join(OUTPUT_OPTIONS)
-            else:
-                self.args.output_options = (
-                    OUTPUT_OPTIONS_DEFAULT
-                    if self.env.stdout_isatty
-                    else OUTPUT_OPTIONS_DEFAULT_STDOUT_REDIRECTED
-                )
-
-        if self.args.output_options_history is None:
-            self.args.output_options_history = self.args.output_options
-
-        check_options(self.args.output_options, '--print')
-        check_options(self.args.output_options_history, '--history-print')
-
-        if self.args.download and OUT_RESP_BODY in self.args.output_options:
-            # Response body is always downloaded with --download and it goes
-            # through a different routine, so we remove it.
-            self.args.output_options = str(
-                set(self.args.output_options) - set(OUT_RESP_BODY))
-
-    def _process_pretty_options(self):
-        if self.args.prettify == PRETTY_STDOUT_TTY_ONLY:
-            self.args.prettify = PRETTY_MAP[
-                'all' if self.env.stdout_isatty else 'none']
-        elif (self.args.prettify and self.env.is_windows and
-                self.args.output_file):
-            self.error('Only terminal output can be colorized on Windows.')
-        else:
-            # noinspection PyTypeChecker
-            self.args.prettify = PRETTY_MAP[self.args.prettify]
-
-    def _validate_download_options(self):
-        if not self.args.download:
-            if self.args.download_resume:
-                self.error('--continue only works with --download')
-        if self.args.download_resume and not (
-                self.args.download and self.args.output_file):
-            self.error('--continue requires --output to be specified')
-
-
-class ParseError(Exception):
-    pass
-
-
-class KeyValue(object):
-    """Base key-value pair parsed from CLI."""
-
-    def __init__(self, key, value, sep, orig):
-        self.key = key
-        self.value = value
-        self.sep = sep
-        self.orig = orig
-
-    def __eq__(self, other):
-        return self.__dict__ == other.__dict__
-
-    def __repr__(self):
-        return repr(self.__dict__)
-
-
-class SessionNameValidator(object):
-
-    def __init__(self, error_message):
-        self.error_message = error_message
-
-    def __call__(self, value):
-        # Session name can be a path or just a name.
-        if (os.path.sep not in value
-                and not VALID_SESSION_NAME_PATTERN.search(value)):
-            raise ArgumentError(None, self.error_message)
-        return value
-
-
-class KeyValueArgType(object):
-    """A key-value pair argument type used with `argparse`.
-
-    Parses a key-value arg and constructs a `KeyValue` instance.
-    Used for headers, form data, and other key-value pair types.
-
-    """
-
-    key_value_class = KeyValue
-
-    def __init__(self, *separators):
-        self.separators = separators
-        self.special_characters = set('\\')
-        for separator in separators:
-            self.special_characters.update(separator)
-
-    def __call__(self, string):
-        """Parse `string` and return `self.key_value_class()` instance.
-
-        The best of `self.separators` is determined (first found, longest).
-        Back slash escaped characters aren't considered as separators
-        (or parts thereof). Literal back slash characters have to be escaped
-        as well (r'\\').
-
-        """
-
-        class Escaped(str):
-            """Represents an escaped character."""
-
-        def tokenize(string):
-            """Tokenize `string`. There are only two token types - strings
-            and escaped characters:
-
-                tokenize(r'foo\=bar\\baz')
-                => ['foo', Escaped('='), 'bar', Escaped('\\'), 'baz']
-
-            """
-            tokens = ['']
-            characters = iter(string)
-            for char in characters:
-                if char == '\\':
-                    char = next(characters, '')
-                    if char not in self.special_characters:
-                        tokens[-1] += '\\' + char
-                    else:
-                        tokens.extend([Escaped(char), ''])
-                else:
-                    tokens[-1] += char
-            return tokens
-
-        tokens = tokenize(string)
-
-        # Sorting by length ensures that the longest one will be
-        # chosen as it will overwrite any shorter ones starting
-        # at the same position in the `found` dictionary.
-        separators = sorted(self.separators, key=len)
-
-        for i, token in enumerate(tokens):
-
-            if isinstance(token, Escaped):
-                continue
-
-            found = {}
-            for sep in separators:
-                pos = token.find(sep)
-                if pos != -1:
-                    found[pos] = sep
-
-            if found:
-                # Starting first, longest separator found.
-                sep = found[min(found.keys())]
-
-                key, value = token.split(sep, 1)
-
-                # Any preceding tokens are part of the key.
-                key = ''.join(tokens[:i]) + key
-
-                # Any following tokens are part of the value.
-                value += ''.join(tokens[i + 1:])
-
-                break
-
-        else:
-            raise ArgumentTypeError(
-                u'"%s" is not a valid value' % string)
-
-        return self.key_value_class(
-            key=key, value=value, sep=sep, orig=string)
-
-
-class AuthCredentials(KeyValue):
-    """Represents parsed credentials."""
-
-    def _getpass(self, prompt):
-        # To allow mocking.
-        return getpass.getpass(str(prompt))
-
-    def has_password(self):
-        return self.value is not None
-
-    def prompt_password(self, host):
-        try:
-            self.value = self._getpass(
-                'http: password for %s@%s: ' % (self.key, host))
-        except (EOFError, KeyboardInterrupt):
-            sys.stderr.write('\n')
-            sys.exit(0)
-
-
-class AuthCredentialsArgType(KeyValueArgType):
-    """A key-value arg type that parses credentials."""
-
-    key_value_class = AuthCredentials
-
-    def __call__(self, string):
-        """Parse credentials from `string`.
-
-        ("username" or "username:password").
-
-        """
-        try:
-            return super(AuthCredentialsArgType, self).__call__(string)
-        except ArgumentTypeError:
-            # No password provided, will prompt for it later.
-            return self.key_value_class(
-                key=string,
-                value=None,
-                sep=SEP_CREDENTIALS,
-                orig=string
-            )
-
-
-class RequestItemsDict(OrderedDict):
-    """Multi-value dict for URL parameters and form data."""
-
-    if is_pypy and is_py27:
-        # Manually set keys when initialized with an iterable as PyPy
-        # doesn't call __setitem__ in such case (pypy3 does).
-        def __init__(self, *args, **kwargs):
-            if len(args) == 1 and isinstance(args[0], Iterable):
-                super(RequestItemsDict, self).__init__(**kwargs)
-                for k, v in args[0]:
-                    self[k] = v
-            else:
-                super(RequestItemsDict, self).__init__(*args, **kwargs)
-
-    # noinspection PyMethodOverriding
-    def __setitem__(self, key, value):
-        """ If `key` is assigned more than once, `self[key]` holds a
-        `list` of all the values.
-
-        This allows having multiple fields with the same name in form
-        data and URL params.
-
-        """
-        assert not isinstance(value, list)
-        if key not in self:
-            super(RequestItemsDict, self).__setitem__(key, value)
-        else:
-            if not isinstance(self[key], list):
-                super(RequestItemsDict, self).__setitem__(key, [self[key]])
-            self[key].append(value)
-
-
-class ParamsDict(RequestItemsDict):
-    pass
-
-
-class DataDict(RequestItemsDict):
-
-    def items(self):
-        for key, values in super(RequestItemsDict, self).items():
-            if not isinstance(values, list):
-                values = [values]
-            for value in values:
-                yield key, value
-
-
-RequestItems = namedtuple('RequestItems',
-                          ['headers', 'data', 'files', 'params'])
-
-
-def get_content_type(filename):
-    """
-    Return the content type for ``filename`` in format appropriate
-    for Content-Type headers, or ``None`` if the file type is unknown
-    to ``mimetypes``.
-
-    """
-    mime, encoding = mimetypes.guess_type(filename, strict=False)
-    if mime:
-        content_type = mime
-        if encoding:
-            content_type = '%s; charset=%s' % (mime, encoding)
-        return content_type
-
-
-def parse_items(items,
-                headers_class=CaseInsensitiveDict,
-                data_class=OrderedDict,
-                files_class=DataDict,
-                params_class=ParamsDict):
-    """Parse `KeyValue` `items` into `data`, `headers`, `files`,
-    and `params`.
-
-    """
-    headers = []
-    data = []
-    files = []
-    params = []
-
-    for item in items:
-        value = item.value
-
-        if item.sep == SEP_HEADERS:
-            target = headers
-        elif item.sep == SEP_QUERY:
-            target = params
-        elif item.sep == SEP_FILES:
-            try:
-                with open(os.path.expanduser(value), 'rb') as f:
-                    value = (os.path.basename(value),
-                             BytesIO(f.read()),
-                             get_content_type(value))
-            except IOError as e:
-                raise ParseError('"%s": %s' % (item.orig, e))
-            target = files
-
-        elif item.sep in SEP_GROUP_DATA_ITEMS:
-
-            if item.sep in SEP_GROUP_DATA_EMBED_ITEMS:
-                try:
-                    with open(os.path.expanduser(value), 'rb') as f:
-                        value = f.read().decode('utf8')
-                except IOError as e:
-                    raise ParseError('"%s": %s' % (item.orig, e))
-                except UnicodeDecodeError:
-                    raise ParseError(
-                        '"%s": cannot embed the content of "%s",'
-                        ' not a UTF8 or ASCII-encoded text file'
-                        % (item.orig, item.value)
-                    )
-
-            if item.sep in SEP_GROUP_RAW_JSON_ITEMS:
-                try:
-                    value = load_json_preserve_order(value)
-                except ValueError as e:
-                    raise ParseError('"%s": %s' % (item.orig, e))
-            target = data
-
-        else:
-            raise TypeError(item)
-
-        target.append((item.key, value))
-
-    return RequestItems(headers_class(headers),
-                        data_class(data),
-                        files_class(files),
-                        params_class(params))
-
-
-def readable_file_arg(filename):
-    try:
-        open(filename, 'rb')
-    except IOError as ex:
-        raise ArgumentTypeError('%s: %s' % (filename, ex.args[1]))
-    return filename
httpie/models.py
@@ -1,37 +1,38 @@
-from httpie.compat import urlsplit, str
+from typing import Iterable, Optional
+from urllib.parse import urlsplit
 
 
-class HTTPMessage(object):
+class HTTPMessage:
     """Abstract class for HTTP messages."""
 
     def __init__(self, orig):
         self._orig = orig
 
-    def iter_body(self, chunk_size):
+    def iter_body(self, chunk_size: int) -> Iterable[bytes]:
         """Return an iterator over the body."""
         raise NotImplementedError()
 
-    def iter_lines(self, chunk_size):
+    def iter_lines(self, chunk_size: int) -> Iterable[bytes]:
         """Return an iterator over the body yielding (`line`, `line_feed`)."""
         raise NotImplementedError()
 
     @property
-    def headers(self):
+    def headers(self) -> str:
         """Return a `str` with the message's headers."""
         raise NotImplementedError()
 
     @property
-    def encoding(self):
+    def encoding(self) -> Optional[str]:
         """Return a `str` with the message's encoding, if known."""
         raise NotImplementedError()
 
     @property
-    def body(self):
+    def body(self) -> bytes:
         """Return a `bytes` with the message's body."""
         raise NotImplementedError()
 
     @property
-    def content_type(self):
+    def content_type(self) -> str:
         """Return the message content type."""
         ct = self._orig.headers.get('Content-Type', '')
         if not isinstance(ct, str):
@@ -60,11 +61,7 @@ class HTTPResponse(HTTPMessage):
             20: '2',
         }[original.version]
 
-        status_line = 'HTTP/{version} {status} {reason}'.format(
-            version=version,
-            status=original.status,
-            reason=original.reason
-        )
+        status_line = f'HTTP/{version} {original.status} {original.reason}'
         headers = [status_line]
         try:
             # `original.msg` is a `http.client.HTTPMessage` on Python 3
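
The status-line change above simply swaps str.format() for an f-string; the two are equivalent:

version, status, reason = '1.1', 200, 'OK'
old = 'HTTP/{version} {status} {reason}'.format(
    version=version, status=status, reason=reason)
new = f'HTTP/{version} {status} {reason}'
assert old == new == 'HTTP/1.1 200 OK'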
httpie/output/formatters/colors.py
@@ -1,29 +1,36 @@
 from __future__ import absolute_import
 
 import json
+from typing import Optional, Type
 
 import pygments.lexer
-import pygments.token
-import pygments.styles
 import pygments.lexers
 import pygments.style
+import pygments.styles
+import pygments.token
 from pygments.formatters.terminal import TerminalFormatter
 from pygments.formatters.terminal256 import Terminal256Formatter
+from pygments.lexer import Lexer
 from pygments.lexers.special import TextLexer
+from pygments.lexers.text import HttpLexer as PygmentsHttpLexer
 from pygments.util import ClassNotFound
 
 from httpie.compat import is_windows
+from httpie.context import Environment
 from httpie.plugins import FormatterPlugin
 
 
-AVAILABLE_STYLES = set(pygments.styles.STYLE_MAP.keys())
-AVAILABLE_STYLES.add('solarized')
+AUTO_STYLE = 'auto'  # Follows terminal ANSI color styles
+DEFAULT_STYLE = AUTO_STYLE
+SOLARIZED_STYLE = 'solarized'  # Bundled here
 if is_windows:
     # Colors on Windows via colorama don't look that
-    # great and fruity seems to give the best result there
+    # great and fruity seems to give the best result there.
     DEFAULT_STYLE = 'fruity'
-else:
-    DEFAULT_STYLE = 'solarized'
+
+AVAILABLE_STYLES = set(pygments.styles.get_all_styles())
+AVAILABLE_STYLES.add(SOLARIZED_STYLE)
+AVAILABLE_STYLES.add(AUTO_STYLE)
 
 
 class ColorFormatter(FormatterPlugin):
@@ -36,46 +43,74 @@ class ColorFormatter(FormatterPlugin):
     """
     group_name = 'colors'
 
-    def __init__(self, env, explicit_json=False,
-                 color_scheme=DEFAULT_STYLE, **kwargs):
-        super(ColorFormatter, self).__init__(**kwargs)
+    def __init__(
+        self,
+        env: Environment,
+        explicit_json=False,
+        color_scheme=DEFAULT_STYLE,
+        **kwargs
+    ):
+        super().__init__(**kwargs)
 
         if not env.colors:
             self.enabled = False
             return
 
-        # --json, -j
-        self.explicit_json = explicit_json
-
-        try:
-            style_class = pygments.styles.get_style_by_name(color_scheme)
-        except ClassNotFound:
-            style_class = Solarized256Style
-
-        if env.colors == 256:
-            fmt_class = Terminal256Formatter
+        use_auto_style = color_scheme == AUTO_STYLE
+        has_256_colors = env.colors == 256
+        if use_auto_style or not has_256_colors:
+            http_lexer = PygmentsHttpLexer()
+            formatter = TerminalFormatter()
         else:
-            fmt_class = TerminalFormatter
-        self.formatter = fmt_class(style=style_class)
+            http_lexer = SimplifiedHTTPLexer()
+            formatter = Terminal256Formatter(
+                style=self.get_style_class(color_scheme)
+            )
 
-    def format_headers(self, headers):
-        return pygments.highlight(headers, HTTPLexer(), self.formatter).strip()
+        self.explicit_json = explicit_json  # --json
+        self.formatter = formatter
+        self.http_lexer = http_lexer
 
-    def format_body(self, body, mime):
-        lexer = self.get_lexer(mime, body)
+    def format_headers(self, headers: str) -> str:
+        return pygments.highlight(
+            code=headers,
+            lexer=self.http_lexer,
+            formatter=self.formatter,
+        ).strip()
+
+    def format_body(self, body: str, mime: str) -> str:
+        lexer = self.get_lexer_for_body(mime, body)
         if lexer:
-            body = pygments.highlight(body, lexer, self.formatter)
+            body = pygments.highlight(
+                code=body,
+                lexer=lexer,
+                formatter=self.formatter,
+            )
         return body.strip()
 
-    def get_lexer(self, mime, body):
+    def get_lexer_for_body(
+        self, mime: str,
+        body: str
+    ) -> Optional[Type[Lexer]]:
         return get_lexer(
             mime=mime,
             explicit_json=self.explicit_json,
             body=body,
         )
 
+    @staticmethod
+    def get_style_class(color_scheme: str) -> Type[pygments.style.Style]:
+        try:
+            return pygments.styles.get_style_by_name(color_scheme)
+        except ClassNotFound:
+            return Solarized256Style
+
 
-def get_lexer(mime, explicit_json=False, body=''):
+def get_lexer(
+    mime: str,
+    explicit_json=False,
+    body=''
+) -> Optional[Type[Lexer]]:
     # Build candidate mime type and lexer names.
     mime_types, lexer_names = [mime], []
     type_, subtype = mime.split('/', 1)
@@ -121,7 +156,7 @@ def get_lexer(mime, explicit_json=False, body=''):
     return lexer
 
 
-class HTTPLexer(pygments.lexer.RegexLexer):
+class SimplifiedHTTPLexer(pygments.lexer.RegexLexer):
     """Simplified HTTP lexer for Pygments.
 
     It only operates on headers and provides a stronger contrast between
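
A hedged sketch of the formatter-selection logic introduced above; pick_formatter is a made-up helper, not part of httpie's API, and it only mirrors the branch in ColorFormatter.__init__: the 'auto' scheme, or a terminal without 256-color support, falls back to the plain ANSI formatter, otherwise a 256-color formatter is built for the named Pygments style.

from pygments.formatters.terminal import TerminalFormatter
from pygments.formatters.terminal256 import Terminal256Formatter

def pick_formatter(color_scheme: str, terminal_colors: int):
    use_auto_style = color_scheme == 'auto'
    has_256_colors = terminal_colors == 256
    if use_auto_style or not has_256_colors:
        return TerminalFormatter()
    return Terminal256Formatter(style=color_scheme)

pick_formatter('auto', 256)       # plain ANSI colors, follows the terminal theme
pick_formatter('monokai', 256)    # 256-color output with the named style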
httpie/output/formatters/headers.py
@@ -3,7 +3,7 @@ from httpie.plugins import FormatterPlugin
 
 class HeadersFormatter(FormatterPlugin):
 
-    def format_headers(self, headers):
+    def format_headers(self, headers: str) -> str:
         """
         Sorts headers by name while retaining relative
         order of multiple headers with the same name.
httpie/output/formatters/json.py
@@ -9,14 +9,14 @@ DEFAULT_INDENT = 4
 
 class JSONFormatter(FormatterPlugin):
 
-    def format_body(self, body, mime):
+    def format_body(self, body: str, mime: str) -> str:
         maybe_json = [
             'json',
             'javascript',
             'text',
         ]
-        if (self.kwargs['explicit_json'] or
-                any(token in mime for token in maybe_json)):
+        if (self.kwargs['explicit_json']
+                or any(token in mime for token in maybe_json)):
             try:
                 obj = json.loads(body)
             except ValueError:
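
The reflowed condition above is a pure style change; the substring check it performs treats these mime types as JSON candidates, for example:

maybe_json = ['json', 'javascript', 'text']
assert any(token in 'application/json' for token in maybe_json)
assert any(token in 'application/javascript' for token in maybe_json)
assert not any(token in 'image/png' for token in maybe_json)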
httpie/output/processing.py
@@ -1,6 +1,7 @@
 import re
+from typing import Optional, List
 
-from httpie.plugins import plugin_manager
+from httpie.plugins import plugin_manager, ConverterPlugin
 from httpie.context import Environment
 
 
@@ -11,19 +12,20 @@ def is_valid_mime(mime):
     return mime and MIME_RE.match(mime)
 
 
-class Conversion(object):
+class Conversion:
 
-    def get_converter(self, mime):
+    @staticmethod
+    def get_converter(mime: str) -> Optional[ConverterPlugin]:
         if is_valid_mime(mime):
             for converter_class in plugin_manager.get_converters():
                 if converter_class.supports(mime):
                     return converter_class(mime)
 
 
-class Formatting(object):
+class Formatting:
     """A delegate class that invokes the actual processors."""
 
-    def __init__(self, groups, env=Environment(), **kwargs):
+    def __init__(self, groups: List[str], env=Environment(), **kwargs):
         """
         :param groups: names of processor groups to be applied
         :param env: Environment
@@ -38,12 +40,12 @@ class Formatting(object):
             if p.enabled:
                 self.enabled_plugins.append(p)
 
-    def format_headers(self, headers):
+    def format_headers(self, headers: str) -> str:
         for p in self.enabled_plugins:
             headers = p.format_headers(headers)
         return headers
 
-    def format_body(self, content, mime):
+    def format_body(self, content: str, mime: str) -> str:
         if is_valid_mime(mime):
             for p in self.enabled_plugins:
                 content = p.format_body(content, mime)
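
With the change above, get_converter() becomes a @staticmethod, so it can be called on the class without instantiating Conversion. A small illustrative usage, assuming only what the diff shows (the mime type is arbitrary, and the result is None unless an installed converter plugin supports it):

from httpie.output.processing import Conversion

converter = Conversion.get_converter('application/octet-stream')
if converter is not None:
    # a plugin will convert this body before it is formatted
    pass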
@ -1,12 +1,9 @@
|
|||||||
from itertools import chain
|
from itertools import chain
|
||||||
from functools import partial
|
from typing import Callable, Iterable, Union
|
||||||
|
|
||||||
from httpie.compat import str
|
|
||||||
from httpie.context import Environment
|
from httpie.context import Environment
|
||||||
from httpie.models import HTTPRequest, HTTPResponse
|
from httpie.models import HTTPMessage
|
||||||
from httpie.input import (OUT_REQ_BODY, OUT_REQ_HEAD,
|
from httpie.output.processing import Conversion, Formatting
|
||||||
OUT_RESP_HEAD, OUT_RESP_BODY)
|
|
||||||
from httpie.output.processing import Formatting, Conversion
|
|
||||||
|
|
||||||
|
|
||||||
BINARY_SUPPRESSED_NOTICE = (
|
BINARY_SUPPRESSED_NOTICE = (
|
||||||
@ -24,112 +21,16 @@ class BinarySuppressedError(Exception):
|
|||||||
message = BINARY_SUPPRESSED_NOTICE
|
message = BINARY_SUPPRESSED_NOTICE
|
||||||
|
|
||||||
|
|
||||||
def write_stream(stream, outfile, flush):
|
class BaseStream:
|
||||||
"""Write the output stream."""
|
|
||||||
try:
|
|
||||||
# Writing bytes so we use the buffer interface (Python 3).
|
|
||||||
buf = outfile.buffer
|
|
||||||
except AttributeError:
|
|
||||||
buf = outfile
|
|
||||||
|
|
||||||
for chunk in stream:
|
|
||||||
buf.write(chunk)
|
|
||||||
if flush:
|
|
||||||
outfile.flush()
|
|
||||||
|
|
||||||
|
|
||||||
def write_stream_with_colors_win_py3(stream, outfile, flush):
|
|
||||||
"""Like `write`, but colorized chunks are written as text
|
|
||||||
directly to `outfile` to ensure it gets processed by colorama.
|
|
||||||
Applies only to Windows with Python 3 and colorized terminal output.
|
|
||||||
|
|
||||||
"""
|
|
||||||
color = b'\x1b['
|
|
||||||
encoding = outfile.encoding
|
|
||||||
for chunk in stream:
|
|
||||||
if color in chunk:
|
|
||||||
outfile.write(chunk.decode(encoding))
|
|
||||||
else:
|
|
||||||
outfile.buffer.write(chunk)
|
|
||||||
if flush:
|
|
||||||
outfile.flush()
|
|
||||||
|
|
||||||
|
|
||||||
def build_output_stream(args, env, request, response, output_options):
|
|
||||||
"""Build and return a chain of iterators over the `request`-`response`
|
|
||||||
exchange each of which yields `bytes` chunks.
|
|
||||||
|
|
||||||
"""
|
|
||||||
req_h = OUT_REQ_HEAD in output_options
|
|
||||||
req_b = OUT_REQ_BODY in output_options
|
|
||||||
resp_h = OUT_RESP_HEAD in output_options
|
|
||||||
resp_b = OUT_RESP_BODY in output_options
|
|
||||||
req = req_h or req_b
|
|
||||||
resp = resp_h or resp_b
|
|
||||||
|
|
||||||
output = []
|
|
||||||
Stream = get_stream_type(env, args)
|
|
||||||
|
|
||||||
if req:
|
|
||||||
output.append(Stream(
|
|
||||||
msg=HTTPRequest(request),
|
|
||||||
with_headers=req_h,
|
|
||||||
with_body=req_b))
|
|
||||||
|
|
||||||
if req_b and resp:
|
|
||||||
# Request/Response separator.
|
|
||||||
output.append([b'\n\n'])
|
|
||||||
|
|
||||||
if resp:
|
|
||||||
output.append(Stream(
|
|
||||||
msg=HTTPResponse(response),
|
|
||||||
with_headers=resp_h,
|
|
||||||
with_body=resp_b))
|
|
||||||
|
|
||||||
if env.stdout_isatty and resp_b:
|
|
||||||
# Ensure a blank line after the response body.
|
|
||||||
# For terminal output only.
|
|
||||||
output.append([b'\n\n'])
|
|
||||||
|
|
||||||
return chain(*output)
|
|
||||||
|
|
||||||
|
|
||||||
def get_stream_type(env, args):
|
|
||||||
"""Pick the right stream type based on `env` and `args`.
|
|
||||||
Wrap it in a partial with the type-specific args so that
|
|
||||||
we don't need to think what stream we are dealing with.
|
|
||||||
|
|
||||||
"""
|
|
||||||
if not env.stdout_isatty and not args.prettify:
|
|
||||||
Stream = partial(
|
|
||||||
RawStream,
|
|
||||||
chunk_size=RawStream.CHUNK_SIZE_BY_LINE
|
|
||||||
if args.stream
|
|
||||||
else RawStream.CHUNK_SIZE
|
|
||||||
)
|
|
||||||
elif args.prettify:
|
|
||||||
Stream = partial(
|
|
||||||
PrettyStream if args.stream else BufferedPrettyStream,
|
|
||||||
env=env,
|
|
||||||
conversion=Conversion(),
|
|
||||||
formatting=Formatting(
|
|
||||||
env=env,
|
|
||||||
groups=args.prettify,
|
|
||||||
color_scheme=args.style,
|
|
||||||
explicit_json=args.json,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
Stream = partial(EncodedStream, env=env)
|
|
||||||
|
|
||||||
return Stream
|
|
||||||
|
|
||||||
|
|
||||||
class BaseStream(object):
|
|
||||||
"""Base HTTP message output stream class."""
|
"""Base HTTP message output stream class."""
|
||||||
|
|
||||||
def __init__(self, msg, with_headers=True, with_body=True,
|
def __init__(
|
||||||
on_body_chunk_downloaded=None):
|
self,
|
||||||
|
msg: HTTPMessage,
|
||||||
|
with_headers=True,
|
||||||
|
with_body=True,
|
||||||
|
on_body_chunk_downloaded: Callable[[bytes], None] = None
|
||||||
|
):
|
||||||
"""
|
"""
|
||||||
:param msg: a :class:`models.HTTPMessage` subclass
|
:param msg: a :class:`models.HTTPMessage` subclass
|
||||||
:param with_headers: if `True`, headers will be included
|
:param with_headers: if `True`, headers will be included
|
||||||
@ -142,15 +43,15 @@ class BaseStream(object):
|
|||||||
self.with_body = with_body
|
self.with_body = with_body
|
||||||
self.on_body_chunk_downloaded = on_body_chunk_downloaded
|
self.on_body_chunk_downloaded = on_body_chunk_downloaded
|
||||||
|
|
||||||
def get_headers(self):
|
def get_headers(self) -> bytes:
|
||||||
"""Return the headers' bytes."""
|
"""Return the headers' bytes."""
|
||||||
return self.msg.headers.encode('utf8')
|
return self.msg.headers.encode('utf8')
|
||||||
|
|
||||||
def iter_body(self):
|
def iter_body(self) -> Iterable[bytes]:
|
||||||
"""Return an iterator over the message body."""
|
"""Return an iterator over the message body."""
|
||||||
raise NotImplementedError()
|
raise NotImplementedError()
|
||||||
|
|
||||||
def __iter__(self):
|
def __iter__(self) -> Iterable[bytes]:
|
||||||
"""Return an iterator over `self.msg`."""
|
"""Return an iterator over `self.msg`."""
|
||||||
if self.with_headers:
|
if self.with_headers:
|
||||||
yield self.get_headers()
|
yield self.get_headers()
|
||||||
@ -175,10 +76,10 @@ class RawStream(BaseStream):
|
|||||||
CHUNK_SIZE_BY_LINE = 1
|
CHUNK_SIZE_BY_LINE = 1
|
||||||
|
|
||||||
def __init__(self, chunk_size=CHUNK_SIZE, **kwargs):
|
def __init__(self, chunk_size=CHUNK_SIZE, **kwargs):
|
||||||
super(RawStream, self).__init__(**kwargs)
|
super().__init__(**kwargs)
|
||||||
self.chunk_size = chunk_size
|
self.chunk_size = chunk_size
|
||||||
|
|
||||||
def iter_body(self):
|
def iter_body(self) -> Iterable[bytes]:
|
||||||
return self.msg.iter_body(self.chunk_size)
|
return self.msg.iter_body(self.chunk_size)
|
||||||
|
|
||||||
|
|
||||||
@ -193,26 +94,20 @@ class EncodedStream(BaseStream):
|
|||||||
CHUNK_SIZE = 1
|
CHUNK_SIZE = 1
|
||||||
|
|
||||||
def __init__(self, env=Environment(), **kwargs):
|
def __init__(self, env=Environment(), **kwargs):
|
||||||
|
super().__init__(**kwargs)
|
||||||
super(EncodedStream, self).__init__(**kwargs)
|
|
||||||
|
|
||||||
if env.stdout_isatty:
|
if env.stdout_isatty:
|
||||||
# Use the encoding supported by the terminal.
|
# Use the encoding supported by the terminal.
|
||||||
output_encoding = env.stdout_encoding
|
output_encoding = env.stdout_encoding
|
||||||
else:
|
else:
|
||||||
# Preserve the message encoding.
|
# Preserve the message encoding.
|
||||||
output_encoding = self.msg.encoding
|
output_encoding = self.msg.encoding
|
||||||
|
|
||||||
# Default to utf8 when unsure.
|
# Default to utf8 when unsure.
|
||||||
self.output_encoding = output_encoding or 'utf8'
|
self.output_encoding = output_encoding or 'utf8'
|
||||||
|
|
||||||
def iter_body(self):
|
def iter_body(self) -> Iterable[bytes]:
|
||||||
|
|
||||||
for line, lf in self.msg.iter_lines(self.CHUNK_SIZE):
|
for line, lf in self.msg.iter_lines(self.CHUNK_SIZE):
|
||||||
|
|
||||||
if b'\0' in line:
|
if b'\0' in line:
|
||||||
raise BinarySuppressedError()
|
raise BinarySuppressedError()
|
||||||
|
|
||||||
yield line.decode(self.msg.encoding) \
|
yield line.decode(self.msg.encoding) \
|
||||||
.encode(self.output_encoding, 'replace') + lf
|
.encode(self.output_encoding, 'replace') + lf
|
||||||
|
|
||||||
@ -228,17 +123,21 @@ class PrettyStream(EncodedStream):
|
|||||||
|
|
||||||
CHUNK_SIZE = 1
|
CHUNK_SIZE = 1
|
||||||
|
|
||||||
def __init__(self, conversion, formatting, **kwargs):
|
def __init__(
|
||||||
super(PrettyStream, self).__init__(**kwargs)
|
self, conversion: Conversion,
|
||||||
|
formatting: Formatting,
|
||||||
|
**kwargs,
|
||||||
|
):
|
||||||
|
super().__init__(**kwargs)
|
||||||
self.formatting = formatting
|
self.formatting = formatting
|
||||||
self.conversion = conversion
|
self.conversion = conversion
|
||||||
self.mime = self.msg.content_type.split(';')[0]
|
self.mime = self.msg.content_type.split(';')[0]
|
||||||
|
|
||||||
def get_headers(self):
|
def get_headers(self) -> bytes:
|
||||||
return self.formatting.format_headers(
|
return self.formatting.format_headers(
|
||||||
self.msg.headers).encode(self.output_encoding)
|
self.msg.headers).encode(self.output_encoding)
|
||||||
|
|
||||||
def iter_body(self):
|
def iter_body(self) -> Iterable[bytes]:
|
||||||
first_chunk = True
|
first_chunk = True
|
||||||
iter_lines = self.msg.iter_lines(self.CHUNK_SIZE)
|
iter_lines = self.msg.iter_lines(self.CHUNK_SIZE)
|
||||||
for line, lf in iter_lines:
|
for line, lf in iter_lines:
|
||||||
@ -259,7 +158,7 @@ class PrettyStream(EncodedStream):
|
|||||||
yield self.process_body(line) + lf
|
yield self.process_body(line) + lf
|
||||||
first_chunk = False
|
first_chunk = False
|
||||||
|
|
||||||
def process_body(self, chunk):
|
def process_body(self, chunk: Union[str, bytes]) -> bytes:
|
||||||
if not isinstance(chunk, str):
|
if not isinstance(chunk, str):
|
||||||
# Text when a converter has been used,
|
# Text when a converter has been used,
|
||||||
# otherwise it will always be bytes.
|
# otherwise it will always be bytes.
|
||||||
@ -278,7 +177,7 @@ class BufferedPrettyStream(PrettyStream):
|
|||||||
|
|
||||||
CHUNK_SIZE = 1024 * 10
|
CHUNK_SIZE = 1024 * 10
|
||||||
|
|
||||||
def iter_body(self):
|
def iter_body(self) -> Iterable[bytes]:
|
||||||
# Read the whole body before prettifying it,
|
# Read the whole body before prettifying it,
|
||||||
# but bail out immediately if the body is binary.
|
# but bail out immediately if the body is binary.
|
||||||
converter = None
|
converter = None
|
||||||
httpie/output/writer.py (new file)
@@ -0,0 +1,163 @@
import argparse
import errno
from typing import Union, IO, TextIO, Tuple, Type

import requests

from httpie.context import Environment
from httpie.models import HTTPRequest, HTTPResponse
from httpie.output.processing import Conversion, Formatting
from httpie.output.streams import (
    RawStream, PrettyStream,
    BufferedPrettyStream, EncodedStream,
    BaseStream,
)
from httpie.cli.constants import (
    OUT_REQ_BODY, OUT_REQ_HEAD, OUT_RESP_BODY, OUT_RESP_HEAD,
)


def write_message(
    requests_message: Union[requests.PreparedRequest, requests.Response],
    env: Environment,
    args: argparse.Namespace,
):
    output_options_by_message_type = {
        requests.PreparedRequest: {
            'with_headers': OUT_REQ_HEAD in args.output_options,
            'with_body': OUT_REQ_BODY in args.output_options,
        },
        requests.Response: {
            'with_headers': OUT_RESP_HEAD in args.output_options,
            'with_body': OUT_RESP_BODY in args.output_options,
        },
    }
    output_options = output_options_by_message_type[type(requests_message)]
    if not any(output_options.values()):
        return
    write_stream_kwargs = {
        'stream': build_output_stream_for_message(
            args=args,
            env=env,
            requests_message=requests_message,
            **output_options,
        ),
        # NOTE: `env.stdout` will in fact be `stderr` with `--download`
        'outfile': env.stdout,
        'flush': env.stdout_isatty or args.stream
    }
    try:
        if env.is_windows and 'colors' in args.prettify:
            write_stream_with_colors_win_py3(**write_stream_kwargs)
        else:
            write_stream(**write_stream_kwargs)
    except IOError as e:
        show_traceback = args.debug or args.traceback
        if not show_traceback and e.errno == errno.EPIPE:
            # Ignore broken pipes unless --traceback.
            env.stderr.write('\n')
        else:
            raise


def write_stream(
    stream: BaseStream,
    outfile: Union[IO, TextIO],
    flush: bool
):
    """Write the output stream."""
    try:
        # Writing bytes so we use the buffer interface (Python 3).
        buf = outfile.buffer
    except AttributeError:
        buf = outfile

    for chunk in stream:
        buf.write(chunk)
        if flush:
            outfile.flush()


def write_stream_with_colors_win_py3(
    stream: 'BaseStream',
    outfile: TextIO,
    flush: bool
):
    """Like `write`, but colorized chunks are written as text
    directly to `outfile` to ensure it gets processed by colorama.
    Applies only to Windows with Python 3 and colorized terminal output.

    """
    color = b'\x1b['
    encoding = outfile.encoding
    for chunk in stream:
        if color in chunk:
            outfile.write(chunk.decode(encoding))
        else:
            outfile.buffer.write(chunk)
        if flush:
            outfile.flush()


def build_output_stream_for_message(
    args: argparse.Namespace,
    env: Environment,
    requests_message: Union[requests.PreparedRequest, requests.Response],
    with_headers: bool,
    with_body: bool,
):
    stream_class, stream_kwargs = get_stream_type_and_kwargs(
        env=env,
        args=args,
    )
    message_class = {
        requests.PreparedRequest: HTTPRequest,
        requests.Response: HTTPResponse,
    }[type(requests_message)]
    yield from stream_class(
        msg=message_class(requests_message),
        with_headers=with_headers,
        with_body=with_body,
        **stream_kwargs,
    )
    if env.stdout_isatty and with_body:
        # Ensure a blank line after the response body.
        # For terminal output only.
        yield b'\n\n'


def get_stream_type_and_kwargs(
    env: Environment,
    args: argparse.Namespace
) -> Tuple[Type['BaseStream'], dict]:
    """Pick the right stream type and kwargs for it based on `env` and `args`.

    """
    if not env.stdout_isatty and not args.prettify:
        stream_class = RawStream
        stream_kwargs = {
            'chunk_size': (
                RawStream.CHUNK_SIZE_BY_LINE
                if args.stream
                else RawStream.CHUNK_SIZE
            )
        }
    elif args.prettify:
        stream_class = PrettyStream if args.stream else BufferedPrettyStream
        stream_kwargs = {
            'env': env,
            'conversion': Conversion(),
            'formatting': Formatting(
                env=env,
                groups=args.prettify,
                color_scheme=args.style,
                explicit_json=args.json,
            )
        }
    else:
        stream_class = EncodedStream
        stream_kwargs = {
            'env': env
        }

    return stream_class, stream_kwargs
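The selection in `get_stream_type_and_kwargs()` above is easiest to see with concrete values. Below is a minimal sketch, not part of the diff; it assumes `args` only needs the attributes the function actually reads (`prettify`, `stream`, `style`, `json`) and only exercises the non-prettified branches.

# Illustration only -- not part of the diff.
import argparse

from httpie.context import Environment
from httpie.output.streams import EncodedStream, RawStream
from httpie.output.writer import get_stream_type_and_kwargs

env = Environment()
args = argparse.Namespace(prettify=[], stream=False, style='auto', json=False)

env.stdout_isatty = True       # terminal, but no prettifying: encode only
stream_class, _ = get_stream_type_and_kwargs(env=env, args=args)
assert stream_class is EncodedStream

env.stdout_isatty = False      # redirected and not prettified: raw pass-through
stream_class, kwargs = get_stream_type_and_kwargs(env=env, args=args)
assert stream_class is RawStream
assert kwargs['chunk_size'] == RawStream.CHUNK_SIZE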
@@ -1,6 +1,6 @@
 """
 WARNING: The plugin API is still work in progress and will
-probably be completely reworked by v1.0.0.
+probably be completely reworked in the future.

 """
 from httpie.plugins.base import (

@@ -15,8 +15,10 @@ from httpie.output.formatters.colors import ColorFormatter


 plugin_manager = PluginManager()
-plugin_manager.register(BasicAuthPlugin,
-                        DigestAuthPlugin)
-plugin_manager.register(HeadersFormatter,
-                        JSONFormatter,
-                        ColorFormatter)
+plugin_manager.register(
+    BasicAuthPlugin,
+    DigestAuthPlugin,
+    HeadersFormatter,
+    JSONFormatter,
+    ColorFormatter,
+)
@@ -1,4 +1,4 @@
-class BasePlugin(object):
+class BasePlugin:

     # The name of the plugin, eg. "My auth".
     name = None

@@ -15,15 +15,41 @@ class AuthPlugin(BasePlugin):
     """
     Base auth plugin class.

-    See <https://github.com/jkbrzt/httpie-ntlm> for an example auth plugin.
+    See <https://github.com/httpie/httpie-ntlm> for an example auth plugin.
+
+    See also `test_auth_plugins.py`

     """
     # The value that should be passed to --auth-type
     # to use this auth plugin. Eg. "my-auth"
     auth_type = None

-    def get_auth(self, username, password):
+    # Set to `False` to make it possible to invoke this auth
+    # plugin without requiring the user to specify credentials
+    # through `--auth, -a`.
+    auth_require = True
+
+    # By default the `-a` argument is parsed for `username:password`.
+    # Set this to `False` to disable the parsing and error handling.
+    auth_parse = True
+
+    # If both `auth_parse` and `prompt_password` are set to `True`,
+    # and the value of `-a` lacks the password part,
+    # then the user will be prompted to type the password in.
+    prompt_password = True
+
+    # Will be set to the raw value of `-a` (if provided) before
+    # `get_auth()` gets called.
+    raw_auth = None
+
+    def get_auth(self, username=None, password=None):
         """
+        If `auth_parse` is set to `True`, then `username`
+        and `password` contain the parsed credentials.
+
+        Use `self.raw_auth` to access the raw value passed through
+        `--auth, -a`.
+
         Return a ``requests.auth.AuthBase`` subclass instance.

         """

@@ -33,7 +59,7 @@ class AuthPlugin(BasePlugin):
 class TransportPlugin(BasePlugin):
     """

-    http://docs.python-requests.org/en/latest/user/advanced/#transport-adapters
+    https://2.python-requests.org/en/latest/user/advanced/#transport-adapters

     """

@@ -49,7 +75,7 @@ class TransportPlugin(BasePlugin):
         raise NotImplementedError()


-class ConverterPlugin(object):
+class ConverterPlugin(BasePlugin):

     def __init__(self, mime):
         self.mime = mime

@@ -62,7 +88,8 @@ class ConverterPlugin(object):
         raise NotImplementedError


-class FormatterPlugin(object):
+class FormatterPlugin(BasePlugin):
+    group_name = 'format'

     def __init__(self, **kwargs):
         """

@@ -74,7 +101,7 @@ class FormatterPlugin(object):
         self.enabled = True
         self.kwargs = kwargs

-    def format_headers(self, headers):
+    def format_headers(self, headers: str) -> str:
         """Return processed `headers`

         :param headers: The headers as text.

@@ -82,7 +109,7 @@ class FormatterPlugin(object):
         """
         return headers

-    def format_body(self, content, mime):
+    def format_body(self, content: str, mime: str) -> str:
         """Return processed `content`.

         :param mime: E.g., 'application/atom+xml'.
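The new `auth_require`, `auth_parse`, `prompt_password`, and `raw_auth` attributes are what third-party auth plugins hook into. A minimal sketch follows, not part of the diff; the class names and the bearer-token scheme are made up for illustration.

# Illustration only -- not part of the diff.
import requests

from httpie.plugins import AuthPlugin, plugin_manager


class BearerAuth(requests.auth.AuthBase):
    def __init__(self, token):
        self.token = token

    def __call__(self, request):
        # Attach the raw token as a bearer Authorization header.
        request.headers['Authorization'] = 'Bearer %s' % self.token
        return request


class BearerAuthPlugin(AuthPlugin):
    name = 'Bearer token auth'
    auth_type = 'bearer'
    auth_parse = False  # keep the raw --auth value; no user:pass splitting

    def get_auth(self, username=None, password=None):
        return BearerAuth(self.raw_auth)


plugin_manager.register(BearerAuthPlugin)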
@@ -5,37 +5,40 @@ import requests.auth
 from httpie.plugins.base import AuthPlugin


+# noinspection PyAbstractClass
 class BuiltinAuthPlugin(AuthPlugin):

     package_name = '(builtin)'


 class HTTPBasicAuth(requests.auth.HTTPBasicAuth):

-    def __call__(self, r):
+    def __call__(
+        self,
+        request: requests.PreparedRequest
+    ) -> requests.PreparedRequest:
         """
         Override username/password serialization to allow unicode.

-        See https://github.com/jkbrzt/httpie/issues/212
+        See https://github.com/jakubroztocil/httpie/issues/212

         """
-        r.headers['Authorization'] = type(self).make_header(
+        request.headers['Authorization'] = type(self).make_header(
             self.username, self.password).encode('latin1')
-        return r
+        return request

     @staticmethod
-    def make_header(username, password):
+    def make_header(username: str, password: str) -> str:
         credentials = u'%s:%s' % (username, password)
         token = b64encode(credentials.encode('utf8')).strip().decode('latin1')
         return 'Basic %s' % token


 class BasicAuthPlugin(BuiltinAuthPlugin):

     name = 'Basic HTTP auth'
     auth_type = 'basic'

-    def get_auth(self, username, password):
+    # noinspection PyMethodOverriding
+    def get_auth(self, username: str, password: str) -> HTTPBasicAuth:
         return HTTPBasicAuth(username, password)


@@ -44,5 +47,10 @@ class DigestAuthPlugin(BuiltinAuthPlugin):
     name = 'Digest HTTP auth'
     auth_type = 'digest'

-    def get_auth(self, username, password):
+    # noinspection PyMethodOverriding
+    def get_auth(
+        self,
+        username: str,
+        password: str
+    ) -> requests.auth.HTTPDigestAuth:
         return requests.auth.HTTPDigestAuth(username, password)
|
|||||||
from itertools import groupby
|
from itertools import groupby
|
||||||
|
from operator import attrgetter
|
||||||
|
from typing import Dict, List, Type
|
||||||
|
|
||||||
from pkg_resources import iter_entry_points
|
from pkg_resources import iter_entry_points
|
||||||
from httpie.plugins import AuthPlugin, FormatterPlugin, ConverterPlugin
|
|
||||||
from httpie.plugins.base import TransportPlugin
|
from httpie.plugins import AuthPlugin, ConverterPlugin, FormatterPlugin
|
||||||
|
from httpie.plugins.base import BasePlugin, TransportPlugin
|
||||||
|
|
||||||
|
|
||||||
ENTRY_POINT_NAMES = [
|
ENTRY_POINT_NAMES = [
|
||||||
@ -12,17 +16,17 @@ ENTRY_POINT_NAMES = [
|
|||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
class PluginManager(object):
|
class PluginManager(list):
|
||||||
|
|
||||||
def __init__(self):
|
def register(self, *plugins: Type[BasePlugin]):
|
||||||
self._plugins = []
|
|
||||||
|
|
||||||
def __iter__(self):
|
|
||||||
return iter(self._plugins)
|
|
||||||
|
|
||||||
def register(self, *plugins):
|
|
||||||
for plugin in plugins:
|
for plugin in plugins:
|
||||||
self._plugins.append(plugin)
|
self.append(plugin)
|
||||||
|
|
||||||
|
def unregister(self, plugin: Type[BasePlugin]):
|
||||||
|
self.remove(plugin)
|
||||||
|
|
||||||
|
def filter(self, by_type=Type[BasePlugin]):
|
||||||
|
return [plugin for plugin in self if issubclass(plugin, by_type)]
|
||||||
|
|
||||||
def load_installed_plugins(self):
|
def load_installed_plugins(self):
|
||||||
for entry_point_name in ENTRY_POINT_NAMES:
|
for entry_point_name in ENTRY_POINT_NAMES:
|
||||||
@ -32,34 +36,34 @@ class PluginManager(object):
|
|||||||
self.register(entry_point.load())
|
self.register(entry_point.load())
|
||||||
|
|
||||||
# Auth
|
# Auth
|
||||||
def get_auth_plugins(self):
|
def get_auth_plugins(self) -> List[Type[AuthPlugin]]:
|
||||||
return [plugin for plugin in self if issubclass(plugin, AuthPlugin)]
|
return self.filter(AuthPlugin)
|
||||||
|
|
||||||
def get_auth_plugin_mapping(self):
|
def get_auth_plugin_mapping(self) -> Dict[str, Type[AuthPlugin]]:
|
||||||
return dict((plugin.auth_type, plugin)
|
return {
|
||||||
for plugin in self.get_auth_plugins())
|
plugin.auth_type: plugin for plugin in self.get_auth_plugins()
|
||||||
|
}
|
||||||
|
|
||||||
def get_auth_plugin(self, auth_type):
|
def get_auth_plugin(self, auth_type: str) -> Type[AuthPlugin]:
|
||||||
return self.get_auth_plugin_mapping()[auth_type]
|
return self.get_auth_plugin_mapping()[auth_type]
|
||||||
|
|
||||||
# Output processing
|
# Output processing
|
||||||
def get_formatters(self):
|
def get_formatters(self) -> List[Type[FormatterPlugin]]:
|
||||||
return [plugin for plugin in self
|
return self.filter(FormatterPlugin)
|
||||||
if issubclass(plugin, FormatterPlugin)]
|
|
||||||
|
|
||||||
def get_formatters_grouped(self):
|
def get_formatters_grouped(self) -> Dict[str, List[Type[FormatterPlugin]]]:
|
||||||
groups = {}
|
return {
|
||||||
for group_name, group in groupby(
|
group_name: list(group)
|
||||||
self.get_formatters(),
|
for group_name, group
|
||||||
key=lambda p: getattr(p, 'group_name', 'format')):
|
in groupby(self.get_formatters(), key=attrgetter('group_name'))
|
||||||
groups[group_name] = list(group)
|
}
|
||||||
return groups
|
|
||||||
|
|
||||||
def get_converters(self):
|
def get_converters(self) -> List[Type[ConverterPlugin]]:
|
||||||
return [plugin for plugin in self
|
return self.filter(ConverterPlugin)
|
||||||
if issubclass(plugin, ConverterPlugin)]
|
|
||||||
|
|
||||||
# Adapters
|
# Adapters
|
||||||
def get_transport_plugins(self):
|
def get_transport_plugins(self) -> List[Type[TransportPlugin]]:
|
||||||
return [plugin for plugin in self
|
return self.filter(TransportPlugin)
|
||||||
if issubclass(plugin, TransportPlugin)]
|
|
||||||
|
def __repr__(self):
|
||||||
|
return f'<PluginManager: {list(self)}>'
|
||||||
|
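Since `PluginManager` is now a `list` subclass, the registrations shown earlier can be inspected with plain list operations. A small sketch, not part of the diff, assuming only the built-in plugins are registered:

# Illustration only -- not part of the diff.
from httpie.plugins import plugin_manager
from httpie.plugins.base import AuthPlugin

auth_types = [plugin.auth_type for plugin in plugin_manager.get_auth_plugins()]
assert 'basic' in auth_types and 'digest' in auth_types
assert plugin_manager.get_auth_plugin('basic') in plugin_manager  # plain list op
assert plugin_manager.filter(AuthPlugin) == plugin_manager.get_auth_plugins()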
@@ -1,86 +1,60 @@
-"""Persistent, JSON-serialized sessions.
+"""
+Persistent, JSON-serialized sessions.

 """
-import re
 import os
+import re
+from pathlib import Path
+from typing import Optional, Union
+from urllib.parse import urlsplit

+from requests.auth import AuthBase
 from requests.cookies import RequestsCookieJar, create_cookie

-from httpie.compat import urlsplit
+from httpie.cli.dicts import RequestHeadersDict
 from httpie.config import BaseConfigDict, DEFAULT_CONFIG_DIR
 from httpie.plugins import plugin_manager


 SESSIONS_DIR_NAME = 'sessions'
-DEFAULT_SESSIONS_DIR = os.path.join(DEFAULT_CONFIG_DIR, SESSIONS_DIR_NAME)
+DEFAULT_SESSIONS_DIR = DEFAULT_CONFIG_DIR / SESSIONS_DIR_NAME
 VALID_SESSION_NAME_PATTERN = re.compile('^[a-zA-Z0-9_.-]+$')
 # Request headers starting with these prefixes won't be stored in sessions.
 # They are specific to each request.
-# http://en.wikipedia.org/wiki/List_of_HTTP_header_fields#Requests
+# <https://en.wikipedia.org/wiki/List_of_HTTP_header_fields#Requests>
 SESSION_IGNORED_HEADER_PREFIXES = ['Content-', 'If-']


-def get_response(requests_session, session_name,
-                 config_dir, args, read_only=False):
-    """Like `client.get_responses`, but applies permanent
-    aspects of the session to the request.
-
-    """
-    from .client import get_requests_kwargs, dump_request
+def get_httpie_session(
+    config_dir: Path,
+    session_name: str,
+    host: Optional[str],
+    url: str,
+) -> 'Session':
     if os.path.sep in session_name:
         path = os.path.expanduser(session_name)
     else:
-        hostname = (args.headers.get('Host', None) or
-                    urlsplit(args.url).netloc.split('@')[-1])
+        hostname = host or urlsplit(url).netloc.split('@')[-1]
         if not hostname:
             # HACK/FIXME: httpie-unixsocket's URLs have no hostname.
             hostname = 'localhost'

         # host:port => host_port
         hostname = hostname.replace(':', '_')
-        path = os.path.join(config_dir,
-                            SESSIONS_DIR_NAME,
-                            hostname,
-                            session_name + '.json')
+        path = (
+            config_dir / SESSIONS_DIR_NAME / hostname / f'{session_name}.json'
+        )

     session = Session(path)
     session.load()
-
-    kwargs = get_requests_kwargs(args, base_headers=session.headers)
-    if args.debug:
-        dump_request(kwargs)
-    session.update_headers(kwargs['headers'])
-
-    if args.auth:
-        session.auth = {
-            'type': args.auth_type,
-            'username': args.auth.key,
-            'password': args.auth.value,
-        }
-    elif session.auth:
-        kwargs['auth'] = session.auth
-
-    requests_session.cookies = session.cookies
-
-    try:
-        response = requests_session.request(**kwargs)
-    except Exception:
-        raise
-    else:
-        # Existing sessions with `read_only=True` don't get updated.
-        if session.is_new() or not read_only:
-            session.cookies = requests_session.cookies
-            session.save()
-        return response
+    return session


 class Session(BaseConfigDict):
-    helpurl = 'https://github.com/jkbrzt/httpie#sessions'
+    helpurl = 'https://httpie.org/doc#sessions'
     about = 'HTTPie session file'

-    def __init__(self, path, *args, **kwargs):
-        super(Session, self).__init__(*args, **kwargs)
-        self._path = path
+    def __init__(self, path: Union[str, Path]):
+        super().__init__(path=Path(path))
         self['headers'] = {}
         self['cookies'] = {}
         self['auth'] = {

@@ -89,21 +63,17 @@ class Session(BaseConfigDict):
             'password': None
         }

-    def _get_path(self):
-        return self._path
-
-    def update_headers(self, request_headers):
+    def update_headers(self, request_headers: RequestHeadersDict):
         """
         Update the session headers with the request ones while ignoring
         certain name prefixes.

-        :type request_headers: dict
-
         """
+        headers = self.headers
         for name, value in request_headers.items():

             if value is None:
-                continue  # Ignore explicitely unset headers
+                continue  # Ignore explicitly unset headers

             value = value.decode('utf8')
             if name == 'User-Agent' and value.startswith('HTTPie/'):

@@ -113,14 +83,16 @@ class Session(BaseConfigDict):
                 if name.lower().startswith(prefix.lower()):
                     break
             else:
-                self['headers'][name] = value
+                headers[name] = value
+
+        self['headers'] = dict(headers)

     @property
-    def headers(self):
-        return self['headers']
+    def headers(self) -> RequestHeadersDict:
+        return RequestHeadersDict(self['headers'])

     @property
-    def cookies(self):
+    def cookies(self) -> RequestsCookieJar:
         jar = RequestsCookieJar()
         for name, cookie_dict in self['cookies'].items():
             jar.set_cookie(create_cookie(

@@ -129,28 +101,46 @@ class Session(BaseConfigDict):
         return jar

     @cookies.setter
-    def cookies(self, jar):
-        """
-        :type jar: CookieJar
-        """
-        # http://docs.python.org/2/library/cookielib.html#cookie-objects
+    def cookies(self, jar: RequestsCookieJar):
+        # <https://docs.python.org/2/library/cookielib.html#cookie-objects>
         stored_attrs = ['value', 'path', 'secure', 'expires']
         self['cookies'] = {}
         for cookie in jar:
-            self['cookies'][cookie.name] = dict(
-                (attname, getattr(cookie, attname))
+            self['cookies'][cookie.name] = {
+                attname: getattr(cookie, attname)
                 for attname in stored_attrs
-            )
+            }

     @property
-    def auth(self):
+    def auth(self) -> Optional[AuthBase]:
         auth = self.get('auth', None)
         if not auth or not auth['type']:
             return
-        auth_plugin = plugin_manager.get_auth_plugin(auth['type'])()
-        return auth_plugin.get_auth(auth['username'], auth['password'])
+
+        plugin = plugin_manager.get_auth_plugin(auth['type'])()
+
+        credentials = {'username': None, 'password': None}
+        try:
+            # New style
+            plugin.raw_auth = auth['raw_auth']
+        except KeyError:
+            # Old style
+            credentials = {
+                'username': auth['username'],
+                'password': auth['password'],
+            }
+        else:
+            if plugin.auth_parse:
+                from httpie.cli.argtypes import parse_auth
+                parsed = parse_auth(plugin.raw_auth)
+                credentials = {
+                    'username': parsed.key,
+                    'password': parsed.value,
+                }
+
+        return plugin.get_auth(**credentials)

     @auth.setter
-    def auth(self, auth):
-        assert set(['type', 'username', 'password']) == set(auth.keys())
+    def auth(self, auth: dict):
+        assert {'type', 'raw_auth'} == auth.keys()
         self['auth'] = auth
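The `auth` property above reads both on-disk shapes: old-style records with parsed `username`/`password` keys and new-style records that store the raw `-a` value under `raw_auth`. A brief sketch, not part of the diff; the file path and credentials are made up.

# Illustration only -- not part of the diff.
from httpie.sessions import Session

session = Session('/tmp/example-session.json')
# New-style storage: only the raw `-a` value is kept; the property parses it
# back through the plugin when the session is used.
session['auth'] = {'type': 'basic', 'raw_auth': 'john:secret'}
print(type(session.auth).__name__)   # -> HTTPBasicAuth (via the basic plugin)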
|
40
httpie/status.py
Normal file
40
httpie/status.py
Normal file
@ -0,0 +1,40 @@
|
|||||||
|
from enum import IntEnum, unique
|
||||||
|
|
||||||
|
|
||||||
|
@unique
|
||||||
|
class ExitStatus(IntEnum):
|
||||||
|
"""Program exit status code constants."""
|
||||||
|
SUCCESS = 0
|
||||||
|
ERROR = 1
|
||||||
|
ERROR_TIMEOUT = 2
|
||||||
|
|
||||||
|
# See --check-status
|
||||||
|
ERROR_HTTP_3XX = 3
|
||||||
|
ERROR_HTTP_4XX = 4
|
||||||
|
ERROR_HTTP_5XX = 5
|
||||||
|
|
||||||
|
ERROR_TOO_MANY_REDIRECTS = 6
|
||||||
|
PLUGIN_ERROR = 7
|
||||||
|
# 128+2 SIGINT
|
||||||
|
# <http://www.tldp.org/LDP/abs/html/exitcodes.html>
|
||||||
|
ERROR_CTRL_C = 130
|
||||||
|
|
||||||
|
|
||||||
|
def http_status_to_exit_status(http_status: int, follow=False) -> ExitStatus:
|
||||||
|
"""
|
||||||
|
Translate HTTP status code to exit status code.
|
||||||
|
|
||||||
|
(Relevant only when invoked with --check-status or --download.)
|
||||||
|
|
||||||
|
"""
|
||||||
|
if 300 <= http_status <= 399 and not follow:
|
||||||
|
# Redirect
|
||||||
|
return ExitStatus.ERROR_HTTP_3XX
|
||||||
|
elif 400 <= http_status <= 499:
|
||||||
|
# Client Error
|
||||||
|
return ExitStatus.ERROR_HTTP_4XX
|
||||||
|
elif 500 <= http_status <= 599:
|
||||||
|
# Server Error
|
||||||
|
return ExitStatus.ERROR_HTTP_5XX
|
||||||
|
else:
|
||||||
|
return ExitStatus.SUCCESS
|
@ -1,35 +1,24 @@
|
|||||||
from __future__ import division
|
from __future__ import division
|
||||||
import json
|
import json
|
||||||
|
import mimetypes
|
||||||
|
from collections import OrderedDict
|
||||||
|
from pprint import pformat
|
||||||
|
|
||||||
from httpie.compat import is_py26, OrderedDict
|
import requests.auth
|
||||||
|
|
||||||
|
|
||||||
def load_json_preserve_order(s):
|
def load_json_preserve_order(s):
|
||||||
if is_py26:
|
|
||||||
return json.loads(s)
|
|
||||||
return json.loads(s, object_pairs_hook=OrderedDict)
|
return json.loads(s, object_pairs_hook=OrderedDict)
|
||||||
|
|
||||||
|
|
||||||
def repr_dict_nice(d):
|
def repr_dict(d: dict) -> str:
|
||||||
def prepare_dict(d):
|
return pformat(d)
|
||||||
for k, v in d.items():
|
|
||||||
if isinstance(v, dict):
|
|
||||||
v = dict(prepare_dict(v))
|
|
||||||
elif isinstance(v, bytes):
|
|
||||||
v = v.decode('utf8')
|
|
||||||
elif not isinstance(v, (int, str)):
|
|
||||||
v = repr(v)
|
|
||||||
yield k, v
|
|
||||||
return json.dumps(
|
|
||||||
dict(prepare_dict(d)),
|
|
||||||
indent=4, sort_keys=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def humanize_bytes(n, precision=2):
|
def humanize_bytes(n, precision=2):
|
||||||
# Author: Doug Latornell
|
# Author: Doug Latornell
|
||||||
# Licence: MIT
|
# Licence: MIT
|
||||||
# URL: http://code.activestate.com/recipes/577081/
|
# URL: https://code.activestate.com/recipes/577081/
|
||||||
"""Return a humanized string representation of a number of bytes.
|
"""Return a humanized string representation of a number of bytes.
|
||||||
|
|
||||||
Assumes `from __future__ import division`.
|
Assumes `from __future__ import division`.
|
||||||
@ -70,3 +59,27 @@ def humanize_bytes(n, precision=2):
|
|||||||
|
|
||||||
# noinspection PyUnboundLocalVariable
|
# noinspection PyUnboundLocalVariable
|
||||||
return '%.*f %s' % (precision, n / factor, suffix)
|
return '%.*f %s' % (precision, n / factor, suffix)
|
||||||
|
|
||||||
|
|
||||||
|
class ExplicitNullAuth(requests.auth.AuthBase):
|
||||||
|
"""Forces requests to ignore the ``.netrc``.
|
||||||
|
<https://github.com/psf/requests/issues/2773#issuecomment-174312831>
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __call__(self, r):
|
||||||
|
return r
|
||||||
|
|
||||||
|
|
||||||
|
def get_content_type(filename):
|
||||||
|
"""
|
||||||
|
Return the content type for ``filename`` in format appropriate
|
||||||
|
for Content-Type headers, or ``None`` if the file type is unknown
|
||||||
|
to ``mimetypes``.
|
||||||
|
|
||||||
|
"""
|
||||||
|
mime, encoding = mimetypes.guess_type(filename, strict=False)
|
||||||
|
if mime:
|
||||||
|
content_type = mime
|
||||||
|
if encoding:
|
||||||
|
content_type = '%s; charset=%s' % (mime, encoding)
|
||||||
|
return content_type
|
||||||
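A quick sketch of the new `get_content_type()` helper in action, not part of the diff; the exact values depend on the platform's `mimetypes` database.

# Illustration only -- not part of the diff.
from httpie.utils import get_content_type

assert get_content_type('photo.jpeg') == 'image/jpeg'
assert get_content_type('archive.tar.gz') == 'application/x-tar; charset=gzip'
assert get_content_type('README') is None   # unknown type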
@@ -1,2 +0,0 @@
-[pytest]
-norecursedirs = tests/fixtures
@@ -5,3 +5,5 @@ pytest-cov
 pytest-httpbin>=0.0.6
 docutils
 wheel
+pycodestyle
+twine
setup.cfg
@@ -1,2 +1,19 @@
 [wheel]
 universal = 1
+
+
+[tool:pytest]
+# <https://docs.pytest.org/en/latest/customize.html>
+norecursedirs = tests/fixtures
+
+
+[pycodestyle]
+# <http://pycodestyle.pycqa.org/en/latest/intro.html#configuration>
+
+exclude = .git,.idea,__pycache__,build,dist,.tox,.pytest_cache,*.egg-info
+
+# <http://pycodestyle.pycqa.org/en/latest/intro.html#error-codes>
+# E241 - multiple spaces after ‘,’
+# E501 - line too long
+# W503 - line break before binary operator
+ignore = E241,E501,W503
setup.py
@@ -35,10 +35,11 @@ tests_require = [


 install_requires = [
-    'requests>=2.3.0',
-    'Pygments>=1.5'
+    'requests>=2.22.0',
+    'Pygments>=2.5.2',
 ]


 # Conditional dependencies:

 # sdist

@@ -56,10 +57,8 @@ if 'bdist_wheel' not in sys.argv:

 # bdist_wheel
 extras_require = {
-    # http://wheel.readthedocs.io/en/latest/#defining-conditional-dependencies
-    ':python_version == "2.6"'
-    ' or python_version == "3.0"'
-    ' or python_version == "3.1" ': ['argparse>=1.2.1'],
+    # https://wheel.readthedocs.io/en/latest/#defining-conditional-dependencies
+    'python_version == "3.0" or python_version == "3.1"': ['argparse>=1.2.1'],
     ':sys_platform == "win32"': ['colorama>=0.2.4'],
 }

@@ -68,13 +67,14 @@ def long_description():
     with codecs.open('README.rst', encoding='utf8') as f:
         return f.read()


 setup(
     name='httpie',
     version=httpie.__version__,
     description=httpie.__doc__.strip(),
     long_description=long_description(),
-    url='http://httpie.org/',
-    download_url='https://github.com/jkbrzt/httpie',
+    url='https://httpie.org/',
+    download_url='https://github.com/jakubroztocil/httpie',
     author=httpie.__author__,
     author_email='jakub@roztocil.co',
     license=httpie.__licence__,

@@ -82,6 +82,7 @@ setup(
     entry_points={
         'console_scripts': [
             'http = httpie.__main__:main',
+            'https = httpie.__main__:main',
         ],
     },
     extras_require=extras_require,

@@ -91,14 +92,9 @@ setup(
     classifiers=[
         'Development Status :: 5 - Production/Stable',
         'Programming Language :: Python',
-        'Programming Language :: Python :: 2',
-        'Programming Language :: Python :: 2.6',
-        'Programming Language :: Python :: 2.7',
-        'Programming Language :: Python :: 3',
-        'Programming Language :: Python :: 3.1',
-        'Programming Language :: Python :: 3.2',
-        'Programming Language :: Python :: 3.3',
-        'Programming Language :: Python :: 3.4',
+        'Programming Language :: Python :: 3.5',
+        'Programming Language :: Python :: 3.6',
+        'Programming Language :: Python :: 3.7',
         'Environment :: Console',
         'Intended Audience :: Developers',
         'Intended Audience :: System Administrators',
@@ -5,4 +5,4 @@ HTTPie Test Suite
 Please see `CONTRIBUTING`_.


-.. _CONTRIBUTING: https://github.com/jkbrzt/httpie/blob/master/CONTRIBUTING.rst
+.. _CONTRIBUTING: https://github.com/jakubroztocil/httpie/blob/master/CONTRIBUTING.rst
@@ -1,14 +1,24 @@
 import pytest
-from pytest_httpbin.plugin import httpbin_ca_bundle
+from pytest_httpbin import certs


-# Make httpbin's CA trusted by default
-pytest.fixture(autouse=True)(httpbin_ca_bundle)
+@pytest.fixture(scope='function', autouse=True)
+def httpbin_add_ca_bundle(monkeypatch):
+    """
+    Make pytest-httpbin's CA trusted by default.
+
+    (Same as `httpbin_ca_bundle`, just auto-used.).
+
+    """
+    monkeypatch.setenv('REQUESTS_CA_BUNDLE', certs.where())


 @pytest.fixture(scope='function')
 def httpbin_secure_untrusted(monkeypatch, httpbin_secure):
-    """Like the `httpbin_secure` fixture, but without the
-    make-CA-trusted-by-default"""
+    """
+    Like the `httpbin_secure` fixture, but without the
+    make-CA-trusted-by-default.
+
+    """
     monkeypatch.delenv('REQUESTS_CA_BUNDLE')
     return httpbin_secure
@ -2,9 +2,11 @@
|
|||||||
import mock
|
import mock
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from utils import http, add_auth, HTTP_OK, TestEnvironment
|
from httpie.plugins.builtin import HTTPBasicAuth
|
||||||
import httpie.input
|
from httpie.utils import ExplicitNullAuth
|
||||||
import httpie.cli
|
from utils import http, add_auth, HTTP_OK, MockEnvironment
|
||||||
|
import httpie.cli.constants
|
||||||
|
import httpie.cli.definition
|
||||||
|
|
||||||
|
|
||||||
def test_basic_auth(httpbin_both):
|
def test_basic_auth(httpbin_both):
|
||||||
@ -22,7 +24,7 @@ def test_digest_auth(httpbin_both, argument_name):
|
|||||||
assert r.json == {'authenticated': True, 'user': 'user'}
|
assert r.json == {'authenticated': True, 'user': 'user'}
|
||||||
|
|
||||||
|
|
||||||
@mock.patch('httpie.input.AuthCredentials._getpass',
|
@mock.patch('httpie.cli.argtypes.AuthCredentials._getpass',
|
||||||
new=lambda self, prompt: 'password')
|
new=lambda self, prompt: 'password')
|
||||||
def test_password_prompt(httpbin):
|
def test_password_prompt(httpbin):
|
||||||
r = http('--auth', 'user',
|
r = http('--auth', 'user',
|
||||||
@ -55,10 +57,53 @@ def test_credentials_in_url_auth_flag_has_priority(httpbin_both):
|
|||||||
])
|
])
|
||||||
def test_only_username_in_url(url):
|
def test_only_username_in_url(url):
|
||||||
"""
|
"""
|
||||||
https://github.com/jkbrzt/httpie/issues/242
|
https://github.com/jakubroztocil/httpie/issues/242
|
||||||
|
|
||||||
"""
|
"""
|
||||||
args = httpie.cli.parser.parse_args(args=[url], env=TestEnvironment())
|
args = httpie.cli.definition.parser.parse_args(args=[url], env=MockEnvironment())
|
||||||
assert args.auth
|
assert args.auth
|
||||||
assert args.auth.key == 'username'
|
assert args.auth.username == 'username'
|
||||||
assert args.auth.value == ''
|
assert args.auth.password == ''
|
||||||
|
|
||||||
|
|
||||||
|
def test_missing_auth(httpbin):
|
||||||
|
r = http(
|
||||||
|
'--auth-type=basic',
|
||||||
|
'GET',
|
||||||
|
httpbin + '/basic-auth/user/password',
|
||||||
|
tolerate_error_exit_status=True
|
||||||
|
)
|
||||||
|
assert HTTP_OK not in r
|
||||||
|
assert '--auth required' in r.stderr
|
||||||
|
|
||||||
|
|
||||||
|
def test_netrc(httpbin_both):
|
||||||
|
with mock.patch('requests.sessions.get_netrc_auth') as get_netrc_auth:
|
||||||
|
get_netrc_auth.return_value = ('httpie', 'password')
|
||||||
|
r = http(httpbin_both + '/basic-auth/httpie/password')
|
||||||
|
assert get_netrc_auth.call_count == 1
|
||||||
|
assert HTTP_OK in r
|
||||||
|
|
||||||
|
|
||||||
|
def test_ignore_netrc(httpbin_both):
|
||||||
|
with mock.patch('requests.sessions.get_netrc_auth') as get_netrc_auth:
|
||||||
|
get_netrc_auth.return_value = ('httpie', 'password')
|
||||||
|
r = http('--ignore-netrc', httpbin_both + '/basic-auth/httpie/password')
|
||||||
|
assert get_netrc_auth.call_count == 0
|
||||||
|
assert 'HTTP/1.1 401 UNAUTHORIZED' in r
|
||||||
|
|
||||||
|
|
||||||
|
def test_ignore_netrc_null_auth():
|
||||||
|
args = httpie.cli.definition.parser.parse_args(
|
||||||
|
args=['--ignore-netrc', 'example.org'],
|
||||||
|
env=MockEnvironment(),
|
||||||
|
)
|
||||||
|
assert isinstance(args.auth, ExplicitNullAuth)
|
||||||
|
|
||||||
|
|
||||||
|
def test_ignore_netrc_together_with_auth():
|
||||||
|
args = httpie.cli.definition.parser.parse_args(
|
||||||
|
args=['--ignore-netrc', '--auth=username:password', 'example.org'],
|
||||||
|
env=MockEnvironment(),
|
||||||
|
)
|
||||||
|
assert isinstance(args.auth, HTTPBasicAuth)
|
||||||
|
133
tests/test_auth_plugins.py
Normal file
133
tests/test_auth_plugins.py
Normal file
@ -0,0 +1,133 @@
|
|||||||
|
from mock import mock
|
||||||
|
|
||||||
|
from httpie.cli.constants import SEPARATOR_CREDENTIALS
|
||||||
|
from httpie.plugins import AuthPlugin, plugin_manager
|
||||||
|
from utils import http, HTTP_OK
|
||||||
|
|
||||||
|
# TODO: run all these tests in session mode as well
|
||||||
|
|
||||||
|
USERNAME = 'user'
|
||||||
|
PASSWORD = 'password'
|
||||||
|
# Basic auth encoded `USERNAME` and `PASSWORD`
|
||||||
|
# noinspection SpellCheckingInspection
|
||||||
|
BASIC_AUTH_HEADER_VALUE = 'Basic dXNlcjpwYXNzd29yZA=='
|
||||||
|
BASIC_AUTH_URL = '/basic-auth/{0}/{1}'.format(USERNAME, PASSWORD)
|
||||||
|
AUTH_OK = {'authenticated': True, 'user': USERNAME}
|
||||||
|
|
||||||
|
|
||||||
|
def basic_auth(header=BASIC_AUTH_HEADER_VALUE):
|
||||||
|
|
||||||
|
def inner(r):
|
||||||
|
r.headers['Authorization'] = header
|
||||||
|
return r
|
||||||
|
|
||||||
|
return inner
|
||||||
|
|
||||||
|
|
||||||
|
def test_auth_plugin_parse_auth_false(httpbin):
|
||||||
|
|
||||||
|
class Plugin(AuthPlugin):
|
||||||
|
auth_type = 'test-parse-false'
|
||||||
|
auth_parse = False
|
||||||
|
|
||||||
|
def get_auth(self, username=None, password=None):
|
||||||
|
assert username is None
|
||||||
|
assert password is None
|
||||||
|
assert self.raw_auth == BASIC_AUTH_HEADER_VALUE
|
||||||
|
return basic_auth(self.raw_auth)
|
||||||
|
|
||||||
|
plugin_manager.register(Plugin)
|
||||||
|
try:
|
||||||
|
r = http(
|
||||||
|
httpbin + BASIC_AUTH_URL,
|
||||||
|
'--auth-type',
|
||||||
|
Plugin.auth_type,
|
||||||
|
'--auth',
|
||||||
|
BASIC_AUTH_HEADER_VALUE,
|
||||||
|
)
|
||||||
|
assert HTTP_OK in r
|
||||||
|
assert r.json == AUTH_OK
|
||||||
|
finally:
|
||||||
|
plugin_manager.unregister(Plugin)
|
||||||
|
|
||||||
|
|
||||||
|
def test_auth_plugin_require_auth_false(httpbin):
|
||||||
|
|
||||||
|
class Plugin(AuthPlugin):
|
||||||
|
auth_type = 'test-require-false'
|
||||||
|
auth_require = False
|
||||||
|
|
||||||
|
def get_auth(self, username=None, password=None):
|
||||||
|
assert self.raw_auth is None
|
||||||
|
assert username is None
|
||||||
|
assert password is None
|
||||||
|
return basic_auth()
|
||||||
|
|
||||||
|
plugin_manager.register(Plugin)
|
||||||
|
try:
|
||||||
|
r = http(
|
||||||
|
httpbin + BASIC_AUTH_URL,
|
||||||
|
'--auth-type',
|
||||||
|
Plugin.auth_type,
|
||||||
|
)
|
||||||
|
assert HTTP_OK in r
|
||||||
|
assert r.json == AUTH_OK
|
||||||
|
finally:
|
||||||
|
plugin_manager.unregister(Plugin)
|
||||||
|
|
||||||
|
|
||||||
|
def test_auth_plugin_require_auth_false_and_auth_provided(httpbin):
|
||||||
|
|
||||||
|
class Plugin(AuthPlugin):
|
||||||
|
auth_type = 'test-require-false-yet-provided'
|
||||||
|
auth_require = False
|
||||||
|
|
||||||
|
def get_auth(self, username=None, password=None):
|
||||||
|
assert self.raw_auth == USERNAME + SEPARATOR_CREDENTIALS + PASSWORD
|
||||||
|
assert username == USERNAME
|
||||||
|
assert password == PASSWORD
|
||||||
|
return basic_auth()
|
||||||
|
|
||||||
|
plugin_manager.register(Plugin)
|
||||||
|
try:
|
||||||
|
r = http(
|
||||||
|
httpbin + BASIC_AUTH_URL,
|
||||||
|
'--auth-type',
|
||||||
|
Plugin.auth_type,
|
||||||
|
'--auth',
|
||||||
|
USERNAME + SEPARATOR_CREDENTIALS + PASSWORD,
|
||||||
|
)
|
||||||
|
assert HTTP_OK in r
|
||||||
|
assert r.json == AUTH_OK
|
||||||
|
finally:
|
||||||
|
plugin_manager.unregister(Plugin)
|
||||||
|
|
||||||
|
|
||||||
|
@mock.patch('httpie.cli.argtypes.AuthCredentials._getpass',
|
||||||
|
new=lambda self, prompt: 'UNEXPECTED_PROMPT_RESPONSE')
|
||||||
|
def test_auth_plugin_prompt_password_false(httpbin):
|
||||||
|
|
||||||
|
class Plugin(AuthPlugin):
|
||||||
|
auth_type = 'test-prompt-false'
|
||||||
|
prompt_password = False
|
||||||
|
|
||||||
|
def get_auth(self, username=None, password=None):
|
||||||
|
assert self.raw_auth == USERNAME
|
||||||
|
assert username == USERNAME
|
||||||
|
assert password is None
|
||||||
|
return basic_auth()
|
||||||
|
|
||||||
|
plugin_manager.register(Plugin)
|
||||||
|
|
||||||
|
try:
|
||||||
|
r = http(
|
||||||
|
httpbin + BASIC_AUTH_URL,
|
||||||
|
'--auth-type',
|
||||||
|
Plugin.auth_type,
|
||||||
|
'--auth',
|
||||||
|
USERNAME,
|
||||||
|
)
|
||||||
|
assert HTTP_OK in r
|
||||||
|
assert r.json == AUTH_OK
|
||||||
|
finally:
|
||||||
|
plugin_manager.unregister(Plugin)
|
@@ -1,15 +1,16 @@
 """Tests for dealing with binary request and response data."""
-from httpie.compat import urlopen
-from httpie.output.streams import BINARY_SUPPRESSED_NOTICE
-from utils import TestEnvironment, http
+import requests
+
 from fixtures import BIN_FILE_PATH, BIN_FILE_CONTENT, BIN_FILE_PATH_ARG
+from httpie.output.streams import BINARY_SUPPRESSED_NOTICE
+from utils import MockEnvironment, http


 class TestBinaryRequestData:

     def test_binary_stdin(self, httpbin):
         with open(BIN_FILE_PATH, 'rb') as stdin:
-            env = TestEnvironment(
+            env = MockEnvironment(
                 stdin=stdin,
                 stdin_isatty=False,
                 stdout_isatty=False

@@ -18,38 +19,32 @@ class TestBinaryRequestData:
         assert r == BIN_FILE_CONTENT

     def test_binary_file_path(self, httpbin):
-        env = TestEnvironment(stdin_isatty=True, stdout_isatty=False)
+        env = MockEnvironment(stdin_isatty=True, stdout_isatty=False)
         r = http('--print=B', 'POST', httpbin.url + '/post',
                  '@' + BIN_FILE_PATH_ARG, env=env, )
         assert r == BIN_FILE_CONTENT

     def test_binary_file_form(self, httpbin):
-        env = TestEnvironment(stdin_isatty=True, stdout_isatty=False)
+        env = MockEnvironment(stdin_isatty=True, stdout_isatty=False)
         r = http('--print=B', '--form', 'POST', httpbin.url + '/post',
                  'test@' + BIN_FILE_PATH_ARG, env=env)
         assert bytes(BIN_FILE_CONTENT) in bytes(r)


 class TestBinaryResponseData:
-    url = 'http://www.google.com/favicon.ico'
-
-    @property
-    def bindata(self):
-        if not hasattr(self, '_bindata'):
-            self._bindata = urlopen(self.url).read()
-        return self._bindata
-
-    def test_binary_suppresses_when_terminal(self):
-        r = http('GET', self.url)
+
+    def test_binary_suppresses_when_terminal(self, httpbin):
+        r = http('GET', httpbin + '/bytes/1024?seed=1')
         assert BINARY_SUPPRESSED_NOTICE.decode() in r

-    def test_binary_suppresses_when_not_terminal_but_pretty(self):
-        env = TestEnvironment(stdin_isatty=True, stdout_isatty=False)
-        r = http('--pretty=all', 'GET', self.url,
-                 env=env)
+    def test_binary_suppresses_when_not_terminal_but_pretty(self, httpbin):
+        env = MockEnvironment(stdin_isatty=True, stdout_isatty=False)
+        r = http('--pretty=all', 'GET', httpbin + '/bytes/1024?seed=1', env=env)
         assert BINARY_SUPPRESSED_NOTICE.decode() in r

-    def test_binary_included_and_correct_when_suitable(self):
-        env = TestEnvironment(stdin_isatty=True, stdout_isatty=False)
-        r = http('GET', self.url, env=env)
-        assert r == self.bindata
+    def test_binary_included_and_correct_when_suitable(self, httpbin):
+        env = MockEnvironment(stdin_isatty=True, stdout_isatty=False)
+        url = httpbin + '/bytes/1024?seed=1'
+        r = http('GET', url, env=env)
+        expected = requests.get(url).content
+        assert r == expected
@ -1,42 +1,42 @@
|
|||||||
"""CLI argument parsing related tests."""
|
"""CLI argument parsing related tests."""
|
||||||
import json
|
|
||||||
# noinspection PyCompatibility
|
|
||||||
import argparse
|
import argparse
|
||||||
|
import json
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
from requests.exceptions import InvalidSchema
|
from requests.exceptions import InvalidSchema
|
||||||
|
|
||||||
from httpie import input
|
import httpie.cli.argparser
|
||||||
from httpie.input import KeyValue, KeyValueArgType, DataDict
|
|
||||||
from httpie import ExitStatus
|
|
||||||
from httpie.cli import parser
|
|
||||||
from utils import TestEnvironment, http, HTTP_OK
|
|
||||||
from fixtures import (
|
from fixtures import (
|
||||||
FILE_PATH_ARG, JSON_FILE_PATH_ARG,
|
FILE_CONTENT, FILE_PATH, FILE_PATH_ARG, JSON_FILE_CONTENT,
|
||||||
JSON_FILE_CONTENT, FILE_CONTENT, FILE_PATH
|
JSON_FILE_PATH_ARG,
|
||||||
)
|
)
|
||||||
|
from httpie.status import ExitStatus
|
||||||
|
from httpie.cli import constants
|
||||||
|
from httpie.cli.definition import parser
|
||||||
|
from httpie.cli.argtypes import KeyValueArg, KeyValueArgType
|
||||||
|
from httpie.cli.requestitems import RequestItems
|
||||||
|
from utils import HTTP_OK, MockEnvironment, http
|
||||||
|
|
||||||
|
|
||||||
class TestItemParsing:
|
class TestItemParsing:
|
||||||
|
key_value_arg = KeyValueArgType(*constants.SEPARATOR_GROUP_ALL_ITEMS)
|
||||||
key_value = KeyValueArgType(*input.SEP_GROUP_ALL_ITEMS)
|
|
||||||
|
|
||||||
def test_invalid_items(self):
|
def test_invalid_items(self):
|
||||||
items = ['no-separator']
|
items = ['no-separator']
|
||||||
for item in items:
|
for item in items:
|
||||||
pytest.raises(argparse.ArgumentTypeError, self.key_value, item)
|
pytest.raises(argparse.ArgumentTypeError, self.key_value_arg, item)
|
||||||
|
|
||||||
def test_escape_separator(self):
|
def test_escape_separator(self):
|
||||||
items = input.parse_items([
|
items = RequestItems.from_args([
|
||||||
# headers
|
# headers
|
||||||
self.key_value(r'foo\:bar:baz'),
|
self.key_value_arg(r'foo\:bar:baz'),
|
||||||
self.key_value(r'jack\@jill:hill'),
|
self.key_value_arg(r'jack\@jill:hill'),
|
||||||
|
|
||||||
# data
|
# data
|
||||||
self.key_value(r'baz\=bar=foo'),
|
self.key_value_arg(r'baz\=bar=foo'),
|
||||||
|
|
||||||
# files
|
# files
|
||||||
self.key_value(r'bar\@baz@%s' % FILE_PATH_ARG),
|
self.key_value_arg(r'bar\@baz@%s' % FILE_PATH_ARG),
|
||||||
])
|
])
|
||||||
# `requests.structures.CaseInsensitiveDict` => `dict`
|
# `requests.structures.CaseInsensitiveDict` => `dict`
|
||||||
headers = dict(items.headers._store.values())
|
headers = dict(items.headers._store.values())
|
||||||
@ -45,45 +45,55 @@ class TestItemParsing:
|
|||||||
'foo:bar': 'baz',
|
'foo:bar': 'baz',
|
||||||
'jack@jill': 'hill',
|
'jack@jill': 'hill',
|
||||||
}
|
}
|
||||||
assert items.data == {'baz=bar': 'foo'}
|
assert items.data == {
|
||||||
|
'baz=bar': 'foo'
|
||||||
|
}
|
||||||
assert 'bar@baz' in items.files
|
assert 'bar@baz' in items.files
|
||||||
|
|
||||||
@pytest.mark.parametrize(('string', 'key', 'sep', 'value'), [
|
@pytest.mark.parametrize(('string', 'key', 'sep', 'value'), [
|
||||||
('path=c:\windows', 'path', '=', 'c:\windows'),
|
('path=c:\\windows', 'path', '=', 'c:\\windows'),
|
||||||
('path=c:\windows\\', 'path', '=', 'c:\windows\\'),
|
('path=c:\\windows\\', 'path', '=', 'c:\\windows\\'),
|
||||||
('path\==c:\windows', 'path=', '=', 'c:\windows'),
|
('path\\==c:\\windows', 'path=', '=', 'c:\\windows'),
|
||||||
])
|
])
|
||||||
def test_backslash_before_non_special_character_does_not_escape(
|
def test_backslash_before_non_special_character_does_not_escape(
|
||||||
self, string, key, sep, value):
|
self, string, key, sep, value
|
||||||
expected = KeyValue(orig=string, key=key, sep=sep, value=value)
|
):
|
||||||
actual = self.key_value(string)
|
expected = KeyValueArg(orig=string, key=key, sep=sep, value=value)
|
||||||
|
actual = self.key_value_arg(string)
|
||||||
assert actual == expected
|
assert actual == expected
|
||||||
|
|
||||||
def test_escape_longsep(self):
|
def test_escape_longsep(self):
|
||||||
items = input.parse_items([
|
items = RequestItems.from_args([
|
||||||
self.key_value(r'bob\:==foo'),
|
self.key_value_arg(r'bob\:==foo'),
|
||||||
])
|
])
|
||||||
assert items.params == {'bob:': 'foo'}
|
assert items.params == {
|
||||||
|
'bob:': 'foo'
|
||||||
|
}
|
||||||
|
|
||||||
def test_valid_items(self):
|
def test_valid_items(self):
|
||||||
items = input.parse_items([
|
items = RequestItems.from_args([
|
||||||
self.key_value('string=value'),
|
self.key_value_arg('string=value'),
|
||||||
self.key_value('header:value'),
|
self.key_value_arg('Header:value'),
|
||||||
self.key_value('list:=["a", 1, {}, false]'),
|
self.key_value_arg('Unset-Header:'),
|
||||||
self.key_value('obj:={"a": "b"}'),
|
self.key_value_arg('Empty-Header;'),
|
||||||
self.key_value('eh:'),
|
self.key_value_arg('list:=["a", 1, {}, false]'),
|
||||||
self.key_value('ed='),
|
self.key_value_arg('obj:={"a": "b"}'),
|
||||||
self.key_value('bool:=true'),
|
self.key_value_arg('ed='),
|
||||||
self.key_value('file@' + FILE_PATH_ARG),
|
self.key_value_arg('bool:=true'),
|
||||||
self.key_value('query==value'),
|
self.key_value_arg('file@' + FILE_PATH_ARG),
|
||||||
self.key_value('string-embed=@' + FILE_PATH_ARG),
|
self.key_value_arg('query==value'),
|
||||||
self.key_value('raw-json-embed:=@' + JSON_FILE_PATH_ARG),
|
self.key_value_arg('string-embed=@' + FILE_PATH_ARG),
|
||||||
|
self.key_value_arg('raw-json-embed:=@' + JSON_FILE_PATH_ARG),
|
||||||
])
|
])
|
||||||
|
|
||||||
# Parsed headers
|
# Parsed headers
|
||||||
# `requests.structures.CaseInsensitiveDict` => `dict`
|
# `requests.structures.CaseInsensitiveDict` => `dict`
|
||||||
headers = dict(items.headers._store.values())
|
headers = dict(items.headers._store.values())
|
||||||
assert headers == {'header': 'value', 'eh': ''}
|
assert headers == {
|
||||||
|
'Header': 'value',
|
||||||
|
'Unset-Header': None,
|
||||||
|
'Empty-Header': ''
|
||||||
|
}
|
||||||
|
|
||||||
# Parsed data
|
# Parsed data
|
||||||
raw_json_embed = items.data.pop('raw-json-embed')
|
raw_json_embed = items.data.pop('raw-json-embed')
|
||||||
@ -94,30 +104,36 @@ class TestItemParsing:
|
|||||||
"string": "value",
|
"string": "value",
|
||||||
"bool": True,
|
"bool": True,
|
||||||
"list": ["a", 1, {}, False],
|
"list": ["a", 1, {}, False],
|
||||||
"obj": {"a": "b"},
|
"obj": {
|
||||||
|
"a": "b"
|
||||||
|
},
|
||||||
"string-embed": FILE_CONTENT,
|
"string-embed": FILE_CONTENT,
|
||||||
}
|
}
|
||||||
|
|
||||||
# Parsed query string parameters
|
# Parsed query string parameters
|
||||||
assert items.params == {'query': 'value'}
|
assert items.params == {
|
||||||
|
'query': 'value'
|
||||||
|
}
|
||||||
|
|
||||||
# Parsed file fields
|
# Parsed file fields
|
||||||
assert 'file' in items.files
|
assert 'file' in items.files
|
||||||
assert (items.files['file'][1].read().strip().decode('utf8')
|
assert (items.files['file'][1].read().strip().
|
||||||
== FILE_CONTENT)
|
decode('utf8') == FILE_CONTENT)
|
||||||
|
|
||||||
def test_multiple_file_fields_with_same_field_name(self):
|
def test_multiple_file_fields_with_same_field_name(self):
|
||||||
items = input.parse_items([
|
items = RequestItems.from_args([
|
||||||
self.key_value('file_field@' + FILE_PATH_ARG),
|
self.key_value_arg('file_field@' + FILE_PATH_ARG),
|
||||||
self.key_value('file_field@' + FILE_PATH_ARG),
|
self.key_value_arg('file_field@' + FILE_PATH_ARG),
|
||||||
])
|
])
|
||||||
assert len(items.files['file_field']) == 2
|
assert len(items.files['file_field']) == 2
|
||||||
|
|
||||||
def test_multiple_text_fields_with_same_field_name(self):
|
def test_multiple_text_fields_with_same_field_name(self):
|
||||||
items = input.parse_items(
|
items = RequestItems.from_args(
|
||||||
[self.key_value('text_field=a'),
|
request_item_args=[
|
||||||
self.key_value('text_field=b')],
|
self.key_value_arg('text_field=a'),
|
||||||
data_class=DataDict
|
self.key_value_arg('text_field=b')
|
||||||
|
],
|
||||||
|
as_form=True,
|
||||||
)
|
)
|
||||||
assert items.data['text_field'] == ['a', 'b']
|
assert items.data['text_field'] == ['a', 'b']
|
||||||
assert list(items.data.items()) == [
|
assert list(items.data.items()) == [
|
||||||
@@ -156,44 +172,44 @@ class TestQuerystring:
|
|
||||||
class TestLocalhostShorthand:
|
class TestLocalhostShorthand:
|
||||||
def test_expand_localhost_shorthand(self):
|
def test_expand_localhost_shorthand(self):
|
||||||
args = parser.parse_args(args=[':'], env=TestEnvironment())
|
args = parser.parse_args(args=[':'], env=MockEnvironment())
|
||||||
assert args.url == 'http://localhost'
|
assert args.url == 'http://localhost'
|
||||||
|
|
||||||
def test_expand_localhost_shorthand_with_slash(self):
|
def test_expand_localhost_shorthand_with_slash(self):
|
||||||
args = parser.parse_args(args=[':/'], env=TestEnvironment())
|
args = parser.parse_args(args=[':/'], env=MockEnvironment())
|
||||||
assert args.url == 'http://localhost/'
|
assert args.url == 'http://localhost/'
|
||||||
|
|
||||||
def test_expand_localhost_shorthand_with_port(self):
|
def test_expand_localhost_shorthand_with_port(self):
|
||||||
args = parser.parse_args(args=[':3000'], env=TestEnvironment())
|
args = parser.parse_args(args=[':3000'], env=MockEnvironment())
|
||||||
assert args.url == 'http://localhost:3000'
|
assert args.url == 'http://localhost:3000'
|
||||||
|
|
||||||
def test_expand_localhost_shorthand_with_path(self):
|
def test_expand_localhost_shorthand_with_path(self):
|
||||||
args = parser.parse_args(args=[':/path'], env=TestEnvironment())
|
args = parser.parse_args(args=[':/path'], env=MockEnvironment())
|
||||||
assert args.url == 'http://localhost/path'
|
assert args.url == 'http://localhost/path'
|
||||||
|
|
||||||
def test_expand_localhost_shorthand_with_port_and_slash(self):
|
def test_expand_localhost_shorthand_with_port_and_slash(self):
|
||||||
args = parser.parse_args(args=[':3000/'], env=TestEnvironment())
|
args = parser.parse_args(args=[':3000/'], env=MockEnvironment())
|
||||||
assert args.url == 'http://localhost:3000/'
|
assert args.url == 'http://localhost:3000/'
|
||||||
|
|
||||||
def test_expand_localhost_shorthand_with_port_and_path(self):
|
def test_expand_localhost_shorthand_with_port_and_path(self):
|
||||||
args = parser.parse_args(args=[':3000/path'], env=TestEnvironment())
|
args = parser.parse_args(args=[':3000/path'], env=MockEnvironment())
|
||||||
assert args.url == 'http://localhost:3000/path'
|
assert args.url == 'http://localhost:3000/path'
|
||||||
|
|
||||||
def test_dont_expand_shorthand_ipv6_as_shorthand(self):
|
def test_dont_expand_shorthand_ipv6_as_shorthand(self):
|
||||||
args = parser.parse_args(args=['::1'], env=TestEnvironment())
|
args = parser.parse_args(args=['::1'], env=MockEnvironment())
|
||||||
assert args.url == 'http://::1'
|
assert args.url == 'http://::1'
|
||||||
|
|
||||||
def test_dont_expand_longer_ipv6_as_shorthand(self):
|
def test_dont_expand_longer_ipv6_as_shorthand(self):
|
||||||
args = parser.parse_args(
|
args = parser.parse_args(
|
||||||
args=['::ffff:c000:0280'],
|
args=['::ffff:c000:0280'],
|
||||||
env=TestEnvironment()
|
env=MockEnvironment()
|
||||||
)
|
)
|
||||||
assert args.url == 'http://::ffff:c000:0280'
|
assert args.url == 'http://::ffff:c000:0280'
|
||||||
|
|
||||||
def test_dont_expand_full_ipv6_as_shorthand(self):
|
def test_dont_expand_full_ipv6_as_shorthand(self):
|
||||||
args = parser.parse_args(
|
args = parser.parse_args(
|
||||||
args=['0000:0000:0000:0000:0000:0000:0000:0001'],
|
args=['0000:0000:0000:0000:0000:0000:0000:0001'],
|
||||||
env=TestEnvironment()
|
env=MockEnvironment()
|
||||||
)
|
)
|
||||||
assert args.url == 'http://0000:0000:0000:0000:0000:0000:0000:0001'
|
assert args.url == 'http://0000:0000:0000:0000:0000:0000:0000:0001'
|
||||||
|
|
||||||
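The shorthand cases above boil down to one rule: a leading ':' stands for localhost, while IPv6 literals such as '::1' must pass through untouched so the default scheme can be applied later. A minimal sketch of that expansion (illustrative only; the helper name is made up and this is not HTTPie's actual parser code):

def expand_localhost_shorthand(url: str) -> str:
    # A leading ':' (optionally followed by a port and/or path) means localhost;
    # '::1'-style IPv6 addresses start with '::' and are left alone.
    if url.startswith(':') and not url.startswith('::'):
        return 'http://localhost' + url[1:]
    return url

assert expand_localhost_shorthand(':') == 'http://localhost'
assert expand_localhost_shorthand(':3000/path') == 'http://localhost:3000/path'
assert expand_localhost_shorthand('::1') == '::1'  # untouched; scheme is added later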
@@ -201,92 +217,80 @@ class TestLocalhostShorthand:
class TestArgumentParser:
|
class TestArgumentParser:
|
||||||
|
|
||||||
def setup_method(self, method):
|
def setup_method(self, method):
|
||||||
self.parser = input.HTTPieArgumentParser()
|
self.parser = httpie.cli.argparser.HTTPieArgumentParser()
|
||||||
|
|
||||||
def test_guess_when_method_set_and_valid(self):
|
def test_guess_when_method_set_and_valid(self):
|
||||||
self.parser.args = argparse.Namespace()
|
self.parser.args = argparse.Namespace()
|
||||||
self.parser.args.method = 'GET'
|
self.parser.args.method = 'GET'
|
||||||
self.parser.args.url = 'http://example.com/'
|
self.parser.args.url = 'http://example.com/'
|
||||||
self.parser.args.items = []
|
self.parser.args.request_items = []
|
||||||
self.parser.args.ignore_stdin = False
|
self.parser.args.ignore_stdin = False
|
||||||
|
self.parser.env = MockEnvironment()
|
||||||
self.parser.env = TestEnvironment()
|
|
||||||
|
|
||||||
self.parser._guess_method()
|
self.parser._guess_method()
|
||||||
|
|
||||||
assert self.parser.args.method == 'GET'
|
assert self.parser.args.method == 'GET'
|
||||||
assert self.parser.args.url == 'http://example.com/'
|
assert self.parser.args.url == 'http://example.com/'
|
||||||
assert self.parser.args.items == []
|
assert self.parser.args.request_items == []
|
||||||
|
|
||||||
def test_guess_when_method_not_set(self):
|
def test_guess_when_method_not_set(self):
|
||||||
self.parser.args = argparse.Namespace()
|
self.parser.args = argparse.Namespace()
|
||||||
self.parser.args.method = None
|
self.parser.args.method = None
|
||||||
self.parser.args.url = 'http://example.com/'
|
self.parser.args.url = 'http://example.com/'
|
||||||
self.parser.args.items = []
|
self.parser.args.request_items = []
|
||||||
self.parser.args.ignore_stdin = False
|
self.parser.args.ignore_stdin = False
|
||||||
self.parser.env = TestEnvironment()
|
self.parser.env = MockEnvironment()
|
||||||
|
|
||||||
self.parser._guess_method()
|
self.parser._guess_method()
|
||||||
|
|
||||||
assert self.parser.args.method == 'GET'
|
assert self.parser.args.method == 'GET'
|
||||||
assert self.parser.args.url == 'http://example.com/'
|
assert self.parser.args.url == 'http://example.com/'
|
||||||
assert self.parser.args.items == []
|
assert self.parser.args.request_items == []
|
||||||
|
|
||||||
def test_guess_when_method_set_but_invalid_and_data_field(self):
|
def test_guess_when_method_set_but_invalid_and_data_field(self):
|
||||||
self.parser.args = argparse.Namespace()
|
self.parser.args = argparse.Namespace()
|
||||||
self.parser.args.method = 'http://example.com/'
|
self.parser.args.method = 'http://example.com/'
|
||||||
self.parser.args.url = 'data=field'
|
self.parser.args.url = 'data=field'
|
||||||
self.parser.args.items = []
|
self.parser.args.request_items = []
|
||||||
self.parser.args.ignore_stdin = False
|
self.parser.args.ignore_stdin = False
|
||||||
self.parser.env = TestEnvironment()
|
self.parser.env = MockEnvironment()
|
||||||
self.parser._guess_method()
|
self.parser._guess_method()
|
||||||
|
|
||||||
assert self.parser.args.method == 'POST'
|
assert self.parser.args.method == 'POST'
|
||||||
assert self.parser.args.url == 'http://example.com/'
|
assert self.parser.args.url == 'http://example.com/'
|
||||||
assert self.parser.args.items == [
|
assert self.parser.args.request_items == [
|
||||||
KeyValue(key='data',
|
KeyValueArg(key='data',
|
||||||
value='field',
|
value='field',
|
||||||
sep='=',
|
sep='=',
|
||||||
orig='data=field')
|
orig='data=field')
|
||||||
]
|
]
|
||||||
|
|
||||||
def test_guess_when_method_set_but_invalid_and_header_field(self):
|
def test_guess_when_method_set_but_invalid_and_header_field(self):
|
||||||
self.parser.args = argparse.Namespace()
|
self.parser.args = argparse.Namespace()
|
||||||
self.parser.args.method = 'http://example.com/'
|
self.parser.args.method = 'http://example.com/'
|
||||||
self.parser.args.url = 'test:header'
|
self.parser.args.url = 'test:header'
|
||||||
self.parser.args.items = []
|
self.parser.args.request_items = []
|
||||||
self.parser.args.ignore_stdin = False
|
self.parser.args.ignore_stdin = False
|
||||||
|
self.parser.env = MockEnvironment()
|
||||||
self.parser.env = TestEnvironment()
|
|
||||||
|
|
||||||
self.parser._guess_method()
|
self.parser._guess_method()
|
||||||
|
|
||||||
assert self.parser.args.method == 'GET'
|
assert self.parser.args.method == 'GET'
|
||||||
assert self.parser.args.url == 'http://example.com/'
|
assert self.parser.args.url == 'http://example.com/'
|
||||||
assert self.parser.args.items, [
|
assert self.parser.args.request_items, [
|
||||||
KeyValue(key='test',
|
KeyValueArg(key='test',
|
||||||
value='header',
|
value='header',
|
||||||
sep=':',
|
sep=':',
|
||||||
orig='test:header')
|
orig='test:header')
|
||||||
]
|
]
|
||||||
|
|
||||||
def test_guess_when_method_set_but_invalid_and_item_exists(self):
|
def test_guess_when_method_set_but_invalid_and_item_exists(self):
|
||||||
self.parser.args = argparse.Namespace()
|
self.parser.args = argparse.Namespace()
|
||||||
self.parser.args.method = 'http://example.com/'
|
self.parser.args.method = 'http://example.com/'
|
||||||
self.parser.args.url = 'new_item=a'
|
self.parser.args.url = 'new_item=a'
|
||||||
self.parser.args.items = [
|
self.parser.args.request_items = [
|
||||||
KeyValue(
|
KeyValueArg(
|
||||||
key='old_item', value='b', sep='=', orig='old_item=b')
|
key='old_item', value='b', sep='=', orig='old_item=b')
|
||||||
]
|
]
|
||||||
self.parser.args.ignore_stdin = False
|
self.parser.args.ignore_stdin = False
|
||||||
|
self.parser.env = MockEnvironment()
|
||||||
self.parser.env = TestEnvironment()
|
|
||||||
|
|
||||||
self.parser._guess_method()
|
self.parser._guess_method()
|
||||||
|
assert self.parser.args.request_items, [
|
||||||
assert self.parser.args.items, [
|
KeyValueArg(key='new_item', value='a', sep='=', orig='new_item=a'),
|
||||||
KeyValue(key='new_item', value='a', sep='=', orig='new_item=a'),
|
KeyValueArg(
|
||||||
KeyValue(
|
|
||||||
key='old_item', value='b', sep='=', orig='old_item=b'),
|
key='old_item', value='b', sep='=', orig='old_item=b'),
|
||||||
]
|
]
|
||||||
|
|
||||||
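What the _guess_method() cases above exercise: when the positional method slot actually holds a URL, everything shifts one place to the right, and the verb is inferred from the stray argument, POST for a data item and GET otherwise. A rough, self-contained sketch of that rule (a hypothetical helper, not the argparser's real code, and the data-vs-header test is simplified):

def guess_method(method, url, request_items):
    if method is None:
        return 'GET', url, request_items
    if not method.isalpha():  # the "method" is really the URL
        item = url            # ...and the "URL" is really a request item
        verb = 'POST' if '=' in item and ':' not in item.split('=', 1)[0] else 'GET'
        return verb, method, [item] + request_items
    return method, url, request_items

assert guess_method('http://example.com/', 'data=field', []) == \
    ('POST', 'http://example.com/', ['data=field'])
assert guess_method('http://example.com/', 'test:header', [])[0] == 'GET'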
@@ -299,17 +303,17 @@ class TestNoOptions:
|
|
||||||
def test_invalid_no_options(self, httpbin):
|
def test_invalid_no_options(self, httpbin):
|
||||||
r = http('--no-war', 'GET', httpbin.url + '/get',
|
r = http('--no-war', 'GET', httpbin.url + '/get',
|
||||||
error_exit_ok=True)
|
tolerate_error_exit_status=True)
|
||||||
assert r.exit_status == 1
|
assert r.exit_status == ExitStatus.ERROR
|
||||||
assert 'unrecognized arguments: --no-war' in r.stderr
|
assert 'unrecognized arguments: --no-war' in r.stderr
|
||||||
assert 'GET /get HTTP/1.1' not in r
|
assert 'GET /get HTTP/1.1' not in r
|
||||||
|
|
||||||
|
|
||||||
class TestIgnoreStdin:
|
class TestStdin:
|
||||||
|
|
||||||
def test_ignore_stdin(self, httpbin):
|
def test_ignore_stdin(self, httpbin):
|
||||||
with open(FILE_PATH) as f:
|
with open(FILE_PATH) as f:
|
||||||
env = TestEnvironment(stdin=f, stdin_isatty=False)
|
env = MockEnvironment(stdin=f, stdin_isatty=False)
|
||||||
r = http('--ignore-stdin', '--verbose', httpbin.url + '/get',
|
r = http('--ignore-stdin', '--verbose', httpbin.url + '/get',
|
||||||
env=env)
|
env=env)
|
||||||
assert HTTP_OK in r
|
assert HTTP_OK in r
|
||||||
@@ -318,15 +322,33 @@ class TestIgnoreStdin:
|
|
||||||
def test_ignore_stdin_cannot_prompt_password(self, httpbin):
|
def test_ignore_stdin_cannot_prompt_password(self, httpbin):
|
||||||
r = http('--ignore-stdin', '--auth=no-password', httpbin.url + '/get',
|
r = http('--ignore-stdin', '--auth=no-password', httpbin.url + '/get',
|
||||||
error_exit_ok=True)
|
tolerate_error_exit_status=True)
|
||||||
assert r.exit_status == ExitStatus.ERROR
|
assert r.exit_status == ExitStatus.ERROR
|
||||||
assert 'because --ignore-stdin' in r.stderr
|
assert 'because --ignore-stdin' in r.stderr
|
||||||
|
|
||||||
|
def test_stdin_closed(self, httpbin):
|
||||||
|
r = http(httpbin + '/get', env=MockEnvironment(stdin=None))
|
||||||
|
assert HTTP_OK in r
|
||||||
|
|
||||||
|
|
||||||
class TestSchemes:
|
class TestSchemes:
|
||||||
|
|
||||||
def test_custom_scheme(self):
|
def test_invalid_custom_scheme(self):
|
||||||
# InvalidSchema is expected because HTTPie
|
# InvalidSchema is expected because HTTPie
|
||||||
# shouldn't touch a formally valid scheme.
|
# shouldn't touch a formally valid scheme.
|
||||||
with pytest.raises(InvalidSchema):
|
with pytest.raises(InvalidSchema):
|
||||||
http('foo+bar-BAZ.123://bah')
|
http('foo+bar-BAZ.123://bah')
|
||||||
|
|
||||||
|
def test_invalid_scheme_via_via_default_scheme(self):
|
||||||
|
# InvalidSchema is expected because HTTPie
|
||||||
|
# shouldn't touch a formally valid scheme.
|
||||||
|
with pytest.raises(InvalidSchema):
|
||||||
|
http('bah', '--default=scheme=foo+bar-BAZ.123')
|
||||||
|
|
||||||
|
def test_default_scheme_option(self, httpbin_secure):
|
||||||
|
url = '{0}:{1}'.format(httpbin_secure.host, httpbin_secure.port)
|
||||||
|
assert HTTP_OK in http(url, '--default-scheme=https')
|
||||||
|
|
||||||
|
def test_scheme_when_invoked_as_https(self, httpbin_secure):
|
||||||
|
url = '{0}:{1}'.format(httpbin_secure.host, httpbin_secure.port)
|
||||||
|
assert HTTP_OK in http(url, program_name='https')
|
||||||
|
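The TestSchemes additions above verify one behaviour: a default scheme is prepended only when the URL has none, and anything that already looks like a scheme, however exotic, is passed straight through to requests (which then raises InvalidSchema). A minimal sketch of that rule (illustrative; not the actual option handling):

def apply_default_scheme(url: str, default_scheme: str = 'http') -> str:
    # Leave URLs that already carry a scheme alone, even odd ones
    # like 'foo+bar-BAZ.123://bah'; otherwise prepend the default.
    if '://' in url:
        return url
    return f'{default_scheme}://{url}'

assert apply_default_scheme('example.org:443', 'https') == 'https://example.org:443'
assert apply_default_scheme('foo+bar-BAZ.123://bah') == 'foo+bar-BAZ.123://bah'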
tests/test_compress.py (new file, +110 lines)
@@ -0,0 +1,110 @@
"""
We test against httpbin which doesn't return the request data in a
consistent way:

1. Non-form requests: the `data` field contains base64 encoded version of
   our zlib-encoded request data.

2. Form requests: `form` contains a messed up version of the data.

"""
import base64
import zlib

from fixtures import FILE_PATH, FILE_CONTENT
from utils import http, HTTP_OK, MockEnvironment


def assert_decompressed_equal(base64_compressed_data, expected_str):
    compressed_data = base64.b64decode(
        base64_compressed_data.split(',', 1)[1])
    data = zlib.decompress(compressed_data)
    actual_str = data.decode()

    # FIXME: contains a trailing linebreak with an uploaded file
    actual_str = actual_str.rstrip()

    assert actual_str == expected_str


def test_compress_skip_negative_ratio(httpbin_both):
    r = http(
        '--compress',
        httpbin_both + '/post',
        'foo=bar',
    )
    assert HTTP_OK in r
    assert 'Content-Encoding' not in r.json['headers']
    assert r.json['json'] == {'foo': 'bar'}


def test_compress_force_with_negative_ratio(httpbin_both):
    r = http(
        '--compress',
        '--compress',
        httpbin_both + '/post',
        'foo=bar',
    )
    assert HTTP_OK in r
    assert r.json['headers']['Content-Encoding'] == 'deflate'
    assert_decompressed_equal(r.json['data'], '{"foo": "bar"}')


def test_compress_json(httpbin_both):
    r = http(
        '--compress',
        '--compress',
        httpbin_both + '/post',
        'foo=bar',
    )
    assert HTTP_OK in r
    assert r.json['headers']['Content-Encoding'] == 'deflate'
    assert_decompressed_equal(r.json['data'], '{"foo": "bar"}')
    assert r.json['json'] is None


def test_compress_form(httpbin_both):
    r = http(
        '--form',
        '--compress',
        '--compress',
        httpbin_both + '/post',
        'foo=bar',
    )
    assert HTTP_OK in r
    assert r.json['headers']['Content-Encoding'] == 'deflate'
    assert r.json['data'] == ""
    assert '"foo": "bar"' not in r


def test_compress_stdin(httpbin_both):
    with open(FILE_PATH) as f:
        env = MockEnvironment(stdin=f, stdin_isatty=False)
        r = http(
            '--compress',
            '--compress',
            'PATCH',
            httpbin_both + '/patch',
            env=env,
        )
    assert HTTP_OK in r
    assert r.json['headers']['Content-Encoding'] == 'deflate'
    assert_decompressed_equal(r.json['data'], FILE_CONTENT.strip())
    assert not r.json['json']


def test_compress_file(httpbin_both):
    r = http(
        '--form',
        '--compress',
        '--compress',
        'PUT',
        httpbin_both + '/put',
        'file@' + FILE_PATH,
    )
    assert HTTP_OK in r
    assert r.json['headers']['Content-Encoding'] == 'deflate'
    assert r.json['headers']['Content-Type'].startswith(
        'multipart/form-data; boundary=')
    assert r.json['files'] == {}
    assert FILE_CONTENT not in r
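For context on assert_decompressed_equal() above: with --compress HTTPie sends the body deflate-compressed, and httpbin echoes it back as a base64 data URI, so the helper strips the prefix, base64-decodes, and zlib-decompresses. The round trip looks roughly like this (the exact data-URI media type used by httpbin is an assumption here):

import base64
import zlib

body = b'{"foo": "bar"}'
compressed = zlib.compress(body)  # what --compress puts on the wire (deflate)
echoed = 'data:application/octet-stream;base64,' + base64.b64encode(compressed).decode()

# What the helper above undoes:
roundtripped = zlib.decompress(base64.b64decode(echoed.split(',', 1)[1]))
assert roundtripped == body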
@@ -1,33 +1,50 @@
from utils import TestEnvironment, http
|
import pytest
|
||||||
|
|
||||||
|
from httpie.compat import is_windows
|
||||||
|
from httpie.config import Config
|
||||||
|
from utils import HTTP_OK, MockEnvironment, http
|
||||||
|
|
||||||
|
|
||||||
def test_default_options(httpbin):
|
def test_default_options(httpbin):
|
||||||
env = TestEnvironment()
|
env = MockEnvironment()
|
||||||
env.config['default_options'] = ['--form']
|
env.config['default_options'] = ['--form']
|
||||||
env.config.save()
|
env.config.save()
|
||||||
r = http(httpbin.url + '/post', 'foo=bar', env=env)
|
r = http(httpbin.url + '/post', 'foo=bar', env=env)
|
||||||
assert r.json['form'] == {"foo": "bar"}
|
assert r.json['form'] == {
|
||||||
|
"foo": "bar"
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def test_config_file_not_valid(httpbin):
|
||||||
|
env = MockEnvironment()
|
||||||
|
env.create_temp_config_dir()
|
||||||
|
with (env.config_dir / Config.FILENAME).open('w') as f:
|
||||||
|
f.write('{invalid json}')
|
||||||
|
r = http(httpbin + '/get', env=env)
|
||||||
|
assert HTTP_OK in r
|
||||||
|
assert 'http: warning' in r.stderr
|
||||||
|
assert 'invalid config file' in r.stderr
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.skipif(is_windows, reason='cannot chmod 000 on Windows')
|
||||||
|
def test_config_file_inaccessible(httpbin):
|
||||||
|
env = MockEnvironment()
|
||||||
|
env.create_temp_config_dir()
|
||||||
|
config_path = env.config_dir / Config.FILENAME
|
||||||
|
assert not config_path.exists()
|
||||||
|
config_path.touch(0o000)
|
||||||
|
assert config_path.exists()
|
||||||
|
r = http(httpbin + '/get', env=env)
|
||||||
|
assert HTTP_OK in r
|
||||||
|
assert 'http: warning' in r.stderr
|
||||||
|
assert 'cannot read config file' in r.stderr
|
||||||
|
|
||||||
|
|
||||||
def test_default_options_overwrite(httpbin):
|
def test_default_options_overwrite(httpbin):
|
||||||
env = TestEnvironment()
|
env = MockEnvironment()
|
||||||
env.config['default_options'] = ['--form']
|
env.config['default_options'] = ['--form']
|
||||||
env.config.save()
|
env.config.save()
|
||||||
r = http('--json', httpbin.url + '/post', 'foo=bar', env=env)
|
r = http('--json', httpbin.url + '/post', 'foo=bar', env=env)
|
||||||
assert r.json['json'] == {"foo": "bar"}
|
assert r.json['json'] == {
|
||||||
|
"foo": "bar"
|
||||||
|
}
|
||||||
def test_migrate_implicit_content_type():
|
|
||||||
config = TestEnvironment().config
|
|
||||||
|
|
||||||
config['implicit_content_type'] = 'json'
|
|
||||||
config.save()
|
|
||||||
config.load()
|
|
||||||
assert 'implicit_content_type' not in config
|
|
||||||
assert not config['default_options']
|
|
||||||
|
|
||||||
config['implicit_content_type'] = 'form'
|
|
||||||
config.save()
|
|
||||||
config.load()
|
|
||||||
assert 'implicit_content_type' not in config
|
|
||||||
assert config['default_options'] == ['--form']
|
|
||||||
|
@@ -2,10 +2,26 @@
Tests for the provided defaults regarding HTTP method, and --json vs. --form.
|
Tests for the provided defaults regarding HTTP method, and --json vs. --form.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
from utils import TestEnvironment, http, HTTP_OK
|
from httpie.client import JSON_ACCEPT
|
||||||
|
from utils import MockEnvironment, http, HTTP_OK
|
||||||
from fixtures import FILE_PATH
|
from fixtures import FILE_PATH
|
||||||
|
|
||||||
|
|
||||||
|
def test_default_headers_case_insensitive(httpbin):
|
||||||
|
"""
|
||||||
|
<https://github.com/jakubroztocil/httpie/issues/644>
|
||||||
|
"""
|
||||||
|
r = http(
|
||||||
|
'--debug',
|
||||||
|
'--print=H',
|
||||||
|
httpbin.url + '/post',
|
||||||
|
'CONTENT-TYPE:application/json-patch+json',
|
||||||
|
'a=b',
|
||||||
|
)
|
||||||
|
assert 'CONTENT-TYPE: application/json-patch+json' in r
|
||||||
|
assert 'Content-Type' not in r
|
||||||
|
|
||||||
|
|
||||||
class TestImplicitHTTPMethod:
|
class TestImplicitHTTPMethod:
|
||||||
def test_implicit_GET(self, httpbin):
|
def test_implicit_GET(self, httpbin):
|
||||||
r = http(httpbin.url + '/get')
|
r = http(httpbin.url + '/get')
|
||||||
@@ -28,7 +44,7 @@ class TestImplicitHTTPMethod:
|
|
||||||
def test_implicit_POST_stdin(self, httpbin):
|
def test_implicit_POST_stdin(self, httpbin):
|
||||||
with open(FILE_PATH) as f:
|
with open(FILE_PATH) as f:
|
||||||
env = TestEnvironment(stdin_isatty=False, stdin=f)
|
env = MockEnvironment(stdin_isatty=False, stdin=f)
|
||||||
r = http('--form', httpbin.url + '/post', env=env)
|
r = http('--form', httpbin.url + '/post', env=env)
|
||||||
assert HTTP_OK in r
|
assert HTTP_OK in r
|
||||||
|
|
||||||
@@ -42,7 +58,7 @@ class TestAutoContentTypeAndAcceptHeaders:
"""
|
"""
|
||||||
|
|
||||||
def test_GET_no_data_no_auto_headers(self, httpbin):
|
def test_GET_no_data_no_auto_headers(self, httpbin):
|
||||||
# https://github.com/jkbrzt/httpie/issues/62
|
# https://github.com/jakubroztocil/httpie/issues/62
|
||||||
r = http('GET', httpbin.url + '/headers')
|
r = http('GET', httpbin.url + '/headers')
|
||||||
assert HTTP_OK in r
|
assert HTTP_OK in r
|
||||||
assert r.json['headers']['Accept'] == '*/*'
|
assert r.json['headers']['Accept'] == '*/*'
|
||||||
@@ -58,22 +74,22 @@ class TestAutoContentTypeAndAcceptHeaders:
def test_POST_with_data_auto_JSON_headers(self, httpbin):
|
def test_POST_with_data_auto_JSON_headers(self, httpbin):
|
||||||
r = http('POST', httpbin.url + '/post', 'a=b')
|
r = http('POST', httpbin.url + '/post', 'a=b')
|
||||||
assert HTTP_OK in r
|
assert HTTP_OK in r
|
||||||
assert '"Accept": "application/json"' in r
|
assert r.json['headers']['Accept'] == JSON_ACCEPT
|
||||||
assert '"Content-Type": "application/json' in r
|
assert r.json['headers']['Content-Type'] == 'application/json'
|
||||||
|
|
||||||
def test_GET_with_data_auto_JSON_headers(self, httpbin):
|
def test_GET_with_data_auto_JSON_headers(self, httpbin):
|
||||||
# JSON headers should automatically be set also for GET with data.
|
# JSON headers should automatically be set also for GET with data.
|
||||||
r = http('POST', httpbin.url + '/post', 'a=b')
|
r = http('POST', httpbin.url + '/post', 'a=b')
|
||||||
assert HTTP_OK in r
|
assert HTTP_OK in r
|
||||||
assert '"Accept": "application/json"' in r, r
|
assert r.json['headers']['Accept'] == JSON_ACCEPT
|
||||||
assert '"Content-Type": "application/json' in r
|
assert r.json['headers']['Content-Type'] == 'application/json'
|
||||||
|
|
||||||
def test_POST_explicit_JSON_auto_JSON_accept(self, httpbin):
|
def test_POST_explicit_JSON_auto_JSON_accept(self, httpbin):
|
||||||
r = http('--json', 'POST', httpbin.url + '/post')
|
r = http('--json', 'POST', httpbin.url + '/post')
|
||||||
assert HTTP_OK in r
|
assert HTTP_OK in r
|
||||||
assert r.json['headers']['Accept'] == 'application/json'
|
assert r.json['headers']['Accept'] == JSON_ACCEPT
|
||||||
# Make sure Content-Type gets set even with no data.
|
# Make sure Content-Type gets set even with no data.
|
||||||
# https://github.com/jkbrzt/httpie/issues/137
|
# https://github.com/jakubroztocil/httpie/issues/137
|
||||||
assert 'application/json' in r.json['headers']['Content-Type']
|
assert 'application/json' in r.json['headers']['Content-Type']
|
||||||
|
|
||||||
def test_GET_explicit_JSON_explicit_headers(self, httpbin):
|
def test_GET_explicit_JSON_explicit_headers(self, httpbin):
|
||||||
@@ -96,11 +112,11 @@ class TestAutoContentTypeAndAcceptHeaders:
assert '"Content-Type": "application/xml"' in r
|
assert '"Content-Type": "application/xml"' in r
|
||||||
|
|
||||||
def test_print_only_body_when_stdout_redirected_by_default(self, httpbin):
|
def test_print_only_body_when_stdout_redirected_by_default(self, httpbin):
|
||||||
env = TestEnvironment(stdin_isatty=True, stdout_isatty=False)
|
env = MockEnvironment(stdin_isatty=True, stdout_isatty=False)
|
||||||
r = http('GET', httpbin.url + '/get', env=env)
|
r = http('GET', httpbin.url + '/get', env=env)
|
||||||
assert 'HTTP/' not in r
|
assert 'HTTP/' not in r
|
||||||
|
|
||||||
def test_print_overridable_when_stdout_redirected(self, httpbin):
|
def test_print_overridable_when_stdout_redirected(self, httpbin):
|
||||||
env = TestEnvironment(stdin_isatty=True, stdout_isatty=False)
|
env = MockEnvironment(stdin_isatty=True, stdout_isatty=False)
|
||||||
r = http('--print=h', 'GET', httpbin.url + '/get', env=env)
|
r = http('--print=h', 'GET', httpbin.url + '/get', env=env)
|
||||||
assert HTTP_OK in r
|
assert HTTP_OK in r
|
||||||
|
@@ -1,15 +1,23 @@
import os
|
import os
|
||||||
import fnmatch
|
|
||||||
import subprocess
|
import subprocess
|
||||||
|
from glob import glob
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from utils import TESTS_ROOT
|
from utils import TESTS_ROOT
|
||||||
|
|
||||||
|
|
||||||
|
SOURCE_DIRECTORIES = [
|
||||||
|
'extras',
|
||||||
|
'httpie',
|
||||||
|
'tests',
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
def has_docutils():
|
def has_docutils():
|
||||||
try:
|
try:
|
||||||
# noinspection PyUnresolvedReferences
|
# noinspection PyUnresolvedReferences,PyPackageRequirements
|
||||||
import docutils
|
import docutils
|
||||||
return True
|
return True
|
||||||
except ImportError:
|
except ImportError:
|
||||||
@@ -17,23 +25,41 @@ def has_docutils():
|
|
||||||
|
|
||||||
def rst_filenames():
|
def rst_filenames():
|
||||||
for root, dirnames, filenames in os.walk(os.path.dirname(TESTS_ROOT)):
|
cwd = os.getcwd()
|
||||||
if '.tox' not in root:
|
os.chdir(TESTS_ROOT.parent)
|
||||||
for filename in fnmatch.filter(filenames, '*.rst'):
|
try:
|
||||||
yield os.path.join(root, filename)
|
yield from glob('*.rst')
|
||||||
|
for directory in SOURCE_DIRECTORIES:
|
||||||
|
yield from glob(f'{directory}/**/*.rst', recursive=True)
|
||||||
|
finally:
|
||||||
|
os.chdir(cwd)
|
||||||
|
|
||||||
|
|
||||||
filenames = list(rst_filenames())
|
filenames = list(sorted(rst_filenames()))
|
||||||
assert filenames
|
assert filenames
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.skipif(not has_docutils(), reason='docutils not installed')
|
# HACK: hardcoded paths, venv should be irrelevant, etc.
|
||||||
|
# TODO: replaces the process with Python code
|
||||||
|
VENV_BIN = Path(__file__).parent.parent / 'venv/bin'
|
||||||
|
VENV_PYTHON = VENV_BIN / 'python'
|
||||||
|
VENV_RST2PSEUDOXML = VENV_BIN / 'rst2pseudoxml.py'
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.skipif(not os.path.exists(VENV_RST2PSEUDOXML), reason='docutils not installed')
|
||||||
@pytest.mark.parametrize('filename', filenames)
|
@pytest.mark.parametrize('filename', filenames)
|
||||||
def test_rst_file_syntax(filename):
|
def test_rst_file_syntax(filename):
|
||||||
p = subprocess.Popen(
|
p = subprocess.Popen(
|
||||||
['rst2pseudoxml.py', '--report=1', '--exit-status=1', filename],
|
[
|
||||||
|
VENV_PYTHON,
|
||||||
|
VENV_RST2PSEUDOXML,
|
||||||
|
'--report=1',
|
||||||
|
'--exit-status=1',
|
||||||
|
filename,
|
||||||
|
],
|
||||||
stderr=subprocess.PIPE,
|
stderr=subprocess.PIPE,
|
||||||
stdout=subprocess.PIPE
|
stdout=subprocess.PIPE,
|
||||||
|
shell=True,
|
||||||
)
|
)
|
||||||
err = p.communicate()[1]
|
err = p.communicate()[1]
|
||||||
assert p.returncode == 0, err.decode('utf8')
|
assert p.returncode == 0, err.decode('utf8')
|
||||||
|
@@ -1,19 +1,20 @@
import os
|
import os
|
||||||
|
import tempfile
|
||||||
import time
|
import time
|
||||||
|
from urllib.request import urlopen
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
import mock
|
import mock
|
||||||
from requests.structures import CaseInsensitiveDict
|
from requests.structures import CaseInsensitiveDict
|
||||||
|
|
||||||
from httpie.compat import urlopen
|
|
||||||
from httpie.downloads import (
|
from httpie.downloads import (
|
||||||
parse_content_range, filename_from_content_disposition, filename_from_url,
|
parse_content_range, filename_from_content_disposition, filename_from_url,
|
||||||
get_unique_filename, ContentRangeError, Downloader,
|
get_unique_filename, ContentRangeError, Downloader,
|
||||||
)
|
)
|
||||||
from utils import http, TestEnvironment
|
from utils import http, MockEnvironment
|
||||||
|
|
||||||
|
|
||||||
class Response(object):
|
class Response:
|
||||||
# noinspection PyDefaultArgument
|
# noinspection PyDefaultArgument
|
||||||
def __init__(self, url, headers={}, status_code=200):
|
def __init__(self, url, headers={}, status_code=200):
|
||||||
self.url = url
|
self.url = url
|
||||||
@@ -22,6 +23,7 @@ class Response(object):
|
|
||||||
|
|
||||||
class TestDownloadUtils:
|
class TestDownloadUtils:
|
||||||
|
|
||||||
def test_Content_Range_parsing(self):
|
def test_Content_Range_parsing(self):
|
||||||
parse = parse_content_range
|
parse = parse_content_range
|
||||||
|
|
||||||
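test_Content_Range_parsing drives parse_content_range(), which validates headers of the form 'bytes <first>-<last>/<total>' (RFC 7233) against the number of bytes already downloaded. A stand-alone sketch of such a check, not HTTPie's implementation: ValueError stands in for the real ContentRangeError and the signature is assumed:

import re

def parse_total_size(content_range, resumed_from):
    # Expect 'bytes <first>-<last>/<total>' and require <first> to match
    # how many bytes we already have on disk.
    match = re.match(r'bytes (\d+)-(\d+)/(\d+)$', content_range or '')
    if not match:
        raise ValueError(f'invalid Content-Range: {content_range!r}')
    first, last, total = map(int, match.groups())
    if first != resumed_from or last >= total:
        raise ValueError(f'inconsistent Content-Range: {content_range!r}')
    return total

assert parse_total_size('bytes 100-199/200', resumed_from=100) == 200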
@@ -123,7 +125,7 @@ class TestDownloads:
def test_actual_download(self, httpbin_both, httpbin):
|
def test_actual_download(self, httpbin_both, httpbin):
|
||||||
robots_txt = '/robots.txt'
|
robots_txt = '/robots.txt'
|
||||||
body = urlopen(httpbin + robots_txt).read().decode()
|
body = urlopen(httpbin + robots_txt).read().decode()
|
||||||
env = TestEnvironment(stdin_isatty=True, stdout_isatty=False)
|
env = MockEnvironment(stdin_isatty=True, stdout_isatty=False)
|
||||||
r = http('--download', httpbin_both.url + robots_txt, env=env)
|
r = http('--download', httpbin_both.url + robots_txt, env=env)
|
||||||
assert 'Downloading' in r.stderr
|
assert 'Downloading' in r.stderr
|
||||||
assert '[K' in r.stderr
|
assert '[K' in r.stderr
|
||||||
@@ -131,35 +133,59 @@ class TestDownloads:
assert body == r
|
assert body == r
|
||||||
|
|
||||||
def test_download_with_Content_Length(self, httpbin_both):
|
def test_download_with_Content_Length(self, httpbin_both):
|
||||||
devnull = open(os.devnull, 'w')
|
with open(os.devnull, 'w') as devnull:
|
||||||
downloader = Downloader(output_file=devnull, progress_file=devnull)
|
downloader = Downloader(output_file=devnull, progress_file=devnull)
|
||||||
downloader.start(Response(
|
downloader.start(
|
||||||
url=httpbin_both.url + '/',
|
initial_url='/',
|
||||||
headers={'Content-Length': 10}
|
final_response=Response(
|
||||||
))
|
url=httpbin_both.url + '/',
|
||||||
time.sleep(1.1)
|
headers={'Content-Length': 10}
|
||||||
downloader.chunk_downloaded(b'12345')
|
)
|
||||||
time.sleep(1.1)
|
)
|
||||||
downloader.chunk_downloaded(b'12345')
|
time.sleep(1.1)
|
||||||
downloader.finish()
|
downloader.chunk_downloaded(b'12345')
|
||||||
assert not downloader.interrupted
|
time.sleep(1.1)
|
||||||
|
downloader.chunk_downloaded(b'12345')
|
||||||
|
downloader.finish()
|
||||||
|
assert not downloader.interrupted
|
||||||
|
downloader._progress_reporter.join()
|
||||||
|
|
||||||
def test_download_no_Content_Length(self, httpbin_both):
|
def test_download_no_Content_Length(self, httpbin_both):
|
||||||
devnull = open(os.devnull, 'w')
|
with open(os.devnull, 'w') as devnull:
|
||||||
downloader = Downloader(output_file=devnull, progress_file=devnull)
|
downloader = Downloader(output_file=devnull, progress_file=devnull)
|
||||||
downloader.start(Response(url=httpbin_both.url + '/'))
|
downloader.start(
|
||||||
time.sleep(1.1)
|
final_response=Response(url=httpbin_both.url + '/'),
|
||||||
downloader.chunk_downloaded(b'12345')
|
initial_url='/'
|
||||||
downloader.finish()
|
)
|
||||||
assert not downloader.interrupted
|
time.sleep(1.1)
|
||||||
|
downloader.chunk_downloaded(b'12345')
|
||||||
|
downloader.finish()
|
||||||
|
assert not downloader.interrupted
|
||||||
|
downloader._progress_reporter.join()
|
||||||
|
|
||||||
def test_download_interrupted(self, httpbin_both):
|
def test_download_interrupted(self, httpbin_both):
|
||||||
devnull = open(os.devnull, 'w')
|
with open(os.devnull, 'w') as devnull:
|
||||||
downloader = Downloader(output_file=devnull, progress_file=devnull)
|
downloader = Downloader(output_file=devnull, progress_file=devnull)
|
||||||
downloader.start(Response(
|
downloader.start(
|
||||||
url=httpbin_both.url + '/',
|
final_response=Response(
|
||||||
headers={'Content-Length': 5}
|
url=httpbin_both.url + '/',
|
||||||
))
|
headers={'Content-Length': 5}
|
||||||
downloader.chunk_downloaded(b'1234')
|
),
|
||||||
downloader.finish()
|
initial_url='/'
|
||||||
assert downloader.interrupted
|
)
|
||||||
|
downloader.chunk_downloaded(b'1234')
|
||||||
|
downloader.finish()
|
||||||
|
assert downloader.interrupted
|
||||||
|
downloader._progress_reporter.join()
|
||||||
|
|
||||||
|
def test_download_with_redirect_original_url_used_for_filename(self, httpbin):
|
||||||
|
# Redirect from `/redirect/1` to `/get`.
|
||||||
|
expected_filename = '1.json'
|
||||||
|
orig_cwd = os.getcwd()
|
||||||
|
os.chdir(tempfile.mkdtemp(prefix='httpie_download_test_'))
|
||||||
|
try:
|
||||||
|
assert os.listdir('.') == []
|
||||||
|
http('--download', httpbin.url + '/redirect/1')
|
||||||
|
assert os.listdir('.') == [expected_filename]
|
||||||
|
finally:
|
||||||
|
os.chdir(orig_cwd)
|
||||||
|
@@ -1,49 +1,41 @@
import mock
|
import mock
|
||||||
from pytest import raises
|
from pytest import raises
|
||||||
from requests import Request, Timeout
|
from requests import Request
|
||||||
from requests.exceptions import ConnectionError
|
from requests.exceptions import ConnectionError
|
||||||
|
|
||||||
from httpie import ExitStatus
|
from httpie.status import ExitStatus
|
||||||
from httpie.core import main
|
from utils import HTTP_OK, http
|
||||||
|
|
||||||
error_msg = None
|
|
||||||
|
|
||||||
|
|
||||||
@mock.patch('httpie.core.get_response')
|
@mock.patch('httpie.core.program')
|
||||||
def test_error(get_response):
|
def test_error(program):
|
||||||
def error(msg, *args, **kwargs):
|
|
||||||
global error_msg
|
|
||||||
error_msg = msg % args
|
|
||||||
|
|
||||||
exc = ConnectionError('Connection aborted')
|
exc = ConnectionError('Connection aborted')
|
||||||
exc.request = Request(method='GET', url='http://www.google.com')
|
exc.request = Request(method='GET', url='http://www.google.com')
|
||||||
get_response.side_effect = exc
|
program.side_effect = exc
|
||||||
ret = main(['--ignore-stdin', 'www.google.com'], custom_log_error=error)
|
r = http('www.google.com', tolerate_error_exit_status=True)
|
||||||
assert ret == ExitStatus.ERROR
|
assert r.exit_status == ExitStatus.ERROR
|
||||||
assert error_msg == (
|
error_msg = (
|
||||||
'ConnectionError: '
|
'ConnectionError: '
|
||||||
'Connection aborted while doing GET request to URL: '
|
'Connection aborted while doing a GET request to URL: '
|
||||||
'http://www.google.com')
|
'http://www.google.com'
|
||||||
|
)
|
||||||
|
assert error_msg in r.stderr
|
||||||
|
|
||||||
|
|
||||||
@mock.patch('httpie.core.get_response')
|
@mock.patch('httpie.core.program')
|
||||||
def test_error_traceback(get_response):
|
def test_error_traceback(program):
|
||||||
exc = ConnectionError('Connection aborted')
|
exc = ConnectionError('Connection aborted')
|
||||||
exc.request = Request(method='GET', url='http://www.google.com')
|
exc.request = Request(method='GET', url='http://www.google.com')
|
||||||
get_response.side_effect = exc
|
program.side_effect = exc
|
||||||
with raises(ConnectionError):
|
with raises(ConnectionError):
|
||||||
main(['--ignore-stdin', '--traceback', 'www.google.com'])
|
http('--traceback', 'www.google.com')
|
||||||
|
|
||||||
|
|
||||||
@mock.patch('httpie.core.get_response')
|
def test_max_headers_limit(httpbin_both):
|
||||||
def test_timeout(get_response):
|
with raises(ConnectionError) as e:
|
||||||
def error(msg, *args, **kwargs):
|
http('--max-headers=1', httpbin_both + '/get')
|
||||||
global error_msg
|
assert 'got more than 1 headers' in str(e.value)
|
||||||
error_msg = msg % args
|
|
||||||
|
|
||||||
exc = Timeout('Request timed out')
|
|
||||||
exc.request = Request(method='GET', url='http://www.google.com')
|
def test_max_headers_no_limit(httpbin_both):
|
||||||
get_response.side_effect = exc
|
assert HTTP_OK in http('--max-headers=0', httpbin_both + '/get')
|
||||||
ret = main(['--ignore-stdin', 'www.google.com'], custom_log_error=error)
|
|
||||||
assert ret == ExitStatus.ERROR_TIMEOUT
|
|
||||||
assert error_msg == 'Request timed out (30s).'
|
|
||||||
|
@@ -1,33 +1,49 @@
from httpie import ExitStatus
|
import mock
|
||||||
from utils import TestEnvironment, http, HTTP_OK
|
|
||||||
|
from httpie.status import ExitStatus
|
||||||
|
from utils import MockEnvironment, http, HTTP_OK
|
||||||
|
|
||||||
|
|
||||||
|
def test_keyboard_interrupt_during_arg_parsing_exit_status(httpbin):
|
||||||
|
with mock.patch('httpie.cli.definition.parser.parse_args',
|
||||||
|
side_effect=KeyboardInterrupt()):
|
||||||
|
r = http('GET', httpbin.url + '/get', tolerate_error_exit_status=True)
|
||||||
|
assert r.exit_status == ExitStatus.ERROR_CTRL_C
|
||||||
|
|
||||||
|
|
||||||
|
def test_keyboard_interrupt_in_program_exit_status(httpbin):
|
||||||
|
with mock.patch('httpie.core.program',
|
||||||
|
side_effect=KeyboardInterrupt()):
|
||||||
|
r = http('GET', httpbin.url + '/get', tolerate_error_exit_status=True)
|
||||||
|
assert r.exit_status == ExitStatus.ERROR_CTRL_C
|
||||||
|
|
||||||
|
|
||||||
def test_ok_response_exits_0(httpbin):
|
def test_ok_response_exits_0(httpbin):
|
||||||
r = http('GET', httpbin.url + '/status/200')
|
r = http('GET', httpbin.url + '/get')
|
||||||
assert HTTP_OK in r
|
assert HTTP_OK in r
|
||||||
assert r.exit_status == ExitStatus.OK
|
assert r.exit_status == ExitStatus.SUCCESS
|
||||||
|
|
||||||
|
|
||||||
def test_error_response_exits_0_without_check_status(httpbin):
|
def test_error_response_exits_0_without_check_status(httpbin):
|
||||||
r = http('GET', httpbin.url + '/status/500')
|
r = http('GET', httpbin.url + '/status/500')
|
||||||
assert '500 INTERNAL SERVER ERRO' in r
|
assert '500 INTERNAL SERVER ERROR' in r
|
||||||
assert r.exit_status == ExitStatus.OK
|
assert r.exit_status == ExitStatus.SUCCESS
|
||||||
assert not r.stderr
|
assert not r.stderr
|
||||||
|
|
||||||
|
|
||||||
def test_timeout_exit_status(httpbin):
|
def test_timeout_exit_status(httpbin):
|
||||||
|
|
||||||
r = http('--timeout=0.01', 'GET', httpbin.url + '/delay/0.02',
|
r = http('--timeout=0.01', 'GET', httpbin.url + '/delay/0.5',
|
||||||
error_exit_ok=True)
|
tolerate_error_exit_status=True)
|
||||||
assert r.exit_status == ExitStatus.ERROR_TIMEOUT
|
assert r.exit_status == ExitStatus.ERROR_TIMEOUT
|
||||||
|
|
||||||
|
|
||||||
def test_3xx_check_status_exits_3_and_stderr_when_stdout_redirected(
|
def test_3xx_check_status_exits_3_and_stderr_when_stdout_redirected(
|
||||||
httpbin):
|
httpbin):
|
||||||
env = TestEnvironment(stdout_isatty=False)
|
env = MockEnvironment(stdout_isatty=False)
|
||||||
r = http('--check-status', '--headers',
|
r = http('--check-status', '--headers',
|
||||||
'GET', httpbin.url + '/status/301',
|
'GET', httpbin.url + '/status/301',
|
||||||
env=env, error_exit_ok=True)
|
env=env, tolerate_error_exit_status=True)
|
||||||
assert '301 MOVED PERMANENTLY' in r
|
assert '301 MOVED PERMANENTLY' in r
|
||||||
assert r.exit_status == ExitStatus.ERROR_HTTP_3XX
|
assert r.exit_status == ExitStatus.ERROR_HTTP_3XX
|
||||||
assert '301 moved permanently' in r.stderr.lower()
|
assert '301 moved permanently' in r.stderr.lower()
|
||||||
@@ -36,15 +52,15 @@ def test_3xx_check_status_exits_3_and_stderr_when_stdout_redirected(
def test_3xx_check_status_redirects_allowed_exits_0(httpbin):
|
def test_3xx_check_status_redirects_allowed_exits_0(httpbin):
|
||||||
r = http('--check-status', '--follow',
|
r = http('--check-status', '--follow',
|
||||||
'GET', httpbin.url + '/status/301',
|
'GET', httpbin.url + '/status/301',
|
||||||
error_exit_ok=True)
|
tolerate_error_exit_status=True)
|
||||||
# The redirect will be followed so 200 is expected.
|
# The redirect will be followed so 200 is expected.
|
||||||
assert HTTP_OK in r
|
assert HTTP_OK in r
|
||||||
assert r.exit_status == ExitStatus.OK
|
assert r.exit_status == ExitStatus.SUCCESS
|
||||||
|
|
||||||
|
|
||||||
def test_4xx_check_status_exits_4(httpbin):
|
def test_4xx_check_status_exits_4(httpbin):
|
||||||
r = http('--check-status', 'GET', httpbin.url + '/status/401',
|
r = http('--check-status', 'GET', httpbin.url + '/status/401',
|
||||||
error_exit_ok=True)
|
tolerate_error_exit_status=True)
|
||||||
assert '401 UNAUTHORIZED' in r
|
assert '401 UNAUTHORIZED' in r
|
||||||
assert r.exit_status == ExitStatus.ERROR_HTTP_4XX
|
assert r.exit_status == ExitStatus.ERROR_HTTP_4XX
|
||||||
# Also stderr should be empty since stdout isn't redirected.
|
# Also stderr should be empty since stdout isn't redirected.
|
||||||
@@ -53,6 +69,6 @@ def test_4xx_check_status_exits_4(httpbin):
|
|
||||||
def test_5xx_check_status_exits_5(httpbin):
|
def test_5xx_check_status_exits_5(httpbin):
|
||||||
r = http('--check-status', 'GET', httpbin.url + '/status/500',
|
r = http('--check-status', 'GET', httpbin.url + '/status/500',
|
||||||
error_exit_ok=True)
|
tolerate_error_exit_status=True)
|
||||||
assert '500 INTERNAL SERVER ERROR' in r
|
assert '500 INTERNAL SERVER ERROR' in r
|
||||||
assert r.exit_status == ExitStatus.ERROR_HTTP_5XX
|
assert r.exit_status == ExitStatus.ERROR_HTTP_5XX
|
||||||
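The --check-status cases above map the response class onto distinct exit codes: a redirect only counts as an error when it is not followed, while 4xx and 5xx always do. A compact sketch of that mapping using the ExitStatus names from this diff (the dispatch itself is illustrative, not HTTPie's actual code):

from httpie.status import ExitStatus

def exit_status_for(http_status: int, follow: bool = False) -> ExitStatus:
    # Mirrors the behaviour exercised by the --check-status tests above.
    if 300 <= http_status <= 399 and not follow:
        return ExitStatus.ERROR_HTTP_3XX
    if 400 <= http_status <= 499:
        return ExitStatus.ERROR_HTTP_4XX
    if 500 <= http_status <= 599:
        return ExitStatus.ERROR_HTTP_5XX
    return ExitStatus.SUCCESS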
|
@@ -1,29 +1,53 @@
"""High-level tests."""
|
"""High-level tests."""
|
||||||
|
import io
|
||||||
|
from unittest import mock
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
from utils import TestEnvironment, http, HTTP_OK
|
|
||||||
|
import httpie.__main__
|
||||||
|
from httpie.context import Environment
|
||||||
|
from httpie.status import ExitStatus
|
||||||
|
from httpie.cli.exceptions import ParseError
|
||||||
|
from utils import MockEnvironment, http, HTTP_OK
|
||||||
from fixtures import FILE_PATH, FILE_CONTENT
|
from fixtures import FILE_PATH, FILE_CONTENT
|
||||||
|
|
||||||
import httpie
|
import httpie
|
||||||
from httpie.compat import is_py26
|
|
||||||
|
|
||||||
|
def test_main_entry_point():
|
||||||
|
# Patch stdin to bypass pytest capture
|
||||||
|
with mock.patch.object(Environment, 'stdin', io.StringIO()):
|
||||||
|
with pytest.raises(SystemExit) as e:
|
||||||
|
httpie.__main__.main()
|
||||||
|
assert e.value.code == ExitStatus.ERROR
|
||||||
|
|
||||||
|
|
||||||
|
@mock.patch('httpie.core.main')
|
||||||
|
def test_main_entry_point_keyboard_interrupt(main):
|
||||||
|
main.side_effect = KeyboardInterrupt()
|
||||||
|
with mock.patch.object(Environment, 'stdin', io.StringIO()):
|
||||||
|
with pytest.raises(SystemExit) as e:
|
||||||
|
httpie.__main__.main()
|
||||||
|
assert e.value.code == ExitStatus.ERROR_CTRL_C
|
||||||
|
|
||||||
|
|
||||||
def test_debug():
|
def test_debug():
|
||||||
r = http('--debug')
|
r = http('--debug')
|
||||||
assert r.exit_status == httpie.ExitStatus.OK
|
assert r.exit_status == ExitStatus.SUCCESS
|
||||||
assert 'HTTPie %s' % httpie.__version__ in r.stderr
|
assert 'HTTPie %s' % httpie.__version__ in r.stderr
|
||||||
|
|
||||||
|
|
||||||
def test_help():
|
def test_help():
|
||||||
r = http('--help', error_exit_ok=True)
|
r = http('--help', tolerate_error_exit_status=True)
|
||||||
assert r.exit_status == httpie.ExitStatus.OK
|
assert r.exit_status == ExitStatus.SUCCESS
|
||||||
assert 'https://github.com/jkbrzt/httpie/issues' in r
|
assert 'https://github.com/jakubroztocil/httpie/issues' in r
|
||||||
|
|
||||||
|
|
||||||
def test_version():
|
def test_version():
|
||||||
r = http('--version', error_exit_ok=True)
|
r = http('--version', tolerate_error_exit_status=True)
|
||||||
assert r.exit_status == httpie.ExitStatus.OK
|
assert r.exit_status == ExitStatus.SUCCESS
|
||||||
# FIXME: py3 has version in stdout, py2 in stderr
|
# FIXME: py3 has version in stdout, py2 in stderr
|
||||||
assert httpie.__version__ == r.stderr.strip() + r.strip()
|
assert httpie.__version__ == r.strip()
|
||||||
|
|
||||||
|
|
||||||
def test_GET(httpbin_both):
|
def test_GET(httpbin_both):
|
||||||
@@ -62,12 +86,18 @@ def test_POST_form_multiple_values(httpbin_both):
|
|
||||||
def test_POST_stdin(httpbin_both):
|
def test_POST_stdin(httpbin_both):
|
||||||
with open(FILE_PATH) as f:
|
with open(FILE_PATH) as f:
|
||||||
env = TestEnvironment(stdin=f, stdin_isatty=False)
|
env = MockEnvironment(stdin=f, stdin_isatty=False)
|
||||||
r = http('--form', 'POST', httpbin_both + '/post', env=env)
|
r = http('--form', 'POST', httpbin_both + '/post', env=env)
|
||||||
assert HTTP_OK in r
|
assert HTTP_OK in r
|
||||||
assert FILE_CONTENT in r
|
assert FILE_CONTENT in r
|
||||||
|
|
||||||
|
|
||||||
|
def test_POST_file(httpbin_both):
|
||||||
|
r = http('--form', 'POST', httpbin_both + '/post', 'file@' + FILE_PATH)
|
||||||
|
assert HTTP_OK in r
|
||||||
|
assert FILE_CONTENT in r
|
||||||
|
|
||||||
|
|
||||||
def test_headers(httpbin_both):
|
def test_headers(httpbin_both):
|
||||||
r = http('GET', httpbin_both + '/headers', 'Foo:bar')
|
r = http('GET', httpbin_both + '/headers', 'Foo:bar')
|
||||||
assert HTTP_OK in r
|
assert HTTP_OK in r
|
||||||
@@ -75,10 +105,36 @@ def test_headers(httpbin_both):
assert '"Foo": "bar"' in r
|
assert '"Foo": "bar"' in r
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.skipif(
|
def test_headers_unset(httpbin_both):
|
||||||
is_py26,
|
r = http('GET', httpbin_both + '/headers')
|
||||||
reason='the `object_pairs_hook` arg for `json.loads()` is Py>2.6 only'
|
assert 'Accept' in r.json['headers'] # default Accept present
|
||||||
)
|
|
||||||
|
r = http('GET', httpbin_both + '/headers', 'Accept:')
|
||||||
|
assert 'Accept' not in r.json['headers'] # default Accept unset
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.skip('unimplemented')
|
||||||
|
def test_unset_host_header(httpbin_both):
|
||||||
|
r = http('GET', httpbin_both + '/headers')
|
||||||
|
assert 'Host' in r.json['headers'] # default Host present
|
||||||
|
|
||||||
|
r = http('GET', httpbin_both + '/headers', 'Host:')
|
||||||
|
assert 'Host' not in r.json['headers'] # default Host unset
|
||||||
|
|
||||||
|
|
||||||
|
def test_headers_empty_value(httpbin_both):
|
||||||
|
r = http('GET', httpbin_both + '/headers')
|
||||||
|
assert r.json['headers']['Accept'] # default Accept has value
|
||||||
|
|
||||||
|
r = http('GET', httpbin_both + '/headers', 'Accept;')
|
||||||
|
assert r.json['headers']['Accept'] == '' # Accept has no value
|
||||||
|
|
||||||
|
|
||||||
|
def test_headers_empty_value_with_value_gives_error(httpbin):
|
||||||
|
with pytest.raises(ParseError):
|
||||||
|
http('GET', httpbin + '/headers', 'Accept;SYNTAX_ERROR')
|
||||||
|
|
||||||
|
|
||||||
def test_json_input_preserve_order(httpbin_both):
|
def test_json_input_preserve_order(httpbin_both):
|
||||||
r = http('PATCH', httpbin_both + '/patch',
|
r = http('PATCH', httpbin_both + '/patch',
|
||||||
'order:={"map":{"1":"first","2":"second"}}')
|
'order:={"map":{"1":"first","2":"second"}}')
|
||||||
|
@@ -1,11 +1,11 @@
import os
|
import os
|
||||||
from tempfile import gettempdir
|
from tempfile import gettempdir
|
||||||
|
from urllib.request import urlopen
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from utils import TestEnvironment, http, HTTP_OK, COLOR, CRLF
|
from utils import MockEnvironment, http, HTTP_OK, COLOR, CRLF
|
||||||
from httpie import ExitStatus
|
from httpie.status import ExitStatus
|
||||||
from httpie.compat import urlopen
|
|
||||||
from httpie.output.formatters.colors import get_lexer
|
from httpie.output.formatters.colors import get_lexer
|
||||||
|
|
||||||
|
|
||||||
@@ -15,7 +15,7 @@ def test_output_option(httpbin, stdout_isatty):
url = httpbin + '/robots.txt'
|
url = httpbin + '/robots.txt'
|
||||||
|
|
||||||
r = http('--output', output_filename, url,
|
r = http('--output', output_filename, url,
|
||||||
env=TestEnvironment(stdout_isatty=stdout_isatty))
|
env=MockEnvironment(stdout_isatty=stdout_isatty))
|
||||||
assert r == ''
|
assert r == ''
|
||||||
|
|
||||||
expected_body = urlopen(url).read().decode()
|
expected_body = urlopen(url).read().decode()
|
||||||
@@ -33,7 +33,7 @@ class TestVerboseFlag:
assert r.count('__test__') == 2
|
assert r.count('__test__') == 2
|
||||||
|
|
||||||
def test_verbose_form(self, httpbin):
|
def test_verbose_form(self, httpbin):
|
||||||
# https://github.com/jkbrzt/httpie/issues/53
|
# https://github.com/jakubroztocil/httpie/issues/53
|
||||||
r = http('--verbose', '--form', 'POST', httpbin.url + '/post',
|
r = http('--verbose', '--form', 'POST', httpbin.url + '/post',
|
||||||
'A=B', 'C=D')
|
'A=B', 'C=D')
|
||||||
assert HTTP_OK in r
|
assert HTTP_OK in r
|
||||||
@@ -86,7 +86,7 @@ class TestPrettyOptions:
"""Test the --pretty flag handling."""
|
"""Test the --pretty flag handling."""
|
||||||
|
|
||||||
def test_pretty_enabled_by_default(self, httpbin):
|
def test_pretty_enabled_by_default(self, httpbin):
|
||||||
env = TestEnvironment(colors=256)
|
env = MockEnvironment(colors=256)
|
||||||
r = http('GET', httpbin.url + '/get', env=env)
|
r = http('GET', httpbin.url + '/get', env=env)
|
||||||
assert COLOR in r
|
assert COLOR in r
|
||||||
|
|
||||||
@@ -95,7 +95,7 @@ class TestPrettyOptions:
assert COLOR not in r
|
assert COLOR not in r
|
||||||
|
|
||||||
def test_force_pretty(self, httpbin):
|
def test_force_pretty(self, httpbin):
|
||||||
env = TestEnvironment(stdout_isatty=False, colors=256)
|
env = MockEnvironment(stdout_isatty=False, colors=256)
|
||||||
r = http('--pretty=all', 'GET', httpbin.url + '/get', env=env, )
|
r = http('--pretty=all', 'GET', httpbin.url + '/get', env=env, )
|
||||||
assert COLOR in r
|
assert COLOR in r
|
||||||
|
|
||||||
@@ -108,13 +108,13 @@ class TestPrettyOptions:
match any lexer.
|
match any lexer.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
env = TestEnvironment(colors=256)
|
env = MockEnvironment(colors=256)
|
||||||
r = http('--print=B', '--pretty=all', httpbin.url + '/post',
|
r = http('--print=B', '--pretty=all', httpbin.url + '/post',
|
||||||
'Content-Type:text/foo+json', 'a=b', env=env)
|
'Content-Type:text/foo+json', 'a=b', env=env)
|
||||||
assert COLOR in r
|
assert COLOR in r
|
||||||
|
|
||||||
def test_colors_option(self, httpbin):
|
def test_colors_option(self, httpbin):
|
||||||
env = TestEnvironment(colors=256)
|
env = MockEnvironment(colors=256)
|
||||||
r = http('--print=B', '--pretty=colors',
|
r = http('--print=B', '--pretty=colors',
|
||||||
'GET', httpbin.url + '/get', 'a=b',
|
'GET', httpbin.url + '/get', 'a=b',
|
||||||
env=env)
|
env=env)
|
||||||
@@ -123,7 +123,7 @@ class TestPrettyOptions:
assert COLOR in r
|
assert COLOR in r
|
||||||
|
|
||||||
def test_format_option(self, httpbin):
|
def test_format_option(self, httpbin):
|
||||||
env = TestEnvironment(colors=256)
|
env = MockEnvironment(colors=256)
|
||||||
r = http('--print=B', '--pretty=format',
|
r = http('--print=B', '--pretty=format',
|
||||||
'GET', httpbin.url + '/get', 'a=b',
|
'GET', httpbin.url + '/get', 'a=b',
|
||||||
env=env)
|
env=env)
|
||||||
@@ -161,7 +161,7 @@ class TestLineEndings:
|
|
||||||
def test_CRLF_formatted_response(self, httpbin):
|
def test_CRLF_formatted_response(self, httpbin):
|
||||||
r = http('--pretty=format', 'GET', httpbin.url + '/get')
|
r = http('--pretty=format', 'GET', httpbin.url + '/get')
|
||||||
assert r.exit_status == ExitStatus.OK
|
assert r.exit_status == ExitStatus.SUCCESS
|
||||||
self._validate_crlf(r)
|
self._validate_crlf(r)
|
||||||
|
|
||||||
def test_CRLF_ugly_request(self, httpbin):
|
def test_CRLF_ugly_request(self, httpbin):
|
||||||
|
@ -1,7 +1,7 @@
|
|||||||
"""High-level tests."""
|
"""High-level tests."""
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from httpie import ExitStatus
|
from httpie.status import ExitStatus
|
||||||
from utils import http, HTTP_OK
|
from utils import http, HTTP_OK
|
||||||
|
|
||||||
|
|
||||||
@@ -28,20 +28,25 @@ def test_follow_all_output_options_used_for_redirects(httpbin):
assert r.count('GET /') == 3
|
assert r.count('GET /') == 3
|
||||||
assert HTTP_OK not in r
|
assert HTTP_OK not in r
|
||||||
|
|
||||||
|
#
|
||||||
def test_follow_redirect_output_options(httpbin):
|
# def test_follow_redirect_output_options(httpbin):
|
||||||
r = http('--check-status',
|
# r = http('--check-status',
|
||||||
'--follow',
|
# '--follow',
|
||||||
'--all',
|
# '--all',
|
||||||
'--print=h',
|
# '--print=h',
|
||||||
'--history-print=H',
|
# '--history-print=H',
|
||||||
httpbin.url + '/redirect/2')
|
# httpbin.url + '/redirect/2')
|
||||||
assert r.count('GET /') == 2
|
# assert r.count('GET /') == 2
|
||||||
assert 'HTTP/1.1 302 FOUND' not in r
|
# assert 'HTTP/1.1 302 FOUND' not in r
|
||||||
assert HTTP_OK in r
|
# assert HTTP_OK in r
|
||||||
|
#
|
||||||
|
|
||||||
|
|
||||||
def test_max_redirects(httpbin):
|
def test_max_redirects(httpbin):
|
||||||
r = http('--max-redirects=1', '--follow', httpbin.url + '/redirect/3',
|
r = http(
|
||||||
error_exit_ok=True)
|
'--max-redirects=1',
|
||||||
|
'--follow',
|
||||||
|
httpbin.url + '/redirect/3',
|
||||||
|
tolerate_error_exit_status=True,
|
||||||
|
)
|
||||||
assert r.exit_status == ExitStatus.ERROR_TOO_MANY_REDIRECTS
|
assert r.exit_status == ExitStatus.ERROR_TOO_MANY_REDIRECTS
|
||||||
|
@@ -7,7 +7,7 @@ from httpie.compat import is_windows
|
|
||||||
def test_Host_header_overwrite(httpbin):
|
def test_Host_header_overwrite(httpbin):
|
||||||
"""
|
"""
|
||||||
https://github.com/jkbrzt/httpie/issues/235
|
https://github.com/jakubroztocil/httpie/issues/235
|
||||||
|
|
||||||
"""
|
"""
|
||||||
host = 'httpbin.org'
|
host = 'httpbin.org'
|
||||||
@@ -21,7 +21,7 @@ def test_Host_header_overwrite(httpbin):
@pytest.mark.skipif(is_windows, reason='Unix-only')
|
@pytest.mark.skipif(is_windows, reason='Unix-only')
|
||||||
def test_output_devnull(httpbin):
|
def test_output_devnull(httpbin):
|
||||||
"""
|
"""
|
||||||
https://github.com/jkbrzt/httpie/issues/252
|
https://github.com/jakubroztocil/httpie/issues/252
|
||||||
|
|
||||||
"""
|
"""
|
||||||
http('--output=/dev/null', httpbin + '/get')
|
http('--output=/dev/null', httpbin + '/get')
|
||||||
|
@@ -7,11 +7,11 @@ from tempfile import gettempdir
import pytest
|
import pytest
|
||||||
|
|
||||||
from httpie.plugins.builtin import HTTPBasicAuth
|
from httpie.plugins.builtin import HTTPBasicAuth
|
||||||
from utils import TestEnvironment, mk_config_dir, http, HTTP_OK
|
from utils import MockEnvironment, mk_config_dir, http, HTTP_OK
|
||||||
from fixtures import UNICODE
|
from fixtures import UNICODE
|
||||||
|
|
||||||
|
|
||||||
class SessionTestBase(object):
|
class SessionTestBase:
|
||||||
|
|
||||||
def start_session(self, httpbin):
|
def start_session(self, httpbin):
|
||||||
"""Create and reuse a unique config dir for each test."""
|
"""Create and reuse a unique config dir for each test."""
|
||||||
@ -24,12 +24,12 @@ class SessionTestBase(object):
|
|||||||
"""
|
"""
|
||||||
Return an environment.
|
Return an environment.
|
||||||
|
|
||||||
Each environment created withing a test method
|
Each environment created within a test method
|
||||||
will share the same config_dir. It is necessary
|
will share the same config_dir. It is necessary
|
||||||
for session files being reused.
|
for session files being reused.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
return TestEnvironment(config_dir=self.config_dir)
|
return MockEnvironment(config_dir=self.config_dir)
|
||||||
|
|
||||||
|
|
||||||
class TestSessionFlow(SessionTestBase):
|
class TestSessionFlow(SessionTestBase):
|
||||||
@ -44,11 +44,16 @@ class TestSessionFlow(SessionTestBase):
|
|||||||
authorization, and response cookies.
|
authorization, and response cookies.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
super(TestSessionFlow, self).start_session(httpbin)
|
super().start_session(httpbin)
|
||||||
r1 = http('--follow', '--session=test', '--auth=username:password',
|
r1 = http(
|
||||||
'GET', httpbin.url + '/cookies/set?hello=world',
|
'--follow',
|
||||||
'Hello:World',
|
'--session=test',
|
||||||
env=self.env())
|
'--auth=username:password',
|
||||||
|
'GET',
|
||||||
|
httpbin.url + '/cookies/set?hello=world',
|
||||||
|
'Hello:World',
|
||||||
|
env=self.env()
|
||||||
|
)
|
||||||
assert HTTP_OK in r1
|
assert HTTP_OK in r1
|
||||||
|
|
||||||
def test_session_created_and_reused(self, httpbin):
|
def test_session_created_and_reused(self, httpbin):
|
||||||
@ -81,8 +86,8 @@ class TestSessionFlow(SessionTestBase):
|
|||||||
assert HTTP_OK in r4
|
assert HTTP_OK in r4
|
||||||
assert r4.json['headers']['Hello'] == 'World2'
|
assert r4.json['headers']['Hello'] == 'World2'
|
||||||
assert r4.json['headers']['Cookie'] == 'hello=world2'
|
assert r4.json['headers']['Cookie'] == 'hello=world2'
|
||||||
assert (r2.json['headers']['Authorization'] !=
|
assert (r2.json['headers']['Authorization']
|
||||||
r4.json['headers']['Authorization'])
|
!= r4.json['headers']['Authorization'])
|
||||||
|
|
||||||
def test_session_read_only(self, httpbin):
|
def test_session_read_only(self, httpbin):
|
||||||
self.start_session(httpbin)
|
self.start_session(httpbin)
|
||||||
@ -130,20 +135,16 @@ class TestSession(SessionTestBase):
|
|||||||
|
|
||||||
def test_session_by_path(self, httpbin):
|
def test_session_by_path(self, httpbin):
|
||||||
self.start_session(httpbin)
|
self.start_session(httpbin)
|
||||||
session_path = os.path.join(self.config_dir, 'session-by-path.json')
|
session_path = self.config_dir / 'session-by-path.json'
|
||||||
r1 = http('--session=' + session_path, 'GET', httpbin.url + '/get',
|
r1 = http('--session', str(session_path), 'GET', httpbin.url + '/get',
|
||||||
'Foo:Bar', env=self.env())
|
'Foo:Bar', env=self.env())
|
||||||
assert HTTP_OK in r1
|
assert HTTP_OK in r1
|
||||||
|
|
||||||
r2 = http('--session=' + session_path, 'GET', httpbin.url + '/get',
|
r2 = http('--session', str(session_path), 'GET', httpbin.url + '/get',
|
||||||
env=self.env())
|
env=self.env())
|
||||||
assert HTTP_OK in r2
|
assert HTTP_OK in r2
|
||||||
assert r2.json['headers']['Foo'] == 'Bar'
|
assert r2.json['headers']['Foo'] == 'Bar'
|
||||||
|
|
||||||
@pytest.mark.skipif(
|
|
||||||
sys.version_info >= (3,),
|
|
||||||
reason="This test fails intermittently on Python 3 - "
|
|
||||||
"see https://github.com/jkbrzt/httpie/issues/282")
|
|
||||||
def test_session_unicode(self, httpbin):
|
def test_session_unicode(self, httpbin):
|
||||||
self.start_session(httpbin)
|
self.start_session(httpbin)
|
||||||
|
|
||||||
@ -157,14 +158,14 @@ class TestSession(SessionTestBase):
|
|||||||
assert HTTP_OK in r2
|
assert HTTP_OK in r2
|
||||||
|
|
||||||
# FIXME: Authorization *sometimes* is not present on Python3
|
# FIXME: Authorization *sometimes* is not present on Python3
|
||||||
assert (r2.json['headers']['Authorization'] ==
|
assert (r2.json['headers']['Authorization']
|
||||||
HTTPBasicAuth.make_header(u'test', UNICODE))
|
== HTTPBasicAuth.make_header(u'test', UNICODE))
|
||||||
# httpbin doesn't interpret utf8 headers
|
# httpbin doesn't interpret utf8 headers
|
||||||
assert UNICODE in r2
|
assert UNICODE in r2
|
||||||
|
|
||||||
def test_session_default_header_value_overwritten(self, httpbin):
|
def test_session_default_header_value_overwritten(self, httpbin):
|
||||||
self.start_session(httpbin)
|
self.start_session(httpbin)
|
||||||
# https://github.com/jkbrzt/httpie/issues/180
|
# https://github.com/jakubroztocil/httpie/issues/180
|
||||||
r1 = http('--session=test',
|
r1 = http('--session=test',
|
||||||
httpbin.url + '/headers', 'User-Agent:custom',
|
httpbin.url + '/headers', 'User-Agent:custom',
|
||||||
env=self.env())
|
env=self.env())
|
||||||
@ -176,7 +177,7 @@ class TestSession(SessionTestBase):
|
|||||||
assert r2.json['headers']['User-Agent'] == 'custom'
|
assert r2.json['headers']['User-Agent'] == 'custom'
|
||||||
|
|
||||||
def test_download_in_session(self, httpbin):
|
def test_download_in_session(self, httpbin):
|
||||||
# https://github.com/jkbrzt/httpie/issues/412
|
# https://github.com/jakubroztocil/httpie/issues/412
|
||||||
self.start_session(httpbin)
|
self.start_session(httpbin)
|
||||||
cwd = os.getcwd()
|
cwd = os.getcwd()
|
||||||
os.chdir(gettempdir())
|
os.chdir(gettempdir())
|
||||||
|
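In test_session_by_path above, the config directory is now a pathlib.Path, so the session file is built with the / operator and handed to --session as a string. A small illustration of the idiom, assuming a Path-valued config_dir as in the diff (the directory name here is hypothetical):

    from pathlib import Path

    config_dir = Path('/tmp/httpie_test_config')        # hypothetical location
    session_path = config_dir / 'session-by-path.json'  # Path.__truediv__ joins components
    r = http('--session', str(session_path), 'GET', httpbin.url + '/get', 'Foo:Bar')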
@@ -2,17 +2,31 @@ import os

 import pytest
 import pytest_httpbin.certs
-from requests.exceptions import SSLError
+import requests.exceptions

-from httpie import ExitStatus
-from httpie.input import SSL_VERSION_ARG_MAPPING
-from utils import http, HTTP_OK, TESTS_ROOT
+from httpie.status import ExitStatus
+from httpie.cli.constants import SSL_VERSION_ARG_MAPPING
+from utils import HTTP_OK, TESTS_ROOT, http


+try:
+# Handle OpenSSL errors, if installed.
+# See <https://github.com/jakubroztocil/httpie/issues/729>
+# noinspection PyUnresolvedReferences
+import OpenSSL.SSL
+ssl_errors = (
+requests.exceptions.SSLError,
+OpenSSL.SSL.Error,
+)
+except ImportError:
+ssl_errors = (
+requests.exceptions.SSLError,
+)


 CLIENT_CERT = os.path.join(TESTS_ROOT, 'client_certs', 'client.crt')
 CLIENT_KEY = os.path.join(TESTS_ROOT, 'client_certs', 'client.key')
 CLIENT_PEM = os.path.join(TESTS_ROOT, 'client_certs', 'client.pem')

 # FIXME:
 # We test against a local httpbin instance which uses a self-signed cert.
 # Requests without --verify=<CA_BUNDLE> will fail with a verification error.
@@ -28,10 +42,10 @@ def test_ssl_version(httpbin_secure, ssl_version):
 httpbin_secure + '/get'
 )
 assert HTTP_OK in r
-except SSLError as e:
+except ssl_errors as e:
 if ssl_version == 'ssl3':
 # pytest-httpbin doesn't support ssl3
-assert 'SSLV3_ALERT_HANDSHAKE_FAILURE' in str(e)
+pass
 else:
 raise

@@ -52,17 +66,17 @@ class TestClientCert:
 def test_cert_file_not_found(self, httpbin_secure):
 r = http(httpbin_secure + '/get',
 '--cert', '/__not_found__',
-error_exit_ok=True)
+tolerate_error_exit_status=True)
 assert r.exit_status == ExitStatus.ERROR
 assert 'No such file or directory' in r.stderr

 def test_cert_file_invalid(self, httpbin_secure):
-with pytest.raises(SSLError):
+with pytest.raises(ssl_errors):
 http(httpbin_secure + '/get',
 '--cert', __file__)

 def test_cert_ok_but_missing_key(self, httpbin_secure):
-with pytest.raises(SSLError):
+with pytest.raises(ssl_errors):
 http(httpbin_secure + '/get',
 '--cert', CLIENT_CERT)

@@ -73,21 +87,29 @@ class TestServerCert:
 r = http(httpbin_secure.url + '/get', '--verify=no')
 assert HTTP_OK in r

+@pytest.mark.parametrize('verify_value', ['false', 'fALse'])
+def test_verify_false_OK(self, httpbin_secure, verify_value):
+r = http(httpbin_secure.url + '/get', '--verify', verify_value)
+assert HTTP_OK in r
+
 def test_verify_custom_ca_bundle_path(
-self, httpbin_secure_untrusted):
+self, httpbin_secure_untrusted
+):
 r = http(httpbin_secure_untrusted + '/get', '--verify', CA_BUNDLE)
 assert HTTP_OK in r

 def test_self_signed_server_cert_by_default_raises_ssl_error(
 self,
-httpbin_secure_untrusted):
-with pytest.raises(SSLError):
+httpbin_secure_untrusted
+):
+with pytest.raises(ssl_errors):
 http(httpbin_secure_untrusted.url + '/get')

 def test_verify_custom_ca_bundle_invalid_path(self, httpbin_secure):
-with pytest.raises(SSLError):
+# since 2.14.0 requests raises IOError
+with pytest.raises(ssl_errors + (IOError,)):
 http(httpbin_secure.url + '/get', '--verify', '/__not_found__')

 def test_verify_custom_ca_bundle_invalid_bundle(self, httpbin_secure):
-with pytest.raises(SSLError):
+with pytest.raises(ssl_errors):
 http(httpbin_secure.url + '/get', '--verify', __file__)
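The ssl_errors tuple introduced above works because both except clauses and pytest.raises accept a tuple of exception types, and such tuples can be widened inline, as in ssl_errors + (IOError,). A self-contained illustration of that mechanism (the exception classes here are arbitrary examples, not taken from the diff):

    import pytest

    errors = (ValueError, TypeError)

    with pytest.raises(errors + (KeyError,)):  # concatenation widens the accepted types
        raise KeyError('also caught')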
@@ -2,7 +2,7 @@ import pytest

 from httpie.compat import is_windows
 from httpie.output.streams import BINARY_SUPPRESSED_NOTICE
-from utils import http, TestEnvironment
+from utils import http, MockEnvironment
 from fixtures import BIN_FILE_CONTENT, BIN_FILE_PATH


@@ -14,7 +14,7 @@ from fixtures import BIN_FILE_CONTENT, BIN_FILE_PATH
 def test_pretty_redirected_stream(httpbin):
 """Test that --stream works with prettified redirected output."""
 with open(BIN_FILE_PATH, 'rb') as f:
-env = TestEnvironment(colors=256, stdin=f,
+env = MockEnvironment(colors=256, stdin=f,
 stdin_isatty=False,
 stdout_isatty=False)
 r = http('--verbose', '--pretty=all', '--stream', 'GET',
@@ -26,7 +26,7 @@ def test_encoded_stream(httpbin):
 """Test that --stream works with non-prettified
 redirected terminal output."""
 with open(BIN_FILE_PATH, 'rb') as f:
-env = TestEnvironment(stdin=f, stdin_isatty=False)
+env = MockEnvironment(stdin=f, stdin_isatty=False)
 r = http('--pretty=none', '--stream', '--verbose', 'GET',
 httpbin.url + '/get', env=env)
 assert BINARY_SUPPRESSED_NOTICE.decode() in r
@@ -36,7 +36,7 @@ def test_redirected_stream(httpbin):
 """Test that --stream works with non-prettified
 redirected terminal output."""
 with open(BIN_FILE_PATH, 'rb') as f:
-env = TestEnvironment(stdout_isatty=False,
+env = MockEnvironment(stdout_isatty=False,
 stdin_isatty=False,
 stdin=f)
 r = http('--pretty=none', '--stream', '--verbose', 'GET',
@@ -2,8 +2,8 @@ import os

 import pytest

-from httpie.input import ParseError
-from utils import TestEnvironment, http, HTTP_OK
+from httpie.cli.exceptions import ParseError
+from utils import MockEnvironment, http, HTTP_OK
 from fixtures import FILE_PATH_ARG, FILE_PATH, FILE_CONTENT


@@ -62,14 +62,19 @@ class TestRequestBodyFromFilePath:

 def test_request_body_from_file_by_path_no_field_name_allowed(
 self, httpbin):
-env = TestEnvironment(stdin_isatty=True)
+env = MockEnvironment(stdin_isatty=True)
 r = http('POST', httpbin.url + '/post', 'field-name@' + FILE_PATH_ARG,
-env=env, error_exit_ok=True)
+env=env, tolerate_error_exit_status=True)
 assert 'perhaps you meant --form?' in r.stderr

 def test_request_body_from_file_by_path_no_data_items_allowed(
 self, httpbin):
-env = TestEnvironment(stdin_isatty=False)
-r = http('POST', httpbin.url + '/post', '@' + FILE_PATH_ARG, 'foo=bar',
-env=env, error_exit_ok=True)
+env = MockEnvironment(stdin_isatty=False)
+r = http(
+'POST',
+httpbin.url + '/post',
+'@' + FILE_PATH_ARG, 'foo=bar',
+env=env,
+tolerate_error_exit_status=True,
+)
 assert 'cannot be mixed' in r.stderr
@@ -4,7 +4,7 @@ import tempfile
 import pytest
 from httpie.context import Environment

-from utils import TestEnvironment, http
+from utils import MockEnvironment, http
 from httpie.compat import is_windows


@@ -20,12 +20,12 @@ class TestWindowsOnly:

 class TestFakeWindows:
 def test_output_file_pretty_not_allowed_on_windows(self, httpbin):
-env = TestEnvironment(is_windows=True)
+env = MockEnvironment(is_windows=True)
 output_file = os.path.join(
 tempfile.gettempdir(),
 self.test_output_file_pretty_not_allowed_on_windows.__name__
 )
 r = http('--output', output_file,
 '--pretty=all', 'GET', httpbin.url + '/get',
-env=env, error_exit_ok=True)
+env=env, tolerate_error_exit_status=True)
 assert 'Only terminal output can be colorized on Windows' in r.stderr
@@ -5,17 +5,20 @@ import sys
 import time
 import json
 import tempfile
+from pathlib import Path
+from typing import Optional, Union

-from httpie import ExitStatus, EXIT_STATUS_LABELS
+from httpie.status import ExitStatus
+from httpie.config import Config
 from httpie.context import Environment
 from httpie.core import main
-from httpie.compat import bytes, str


-TESTS_ROOT = os.path.abspath(os.path.dirname(__file__))
+TESTS_ROOT = Path(__file__).parent
 CRLF = '\r\n'
 COLOR = '\x1b['
 HTTP_OK = '200 OK'
+# noinspection GrazieInspection
 HTTP_OK_COLOR = (
 'HTTP\x1b[39m\x1b[38;5;245m/\x1b[39m\x1b'
 '[38;5;37m1.1\x1b[39m\x1b[38;5;245m \x1b[39m\x1b[38;5;37m200'
@@ -23,9 +26,9 @@ HTTP_OK_COLOR = (
 )


-def mk_config_dir():
+def mk_config_dir() -> Path:
 dirname = tempfile.mkdtemp(prefix='httpie_config_')
-return dirname
+return Path(dirname)


 def add_auth(url, auth):
@@ -33,14 +36,14 @@ def add_auth(url, auth):
 return proto + '://' + auth + '@' + rest


-class TestEnvironment(Environment):
+class MockEnvironment(Environment):
 """Environment subclass with reasonable defaults for testing."""
 colors = 0
 stdin_isatty = True,
 stdout_isatty = True
 is_windows = False

-def __init__(self, **kwargs):
+def __init__(self, create_temp_config_dir=True, **kwargs):
 if 'stdout' not in kwargs:
 kwargs['stdout'] = tempfile.TemporaryFile(
 mode='w+b',
@@ -51,32 +54,41 @@ class TestEnvironment(Environment):
 mode='w+t',
 prefix='httpie_stderr'
 )
-super(TestEnvironment, self).__init__(**kwargs)
+super().__init__(**kwargs)
+self._create_temp_config_dir = create_temp_config_dir
 self._delete_config_dir = False
+self._temp_dir = Path(tempfile.gettempdir())

 @property
-def config(self):
-if not self.config_dir.startswith(tempfile.gettempdir()):
-self.config_dir = mk_config_dir()
-self._delete_config_dir = True
-return super(TestEnvironment, self).config
+def config(self) -> Config:
+if (self._create_temp_config_dir
+and self._temp_dir not in self.config_dir.parents):
+self.create_temp_config_dir()
+return super().config

+def create_temp_config_dir(self):
+self.config_dir = mk_config_dir()
+self._delete_config_dir = True
+
 def cleanup(self):
+self.stdout.close()
+self.stderr.close()
 if self._delete_config_dir:
-assert self.config_dir.startswith(tempfile.gettempdir())
+assert self._temp_dir in self.config_dir.parents
 from shutil import rmtree
-rmtree(self.config_dir)
+rmtree(self.config_dir, ignore_errors=True)

 def __del__(self):
+# noinspection PyBroadException
 try:
 self.cleanup()
 except Exception:
 pass


-class BaseCLIResponse(object):
+class BaseCLIResponse:
 """
 Represents the result of simulated `$ http' invocation via `http()`.

 Holds and provides access to:

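MockEnvironment (formerly TestEnvironment) still creates a throwaway config directory on first use of .config, but now guards it behind create_temp_config_dir and tracks paths with pathlib; cleanup() additionally closes the captured stdout/stderr and ignores errors while removing the directory. A minimal usage sketch mirroring the tests above:

    env = MockEnvironment(stdin_isatty=False)  # test-friendly defaults
    r = http(httpbin.url + '/get', env=env)
    assert HTTP_OK in r
    env.cleanup()                              # closes stdout/stderr, removes the temp config dir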
@@ -85,9 +97,9 @@ class BaseCLIResponse(object):
 - exit_status output: print(self.exit_status)

 """
-stderr = None
-json = None
-exit_status = None
+stderr: str = None
+json: dict = None
+exit_status: ExitStatus = None


 class BytesCLIResponse(bytes, BaseCLIResponse):
@@ -104,10 +116,10 @@ class BytesCLIResponse(bytes, BaseCLIResponse):
 class StrCLIResponse(str, BaseCLIResponse):

 @property
-def json(self):
+def json(self) -> Optional[dict]:
 """
-Return deserialized JSON body, if one included in the output
-and is parseable.
+Return deserialized the request or response JSON body,
+if one (and only one) included in the output and is parsable.

 """
 if not hasattr(self, '_json'):
@@ -119,8 +131,8 @@ class StrCLIResponse(str, BaseCLIResponse):
 elif self.strip().startswith('{'):
 # Looks like JSON body.
 self._json = json.loads(self)
-elif (self.count('Content-Type:') == 1 and
-'application/json' in self):
+elif (self.count('Content-Type:') == 1
+and 'application/json' in self):
 # Looks like a whole JSON HTTP message,
 # try to extract its body.
 try:
@@ -129,6 +141,7 @@ class StrCLIResponse(str, BaseCLIResponse):
 pass
 else:
 try:
+# noinspection PyAttributeOutsideInit
 self._json = json.loads(j)
 except ValueError:
 pass
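The json property above lazily parses the captured output: a bare JSON body, or the body of a single JSON HTTP message, becomes a dict on first access. A short sketch of how the tests above rely on it (httpbin's /get echoes request headers back as JSON):

    r = http(httpbin.url + '/get', 'Foo:Bar')
    assert HTTP_OK in r                        # r is a StrCLIResponse, i.e. a str subclass
    assert r.json['headers']['Foo'] == 'Bar'   # deserialized on demand by the property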
@@ -139,7 +152,12 @@ class ExitStatusError(Exception):
 pass


-def http(*args, **kwargs):
+def http(
+*args,
+program_name='http',
+tolerate_error_exit_status=False,
+**kwargs,
+) -> Union[StrCLIResponse, BytesCLIResponse]:
 # noinspection PyUnresolvedReferences
 """
 Run HTTPie and capture stderr/out and exit status.
@@ -159,7 +177,7 @@ def http(*args, **kwargs):

 Exceptions are propagated.

-If you pass ``error_exit_ok=True``, then error exit statuses
+If you pass ``tolerate_error_exit_status=True``, then error exit statuses
 won't result into an exception.

 Example:
@@ -171,7 +189,7 @@ def http(*args, **kwargs):
 >>> type(r) == StrCLIResponse
 True
 >>> r.exit_status
-0
+<ExitStatus.SUCCESS: 0>
 >>> r.stderr
 ''
 >>> 'HTTP/1.1 200 OK' in r
@@ -180,10 +198,9 @@ def http(*args, **kwargs):
 True

 """
-error_exit_ok = kwargs.pop('error_exit_ok', False)
 env = kwargs.get('env')
 if not env:
-env = kwargs['env'] = TestEnvironment()
+env = kwargs['env'] = MockEnvironment()

 stdout = env.stdout
 stderr = env.stderr
@@ -192,11 +209,13 @@ def http(*args, **kwargs):
 args_with_config_defaults = args + env.config.default_options
 add_to_args = []
 if '--debug' not in args_with_config_defaults:
-if '--traceback' not in args_with_config_defaults:
+if (not tolerate_error_exit_status
+and '--traceback' not in args_with_config_defaults):
 add_to_args.append('--traceback')
 if not any('--timeout' in arg for arg in args_with_config_defaults):
 add_to_args.append('--timeout=3')
-args = add_to_args + args
+complete_args = [program_name, *add_to_args, *args]

 def dump_stderr():
 stderr.seek(0)
@@ -204,12 +223,12 @@ def http(*args, **kwargs):

 try:
 try:
-exit_status = main(args=args, **kwargs)
+exit_status = main(args=complete_args, **kwargs)
 if '--download' in args:
 # Let the progress reporter thread finish.
 time.sleep(.5)
 except SystemExit:
-if error_exit_ok:
+if tolerate_error_exit_status:
 exit_status = ExitStatus.ERROR
 else:
 dump_stderr()
@@ -219,14 +238,12 @@ def http(*args, **kwargs):
 sys.stderr.write(stderr.read())
 raise
 else:
-if not error_exit_ok and exit_status != ExitStatus.OK:
+if (not tolerate_error_exit_status
+and exit_status != ExitStatus.SUCCESS):
 dump_stderr()
 raise ExitStatusError(
 'httpie.core.main() unexpectedly returned'
-' a non-zero exit status: {0} ({1})'.format(
-exit_status,
-EXIT_STATUS_LABELS[exit_status]
-)
+f' a non-zero exit status: {exit_status}'
 )

 stdout.seek(0)
@@ -235,15 +252,13 @@ def http(*args, **kwargs):
 try:
 output = output.decode('utf8')
 except UnicodeDecodeError:
-# noinspection PyArgumentList
 r = BytesCLIResponse(output)
 else:
-# noinspection PyArgumentList
 r = StrCLIResponse(output)
 r.stderr = stderr.read()
 r.exit_status = exit_status

-if r.exit_status != ExitStatus.OK:
+if r.exit_status != ExitStatus.SUCCESS:
 sys.stderr.write(r.stderr)

 return r
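With the rewrite above, http() takes program_name and tolerate_error_exit_status as explicit keyword arguments instead of popping them from **kwargs, prepends program_name to the argv it hands to main(), and reports the exit status as an ExitStatus member. A short sketch of the tolerated-error path (the endpoint and flag choices are illustrative, not taken from the diff):

    r = http('--check-status', httpbin.url + '/status/404',
             tolerate_error_exit_status=True)  # an error exit no longer raises ExitStatusError
    assert r.exit_status != ExitStatus.SUCCESS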