Mirror of https://github.com/httpie/cli.git (synced 2025-08-11 10:33:54 +02:00)
Compare commits
592 Commits
Commits 0eba037037 … d5bc564e4f (abbreviated SHAs only; author, date, and message columns are not shown in this capture).
.editorconfig  (new file, 17 lines)
@@ -0,0 +1,17 @@
# http://editorconfig.org
root = true

[*]
indent_style = space
indent_size = 4
end_of_line = lf
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true

[*.yml]
indent_size = 2

[Makefile]
indent_style = tab
indent_size = 8
.gitignore  (vendored, 21 lines changed)
@@ -1,10 +1,13 @@
-dist
-httpie.egg-info
-build
-*.pyc
-.tox
-README.html
-.coverage
-htmlcov
-.idea
 .DS_Store
+.idea/
+__pycache__/
+dist/
+build/
+*.egg-info
+.cache/
+.tox/
+.coverage
+*.pyc
+*.egg
+htmlcov
+.pytest_cache/
.travis.yml  (95 lines changed)
@@ -1,9 +1,96 @@
+# <https://travis-ci.org/jakubroztocil/httpie>
+sudo: false
 language: python
+os:
+  - linux
+env:
+  global:
+    - NEWEST_PYTHON=3.7
 python:
-  - 2.6
+  # <https://docs.travis-ci.com/user/languages/python/>
+
   - 2.7
+
+  # Python 3.4 fails installing packages
+  # <https://travis-ci.org/jakubroztocil/httpie/jobs/403263566#L636>
+  # - 3.4
+
+  - 3.5
+  - 3.6
+  # - 3.7 # is done in the matrix below as described in travis-ci/travis-ci#9069
   - pypy
-  - 3.3
-script: python setup.py test
+  # pypy3 currently fails because of a Flask issue
+  # - pypy3
+
+cache: pip
+matrix:
+  include:
+    # Add manually defined OS X builds
+    # <https://docs.travis-ci.com/user/multi-os/#Python-example-(unsupported-languages)>
+    - os: osx
+      language: generic
+      env:
+        # Stock OSX Python
+        - TOXENV=py27-osx-builtin
+        - BREW_PYTHON_PACKAGE=
+    - os: osx
+      language: generic
+      env:
+        # Latest Python 2.7 from Homebrew
+        - TOXENV=py27
+        - BREW_PYTHON_PACKAGE=python@2
+    - os: osx
+      language: generic
+      env:
+        # Latest Python 3.x from Homebrew
+        - TOXENV=py37  # <= needs to be kept up-to-date to reflect latest minor version
+        - BREW_PYTHON_PACKAGE=python@3
+    # Travis Python 3.7 must run sudo on
+    - os: linux
+      python: 3.7
+      env: TOXENV=py37
+      sudo: true    # Required for Python 3.7
+      dist: xenial  # Required for Python 3.7
+    # Add a codestyle-only build
+    - os: linux
+      python: 3.6
+      env: CODESTYLE_ONLY=true
 install:
-  - pip install . --use-mirrors
+  - |
+    if [[ $TRAVIS_OS_NAME == 'osx' ]]; then
+      if [[ -n "$BREW_PYTHON_PACKAGE" ]]; then
+        brew update
+        if ! brew list --versions "$BREW_PYTHON_PACKAGE" >/dev/null; then
+          brew install "$BREW_PYTHON_PACKAGE"
+        elif ! brew outdated "$BREW_PYTHON_PACKAGE"; then
+          brew upgrade "$BREW_PYTHON_PACKAGE"
+        fi
+      fi
+      sudo pip2 install tox
+    fi
+script:
+  - |
+    if [[ $TRAVIS_OS_NAME == 'linux' ]]; then
+      if [[ $CODESTYLE_ONLY ]]; then
+        make pycodestyle
+      else
+        make test
+      fi
+    else
+      PATH="/usr/local/bin:$PATH" tox -e "$TOXENV"
+    fi
+after_success:
+  - |
+    if [[ $TRAVIS_PYTHON_VERSION == $NEWEST_PYTHON && $TRAVIS_OS_NAME == 'linux' ]]; then
+      make coveralls
+    fi
+notifications:
+  webhooks:
+    # options: [always|never|change] default: always
+    on_success: always
+    on_failure: always
+    on_start: always
+    urls:
+      # https://gitter.im/jkbrzt/httpie
+      - https://webhooks.gitter.im/e/c42fcd359a110d02830b
AUTHORS.rst  (11 lines changed)
@@ -2,12 +2,14 @@
 HTTPie authors
 ==============

-* `Jakub Roztocil <https://github.com/jkbr>`_
+* `Jakub Roztocil <https://github.com/jakubroztocil>`_


 Patches and ideas
 -----------------

+`Complete list of contributors on GitHub <https://github.com/jakubroztocil/httpie/graphs/contributors>`_
+
 * `Cláudia T. Delgado <https://github.com/claudiatd>`_ (logo)
 * `Hank Gay <https://github.com/gthank>`_
 * `Jake Basile <https://github.com/jakebasile>`_
@@ -29,3 +31,10 @@ Patches and ideas
 * `Davey Shafik <https://github.com/dshafik>`_
 * `cido <https://github.com/cido>`_
 * `Justin Bonnar <https://github.com/jargonjustin>`_
+* `Nathan LaFreniere <https://github.com/nlf>`_
+* `Matthias Lehmann <https://github.com/matleh>`_
+* `Dennis Brakhane <https://github.com/brakhane>`_
+* `Matt Layman <https://github.com/mblayman>`_
+* `Edward Yang <https://github.com/honorabrutroll>`_
CHANGELOG.rst  (new file, 364 lines)
@@ -0,0 +1,364 @@
==========
Change Log
==========

This document records all notable changes to `HTTPie <http://httpie.org>`_.
This project adheres to `Semantic Versioning <http://semver.org/>`_.


`1.0.3-dev`_ (unreleased)
-------------------------

* No changes yet.


`1.0.2`_ (2018-11-14)
-------------------------

* Fixed tests for installation with pyOpenSSL.


`1.0.1`_ (2018-11-14)
-------------------------

* Removed external URL calls from tests.


`1.0.0`_ (2018-11-02)
-------------------------

* Added ``--style=auto`` which follows the terminal ANSI color styles.
* Added support for selecting TLS 1.3 via ``--ssl=tls1.3``
  (available once implemented in upstream libraries).
* Added ``true``/``false`` as valid values for ``--verify``
  (in addition to ``yes``/``no``) and the boolean value is case-insensitive.
* Changed the default ``--style`` from ``solarized`` to ``auto`` (on Windows it stays ``fruity``).
* Fixed default headers being incorrectly case-sensitive.
* Removed Python 2.6 support.


`0.9.9`_ (2016-12-08)
---------------------

* Fixed README.


`0.9.8`_ (2016-12-08)
---------------------

* Extended auth plugin API.
* Added exit status code ``7`` for plugin errors.
* Added support for ``curses``-less Python installations.
* Fixed ``REQUEST_ITEM`` arg incorrectly being reported as required.
* Improved ``CTRL-C`` interrupt handling.
* Added the standard exit status code ``130`` for keyboard interrupts.


`0.9.6`_ (2016-08-13)
---------------------

* Added Python 3 as a dependency for Homebrew installations
  to ensure some of the newer HTTP features work out of the box
  for macOS users (starting with HTTPie 0.9.4.).
* Added the ability to unset a request header with ``Header:``, and send an
  empty value with ``Header;``.
* Added ``--default-scheme <URL_SCHEME>`` to enable things like
  ``$ alias https='http --default-scheme=https``.
* Added ``-I`` as a shortcut for ``--ignore-stdin``.
* Added fish shell completion (located in ``extras/httpie-completion.fish``
  in the Github repo).
* Updated ``requests`` to 2.10.0 so that SOCKS support can be added via
  ``pip install requests[socks]``.
* Changed the default JSON ``Accept`` header from ``application/json``
  to ``application/json, */*``.
* Changed the pre-processing of request HTTP headers so that any leading
  and trailing whitespace is removed.


`0.9.4`_ (2016-07-01)
---------------------

* Added ``Content-Type`` of files uploaded in ``multipart/form-data`` requests
* Added ``--ssl=<PROTOCOL>`` to specify the desired SSL/TLS protocol version
  to use for HTTPS requests.
* Added JSON detection with ``--json, -j`` to work around incorrect
  ``Content-Type``
* Added ``--all`` to show intermediate responses such as redirects (with ``--follow``)
* Added ``--history-print, -P WHAT`` to specify formatting of intermediate responses
* Added ``--max-redirects=N`` (default 30)
* Added ``-A`` as short name for ``--auth-type``
* Added ``-F`` as short name for ``--follow``
* Removed the ``implicit_content_type`` config option
  (use ``"default_options": ["--form"]`` instead)
* Redirected ``stdout`` doesn't trigger an error anymore when ``--output FILE``
  is set
* Changed the default ``--style`` back to ``solarized`` for better support
  of light and dark terminals
* Improved ``--debug`` output
* Fixed ``--session`` when used with ``--download``
* Fixed ``--download`` to trim too long filenames before saving the file
* Fixed the handling of ``Content-Type`` with multiple ``+subtype`` parts
* Removed the XML formatter as the implementation suffered from multiple issues


`0.9.3`_ (2016-01-01)
---------------------

* Changed the default color ``--style`` from ``solarized`` to ``monokai``
* Added basic Bash autocomplete support (need to be installed manually)
* Added request details to connection error messages
* Fixed ``'requests.packages.urllib3' has no attribute 'disable_warnings'``
  errors that occurred in some installations
* Fixed colors and formatting on Windows
* Fixed ``--auth`` prompt on Windows


`0.9.2`_ (2015-02-24)
---------------------

* Fixed compatibility with Requests 2.5.1
* Changed the default JSON ``Content-Type`` to ``application/json`` as UTF-8
  is the default JSON encoding


`0.9.1`_ (2015-02-07)
---------------------

* Added support for Requests transport adapter plugins
  (see `httpie-unixsocket <https://github.com/httpie/httpie-unixsocket>`_
  and `httpie-http2 <https://github.com/httpie/httpie-http2>`_)


`0.9.0`_ (2015-01-31)
---------------------

* Added ``--cert`` and ``--cert-key`` parameters to specify a client side
  certificate and private key for SSL
* Improved unicode support
* Improved terminal color depth detection via ``curses``
* To make it easier to deal with Windows paths in request items, ``\``
  now only escapes special characters (the ones that are used as key-value
  separators by HTTPie)
* Switched from ``unittest`` to ``pytest``
* Added Python `wheel` support
* Various test suite improvements
* Added ``CONTRIBUTING``
* Fixed ``User-Agent`` overwriting when used within a session
* Fixed handling of empty passwords in URL credentials
* Fixed multiple file uploads with the same form field name
* Fixed ``--output=/dev/null`` on Linux
* Miscellaneous bugfixes


`0.8.0`_ (2014-01-25)
---------------------

* Added ``field=@file.txt`` and ``field:=@file.json`` for embedding
  the contents of text and JSON files into request data
* Added curl-style shorthand for localhost
* Fixed request ``Host`` header value output so that it doesn't contain
  credentials, if included in the URL


`0.7.1`_ (2013-09-24)
---------------------

* Added ``--ignore-stdin``
* Added support for auth plugins
* Improved ``--help`` output
* Improved ``Content-Disposition`` parsing for ``--download`` mode
* Update to Requests 2.0.0


`0.6.0`_ (2013-06-03)
---------------------

* XML data is now formatted
* ``--session`` and ``--session-read-only`` now also accept paths to
  session files (eg. ``http --session=/tmp/session.json example.org``)


`0.5.1`_ (2013-05-13)
---------------------

* ``Content-*`` and ``If-*`` request headers are not stored in sessions
  anymore as they are request-specific


`0.5.0`_ (2013-04-27)
---------------------

* Added a download mode via ``--download``
* Fixes miscellaneous bugs


`0.4.1`_ (2013-02-26)
---------------------

* Fixed ``setup.py``


`0.4.0`_ (2013-02-22)
---------------------

* Added Python 3.3 compatibility
* Added Requests >= v1.0.4 compatibility
* Added support for credentials in URL
* Added ``--no-option`` for every ``--option`` to be config-friendly
* Mutually exclusive arguments can be specified multiple times. The
  last value is used


`0.3.0`_ (2012-09-21)
---------------------

* Allow output redirection on Windows
* Added configuration file
* Added persistent session support
* Renamed ``--allow-redirects`` to ``--follow``
* Improved the usability of ``http --help``
* Fixed installation on Windows with Python 3
* Fixed colorized output on Windows with Python 3
* CRLF HTTP header field separation in the output
* Added exit status code ``2`` for timed-out requests
* Added the option to separate colorizing and formatting
  (``--pretty=all``, ``--pretty=colors`` and ``--pretty=format``)
  ``--ugly`` has bee removed in favor of ``--pretty=none``


`0.2.7`_ (2012-08-07)
---------------------

* Added compatibility with Requests 0.13.6
* Added streamed terminal output. ``--stream, -S`` can be used to enable
  streaming also with ``--pretty`` and to ensure a more frequent output
  flushing
* Added support for efficient large file downloads
* Sort headers by name (unless ``--pretty=none``)
* Response body is fetched only when needed (e.g., not with ``--headers``)
* Improved content type matching
* Updated Solarized color scheme
* Windows: Added ``--output FILE`` to store output into a file
  (piping results in corrupted data on Windows)
* Proper handling of binary requests and responses
* Fixed printing of ``multipart/form-data`` requests
* Renamed ``--traceback`` to ``--debug``


`0.2.6`_ (2012-07-26)
---------------------

* The short option for ``--headers`` is now ``-h`` (``-t`` has been
  removed, for usage use ``--help``)
* Form data and URL parameters can have multiple fields with the same name
  (e.g.,``http -f url a=1 a=2``)
* Added ``--check-status`` to exit with an error on HTTP 3xx, 4xx and
  5xx (3, 4, and 5, respectively)
* If the output is piped to another program or redirected to a file,
  the default behaviour is to only print the response body
  (It can still be overwritten via the ``--print`` flag.)
* Improved highlighting of HTTP headers
* Added query string parameters (``param==value``)
* Added support for terminal colors under Windows


`0.2.5`_ (2012-07-17)
---------------------

* Unicode characters in prettified JSON now don't get escaped for
  improved readability
* --auth now prompts for a password if only a username provided
* Added support for request payloads from a file path with automatic
  ``Content-Type`` (``http URL @/path``)
* Fixed missing query string when displaying the request headers via
  ``--verbose``
* Fixed Content-Type for requests with no data


`0.2.2`_ (2012-06-24)
---------------------

* The ``METHOD`` positional argument can now be omitted (defaults to
  ``GET``, or to ``POST`` with data)
* Fixed --verbose --form
* Added support for Tox


`0.2.1`_ (2012-06-13)
---------------------

* Added compatibility with ``requests-0.12.1``
* Dropped custom JSON and HTTP lexers in favor of the ones newly included
  in ``pygments-1.5``


`0.2.0`_ (2012-04-25)
---------------------

* Added Python 3 support
* Added the ability to print the HTTP request as well as the response
  (see ``--print`` and ``--verbose``)
* Added support for Digest authentication
* Added file upload support
  (``http -f POST file_field_name@/path/to/file``)
* Improved syntax highlighting for JSON
* Added support for field name escaping
* Many bug fixes


`0.1.6`_ (2012-03-04)
---------------------

* Fixed ``setup.py``


`0.1.5`_ (2012-03-04)
---------------------

* Many improvements and bug fixes


`0.1.4`_ (2012-02-28)
---------------------

* Many improvements and bug fixes


`0.1.0`_ (2012-02-25)
---------------------

* Initial public release


.. _`0.1.0`: https://github.com/jakubroztocil/httpie/commit/b966efa
.. _0.1.4: https://github.com/jakubroztocil/httpie/compare/b966efa...0.1.4
.. _0.1.5: https://github.com/jakubroztocil/httpie/compare/0.1.4...0.1.5
.. _0.1.6: https://github.com/jakubroztocil/httpie/compare/0.1.5...0.1.6
.. _0.2.0: https://github.com/jakubroztocil/httpie/compare/0.1.6...0.2.0
.. _0.2.1: https://github.com/jakubroztocil/httpie/compare/0.2.0...0.2.1
.. _0.2.2: https://github.com/jakubroztocil/httpie/compare/0.2.1...0.2.2
.. _0.2.5: https://github.com/jakubroztocil/httpie/compare/0.2.2...0.2.5
.. _0.2.6: https://github.com/jakubroztocil/httpie/compare/0.2.5...0.2.6
.. _0.2.7: https://github.com/jakubroztocil/httpie/compare/0.2.5...0.2.7
.. _0.3.0: https://github.com/jakubroztocil/httpie/compare/0.2.7...0.3.0
.. _0.4.0: https://github.com/jakubroztocil/httpie/compare/0.3.0...0.4.0
.. _0.4.1: https://github.com/jakubroztocil/httpie/compare/0.4.0...0.4.1
.. _0.5.0: https://github.com/jakubroztocil/httpie/compare/0.4.1...0.5.0
.. _0.5.1: https://github.com/jakubroztocil/httpie/compare/0.5.0...0.5.1
.. _0.6.0: https://github.com/jakubroztocil/httpie/compare/0.5.1...0.6.0
.. _0.7.1: https://github.com/jakubroztocil/httpie/compare/0.6.0...0.7.1
.. _0.8.0: https://github.com/jakubroztocil/httpie/compare/0.7.1...0.8.0
.. _0.9.0: https://github.com/jakubroztocil/httpie/compare/0.8.0...0.9.0
.. _0.9.1: https://github.com/jakubroztocil/httpie/compare/0.9.0...0.9.1
.. _0.9.2: https://github.com/jakubroztocil/httpie/compare/0.9.1...0.9.2
.. _0.9.3: https://github.com/jakubroztocil/httpie/compare/0.9.2...0.9.3
.. _0.9.4: https://github.com/jakubroztocil/httpie/compare/0.9.3...0.9.4
.. _0.9.6: https://github.com/jakubroztocil/httpie/compare/0.9.4...0.9.6
.. _0.9.8: https://github.com/jakubroztocil/httpie/compare/0.9.6...0.9.8
.. _0.9.9: https://github.com/jakubroztocil/httpie/compare/0.9.8...0.9.9
.. _1.0.0: https://github.com/jakubroztocil/httpie/compare/0.9.9...1.0.0
.. _1.0.1: https://github.com/jakubroztocil/httpie/compare/1.0.0...1.0.1
.. _1.0.2: https://github.com/jakubroztocil/httpie/compare/1.0.1...1.0.2
.. _1.0.3-dev: https://github.com/jakubroztocil/httpie/compare/1.0.2...master
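Several of the options recorded in the changelog above are easiest to grasp from a command line. The invocations below are illustrative sketches only: example.org stands in for any host, the paths are made up, and the exact output depends on the server.

    # query string parameters (0.2.6); example.org is a placeholder host
    http example.org/search q==httpie per_page==20

    # exit with status 3/4/5 on 3xx/4xx/5xx responses (0.2.6)
    http --check-status --ignore-stdin example.org/health

    # download mode (0.5.0)
    http --download example.org/files/big.tar.gz

    # unset a default header / send an empty-valued header (0.9.6)
    http example.org 'User-Agent:' 'X-Empty;'

    # true/false accepted for certificate verification (1.0.0)
    http --verify=false https://example.org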
CONTRIBUTING.rst  (new file, 117 lines)
@@ -0,0 +1,117 @@
######################
Contributing to HTTPie
######################

Bug reports and code and documentation patches are welcome. You can
help this project also by using the development version of HTTPie
and by reporting any bugs you might encounter.


1. Reporting bugs
=================

**It's important that you provide the full command argument list
as well as the output of the failing command.**
Use the ``--debug`` flag and copy&paste both the command and its output
to your bug report, e.g.:

.. code-block:: bash

    $ http --debug [COMPLETE ARGUMENT LIST THAT TRIGGERS THE ERROR]
    [COMPLETE OUTPUT]


2. Contributing Code and Docs
=============================

Before working on a new feature or a bug, please browse `existing issues`_
to see whether it has been previously discussed. If the change in question
is a bigger one, it's always good to discuss before you start working on
it.


Creating Development Environment
--------------------------------

Go to https://github.com/jakubroztocil/httpie and fork the project repository.

.. code-block:: bash

    git clone https://github.com/<YOU>/httpie

    cd httpie

    git checkout -b my_topical_branch

    # (Recommended: create a new virtualenv)

    # Install dev. requirements and also HTTPie (in editable mode
    # so that the `http' command will point to your working copy):
    make init


Making Changes
--------------

Please make sure your changes conform to `Style Guide for Python Code`_ (PEP8)
and that ``make pycodestyle`` passes.


Testing
-------

Before opening a pull requests, please make sure the `test suite`_ passes
in all of the `supported Python environments`_. You should also add tests
for any new features and bug fixes.

HTTPie uses `pytest`_ and `Tox`_ for testing.


Running all tests:
******************

.. code-block:: bash

    # Run all tests on the current Python interpreter with coverage
    make test

    # Run all tests in all of the supported and available Pythons via Tox
    make test-tox

    # Run all tests for code as well as packaging, etc.
    make test-all

    # Test PEP8 compliance
    make pycodestyle


Running specific tests:
***********************

.. code-block:: bash

    # Run specific tests on the current Python
    py.test tests/test_uploads.py
    py.test tests/test_uploads.py::TestMultipartFormDataFileUpload
    py.test tests/test_uploads.py::TestMultipartFormDataFileUpload::test_upload_ok

    # Run specific tests on the on all Pythons via Tox
    # (change to `tox -e py37' to limit Python version)
    tox -- tests/test_uploads.py --verbose
    tox -- tests/test_uploads.py::TestMultipartFormDataFileUpload --verbose
    tox -- tests/test_uploads.py::TestMultipartFormDataFileUpload::test_upload_ok --verbose

-----

See `Makefile`_ for additional development utilities.
Don't forget to add yourself to `AUTHORS`_!


.. _Tox: http://tox.testrun.org
.. _supported Python environments: https://github.com/jakubroztocil/httpie/blob/master/tox.ini
.. _existing issues: https://github.com/jakubroztocil/httpie/issues?state=open
.. _AUTHORS: https://github.com/jakubroztocil/httpie/blob/master/AUTHORS.rst
.. _Makefile: https://github.com/jakubroztocil/httpie/blob/master/Makefile
.. _pytest: http://pytest.org/
.. _Style Guide for Python Code: http://python.org/dev/peps/pep-0008/
.. _test suite: https://github.com/jakubroztocil/httpie/tree/master/tests
LICENSE  (2 lines changed)
@@ -1,4 +1,4 @@
-Copyright © 2012 Jakub Roztocil <jakub@roztocil.name>
+Copyright © 2012-2017 Jakub Roztocil <jakub@roztocil.co>

 Redistribution and use in source and binary forms, with or without
 modification, are permitted provided that the following conditions are met:
MANIFEST.in
@@ -1 +1,4 @@
-include README.rst LICENSE
+include LICENSE
+include README.rst
+include CHANGELOG.rst
+include AUTHORS.rst
Makefile  (new file, 148 lines)
@@ -0,0 +1,148 @@
###############################################################################
# See ./CONTRIBUTING.rst
###############################################################################

VERSION=$(shell grep __version__ httpie/__init__.py)
REQUIREMENTS="requirements-dev.txt"
TAG="\n\n\033[0;32m\#\#\# "
END=" \#\#\# \033[0m\n"


all: test


init: uninstall-httpie
	@echo $(TAG)Installing dev requirements$(END)
	pip install --upgrade -r $(REQUIREMENTS)

	@echo $(TAG)Installing HTTPie$(END)
	pip install --upgrade --editable .

	@echo

clean:
	@echo $(TAG)Cleaning up$(END)
	rm -rf .tox *.egg dist build .coverage .cache .pytest_cache httpie.egg-info
	find . -name '__pycache__' -delete -print -o -name '*.pyc' -delete -print
	@echo


###############################################################################
# Testing
###############################################################################


test: init
	@echo $(TAG)Running tests on the current Python interpreter with coverage $(END)
	py.test --cov ./httpie --cov ./tests --doctest-modules --verbose ./httpie ./tests
	@echo


# test-all is meant to test everything — even this Makefile
test-all: uninstall-all clean init test test-tox test-dist pycodestyle
	@echo


test-dist: test-sdist test-bdist-wheel
	@echo


test-tox: init
	@echo $(TAG)Running tests on all Pythons via Tox$(END)
	tox
	@echo


test-sdist: clean uninstall-httpie
	@echo $(TAG)Testing sdist build an installation$(END)
	python setup.py sdist
	pip install --force-reinstall --upgrade dist/*.gz
	which http
	@echo


test-bdist-wheel: clean uninstall-httpie
	@echo $(TAG)Testing wheel build an installation$(END)
	python setup.py bdist_wheel
	pip install --force-reinstall --upgrade dist/*.whl
	which http
	@echo


pycodestyle:
	which pycodestyle || pip install pycodestyle
	pycodestyle
	@echo


coveralls:
	which coveralls || pip install python-coveralls
	coveralls
	@echo


###############################################################################
# Publishing to PyPi
###############################################################################


publish: test-all publish-no-test


publish-no-test:
	@echo $(TAG)Testing wheel build an installation$(END)
	@echo "$(VERSION)"
	@echo "$(VERSION)" | grep -q "dev" && echo '!!!Not publishing dev version!!!' && exit 1 || echo ok
	python setup.py register
	python setup.py sdist upload
	python setup.py bdist_wheel upload
	@echo


###############################################################################
# Uninstalling
###############################################################################

uninstall-httpie:
	@echo $(TAG)Uninstalling httpie$(END)
	- pip uninstall --yes httpie &2>/dev/null

	@echo "Verifying…"
	cd .. && ! python -m httpie --version &2>/dev/null

	@echo "Done"
	@echo


uninstall-all: uninstall-httpie

	@echo $(TAG)Uninstalling httpie requirements$(END)
	- pip uninstall --yes pygments requests

	@echo $(TAG)Uninstalling development requirements$(END)
	- pip uninstall --yes -r $(REQUIREMENTS)


###############################################################################
# Docs
###############################################################################

pdf:
	# NOTE: rst2pdf needs to be installed manually and against a Python 2
	@echo "Converting README.rst to PDF…"
	rst2pdf \
	    --strip-elements-with-class=no-pdf \
	    README.rst \
	    -o README.pdf
	@echo "Done"
	@echo


###############################################################################
# Utils
###############################################################################


homebrew-formula-vars:
	extras/get-homebrew-formula-vars.py
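Driven from a repository checkout, the targets above compose into a simple development loop; the sequence below is just one plausible order, not the only one.

    make init         # install dev requirements and HTTPie in editable mode
    make test         # run the test suite with coverage on the current Python
    make pycodestyle  # check PEP 8 compliance
    make clean        # remove build and test artifacts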
README.rst  (1263 lines changed): file diff suppressed because it is too large.
extras/get-homebrew-formula-vars.py  (new executable file, 62 lines)
@@ -0,0 +1,62 @@
#!/usr/bin/env python3
"""
Generate URLs and file hashes to be included in the Homebrew formula
after a new release of HTTPie has been published on PyPi.

https://github.com/Homebrew/homebrew-core/blob/master/Formula/httpie.rb

"""
import hashlib
import requests


PACKAGES = [
    'httpie',
    'pygments',
    'requests',
    'certifi',
    'urllib3',
    'idna',
    'chardet',
    'PySocks',
]


def get_package_meta(package_name):
    api_url = 'https://pypi.python.org/pypi/{}/json'.format(package_name)
    resp = requests.get(api_url).json()
    hasher = hashlib.sha256()
    for release in resp['urls']:
        download_url = release['url']
        if download_url.endswith('.tar.gz'):
            hasher.update(requests.get(download_url).content)
            return {
                'name': package_name,
                'url': download_url,
                'sha256': hasher.hexdigest(),
            }
    else:
        raise RuntimeError(
            '{}: download not found: {}'.format(package_name, resp))


def main():
    package_meta_map = {
        package_name: get_package_meta(package_name)
        for package_name in PACKAGES
    }
    httpie_meta = package_meta_map.pop('httpie')
    print()
    print(' url "{url}"'.format(url=httpie_meta['url']))
    print(' sha256 "{sha256}"'.format(sha256=httpie_meta['sha256']))
    print()
    for dep_meta in package_meta_map.values():
        print(' resource "{name}" do'.format(name=dep_meta['name']))
        print(' url "{url}"'.format(url=dep_meta['url']))
        print(' sha256 "{sha256}"'.format(sha256=dep_meta['sha256']))
        print(' end')
        print('')


if __name__ == '__main__':
    main()
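The script is meant to be run after a release has landed on PyPI; it needs network access and the requests package installed. Either invocation below works from the repository root (the Makefile target shown earlier wraps the same command).

    ./extras/get-homebrew-formula-vars.py
    # or, via the Makefile:
    make homebrew-formula-vars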
extras/httpie-completion.bash  (new file, 23 lines)
@@ -0,0 +1,23 @@
#!/usr/bin/env bash


_http_complete() {
    local cur_word=${COMP_WORDS[COMP_CWORD]}
    local prev_word=${COMP_WORDS[COMP_CWORD - 1]}

    if [[ "$cur_word" == -* ]]; then
        _http_complete_options "$cur_word"
    fi
}

complete -o default -F _http_complete http

_http_complete_options() {
    local cur_word=$1
    local options="-j --json -f --form --pretty -s --style -p --print
    -v --verbose -h --headers -b --body -S --stream -o --output -d --download
    -c --continue --session --session-read-only -a --auth --auth-type --proxy
    --follow --verify --cert --cert-key --timeout --check-status --ignore-stdin
    --help --version --traceback --debug"
    COMPREPLY=( $( compgen -W "$options" -- "$cur_word" ) )
}
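One way to enable the completion above in an interactive shell is to source the file from ~/.bashrc; the checkout path below is an assumption and should be adjusted to wherever the repository lives.

    # assuming the repository is checked out at ~/httpie
    source ~/httpie/extras/httpie-completion.bash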
extras/httpie-completion.fish  (new file, 59 lines)
@@ -0,0 +1,59 @@
function __fish_httpie_auth_types
    echo "basic"\t"Basic HTTP auth"
    echo "digest"\t"Digest HTTP auth"
end

function __fish_httpie_styles
    echo "autumn"
    echo "borland"
    echo "bw"
    echo "colorful"
    echo "default"
    echo "emacs"
    echo "friendly"
    echo "fruity"
    echo "igor"
    echo "manni"
    echo "monokai"
    echo "murphy"
    echo "native"
    echo "paraiso-dark"
    echo "paraiso-light"
    echo "pastie"
    echo "perldoc"
    echo "rrt"
    echo "solarized"
    echo "tango"
    echo "trac"
    echo "vim"
    echo "vs"
    echo "xcode"
end

complete -x -c http -s s -l style -d 'Output coloring style (default is "monokai")' -A -a '(__fish_httpie_styles)'
complete -c http -s f -l form -d 'Data items from the command line are serialized as form fields'
complete -c http -s j -l json -d '(default) Data items from the command line are serialized as a JSON object'
complete -x -c http -l pretty -d 'Controls output processing' -a "all colors format none" -A
complete -x -c http -s p -l print -d 'String specifying what the output should contain'
complete -c http -s v -l verbose -d 'Print the whole request as well as the response'
complete -c http -s h -l headers -d 'Print only the response headers'
complete -c http -s b -l body -d 'Print only the response body'
complete -c http -s S -l stream -d 'Always stream the output by line'
complete -c http -s o -l output -d 'Save output to FILE'
complete -c http -s d -l download -d 'Do not print the response body to stdout'
complete -c http -s c -l continue -d 'Resume an interrupted download'
complete -x -c http -l session -d 'Create, or reuse and update a session'
complete -x -c http -s a -l auth -d 'If only the username is provided (-a username), HTTPie will prompt for the password'
complete -x -c http -l auth-type -d 'The authentication mechanism to be used' -a '(__fish_httpie_auth_types)' -A
complete -x -c http -l proxy -d 'String mapping protocol to the URL of the proxy'
complete -c http -l follow -d 'Allow full redirects'
complete -x -c http -l verify -d 'SSL cert verification'
complete -c http -l cert -d 'SSL cert'
complete -c http -l cert-key -d 'Private SSL cert key'
complete -x -c http -l timeout -d 'Connection timeout in seconds'
complete -c http -l check-status -d 'Error with non-200 HTTP status code'
complete -c http -l ignore-stdin -d 'Do not attempt to read stdin'
complete -c http -l help -d 'Show help'
complete -c http -l version -d 'Show version'
complete -c http -l traceback -d 'Prints exception traceback should one occur'
complete -c http -l debug -d 'Show debugging information'
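Fish picks up per-command completions from files named after the command in its completions directory, so one way to install the script above is to copy it there (the checkout path is an assumption):

    mkdir -p ~/.config/fish/completions
    cp ~/httpie/extras/httpie-completion.fish ~/.config/fish/completions/http.fish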
extras/httpie.rb  (new file, 69 lines)
@@ -0,0 +1,69 @@
# The latest Homebrew formula as submitted to Homebrew/homebrew-core.
# Only useful for testing until it gets accepted by homebrew maintainers.
# (It will need to be updated from the repo version before next release.)
#
# https://github.com/Homebrew/homebrew-core/blob/master/Formula/httpie.rb
#
class Httpie < Formula
  include Language::Python::Virtualenv

  desc "User-friendly cURL replacement (command-line HTTP client)"
  homepage "https://httpie.org/"
  url "https://files.pythonhosted.org/packages/44/ee/7177b743400d7f82a69bf30cb3c24ea4bb1f4aea68878bc540f732bf4940/httpie-1.0.0.tar.gz"
  sha256 "1650342d2eca2622092196bf106ab8f68ea2dbb2ed265d37191185618e159a25"
  head "https://github.com/jakubroztocil/httpie.git"

  bottle do
    cellar :any_skip_relocation
    sha256 "7e9db255e324dd63b66106ca62ed7e4e81f6634c624dec3ff49c293aba1072a6" => :mojave
    sha256 "437504a11416284b17d3a801c267d0fd5e15416f38cff3abf7ed99b096b4828a" => :high_sierra
    sha256 "10b25fc787076719b1f1f9c242c5e9d872ebd1c7a6d83e6f1af983a17cd8ca55" => :sierra
    sha256 "1bd35480d1ef401bdad9c322e7c1624aefc9b5056530ab990e327d0bc397e4fb" => :el_capitan
  end

  depends_on "python" ["3.6.5_1"]

  resource "pygments" do
    url "https://files.pythonhosted.org/packages/71/2a/2e4e77803a8bd6408a2903340ac498cb0a2181811af7c9ec92cb70b0308a/Pygments-2.2.0.tar.gz"
    sha256 "dbae1046def0efb574852fab9e90209b23f556367b5a320c0bcb871c77c3e8cc"
  end

  resource "requests" do
    url "https://files.pythonhosted.org/packages/97/10/92d25b93e9c266c94b76a5548f020f3f1dd0eb40649cb1993532c0af8f4c/requests-2.20.0.tar.gz"
    sha256 "99dcfdaaeb17caf6e526f32b6a7b780461512ab3f1d992187801694cba42770c"
  end

  resource "certifi" do
    url "https://files.pythonhosted.org/packages/41/b6/4f0cefba47656583217acd6cd797bc2db1fede0d53090fdc28ad2c8e0716/certifi-2018.10.15.tar.gz"
    sha256 "6d58c986d22b038c8c0df30d639f23a3e6d172a05c3583e766f4c0b785c0986a"
  end

  resource "urllib3" do
    url "https://files.pythonhosted.org/packages/a5/74/05ffd00b4b5c08306939c485869f5dc40cbc27357195b0a98b18e4c48893/urllib3-1.24.tar.gz"
    sha256 "41c3db2fc01e5b907288010dec72f9d0a74e37d6994e6eb56849f59fea2265ae"
  end

  resource "idna" do
    url "https://files.pythonhosted.org/packages/65/c4/80f97e9c9628f3cac9b98bfca0402ede54e0563b56482e3e6e45c43c4935/idna-2.7.tar.gz"
    sha256 "684a38a6f903c1d71d6d5fac066b58d7768af4de2b832e426ec79c30daa94a16"
  end

  resource "chardet" do
    url "https://files.pythonhosted.org/packages/fc/bb/a5768c230f9ddb03acc9ef3f0d4a3cf93462473795d18e9535498c8f929d/chardet-3.0.4.tar.gz"
    sha256 "84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae"
  end

  resource "PySocks" do
    url "https://files.pythonhosted.org/packages/53/12/6bf1d764f128636cef7408e8156b7235b150ea31650d0260969215bb8e7d/PySocks-1.6.8.tar.gz"
    sha256 "3fe52c55890a248676fd69dc9e3c4e811718b777834bcaab7a8125cf9deac672"
  end

  def install
    virtualenv_install_with_resources
  end

  test do
    raw_url = "https://raw.githubusercontent.com/Homebrew/homebrew-core/master/Formula/httpie.rb"
    assert_match "PYTHONPATH", shell_output("#{bin}/http --ignore-stdin #{raw_url}")
  end
end
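As the header comment notes, the formula is only useful for local testing before submission. With a Homebrew of that era, one way to exercise it is to install straight from the local file; this is a sketch, not an officially supported workflow.

    # requires Homebrew on macOS; installs from the local formula and runs its test block
    brew install --build-from-source ./extras/httpie.rb
    brew test httpie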
httpie.png  (binary file, not shown; size 446 KiB before, 681 KiB after)
httpie/__init__.py
@@ -2,18 +2,31 @@
 HTTPie - a CLI, cURL-like tool for humans.

 """
+__version__ = '1.0.2'
 __author__ = 'Jakub Roztocil'
-__version__ = '0.7.1'
 __licence__ = 'BSD'


 class ExitStatus:
-    """Exit status code constants."""
-    OK = 0
+    """Program exit code constants."""
+    SUCCESS = 0
     ERROR = 1
+    PLUGIN_ERROR = 7
+
+    # 128+2 SIGINT <http://www.tldp.org/LDP/abs/html/exitcodes.html>
+    ERROR_CTRL_C = 130
+
     ERROR_TIMEOUT = 2
+    ERROR_TOO_MANY_REDIRECTS = 6
+
     # Used only when requested with --check-status:
     ERROR_HTTP_3XX = 3
     ERROR_HTTP_4XX = 4
     ERROR_HTTP_5XX = 5
+
+
+EXIT_STATUS_LABELS = {
+    value: key
+    for key, value in ExitStatus.__dict__.items()
+    if key.isupper()
+}
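The constants above surface as process exit codes, which makes them easy to branch on from a shell script when --check-status is used. A small sketch (example.org is a placeholder host; the comments name the constants from the class shown above):

    http --check-status --ignore-stdin example.org/nonexistent
    case $? in
        3) echo 'redirected (ERROR_HTTP_3XX)' ;;
        4) echo 'client error (ERROR_HTTP_4XX)' ;;
        5) echo 'server error (ERROR_HTTP_5XX)' ;;
    esac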
httpie/__main__.py
@@ -3,8 +3,16 @@
 """
 import sys
-from .core import main
+
+
+def main():
+    try:
+        from .core import main
+        sys.exit(main())
+    except KeyboardInterrupt:
+        from . import ExitStatus
+        sys.exit(ExitStatus.ERROR_CTRL_C)


 if __name__ == '__main__':
-    sys.exit(main())
+    main()
httpie/cli.py  (258 lines changed)
@@ -3,22 +3,29 @@
 NOTE: the CLI interface may change before reaching v1.0.

 """
+# noinspection PyCompatibility
+from argparse import (
+    RawDescriptionHelpFormatter, FileType,
+    OPTIONAL, ZERO_OR_MORE, SUPPRESS
+)
 from textwrap import dedent, wrap
-#noinspection PyCompatibility
-from argparse import (RawDescriptionHelpFormatter, FileType,
-                      OPTIONAL, ZERO_OR_MORE, SUPPRESS)
-
-from . import __doc__
-from . import __version__
-from .plugins.builtin import BuiltinAuthPlugin
-from .plugins import plugin_manager
-from .sessions import DEFAULT_SESSIONS_DIR
-from .output import AVAILABLE_STYLES, DEFAULT_STYLE
-from .input import (Parser, AuthCredentialsArgType, KeyValueArgType,
-                    SEP_PROXY, SEP_CREDENTIALS, SEP_GROUP_ITEMS,
-                    OUT_REQ_HEAD, OUT_REQ_BODY, OUT_RESP_HEAD,
-                    OUT_RESP_BODY, OUTPUT_OPTIONS, OUTPUT_OPTIONS_DEFAULT,
-                    PRETTY_MAP, PRETTY_STDOUT_TTY_ONLY, SessionNameValidator)
+
+from httpie import __doc__, __version__
+from httpie.input import (
+    HTTPieArgumentParser, KeyValueArgType,
+    SEP_PROXY, SEP_GROUP_ALL_ITEMS,
+    OUT_REQ_HEAD, OUT_REQ_BODY, OUT_RESP_HEAD,
+    OUT_RESP_BODY, OUTPUT_OPTIONS,
+    OUTPUT_OPTIONS_DEFAULT, PRETTY_MAP,
+    PRETTY_STDOUT_TTY_ONLY, SessionNameValidator,
+    readable_file_arg, SSL_VERSION_ARG_MAPPING
+)
+from httpie.output.formatters.colors import (
+    AVAILABLE_STYLES, DEFAULT_STYLE, AUTO_STYLE
+)
+from httpie.plugins import plugin_manager
+from httpie.plugins.builtin import BuiltinAuthPlugin
+from httpie.sessions import DEFAULT_SESSIONS_DIR


 class HTTPieHelpFormatter(RawDescriptionHelpFormatter):
@@ -39,7 +46,9 @@ class HTTPieHelpFormatter(RawDescriptionHelpFormatter):
         text = dedent(text).strip() + '\n\n'
         return text.splitlines()

-parser = Parser(
+
+parser = HTTPieArgumentParser(
+    prog='http',
     formatter_class=HTTPieHelpFormatter,
     description='%s <http://httpie.org>' % __doc__.strip(),
     epilog=dedent("""
@@ -48,9 +57,9 @@ parser = Parser(

     Suggestions and bug reports are greatly appreciated:

-        https://github.com/jkbr/httpie/issues
+        https://github.com/jakubroztocil/httpie/issues

-    """)
+    """),
 )


@@ -59,7 +68,7 @@ parser = Parser(
 #######################################################################

 positional = parser.add_argument_group(
-    title='Positional arguments',
+    title='Positional Arguments',
     description=dedent("""
     These arguments come after any flags and in the order they are listed here.
     Only URL is required.
@@ -87,14 +96,21 @@ positional.add_argument(
     metavar='URL',
     help="""
     The scheme defaults to 'http://' if the URL does not include one.
+    (You can override this with: --default-scheme=https)
+
+    You can also use a shorthand for localhost
+
+        $ http :3000    # => http://localhost:3000
+        $ http :/foo    # => http://localhost/foo
+
     """
 )
 positional.add_argument(
     'items',
-    metavar='REQUEST ITEM',
+    metavar='REQUEST_ITEM',
     nargs=ZERO_OR_MORE,
-    type=KeyValueArgType(*SEP_GROUP_ITEMS),
+    default=None,
+    type=KeyValueArgType(*SEP_GROUP_ALL_ITEMS),
     help=r"""
     Optional key-value pairs to be included in the request. The separator used
     determines the type:
@@ -112,13 +128,21 @@ positional.add_argument(

         name=HTTPie  language=Python  description='CLI HTTP client'

+    ':=' Non-string JSON data fields (only with --json, -j):
+
+        awesome:=true  amount:=42  colors:='["red", "green", "blue"]'
+
     '@' Form file fields (only with --form, -f):
|
||||||
|
|
||||||
cs@~/Documents/CV.pdf
|
cs@~/Documents/CV.pdf
|
||||||
|
|
||||||
':=' Non-string JSON data fields (only with --json, -j):
|
'=@' A data field like '=', but takes a file path and embeds its content:
|
||||||
|
|
||||||
awesome:=true amount:=42 colors:='["red", "green", "blue"]'
|
essay=@Documents/essay.txt
|
||||||
|
|
||||||
|
':=@' A raw JSON field like ':=', but takes a file path and embeds its content:
|
||||||
|
|
||||||
|
package:=@./package.json
|
||||||
|
|
||||||
You can use a backslash to escape a colliding separator in the field name:
|
You can use a backslash to escape a colliding separator in the field name:
|
||||||
|
|
||||||
@ -133,7 +157,7 @@ positional.add_argument(
|
|||||||
#######################################################################
|
#######################################################################
|
||||||
|
|
||||||
content_type = parser.add_argument_group(
|
content_type = parser.add_argument_group(
|
||||||
title='Predefined content types',
|
title='Predefined Content Types',
|
||||||
description=None
|
description=None
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -165,7 +189,7 @@ content_type.add_argument(
|
|||||||
# Output processing
|
# Output processing
|
||||||
#######################################################################
|
#######################################################################
|
||||||
|
|
||||||
output_processing = parser.add_argument_group(title='Output processing')
|
output_processing = parser.add_argument_group(title='Output Processing')
|
||||||
|
|
||||||
output_processing.add_argument(
|
output_processing.add_argument(
|
||||||
'--pretty',
|
'--pretty',
|
||||||
@ -186,22 +210,23 @@ output_processing.add_argument(
|
|||||||
default=DEFAULT_STYLE,
|
default=DEFAULT_STYLE,
|
||||||
choices=AVAILABLE_STYLES,
|
choices=AVAILABLE_STYLES,
|
||||||
help="""
|
help="""
|
||||||
Output coloring style (default is "{default}"). On of:
|
Output coloring style (default is "{default}"). One of:
|
||||||
|
|
||||||
{available}
|
{available_styles}
|
||||||
|
|
||||||
For this option to work properly, please make sure that the $TERM
|
The "{auto_style}" style follows your terminal's ANSI color styles.
|
||||||
environment variable is set to "xterm-256color" or similar
|
|
||||||
|
For non-{auto_style} styles to work properly, please make sure that the
|
||||||
|
$TERM environment variable is set to "xterm-256color" or similar
|
||||||
(e.g., via `export TERM=xterm-256color' in your ~/.bashrc).
|
(e.g., via `export TERM=xterm-256color' in your ~/.bashrc).
|
||||||
|
|
||||||
"""
|
""".format(
|
||||||
.format(
|
|
||||||
default=DEFAULT_STYLE,
|
default=DEFAULT_STYLE,
|
||||||
available='\n'.join(
|
available_styles='\n'.join(
|
||||||
'{0: >20}'.format(line.strip())
|
'{0}{1}'.format(8 * ' ', line.strip())
|
||||||
for line in
|
for line in wrap(', '.join(sorted(AVAILABLE_STYLES)), 60)
|
||||||
wrap(' '.join(sorted(AVAILABLE_STYLES)), 60)
|
).rstrip(),
|
||||||
),
|
auto_style=AUTO_STYLE,
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -209,7 +234,7 @@ output_processing.add_argument(
|
|||||||
#######################################################################
|
#######################################################################
|
||||||
# Output options
|
# Output options
|
||||||
#######################################################################
|
#######################################################################
|
||||||
output_options = parser.add_argument_group(title='Output options')
|
output_options = parser.add_argument_group(title='Output Options')
|
||||||
|
|
||||||
output_options.add_argument(
|
output_options.add_argument(
|
||||||
'--print', '-p',
|
'--print', '-p',
|
||||||
@ -237,17 +262,6 @@ output_options.add_argument(
|
|||||||
default=OUTPUT_OPTIONS_DEFAULT,
|
default=OUTPUT_OPTIONS_DEFAULT,
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
output_options.add_argument(
|
|
||||||
'--verbose', '-v',
|
|
||||||
dest='output_options',
|
|
||||||
action='store_const',
|
|
||||||
const=''.join(OUTPUT_OPTIONS),
|
|
||||||
help="""
|
|
||||||
Print the whole request as well as the response. Shortcut for --print={0}.
|
|
||||||
|
|
||||||
"""
|
|
||||||
.format(''.join(OUTPUT_OPTIONS))
|
|
||||||
)
|
|
||||||
output_options.add_argument(
|
output_options.add_argument(
|
||||||
'--headers', '-h',
|
'--headers', '-h',
|
||||||
dest='output_options',
|
dest='output_options',
|
||||||
@ -271,6 +285,42 @@ output_options.add_argument(
|
|||||||
.format(OUT_RESP_BODY)
|
.format(OUT_RESP_BODY)
|
||||||
)
|
)
|
||||||
|
|
||||||
|
output_options.add_argument(
|
||||||
|
'--verbose', '-v',
|
||||||
|
dest='verbose',
|
||||||
|
action='store_true',
|
||||||
|
help="""
|
||||||
|
Verbose output. Print the whole request as well as the response. Also print
|
||||||
|
any intermediary requests/responses (such as redirects).
|
||||||
|
It's a shortcut for: --all --print={0}
|
||||||
|
|
||||||
|
"""
|
||||||
|
.format(''.join(OUTPUT_OPTIONS))
|
||||||
|
)
|
||||||
|
output_options.add_argument(
|
||||||
|
'--all',
|
||||||
|
default=False,
|
||||||
|
action='store_true',
|
||||||
|
help="""
|
||||||
|
By default, only the final request/response is shown. Use this flag to show
|
||||||
|
any intermediary requests/responses as well. Intermediary requests include
|
||||||
|
followed redirects (with --follow), the first unauthorized request when
|
||||||
|
Digest auth is used (--auth=digest), etc.
|
||||||
|
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
output_options.add_argument(
|
||||||
|
'--history-print', '-P',
|
||||||
|
dest='output_options_history',
|
||||||
|
metavar='WHAT',
|
||||||
|
help="""
|
||||||
|
The same as --print, -p but applies only to intermediary requests/responses
|
||||||
|
(such as redirects) when their inclusion is enabled with --all. If this
|
||||||
|
options is not specified, then they are formatted the same way as the final
|
||||||
|
response.
|
||||||
|
|
||||||
|
"""
|
||||||
|
)
|
||||||
output_options.add_argument(
|
output_options.add_argument(
|
||||||
'--stream', '-S',
|
'--stream', '-S',
|
||||||
action='store_true',
|
action='store_true',
|
||||||
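The --verbose flag introduced above is described as a shortcut for --all plus a full --print selection. A minimal argparse sketch of that expansion (the 'HBhb' value is an assumption standing in for OUTPUT_OPTIONS, and this is not HTTPie's actual parser):

    import argparse

    OUTPUT_OPTIONS = 'HBhb'  # assumed: request head/body + response head/body

    parser = argparse.ArgumentParser()
    parser.add_argument('--verbose', '-v', action='store_true')
    parser.add_argument('--all', action='store_true', default=False)
    parser.add_argument('--print', '-p', dest='output_options', default='hb')

    args = parser.parse_args(['--verbose'])
    if args.verbose:
        # Expand the shortcut after parsing.
        args.all = True
        args.output_options = OUTPUT_OPTIONS

    print(args.all, args.output_options)  # True HBhb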
@ -295,8 +345,9 @@ output_options.add_argument(
|
|||||||
dest='output_file',
|
dest='output_file',
|
||||||
metavar='FILE',
|
metavar='FILE',
|
||||||
help="""
|
help="""
|
||||||
Save output to FILE. If --download is set, then only the response body is
|
Save output to FILE instead of stdout. If --download is also set, then only
|
||||||
saved to the file. Other parts of the HTTP exchange are printed to stderr.
|
the response body is saved to FILE. Other parts of the HTTP exchange are
|
||||||
|
printed to stderr.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
@ -365,7 +416,6 @@ sessions.add_argument(
|
|||||||
"""
|
"""
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
#######################################################################
|
#######################################################################
|
||||||
# Authentication
|
# Authentication
|
||||||
#######################################################################
|
#######################################################################
|
||||||
@ -374,8 +424,8 @@ sessions.add_argument(
|
|||||||
auth = parser.add_argument_group(title='Authentication')
|
auth = parser.add_argument_group(title='Authentication')
|
||||||
auth.add_argument(
|
auth.add_argument(
|
||||||
'--auth', '-a',
|
'--auth', '-a',
|
||||||
|
default=None,
|
||||||
metavar='USER[:PASS]',
|
metavar='USER[:PASS]',
|
||||||
type=AuthCredentialsArgType(SEP_CREDENTIALS),
|
|
||||||
help="""
|
help="""
|
||||||
If only the username is provided (-a username), HTTPie will prompt
|
If only the username is provided (-a username), HTTPie will prompt
|
||||||
for the password.
|
for the password.
|
||||||
@ -383,11 +433,22 @@ auth.add_argument(
|
|||||||
""",
|
""",
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class _AuthTypeLazyChoices(object):
|
||||||
|
# Needed for plugin testing
|
||||||
|
|
||||||
|
def __contains__(self, item):
|
||||||
|
return item in plugin_manager.get_auth_plugin_mapping()
|
||||||
|
|
||||||
|
def __iter__(self):
|
||||||
|
return iter(sorted(plugin_manager.get_auth_plugin_mapping().keys()))
|
||||||
|
|
||||||
|
|
||||||
_auth_plugins = plugin_manager.get_auth_plugins()
|
_auth_plugins = plugin_manager.get_auth_plugins()
|
||||||
auth.add_argument(
|
auth.add_argument(
|
||||||
'--auth-type',
|
'--auth-type', '-A',
|
||||||
choices=[plugin.auth_type for plugin in _auth_plugins],
|
choices=_AuthTypeLazyChoices(),
|
||||||
default=_auth_plugins[0].auth_type,
|
default=None,
|
||||||
help="""
|
help="""
|
||||||
The authentication mechanism to be used. Defaults to "{default}".
|
The authentication mechanism to be used. Defaults to "{default}".
|
||||||
|
|
||||||
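The _AuthTypeLazyChoices helper above works because argparse only needs membership testing and iteration from whatever object is passed as choices, so the plugin registry can be consulted lazily at validation time. A stand-alone sketch of the pattern (REGISTRY is a hypothetical stand-in for the plugin registry):

    import argparse

    REGISTRY = {'basic': object(), 'digest': object()}  # hypothetical plugin registry

    class LazyChoices(object):
        def __contains__(self, item):
            return item in REGISTRY          # used for validation

        def __iter__(self):
            return iter(sorted(REGISTRY))    # used for help/error listings

    parser = argparse.ArgumentParser()
    parser.add_argument('--auth-type', '-A', choices=LazyChoices(), default=None)
    print(parser.parse_args(['--auth-type', 'digest']).auth_type)  # digest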
@ -422,31 +483,31 @@ network.add_argument(
|
|||||||
'--proxy',
|
'--proxy',
|
||||||
default=[],
|
default=[],
|
||||||
action='append',
|
action='append',
|
||||||
metavar='PROTOCOL:HOST',
|
metavar='PROTOCOL:PROXY_URL',
|
||||||
type=KeyValueArgType(SEP_PROXY),
|
type=KeyValueArgType(SEP_PROXY),
|
||||||
help="""
|
help="""
|
||||||
String mapping protocol to the URL of the proxy (e.g. http:foo.bar:3128).
|
String mapping protocol to the URL of the proxy
|
||||||
You can specify multiple proxies with different protocols.
|
(e.g. http:http://foo.bar:3128). You can specify multiple proxies with
|
||||||
|
different protocols.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
)
|
)
|
||||||
network.add_argument(
|
network.add_argument(
|
||||||
'--follow',
|
'--follow', '-F',
|
||||||
default=False,
|
default=False,
|
||||||
action='store_true',
|
action='store_true',
|
||||||
help="""
|
help="""
|
||||||
Set this flag if full redirects are allowed (e.g. re-POST-ing of data at
|
Follow 30x Location redirects.
|
||||||
new Location).
|
|
||||||
|
|
||||||
"""
|
"""
|
||||||
)
|
)
|
||||||
|
|
||||||
network.add_argument(
|
network.add_argument(
|
||||||
'--verify',
|
'--max-redirects',
|
||||||
default='yes',
|
type=int,
|
||||||
|
default=30,
|
||||||
help="""
|
help="""
|
||||||
Set to "no" to skip checking the host's SSL certificate. You can also pass
|
By default, requests have a limit of 30 redirects (works with --follow).
|
||||||
the path to a CA_BUNDLE file for private certs. You can also set the
|
|
||||||
REQUESTS_CA_BUNDLE environment variable. Defaults to "yes".
|
|
||||||
|
|
||||||
"""
|
"""
|
||||||
)
|
)
|
||||||
@ -480,6 +541,57 @@ network.add_argument(
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
#######################################################################
|
||||||
|
# SSL
|
||||||
|
#######################################################################
|
||||||
|
|
||||||
|
ssl = parser.add_argument_group(title='SSL')
|
||||||
|
ssl.add_argument(
|
||||||
|
'--verify',
|
||||||
|
default='yes',
|
||||||
|
help="""
|
||||||
|
Set to "no" (or "false") to skip checking the host's SSL certificate.
|
||||||
|
Defaults to "yes" ("true"). You can also pass the path to a CA_BUNDLE file
|
||||||
|
for private certs. (Or you can set the REQUESTS_CA_BUNDLE environment
|
||||||
|
variable instead.)
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
ssl.add_argument(
|
||||||
|
'--ssl', # TODO: Maybe something more general, such as --secure-protocol?
|
||||||
|
dest='ssl_version',
|
||||||
|
choices=list(sorted(SSL_VERSION_ARG_MAPPING.keys())),
|
||||||
|
help="""
|
||||||
|
The desired protocol version to use. This will default to
|
||||||
|
SSL v2.3 which will negotiate the highest protocol that both
|
||||||
|
the server and your installation of OpenSSL support. Available protocols
|
||||||
|
may vary depending on OpenSSL installation (only the supported ones
|
||||||
|
are shown here).
|
||||||
|
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
ssl.add_argument(
|
||||||
|
'--cert',
|
||||||
|
default=None,
|
||||||
|
type=readable_file_arg,
|
||||||
|
help="""
|
||||||
|
You can specify a local cert to use as client side SSL certificate.
|
||||||
|
This file may either contain both private key and certificate or you may
|
||||||
|
specify --cert-key separately.
|
||||||
|
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
|
||||||
|
ssl.add_argument(
|
||||||
|
'--cert-key',
|
||||||
|
default=None,
|
||||||
|
type=readable_file_arg,
|
||||||
|
help="""
|
||||||
|
The private key to use with SSL. Only needed if --cert is given and the
|
||||||
|
certificate file does not contain the private key.
|
||||||
|
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
|
||||||
#######################################################################
|
#######################################################################
|
||||||
# Troubleshooting
|
# Troubleshooting
|
||||||
#######################################################################
|
#######################################################################
|
||||||
@ -487,7 +599,7 @@ network.add_argument(
|
|||||||
troubleshooting = parser.add_argument_group(title='Troubleshooting')
|
troubleshooting = parser.add_argument_group(title='Troubleshooting')
|
||||||
|
|
||||||
troubleshooting.add_argument(
|
troubleshooting.add_argument(
|
||||||
'--ignore-stdin',
|
'--ignore-stdin', '-I',
|
||||||
action='store_true',
|
action='store_true',
|
||||||
default=False,
|
default=False,
|
||||||
help="""
|
help="""
|
||||||
@ -518,7 +630,15 @@ troubleshooting.add_argument(
|
|||||||
action='store_true',
|
action='store_true',
|
||||||
default=False,
|
default=False,
|
||||||
help="""
|
help="""
|
||||||
Prints exception traceback should one occur.
|
Prints the exception traceback should one occur.
|
||||||
|
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
troubleshooting.add_argument(
|
||||||
|
'--default-scheme',
|
||||||
|
default="http",
|
||||||
|
help="""
|
||||||
|
The default scheme to use if not specified in the URL.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
)
|
)
|
||||||
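The new --default-scheme option works together with the "scheme defaults to http://" behaviour documented earlier. A simplified sketch of applying a default scheme to a bare URL (HTTPie's real handling, including the localhost shorthand, is more involved):

    def apply_default_scheme(url, default_scheme='http'):
        # Only prepend a scheme when the URL does not already carry one.
        if '://' not in url:
            url = '%s://%s' % (default_scheme, url)
        return url

    print(apply_default_scheme('example.org/foo'))                      # http://example.org/foo
    print(apply_default_scheme('example.org', default_scheme='https'))  # https://example.org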
@ -527,8 +647,8 @@ troubleshooting.add_argument(
|
|||||||
action='store_true',
|
action='store_true',
|
||||||
default=False,
|
default=False,
|
||||||
help="""
|
help="""
|
||||||
Prints exception traceback should one occur, and also other information
|
Prints the exception traceback should one occur, as well as other
|
||||||
that is useful for debugging HTTPie itself and for reporting bugs.
|
information useful for debugging HTTPie itself and for reporting bugs.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
)
|
)
|
||||||
|
httpie/client.py (165 lines changed)
@ -1,91 +1,174 @@
|
|||||||
import json
|
import json
|
||||||
import sys
|
import sys
|
||||||
from pprint import pformat
|
|
||||||
|
|
||||||
import requests
|
import requests
|
||||||
|
from requests.adapters import HTTPAdapter
|
||||||
|
from requests.structures import CaseInsensitiveDict
|
||||||
|
|
||||||
from . import sessions
|
from httpie import sessions
|
||||||
from . import __version__
|
from httpie import __version__
|
||||||
from .plugins import plugin_manager
|
from httpie.compat import str
|
||||||
|
from httpie.input import SSL_VERSION_ARG_MAPPING
|
||||||
|
from httpie.plugins import plugin_manager
|
||||||
|
from httpie.utils import repr_dict_nice
|
||||||
|
|
||||||
|
try:
|
||||||
|
# https://urllib3.readthedocs.io/en/latest/security.html
|
||||||
|
# noinspection PyPackageRequirements
|
||||||
|
import urllib3
|
||||||
|
urllib3.disable_warnings()
|
||||||
|
except (ImportError, AttributeError):
|
||||||
|
# In some rare cases, the user may have an old version of the requests
|
||||||
|
# or urllib3, and there is no method called "disable_warnings." In these
|
||||||
|
# cases, we don't need to call the method.
|
||||||
|
# They may get some noisy output but execution shouldn't die. Move on.
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
FORM = 'application/x-www-form-urlencoded; charset=utf-8'
|
FORM_CONTENT_TYPE = 'application/x-www-form-urlencoded; charset=utf-8'
|
||||||
JSON = 'application/json; charset=utf-8'
|
JSON_CONTENT_TYPE = 'application/json'
|
||||||
|
JSON_ACCEPT = '{0}, */*'.format(JSON_CONTENT_TYPE)
|
||||||
DEFAULT_UA = 'HTTPie/%s' % __version__
|
DEFAULT_UA = 'HTTPie/%s' % __version__
|
||||||
|
|
||||||
|
|
||||||
|
class HTTPieHTTPAdapter(HTTPAdapter):
|
||||||
|
|
||||||
|
def __init__(self, ssl_version=None, **kwargs):
|
||||||
|
self._ssl_version = ssl_version
|
||||||
|
super(HTTPieHTTPAdapter, self).__init__(**kwargs)
|
||||||
|
|
||||||
|
def init_poolmanager(self, *args, **kwargs):
|
||||||
|
kwargs['ssl_version'] = self._ssl_version
|
||||||
|
super(HTTPieHTTPAdapter, self).init_poolmanager(*args, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
def get_requests_session(ssl_version):
|
||||||
|
requests_session = requests.Session()
|
||||||
|
requests_session.mount(
|
||||||
|
'https://',
|
||||||
|
HTTPieHTTPAdapter(ssl_version=ssl_version)
|
||||||
|
)
|
||||||
|
for cls in plugin_manager.get_transport_plugins():
|
||||||
|
transport_plugin = cls()
|
||||||
|
requests_session.mount(prefix=transport_plugin.prefix,
|
||||||
|
adapter=transport_plugin.get_adapter())
|
||||||
|
return requests_session
|
||||||
|
|
||||||
|
|
||||||
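The HTTPieHTTPAdapter/get_requests_session pair above pins the protocol version by forwarding ssl_version into urllib3's pool manager. A minimal version of the same mechanism, assuming a requests/urllib3 combination whose PoolManager accepts ssl_version:

    import ssl

    import requests
    from requests.adapters import HTTPAdapter

    class SSLVersionAdapter(HTTPAdapter):
        """Mount-level adapter that forces a specific SSL/TLS protocol."""

        def __init__(self, ssl_version=None, **kwargs):
            self._ssl_version = ssl_version
            super(SSLVersionAdapter, self).__init__(**kwargs)

        def init_poolmanager(self, *args, **kwargs):
            kwargs['ssl_version'] = self._ssl_version
            super(SSLVersionAdapter, self).init_poolmanager(*args, **kwargs)

    session = requests.Session()
    session.mount('https://', SSLVersionAdapter(ssl_version=ssl.PROTOCOL_TLSv1_2))
    # session.get('https://example.org')  # would negotiate TLS 1.2 only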
def get_response(args, config_dir):
|
def get_response(args, config_dir):
|
||||||
"""Send the request and return a `request.Response`."""
|
"""Send the request and return a `request.Response`."""
|
||||||
|
|
||||||
requests_kwargs = get_requests_kwargs(args)
|
ssl_version = None
|
||||||
|
if args.ssl_version:
|
||||||
|
ssl_version = SSL_VERSION_ARG_MAPPING[args.ssl_version]
|
||||||
|
|
||||||
if args.debug:
|
requests_session = get_requests_session(ssl_version)
|
||||||
sys.stderr.write('\n>>> requests.request(%s)\n\n'
|
requests_session.max_redirects = args.max_redirects
|
||||||
% pformat(requests_kwargs))
|
|
||||||
|
|
||||||
if not args.session and not args.session_read_only:
|
if not args.session and not args.session_read_only:
|
||||||
response = requests.request(**requests_kwargs)
|
kwargs = get_requests_kwargs(args)
|
||||||
|
if args.debug:
|
||||||
|
dump_request(kwargs)
|
||||||
|
response = requests_session.request(**kwargs)
|
||||||
else:
|
else:
|
||||||
response = sessions.get_response(
|
response = sessions.get_response(
|
||||||
|
requests_session=requests_session,
|
||||||
args=args,
|
args=args,
|
||||||
config_dir=config_dir,
|
config_dir=config_dir,
|
||||||
session_name=args.session or args.session_read_only,
|
session_name=args.session or args.session_read_only,
|
||||||
requests_kwargs=requests_kwargs,
|
|
||||||
read_only=bool(args.session_read_only),
|
read_only=bool(args.session_read_only),
|
||||||
)
|
)
|
||||||
|
|
||||||
return response
|
return response
|
||||||
|
|
||||||
|
|
||||||
def get_requests_kwargs(args):
|
def dump_request(kwargs):
|
||||||
"""Translate our `args` into `requests.request` keyword arguments."""
|
sys.stderr.write('\n>>> requests.request(**%s)\n\n'
|
||||||
|
% repr_dict_nice(kwargs))
|
||||||
|
|
||||||
implicit_headers = {
|
|
||||||
|
def finalize_headers(headers):
|
||||||
|
final_headers = {}
|
||||||
|
for name, value in headers.items():
|
||||||
|
if value is not None:
|
||||||
|
|
||||||
|
# >leading or trailing LWS MAY be removed without
|
||||||
|
# >changing the semantics of the field value"
|
||||||
|
# -https://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html
|
||||||
|
# Also, requests raises `InvalidHeader` for leading spaces.
|
||||||
|
value = value.strip()
|
||||||
|
|
||||||
|
if isinstance(value, str):
|
||||||
|
# See: https://github.com/jakubroztocil/httpie/issues/212
|
||||||
|
value = value.encode('utf8')
|
||||||
|
|
||||||
|
final_headers[name] = value
|
||||||
|
return final_headers
|
||||||
|
|
||||||
|
|
||||||
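A trimmed-down, runnable mirror of the finalize_headers() logic added above: surrounding whitespace is dropped (requests rejects leading spaces) and string values are sent as UTF-8 bytes, while None values pass through so default headers can be unset.

    def finalize_headers(headers):
        final_headers = {}
        for name, value in headers.items():
            if value is not None:
                value = value.strip()          # leading/trailing LWS is not significant
                if isinstance(value, str):
                    value = value.encode('utf8')
            final_headers[name] = value
        return final_headers

    print(finalize_headers({'X-API-Token': '  secret ', 'Accept-Encoding': None}))
    # {'X-API-Token': b'secret', 'Accept-Encoding': None}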
|
def get_default_headers(args):
|
||||||
|
default_headers = CaseInsensitiveDict({
|
||||||
'User-Agent': DEFAULT_UA
|
'User-Agent': DEFAULT_UA
|
||||||
}
|
})
|
||||||
|
|
||||||
auto_json = args.data and not args.form
|
auto_json = args.data and not args.form
|
||||||
# FIXME: Accept is set to JSON with `http url @./file.txt`.
|
|
||||||
if args.json or auto_json:
|
if args.json or auto_json:
|
||||||
implicit_headers['Accept'] = 'application/json'
|
default_headers['Accept'] = JSON_ACCEPT
|
||||||
if args.json or (auto_json and args.data):
|
if args.json or (auto_json and args.data):
|
||||||
implicit_headers['Content-Type'] = JSON
|
default_headers['Content-Type'] = JSON_CONTENT_TYPE
|
||||||
|
|
||||||
if isinstance(args.data, dict):
|
|
||||||
if args.data:
|
|
||||||
args.data = json.dumps(args.data)
|
|
||||||
else:
|
|
||||||
# We need to set data to an empty string to prevent requests
|
|
||||||
# from assigning an empty list to `response.request.data`.
|
|
||||||
args.data = ''
|
|
||||||
|
|
||||||
elif args.form and not args.files:
|
elif args.form and not args.files:
|
||||||
# If sending files, `requests` will set
|
# If sending files, `requests` will set
|
||||||
# the `Content-Type` for us.
|
# the `Content-Type` for us.
|
||||||
implicit_headers['Content-Type'] = FORM
|
default_headers['Content-Type'] = FORM_CONTENT_TYPE
|
||||||
|
return default_headers
|
||||||
|
|
||||||
for name, value in implicit_headers.items():
|
|
||||||
if name not in args.headers:
|
|
||||||
args.headers[name] = value
|
|
||||||
|
|
||||||
credentials = None
|
def get_requests_kwargs(args, base_headers=None):
|
||||||
if args.auth:
|
"""
|
||||||
auth_plugin = plugin_manager.get_auth_plugin(args.auth_type)()
|
Translate our `args` into `requests.request` keyword arguments.
|
||||||
credentials = auth_plugin.get_auth(args.auth.key, args.auth.value)
|
|
||||||
|
"""
|
||||||
|
# Serialize JSON data, if needed.
|
||||||
|
data = args.data
|
||||||
|
auto_json = data and not args.form
|
||||||
|
if (args.json or auto_json) and isinstance(data, dict):
|
||||||
|
if data:
|
||||||
|
data = json.dumps(data)
|
||||||
|
else:
|
||||||
|
# We need to set data to an empty string to prevent requests
|
||||||
|
# from assigning an empty list to `response.request.data`.
|
||||||
|
data = ''
|
||||||
|
|
||||||
|
# Finalize headers.
|
||||||
|
headers = get_default_headers(args)
|
||||||
|
if base_headers:
|
||||||
|
headers.update(base_headers)
|
||||||
|
headers.update(args.headers)
|
||||||
|
headers = finalize_headers(headers)
|
||||||
|
|
||||||
|
cert = None
|
||||||
|
if args.cert:
|
||||||
|
cert = args.cert
|
||||||
|
if args.cert_key:
|
||||||
|
cert = cert, args.cert_key
|
||||||
|
|
||||||
kwargs = {
|
kwargs = {
|
||||||
'stream': True,
|
'stream': True,
|
||||||
'method': args.method.lower(),
|
'method': args.method.lower(),
|
||||||
'url': args.url,
|
'url': args.url,
|
||||||
'headers': args.headers,
|
'headers': headers,
|
||||||
'data': args.data,
|
'data': data,
|
||||||
'verify': {
|
'verify': {
|
||||||
'yes': True,
|
'yes': True,
|
||||||
'no': False
|
'true': True,
|
||||||
}.get(args.verify, args.verify),
|
'no': False,
|
||||||
|
'false': False,
|
||||||
|
}.get(args.verify.lower(), args.verify),
|
||||||
|
'cert': cert,
|
||||||
'timeout': args.timeout,
|
'timeout': args.timeout,
|
||||||
'auth': credentials,
|
'auth': args.auth,
|
||||||
'proxies': dict((p.key, p.value) for p in args.proxy),
|
'proxies': {p.key: p.value for p in args.proxy},
|
||||||
'files': args.files,
|
'files': args.files,
|
||||||
'allow_redirects': args.follow,
|
'allow_redirects': args.follow,
|
||||||
'params': args.params,
|
'params': args.params,
|
||||||
|
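The 'verify' entry in the kwargs above maps the --verify string onto what requests expects: a boolean for yes/no/true/false (case-insensitively), or a CA-bundle path passed through unchanged. Stand-alone sketch of that mapping:

    def interpret_verify(value):
        return {
            'yes': True,
            'true': True,
            'no': False,
            'false': False,
        }.get(value.lower(), value)

    print(interpret_verify('NO'))                   # False
    print(interpret_verify('yes'))                  # True
    print(interpret_verify('/etc/ssl/my-ca.pem'))   # /etc/ssl/my-ca.pem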
httpie/compat.py

@@ -1,19 +1,39 @@
 """
-Python 2/3 compatibility.
+Python 2.7, and 3.x compatibility.

 """
-#noinspection PyUnresolvedReferences
-from requests.compat import (
-    is_windows,
-    bytes,
-    str,
-    is_py3,
-    is_py26,
-)
+import sys

-try:
-    #noinspection PyUnresolvedReferences,PyCompatibility
+
+is_py2 = sys.version_info[0] == 2
+is_py27 = sys.version_info[:2] == (2, 7)
+is_py3 = sys.version_info[0] == 3
+is_pypy = 'pypy' in sys.version.lower()
+is_windows = 'win32' in str(sys.platform).lower()
+
+
+if is_py2:
+    # noinspection PyShadowingBuiltins
+    bytes = str
+    # noinspection PyUnresolvedReferences,PyShadowingBuiltins
+    str = unicode
+elif is_py3:
+    # noinspection PyShadowingBuiltins
+    str = str
+    # noinspection PyShadowingBuiltins
+    bytes = bytes
+
+
+try:  # pragma: no cover
+    # noinspection PyUnresolvedReferences,PyCompatibility
     from urllib.parse import urlsplit
-except ImportError:
-    #noinspection PyUnresolvedReferences,PyCompatibility
+except ImportError:  # pragma: no cover
+    # noinspection PyUnresolvedReferences,PyCompatibility
     from urlparse import urlsplit
+
+try:  # pragma: no cover
+    # noinspection PyCompatibility
+    from urllib.request import urlopen
+except ImportError:  # pragma: no cover
+    # noinspection PyCompatibility,PyUnresolvedReferences
+    from urllib2 import urlopen
httpie/config.py

@@ -2,15 +2,15 @@ import os
|
|||||||
import json
|
import json
|
||||||
import errno
|
import errno
|
||||||
|
|
||||||
from . import __version__
|
from httpie import __version__
|
||||||
from .compat import is_windows
|
from httpie.compat import is_windows
|
||||||
|
|
||||||
|
|
||||||
DEFAULT_CONFIG_DIR = os.environ.get(
|
DEFAULT_CONFIG_DIR = str(os.environ.get(
|
||||||
'HTTPIE_CONFIG_DIR',
|
'HTTPIE_CONFIG_DIR',
|
||||||
os.path.expanduser('~/.httpie') if not is_windows else
|
os.path.expanduser('~/.httpie') if not is_windows else
|
||||||
os.path.expandvars(r'%APPDATA%\\httpie')
|
os.path.expandvars(r'%APPDATA%\\httpie')
|
||||||
)
|
))
|
||||||
|
|
||||||
|
|
||||||
class BaseConfigDict(dict):
|
class BaseConfigDict(dict):
|
||||||
@ -18,19 +18,13 @@ class BaseConfigDict(dict):
|
|||||||
name = None
|
name = None
|
||||||
helpurl = None
|
helpurl = None
|
||||||
about = None
|
about = None
|
||||||
directory = DEFAULT_CONFIG_DIR
|
|
||||||
|
|
||||||
def __init__(self, directory=None, *args, **kwargs):
|
|
||||||
super(BaseConfigDict, self).__init__(*args, **kwargs)
|
|
||||||
if directory:
|
|
||||||
self.directory = directory
|
|
||||||
|
|
||||||
def __getattr__(self, item):
|
def __getattr__(self, item):
|
||||||
return self[item]
|
return self[item]
|
||||||
|
|
||||||
def _get_path(self):
|
def _get_path(self):
|
||||||
"""Return the config file path without side-effects."""
|
"""Return the config file path without side-effects."""
|
||||||
return os.path.join(self.directory, self.name + '.json')
|
raise NotImplementedError()
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def path(self):
|
def path(self):
|
||||||
@ -43,7 +37,6 @@ class BaseConfigDict(dict):
|
|||||||
raise
|
raise
|
||||||
return path
|
return path
|
||||||
|
|
||||||
@property
|
|
||||||
def is_new(self):
|
def is_new(self):
|
||||||
return not os.path.exists(self._get_path())
|
return not os.path.exists(self._get_path())
|
||||||
|
|
||||||
@ -55,7 +48,7 @@ class BaseConfigDict(dict):
|
|||||||
except ValueError as e:
|
except ValueError as e:
|
||||||
raise ValueError(
|
raise ValueError(
|
||||||
'Invalid %s JSON: %s [%s]' %
|
'Invalid %s JSON: %s [%s]' %
|
||||||
(type(self).__name__, e.message, self.path)
|
(type(self).__name__, str(e), self.path)
|
||||||
)
|
)
|
||||||
self.update(data)
|
self.update(data)
|
||||||
except IOError as e:
|
except IOError as e:
|
||||||
@ -87,14 +80,33 @@ class BaseConfigDict(dict):
|
|||||||
class Config(BaseConfigDict):
|
class Config(BaseConfigDict):
|
||||||
|
|
||||||
name = 'config'
|
name = 'config'
|
||||||
helpurl = 'https://github.com/jkbr/httpie#config'
|
helpurl = 'https://httpie.org/doc#config'
|
||||||
about = 'HTTPie configuration file'
|
about = 'HTTPie configuration file'
|
||||||
|
|
||||||
DEFAULTS = {
|
DEFAULTS = {
|
||||||
'implicit_content_type': 'json',
|
|
||||||
'default_options': []
|
'default_options': []
|
||||||
}
|
}
|
||||||
|
|
||||||
def __init__(self, *args, **kwargs):
|
def __init__(self, directory=DEFAULT_CONFIG_DIR):
|
||||||
super(Config, self).__init__(*args, **kwargs)
|
super(Config, self).__init__()
|
||||||
self.update(self.DEFAULTS)
|
self.update(self.DEFAULTS)
|
||||||
|
self.directory = directory
|
||||||
|
|
||||||
|
def load(self):
|
||||||
|
super(Config, self).load()
|
||||||
|
self._migrate_implicit_content_type()
|
||||||
|
|
||||||
|
def _get_path(self):
|
||||||
|
return os.path.join(self.directory, self.name + '.json')
|
||||||
|
|
||||||
|
def _migrate_implicit_content_type(self):
|
||||||
|
"""Migrate the removed implicit_content_type config option"""
|
||||||
|
try:
|
||||||
|
implicit_content_type = self.pop('implicit_content_type')
|
||||||
|
except KeyError:
|
||||||
|
self.save()
|
||||||
|
else:
|
||||||
|
if implicit_content_type == 'form':
|
||||||
|
self['default_options'].insert(0, '--form')
|
||||||
|
self.save()
|
||||||
|
self.load()
|
||||||
|
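The _migrate_implicit_content_type() method above translates the removed implicit_content_type option into a --form entry in default_options. A simplified, dictionary-only sketch of that migration (the real method also persists the result with save()/load()):

    def migrate_implicit_content_type(config):
        try:
            implicit_content_type = config.pop('implicit_content_type')
        except KeyError:
            pass  # nothing to migrate
        else:
            if implicit_content_type == 'form':
                config['default_options'].insert(0, '--form')

    config = {'implicit_content_type': 'form', 'default_options': []}
    migrate_implicit_content_type(config)
    print(config)  # {'default_options': ['--form']}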
httpie/context.py (new file, 99 lines)
@ -0,0 +1,99 @@
|
|||||||
|
import sys
|
||||||
|
try:
|
||||||
|
import curses
|
||||||
|
except ImportError:
|
||||||
|
curses = None # Compiled w/o curses
|
||||||
|
|
||||||
|
from httpie.compat import is_windows
|
||||||
|
from httpie.config import DEFAULT_CONFIG_DIR, Config
|
||||||
|
|
||||||
|
from httpie.utils import repr_dict_nice
|
||||||
|
|
||||||
|
|
||||||
|
class Environment(object):
|
||||||
|
"""
|
||||||
|
Information about the execution context
|
||||||
|
(standard streams, config directory, etc).
|
||||||
|
|
||||||
|
By default, it represents the actual environment.
|
||||||
|
All of the attributes can be overwritten though, which
|
||||||
|
is used by the test suite to simulate various scenarios.
|
||||||
|
|
||||||
|
"""
|
||||||
|
is_windows = is_windows
|
||||||
|
config_dir = DEFAULT_CONFIG_DIR
|
||||||
|
stdin = sys.stdin
|
||||||
|
stdin_isatty = stdin.isatty()
|
||||||
|
stdin_encoding = None
|
||||||
|
stdout = sys.stdout
|
||||||
|
stdout_isatty = stdout.isatty()
|
||||||
|
stdout_encoding = None
|
||||||
|
stderr = sys.stderr
|
||||||
|
stderr_isatty = stderr.isatty()
|
||||||
|
colors = 256
|
||||||
|
if not is_windows:
|
||||||
|
if curses:
|
||||||
|
try:
|
||||||
|
curses.setupterm()
|
||||||
|
colors = curses.tigetnum('colors')
|
||||||
|
except curses.error:
|
||||||
|
pass
|
||||||
|
else:
|
||||||
|
# noinspection PyUnresolvedReferences
|
||||||
|
import colorama.initialise
|
||||||
|
stdout = colorama.initialise.wrap_stream(
|
||||||
|
stdout, convert=None, strip=None,
|
||||||
|
autoreset=True, wrap=True
|
||||||
|
)
|
||||||
|
stderr = colorama.initialise.wrap_stream(
|
||||||
|
stderr, convert=None, strip=None,
|
||||||
|
autoreset=True, wrap=True
|
||||||
|
)
|
||||||
|
del colorama
|
||||||
|
|
||||||
|
def __init__(self, **kwargs):
|
||||||
|
"""
|
||||||
|
Use keyword arguments to overwrite
|
||||||
|
any of the class attributes for this instance.
|
||||||
|
|
||||||
|
"""
|
||||||
|
assert all(hasattr(type(self), attr) for attr in kwargs.keys())
|
||||||
|
self.__dict__.update(**kwargs)
|
||||||
|
|
||||||
|
# Keyword arguments > stream.encoding > default utf8
|
||||||
|
if self.stdin_encoding is None:
|
||||||
|
self.stdin_encoding = getattr(
|
||||||
|
self.stdin, 'encoding', None) or 'utf8'
|
||||||
|
if self.stdout_encoding is None:
|
||||||
|
actual_stdout = self.stdout
|
||||||
|
if is_windows:
|
||||||
|
# noinspection PyUnresolvedReferences
|
||||||
|
from colorama import AnsiToWin32
|
||||||
|
if isinstance(self.stdout, AnsiToWin32):
|
||||||
|
actual_stdout = self.stdout.wrapped
|
||||||
|
self.stdout_encoding = getattr(
|
||||||
|
actual_stdout, 'encoding', None) or 'utf8'
|
||||||
|
|
||||||
|
@property
|
||||||
|
def config(self):
|
||||||
|
if not hasattr(self, '_config'):
|
||||||
|
self._config = Config(directory=self.config_dir)
|
||||||
|
if self._config.is_new():
|
||||||
|
self._config.save()
|
||||||
|
else:
|
||||||
|
self._config.load()
|
||||||
|
return self._config
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
defaults = dict(type(self).__dict__)
|
||||||
|
actual = dict(defaults)
|
||||||
|
actual.update(self.__dict__)
|
||||||
|
actual['config'] = self.config
|
||||||
|
return repr_dict_nice(dict(
|
||||||
|
(key, value)
|
||||||
|
for key, value in actual.items()
|
||||||
|
if not key.startswith('_'))
|
||||||
|
)
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
return '<{0} {1}>'.format(type(self).__name__, str(self))
|
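The Environment class added above is designed so any class attribute can be overridden per instance, which is how the test suite simulates terminals, pipes and captured output. Assuming this version of HTTPie is importable, that looks roughly like:

    import io

    from httpie.context import Environment

    env = Environment(
        stdout=io.StringIO(),   # capture output instead of writing to the real stdout
        stdout_isatty=False,    # behave as if the output were piped
        stdin_isatty=True,      # behave as if no request body were piped in
    )
    print(env.stdout_isatty)    # False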
httpie/core.py (311 lines changed)
@ -2,29 +2,33 @@
|
|||||||
|
|
||||||
Invocation flow:
|
Invocation flow:
|
||||||
|
|
||||||
1. Read, validate and process the input (args, `stdin`).
|
1. Read, validate and process the input (args, `stdin`).
|
||||||
2. Create and send a request.
|
2. Create and send a request.
|
||||||
3. Stream, and possibly process and format, the requested parts
|
3. Stream, and possibly process and format, the parts
|
||||||
of the request-response exchange.
|
of the request-response exchange selected by output options.
|
||||||
4. Simultaneously write to `stdout`
|
4. Simultaneously write to `stdout`
|
||||||
5. Exit.
|
5. Exit.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
import sys
|
import sys
|
||||||
import errno
|
import errno
|
||||||
|
import platform
|
||||||
|
|
||||||
import requests
|
import requests
|
||||||
from httpie import __version__ as httpie_version
|
|
||||||
from requests import __version__ as requests_version
|
from requests import __version__ as requests_version
|
||||||
from pygments import __version__ as pygments_version
|
from pygments import __version__ as pygments_version
|
||||||
|
|
||||||
from .compat import str, is_py3
|
from httpie import __version__ as httpie_version, ExitStatus
|
||||||
from .client import get_response
|
from httpie.compat import str, bytes, is_py3
|
||||||
from .downloads import Download
|
from httpie.client import get_response
|
||||||
from .models import Environment
|
from httpie.downloads import Downloader
|
||||||
from .output import build_output_stream, write, write_with_colors_win_py3
|
from httpie.context import Environment
|
||||||
from . import ExitStatus
|
from httpie.plugins import plugin_manager
|
||||||
from .plugins import plugin_manager
|
from httpie.output.streams import (
|
||||||
|
build_output_stream,
|
||||||
|
write_stream,
|
||||||
|
write_stream_with_colors_win_py3
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def get_exit_status(http_status, follow=False):
|
def get_exit_status(http_status, follow=False):
|
||||||
@ -39,133 +43,218 @@ def get_exit_status(http_status, follow=False):
|
|||||||
# Server Error
|
# Server Error
|
||||||
return ExitStatus.ERROR_HTTP_5XX
|
return ExitStatus.ERROR_HTTP_5XX
|
||||||
else:
|
else:
|
||||||
return ExitStatus.OK
|
return ExitStatus.SUCCESS
|
||||||
|
|
||||||
|
|
||||||
def print_debug_info(env):
|
def print_debug_info(env):
|
||||||
sys.stderr.writelines([
|
env.stderr.writelines([
|
||||||
'HTTPie %s\n' % httpie_version,
|
'HTTPie %s\n' % httpie_version,
|
||||||
'HTTPie data: %s\n' % env.config.directory,
|
|
||||||
'Requests %s\n' % requests_version,
|
'Requests %s\n' % requests_version,
|
||||||
'Pygments %s\n' % pygments_version,
|
'Pygments %s\n' % pygments_version,
|
||||||
'Python %s %s\n' % (sys.version, sys.platform)
|
'Python %s\n%s\n' % (sys.version, sys.executable),
|
||||||
|
'%s %s' % (platform.system(), platform.release()),
|
||||||
])
|
])
|
||||||
|
env.stderr.write('\n\n')
|
||||||
|
env.stderr.write(repr(env))
|
||||||
|
env.stderr.write('\n')
|
||||||
|
|
||||||
|
|
||||||
def main(args=sys.argv[1:], env=Environment()):
|
def decode_args(args, stdin_encoding):
|
||||||
"""Run the main program and write the output to ``env.stdout``.
|
"""
|
||||||
|
Convert all bytes args to str
|
||||||
Return exit status code.
|
by decoding them using stdin encoding.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
plugin_manager.load_installed_plugins()
|
return [
|
||||||
from .cli import parser
|
arg.decode(stdin_encoding)
|
||||||
|
if type(arg) == bytes else arg
|
||||||
|
for arg in args
|
||||||
|
]
|
||||||
|
|
||||||
if env.config.default_options:
|
|
||||||
args = env.config.default_options + args
|
|
||||||
|
|
||||||
def error(msg, *args, **kwargs):
|
def program(args, env, log_error):
|
||||||
msg = msg % args
|
"""
|
||||||
level = kwargs.get('level', 'error')
|
The main program without error handling
|
||||||
env.stderr.write('\nhttp: %s: %s\n' % (level, msg))
|
|
||||||
|
|
||||||
debug = '--debug' in args
|
:param args: parsed args (argparse.Namespace)
|
||||||
traceback = debug or '--traceback' in args
|
:type env: Environment
|
||||||
exit_status = ExitStatus.OK
|
:param log_error: error log function
|
||||||
|
:return: status code
|
||||||
|
|
||||||
if debug:
|
"""
|
||||||
print_debug_info(env)
|
exit_status = ExitStatus.SUCCESS
|
||||||
if args == ['--debug']:
|
downloader = None
|
||||||
return exit_status
|
show_traceback = args.debug or args.traceback
|
||||||
|
|
||||||
download = None
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
args = parser.parse_args(args=args, env=env)
|
|
||||||
|
|
||||||
if args.download:
|
if args.download:
|
||||||
args.follow = True # --download implies --follow.
|
args.follow = True # --download implies --follow.
|
||||||
download = Download(
|
downloader = Downloader(
|
||||||
output_file=args.output_file,
|
output_file=args.output_file,
|
||||||
progress_file=env.stderr,
|
progress_file=env.stderr,
|
||||||
resume=args.download_resume
|
resume=args.download_resume
|
||||||
)
|
)
|
||||||
download.pre_request(args.headers)
|
downloader.pre_request(args.headers)
|
||||||
|
|
||||||
response = get_response(args, config_dir=env.config.directory)
|
final_response = get_response(args, config_dir=env.config.directory)
|
||||||
|
if args.all:
|
||||||
|
responses = final_response.history + [final_response]
|
||||||
|
else:
|
||||||
|
responses = [final_response]
|
||||||
|
|
||||||
if args.check_status or download:
|
for response in responses:
|
||||||
|
|
||||||
exit_status = get_exit_status(
|
if args.check_status or downloader:
|
||||||
http_status=response.status_code,
|
exit_status = get_exit_status(
|
||||||
follow=args.follow
|
http_status=response.status_code,
|
||||||
)
|
follow=args.follow
|
||||||
|
|
||||||
if not env.stdout_isatty and exit_status != ExitStatus.OK:
|
|
||||||
error('HTTP %s %s',
|
|
||||||
response.raw.status,
|
|
||||||
response.raw.reason,
|
|
||||||
level='warning')
|
|
||||||
|
|
||||||
write_kwargs = {
|
|
||||||
'stream': build_output_stream(
|
|
||||||
args, env, response.request, response),
|
|
||||||
|
|
||||||
# This will in fact be `stderr` with `--download`
|
|
||||||
'outfile': env.stdout,
|
|
||||||
|
|
||||||
'flush': env.stdout_isatty or args.stream
|
|
||||||
}
|
|
||||||
|
|
||||||
try:
|
|
||||||
|
|
||||||
if env.is_windows and is_py3 and 'colors' in args.prettify:
|
|
||||||
write_with_colors_win_py3(**write_kwargs)
|
|
||||||
else:
|
|
||||||
write(**write_kwargs)
|
|
||||||
|
|
||||||
if download and exit_status == ExitStatus.OK:
|
|
||||||
# Response body download.
|
|
||||||
download_stream, download_to = download.start(response)
|
|
||||||
write(
|
|
||||||
stream=download_stream,
|
|
||||||
outfile=download_to,
|
|
||||||
flush=False,
|
|
||||||
)
|
)
|
||||||
download.finish()
|
if not env.stdout_isatty and exit_status != ExitStatus.SUCCESS:
|
||||||
if download.interrupted:
|
log_error(
|
||||||
exit_status = ExitStatus.ERROR
|
'HTTP %s %s', response.raw.status, response.raw.reason,
|
||||||
error('Incomplete download: size=%d; downloaded=%d' % (
|
level='warning'
|
||||||
download.status.total_size,
|
)
|
||||||
download.status.downloaded
|
|
||||||
))
|
|
||||||
|
|
||||||
except IOError as e:
|
write_stream_kwargs = {
|
||||||
if not traceback and e.errno == errno.EPIPE:
|
'stream': build_output_stream(
|
||||||
# Ignore broken pipes unless --traceback.
|
args=args,
|
||||||
env.stderr.write('\n')
|
env=env,
|
||||||
else:
|
request=response.request,
|
||||||
raise
|
response=response,
|
||||||
except (KeyboardInterrupt, SystemExit):
|
output_options=(
|
||||||
if traceback:
|
args.output_options
|
||||||
raise
|
if response is final_response
|
||||||
env.stderr.write('\n')
|
else args.output_options_history
|
||||||
exit_status = ExitStatus.ERROR
|
)
|
||||||
|
),
|
||||||
|
# NOTE: `env.stdout` will in fact be `stderr` with `--download`
|
||||||
|
'outfile': env.stdout,
|
||||||
|
'flush': env.stdout_isatty or args.stream
|
||||||
|
}
|
||||||
|
try:
|
||||||
|
if env.is_windows and is_py3 and 'colors' in args.prettify:
|
||||||
|
write_stream_with_colors_win_py3(**write_stream_kwargs)
|
||||||
|
else:
|
||||||
|
write_stream(**write_stream_kwargs)
|
||||||
|
except IOError as e:
|
||||||
|
if not show_traceback and e.errno == errno.EPIPE:
|
||||||
|
# Ignore broken pipes unless --traceback.
|
||||||
|
env.stderr.write('\n')
|
||||||
|
else:
|
||||||
|
raise
|
||||||
|
|
||||||
except requests.Timeout:
|
if downloader and exit_status == ExitStatus.SUCCESS:
|
||||||
exit_status = ExitStatus.ERROR_TIMEOUT
|
# Last response body download.
|
||||||
error('Request timed out (%ss).', args.timeout)
|
download_stream, download_to = downloader.start(final_response)
|
||||||
|
write_stream(
|
||||||
except Exception as e:
|
stream=download_stream,
|
||||||
# TODO: Better distinction between expected and unexpected errors.
|
outfile=download_to,
|
||||||
# Network errors vs. bugs, etc.
|
flush=False,
|
||||||
if traceback:
|
)
|
||||||
raise
|
downloader.finish()
|
||||||
error('%s: %s', type(e).__name__, str(e))
|
if downloader.interrupted:
|
||||||
exit_status = ExitStatus.ERROR
|
exit_status = ExitStatus.ERROR
|
||||||
|
log_error('Incomplete download: size=%d; downloaded=%d' % (
|
||||||
|
downloader.status.total_size,
|
||||||
|
downloader.status.downloaded
|
||||||
|
))
|
||||||
|
return exit_status
|
||||||
|
|
||||||
finally:
|
finally:
|
||||||
if download and not download.finished:
|
if downloader and not downloader.finished:
|
||||||
download.failed()
|
downloader.failed()
|
||||||
|
|
||||||
|
if (not isinstance(args, list) and args.output_file
|
||||||
|
and args.output_file_specified):
|
||||||
|
args.output_file.close()
|
||||||
|
|
||||||
|
|
||||||
|
def main(args=sys.argv[1:], env=Environment(), custom_log_error=None):
|
||||||
|
"""
|
||||||
|
The main function.
|
||||||
|
|
||||||
|
Pre-process args, handle some special types of invocations,
|
||||||
|
and run the main program with error handling.
|
||||||
|
|
||||||
|
Return exit status code.
|
||||||
|
|
||||||
|
"""
|
||||||
|
args = decode_args(args, env.stdin_encoding)
|
||||||
|
plugin_manager.load_installed_plugins()
|
||||||
|
|
||||||
|
def log_error(msg, *args, **kwargs):
|
||||||
|
msg = msg % args
|
||||||
|
level = kwargs.get('level', 'error')
|
||||||
|
assert level in ['error', 'warning']
|
||||||
|
env.stderr.write('\nhttp: %s: %s\n' % (level, msg))
|
||||||
|
|
||||||
|
from httpie.cli import parser
|
||||||
|
|
||||||
|
if env.config.default_options:
|
||||||
|
args = env.config.default_options + args
|
||||||
|
|
||||||
|
if custom_log_error:
|
||||||
|
log_error = custom_log_error
|
||||||
|
|
||||||
|
include_debug_info = '--debug' in args
|
||||||
|
include_traceback = include_debug_info or '--traceback' in args
|
||||||
|
|
||||||
|
if include_debug_info:
|
||||||
|
print_debug_info(env)
|
||||||
|
if args == ['--debug']:
|
||||||
|
return ExitStatus.SUCCESS
|
||||||
|
|
||||||
|
exit_status = ExitStatus.SUCCESS
|
||||||
|
|
||||||
|
try:
|
||||||
|
parsed_args = parser.parse_args(args=args, env=env)
|
||||||
|
except KeyboardInterrupt:
|
||||||
|
env.stderr.write('\n')
|
||||||
|
if include_traceback:
|
||||||
|
raise
|
||||||
|
exit_status = ExitStatus.ERROR_CTRL_C
|
||||||
|
except SystemExit as e:
|
||||||
|
if e.code != ExitStatus.SUCCESS:
|
||||||
|
env.stderr.write('\n')
|
||||||
|
if include_traceback:
|
||||||
|
raise
|
||||||
|
exit_status = ExitStatus.ERROR
|
||||||
|
else:
|
||||||
|
try:
|
||||||
|
exit_status = program(
|
||||||
|
args=parsed_args,
|
||||||
|
env=env,
|
||||||
|
log_error=log_error,
|
||||||
|
)
|
||||||
|
except KeyboardInterrupt:
|
||||||
|
env.stderr.write('\n')
|
||||||
|
if include_traceback:
|
||||||
|
raise
|
||||||
|
exit_status = ExitStatus.ERROR_CTRL_C
|
||||||
|
except SystemExit as e:
|
||||||
|
if e.code != ExitStatus.SUCCESS:
|
||||||
|
env.stderr.write('\n')
|
||||||
|
if include_traceback:
|
||||||
|
raise
|
||||||
|
exit_status = ExitStatus.ERROR
|
||||||
|
except requests.Timeout:
|
||||||
|
exit_status = ExitStatus.ERROR_TIMEOUT
|
||||||
|
log_error('Request timed out (%ss).', parsed_args.timeout)
|
||||||
|
except requests.TooManyRedirects:
|
||||||
|
exit_status = ExitStatus.ERROR_TOO_MANY_REDIRECTS
|
||||||
|
log_error('Too many redirects (--max-redirects=%s).',
|
||||||
|
parsed_args.max_redirects)
|
||||||
|
except Exception as e:
|
||||||
|
# TODO: Further distinction between expected and unexpected errors.
|
||||||
|
msg = str(e)
|
||||||
|
if hasattr(e, 'request'):
|
||||||
|
request = e.request
|
||||||
|
if hasattr(request, 'url'):
|
||||||
|
msg += ' while doing %s request to URL: %s' % (
|
||||||
|
request.method, request.url)
|
||||||
|
log_error('%s: %s', type(e).__name__, msg)
|
||||||
|
if include_traceback:
|
||||||
|
raise
|
||||||
|
exit_status = ExitStatus.ERROR
|
||||||
|
|
||||||
return exit_status
|
return exit_status
|
||||||
|
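With the restructuring above, core.main() can be driven programmatically and returns one of the ExitStatus codes. Assuming this version of HTTPie is importable, the special-cased '--debug' invocation from the diff makes a convenient no-network example:

    from httpie import ExitStatus
    from httpie.core import main

    # '--debug' alone prints environment/debug info and returns SUCCESS
    # (see the `if args == ['--debug']` branch above).
    status = main(args=['--debug'])
    print(status == ExitStatus.SUCCESS)  # True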
httpie/downloads.py

@@ -7,15 +7,16 @@ from __future__ import division
|
|||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
import sys
|
import sys
|
||||||
|
import errno
|
||||||
import mimetypes
|
import mimetypes
|
||||||
import threading
|
import threading
|
||||||
from time import sleep, time
|
from time import sleep, time
|
||||||
from mailbox import Message
|
from mailbox import Message
|
||||||
|
|
||||||
from .output import RawStream
|
from httpie.output.streams import RawStream
|
||||||
from .models import HTTPResponse
|
from httpie.models import HTTPResponse
|
||||||
from .utils import humanize_bytes
|
from httpie.utils import humanize_bytes
|
||||||
from .compat import urlsplit
|
from httpie.compat import urlsplit
|
||||||
|
|
||||||
|
|
||||||
PARTIAL_CONTENT = 206
|
PARTIAL_CONTENT = 206
|
||||||
@ -53,8 +54,8 @@ def parse_content_range(content_range, resumed_from):
|
|||||||
raise ContentRangeError('Missing Content-Range')
|
raise ContentRangeError('Missing Content-Range')
|
||||||
|
|
||||||
pattern = (
|
pattern = (
|
||||||
'^bytes (?P<first_byte_pos>\d+)-(?P<last_byte_pos>\d+)'
|
r'^bytes (?P<first_byte_pos>\d+)-(?P<last_byte_pos>\d+)'
|
||||||
'/(\*|(?P<instance_length>\d+))$'
|
r'/(\*|(?P<instance_length>\d+))$'
|
||||||
)
|
)
|
||||||
match = re.match(pattern, content_range)
|
match = re.match(pattern, content_range)
|
||||||
|
|
||||||
@ -84,8 +85,8 @@ def parse_content_range(content_range, resumed_from):
|
|||||||
'Invalid Content-Range returned: %r' % content_range)
|
'Invalid Content-Range returned: %r' % content_range)
|
||||||
|
|
||||||
if (first_byte_pos != resumed_from
|
if (first_byte_pos != resumed_from
|
||||||
or (instance_length is not None
|
or (instance_length is not None
|
||||||
and last_byte_pos + 1 != instance_length)):
|
and last_byte_pos + 1 != instance_length)):
|
||||||
# Not what we asked for.
|
# Not what we asked for.
|
||||||
raise ContentRangeError(
|
raise ContentRangeError(
|
||||||
'Unexpected Content-Range returned (%r)'
|
'Unexpected Content-Range returned (%r)'
|
||||||
@ -104,7 +105,7 @@ def filename_from_content_disposition(content_disposition):
|
|||||||
:return: the filename if present and valid, otherwise `None`
|
:return: the filename if present and valid, otherwise `None`
|
||||||
|
|
||||||
"""
|
"""
|
||||||
# attachment; filename=jkbr-httpie-0.4.1-20-g40bd8f6.tar.gz
|
# attachment; filename=jakubroztocil-httpie-0.4.1-20-g40bd8f6.tar.gz
|
||||||
|
|
||||||
msg = Message('Content-Disposition: %s' % content_disposition)
|
msg = Message('Content-Disposition: %s' % content_disposition)
|
||||||
filename = msg.get_filename()
|
filename = msg.get_filename()
|
||||||
@ -135,16 +136,51 @@ def filename_from_url(url, content_type):
|
|||||||
return fn
|
return fn
|
||||||
|
|
||||||
|
|
||||||
def get_unique_filename(fn, exists=os.path.exists):
|
def trim_filename(filename, max_len):
|
||||||
|
if len(filename) > max_len:
|
||||||
|
trim_by = len(filename) - max_len
|
||||||
|
name, ext = os.path.splitext(filename)
|
||||||
|
if trim_by >= len(name):
|
||||||
|
filename = filename[:-trim_by]
|
||||||
|
else:
|
||||||
|
filename = name[:-trim_by] + ext
|
||||||
|
return filename
|
||||||
|
|
||||||
|
|
||||||
|
def get_filename_max_length(directory):
|
||||||
|
max_len = 255
|
||||||
|
try:
|
||||||
|
pathconf = os.pathconf
|
||||||
|
except AttributeError:
|
||||||
|
pass # non-posix
|
||||||
|
else:
|
||||||
|
try:
|
||||||
|
max_len = pathconf(directory, 'PC_NAME_MAX')
|
||||||
|
except OSError as e:
|
||||||
|
if e.errno != errno.EINVAL:
|
||||||
|
raise
|
||||||
|
return max_len
|
||||||
|
|
||||||
|
|
||||||
|
def trim_filename_if_needed(filename, directory='.', extra=0):
|
||||||
|
max_len = get_filename_max_length(directory) - extra
|
||||||
|
if len(filename) > max_len:
|
||||||
|
filename = trim_filename(filename, max_len)
|
||||||
|
return filename
|
||||||
|
|
||||||
|
|
||||||
|
def get_unique_filename(filename, exists=os.path.exists):
|
||||||
attempt = 0
|
attempt = 0
|
||||||
while True:
|
while True:
|
||||||
suffix = '-' + str(attempt) if attempt > 0 else ''
|
suffix = '-' + str(attempt) if attempt > 0 else ''
|
||||||
if not exists(fn + suffix):
|
try_filename = trim_filename_if_needed(filename, extra=len(suffix))
|
||||||
return fn + suffix
|
try_filename += suffix
|
||||||
|
if not exists(try_filename):
|
||||||
|
return try_filename
|
||||||
attempt += 1
|
attempt += 1
|
||||||
|
|
||||||
|
|
||||||
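get_unique_filename() above probes candidate names until one is free, trimming to the filesystem's name-length limit as needed. A reduced version without the trimming, showing the suffixing behaviour with a fake exists() check:

    import os

    def get_unique_filename(filename, exists=os.path.exists):
        attempt = 0
        while True:
            suffix = '-' + str(attempt) if attempt > 0 else ''
            candidate = filename + suffix
            if not exists(candidate):
                return candidate
            attempt += 1

    taken = {'download.json', 'download.json-1'}
    print(get_unique_filename('download.json', exists=taken.__contains__))
    # download.json-2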
-class Download(object):
+class Downloader(object):

    def __init__(self, output_file=None,
                 resume=False, progress_file=sys.stderr):

@@ -155,10 +191,8 @@ class Download(object):

        :param output_file: The file to store response body in. If not
            provided, it will be guessed from the response.
-        :type output_file: file

        :param progress_file: Where to report download progress.
-        :type progress_file: file

        """
        self._output_file = output_file

@@ -180,8 +214,8 @@ class Download(object):
        :type request_headers: dict

        """
-        # Disable content encoding so that we can resume, etc.
-        request_headers['Accept-Encoding'] = None
+        # Ask the server not to encode the content so that we can resume, etc.
+        request_headers['Accept-Encoding'] = 'identity'
        if self._resume:
            bytes_have = os.path.getsize(self._output_file.name)
            if bytes_have:

@@ -203,6 +237,8 @@ class Download(object):
        """
        assert not self.status.time_started

+        # FIXME: some servers still might sent Content-Encoding: gzip
+        # <https://github.com/jakubroztocil/httpie/issues/423>
        try:
            total_size = int(response.headers['Content-Length'])
        except (KeyError, ValueError, TypeError):

@@ -225,16 +261,16 @@ class Download(object):
        else:
            # TODO: Should the filename be taken from response.history[0].url?
            # Output file not specified. Pick a name that doesn't exist yet.
-            fn = None
+            filename = None
            if 'Content-Disposition' in response.headers:
-                fn = filename_from_content_disposition(
+                filename = filename_from_content_disposition(
                    response.headers['Content-Disposition'])
-            if not fn:
-                fn = filename_from_url(
+            if not filename:
+                filename = filename_from_url(
                    url=response.url,
                    content_type=response.headers.get('Content-Type'),
                )
-            self._output_file = open(get_unique_filename(fn), mode='a+b')
+            self._output_file = open(get_unique_filename(filename), mode='a+b')

        self.status.started(
            resumed_from=self._resumed_from,

@@ -301,8 +337,7 @@ class Status(object):

    def started(self, resumed_from=0, total_size=None):
        assert self.time_started is None
-        if total_size is not None:
-            self.total_size = total_size
+        self.total_size = total_size
        self.downloaded = self.resumed_from = resumed_from
        self.time_started = time()

@@ -412,16 +447,25 @@ class ProgressReporterThread(threading.Thread):
            else 0)

    def sum_up(self):
-        actually_downloaded = (self.status.downloaded
-                               - self.status.resumed_from)
+        actually_downloaded = (
+            self.status.downloaded - self.status.resumed_from)
        time_taken = self.status.time_finished - self.status.time_started

        self.output.write(CLEAR_LINE)

+        try:
+            speed = actually_downloaded / time_taken
+        except ZeroDivisionError:
+            # Either time is 0 (not all systems provide `time.time`
+            # with a better precision than 1 second), and/or nothing
+            # has been downloaded.
+            speed = actually_downloaded
+
        self.output.write(SUMMARY.format(
            downloaded=humanize_bytes(actually_downloaded),
            total=(self.status.total_size
                   and humanize_bytes(self.status.total_size)),
-            speed=humanize_bytes(actually_downloaded / time_taken),
+            speed=humanize_bytes(speed),
            time=time_taken,
        ))
        self.output.flush()
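The summary above divides bytes by elapsed time, so a zero-duration download (a coarse clock, or nothing new fetched on resume) would otherwise raise. A small standalone sketch of that guard, with made-up numbers:

```python
def summarize(downloaded, resumed_from, time_started, time_finished):
    actually_downloaded = downloaded - resumed_from
    time_taken = time_finished - time_started
    try:
        speed = actually_downloaded / time_taken
    except ZeroDivisionError:
        # Clock too coarse, or nothing new downloaded: fall back gracefully.
        speed = actually_downloaded
    return actually_downloaded, time_taken, speed


# A 1 MiB resumed chunk that finished within the clock's resolution:
print(summarize(2097152, 1048576, 10.0, 10.0))  # (1048576, 0.0, 1048576)
```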
448  httpie/input.py
@@ -2,57 +2,78 @@

"""
import os
+import ssl
import sys
import re
-import json
+import errno
import mimetypes
-from getpass import getpass
+import getpass
from io import BytesIO
-#noinspection PyCompatibility
+from collections import namedtuple, Iterable, OrderedDict
+# noinspection PyCompatibility
from argparse import ArgumentParser, ArgumentTypeError, ArgumentError

-try:
-    from collections import OrderedDict
-except ImportError:
-    OrderedDict = dict

# TODO: Use MultiDict for headers once added to `requests`.
-# https://github.com/jkbr/httpie/issues/130
+# https://github.com/jakubroztocil/httpie/issues/130
+from httpie.plugins import plugin_manager
from requests.structures import CaseInsensitiveDict

-from .compat import urlsplit, str
-from .sessions import VALID_SESSION_NAME_PATTERN
+from httpie.compat import urlsplit, str, is_pypy, is_py27
+from httpie.sessions import VALID_SESSION_NAME_PATTERN
+from httpie.utils import load_json_preserve_order


+# ALPHA *( ALPHA / DIGIT / "+" / "-" / "." )
+# <http://tools.ietf.org/html/rfc3986#section-3.1>
+URL_SCHEME_RE = re.compile(r'^[a-z][a-z0-9.+-]*://', re.IGNORECASE)
+
HTTP_POST = 'POST'
HTTP_GET = 'GET'
-HTTP = 'http://'
-HTTPS = 'https://'


# Various separators used in args
SEP_HEADERS = ':'
+SEP_HEADERS_EMPTY = ';'
SEP_CREDENTIALS = ':'
SEP_PROXY = ':'
SEP_DATA = '='
SEP_DATA_RAW_JSON = ':='
SEP_FILES = '@'
+SEP_DATA_EMBED_FILE = '=@'
+SEP_DATA_EMBED_RAW_JSON_FILE = ':=@'
SEP_QUERY = '=='

# Separators that become request data
SEP_GROUP_DATA_ITEMS = frozenset([
    SEP_DATA,
    SEP_DATA_RAW_JSON,
-    SEP_FILES
+    SEP_FILES,
+    SEP_DATA_EMBED_FILE,
+    SEP_DATA_EMBED_RAW_JSON_FILE
+])
+
+# Separators for items whose value is a filename to be embedded
+SEP_GROUP_DATA_EMBED_ITEMS = frozenset([
+    SEP_DATA_EMBED_FILE,
+    SEP_DATA_EMBED_RAW_JSON_FILE,
+])
+
+# Separators for raw JSON items
+SEP_GROUP_RAW_JSON_ITEMS = frozenset([
+    SEP_DATA_RAW_JSON,
+    SEP_DATA_EMBED_RAW_JSON_FILE,
])

# Separators allowed in ITEM arguments
-SEP_GROUP_ITEMS = frozenset([
+SEP_GROUP_ALL_ITEMS = frozenset([
    SEP_HEADERS,
+    SEP_HEADERS_EMPTY,
    SEP_QUERY,
    SEP_DATA,
    SEP_DATA_RAW_JSON,
-    SEP_FILES
+    SEP_FILES,
+    SEP_DATA_EMBED_FILE,
+    SEP_DATA_EMBED_RAW_JSON_FILE,
])

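To illustrate how the new separator groups drive item routing, here is a small, hypothetical sketch (the literal separators mirror the constants above; the `classify` helper is invented for this example only):

```python
SEP_GROUP_DATA_EMBED_ITEMS = frozenset(['=@', ':=@'])
SEP_GROUP_RAW_JSON_ITEMS = frozenset([':=', ':=@'])


def classify(sep):
    # ':=@' belongs to both groups: embed the file, then parse it as JSON.
    kinds = []
    if sep in SEP_GROUP_DATA_EMBED_ITEMS:
        kinds.append('embed file contents')
    if sep in SEP_GROUP_RAW_JSON_ITEMS:
        kinds.append('parse as JSON')
    return kinds or ['plain data/header/query item']


print(classify(':=@'))  # ['embed file contents', 'parse as JSON']
print(classify('=@'))   # ['embed file contents']
print(classify('=='))   # ['plain data/header/query item']
```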
@@ -84,7 +105,22 @@ OUTPUT_OPTIONS_DEFAULT = OUT_RESP_HEAD + OUT_RESP_BODY
OUTPUT_OPTIONS_DEFAULT_STDOUT_REDIRECTED = OUT_RESP_BODY


-class Parser(ArgumentParser):
+SSL_VERSION_ARG_MAPPING = {
+    'ssl2.3': 'PROTOCOL_SSLv23',
+    'ssl3': 'PROTOCOL_SSLv3',
+    'tls1': 'PROTOCOL_TLSv1',
+    'tls1.1': 'PROTOCOL_TLSv1_1',
+    'tls1.2': 'PROTOCOL_TLSv1_2',
+    'tls1.3': 'PROTOCOL_TLSv1_3',
+}
+SSL_VERSION_ARG_MAPPING = {
+    cli_arg: getattr(ssl, ssl_constant)
+    for cli_arg, ssl_constant in SSL_VERSION_ARG_MAPPING.items()
+    if hasattr(ssl, ssl_constant)
+}
+
+
+class HTTPieArgumentParser(ArgumentParser):
    """Adds additional logic to `argparse.ArgumentParser`.

    Handles all input (CLI args, file args, stdin), applies defaults,

@@ -94,13 +130,13 @@ class Parser(ArgumentParser):

    def __init__(self, *args, **kwargs):
        kwargs['add_help'] = False
-        super(Parser, self).__init__(*args, **kwargs)
+        super(HTTPieArgumentParser, self).__init__(*args, **kwargs)

-    #noinspection PyMethodOverriding
+    # noinspection PyMethodOverriding
    def parse_args(self, env, args=None, namespace=None):

        self.env = env
-        self.args, no_options = super(Parser, self)\
+        self.args, no_options = super(HTTPieArgumentParser, self)\
            .parse_known_args(args, namespace)

        if self.args.debug:

@@ -108,7 +144,6 @@ class Parser(ArgumentParser):

        # Arguments processing and environment setup.
        self._apply_no_options(no_options)
-        self._apply_config()
        self._validate_download_options()
        self._setup_standard_streams()
        self._process_output_options()

@@ -117,10 +152,20 @@ class Parser(ArgumentParser):
        self._parse_items()
        if not self.args.ignore_stdin and not env.stdin_isatty:
            self._body_from_file(self.env.stdin)
-        if not (self.args.url.startswith((HTTP, HTTPS))):
-            # Default to 'https://' if invoked as `https args`.
-            scheme = HTTPS if self.env.progname == 'https' else HTTP
-            self.args.url = scheme + self.args.url
+        if not URL_SCHEME_RE.match(self.args.url):
+            scheme = self.args.default_scheme + "://"
+
+            # See if we're using curl style shorthand for localhost (:3000/foo)
+            shorthand = re.match(r'^:(?!:)(\d*)(/?.*)$', self.args.url)
+            if shorthand:
+                port = shorthand.group(1)
+                rest = shorthand.group(2)
+                self.args.url = scheme + 'localhost'
+                if port:
+                    self.args.url += ':' + port
+                self.args.url += rest
+            else:
+                self.args.url = scheme + self.args.url
        self._process_auth()

        return self.args
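A self-contained sketch of the curl-style localhost shorthand handled above, assuming the default scheme is `http` (the helper name and sample URLs are made up for illustration):

```python
import re

URL_SCHEME_RE = re.compile(r'^[a-z][a-z0-9.+-]*://', re.IGNORECASE)
SHORTHAND_RE = re.compile(r'^:(?!:)(\d*)(/?.*)$')


def expand_url(url, default_scheme='http'):
    if URL_SCHEME_RE.match(url):
        return url
    scheme = default_scheme + '://'
    shorthand = SHORTHAND_RE.match(url)
    if shorthand:
        port, rest = shorthand.groups()
        expanded = scheme + 'localhost'
        if port:
            expanded += ':' + port
        return expanded + rest
    return scheme + url


print(expand_url(':3000/foo'))    # http://localhost:3000/foo
print(expand_url(':/foo'))        # http://localhost/foo
print(expand_url('example.org'))  # http://example.org
```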
@@ -133,71 +178,96 @@ class Parser(ArgumentParser):
            sys.stderr: self.env.stderr,
            None: self.env.stderr
        }.get(file, file)
-        super(Parser, self)._print_message(message, file)
+        if not hasattr(file, 'buffer') and isinstance(message, str):
+            message = message.encode(self.env.stdout_encoding)
+        super(HTTPieArgumentParser, self)._print_message(message, file)

    def _setup_standard_streams(self):
        """
        Modify `env.stdout` and `env.stdout_isatty` based on args, if needed.

        """
-        if not self.env.stdout_isatty and self.args.output_file:
-            self.error('Cannot use --output, -o with redirected output.')
-
-        # FIXME: Come up with a cleaner solution.
+        self.args.output_file_specified = bool(self.args.output_file)
        if self.args.download:
-            if not self.env.stdout_isatty:
-                # Use stdout as tge download output file.
+            # FIXME: Come up with a cleaner solution.
+            if not self.args.output_file and not self.env.stdout_isatty:
+                # Use stdout as the download output file.
                self.args.output_file = self.env.stdout

            # With `--download`, we write everything that would normally go to
            # `stdout` to `stderr` instead. Let's replace the stream so that
            # we don't have to use many `if`s throughout the codebase.
            # The response body will be treated separately.
            self.env.stdout = self.env.stderr
            self.env.stdout_isatty = self.env.stderr_isatty

        elif self.args.output_file:
            # When not `--download`ing, then `--output` simply replaces
            # `stdout`. The file is opened for appending, which isn't what
            # we want in this case.
            self.args.output_file.seek(0)
-            self.args.output_file.truncate()
+            try:
+                self.args.output_file.truncate()
+            except IOError as e:
+                if e.errno == errno.EINVAL:
+                    # E.g. /dev/null on Linux.
+                    pass
+                else:
+                    raise
            self.env.stdout = self.args.output_file
            self.env.stdout_isatty = False

-    def _apply_config(self):
-        if (not self.args.json
-                and self.env.config.implicit_content_type == 'form'):
-            self.args.form = True
-
    def _process_auth(self):
-        """
-        If only a username provided via --auth, then ask for a password.
-        Or, take credentials from the URL, if provided.
-
-        """
+        # TODO: refactor
+        self.args.auth_plugin = None
+        default_auth_plugin = plugin_manager.get_auth_plugins()[0]
+        auth_type_set = self.args.auth_type is not None
        url = urlsplit(self.args.url)

-        if self.args.auth:
-            if not self.args.auth.has_password():
-                # Stdin already read (if not a tty) so it's save to prompt.
-                if self.args.ignore_stdin:
-                    self.error('Unable to prompt for passwords because'
-                               ' --ignore-stdin is set.')
-                self.args.auth.prompt_password(url.netloc)
-
-        elif url.username is not None:
-            # Handle http://username:password@hostname/
-            username, password = url.username, url.password
-            self.args.auth = AuthCredentials(
-                key=username,
-                value=password,
-                sep=SEP_CREDENTIALS,
-                orig=SEP_CREDENTIALS.join([username, password])
-            )
+        if self.args.auth is None and not auth_type_set:
+            if url.username is not None:
+                # Handle http://username:password@hostname/
+                username = url.username
+                password = url.password or ''
+                self.args.auth = AuthCredentials(
+                    key=username,
+                    value=password,
+                    sep=SEP_CREDENTIALS,
+                    orig=SEP_CREDENTIALS.join([username, password])
+                )
+
+        if self.args.auth is not None or auth_type_set:
+            if not self.args.auth_type:
+                self.args.auth_type = default_auth_plugin.auth_type
+            plugin = plugin_manager.get_auth_plugin(self.args.auth_type)()
+
+            if plugin.auth_require and self.args.auth is None:
+                self.error('--auth required')
+
+            plugin.raw_auth = self.args.auth
+            self.args.auth_plugin = plugin
+            already_parsed = isinstance(self.args.auth, AuthCredentials)
+
+            if self.args.auth is None or not plugin.auth_parse:
+                self.args.auth = plugin.get_auth()
+            else:
+                if already_parsed:
+                    # from the URL
+                    credentials = self.args.auth
+                else:
+                    credentials = parse_auth(self.args.auth)
+
+                if (not credentials.has_password()
+                        and plugin.prompt_password):
+                    if self.args.ignore_stdin:
+                        # Non-tty stdin read by now
+                        self.error(
+                            'Unable to prompt for passwords because'
+                            ' --ignore-stdin is set.'
+                        )
+                    credentials.prompt_password(url.netloc)
+                self.args.auth = plugin.get_auth(
+                    username=credentials.key,
+                    password=credentials.value,
+                )

    def _apply_no_options(self, no_options):
        """For every `--no-OPTION` in `no_options`, set `args.OPTION` to

@@ -233,7 +303,8 @@ class Parser(ArgumentParser):
        """
        if self.args.data:
            self.error('Request body (from stdin or a file) and request '
-                       'data (key=value) cannot be mixed.')
+                       'data (key=value) cannot be mixed. Pass '
+                       '--ignore-stdin to let key/value take priority.')
        self.args.data = getattr(fd, 'buffer', fd).read()

    def _guess_method(self):

@@ -255,23 +326,21 @@ class Parser(ArgumentParser):
            # and the first ITEM is now incorrectly in `args.url`.
            try:
                # Parse the URL as an ITEM and store it as the first ITEM arg.
-                self.args.items.insert(
-                    0,
-                    KeyValueArgType(*SEP_GROUP_ITEMS).__call__(self.args.url)
-                )
+                self.args.items.insert(0, KeyValueArgType(
+                    *SEP_GROUP_ALL_ITEMS).__call__(self.args.url))

            except ArgumentTypeError as e:
                if self.args.traceback:
                    raise
-                self.error(e.message)
+                self.error(e.args[0])

            else:
                # Set the URL correctly
                self.args.url = self.args.method
        # Infer the method
        has_data = (
-            (not self.args.ignore_stdin and
-             not self.env.stdin_isatty) or any(
+            (not self.args.ignore_stdin and not self.env.stdin_isatty)
+            or any(
                item.sep in SEP_GROUP_DATA_ITEMS
                for item in self.args.items
            )

@@ -283,21 +352,20 @@ class Parser(ArgumentParser):
        and `args.files`.

        """
-        self.args.headers = CaseInsensitiveDict()
-        self.args.data = ParamDict() if self.args.form else OrderedDict()
-        self.args.files = OrderedDict()
-        self.args.params = ParamDict()
-
        try:
-            parse_items(items=self.args.items,
-                        headers=self.args.headers,
-                        data=self.args.data,
-                        files=self.args.files,
-                        params=self.args.params)
+            items = parse_items(
+                items=self.args.items,
+                data_class=ParamsDict if self.args.form else OrderedDict
+            )
        except ParseError as e:
            if self.args.traceback:
                raise
-            self.error(e.message)
+            self.error(e.args[0])
+        else:
+            self.args.headers = items.headers
+            self.args.data = items.data
+            self.args.files = items.files
+            self.args.params = items.params

        if self.args.files and not self.args.form:
            # `http url @/path/to/file`

@@ -307,17 +375,14 @@ class Parser(ArgumentParser):
                    'Invalid file fields (perhaps you meant --form?): %s'
                    % ','.join(file_fields))

-            fn, fd = self.args.files['']
+            fn, fd, ct = self.args.files['']
            self.args.files = {}

            self._body_from_file(fd)

            if 'Content-Type' not in self.args.headers:
-                mime, encoding = mimetypes.guess_type(fn, strict=False)
-                if mime:
-                    content_type = mime
-                    if encoding:
-                        content_type = '%s; charset=%s' % (mime, encoding)
+                content_type = get_content_type(fn)
+                if content_type:
                    self.args.headers['Content-Type'] = content_type

    def _process_output_options(self):

@@ -326,18 +391,32 @@ class Parser(ArgumentParser):
        The default output options are stdout-type-sensitive.

        """
-        if not self.args.output_options:
-            self.args.output_options = (
-                OUTPUT_OPTIONS_DEFAULT
-                if self.env.stdout_isatty
-                else OUTPUT_OPTIONS_DEFAULT_STDOUT_REDIRECTED
-            )
+        def check_options(value, option):
+            unknown = set(value) - OUTPUT_OPTIONS
+            if unknown:
+                self.error('Unknown output options: {0}={1}'.format(
+                    option,
+                    ','.join(unknown)
+                ))

-        unknown_output_options = set(self.args.output_options) - OUTPUT_OPTIONS
-        if unknown_output_options:
-            self.error(
-                'Unknown output options: %s' % ','.join(unknown_output_options)
-            )
+        if self.args.verbose:
+            self.args.all = True
+
+        if self.args.output_options is None:
+            if self.args.verbose:
+                self.args.output_options = ''.join(OUTPUT_OPTIONS)
+            else:
+                self.args.output_options = (
+                    OUTPUT_OPTIONS_DEFAULT
+                    if self.env.stdout_isatty
+                    else OUTPUT_OPTIONS_DEFAULT_STDOUT_REDIRECTED
+                )
+
+        if self.args.output_options_history is None:
+            self.args.output_options_history = self.args.output_options
+
+        check_options(self.args.output_options, '--print')
+        check_options(self.args.output_options_history, '--history-print')

        if self.args.download and OUT_RESP_BODY in self.args.output_options:
            # Response body is always downloaded with --download and it goes

@@ -349,7 +428,8 @@ class Parser(ArgumentParser):
        if self.args.prettify == PRETTY_STDOUT_TTY_ONLY:
            self.args.prettify = PRETTY_MAP[
                'all' if self.env.stdout_isatty else 'none']
-        elif self.args.prettify and self.env.is_windows:
+        elif (self.args.prettify and self.env.is_windows
+              and self.args.output_file):
            self.error('Only terminal output can be colorized on Windows.')
        else:
            # noinspection PyTypeChecker

@@ -380,6 +460,9 @@ class KeyValue(object):
    def __eq__(self, other):
        return self.__dict__ == other.__dict__

+    def __repr__(self):
+        return repr(self.__dict__)
+

class SessionNameValidator(object):


@@ -406,6 +489,9 @@ class KeyValueArgType(object):

    def __init__(self, *separators):
        self.separators = separators
+        self.special_characters = set('\\')
+        for separator in separators:
+            self.special_characters.update(separator)

    def __call__(self, string):
        """Parse `string` and return `self.key_value_class()` instance.

@@ -420,8 +506,8 @@ class KeyValueArgType(object):
        class Escaped(str):
            """Represents an escaped character."""

-        def tokenize(s):
-            """Tokenize `s`. There are only two token types - strings
+        def tokenize(string):
+            r"""Tokenize `string`. There are only two token types - strings
            and escaped characters:

                tokenize(r'foo\=bar\\baz')

@@ -429,16 +515,16 @@ class KeyValueArgType(object):

            """
            tokens = ['']
-            esc = False
-            for c in s:
-                if esc:
-                    tokens.extend([Escaped(c), ''])
-                    esc = False
-                else:
-                    if c == '\\':
-                        esc = True
-                    else:
-                        tokens[-1] += c
+            characters = iter(string)
+            for char in characters:
+                if char == '\\':
+                    char = next(characters, '')
+                    if char not in self.special_characters:
+                        tokens[-1] += '\\' + char
+                    else:
+                        tokens.extend([Escaped(char), ''])
+                else:
+                    tokens[-1] += char
            return tokens

        tokens = tokenize(string)

@@ -475,7 +561,7 @@ class KeyValueArgType(object):

        else:
            raise ArgumentTypeError(
-                '"%s" is not a valid value' % string)
+                u'"%s" is not a valid value' % string)

        return self.key_value_class(
            key=key, value=value, sep=sep, orig=string)

@@ -486,7 +572,7 @@ class AuthCredentials(KeyValue):

    def _getpass(self, prompt):
        # To allow mocking.
-        return getpass.getpass(prompt)
+        return getpass.getpass(str(prompt))

    def has_password(self):
        return self.value is not None

@@ -523,10 +609,24 @@ class AuthCredentialsArgType(KeyValueArgType):
        )


-class ParamDict(OrderedDict):
+parse_auth = AuthCredentialsArgType(SEP_CREDENTIALS)
+
+
+class RequestItemsDict(OrderedDict):
    """Multi-value dict for URL parameters and form data."""

-    #noinspection PyMethodOverriding
+    if is_pypy and is_py27:
+        # Manually set keys when initialized with an iterable as PyPy
+        # doesn't call __setitem__ in such case (pypy3 does).
+        def __init__(self, *args, **kwargs):
+            if len(args) == 1 and isinstance(args[0], Iterable):
+                super(RequestItemsDict, self).__init__(**kwargs)
+                for k, v in args[0]:
+                    self[k] = v
+            else:
+                super(RequestItemsDict, self).__init__(*args, **kwargs)
+
+    # noinspection PyMethodOverriding
    def __setitem__(self, key, value):
        """ If `key` is assigned more than once, `self[key]` holds a
        `list` of all the values.

@@ -535,34 +635,75 @@ class ParamDict(OrderedDict):
        data and URL params.

        """
+        assert not isinstance(value, list)
        if key not in self:
-            super(ParamDict, self).__setitem__(key, value)
+            super(RequestItemsDict, self).__setitem__(key, value)
        else:
            if not isinstance(self[key], list):
-                super(ParamDict, self).__setitem__(key, [self[key]])
+                super(RequestItemsDict, self).__setitem__(key, [self[key]])
            self[key].append(value)


-def parse_items(items, data=None, headers=None, files=None, params=None):
+class ParamsDict(RequestItemsDict):
+    pass
+
+
+class DataDict(RequestItemsDict):
+
+    def items(self):
+        for key, values in super(RequestItemsDict, self).items():
+            if not isinstance(values, list):
+                values = [values]
+            for value in values:
+                yield key, value
+
+
+RequestItems = namedtuple('RequestItems',
+                          ['headers', 'data', 'files', 'params'])
+
+
+def get_content_type(filename):
+    """
+    Return the content type for ``filename`` in format appropriate
+    for Content-Type headers, or ``None`` if the file type is unknown
+    to ``mimetypes``.
+
+    """
+    mime, encoding = mimetypes.guess_type(filename, strict=False)
+    if mime:
+        content_type = mime
+        if encoding:
+            content_type = '%s; charset=%s' % (mime, encoding)
+        return content_type
+
+
+def parse_items(items,
+                headers_class=CaseInsensitiveDict,
+                data_class=OrderedDict,
+                files_class=DataDict,
+                params_class=ParamsDict):
    """Parse `KeyValue` `items` into `data`, `headers`, `files`,
    and `params`.

    """
-    if headers is None:
-        headers = CaseInsensitiveDict()
-    if data is None:
-        data = OrderedDict()
-    if files is None:
-        files = OrderedDict()
-    if params is None:
-        params = ParamDict()
+    headers = []
+    data = []
+    files = []
+    params = []

    for item in items:

        value = item.value
-        key = item.key

        if item.sep == SEP_HEADERS:
+            if value == '':
+                # No value => unset the header
+                value = None
+            target = headers
+        elif item.sep == SEP_HEADERS_EMPTY:
+            if item.value:
+                raise ParseError(
+                    'Invalid item "%s" '
+                    '(to specify an empty header use `Header;`)'
+                    % item.orig
+                )
            target = headers
        elif item.sep == SEP_QUERY:
            target = params
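The `get_content_type()` helper introduced above is a thin wrapper over `mimetypes.guess_type()`; a quick sketch of the values it would produce (sample filenames chosen for illustration):

```python
import mimetypes


def get_content_type(filename):
    mime, encoding = mimetypes.guess_type(filename, strict=False)
    if mime:
        content_type = mime
        if encoding:
            # Note: `encoding` here is a content-encoding such as gzip.
            content_type = '%s; charset=%s' % (mime, encoding)
        return content_type


print(get_content_type('photo.jpeg'))       # image/jpeg
print(get_content_type('page.html'))        # text/html
print(get_content_type('file.unknownext'))  # None (unknown to mimetypes)
```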
@@ -570,23 +711,48 @@ def parse_items(items, data=None, headers=None, files=None, params=None):
            try:
                with open(os.path.expanduser(value), 'rb') as f:
                    value = (os.path.basename(value),
-                             BytesIO(f.read()))
+                             BytesIO(f.read()),
+                             get_content_type(value))
            except IOError as e:
-                raise ParseError(
-                    'Invalid argument "%s": %s' % (item.orig, e))
+                raise ParseError('"%s": %s' % (item.orig, e))
            target = files

-        elif item.sep in [SEP_DATA, SEP_DATA_RAW_JSON]:
-            if item.sep == SEP_DATA_RAW_JSON:
-                try:
-                    value = json.loads(item.value)
-                except ValueError:
-                    raise ParseError('"%s" is not valid JSON' % item.orig)
+        elif item.sep in SEP_GROUP_DATA_ITEMS:
+
+            if item.sep in SEP_GROUP_DATA_EMBED_ITEMS:
+                try:
+                    with open(os.path.expanduser(value), 'rb') as f:
+                        value = f.read().decode('utf8')
+                except IOError as e:
+                    raise ParseError('"%s": %s' % (item.orig, e))
+                except UnicodeDecodeError:
+                    raise ParseError(
+                        '"%s": cannot embed the content of "%s",'
+                        ' not a UTF8 or ASCII-encoded text file'
+                        % (item.orig, item.value)
+                    )
+
+            if item.sep in SEP_GROUP_RAW_JSON_ITEMS:
+                try:
+                    value = load_json_preserve_order(value)
+                except ValueError as e:
+                    raise ParseError('"%s": %s' % (item.orig, e))
            target = data

        else:
            raise TypeError(item)

-        target[key] = value
+        target.append((item.key, value))

-    return headers, data, files, params
+    return RequestItems(headers_class(headers),
+                        data_class(data),
+                        files_class(files),
+                        params_class(params))
+
+
+def readable_file_arg(filename):
+    try:
+        open(filename, 'rb')
+    except IOError as ex:
+        raise ArgumentTypeError('%s: %s' % (filename, ex.args[1]))
+    return filename
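A short, self-contained sketch of how the refactored `parse_items()` is meant to be consumed (the `KV` stand-in, the flat `dict` collectors, and the sample items are invented for this illustration; the real code routes on the separator constants and multi-value dict classes shown above):

```python
from collections import namedtuple

KV = namedtuple('KV', 'key value sep orig')
RequestItems = namedtuple('RequestItems', ['headers', 'data', 'files', 'params'])


def parse_items(items):
    headers, data, files, params = [], [], [], []
    routing = {':': headers, '==': params, '=': data, '@': files}
    for item in items:
        routing[item.sep].append((item.key, item.value))
    return RequestItems(dict(headers), dict(data), dict(files), dict(params))


items = [
    KV('Accept', 'application/json', ':', 'Accept:application/json'),
    KV('page', '2', '==', 'page==2'),
    KV('name', 'Jane', '=', 'name=Jane'),
]
print(parse_items(items))
# RequestItems(headers={'Accept': 'application/json'}, data={'name': 'Jane'},
#              files={}, params={'page': '2'})
```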
httpie/models.py

@@ -1,59 +1,4 @@
-import os
+from httpie.compat import urlsplit, str
-import sys
-
-from .config import DEFAULT_CONFIG_DIR, Config
-from .compat import urlsplit, is_windows, bytes, str
-
-
-class Environment(object):
-    """Holds information about the execution context.
-
-    Groups various aspects of the environment in a changeable object
-    and allows for mocking.
-
-    """
-
-    is_windows = is_windows
-
-    progname = os.path.basename(sys.argv[0])
-    if progname not in ['http', 'https']:
-        progname = 'http'
-
-    config_dir = DEFAULT_CONFIG_DIR
-
-    # Can be set to 0 to disable colors completely.
-    colors = 256 if '256color' in os.environ.get('TERM', '') else 88
-
-    stdin = sys.stdin
-    stdin_isatty = sys.stdin.isatty()
-
-    stdout_isatty = sys.stdout.isatty()
-    stderr_isatty = sys.stderr.isatty()
-    if is_windows:
-        # noinspection PyUnresolvedReferences
-        from colorama.initialise import wrap_stream
-        stdout = wrap_stream(sys.stdout, convert=None,
-                             strip=None, autoreset=True, wrap=True)
-        stderr = wrap_stream(sys.stderr, convert=None,
-                             strip=None, autoreset=True, wrap=True)
-    else:
-        stdout = sys.stdout
-        stderr = sys.stderr
-
-    def __init__(self, **kwargs):
-        assert all(hasattr(type(self), attr)
-                   for attr in kwargs.keys())
-        self.__dict__.update(**kwargs)
-
-    @property
-    def config(self):
-        if not hasattr(self, '_config'):
-            self._config = Config(directory=self.config_dir)
-            if self._config.is_new:
-                self._config.save()
-            else:
-                self._config.load()
-        return self._config
-
-
class HTTPMessage(object):

@@ -88,7 +33,10 @@ class HTTPMessage(object):
    @property
    def content_type(self):
        """Return the message content type."""
-        return self._orig.headers.get('Content-Type', '')
+        ct = self._orig.headers.get('Content-Type', '')
+        if not isinstance(ct, str):
+            ct = ct.decode('utf8')
+        return ct


class HTTPResponse(HTTPMessage):

@@ -100,12 +48,20 @@ class HTTPResponse(HTTPMessage):
    def iter_lines(self, chunk_size):
        return ((line, b'\n') for line in self._orig.iter_lines(chunk_size))

-    #noinspection PyProtectedMember
+    # noinspection PyProtectedMember
    @property
    def headers(self):
        original = self._orig.raw._original_response
+
+        version = {
+            9: '0.9',
+            10: '1.0',
+            11: '1.1',
+            20: '2',
+        }[original.version]
+
        status_line = 'HTTP/{version} {status} {reason}'.format(
-            version='.'.join(str(original.version)),
+            version=version,
            status=original.status,
            reason=original.reason
        )

@@ -153,16 +109,24 @@ class HTTPRequest(HTTPMessage):
        )

        headers = dict(self._orig.headers)
+        if 'Host' not in self._orig.headers:
+            headers['Host'] = url.netloc.split('@')[-1]

-        if 'Host' not in headers:
-            headers['Host'] = url.netloc
-
-        headers = ['%s: %s' % (name, value)
-                   for name, value in headers.items()]
+        headers = [
+            '%s: %s' % (
+                name,
+                value if isinstance(value, str) else value.decode('utf8')
+            )
+            for name, value in headers.items()
+        ]

        headers.insert(0, request_line)
+        headers = '\r\n'.join(headers).strip()

-        return '\r\n'.join(headers).strip()
+        if isinstance(headers, bytes):
+            # Python < 3
+            headers = headers.decode('utf8')
+        return headers

    @property
    def encoding(self):
526  httpie/output.py

@@ -1,526 +0,0 @@
"""Output streaming, processing and formatting.

"""
import json
import xml.dom.minidom
from functools import partial
from itertools import chain

import pygments
from pygments import token, lexer
from pygments.styles import get_style_by_name, STYLE_MAP
from pygments.lexers import get_lexer_for_mimetype, get_lexer_by_name
from pygments.formatters.terminal import TerminalFormatter
from pygments.formatters.terminal256 import Terminal256Formatter
from pygments.util import ClassNotFound

from .compat import is_windows
from .solarized import Solarized256Style
from .models import HTTPRequest, HTTPResponse, Environment
from .input import (OUT_REQ_BODY, OUT_REQ_HEAD,
                    OUT_RESP_HEAD, OUT_RESP_BODY)


# The default number of spaces to indent when pretty printing
DEFAULT_INDENT = 4

# Colors on Windows via colorama don't look that
# great and fruity seems to give the best result there.
AVAILABLE_STYLES = set(STYLE_MAP.keys())
AVAILABLE_STYLES.add('solarized')
DEFAULT_STYLE = 'solarized' if not is_windows else 'fruity'


BINARY_SUPPRESSED_NOTICE = (
    b'\n'
    b'+-----------------------------------------+\n'
    b'| NOTE: binary data not shown in terminal |\n'
    b'+-----------------------------------------+'
)


class BinarySuppressedError(Exception):
    """An error indicating that the body is binary and won't be written,
    e.g., for terminal output)."""

    message = BINARY_SUPPRESSED_NOTICE


###############################################################################
# Output Streams
###############################################################################


def write(stream, outfile, flush):
    """Write the output stream."""
    try:
        # Writing bytes so we use the buffer interface (Python 3).
        buf = outfile.buffer
    except AttributeError:
        buf = outfile

    for chunk in stream:
        buf.write(chunk)
        if flush:
            outfile.flush()


def write_with_colors_win_py3(stream, outfile, flush):
    """Like `write`, but colorized chunks are written as text
    directly to `outfile` to ensure it gets processed by colorama.
    Applies only to Windows with Python 3 and colorized terminal output.

    """
    color = b'\x1b['
    encoding = outfile.encoding
    for chunk in stream:
        if color in chunk:
            outfile.write(chunk.decode(encoding))
        else:
            outfile.buffer.write(chunk)
        if flush:
            outfile.flush()


def build_output_stream(args, env, request, response):
    """Build and return a chain of iterators over the `request`-`response`
    exchange each of which yields `bytes` chunks.

    """

    req_h = OUT_REQ_HEAD in args.output_options
    req_b = OUT_REQ_BODY in args.output_options
    resp_h = OUT_RESP_HEAD in args.output_options
    resp_b = OUT_RESP_BODY in args.output_options
    req = req_h or req_b
    resp = resp_h or resp_b

    output = []
    Stream = get_stream_type(env, args)

    if req:
        output.append(Stream(
            msg=HTTPRequest(request),
            with_headers=req_h,
            with_body=req_b))

    if req_b and resp:
        # Request/Response separator.
        output.append([b'\n\n'])

    if resp:
        output.append(Stream(
            msg=HTTPResponse(response),
            with_headers=resp_h,
            with_body=resp_b))

    if env.stdout_isatty and resp_b:
        # Ensure a blank line after the response body.
        # For terminal output only.
        output.append([b'\n\n'])

    return chain(*output)


def get_stream_type(env, args):
    """Pick the right stream type based on `env` and `args`.
    Wrap it in a partial with the type-specific args so that
    we don't need to think what stream we are dealing with.

    """
    if not env.stdout_isatty and not args.prettify:
        Stream = partial(
            RawStream,
            chunk_size=RawStream.CHUNK_SIZE_BY_LINE
            if args.stream
            else RawStream.CHUNK_SIZE
        )
    elif args.prettify:
        Stream = partial(
            PrettyStream if args.stream else BufferedPrettyStream,
            env=env,
            processor=OutputProcessor(
                env=env, groups=args.prettify, pygments_style=args.style),
        )
    else:
        Stream = partial(EncodedStream, env=env)

    return Stream


class BaseStream(object):
    """Base HTTP message output stream class."""

    def __init__(self, msg, with_headers=True, with_body=True,
                 on_body_chunk_downloaded=None):
        """
        :param msg: a :class:`models.HTTPMessage` subclass
        :param with_headers: if `True`, headers will be included
        :param with_body: if `True`, body will be included

        """
        assert with_headers or with_body
        self.msg = msg
        self.with_headers = with_headers
        self.with_body = with_body
        self.on_body_chunk_downloaded = on_body_chunk_downloaded

    def _get_headers(self):
        """Return the headers' bytes."""
        return self.msg.headers.encode('ascii')

    def _iter_body(self):
        """Return an iterator over the message body."""
        raise NotImplementedError()

    def __iter__(self):
        """Return an iterator over `self.msg`."""
        if self.with_headers:
            yield self._get_headers()
            yield b'\r\n\r\n'

        if self.with_body:
            try:
                for chunk in self._iter_body():
                    yield chunk
                    if self.on_body_chunk_downloaded:
                        self.on_body_chunk_downloaded(chunk)
            except BinarySuppressedError as e:
                if self.with_headers:
                    yield b'\n'
                yield e.message


class RawStream(BaseStream):
    """The message is streamed in chunks with no processing."""

    CHUNK_SIZE = 1024 * 100
    CHUNK_SIZE_BY_LINE = 1

    def __init__(self, chunk_size=CHUNK_SIZE, **kwargs):
        super(RawStream, self).__init__(**kwargs)
        self.chunk_size = chunk_size

    def _iter_body(self):
        return self.msg.iter_body(self.chunk_size)


class EncodedStream(BaseStream):
    """Encoded HTTP message stream.

    The message bytes are converted to an encoding suitable for
    `self.env.stdout`. Unicode errors are replaced and binary data
    is suppressed. The body is always streamed by line.

    """
    CHUNK_SIZE = 1

    def __init__(self, env=Environment(), **kwargs):

        super(EncodedStream, self).__init__(**kwargs)

        if env.stdout_isatty:
            # Use the encoding supported by the terminal.
            output_encoding = getattr(env.stdout, 'encoding', None)
        else:
            # Preserve the message encoding.
            output_encoding = self.msg.encoding

        # Default to utf8 when unsure.
        self.output_encoding = output_encoding or 'utf8'

    def _iter_body(self):

        for line, lf in self.msg.iter_lines(self.CHUNK_SIZE):

            if b'\0' in line:
                raise BinarySuppressedError()

            yield line.decode(self.msg.encoding)\
                      .encode(self.output_encoding, 'replace') + lf


class PrettyStream(EncodedStream):
    """In addition to :class:`EncodedStream` behaviour, this stream applies
    content processing.

    Useful for long-lived HTTP responses that stream by lines
    such as the Twitter streaming API.

    """

    CHUNK_SIZE = 1

    def __init__(self, processor, **kwargs):
        super(PrettyStream, self).__init__(**kwargs)
        self.processor = processor

    def _get_headers(self):
        return self.processor.process_headers(
            self.msg.headers).encode(self.output_encoding)

    def _iter_body(self):
        for line, lf in self.msg.iter_lines(self.CHUNK_SIZE):
            if b'\0' in line:
                raise BinarySuppressedError()
            yield self._process_body(line) + lf

    def _process_body(self, chunk):
        return (self.processor
                .process_body(
                    content=chunk.decode(self.msg.encoding, 'replace'),
                    content_type=self.msg.content_type,
                    encoding=self.msg.encoding)
                .encode(self.output_encoding, 'replace'))


class BufferedPrettyStream(PrettyStream):
    """The same as :class:`PrettyStream` except that the body is fully
    fetched before it's processed.

    Suitable regular HTTP responses.

    """

    CHUNK_SIZE = 1024 * 10

    def _iter_body(self):

        # Read the whole body before prettifying it,
        # but bail out immediately if the body is binary.
        body = bytearray()
        for chunk in self.msg.iter_body(self.CHUNK_SIZE):
            if b'\0' in chunk:
                raise BinarySuppressedError()
            body.extend(chunk)

        yield self._process_body(body)


###############################################################################
# Processing
###############################################################################

class HTTPLexer(lexer.RegexLexer):
    """Simplified HTTP lexer for Pygments.

    It only operates on headers and provides a stronger contrast between
    their names and values than the original one bundled with Pygments
    (:class:`pygments.lexers.text import HttpLexer`), especially when
    Solarized color scheme is used.

    """
    name = 'HTTP'
    aliases = ['http']
    filenames = ['*.http']
    tokens = {
        'root': [
            # Request-Line
            (r'([A-Z]+)( +)([^ ]+)( +)(HTTP)(/)(\d+\.\d+)',
             lexer.bygroups(
                 token.Name.Function,
                 token.Text,
                 token.Name.Namespace,
                 token.Text,
                 token.Keyword.Reserved,
                 token.Operator,
                 token.Number
             )),
            # Response Status-Line
            (r'(HTTP)(/)(\d+\.\d+)( +)(\d{3})( +)(.+)',
             lexer.bygroups(
                 token.Keyword.Reserved,  # 'HTTP'
                 token.Operator,  # '/'
                 token.Number,  # Version
                 token.Text,
                 token.Number,  # Status code
                 token.Text,
                 token.Name.Exception,  # Reason
             )),
            # Header
            (r'(.*?)( *)(:)( *)(.+)', lexer.bygroups(
                token.Name.Attribute,  # Name
                token.Text,
                token.Operator,  # Colon
                token.Text,
                token.String  # Value
            ))
        ]
    }


class BaseProcessor(object):
    """Base, noop output processor class."""

    enabled = True

    def __init__(self, env=Environment(), **kwargs):
        """
        :param env: an class:`Environment` instance
        :param kwargs: additional keyword argument that some
                       processor might require.

        """
        self.env = env
        self.kwargs = kwargs

    def process_headers(self, headers):
        """Return processed `headers`

        :param headers: The headers as text.

        """
        return headers

    def process_body(self, content, content_type, subtype, encoding):
        """Return processed `content`.

        :param content: The body content as text
        :param content_type: Full content type, e.g., 'application/atom+xml'.
        :param subtype: E.g. 'xml'.
        :param encoding: The original content encoding.

        """
        return content


class JSONProcessor(BaseProcessor):
    """JSON body processor."""

    def process_body(self, content, content_type, subtype, encoding):
        if subtype == 'json':
            try:
                # Indent the JSON data, sort keys by name, and
                # avoid unicode escapes to improve readability.
                content = json.dumps(json.loads(content),
                                     sort_keys=True,
                                     ensure_ascii=False,
                                     indent=DEFAULT_INDENT)
            except ValueError:
                # Invalid JSON but we don't care.
                pass
        return content


class XMLProcessor(BaseProcessor):
    """XML body processor."""
    # TODO: tests

    def process_body(self, content, content_type, subtype, encoding):
        if subtype == 'xml':
            try:
                # Pretty print the XML
                doc = xml.dom.minidom.parseString(content.encode(encoding))
                content = doc.toprettyxml(indent=' ' * DEFAULT_INDENT)
            except xml.parsers.expat.ExpatError:
                # Ignore invalid XML errors (skips attempting to pretty print)
                pass
        return content


class PygmentsProcessor(BaseProcessor):
    """A processor that applies syntax-highlighting using Pygments
    to the headers, and to the body as well if its content type is recognized.

    """
    def __init__(self, *args, **kwargs):
        super(PygmentsProcessor, self).__init__(*args, **kwargs)

        # Cache that speeds up when we process streamed body by line.
        self.lexers_by_type = {}

        if not self.env.colors:
            self.enabled = False
            return

        try:
            style = get_style_by_name(
                self.kwargs.get('pygments_style', DEFAULT_STYLE))
        except ClassNotFound:
            style = Solarized256Style

        if self.env.is_windows or self.env.colors == 256:
            fmt_class = Terminal256Formatter
        else:
            fmt_class = TerminalFormatter
        self.formatter = fmt_class(style=style)

    def process_headers(self, headers):
        return pygments.highlight(
            headers, HTTPLexer(), self.formatter).strip()

    def process_body(self, content, content_type, subtype, encoding):
        try:
            lexer = self.lexers_by_type.get(content_type)
            if not lexer:
                try:
                    lexer = get_lexer_for_mimetype(content_type)
                except ClassNotFound:
                    lexer = get_lexer_by_name(subtype)
                self.lexers_by_type[content_type] = lexer
        except ClassNotFound:
            pass
        else:
            content = pygments.highlight(content, lexer, self.formatter)
        return content.strip()


class HeadersProcessor(BaseProcessor):
    """Sorts headers by name retaining relative order of multiple headers
    with the same name.

    """
    def process_headers(self, headers):
        lines = headers.splitlines()
        headers = sorted(lines[1:], key=lambda h: h.split(':')[0])
        return '\r\n'.join(lines[:1] + headers)


class OutputProcessor(object):
    """A delegate class that invokes the actual processors."""

    installed_processors = {
        'format': [
            HeadersProcessor,
            JSONProcessor,
            XMLProcessor
        ],
        'colors': [
            PygmentsProcessor
        ]
    }

    def __init__(self, groups, env=Environment(), **kwargs):
        """
        :param env: a :class:`models.Environment` instance
        :param groups: the groups of processors to be applied
        :param kwargs: additional keyword arguments for processors

        """
        self.processors = []
        for group in groups:
            for cls in self.installed_processors[group]:
                processor = cls(env, **kwargs)
                if processor.enabled:
                    self.processors.append(processor)

    def process_headers(self, headers):
        for processor in self.processors:
            headers = processor.process_headers(headers)
        return headers

    def process_body(self, content, content_type, encoding):
        # e.g., 'application/atom+xml'
        content_type = content_type.split(';')[0]
        # e.g., 'xml'
        subtype = content_type.split('/')[-1].split('+')[-1]

        for processor in self.processors:
            content = processor.process_body(
                content,
                content_type,
                subtype,
                encoding
            )

        return content
0    httpie/output/__init__.py  Normal file
0    httpie/output/formatters/__init__.py  Normal file
261  httpie/output/formatters/colors.py  Normal file

@@ -0,0 +1,261 @@
from __future__ import absolute_import
import json

import pygments.lexer
import pygments.token
import pygments.styles
import pygments.lexers
import pygments.style
from pygments.formatters.terminal import TerminalFormatter
from pygments.formatters.terminal256 import Terminal256Formatter
from pygments.lexers.special import TextLexer
from pygments.lexers.text import HttpLexer as PygmentsHttpLexer
from pygments.util import ClassNotFound

from httpie.compat import is_windows
from httpie.plugins import FormatterPlugin


AUTO_STYLE = 'auto'  # Follows terminal ANSI color styles
DEFAULT_STYLE = AUTO_STYLE
SOLARIZED_STYLE = 'solarized'  # Bundled here
if is_windows:
    # Colors on Windows via colorama don't look that
    # great and fruity seems to give the best result there.
    DEFAULT_STYLE = 'fruity'

AVAILABLE_STYLES = set(pygments.styles.get_all_styles())
AVAILABLE_STYLES.add(SOLARIZED_STYLE)
AVAILABLE_STYLES.add(AUTO_STYLE)


class ColorFormatter(FormatterPlugin):
    """
    Colorize using Pygments

    This processor that applies syntax highlighting to the headers,
    and also to the body if its content type is recognized.

    """
    group_name = 'colors'

    def __init__(self, env, explicit_json=False,
                 color_scheme=DEFAULT_STYLE, **kwargs):
        super(ColorFormatter, self).__init__(**kwargs)

        if not env.colors:
            self.enabled = False
            return

        use_auto_style = color_scheme == AUTO_STYLE
        has_256_colors = env.colors == 256
        if use_auto_style or not has_256_colors:
            http_lexer = PygmentsHttpLexer()
            formatter = TerminalFormatter()
        else:
            http_lexer = SimplifiedHTTPLexer()
            formatter = Terminal256Formatter(
                style=self.get_style_class(color_scheme)
            )

        self.explicit_json = explicit_json  # --json
        self.formatter = formatter
        self.http_lexer = http_lexer

    def format_headers(self, headers):
        return pygments.highlight(
            code=headers,
            lexer=self.http_lexer,
            formatter=self.formatter,
        ).strip()

    def format_body(self, body, mime):
        lexer = self.get_lexer_for_body(mime, body)
        if lexer:
            body = pygments.highlight(
                code=body,
                lexer=lexer,
                formatter=self.formatter,
            )
        return body.strip()

    def get_lexer_for_body(self, mime, body):
        return get_lexer(
            mime=mime,
            explicit_json=self.explicit_json,
            body=body,
        )

    def get_style_class(self, color_scheme):
        try:
            return pygments.styles.get_style_by_name(color_scheme)
        except ClassNotFound:
            return Solarized256Style


def get_lexer(mime, explicit_json=False, body=''):

    # Build candidate mime type and lexer names.
    mime_types, lexer_names = [mime], []
    type_, subtype = mime.split('/', 1)
    if '+' not in subtype:
        lexer_names.append(subtype)
    else:
        subtype_name, subtype_suffix = subtype.split('+', 1)
        lexer_names.extend([subtype_name, subtype_suffix])
        mime_types.extend([
            '%s/%s' % (type_, subtype_name),
            '%s/%s' % (type_, subtype_suffix)
        ])

    # As a last resort, if no lexer feels responsible, and
    # the subtype contains 'json', take the JSON lexer
    if 'json' in subtype:
        lexer_names.append('json')

    # Try to resolve the right lexer.
    lexer = None
    for mime_type in mime_types:
        try:
            lexer = pygments.lexers.get_lexer_for_mimetype(mime_type)
            break
        except ClassNotFound:
            pass
    else:
        for name in lexer_names:
            try:
                lexer = pygments.lexers.get_lexer_by_name(name)
            except ClassNotFound:
                pass

    if explicit_json and body and (not lexer or isinstance(lexer, TextLexer)):
        # JSON response with an incorrect Content-Type?
        try:
            json.loads(body)  # FIXME: the body also gets parsed in json.py
        except ValueError:
pass # Nope
|
||||||
|
else:
|
||||||
|
lexer = pygments.lexers.get_lexer_by_name('json')
|
||||||
|
|
||||||
|
return lexer
|
||||||
|
|
||||||
|
|
||||||
|
class SimplifiedHTTPLexer(pygments.lexer.RegexLexer):
|
||||||
|
"""Simplified HTTP lexer for Pygments.
|
||||||
|
|
||||||
|
It only operates on headers and provides a stronger contrast between
|
||||||
|
their names and values than the original one bundled with Pygments
|
||||||
|
(:class:`pygments.lexers.text import HttpLexer`), especially when
|
||||||
|
Solarized color scheme is used.
|
||||||
|
|
||||||
|
"""
|
||||||
|
name = 'HTTP'
|
||||||
|
aliases = ['http']
|
||||||
|
filenames = ['*.http']
|
||||||
|
tokens = {
|
||||||
|
'root': [
|
||||||
|
# Request-Line
|
||||||
|
(r'([A-Z]+)( +)([^ ]+)( +)(HTTP)(/)(\d+\.\d+)',
|
||||||
|
pygments.lexer.bygroups(
|
||||||
|
pygments.token.Name.Function,
|
||||||
|
pygments.token.Text,
|
||||||
|
pygments.token.Name.Namespace,
|
||||||
|
pygments.token.Text,
|
||||||
|
pygments.token.Keyword.Reserved,
|
||||||
|
pygments.token.Operator,
|
||||||
|
pygments.token.Number
|
||||||
|
)),
|
||||||
|
# Response Status-Line
|
||||||
|
(r'(HTTP)(/)(\d+\.\d+)( +)(\d{3})( +)(.+)',
|
||||||
|
pygments.lexer.bygroups(
|
||||||
|
pygments.token.Keyword.Reserved, # 'HTTP'
|
||||||
|
pygments.token.Operator, # '/'
|
||||||
|
pygments.token.Number, # Version
|
||||||
|
pygments.token.Text,
|
||||||
|
pygments.token.Number, # Status code
|
||||||
|
pygments.token.Text,
|
||||||
|
pygments.token.Name.Exception, # Reason
|
||||||
|
)),
|
||||||
|
# Header
|
||||||
|
(r'(.*?)( *)(:)( *)(.+)', pygments.lexer.bygroups(
|
||||||
|
pygments.token.Name.Attribute, # Name
|
||||||
|
pygments.token.Text,
|
||||||
|
pygments.token.Operator, # Colon
|
||||||
|
pygments.token.Text,
|
||||||
|
pygments.token.String # Value
|
||||||
|
))
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
class Solarized256Style(pygments.style.Style):
|
||||||
|
"""
|
||||||
|
solarized256
|
||||||
|
------------
|
||||||
|
|
||||||
|
A Pygments style inspired by Solarized's 256 color mode.
|
||||||
|
|
||||||
|
:copyright: (c) 2011 by Hank Gay, (c) 2012 by John Mastro.
|
||||||
|
:license: BSD, see LICENSE for more details.
|
||||||
|
|
||||||
|
"""
|
||||||
|
BASE03 = "#1c1c1c"
|
||||||
|
BASE02 = "#262626"
|
||||||
|
BASE01 = "#4e4e4e"
|
||||||
|
BASE00 = "#585858"
|
||||||
|
BASE0 = "#808080"
|
||||||
|
BASE1 = "#8a8a8a"
|
||||||
|
BASE2 = "#d7d7af"
|
||||||
|
BASE3 = "#ffffd7"
|
||||||
|
YELLOW = "#af8700"
|
||||||
|
ORANGE = "#d75f00"
|
||||||
|
RED = "#af0000"
|
||||||
|
MAGENTA = "#af005f"
|
||||||
|
VIOLET = "#5f5faf"
|
||||||
|
BLUE = "#0087ff"
|
||||||
|
CYAN = "#00afaf"
|
||||||
|
GREEN = "#5f8700"
|
||||||
|
|
||||||
|
background_color = BASE03
|
||||||
|
styles = {
|
||||||
|
pygments.token.Keyword: GREEN,
|
||||||
|
pygments.token.Keyword.Constant: ORANGE,
|
||||||
|
pygments.token.Keyword.Declaration: BLUE,
|
||||||
|
pygments.token.Keyword.Namespace: ORANGE,
|
||||||
|
pygments.token.Keyword.Reserved: BLUE,
|
||||||
|
pygments.token.Keyword.Type: RED,
|
||||||
|
pygments.token.Name.Attribute: BASE1,
|
||||||
|
pygments.token.Name.Builtin: BLUE,
|
||||||
|
pygments.token.Name.Builtin.Pseudo: BLUE,
|
||||||
|
pygments.token.Name.Class: BLUE,
|
||||||
|
pygments.token.Name.Constant: ORANGE,
|
||||||
|
pygments.token.Name.Decorator: BLUE,
|
||||||
|
pygments.token.Name.Entity: ORANGE,
|
||||||
|
pygments.token.Name.Exception: YELLOW,
|
||||||
|
pygments.token.Name.Function: BLUE,
|
||||||
|
pygments.token.Name.Tag: BLUE,
|
||||||
|
pygments.token.Name.Variable: BLUE,
|
||||||
|
pygments.token.String: CYAN,
|
||||||
|
pygments.token.String.Backtick: BASE01,
|
||||||
|
pygments.token.String.Char: CYAN,
|
||||||
|
pygments.token.String.Doc: CYAN,
|
||||||
|
pygments.token.String.Escape: RED,
|
||||||
|
pygments.token.String.Heredoc: CYAN,
|
||||||
|
pygments.token.String.Regex: RED,
|
||||||
|
pygments.token.Number: CYAN,
|
||||||
|
pygments.token.Operator: BASE1,
|
||||||
|
pygments.token.Operator.Word: GREEN,
|
||||||
|
pygments.token.Comment: BASE01,
|
||||||
|
pygments.token.Comment.Preproc: GREEN,
|
||||||
|
pygments.token.Comment.Special: GREEN,
|
||||||
|
pygments.token.Generic.Deleted: CYAN,
|
||||||
|
pygments.token.Generic.Emph: 'italic',
|
||||||
|
pygments.token.Generic.Error: RED,
|
||||||
|
pygments.token.Generic.Heading: ORANGE,
|
||||||
|
pygments.token.Generic.Inserted: GREEN,
|
||||||
|
pygments.token.Generic.Strong: 'bold',
|
||||||
|
pygments.token.Generic.Subheading: ORANGE,
|
||||||
|
pygments.token.Token: BASE1,
|
||||||
|
pygments.token.Token.Other: ORANGE,
|
||||||
|
}
|
14  httpie/output/formatters/headers.py  Normal file
@ -0,0 +1,14 @@

from httpie.plugins import FormatterPlugin


class HeadersFormatter(FormatterPlugin):

    def format_headers(self, headers):
        """
        Sorts headers by name while retaining relative
        order of multiple headers with the same name.

        """
        lines = headers.splitlines()
        headers = sorted(lines[1:], key=lambda h: h.split(':')[0])
        return '\r\n'.join(lines[:1] + headers)
33  httpie/output/formatters/json.py  Normal file
@ -0,0 +1,33 @@

from __future__ import absolute_import
import json

from httpie.plugins import FormatterPlugin


DEFAULT_INDENT = 4


class JSONFormatter(FormatterPlugin):

    def format_body(self, body, mime):
        maybe_json = [
            'json',
            'javascript',
            'text',
        ]
        if (self.kwargs['explicit_json']
                or any(token in mime for token in maybe_json)):
            try:
                obj = json.loads(body)
            except ValueError:
                pass  # Invalid JSON, ignore.
            else:
                # Indent, sort keys by name, and avoid
                # unicode escapes to improve readability.
                body = json.dumps(
                    obj=obj,
                    sort_keys=True,
                    ensure_ascii=False,
                    indent=DEFAULT_INDENT
                )
        return body
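For illustration only (not part of the diff), a minimal usage sketch of the formatter above; the sample payload and the explicit_json keyword value are assumptions:

    # Hypothetical usage sketch: pretty-print a JSON body with JSONFormatter.
    formatter = JSONFormatter(explicit_json=False)
    print(formatter.format_body('{"b": 1, "a": 2}', mime='application/json'))
    # Keys come back sorted and indented:
    # {
    #     "a": 2,
    #     "b": 1
    # }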
50  httpie/output/processing.py  Normal file
@ -0,0 +1,50 @@

import re

from httpie.plugins import plugin_manager
from httpie.context import Environment


MIME_RE = re.compile(r'^[^/]+/[^/]+$')


def is_valid_mime(mime):
    return mime and MIME_RE.match(mime)


class Conversion(object):

    def get_converter(self, mime):
        if is_valid_mime(mime):
            for converter_class in plugin_manager.get_converters():
                if converter_class.supports(mime):
                    return converter_class(mime)


class Formatting(object):
    """A delegate class that invokes the actual processors."""

    def __init__(self, groups, env=Environment(), **kwargs):
        """
        :param groups: names of processor groups to be applied
        :param env: Environment
        :param kwargs: additional keyword arguments for processors

        """
        available_plugins = plugin_manager.get_formatters_grouped()
        self.enabled_plugins = []
        for group in groups:
            for cls in available_plugins[group]:
                p = cls(env=env, **kwargs)
                if p.enabled:
                    self.enabled_plugins.append(p)

    def format_headers(self, headers):
        for p in self.enabled_plugins:
            headers = p.format_headers(headers)
        return headers

    def format_body(self, content, mime):
        if is_valid_mime(mime):
            for p in self.enabled_plugins:
                content = p.format_body(content, mime)
        return content
297
httpie/output/streams.py
Normal file
297
httpie/output/streams.py
Normal file
@ -0,0 +1,297 @@
|
|||||||
|
from itertools import chain
|
||||||
|
from functools import partial
|
||||||
|
|
||||||
|
from httpie.compat import str
|
||||||
|
from httpie.context import Environment
|
||||||
|
from httpie.models import HTTPRequest, HTTPResponse
|
||||||
|
from httpie.input import (OUT_REQ_BODY, OUT_REQ_HEAD,
|
||||||
|
OUT_RESP_HEAD, OUT_RESP_BODY)
|
||||||
|
from httpie.output.processing import Formatting, Conversion
|
||||||
|
|
||||||
|
|
||||||
|
BINARY_SUPPRESSED_NOTICE = (
|
||||||
|
b'\n'
|
||||||
|
b'+-----------------------------------------+\n'
|
||||||
|
b'| NOTE: binary data not shown in terminal |\n'
|
||||||
|
b'+-----------------------------------------+'
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class BinarySuppressedError(Exception):
|
||||||
|
"""An error indicating that the body is binary and won't be written,
|
||||||
|
e.g., for terminal output)."""
|
||||||
|
|
||||||
|
message = BINARY_SUPPRESSED_NOTICE
|
||||||
|
|
||||||
|
|
||||||
|
def write_stream(stream, outfile, flush):
|
||||||
|
"""Write the output stream."""
|
||||||
|
try:
|
||||||
|
# Writing bytes so we use the buffer interface (Python 3).
|
||||||
|
buf = outfile.buffer
|
||||||
|
except AttributeError:
|
||||||
|
buf = outfile
|
||||||
|
|
||||||
|
for chunk in stream:
|
||||||
|
buf.write(chunk)
|
||||||
|
if flush:
|
||||||
|
outfile.flush()
|
||||||
|
|
||||||
|
|
||||||
|
def write_stream_with_colors_win_py3(stream, outfile, flush):
|
||||||
|
"""Like `write`, but colorized chunks are written as text
|
||||||
|
directly to `outfile` to ensure it gets processed by colorama.
|
||||||
|
Applies only to Windows with Python 3 and colorized terminal output.
|
||||||
|
|
||||||
|
"""
|
||||||
|
color = b'\x1b['
|
||||||
|
encoding = outfile.encoding
|
||||||
|
for chunk in stream:
|
||||||
|
if color in chunk:
|
||||||
|
outfile.write(chunk.decode(encoding))
|
||||||
|
else:
|
||||||
|
outfile.buffer.write(chunk)
|
||||||
|
if flush:
|
||||||
|
outfile.flush()
|
||||||
|
|
||||||
|
|
||||||
|
def build_output_stream(args, env, request, response, output_options):
|
||||||
|
"""Build and return a chain of iterators over the `request`-`response`
|
||||||
|
exchange each of which yields `bytes` chunks.
|
||||||
|
|
||||||
|
"""
|
||||||
|
req_h = OUT_REQ_HEAD in output_options
|
||||||
|
req_b = OUT_REQ_BODY in output_options
|
||||||
|
resp_h = OUT_RESP_HEAD in output_options
|
||||||
|
resp_b = OUT_RESP_BODY in output_options
|
||||||
|
req = req_h or req_b
|
||||||
|
resp = resp_h or resp_b
|
||||||
|
|
||||||
|
output = []
|
||||||
|
Stream = get_stream_type(env, args)
|
||||||
|
|
||||||
|
if req:
|
||||||
|
output.append(Stream(
|
||||||
|
msg=HTTPRequest(request),
|
||||||
|
with_headers=req_h,
|
||||||
|
with_body=req_b))
|
||||||
|
|
||||||
|
if req_b and resp:
|
||||||
|
# Request/Response separator.
|
||||||
|
output.append([b'\n\n'])
|
||||||
|
|
||||||
|
if resp:
|
||||||
|
output.append(Stream(
|
||||||
|
msg=HTTPResponse(response),
|
||||||
|
with_headers=resp_h,
|
||||||
|
with_body=resp_b))
|
||||||
|
|
||||||
|
if env.stdout_isatty and resp_b:
|
||||||
|
# Ensure a blank line after the response body.
|
||||||
|
# For terminal output only.
|
||||||
|
output.append([b'\n\n'])
|
||||||
|
|
||||||
|
return chain(*output)
|
||||||
|
|
||||||
|
|
||||||
|
def get_stream_type(env, args):
|
||||||
|
"""Pick the right stream type based on `env` and `args`.
|
||||||
|
Wrap it in a partial with the type-specific args so that
|
||||||
|
we don't need to think what stream we are dealing with.
|
||||||
|
|
||||||
|
"""
|
||||||
|
if not env.stdout_isatty and not args.prettify:
|
||||||
|
Stream = partial(
|
||||||
|
RawStream,
|
||||||
|
chunk_size=RawStream.CHUNK_SIZE_BY_LINE
|
||||||
|
if args.stream
|
||||||
|
else RawStream.CHUNK_SIZE
|
||||||
|
)
|
||||||
|
elif args.prettify:
|
||||||
|
Stream = partial(
|
||||||
|
PrettyStream if args.stream else BufferedPrettyStream,
|
||||||
|
env=env,
|
||||||
|
conversion=Conversion(),
|
||||||
|
formatting=Formatting(
|
||||||
|
env=env,
|
||||||
|
groups=args.prettify,
|
||||||
|
color_scheme=args.style,
|
||||||
|
explicit_json=args.json,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
Stream = partial(EncodedStream, env=env)
|
||||||
|
|
||||||
|
return Stream
|
||||||
|
|
||||||
|
|
||||||
|
class BaseStream(object):
|
||||||
|
"""Base HTTP message output stream class."""
|
||||||
|
|
||||||
|
def __init__(self, msg, with_headers=True, with_body=True,
|
||||||
|
on_body_chunk_downloaded=None):
|
||||||
|
"""
|
||||||
|
:param msg: a :class:`models.HTTPMessage` subclass
|
||||||
|
:param with_headers: if `True`, headers will be included
|
||||||
|
:param with_body: if `True`, body will be included
|
||||||
|
|
||||||
|
"""
|
||||||
|
assert with_headers or with_body
|
||||||
|
self.msg = msg
|
||||||
|
self.with_headers = with_headers
|
||||||
|
self.with_body = with_body
|
||||||
|
self.on_body_chunk_downloaded = on_body_chunk_downloaded
|
||||||
|
|
||||||
|
def get_headers(self):
|
||||||
|
"""Return the headers' bytes."""
|
||||||
|
return self.msg.headers.encode('utf8')
|
||||||
|
|
||||||
|
def iter_body(self):
|
||||||
|
"""Return an iterator over the message body."""
|
||||||
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
def __iter__(self):
|
||||||
|
"""Return an iterator over `self.msg`."""
|
||||||
|
if self.with_headers:
|
||||||
|
yield self.get_headers()
|
||||||
|
yield b'\r\n\r\n'
|
||||||
|
|
||||||
|
if self.with_body:
|
||||||
|
try:
|
||||||
|
for chunk in self.iter_body():
|
||||||
|
yield chunk
|
||||||
|
if self.on_body_chunk_downloaded:
|
||||||
|
self.on_body_chunk_downloaded(chunk)
|
||||||
|
except BinarySuppressedError as e:
|
||||||
|
if self.with_headers:
|
||||||
|
yield b'\n'
|
||||||
|
yield e.message
|
||||||
|
|
||||||
|
|
||||||
|
class RawStream(BaseStream):
|
||||||
|
"""The message is streamed in chunks with no processing."""
|
||||||
|
|
||||||
|
CHUNK_SIZE = 1024 * 100
|
||||||
|
CHUNK_SIZE_BY_LINE = 1
|
||||||
|
|
||||||
|
def __init__(self, chunk_size=CHUNK_SIZE, **kwargs):
|
||||||
|
super(RawStream, self).__init__(**kwargs)
|
||||||
|
self.chunk_size = chunk_size
|
||||||
|
|
||||||
|
def iter_body(self):
|
||||||
|
return self.msg.iter_body(self.chunk_size)
|
||||||
|
|
||||||
|
|
||||||
|
class EncodedStream(BaseStream):
|
||||||
|
"""Encoded HTTP message stream.
|
||||||
|
|
||||||
|
The message bytes are converted to an encoding suitable for
|
||||||
|
`self.env.stdout`. Unicode errors are replaced and binary data
|
||||||
|
is suppressed. The body is always streamed by line.
|
||||||
|
|
||||||
|
"""
|
||||||
|
CHUNK_SIZE = 1
|
||||||
|
|
||||||
|
def __init__(self, env=Environment(), **kwargs):
|
||||||
|
|
||||||
|
super(EncodedStream, self).__init__(**kwargs)
|
||||||
|
|
||||||
|
if env.stdout_isatty:
|
||||||
|
# Use the encoding supported by the terminal.
|
||||||
|
output_encoding = env.stdout_encoding
|
||||||
|
else:
|
||||||
|
# Preserve the message encoding.
|
||||||
|
output_encoding = self.msg.encoding
|
||||||
|
|
||||||
|
# Default to utf8 when unsure.
|
||||||
|
self.output_encoding = output_encoding or 'utf8'
|
||||||
|
|
||||||
|
def iter_body(self):
|
||||||
|
|
||||||
|
for line, lf in self.msg.iter_lines(self.CHUNK_SIZE):
|
||||||
|
|
||||||
|
if b'\0' in line:
|
||||||
|
raise BinarySuppressedError()
|
||||||
|
|
||||||
|
yield line.decode(self.msg.encoding) \
|
||||||
|
.encode(self.output_encoding, 'replace') + lf
|
||||||
|
|
||||||
|
|
||||||
|
class PrettyStream(EncodedStream):
|
||||||
|
"""In addition to :class:`EncodedStream` behaviour, this stream applies
|
||||||
|
content processing.
|
||||||
|
|
||||||
|
Useful for long-lived HTTP responses that stream by lines
|
||||||
|
such as the Twitter streaming API.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
CHUNK_SIZE = 1
|
||||||
|
|
||||||
|
def __init__(self, conversion, formatting, **kwargs):
|
||||||
|
super(PrettyStream, self).__init__(**kwargs)
|
||||||
|
self.formatting = formatting
|
||||||
|
self.conversion = conversion
|
||||||
|
self.mime = self.msg.content_type.split(';')[0]
|
||||||
|
|
||||||
|
def get_headers(self):
|
||||||
|
return self.formatting.format_headers(
|
||||||
|
self.msg.headers).encode(self.output_encoding)
|
||||||
|
|
||||||
|
def iter_body(self):
|
||||||
|
first_chunk = True
|
||||||
|
iter_lines = self.msg.iter_lines(self.CHUNK_SIZE)
|
||||||
|
for line, lf in iter_lines:
|
||||||
|
if b'\0' in line:
|
||||||
|
if first_chunk:
|
||||||
|
converter = self.conversion.get_converter(self.mime)
|
||||||
|
if converter:
|
||||||
|
body = bytearray()
|
||||||
|
# noinspection PyAssignmentToLoopOrWithParameter
|
||||||
|
for line, lf in chain([(line, lf)], iter_lines):
|
||||||
|
body.extend(line)
|
||||||
|
body.extend(lf)
|
||||||
|
self.mime, body = converter.convert(body)
|
||||||
|
assert isinstance(body, str)
|
||||||
|
yield self.process_body(body)
|
||||||
|
return
|
||||||
|
raise BinarySuppressedError()
|
||||||
|
yield self.process_body(line) + lf
|
||||||
|
first_chunk = False
|
||||||
|
|
||||||
|
def process_body(self, chunk):
|
||||||
|
if not isinstance(chunk, str):
|
||||||
|
# Text when a converter has been used,
|
||||||
|
# otherwise it will always be bytes.
|
||||||
|
chunk = chunk.decode(self.msg.encoding, 'replace')
|
||||||
|
chunk = self.formatting.format_body(content=chunk, mime=self.mime)
|
||||||
|
return chunk.encode(self.output_encoding, 'replace')
|
||||||
|
|
||||||
|
|
||||||
|
class BufferedPrettyStream(PrettyStream):
|
||||||
|
"""The same as :class:`PrettyStream` except that the body is fully
|
||||||
|
fetched before it's processed.
|
||||||
|
|
||||||
|
Suitable regular HTTP responses.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
CHUNK_SIZE = 1024 * 10
|
||||||
|
|
||||||
|
def iter_body(self):
|
||||||
|
# Read the whole body before prettifying it,
|
||||||
|
# but bail out immediately if the body is binary.
|
||||||
|
converter = None
|
||||||
|
body = bytearray()
|
||||||
|
|
||||||
|
for chunk in self.msg.iter_body(self.CHUNK_SIZE):
|
||||||
|
if not converter and b'\0' in chunk:
|
||||||
|
converter = self.conversion.get_converter(self.mime)
|
||||||
|
if not converter:
|
||||||
|
raise BinarySuppressedError()
|
||||||
|
body.extend(chunk)
|
||||||
|
|
||||||
|
if converter:
|
||||||
|
self.mime, body = converter.convert(body)
|
||||||
|
|
||||||
|
yield self.process_body(body)
|
@ -1,9 +1,22 @@
-from .base import AuthPlugin
-from .manager import PluginManager
-from .builtin import BasicAuthPlugin, DigestAuthPlugin
+"""
+WARNING: The plugin API is still work in progress and will
+probably be completely reworked by v1.0.0.
+
+"""
+from httpie.plugins.base import (
+    AuthPlugin, FormatterPlugin,
+    ConverterPlugin, TransportPlugin
+)
+from httpie.plugins.manager import PluginManager
+from httpie.plugins.builtin import BasicAuthPlugin, DigestAuthPlugin
+from httpie.output.formatters.headers import HeadersFormatter
+from httpie.output.formatters.json import JSONFormatter
+from httpie.output.formatters.colors import ColorFormatter
 
 
 plugin_manager = PluginManager()
-plugin_manager.register(BasicAuthPlugin)
-plugin_manager.register(DigestAuthPlugin)
+plugin_manager.register(BasicAuthPlugin,
+                        DigestAuthPlugin)
+plugin_manager.register(HeadersFormatter,
+                        JSONFormatter,
+                        ColorFormatter)
@ -1,14 +1,4 @@
-class AuthPlugin(object):
-    """
-    Base auth plugin class.
-
-    See <https://github.com/jkbr/httpie-ntlm> for an example auth plugin.
-
-    """
-
-    # The value that should be passed to --auth-type
-    # to use this auth plugin. Eg. "my-auth"
-    auth_type = None
-
+class BasePlugin(object):
+
     # The name of the plugin, eg. "My auth".
     name = None
@ -20,9 +10,109 @@ class AuthPlugin(object):
     # This be set automatically once the plugin has been loaded.
     package_name = None
 
-    def get_auth(self, username, password):
+
+class AuthPlugin(BasePlugin):
+    """
+    Base auth plugin class.
+
+    See <https://github.com/httpie/httpie-ntlm> for an example auth plugin.
+
+    See also `test_auth_plugins.py`
+
+    """
+    # The value that should be passed to --auth-type
+    # to use this auth plugin. Eg. "my-auth"
+    auth_type = None
+
+    # Set to `False` to make it possible to invoke this auth
+    # plugin without requiring the user to specify credentials
+    # through `--auth, -a`.
+    auth_require = True
+
+    # By default the `-a` argument is parsed for `username:password`.
+    # Set this to `False` to disable the parsing and error handling.
+    auth_parse = True
+
+    # If both `auth_parse` and `prompt_password` are set to `True`,
+    # and the value of `-a` lacks the password part,
+    # then the user will be prompted to type the password in.
+    prompt_password = True
+
+    # Will be set to the raw value of `-a` (if provided) before
+    # `get_auth()` gets called.
+    raw_auth = None
+
+    def get_auth(self, username=None, password=None):
         """
+        If `auth_parse` is set to `True`, then `username`
+        and `password` contain the parsed credentials.
+
+        Use `self.raw_auth` to access the raw value passed through
+        `--auth, -a`.
+
         Return a ``requests.auth.AuthBase`` subclass instance.
 
         """
         raise NotImplementedError()
+
+
+class TransportPlugin(BasePlugin):
+    """
+
+    http://docs.python-requests.org/en/latest/user/advanced/#transport-adapters
+
+    """
+
+    # The URL prefix the adapter should be mount to.
+    prefix = None
+
+    def get_adapter(self):
+        """
+        Return a ``requests.adapters.BaseAdapter`` subclass instance to be
+        mounted to ``self.prefix``.
+
+        """
+        raise NotImplementedError()
+
+
+class ConverterPlugin(object):
+
+    def __init__(self, mime):
+        self.mime = mime
+
+    def convert(self, content_bytes):
+        raise NotImplementedError
+
+    @classmethod
+    def supports(cls, mime):
+        raise NotImplementedError
+
+
+class FormatterPlugin(object):
+
+    def __init__(self, **kwargs):
+        """
+        :param env: an class:`Environment` instance
+        :param kwargs: additional keyword argument that some
+            processor might require.
+
+        """
+        self.enabled = True
+        self.kwargs = kwargs
+
+    def format_headers(self, headers):
+        """Return processed `headers`
+
+        :param headers: The headers as text.
+
+        """
+        return headers
+
+    def format_body(self, content, mime):
+        """Return processed `content`.
+
+        :param mime: E.g., 'application/atom+xml'.
+        :param content: The body content as text
+
+        """
+        return content
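As an aside, a minimal sketch (not taken from the diff) of what a third-party auth plugin written against the AuthPlugin API above could look like; the class names, the 'bearer' auth type, and the header scheme are illustrative assumptions:

    # Hypothetical example plugin built on the API above.
    import requests.auth

    from httpie.plugins import AuthPlugin


    class HTTPBearerAuth(requests.auth.AuthBase):

        def __init__(self, token):
            self.token = token

        def __call__(self, request):
            # Attach the token to every outgoing request.
            request.headers['Authorization'] = 'Bearer %s' % self.token
            return request


    class BearerAuthPlugin(AuthPlugin):

        name = 'Bearer token auth'
        auth_type = 'bearer'   # value passed to --auth-type
        auth_parse = False     # keep the raw -a value instead of user:pass

        def get_auth(self, username=None, password=None):
            # With auth_parse disabled, the raw -a value is available here.
            return HTTPBearerAuth(self.raw_auth)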
@ -1,20 +1,44 @@
+from base64 import b64encode
+
 import requests.auth
 
-from .base import AuthPlugin
+from httpie.plugins.base import AuthPlugin
 
 
+# noinspection PyAbstractClass
 class BuiltinAuthPlugin(AuthPlugin):
 
     package_name = '(builtin)'
 
 
+class HTTPBasicAuth(requests.auth.HTTPBasicAuth):
+
+    def __call__(self, r):
+        """
+        Override username/password serialization to allow unicode.
+
+        See https://github.com/jakubroztocil/httpie/issues/212
+
+        """
+        r.headers['Authorization'] = type(self).make_header(
+            self.username, self.password).encode('latin1')
+        return r
+
+    @staticmethod
+    def make_header(username, password):
+        credentials = u'%s:%s' % (username, password)
+        token = b64encode(credentials.encode('utf8')).strip().decode('latin1')
+        return 'Basic %s' % token
+
+
 class BasicAuthPlugin(BuiltinAuthPlugin):
 
     name = 'Basic HTTP auth'
     auth_type = 'basic'
 
+    # noinspection PyMethodOverriding
     def get_auth(self, username, password):
-        return requests.auth.HTTPBasicAuth(username, password)
+        return HTTPBasicAuth(username, password)
 
 
 class DigestAuthPlugin(BuiltinAuthPlugin):
@ -22,5 +46,6 @@ class DigestAuthPlugin(BuiltinAuthPlugin):
     name = 'Digest HTTP auth'
     auth_type = 'digest'
 
+    # noinspection PyMethodOverriding
    def get_auth(self, username, password):
         return requests.auth.HTTPDigestAuth(username, password)
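A quick hedged check (not from the diff) of the unicode-safe header builder introduced above; the credentials are made up:

    # Hypothetical usage of HTTPBasicAuth.make_header defined above.
    print(HTTPBasicAuth.make_header(u'user', u'p@ss'))
    # -> 'Basic dXNlcjpwQHNz'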
@ -1,8 +1,14 @@
+from itertools import groupby
 from pkg_resources import iter_entry_points
 
+from httpie.plugins import AuthPlugin, FormatterPlugin, ConverterPlugin
+from httpie.plugins.base import TransportPlugin
+
 
 ENTRY_POINT_NAMES = [
-    'httpie.plugins.auth.v1'
+    'httpie.plugins.auth.v1',
+    'httpie.plugins.formatter.v1',
+    'httpie.plugins.converter.v1',
+    'httpie.plugins.transport.v1',
 ]
 
 
@ -14,22 +20,48 @@ class PluginManager(object):
     def __iter__(self):
         return iter(self._plugins)
 
-    def register(self, plugin):
-        self._plugins.append(plugin)
+    def register(self, *plugins):
+        for plugin in plugins:
+            self._plugins.append(plugin)
 
-    def get_auth_plugins(self):
-        return list(self._plugins)
-
-    def get_auth_plugin_mapping(self):
-        return dict((plugin.auth_type, plugin) for plugin in self)
-
-    def get_auth_plugin(self, auth_type):
-        return self.get_auth_plugin_mapping()[auth_type]
+    def unregister(self, plugin):
+        self._plugins.remove(plugin)
 
     def load_installed_plugins(self):
         for entry_point_name in ENTRY_POINT_NAMES:
             for entry_point in iter_entry_points(entry_point_name):
                 plugin = entry_point.load()
                 plugin.package_name = entry_point.dist.key
                 self.register(entry_point.load())
+
+    # Auth
+    def get_auth_plugins(self):
+        return [plugin for plugin in self if issubclass(plugin, AuthPlugin)]
+
+    def get_auth_plugin_mapping(self):
+        return {plugin.auth_type: plugin for plugin in self.get_auth_plugins()}
+
+    def get_auth_plugin(self, auth_type):
+        return self.get_auth_plugin_mapping()[auth_type]
+
+    # Output processing
+    def get_formatters(self):
+        return [plugin for plugin in self
+                if issubclass(plugin, FormatterPlugin)]
+
+    def get_formatters_grouped(self):
+        groups = {}
+        for group_name, group in groupby(
+                self.get_formatters(),
+                key=lambda p: getattr(p, 'group_name', 'format')):
+            groups[group_name] = list(group)
+        return groups
+
+    def get_converters(self):
+        return [plugin for plugin in self
+                if issubclass(plugin, ConverterPlugin)]
+
+    # Adapters
+    def get_transport_plugins(self):
+        return [plugin for plugin in self
+                if issubclass(plugin, TransportPlugin)]
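For context, a hedged packaging sketch (not part of the diff) showing how an external package could hook into the new entry-point groups; the package and module names are assumptions:

    # Hypothetical setup.py of a third-party formatter plugin.
    from setuptools import setup

    setup(
        name='httpie-myformatter',
        py_modules=['httpie_myformatter'],
        entry_points={
            'httpie.plugins.formatter.v1': [
                'httpie_myformatter = httpie_myformatter:MyFormatter',
            ],
        },
        install_requires=['httpie'],
    )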
@ -4,11 +4,10 @@
 import re
 import os
 
-import requests
 from requests.cookies import RequestsCookieJar, create_cookie
 
-from .compat import urlsplit
-from .config import BaseConfigDict, DEFAULT_CONFIG_DIR
+from httpie.compat import urlsplit
+from httpie.config import BaseConfigDict, DEFAULT_CONFIG_DIR
 from httpie.plugins import plugin_manager
 
 
@ -21,21 +20,21 @@ VALID_SESSION_NAME_PATTERN = re.compile('^[a-zA-Z0-9_.-]+$')
 SESSION_IGNORED_HEADER_PREFIXES = ['Content-', 'If-']
 
 
-def get_response(session_name, requests_kwargs, config_dir, args,
-                 read_only=False):
-    """Like `client.get_response`, but applies permanent
+def get_response(requests_session, session_name,
+                 config_dir, args, read_only=False):
+    """Like `client.get_responses`, but applies permanent
     aspects of the session to the request.
 
     """
+    from .client import get_requests_kwargs, dump_request
     if os.path.sep in session_name:
         path = os.path.expanduser(session_name)
     else:
-        hostname = (
-            requests_kwargs['headers'].get('Host', None)
-            or urlsplit(requests_kwargs['url']).netloc.split('@')[-1]
-        )
-        assert re.match('^[a-zA-Z0-9_.:-]+$', hostname)
+        hostname = (args.headers.get('Host', None)
+                    or urlsplit(args.url).netloc.split('@')[-1])
+        if not hostname:
+            # HACK/FIXME: httpie-unixsocket's URLs have no hostname.
+            hostname = 'localhost'
 
         # host:port => host_port
         hostname = hostname.replace(':', '_')
@ -47,36 +46,35 @@ def get_response(session_name, requests_kwargs, config_dir, args,
     session = Session(path)
     session.load()
 
-    request_headers = requests_kwargs.get('headers', {})
-    requests_kwargs['headers'] = dict(session.headers, **request_headers)
-    session.update_headers(request_headers)
+    kwargs = get_requests_kwargs(args, base_headers=session.headers)
+    if args.debug:
+        dump_request(kwargs)
+    session.update_headers(kwargs['headers'])
 
-    if args.auth:
+    if args.auth_plugin:
         session.auth = {
-            'type': args.auth_type,
-            'username': args.auth.key,
-            'password': args.auth.value,
+            'type': args.auth_plugin.auth_type,
+            'raw_auth': args.auth_plugin.raw_auth,
         }
     elif session.auth:
-        requests_kwargs['auth'] = session.auth
+        kwargs['auth'] = session.auth
 
-    requests_session = requests.Session()
     requests_session.cookies = session.cookies
 
     try:
-        response = requests_session.request(**requests_kwargs)
+        response = requests_session.request(**kwargs)
     except Exception:
         raise
     else:
         # Existing sessions with `read_only=True` don't get updated.
-        if session.is_new or not read_only:
+        if session.is_new() or not read_only:
             session.cookies = requests_session.cookies
             session.save()
     return response
 
 
 class Session(BaseConfigDict):
-    helpurl = 'https://github.com/jkbr/httpie#sessions'
+    helpurl = 'https://httpie.org/doc#sessions'
     about = 'HTTPie session file'
 
     def __init__(self, path, *args, **kwargs):
@ -103,6 +101,10 @@ class Session(BaseConfigDict):
         """
         for name, value in request_headers.items():
+
+            if value is None:
+                continue  # Ignore explicitely unset headers
+
+            value = value.decode('utf8')
             if name == 'User-Agent' and value.startswith('HTTPie/'):
                 continue
 
@ -134,20 +136,41 @@ class Session(BaseConfigDict):
         stored_attrs = ['value', 'path', 'secure', 'expires']
         self['cookies'] = {}
         for cookie in jar:
-            self['cookies'][cookie.name] = dict(
-                (attname, getattr(cookie, attname))
+            self['cookies'][cookie.name] = {
+                attname: getattr(cookie, attname)
                 for attname in stored_attrs
-            )
+            }
 
     @property
     def auth(self):
         auth = self.get('auth', None)
         if not auth or not auth['type']:
             return
-        auth_plugin = plugin_manager.get_auth_plugin(auth['type'])()
-        return auth_plugin.get_auth(auth['username'], auth['password'])
+
+        plugin = plugin_manager.get_auth_plugin(auth['type'])()
+
+        credentials = {'username': None, 'password': None}
+        try:
+            # New style
+            plugin.raw_auth = auth['raw_auth']
+        except KeyError:
+            # Old style
+            credentials = {
+                'username': auth['username'],
+                'password': auth['password'],
+            }
+        else:
+            if plugin.auth_parse:
+                from httpie.input import parse_auth
+                parsed = parse_auth(plugin.raw_auth)
+                credentials = {
+                    'username': parsed.key,
+                    'password': parsed.value,
+                }
+
+        return plugin.get_auth(**credentials)
 
     @auth.setter
     def auth(self, auth):
-        assert set(['type', 'username', 'password']) == set(auth.keys())
+        assert set(['type', 'raw_auth']) == set(auth.keys())
         self['auth'] = auth
@ -1,111 +0,0 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
|
|
||||||
"""
|
|
||||||
solarized256
|
|
||||||
------------
|
|
||||||
|
|
||||||
A Pygments style inspired by Solarized's 256 color mode.
|
|
||||||
|
|
||||||
:copyright: (c) 2011 by Hank Gay, (c) 2012 by John Mastro.
|
|
||||||
:license: BSD, see LICENSE for more details.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from pygments.style import Style
|
|
||||||
from pygments.token import Token, Comment, Name, Keyword, Generic, Number, \
|
|
||||||
Operator, String
|
|
||||||
|
|
||||||
BASE03 = "#1c1c1c"
|
|
||||||
BASE02 = "#262626"
|
|
||||||
BASE01 = "#4e4e4e"
|
|
||||||
BASE00 = "#585858"
|
|
||||||
BASE0 = "#808080"
|
|
||||||
BASE1 = "#8a8a8a"
|
|
||||||
BASE2 = "#d7d7af"
|
|
||||||
BASE3 = "#ffffd7"
|
|
||||||
YELLOW = "#af8700"
|
|
||||||
ORANGE = "#d75f00"
|
|
||||||
RED = "#af0000"
|
|
||||||
MAGENTA = "#af005f"
|
|
||||||
VIOLET = "#5f5faf"
|
|
||||||
BLUE = "#0087ff"
|
|
||||||
CYAN = "#00afaf"
|
|
||||||
GREEN = "#5f8700"
|
|
||||||
|
|
||||||
|
|
||||||
class Solarized256Style(Style):
|
|
||||||
background_color = BASE03
|
|
||||||
styles = {
|
|
||||||
Keyword: GREEN,
|
|
||||||
Keyword.Constant: ORANGE,
|
|
||||||
Keyword.Declaration: BLUE,
|
|
||||||
Keyword.Namespace: ORANGE,
|
|
||||||
#Keyword.Pseudo
|
|
||||||
Keyword.Reserved: BLUE,
|
|
||||||
Keyword.Type: RED,
|
|
||||||
|
|
||||||
#Name
|
|
||||||
Name.Attribute: BASE1,
|
|
||||||
Name.Builtin: BLUE,
|
|
||||||
Name.Builtin.Pseudo: BLUE,
|
|
||||||
Name.Class: BLUE,
|
|
||||||
Name.Constant: ORANGE,
|
|
||||||
Name.Decorator: BLUE,
|
|
||||||
Name.Entity: ORANGE,
|
|
||||||
Name.Exception: YELLOW,
|
|
||||||
Name.Function: BLUE,
|
|
||||||
#Name.Label
|
|
||||||
#Name.Namespace
|
|
||||||
#Name.Other
|
|
||||||
Name.Tag: BLUE,
|
|
||||||
Name.Variable: BLUE,
|
|
||||||
#Name.Variable.Class
|
|
||||||
#Name.Variable.Global
|
|
||||||
#Name.Variable.Instance
|
|
||||||
|
|
||||||
#Literal
|
|
||||||
#Literal.Date
|
|
||||||
String: CYAN,
|
|
||||||
String.Backtick: BASE01,
|
|
||||||
String.Char: CYAN,
|
|
||||||
String.Doc: CYAN,
|
|
||||||
#String.Double
|
|
||||||
String.Escape: RED,
|
|
||||||
String.Heredoc: CYAN,
|
|
||||||
#String.Interpol
|
|
||||||
#String.Other
|
|
||||||
String.Regex: RED,
|
|
||||||
#String.Single
|
|
||||||
#String.Symbol
|
|
||||||
Number: CYAN,
|
|
||||||
#Number.Float
|
|
||||||
#Number.Hex
|
|
||||||
#Number.Integer
|
|
||||||
#Number.Integer.Long
|
|
||||||
#Number.Oct
|
|
||||||
|
|
||||||
Operator: BASE1,
|
|
||||||
Operator.Word: GREEN,
|
|
||||||
|
|
||||||
#Punctuation: ORANGE,
|
|
||||||
|
|
||||||
Comment: BASE01,
|
|
||||||
#Comment.Multiline
|
|
||||||
Comment.Preproc: GREEN,
|
|
||||||
#Comment.Single
|
|
||||||
Comment.Special: GREEN,
|
|
||||||
|
|
||||||
#Generic
|
|
||||||
Generic.Deleted: CYAN,
|
|
||||||
Generic.Emph: 'italic',
|
|
||||||
Generic.Error: RED,
|
|
||||||
Generic.Heading: ORANGE,
|
|
||||||
Generic.Inserted: GREEN,
|
|
||||||
#Generic.Output
|
|
||||||
#Generic.Prompt
|
|
||||||
Generic.Strong: 'bold',
|
|
||||||
Generic.Subheading: ORANGE,
|
|
||||||
#Generic.Traceback
|
|
||||||
|
|
||||||
Token: BASE1,
|
|
||||||
Token.Other: ORANGE,
|
|
||||||
}
|
|
@ -1,4 +1,26 @@
 from __future__ import division
+import json
+from collections import OrderedDict
+
+
+def load_json_preserve_order(s):
+    return json.loads(s, object_pairs_hook=OrderedDict)
+
+
+def repr_dict_nice(d):
+    def prepare_dict(d):
+        for k, v in d.items():
+            if isinstance(v, dict):
+                v = dict(prepare_dict(v))
+            elif isinstance(v, bytes):
+                v = v.decode('utf8')
+            elif not isinstance(v, (int, str)):
+                v = repr(v)
+            yield k, v
+    return json.dumps(
+        dict(prepare_dict(d)),
+        indent=4, sort_keys=True,
+    )
 
 
 def humanize_bytes(n, precision=2):
@ -10,20 +32,20 @@ def humanize_bytes(n, precision=2):
     Assumes `from __future__ import division`.
 
     >>> humanize_bytes(1)
-    '1 byte'
+    '1 B'
-    >>> humanize_bytes(1024)
+    >>> humanize_bytes(1024, precision=1)
     '1.0 kB'
-    >>> humanize_bytes(1024 * 123)
+    >>> humanize_bytes(1024 * 123, precision=1)
     '123.0 kB'
-    >>> humanize_bytes(1024 * 12342)
+    >>> humanize_bytes(1024 * 12342, precision=1)
     '12.1 MB'
-    >>> humanize_bytes(1024 * 12342, 2)
+    >>> humanize_bytes(1024 * 12342, precision=2)
     '12.05 MB'
-    >>> humanize_bytes(1024 * 1234, 2)
+    >>> humanize_bytes(1024 * 1234, precision=2)
     '1.21 MB'
-    >>> humanize_bytes(1024 * 1234 * 1111, 2)
+    >>> humanize_bytes(1024 * 1234 * 1111, precision=2)
     '1.31 GB'
-    >>> humanize_bytes(1024 * 1234 * 1111, 1)
+    >>> humanize_bytes(1024 * 1234 * 1111, precision=1)
     '1.3 GB'
 
     """
@ -43,4 +65,5 @@ def humanize_bytes(n, precision=2):
         if n >= factor:
             break
 
+    # noinspection PyUnboundLocalVariable
     return '%.*f %s' % (precision, n / factor, suffix)
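A small usage sketch (not from the diff) of the two helpers added above; the sample values are made up:

    # Hypothetical usage of the new utility helpers.
    print(list(load_json_preserve_order('{"b": 1, "a": 2}')))  # -> ['b', 'a']
    print(repr_dict_nice({'url': b'http://example.org', 'tries': 3}))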
@ -1,3 +1,8 @@
 tox
-git+git://github.com/kennethreitz/httpbin.git@7c96875e87a448f08fb1981e85eb79e77d592d98
+mock
+pytest
+pytest-cov
+pytest-httpbin>=0.0.6
 docutils
+wheel
+pycodestyle
19  setup.cfg  Normal file
@ -0,0 +1,19 @@

[wheel]
universal = 1


[tool:pytest]
# <https://docs.pytest.org/en/latest/customize.html>
norecursedirs = tests/fixtures


[pycodestyle]
# <http://pycodestyle.pycqa.org/en/latest/intro.html#configuration>

exclude = .git,.idea,__pycache__,build,dist,.tox,.pytest_cache,*.egg-info

# <http://pycodestyle.pycqa.org/en/latest/intro.html#error-codes>
# E241 - multiple spaces after ‘,’
# E501 - line too long
# W503 - line break before binary operator
ignore = E241,E501,W503
87
setup.py
87
setup.py
@ -1,28 +1,66 @@
|
|||||||
import os
|
# This is purely the result of trial and error.
|
||||||
|
|
||||||
import sys
|
import sys
|
||||||
import codecs
|
import codecs
|
||||||
from setuptools import setup
|
|
||||||
|
from setuptools import setup, find_packages
|
||||||
|
from setuptools.command.test import test as TestCommand
|
||||||
|
|
||||||
import httpie
|
import httpie
|
||||||
|
|
||||||
|
|
||||||
if sys.argv[-1] == 'test':
|
class PyTest(TestCommand):
|
||||||
status = os.system('python tests/tests.py')
|
# `$ python setup.py test' simply installs minimal requirements
|
||||||
sys.exit(1 if status > 127 else status)
|
# and runs the tests with no fancy stuff like parallel execution.
|
||||||
|
def finalize_options(self):
|
||||||
|
TestCommand.finalize_options(self)
|
||||||
|
self.test_args = [
|
||||||
|
'--doctest-modules', '--verbose',
|
||||||
|
'./httpie', './tests'
|
||||||
|
]
|
||||||
|
self.test_suite = True
|
||||||
|
|
||||||
|
def run_tests(self):
|
||||||
|
import pytest
|
||||||
|
sys.exit(pytest.main(self.test_args))
|
||||||
|
|
||||||
|
|
||||||
requirements = [
|
tests_require = [
|
||||||
'requests>=2.0.0',
|
# Pytest needs to come last.
|
||||||
'Pygments>=1.5'
|
# https://bitbucket.org/pypa/setuptools/issue/196/
|
||||||
|
'pytest-httpbin',
|
||||||
|
'pytest',
|
||||||
|
'mock',
|
||||||
]
|
]
|
||||||
try:
|
|
||||||
#noinspection PyUnresolvedReferences
|
|
||||||
import argparse
|
|
||||||
except ImportError:
|
|
||||||
requirements.append('argparse>=1.2.1')
|
|
||||||
|
|
||||||
if 'win32' in str(sys.platform).lower():
|
|
||||||
# Terminal colors for Windows
|
install_requires = [
|
||||||
requirements.append('colorama>=0.2.4')
|
'requests>=2.18.4',
|
||||||
|
'Pygments>=2.1.3'
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
# Conditional dependencies:
|
||||||
|
|
||||||
|
# sdist
|
||||||
|
if 'bdist_wheel' not in sys.argv:
|
||||||
|
try:
|
||||||
|
# noinspection PyUnresolvedReferences
|
||||||
|
import argparse
|
||||||
|
except ImportError:
|
||||||
|
install_requires.append('argparse>=1.2.1')
|
||||||
|
|
||||||
|
if 'win32' in str(sys.platform).lower():
|
||||||
|
# Terminal colors for Windows
|
||||||
|
install_requires.append('colorama>=0.2.4')
|
||||||
|
|
||||||
|
|
||||||
|
# bdist_wheel
|
||||||
|
extras_require = {
|
||||||
|
# http://wheel.readthedocs.io/en/latest/#defining-conditional-dependencies
|
||||||
|
'python_version == "3.0" or python_version == "3.1"': ['argparse>=1.2.1'],
|
||||||
|
':sys_platform == "win32"': ['colorama>=0.2.4'],
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
def long_description():
|
def long_description():
|
||||||
@ -36,24 +74,31 @@ setup(
|
|||||||
description=httpie.__doc__.strip(),
|
description=httpie.__doc__.strip(),
|
||||||
long_description=long_description(),
|
long_description=long_description(),
|
||||||
url='http://httpie.org/',
|
url='http://httpie.org/',
|
||||||
download_url='https://github.com/jkbr/httpie',
|
download_url='https://github.com/jakubroztocil/httpie',
|
||||||
author=httpie.__author__,
|
author=httpie.__author__,
|
||||||
author_email='jakub@roztocil.name',
|
author_email='jakub@roztocil.co',
|
||||||
license=httpie.__licence__,
|
license=httpie.__licence__,
|
||||||
packages=['httpie', 'httpie.plugins'],
|
packages=find_packages(),
|
||||||
entry_points={
|
entry_points={
|
||||||
'console_scripts': [
|
'console_scripts': [
|
||||||
'http = httpie.__main__:main',
|
'http = httpie.__main__:main',
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
install_requires=requirements,
|
extras_require=extras_require,
|
||||||
|
install_requires=install_requires,
|
||||||
|
tests_require=tests_require,
|
||||||
|
cmdclass={'test': PyTest},
|
||||||
classifiers=[
|
classifiers=[
|
||||||
'Development Status :: 5 - Production/Stable',
|
'Development Status :: 5 - Production/Stable',
|
||||||
'Programming Language :: Python',
|
'Programming Language :: Python',
|
||||||
'Programming Language :: Python :: 2.6',
|
|
||||||
'Programming Language :: Python :: 2.7',
|
'Programming Language :: Python :: 2.7',
|
||||||
|
'Programming Language :: Python :: 3',
|
||||||
'Programming Language :: Python :: 3.1',
|
'Programming Language :: Python :: 3.1',
|
||||||
'Programming Language :: Python :: 3.2',
|
'Programming Language :: Python :: 3.2',
|
||||||
|
'Programming Language :: Python :: 3.3',
|
||||||
|
'Programming Language :: Python :: 3.4',
|
||||||
|
'Programming Language :: Python :: 3.5',
|
||||||
|
'Programming Language :: Python :: 3.6',
|
||||||
'Environment :: Console',
|
'Environment :: Console',
|
||||||
'Intended Audience :: Developers',
|
'Intended Audience :: Developers',
|
||||||
'Intended Audience :: System Administrators',
|
'Intended Audience :: System Administrators',
|
||||||
|
8  tests/README.rst  Normal file
@ -0,0 +1,8 @@

HTTPie Test Suite
=================


Please see `CONTRIBUTING`_.


.. _CONTRIBUTING: https://github.com/jakubroztocil/httpie/blob/master/CONTRIBUTING.rst
29
tests/client_certs/client.crt
Normal file
29
tests/client_certs/client.crt
Normal file
@ -0,0 +1,29 @@
|
|||||||
|
-----BEGIN CERTIFICATE-----
|
||||||
|
MIIFAjCCAuoCAQEwDQYJKoZIhvcNAQEFBQAwSTELMAkGA1UEBhMCVVMxCzAJBgNV
|
||||||
|
BAgTAkNBMQswCQYDVQQHEwJTRjEPMA0GA1UEChMGSFRUUGllMQ8wDQYDVQQDEwZI
|
||||||
|
VFRQaWUwHhcNMTUwMTIzMjIyNTM2WhcNMTYwMTIzMjIyNTM2WjBFMQswCQYDVQQG
|
||||||
|
EwJBVTETMBEGA1UECBMKU29tZS1TdGF0ZTEhMB8GA1UEChMYSW50ZXJuZXQgV2lk
|
||||||
|
Z2l0cyBQdHkgTHRkMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAu6aP
|
||||||
|
iR3TpPESWKTS969fxNRoSxl8P4osjhIaUuwblFNZc8/Rn5mCMKmD506JrFV8fktQ
|
||||||
|
M6JRL7QuDC9vCw0ycr2HCV1sYX/ICgPCXYgmyigH535lb9V9hHjAgy60QgJBgSE7
|
||||||
|
lmMYaPpX6OKbT7UlzSwYtfHomXEBFA18Rlc9GwMXH8Et0RQiWIi7S6vpDRpZFxRi
|
||||||
|
gtXMceK1X8kut2ODv9B5ZwiuXh7+AMSCUkO58bXJTewQI6JadczU0JyVVjJVTny3
|
||||||
|
ta0x4SyXn8/ibylOalIsmTd/CAXJRfhV0Umb34LwaWrZ2nc+OrJwYLvOp1cG/zYl
|
||||||
|
GHkFCViRfuwwSkL4iKjVeHx2o0DxJ4bc2Z7k1ig2fTJK3gEbMz3y+YTlVNPo108H
|
||||||
|
JI77DPbkBUqLPeF7PMaN/zDqmdH0yNCW+WiHZlf6h7kZdVH3feAhTfDZbpSxhpRo
|
||||||
|
Ja84OAVCNqAuNjnZs8pMIW/iRixwP8p84At7VsS4yQQFTCjN22UhPP0PrqY3ngEj
|
||||||
|
1lbfhHC1FNZvCMxrkUAUQbeYRqLrIwB4KdDMkRJixv5Vr89NO08QtnLwQduusVkc
|
||||||
|
4Zg9HXtJTKjgQTHxHtn+OrTbpx0ogaUuYpVcQOsBT3b0EyV2z6pZiH6HK1r5Xwaq
|
||||||
|
0+nvFwpCHe58PlaI3Geihxejkv+85ZgDqXSGt7ECAwEAATANBgkqhkiG9w0BAQUF
|
||||||
|
AAOCAgEAQgIicN/uWtaYYBVEVeMGMdxzpp2pv3AaCfQMoVGaQu9VLydK/GBlYOqj
|
||||||
|
AGPjdmQ7p4ISlduXqslu646+RxZ+H6TSSj0NTF4FyR8LPckRPiePNlsGp3u6ffix
|
||||||
|
PX0554Ks+JYyFJ7qyMhsilqCYtw8prX9lj8fjzbWWXlgJFH/SRZw4xdcJ1yYA9sQ
|
||||||
|
fBHxveCWFS1ibX5+QGy/+7jPb99MP38HEIt9vTMW5aiwXeIbipXohWqcJhxL9GXz
|
||||||
|
KPsrt9a++rLjqsquhZL4uCksGmI4Gv0FQQswgSyHSSQzagee5VRB68WYSAyYdvzi
|
||||||
|
YCfkNcbQtOOQWGx4rsEdENViPs1GEZkWJJ1h9pmWzZl0U9c3cnABffK7o9v6ap2F
|
||||||
|
NrnU5H/7jLuBiUJFzqwkgAjANLRZ6hLj6h/grcnIIThJwg6KaXvpEh4UkHuqHYBF
|
||||||
|
Fq1BWZIWU25ASggEVIsCPXC2+I1oGhxK1DN/J+wIht9MBWWlQWVMZAQsBkszNZrh
|
||||||
|
nzdfMoQZTG5bT4Bf0bI5LmPaY0xBxXA1f4TLuqrEAziOjRX3vIQV4i33nZZJvPcC
|
||||||
|
mCoyhAUpTJm+OI90ePll+vBO1ENAx7EMHqNe6eCChZ/9DUsVxxtaorVq1l0xWons
|
||||||
|
ynOCgx46hGE12/oiRIKq/wGMpv6ClfJhW1N5nJahDqoIMEvnNaQ=
|
||||||
|
-----END CERTIFICATE-----
|
51
tests/client_certs/client.key
Normal file
51
tests/client_certs/client.key
Normal file
@ -0,0 +1,51 @@
|
|||||||
|
-----BEGIN RSA PRIVATE KEY-----
|
||||||
|
MIIJKAIBAAKCAgEAu6aPiR3TpPESWKTS969fxNRoSxl8P4osjhIaUuwblFNZc8/R
|
||||||
|
n5mCMKmD506JrFV8fktQM6JRL7QuDC9vCw0ycr2HCV1sYX/ICgPCXYgmyigH535l
|
||||||
|
b9V9hHjAgy60QgJBgSE7lmMYaPpX6OKbT7UlzSwYtfHomXEBFA18Rlc9GwMXH8Et
|
||||||
|
0RQiWIi7S6vpDRpZFxRigtXMceK1X8kut2ODv9B5ZwiuXh7+AMSCUkO58bXJTewQ
|
||||||
|
I6JadczU0JyVVjJVTny3ta0x4SyXn8/ibylOalIsmTd/CAXJRfhV0Umb34LwaWrZ
|
||||||
|
2nc+OrJwYLvOp1cG/zYlGHkFCViRfuwwSkL4iKjVeHx2o0DxJ4bc2Z7k1ig2fTJK
|
||||||
|
3gEbMz3y+YTlVNPo108HJI77DPbkBUqLPeF7PMaN/zDqmdH0yNCW+WiHZlf6h7kZ
|
||||||
|
dVH3feAhTfDZbpSxhpRoJa84OAVCNqAuNjnZs8pMIW/iRixwP8p84At7VsS4yQQF
|
||||||
|
TCjN22UhPP0PrqY3ngEj1lbfhHC1FNZvCMxrkUAUQbeYRqLrIwB4KdDMkRJixv5V
|
||||||
|
r89NO08QtnLwQduusVkc4Zg9HXtJTKjgQTHxHtn+OrTbpx0ogaUuYpVcQOsBT3b0
|
||||||
|
EyV2z6pZiH6HK1r5Xwaq0+nvFwpCHe58PlaI3Geihxejkv+85ZgDqXSGt7ECAwEA
|
||||||
|
AQKCAgBOY1DYlZYg8/eXAhuDDkayYYzDuny1ylG8c4F9nFYVCxB2GZ1Wz3icPWP1
|
||||||
|
j1BhpkBgPbPeLfM+O0V1H6eCdVvapKOxXM52mDuHO3TJP6P8lOZgZOOY6RUK7qp0
|
||||||
|
4mC4plqYx7oto23CBLoOdgMtM937rG0SLGDfIF6z8sI0XCMRkqPpRviNu5xxYYTk
|
||||||
|
IoczSwtmYcSZJRjHhk4AGnmicDbMPRlJ2k2E0euHhI9wMAyQFUFnhLJlQGALj6pj
|
||||||
|
DtYvcM1EAUN46EXK66bXQq8zgozYS0WIJ6+wOUKQMSIgUGCF6Rvm3ZTt9xwOxxW8
|
||||||
|
wxebvfYVTJgIdh2Nfusgmye9Debl73f+k9/O4RsvYc5J5w2n4IxKqQrfCZrZqevZ
|
||||||
|
s+KvARkuQbXrHPanvEd8MPrRZ6FOAdiZYAbB9OvzuKCbEkgag8GPjMMAvrjT49N2
|
||||||
|
qp9gwGgnzczQYn+vLblJuRzofcblvLE+sxKKDE8qrfcOjN1murZP7714y5E3NmEZ
|
||||||
|
NB2NTHveTflYI1HJ1tznI1C40GdBYH4GwT/0he53rBcjNaPhyP7j3cTR1doRfZap
|
||||||
|
2oz8KE/Sij3Zb6b8r7hi+Lcwpa9txZftro7XNOJIX7ZT5B4KMiXowtCHbkMMnL6k
|
||||||
|
48tRBpyX20MqDFezBRCK7lfGhU1Coms8UcDHoFXLuGY/sAYEcQKCAQEA9D9/PD1t
|
||||||
|
e90haG6nLl4LKP5wH2wB2BK1RRBERqOVqwSmdSgn3/+GkpeYWKdhN2jyYn6qnpJQ
|
||||||
|
hXXYGtHAAHuw0dGXnOmgcsyZSlAWPzpMYRYrSh3ds8JVJdV2d58yS0ty3Ae3W6aW
|
||||||
|
p4SRuhf8yIMgOmE+TARCU1rJdke9jIIl2TQmnpJahlsZeGLEmEXE99EhB5VoshRJ
|
||||||
|
hLXNn3xTtkQz3tNR0rMAtXI6SIMB00FFEG1+jClza6PYriT9dkORI5LSVqXDEpxR
|
||||||
|
C41PvYMKTAloWd0hZ2gdfwAcJScoAv75L10uR7O1IeQI+Et5h2tj4a/OfzILa0d5
|
||||||
|
BYMmVsTa3NZXLQKCAQEAxK3uJKmoN2uswJQSKpX4WziVBgbaZdpCNnAWhfJZYIcP
|
||||||
|
zlIuv9qOc/DIPiy9Sa9XtITSkcONAwRowtI783AtXeAelyo3W7A2aLIfBBZAXDzJ
|
||||||
|
8KMc9xMDPebvUhlPSzg4bNwvduukAnktlzCjrRWPXRoSfloSpFkFPP4GwTdVcf17
|
||||||
|
1mkki6rK4rbHmIoCITlZkNbUBCcu20ivK6N3pvH1wN123bxnF7lwvB5qizdFO5P7
|
||||||
|
xRVIoCdCXQ0+WK2ZokCa/r44rcp4ffgrLoO/WRlo4yERIa9NwaucIrXmotKX8kYc
|
||||||
|
YYpFzrGs72DljS7TBZCOqek5fNQBNK79BA2hNcJ1FQKCAQBC+M44hFdq6T1p1z18
|
||||||
|
F0lUGkBAPWtcBfUyVL2D6QL2+7Vw1mvonbYWp/6cAHlFqj8cBsNd65ysm51/7ReK
|
||||||
|
il/3iFLcMatPDw7RM5iGCcQ7ssp37iyGR7j1QMzVDA/MWYnLD0qVlN4mXNFgh4dG
|
||||||
|
q73AhD2CtoBBPtmS1yUATAd4wTX9sP+la4FWYy6o2iiiEvPNkog8nBd0ji0tl/eU
|
||||||
|
OKtIZAVBkteU6RdWHqX3eSQo1v0mDY+aajjVt0rQjMJVUMLgA1+z0KzgUAUXX8EJ
|
||||||
|
DGNSkLHCGuhLlIojHdN4ztUgyZoRCxOVkWNsQbW3Dhk7HuuuMNi0t8pVWpq+nAev
|
||||||
|
Gg6ZAoIBAQC0mMk9nRO7oAGG6/Aqbn8YtEISwKQ2Nk3qUs47vKdZPWvEFi6bOILp
|
||||||
|
70TP4qEFUh6EwhngguGuzZOsoQMvq+fcdXlhcQBYDtxHEpfsVspOZ/s+HWjxbuHh
|
||||||
|
K3bBuj/XYA5f12c2GXYGV2MHm0AQJOX5pYEpyGepxZxLvy5QqRCqlQnrfaxzGycl
|
||||||
|
OpTYepEuFM0rdDhGf/xEmt9OgNHT2AXDTRhizycS39Kmyn8myl+mL2JWPA7uEF6d
|
||||||
|
txVytCWImS45kE3XNz2g3go4sf04QV7QgIKMnb4Wgg/ix4i6JgokC0DwR9mFzBxx
|
||||||
|
ylW+aCqYx35YgrGo77sTt0LZP/KxvJdpAoIBAF7YfhR1wFbW2L8sJ4gAbjPUWOMu
|
||||||
|
JUfE4FhdLcSdqCo+N8uN0qawJxXltBKfjeeoH0CDh9Yv0qqalVdSOKS9BPAa1zJc
|
||||||
|
o2kBcT8AVwoPS5oxa9eDT+7iHPMF4BErB2IGv3yYwpjqSZBJ9TsTu1B6iTf5hOL5
|
||||||
|
9pqcv/LjfcwtWu2XMCVoZj2Q8iYv55l3jJ1ByF/UDVezWajE69avvJkQZrMZmuBw
|
||||||
|
UuHelP/7anRyyelh7RkndZpPCExGmuO7pd5aG25/mBs0i34R1PElAtt8AN36f5Tk
|
||||||
|
1GxIltTNtLk4Mivwp9aZ1vf9s5FAhgPDvfGV5yFoKYmA/65ZlrKx0zlFNng=
|
||||||
|
-----END RSA PRIVATE KEY-----
|
87  tests/client_certs/client.pem  Normal file
@@ -0,0 +1,87 @@
Bag Attributes
|
||||||
|
localKeyID: 93 0C 3E A7 82 62 36 37 5E 73 9B 05 C4 98 DF DC 04 5C B4 C9
|
||||||
|
subject=/C=AU/ST=Some-State/O=Internet Widgits Pty Ltd
|
||||||
|
issuer=/C=US/ST=CA/L=SF/O=HTTPie/CN=HTTPie
|
||||||
|
-----BEGIN CERTIFICATE-----
|
||||||
|
MIIFAjCCAuoCAQEwDQYJKoZIhvcNAQEFBQAwSTELMAkGA1UEBhMCVVMxCzAJBgNV
|
||||||
|
BAgTAkNBMQswCQYDVQQHEwJTRjEPMA0GA1UEChMGSFRUUGllMQ8wDQYDVQQDEwZI
|
||||||
|
VFRQaWUwHhcNMTUwMTIzMjIyNTM2WhcNMTYwMTIzMjIyNTM2WjBFMQswCQYDVQQG
|
||||||
|
EwJBVTETMBEGA1UECBMKU29tZS1TdGF0ZTEhMB8GA1UEChMYSW50ZXJuZXQgV2lk
|
||||||
|
Z2l0cyBQdHkgTHRkMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAu6aP
|
||||||
|
iR3TpPESWKTS969fxNRoSxl8P4osjhIaUuwblFNZc8/Rn5mCMKmD506JrFV8fktQ
|
||||||
|
M6JRL7QuDC9vCw0ycr2HCV1sYX/ICgPCXYgmyigH535lb9V9hHjAgy60QgJBgSE7
|
||||||
|
lmMYaPpX6OKbT7UlzSwYtfHomXEBFA18Rlc9GwMXH8Et0RQiWIi7S6vpDRpZFxRi
|
||||||
|
gtXMceK1X8kut2ODv9B5ZwiuXh7+AMSCUkO58bXJTewQI6JadczU0JyVVjJVTny3
|
||||||
|
ta0x4SyXn8/ibylOalIsmTd/CAXJRfhV0Umb34LwaWrZ2nc+OrJwYLvOp1cG/zYl
|
||||||
|
GHkFCViRfuwwSkL4iKjVeHx2o0DxJ4bc2Z7k1ig2fTJK3gEbMz3y+YTlVNPo108H
|
||||||
|
JI77DPbkBUqLPeF7PMaN/zDqmdH0yNCW+WiHZlf6h7kZdVH3feAhTfDZbpSxhpRo
|
||||||
|
Ja84OAVCNqAuNjnZs8pMIW/iRixwP8p84At7VsS4yQQFTCjN22UhPP0PrqY3ngEj
|
||||||
|
1lbfhHC1FNZvCMxrkUAUQbeYRqLrIwB4KdDMkRJixv5Vr89NO08QtnLwQduusVkc
|
||||||
|
4Zg9HXtJTKjgQTHxHtn+OrTbpx0ogaUuYpVcQOsBT3b0EyV2z6pZiH6HK1r5Xwaq
|
||||||
|
0+nvFwpCHe58PlaI3Geihxejkv+85ZgDqXSGt7ECAwEAATANBgkqhkiG9w0BAQUF
|
||||||
|
AAOCAgEAQgIicN/uWtaYYBVEVeMGMdxzpp2pv3AaCfQMoVGaQu9VLydK/GBlYOqj
|
||||||
|
AGPjdmQ7p4ISlduXqslu646+RxZ+H6TSSj0NTF4FyR8LPckRPiePNlsGp3u6ffix
|
||||||
|
PX0554Ks+JYyFJ7qyMhsilqCYtw8prX9lj8fjzbWWXlgJFH/SRZw4xdcJ1yYA9sQ
|
||||||
|
fBHxveCWFS1ibX5+QGy/+7jPb99MP38HEIt9vTMW5aiwXeIbipXohWqcJhxL9GXz
|
||||||
|
KPsrt9a++rLjqsquhZL4uCksGmI4Gv0FQQswgSyHSSQzagee5VRB68WYSAyYdvzi
|
||||||
|
YCfkNcbQtOOQWGx4rsEdENViPs1GEZkWJJ1h9pmWzZl0U9c3cnABffK7o9v6ap2F
|
||||||
|
NrnU5H/7jLuBiUJFzqwkgAjANLRZ6hLj6h/grcnIIThJwg6KaXvpEh4UkHuqHYBF
|
||||||
|
Fq1BWZIWU25ASggEVIsCPXC2+I1oGhxK1DN/J+wIht9MBWWlQWVMZAQsBkszNZrh
|
||||||
|
nzdfMoQZTG5bT4Bf0bI5LmPaY0xBxXA1f4TLuqrEAziOjRX3vIQV4i33nZZJvPcC
|
||||||
|
mCoyhAUpTJm+OI90ePll+vBO1ENAx7EMHqNe6eCChZ/9DUsVxxtaorVq1l0xWons
|
||||||
|
ynOCgx46hGE12/oiRIKq/wGMpv6ClfJhW1N5nJahDqoIMEvnNaQ=
|
||||||
|
-----END CERTIFICATE-----
|
||||||
|
Bag Attributes
|
||||||
|
localKeyID: 93 0C 3E A7 82 62 36 37 5E 73 9B 05 C4 98 DF DC 04 5C B4 C9
|
||||||
|
Key Attributes: <No Attributes>
|
||||||
|
-----BEGIN RSA PRIVATE KEY-----
|
||||||
|
MIIJKAIBAAKCAgEAu6aPiR3TpPESWKTS969fxNRoSxl8P4osjhIaUuwblFNZc8/R
|
||||||
|
n5mCMKmD506JrFV8fktQM6JRL7QuDC9vCw0ycr2HCV1sYX/ICgPCXYgmyigH535l
|
||||||
|
b9V9hHjAgy60QgJBgSE7lmMYaPpX6OKbT7UlzSwYtfHomXEBFA18Rlc9GwMXH8Et
|
||||||
|
0RQiWIi7S6vpDRpZFxRigtXMceK1X8kut2ODv9B5ZwiuXh7+AMSCUkO58bXJTewQ
|
||||||
|
I6JadczU0JyVVjJVTny3ta0x4SyXn8/ibylOalIsmTd/CAXJRfhV0Umb34LwaWrZ
|
||||||
|
2nc+OrJwYLvOp1cG/zYlGHkFCViRfuwwSkL4iKjVeHx2o0DxJ4bc2Z7k1ig2fTJK
|
||||||
|
3gEbMz3y+YTlVNPo108HJI77DPbkBUqLPeF7PMaN/zDqmdH0yNCW+WiHZlf6h7kZ
|
||||||
|
dVH3feAhTfDZbpSxhpRoJa84OAVCNqAuNjnZs8pMIW/iRixwP8p84At7VsS4yQQF
|
||||||
|
TCjN22UhPP0PrqY3ngEj1lbfhHC1FNZvCMxrkUAUQbeYRqLrIwB4KdDMkRJixv5V
|
||||||
|
r89NO08QtnLwQduusVkc4Zg9HXtJTKjgQTHxHtn+OrTbpx0ogaUuYpVcQOsBT3b0
|
||||||
|
EyV2z6pZiH6HK1r5Xwaq0+nvFwpCHe58PlaI3Geihxejkv+85ZgDqXSGt7ECAwEA
|
||||||
|
AQKCAgBOY1DYlZYg8/eXAhuDDkayYYzDuny1ylG8c4F9nFYVCxB2GZ1Wz3icPWP1
|
||||||
|
j1BhpkBgPbPeLfM+O0V1H6eCdVvapKOxXM52mDuHO3TJP6P8lOZgZOOY6RUK7qp0
|
||||||
|
4mC4plqYx7oto23CBLoOdgMtM937rG0SLGDfIF6z8sI0XCMRkqPpRviNu5xxYYTk
|
||||||
|
IoczSwtmYcSZJRjHhk4AGnmicDbMPRlJ2k2E0euHhI9wMAyQFUFnhLJlQGALj6pj
|
||||||
|
DtYvcM1EAUN46EXK66bXQq8zgozYS0WIJ6+wOUKQMSIgUGCF6Rvm3ZTt9xwOxxW8
|
||||||
|
wxebvfYVTJgIdh2Nfusgmye9Debl73f+k9/O4RsvYc5J5w2n4IxKqQrfCZrZqevZ
|
||||||
|
s+KvARkuQbXrHPanvEd8MPrRZ6FOAdiZYAbB9OvzuKCbEkgag8GPjMMAvrjT49N2
|
||||||
|
qp9gwGgnzczQYn+vLblJuRzofcblvLE+sxKKDE8qrfcOjN1murZP7714y5E3NmEZ
|
||||||
|
NB2NTHveTflYI1HJ1tznI1C40GdBYH4GwT/0he53rBcjNaPhyP7j3cTR1doRfZap
|
||||||
|
2oz8KE/Sij3Zb6b8r7hi+Lcwpa9txZftro7XNOJIX7ZT5B4KMiXowtCHbkMMnL6k
|
||||||
|
48tRBpyX20MqDFezBRCK7lfGhU1Coms8UcDHoFXLuGY/sAYEcQKCAQEA9D9/PD1t
|
||||||
|
e90haG6nLl4LKP5wH2wB2BK1RRBERqOVqwSmdSgn3/+GkpeYWKdhN2jyYn6qnpJQ
|
||||||
|
hXXYGtHAAHuw0dGXnOmgcsyZSlAWPzpMYRYrSh3ds8JVJdV2d58yS0ty3Ae3W6aW
|
||||||
|
p4SRuhf8yIMgOmE+TARCU1rJdke9jIIl2TQmnpJahlsZeGLEmEXE99EhB5VoshRJ
|
||||||
|
hLXNn3xTtkQz3tNR0rMAtXI6SIMB00FFEG1+jClza6PYriT9dkORI5LSVqXDEpxR
|
||||||
|
C41PvYMKTAloWd0hZ2gdfwAcJScoAv75L10uR7O1IeQI+Et5h2tj4a/OfzILa0d5
|
||||||
|
BYMmVsTa3NZXLQKCAQEAxK3uJKmoN2uswJQSKpX4WziVBgbaZdpCNnAWhfJZYIcP
|
||||||
|
zlIuv9qOc/DIPiy9Sa9XtITSkcONAwRowtI783AtXeAelyo3W7A2aLIfBBZAXDzJ
|
||||||
|
8KMc9xMDPebvUhlPSzg4bNwvduukAnktlzCjrRWPXRoSfloSpFkFPP4GwTdVcf17
|
||||||
|
1mkki6rK4rbHmIoCITlZkNbUBCcu20ivK6N3pvH1wN123bxnF7lwvB5qizdFO5P7
|
||||||
|
xRVIoCdCXQ0+WK2ZokCa/r44rcp4ffgrLoO/WRlo4yERIa9NwaucIrXmotKX8kYc
|
||||||
|
YYpFzrGs72DljS7TBZCOqek5fNQBNK79BA2hNcJ1FQKCAQBC+M44hFdq6T1p1z18
|
||||||
|
F0lUGkBAPWtcBfUyVL2D6QL2+7Vw1mvonbYWp/6cAHlFqj8cBsNd65ysm51/7ReK
|
||||||
|
il/3iFLcMatPDw7RM5iGCcQ7ssp37iyGR7j1QMzVDA/MWYnLD0qVlN4mXNFgh4dG
|
||||||
|
q73AhD2CtoBBPtmS1yUATAd4wTX9sP+la4FWYy6o2iiiEvPNkog8nBd0ji0tl/eU
|
||||||
|
OKtIZAVBkteU6RdWHqX3eSQo1v0mDY+aajjVt0rQjMJVUMLgA1+z0KzgUAUXX8EJ
|
||||||
|
DGNSkLHCGuhLlIojHdN4ztUgyZoRCxOVkWNsQbW3Dhk7HuuuMNi0t8pVWpq+nAev
|
||||||
|
Gg6ZAoIBAQC0mMk9nRO7oAGG6/Aqbn8YtEISwKQ2Nk3qUs47vKdZPWvEFi6bOILp
|
||||||
|
70TP4qEFUh6EwhngguGuzZOsoQMvq+fcdXlhcQBYDtxHEpfsVspOZ/s+HWjxbuHh
|
||||||
|
K3bBuj/XYA5f12c2GXYGV2MHm0AQJOX5pYEpyGepxZxLvy5QqRCqlQnrfaxzGycl
|
||||||
|
OpTYepEuFM0rdDhGf/xEmt9OgNHT2AXDTRhizycS39Kmyn8myl+mL2JWPA7uEF6d
|
||||||
|
txVytCWImS45kE3XNz2g3go4sf04QV7QgIKMnb4Wgg/ix4i6JgokC0DwR9mFzBxx
|
||||||
|
ylW+aCqYx35YgrGo77sTt0LZP/KxvJdpAoIBAF7YfhR1wFbW2L8sJ4gAbjPUWOMu
|
||||||
|
JUfE4FhdLcSdqCo+N8uN0qawJxXltBKfjeeoH0CDh9Yv0qqalVdSOKS9BPAa1zJc
|
||||||
|
o2kBcT8AVwoPS5oxa9eDT+7iHPMF4BErB2IGv3yYwpjqSZBJ9TsTu1B6iTf5hOL5
|
||||||
|
9pqcv/LjfcwtWu2XMCVoZj2Q8iYv55l3jJ1ByF/UDVezWajE69avvJkQZrMZmuBw
|
||||||
|
UuHelP/7anRyyelh7RkndZpPCExGmuO7pd5aG25/mBs0i34R1PElAtt8AN36f5Tk
|
||||||
|
1GxIltTNtLk4Mivwp9aZ1vf9s5FAhgPDvfGV5yFoKYmA/65ZlrKx0zlFNng=
|
||||||
|
-----END RSA PRIVATE KEY-----
|
24  tests/conftest.py  Normal file
@@ -0,0 +1,24 @@
import pytest
from pytest_httpbin import certs


@pytest.fixture(scope='function', autouse=True)
def httpbin_add_ca_bundle(monkeypatch):
    """
    Make pytest-httpbin's CA trusted by default.

    (Same as `httpbin_ca_bundle`, just auto-used.).

    """
    monkeypatch.setenv('REQUESTS_CA_BUNDLE', certs.where())


@pytest.fixture(scope='function')
def httpbin_secure_untrusted(monkeypatch, httpbin_secure):
    """
    Like the `httpbin_secure` fixture, but without the
    make-CA-trusted-by-default.

    """
    monkeypatch.delenv('REQUESTS_CA_BUNDLE')
    return httpbin_secure
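The autouse fixture above works by pointing the REQUESTS_CA_BUNDLE environment variable at the CA bundle shipped with pytest-httpbin, so requests (and therefore HTTPie) verifies the local HTTPS httpbin without extra flags. A minimal sketch of the same mechanism outside pytest; the server address is a stand-in for what the httpbin_secure fixture would provide.

import os

import requests
from pytest_httpbin import certs

base_url = 'https://127.0.0.1:55543'  # hypothetical address of a pytest-httpbin HTTPS server
os.environ['REQUESTS_CA_BUNDLE'] = certs.where()  # trust the pytest-httpbin CA

# With the CA trusted, no verify=False workaround is needed:
response = requests.get(base_url + '/get')
print(response.status_code)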
40  tests/fixtures/__init__.py  vendored  Normal file
@@ -0,0 +1,40 @@
"""Test data"""
from os import path
import codecs


def patharg(path):
    """
    Back slashes need to be escaped in ITEM args,
    even in Windows paths.

    """
    return path.replace('\\', '\\\\\\')


FIXTURES_ROOT = path.join(path.abspath(path.dirname(__file__)))
FILE_PATH = path.join(FIXTURES_ROOT, 'test.txt')
JSON_FILE_PATH = path.join(FIXTURES_ROOT, 'test.json')
BIN_FILE_PATH = path.join(FIXTURES_ROOT, 'test.bin')


FILE_PATH_ARG = patharg(FILE_PATH)
BIN_FILE_PATH_ARG = patharg(BIN_FILE_PATH)
JSON_FILE_PATH_ARG = patharg(JSON_FILE_PATH)


with codecs.open(FILE_PATH, encoding='utf8') as f:
    # Strip because we don't want new lines in the data so that we can
    # easily count occurrences also when embedded in JSON (where the new
    # line would be escaped).
    FILE_CONTENT = f.read().strip()


with codecs.open(JSON_FILE_PATH, encoding='utf8') as f:
    JSON_FILE_CONTENT = f.read()


with open(BIN_FILE_PATH, 'rb') as f:
    BIN_FILE_CONTENT = f.read()

UNICODE = FILE_CONTENT
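The patharg() helper exists because backslashes in ITEM arguments are unescaped by the CLI parser, so Windows fixture paths have to be escaped before they are embedded in items like file@PATH. A quick sketch of the round trip on a made-up path:

def patharg(path):
    # Same one-liner as above: every backslash becomes three.
    return path.replace('\\', '\\\\\\')


windows_path = 'C:\\Users\\test\\file.txt'   # hypothetical Windows path
print(patharg(windows_path))                 # C:\\\Users\\\test\\\file.txt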
1  tests/fixtures/file.txt  vendored
@@ -1 +0,0 @@
__test_file_content__
1  tests/fixtures/file2.txt  vendored
@@ -1 +0,0 @@
__test_file_content__
Binary image fixture  Before: 1.1 KiB  After: 1.1 KiB
4  tests/fixtures/test.json  vendored  Normal file
@@ -0,0 +1,4 @@
{
    "name": "Jakub Roztočil",
    "unicode": "χρυσαφὶ 太陽 เลิศ ♜♞♝♛♚♝♞♜ оживлённым तान्यहानि 有朋"
}
1  tests/fixtures/test.txt  vendored  Normal file
@@ -0,0 +1 @@
[one line of UTF8-encoded unicode text] χρυσαφὶ 太陽 เลิศ ♜♞♝♛♚♝♞♜ оживлённым तान्यहानि 有朋 ஸ்றீனிவாஸ ٱلرَّحْمـَبنِ
75  tests/test_auth.py  Normal file
@@ -0,0 +1,75 @@
"""HTTP authentication-related tests."""
import mock
import pytest

from utils import http, add_auth, HTTP_OK, MockEnvironment
import httpie.input
import httpie.cli


def test_basic_auth(httpbin_both):
    r = http('--auth=user:password',
             'GET', httpbin_both + '/basic-auth/user/password')
    assert HTTP_OK in r
    assert r.json == {'authenticated': True, 'user': 'user'}


@pytest.mark.parametrize('argument_name', ['--auth-type', '-A'])
def test_digest_auth(httpbin_both, argument_name):
    r = http(argument_name + '=digest', '--auth=user:password',
             'GET', httpbin_both.url + '/digest-auth/auth/user/password')
    assert HTTP_OK in r
    assert r.json == {'authenticated': True, 'user': 'user'}


@mock.patch('httpie.input.AuthCredentials._getpass',
            new=lambda self, prompt: 'password')
def test_password_prompt(httpbin):
    r = http('--auth', 'user',
             'GET', httpbin.url + '/basic-auth/user/password')
    assert HTTP_OK in r
    assert r.json == {'authenticated': True, 'user': 'user'}


def test_credentials_in_url(httpbin_both):
    url = add_auth(httpbin_both.url + '/basic-auth/user/password',
                   auth='user:password')
    r = http('GET', url)
    assert HTTP_OK in r
    assert r.json == {'authenticated': True, 'user': 'user'}


def test_credentials_in_url_auth_flag_has_priority(httpbin_both):
    """When credentials are passed in URL and via -a at the same time,
    then the ones from -a are used."""
    url = add_auth(httpbin_both.url + '/basic-auth/user/password',
                   auth='user:wrong')
    r = http('--auth=user:password', 'GET', url)
    assert HTTP_OK in r
    assert r.json == {'authenticated': True, 'user': 'user'}


@pytest.mark.parametrize('url', [
    'username@example.org',
    'username:@example.org',
])
def test_only_username_in_url(url):
    """
    https://github.com/jakubroztocil/httpie/issues/242

    """
    args = httpie.cli.parser.parse_args(args=[url], env=MockEnvironment())
    assert args.auth
    assert args.auth.username == 'username'
    assert args.auth.password == ''


def test_missing_auth(httpbin):
    r = http(
        '--auth-type=basic',
        'GET',
        httpbin + '/basic-auth/user/password',
        error_exit_ok=True
    )
    assert HTTP_OK not in r
    assert '--auth required' in r.stderr
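These tests all hit httpbin's /basic-auth and /digest-auth endpoints, which return {'authenticated': True, 'user': ...} on success. The same assertion, sketched with plain requests against a local httpbin; base_url stands in for the pytest fixture's address.

import requests

base_url = 'http://127.0.0.1:8080'  # hypothetical local httpbin

r = requests.get(base_url + '/basic-auth/user/password',
                 auth=('user', 'password'))
assert r.status_code == 200
assert r.json() == {'authenticated': True, 'user': 'user'}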
133  tests/test_auth_plugins.py  Normal file
@@ -0,0 +1,133 @@
from mock import mock
|
||||||
|
|
||||||
|
from httpie.input import SEP_CREDENTIALS
|
||||||
|
from httpie.plugins import AuthPlugin, plugin_manager
|
||||||
|
from utils import http, HTTP_OK
|
||||||
|
|
||||||
|
# TODO: run all these tests in session mode as well
|
||||||
|
|
||||||
|
USERNAME = 'user'
|
||||||
|
PASSWORD = 'password'
|
||||||
|
# Basic auth encoded `USERNAME` and `PASSWORD`
|
||||||
|
# noinspection SpellCheckingInspection
|
||||||
|
BASIC_AUTH_HEADER_VALUE = 'Basic dXNlcjpwYXNzd29yZA=='
|
||||||
|
BASIC_AUTH_URL = '/basic-auth/{0}/{1}'.format(USERNAME, PASSWORD)
|
||||||
|
AUTH_OK = {'authenticated': True, 'user': USERNAME}
|
||||||
|
|
||||||
|
|
||||||
|
def basic_auth(header=BASIC_AUTH_HEADER_VALUE):
|
||||||
|
|
||||||
|
def inner(r):
|
||||||
|
r.headers['Authorization'] = header
|
||||||
|
return r
|
||||||
|
|
||||||
|
return inner
|
||||||
|
|
||||||
|
|
||||||
|
def test_auth_plugin_parse_auth_false(httpbin):
|
||||||
|
|
||||||
|
class Plugin(AuthPlugin):
|
||||||
|
auth_type = 'test-parse-false'
|
||||||
|
auth_parse = False
|
||||||
|
|
||||||
|
def get_auth(self, username=None, password=None):
|
||||||
|
assert username is None
|
||||||
|
assert password is None
|
||||||
|
assert self.raw_auth == BASIC_AUTH_HEADER_VALUE
|
||||||
|
return basic_auth(self.raw_auth)
|
||||||
|
|
||||||
|
plugin_manager.register(Plugin)
|
||||||
|
try:
|
||||||
|
r = http(
|
||||||
|
httpbin + BASIC_AUTH_URL,
|
||||||
|
'--auth-type',
|
||||||
|
Plugin.auth_type,
|
||||||
|
'--auth',
|
||||||
|
BASIC_AUTH_HEADER_VALUE,
|
||||||
|
)
|
||||||
|
assert HTTP_OK in r
|
||||||
|
assert r.json == AUTH_OK
|
||||||
|
finally:
|
||||||
|
plugin_manager.unregister(Plugin)
|
||||||
|
|
||||||
|
|
||||||
|
def test_auth_plugin_require_auth_false(httpbin):
|
||||||
|
|
||||||
|
class Plugin(AuthPlugin):
|
||||||
|
auth_type = 'test-require-false'
|
||||||
|
auth_require = False
|
||||||
|
|
||||||
|
def get_auth(self, username=None, password=None):
|
||||||
|
assert self.raw_auth is None
|
||||||
|
assert username is None
|
||||||
|
assert password is None
|
||||||
|
return basic_auth()
|
||||||
|
|
||||||
|
plugin_manager.register(Plugin)
|
||||||
|
try:
|
||||||
|
r = http(
|
||||||
|
httpbin + BASIC_AUTH_URL,
|
||||||
|
'--auth-type',
|
||||||
|
Plugin.auth_type,
|
||||||
|
)
|
||||||
|
assert HTTP_OK in r
|
||||||
|
assert r.json == AUTH_OK
|
||||||
|
finally:
|
||||||
|
plugin_manager.unregister(Plugin)
|
||||||
|
|
||||||
|
|
||||||
|
def test_auth_plugin_require_auth_false_and_auth_provided(httpbin):
|
||||||
|
|
||||||
|
class Plugin(AuthPlugin):
|
||||||
|
auth_type = 'test-require-false-yet-provided'
|
||||||
|
auth_require = False
|
||||||
|
|
||||||
|
def get_auth(self, username=None, password=None):
|
||||||
|
assert self.raw_auth == USERNAME + SEP_CREDENTIALS + PASSWORD
|
||||||
|
assert username == USERNAME
|
||||||
|
assert password == PASSWORD
|
||||||
|
return basic_auth()
|
||||||
|
|
||||||
|
plugin_manager.register(Plugin)
|
||||||
|
try:
|
||||||
|
r = http(
|
||||||
|
httpbin + BASIC_AUTH_URL,
|
||||||
|
'--auth-type',
|
||||||
|
Plugin.auth_type,
|
||||||
|
'--auth',
|
||||||
|
USERNAME + SEP_CREDENTIALS + PASSWORD,
|
||||||
|
)
|
||||||
|
assert HTTP_OK in r
|
||||||
|
assert r.json == AUTH_OK
|
||||||
|
finally:
|
||||||
|
plugin_manager.unregister(Plugin)
|
||||||
|
|
||||||
|
|
||||||
|
@mock.patch('httpie.input.AuthCredentials._getpass',
|
||||||
|
new=lambda self, prompt: 'UNEXPECTED_PROMPT_RESPONSE')
|
||||||
|
def test_auth_plugin_prompt_password_false(httpbin):
|
||||||
|
|
||||||
|
class Plugin(AuthPlugin):
|
||||||
|
auth_type = 'test-prompt-false'
|
||||||
|
prompt_password = False
|
||||||
|
|
||||||
|
def get_auth(self, username=None, password=None):
|
||||||
|
assert self.raw_auth == USERNAME
|
||||||
|
assert username == USERNAME
|
||||||
|
assert password is None
|
||||||
|
return basic_auth()
|
||||||
|
|
||||||
|
plugin_manager.register(Plugin)
|
||||||
|
|
||||||
|
try:
|
||||||
|
r = http(
|
||||||
|
httpbin + BASIC_AUTH_URL,
|
||||||
|
'--auth-type',
|
||||||
|
Plugin.auth_type,
|
||||||
|
'--auth',
|
||||||
|
USERNAME,
|
||||||
|
)
|
||||||
|
assert HTTP_OK in r
|
||||||
|
assert r.json == AUTH_OK
|
||||||
|
finally:
|
||||||
|
plugin_manager.unregister(Plugin)
|
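The plugin tests register throwaway AuthPlugin subclasses directly with plugin_manager; a standalone plugin uses the same surface. A sketch of a token-style plugin built only from the API exercised above (the class name, display name, and header format are illustrative, not an existing plugin):

from httpie.plugins import AuthPlugin, plugin_manager


class TokenAuthPlugin(AuthPlugin):
    name = 'Token auth'     # display name; attribute assumed from the plugin base class
    auth_type = 'token'     # what the user passes to --auth-type

    def get_auth(self, username=None, password=None):
        token = password or username

        def apply(request):
            # Returned callable mutates the outgoing request, like basic_auth() above.
            request.headers['Authorization'] = 'Token ' + token
            return request

        return apply


# In-process registration, mirroring the tests; real plugins are normally
# installed as packages instead.
plugin_manager.register(TokenAuthPlugin)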
50  tests/test_binary.py  Normal file
@@ -0,0 +1,50 @@
"""Tests for dealing with binary request and response data."""
import requests

from fixtures import BIN_FILE_PATH, BIN_FILE_CONTENT, BIN_FILE_PATH_ARG
from httpie.output.streams import BINARY_SUPPRESSED_NOTICE
from utils import MockEnvironment, http


class TestBinaryRequestData:

    def test_binary_stdin(self, httpbin):
        with open(BIN_FILE_PATH, 'rb') as stdin:
            env = MockEnvironment(
                stdin=stdin,
                stdin_isatty=False,
                stdout_isatty=False
            )
            r = http('--print=B', 'POST', httpbin.url + '/post', env=env)
            assert r == BIN_FILE_CONTENT

    def test_binary_file_path(self, httpbin):
        env = MockEnvironment(stdin_isatty=True, stdout_isatty=False)
        r = http('--print=B', 'POST', httpbin.url + '/post',
                 '@' + BIN_FILE_PATH_ARG, env=env, )
        assert r == BIN_FILE_CONTENT

    def test_binary_file_form(self, httpbin):
        env = MockEnvironment(stdin_isatty=True, stdout_isatty=False)
        r = http('--print=B', '--form', 'POST', httpbin.url + '/post',
                 'test@' + BIN_FILE_PATH_ARG, env=env)
        assert bytes(BIN_FILE_CONTENT) in bytes(r)


class TestBinaryResponseData:

    def test_binary_suppresses_when_terminal(self, httpbin):
        r = http('GET', httpbin + '/bytes/1024')
        assert BINARY_SUPPRESSED_NOTICE.decode() in r

    def test_binary_suppresses_when_not_terminal_but_pretty(self, httpbin):
        env = MockEnvironment(stdin_isatty=True, stdout_isatty=False)
        r = http('--pretty=all', 'GET', httpbin + '/bytes/1024', env=env)
        assert BINARY_SUPPRESSED_NOTICE.decode() in r

    def test_binary_included_and_correct_when_suitable(self, httpbin):
        env = MockEnvironment(stdin_isatty=True, stdout_isatty=False)
        url = httpbin + '/bytes/1024?seed=1'
        r = http('GET', url, env=env)
        expected = requests.get(url).content
        assert r == expected
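test_binary_file_path drives the same code path as the command line below: --print=B prints only the (binary) request body, and the @PATH item reads the body from a file. Sketched through the installed http executable rather than the in-process helper; the URL and file path are made up.

import subprocess

result = subprocess.run(
    ['http', '--print=B', 'POST', 'http://127.0.0.1:8080/post',
     '@/tmp/test.bin'],                 # hypothetical binary fixture path
    stdout=subprocess.PIPE,
)
print(len(result.stdout), 'bytes of request body echoed')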
347  tests/test_cli.py  Normal file
@@ -0,0 +1,347 @@
"""CLI argument parsing related tests."""
|
||||||
|
import json
|
||||||
|
# noinspection PyCompatibility
|
||||||
|
import argparse
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from requests.exceptions import InvalidSchema
|
||||||
|
|
||||||
|
from httpie import input
|
||||||
|
from httpie.input import KeyValue, KeyValueArgType, DataDict
|
||||||
|
from httpie import ExitStatus
|
||||||
|
from httpie.cli import parser
|
||||||
|
from utils import MockEnvironment, http, HTTP_OK
|
||||||
|
from fixtures import (
|
||||||
|
FILE_PATH_ARG, JSON_FILE_PATH_ARG,
|
||||||
|
JSON_FILE_CONTENT, FILE_CONTENT, FILE_PATH
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class TestItemParsing:
|
||||||
|
|
||||||
|
key_value = KeyValueArgType(*input.SEP_GROUP_ALL_ITEMS)
|
||||||
|
|
||||||
|
def test_invalid_items(self):
|
||||||
|
items = ['no-separator']
|
||||||
|
for item in items:
|
||||||
|
pytest.raises(argparse.ArgumentTypeError, self.key_value, item)
|
||||||
|
|
||||||
|
def test_escape_separator(self):
|
||||||
|
items = input.parse_items([
|
||||||
|
# headers
|
||||||
|
self.key_value(r'foo\:bar:baz'),
|
||||||
|
self.key_value(r'jack\@jill:hill'),
|
||||||
|
|
||||||
|
# data
|
||||||
|
self.key_value(r'baz\=bar=foo'),
|
||||||
|
|
||||||
|
# files
|
||||||
|
self.key_value(r'bar\@baz@%s' % FILE_PATH_ARG),
|
||||||
|
])
|
||||||
|
# `requests.structures.CaseInsensitiveDict` => `dict`
|
||||||
|
headers = dict(items.headers._store.values())
|
||||||
|
|
||||||
|
assert headers == {
|
||||||
|
'foo:bar': 'baz',
|
||||||
|
'jack@jill': 'hill',
|
||||||
|
}
|
||||||
|
assert items.data == {'baz=bar': 'foo'}
|
||||||
|
assert 'bar@baz' in items.files
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(('string', 'key', 'sep', 'value'), [
|
||||||
|
('path=c:\\windows', 'path', '=', 'c:\\windows'),
|
||||||
|
('path=c:\\windows\\', 'path', '=', 'c:\\windows\\'),
|
||||||
|
('path\\==c:\\windows', 'path=', '=', 'c:\\windows'),
|
||||||
|
])
|
||||||
|
def test_backslash_before_non_special_character_does_not_escape(
|
||||||
|
self, string, key, sep, value):
|
||||||
|
expected = KeyValue(orig=string, key=key, sep=sep, value=value)
|
||||||
|
actual = self.key_value(string)
|
||||||
|
assert actual == expected
|
||||||
|
|
||||||
|
def test_escape_longsep(self):
|
||||||
|
items = input.parse_items([
|
||||||
|
self.key_value(r'bob\:==foo'),
|
||||||
|
])
|
||||||
|
assert items.params == {'bob:': 'foo'}
|
||||||
|
|
||||||
|
def test_valid_items(self):
|
||||||
|
items = input.parse_items([
|
||||||
|
self.key_value('string=value'),
|
||||||
|
self.key_value('Header:value'),
|
||||||
|
self.key_value('Unset-Header:'),
|
||||||
|
self.key_value('Empty-Header;'),
|
||||||
|
self.key_value('list:=["a", 1, {}, false]'),
|
||||||
|
self.key_value('obj:={"a": "b"}'),
|
||||||
|
self.key_value('ed='),
|
||||||
|
self.key_value('bool:=true'),
|
||||||
|
self.key_value('file@' + FILE_PATH_ARG),
|
||||||
|
self.key_value('query==value'),
|
||||||
|
self.key_value('string-embed=@' + FILE_PATH_ARG),
|
||||||
|
self.key_value('raw-json-embed:=@' + JSON_FILE_PATH_ARG),
|
||||||
|
])
|
||||||
|
|
||||||
|
# Parsed headers
|
||||||
|
# `requests.structures.CaseInsensitiveDict` => `dict`
|
||||||
|
headers = dict(items.headers._store.values())
|
||||||
|
assert headers == {
|
||||||
|
'Header': 'value',
|
||||||
|
'Unset-Header': None,
|
||||||
|
'Empty-Header': ''
|
||||||
|
}
|
||||||
|
|
||||||
|
# Parsed data
|
||||||
|
raw_json_embed = items.data.pop('raw-json-embed')
|
||||||
|
assert raw_json_embed == json.loads(JSON_FILE_CONTENT)
|
||||||
|
items.data['string-embed'] = items.data['string-embed'].strip()
|
||||||
|
assert dict(items.data) == {
|
||||||
|
"ed": "",
|
||||||
|
"string": "value",
|
||||||
|
"bool": True,
|
||||||
|
"list": ["a", 1, {}, False],
|
||||||
|
"obj": {"a": "b"},
|
||||||
|
"string-embed": FILE_CONTENT,
|
||||||
|
}
|
||||||
|
|
||||||
|
# Parsed query string parameters
|
||||||
|
assert items.params == {'query': 'value'}
|
||||||
|
|
||||||
|
# Parsed file fields
|
||||||
|
assert 'file' in items.files
|
||||||
|
assert (items.files['file'][1].read().strip().
|
||||||
|
decode('utf8') == FILE_CONTENT)
|
||||||
|
|
||||||
|
def test_multiple_file_fields_with_same_field_name(self):
|
||||||
|
items = input.parse_items([
|
||||||
|
self.key_value('file_field@' + FILE_PATH_ARG),
|
||||||
|
self.key_value('file_field@' + FILE_PATH_ARG),
|
||||||
|
])
|
||||||
|
assert len(items.files['file_field']) == 2
|
||||||
|
|
||||||
|
def test_multiple_text_fields_with_same_field_name(self):
|
||||||
|
items = input.parse_items(
|
||||||
|
[self.key_value('text_field=a'),
|
||||||
|
self.key_value('text_field=b')],
|
||||||
|
data_class=DataDict
|
||||||
|
)
|
||||||
|
assert items.data['text_field'] == ['a', 'b']
|
||||||
|
assert list(items.data.items()) == [
|
||||||
|
('text_field', 'a'),
|
||||||
|
('text_field', 'b'),
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
class TestQuerystring:
|
||||||
|
def test_query_string_params_in_url(self, httpbin):
|
||||||
|
r = http('--print=Hhb', 'GET', httpbin.url + '/get?a=1&b=2')
|
||||||
|
path = '/get?a=1&b=2'
|
||||||
|
url = httpbin.url + path
|
||||||
|
assert HTTP_OK in r
|
||||||
|
assert 'GET %s HTTP/1.1' % path in r
|
||||||
|
assert '"url": "%s"' % url in r
|
||||||
|
|
||||||
|
def test_query_string_params_items(self, httpbin):
|
||||||
|
r = http('--print=Hhb', 'GET', httpbin.url + '/get', 'a==1')
|
||||||
|
path = '/get?a=1'
|
||||||
|
url = httpbin.url + path
|
||||||
|
assert HTTP_OK in r
|
||||||
|
assert 'GET %s HTTP/1.1' % path in r
|
||||||
|
assert '"url": "%s"' % url in r
|
||||||
|
|
||||||
|
def test_query_string_params_in_url_and_items_with_duplicates(self,
|
||||||
|
httpbin):
|
||||||
|
r = http('--print=Hhb', 'GET',
|
||||||
|
httpbin.url + '/get?a=1&a=1', 'a==1', 'a==1')
|
||||||
|
path = '/get?a=1&a=1&a=1&a=1'
|
||||||
|
url = httpbin.url + path
|
||||||
|
assert HTTP_OK in r
|
||||||
|
assert 'GET %s HTTP/1.1' % path in r
|
||||||
|
assert '"url": "%s"' % url in r
|
||||||
|
|
||||||
|
|
||||||
|
class TestLocalhostShorthand:
|
||||||
|
def test_expand_localhost_shorthand(self):
|
||||||
|
args = parser.parse_args(args=[':'], env=MockEnvironment())
|
||||||
|
assert args.url == 'http://localhost'
|
||||||
|
|
||||||
|
def test_expand_localhost_shorthand_with_slash(self):
|
||||||
|
args = parser.parse_args(args=[':/'], env=MockEnvironment())
|
||||||
|
assert args.url == 'http://localhost/'
|
||||||
|
|
||||||
|
def test_expand_localhost_shorthand_with_port(self):
|
||||||
|
args = parser.parse_args(args=[':3000'], env=MockEnvironment())
|
||||||
|
assert args.url == 'http://localhost:3000'
|
||||||
|
|
||||||
|
def test_expand_localhost_shorthand_with_path(self):
|
||||||
|
args = parser.parse_args(args=[':/path'], env=MockEnvironment())
|
||||||
|
assert args.url == 'http://localhost/path'
|
||||||
|
|
||||||
|
def test_expand_localhost_shorthand_with_port_and_slash(self):
|
||||||
|
args = parser.parse_args(args=[':3000/'], env=MockEnvironment())
|
||||||
|
assert args.url == 'http://localhost:3000/'
|
||||||
|
|
||||||
|
def test_expand_localhost_shorthand_with_port_and_path(self):
|
||||||
|
args = parser.parse_args(args=[':3000/path'], env=MockEnvironment())
|
||||||
|
assert args.url == 'http://localhost:3000/path'
|
||||||
|
|
||||||
|
def test_dont_expand_shorthand_ipv6_as_shorthand(self):
|
||||||
|
args = parser.parse_args(args=['::1'], env=MockEnvironment())
|
||||||
|
assert args.url == 'http://::1'
|
||||||
|
|
||||||
|
def test_dont_expand_longer_ipv6_as_shorthand(self):
|
||||||
|
args = parser.parse_args(
|
||||||
|
args=['::ffff:c000:0280'],
|
||||||
|
env=MockEnvironment()
|
||||||
|
)
|
||||||
|
assert args.url == 'http://::ffff:c000:0280'
|
||||||
|
|
||||||
|
def test_dont_expand_full_ipv6_as_shorthand(self):
|
||||||
|
args = parser.parse_args(
|
||||||
|
args=['0000:0000:0000:0000:0000:0000:0000:0001'],
|
||||||
|
env=MockEnvironment()
|
||||||
|
)
|
||||||
|
assert args.url == 'http://0000:0000:0000:0000:0000:0000:0000:0001'
|
||||||
|
|
||||||
|
|
||||||
|
class TestArgumentParser:
|
||||||
|
|
||||||
|
def setup_method(self, method):
|
||||||
|
self.parser = input.HTTPieArgumentParser()
|
||||||
|
|
||||||
|
def test_guess_when_method_set_and_valid(self):
|
||||||
|
self.parser.args = argparse.Namespace()
|
||||||
|
self.parser.args.method = 'GET'
|
||||||
|
self.parser.args.url = 'http://example.com/'
|
||||||
|
self.parser.args.items = []
|
||||||
|
self.parser.args.ignore_stdin = False
|
||||||
|
|
||||||
|
self.parser.env = MockEnvironment()
|
||||||
|
|
||||||
|
self.parser._guess_method()
|
||||||
|
|
||||||
|
assert self.parser.args.method == 'GET'
|
||||||
|
assert self.parser.args.url == 'http://example.com/'
|
||||||
|
assert self.parser.args.items == []
|
||||||
|
|
||||||
|
def test_guess_when_method_not_set(self):
|
||||||
|
self.parser.args = argparse.Namespace()
|
||||||
|
self.parser.args.method = None
|
||||||
|
self.parser.args.url = 'http://example.com/'
|
||||||
|
self.parser.args.items = []
|
||||||
|
self.parser.args.ignore_stdin = False
|
||||||
|
self.parser.env = MockEnvironment()
|
||||||
|
|
||||||
|
self.parser._guess_method()
|
||||||
|
|
||||||
|
assert self.parser.args.method == 'GET'
|
||||||
|
assert self.parser.args.url == 'http://example.com/'
|
||||||
|
assert self.parser.args.items == []
|
||||||
|
|
||||||
|
def test_guess_when_method_set_but_invalid_and_data_field(self):
|
||||||
|
self.parser.args = argparse.Namespace()
|
||||||
|
self.parser.args.method = 'http://example.com/'
|
||||||
|
self.parser.args.url = 'data=field'
|
||||||
|
self.parser.args.items = []
|
||||||
|
self.parser.args.ignore_stdin = False
|
||||||
|
self.parser.env = MockEnvironment()
|
||||||
|
self.parser._guess_method()
|
||||||
|
|
||||||
|
assert self.parser.args.method == 'POST'
|
||||||
|
assert self.parser.args.url == 'http://example.com/'
|
||||||
|
assert self.parser.args.items == [
|
||||||
|
KeyValue(key='data',
|
||||||
|
value='field',
|
||||||
|
sep='=',
|
||||||
|
orig='data=field')
|
||||||
|
]
|
||||||
|
|
||||||
|
def test_guess_when_method_set_but_invalid_and_header_field(self):
|
||||||
|
self.parser.args = argparse.Namespace()
|
||||||
|
self.parser.args.method = 'http://example.com/'
|
||||||
|
self.parser.args.url = 'test:header'
|
||||||
|
self.parser.args.items = []
|
||||||
|
self.parser.args.ignore_stdin = False
|
||||||
|
|
||||||
|
self.parser.env = MockEnvironment()
|
||||||
|
|
||||||
|
self.parser._guess_method()
|
||||||
|
|
||||||
|
assert self.parser.args.method == 'GET'
|
||||||
|
assert self.parser.args.url == 'http://example.com/'
|
||||||
|
assert self.parser.args.items, [
|
||||||
|
KeyValue(key='test',
|
||||||
|
value='header',
|
||||||
|
sep=':',
|
||||||
|
orig='test:header')
|
||||||
|
]
|
||||||
|
|
||||||
|
def test_guess_when_method_set_but_invalid_and_item_exists(self):
|
||||||
|
self.parser.args = argparse.Namespace()
|
||||||
|
self.parser.args.method = 'http://example.com/'
|
||||||
|
self.parser.args.url = 'new_item=a'
|
||||||
|
self.parser.args.items = [
|
||||||
|
KeyValue(
|
||||||
|
key='old_item', value='b', sep='=', orig='old_item=b')
|
||||||
|
]
|
||||||
|
self.parser.args.ignore_stdin = False
|
||||||
|
|
||||||
|
self.parser.env = MockEnvironment()
|
||||||
|
|
||||||
|
self.parser._guess_method()
|
||||||
|
|
||||||
|
assert self.parser.args.items, [
|
||||||
|
KeyValue(key='new_item', value='a', sep='=', orig='new_item=a'),
|
||||||
|
KeyValue(
|
||||||
|
key='old_item', value='b', sep='=', orig='old_item=b'),
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
class TestNoOptions:
|
||||||
|
|
||||||
|
def test_valid_no_options(self, httpbin):
|
||||||
|
r = http('--verbose', '--no-verbose', 'GET', httpbin.url + '/get')
|
||||||
|
assert 'GET /get HTTP/1.1' not in r
|
||||||
|
|
||||||
|
def test_invalid_no_options(self, httpbin):
|
||||||
|
r = http('--no-war', 'GET', httpbin.url + '/get',
|
||||||
|
error_exit_ok=True)
|
||||||
|
assert r.exit_status == 1
|
||||||
|
assert 'unrecognized arguments: --no-war' in r.stderr
|
||||||
|
assert 'GET /get HTTP/1.1' not in r
|
||||||
|
|
||||||
|
|
||||||
|
class TestIgnoreStdin:
|
||||||
|
|
||||||
|
def test_ignore_stdin(self, httpbin):
|
||||||
|
with open(FILE_PATH) as f:
|
||||||
|
env = MockEnvironment(stdin=f, stdin_isatty=False)
|
||||||
|
r = http('--ignore-stdin', '--verbose', httpbin.url + '/get',
|
||||||
|
env=env)
|
||||||
|
assert HTTP_OK in r
|
||||||
|
assert 'GET /get HTTP' in r, "Don't default to POST."
|
||||||
|
assert FILE_CONTENT not in r, "Don't send stdin data."
|
||||||
|
|
||||||
|
def test_ignore_stdin_cannot_prompt_password(self, httpbin):
|
||||||
|
r = http('--ignore-stdin', '--auth=no-password', httpbin.url + '/get',
|
||||||
|
error_exit_ok=True)
|
||||||
|
assert r.exit_status == ExitStatus.ERROR
|
||||||
|
assert 'because --ignore-stdin' in r.stderr
|
||||||
|
|
||||||
|
|
||||||
|
class TestSchemes:
|
||||||
|
|
||||||
|
def test_invalid_custom_scheme(self):
|
||||||
|
# InvalidSchema is expected because HTTPie
|
||||||
|
# shouldn't touch a formally valid scheme.
|
||||||
|
with pytest.raises(InvalidSchema):
|
||||||
|
http('foo+bar-BAZ.123://bah')
|
||||||
|
|
||||||
|
def test_invalid_scheme_via_via_default_scheme(self):
|
||||||
|
# InvalidSchema is expected because HTTPie
|
||||||
|
# shouldn't touch a formally valid scheme.
|
||||||
|
with pytest.raises(InvalidSchema):
|
||||||
|
http('bah', '--default=scheme=foo+bar-BAZ.123')
|
||||||
|
|
||||||
|
def test_default_scheme(self, httpbin_secure):
|
||||||
|
url = '{0}:{1}'.format(httpbin_secure.host, httpbin_secure.port)
|
||||||
|
assert HTTP_OK in http(url, '--default-scheme=https')
|
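TestItemParsing above builds its parser from the same separators the real CLI uses. A compact recap of what each separator means, using the classes the test file imports:

from httpie import input
from httpie.input import KeyValueArgType

key_value = KeyValueArgType(*input.SEP_GROUP_ALL_ITEMS)

header = key_value('X-API-Key:secret')   # ':'  HTTP header
data = key_value('name=value')           # '='  data field
query = key_value('page==2')             # '==' URL query parameter
raw_json = key_value('active:=true')     # ':=' raw JSON field

print(header.key, header.sep, header.value)
print(query.key, query.sep, query.value)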
40  tests/test_config.py  Normal file
@@ -0,0 +1,40 @@
from httpie import __version__
from utils import MockEnvironment, http
from httpie.context import Environment


def test_default_options(httpbin):
    env = MockEnvironment()
    env.config['default_options'] = ['--form']
    env.config.save()
    r = http(httpbin.url + '/post', 'foo=bar', env=env)
    assert r.json['form'] == {"foo": "bar"}


def test_default_options_overwrite(httpbin):
    env = MockEnvironment()
    env.config['default_options'] = ['--form']
    env.config.save()
    r = http('--json', httpbin.url + '/post', 'foo=bar', env=env)
    assert r.json['json'] == {"foo": "bar"}


def test_migrate_implicit_content_type():
    config = MockEnvironment().config

    config['implicit_content_type'] = 'json'
    config.save()
    config.load()
    assert 'implicit_content_type' not in config
    assert not config['default_options']

    config['implicit_content_type'] = 'form'
    config.save()
    config.load()
    assert 'implicit_content_type' not in config
    assert config['default_options'] == ['--form']


def test_current_version():
    version = Environment().config['__meta__']['httpie']
    assert version == __version__
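default_options is simply a list of extra CLI arguments stored in the config file and prepended to every invocation, which is why the explicit --json in test_default_options_overwrite wins over the stored --form. A rough sketch of what the saved JSON contains; the exact file layout and version value are illustrative.

import json

example_config = {
    'default_options': ['--form'],       # arguments applied to every run
    '__meta__': {'httpie': '1.0.0'},     # version stamp; value here is made up
}
print(json.dumps(example_config, indent=4))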
122  tests/test_defaults.py  Normal file
@@ -0,0 +1,122 @@
"""
|
||||||
|
Tests for the provided defaults regarding HTTP method, and --json vs. --form.
|
||||||
|
|
||||||
|
"""
|
||||||
|
from httpie.client import JSON_ACCEPT
|
||||||
|
from utils import MockEnvironment, http, HTTP_OK
|
||||||
|
from fixtures import FILE_PATH
|
||||||
|
|
||||||
|
|
||||||
|
def test_default_headers_case_insensitive(httpbin):
|
||||||
|
"""
|
||||||
|
<https://github.com/jakubroztocil/httpie/issues/644>
|
||||||
|
"""
|
||||||
|
r = http(
|
||||||
|
'--debug',
|
||||||
|
'--print=H',
|
||||||
|
httpbin.url + '/post',
|
||||||
|
'CONTENT-TYPE:application/json-patch+json',
|
||||||
|
'a=b',
|
||||||
|
)
|
||||||
|
assert 'CONTENT-TYPE: application/json-patch+json' in r
|
||||||
|
assert 'Content-Type' not in r
|
||||||
|
|
||||||
|
|
||||||
|
class TestImplicitHTTPMethod:
|
||||||
|
def test_implicit_GET(self, httpbin):
|
||||||
|
r = http(httpbin.url + '/get')
|
||||||
|
assert HTTP_OK in r
|
||||||
|
|
||||||
|
def test_implicit_GET_with_headers(self, httpbin):
|
||||||
|
r = http(httpbin.url + '/headers', 'Foo:bar')
|
||||||
|
assert HTTP_OK in r
|
||||||
|
assert r.json['headers']['Foo'] == 'bar'
|
||||||
|
|
||||||
|
def test_implicit_POST_json(self, httpbin):
|
||||||
|
r = http(httpbin.url + '/post', 'hello=world')
|
||||||
|
assert HTTP_OK in r
|
||||||
|
assert r.json['json'] == {'hello': 'world'}
|
||||||
|
|
||||||
|
def test_implicit_POST_form(self, httpbin):
|
||||||
|
r = http('--form', httpbin.url + '/post', 'foo=bar')
|
||||||
|
assert HTTP_OK in r
|
||||||
|
assert r.json['form'] == {'foo': 'bar'}
|
||||||
|
|
||||||
|
def test_implicit_POST_stdin(self, httpbin):
|
||||||
|
with open(FILE_PATH) as f:
|
||||||
|
env = MockEnvironment(stdin_isatty=False, stdin=f)
|
||||||
|
r = http('--form', httpbin.url + '/post', env=env)
|
||||||
|
assert HTTP_OK in r
|
||||||
|
|
||||||
|
|
||||||
|
class TestAutoContentTypeAndAcceptHeaders:
|
||||||
|
"""
|
||||||
|
Test that Accept and Content-Type correctly defaults to JSON,
|
||||||
|
but can still be overridden. The same with Content-Type when --form
|
||||||
|
-f is used.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
def test_GET_no_data_no_auto_headers(self, httpbin):
|
||||||
|
# https://github.com/jakubroztocil/httpie/issues/62
|
||||||
|
r = http('GET', httpbin.url + '/headers')
|
||||||
|
assert HTTP_OK in r
|
||||||
|
assert r.json['headers']['Accept'] == '*/*'
|
||||||
|
assert 'Content-Type' not in r.json['headers']
|
||||||
|
|
||||||
|
def test_POST_no_data_no_auto_headers(self, httpbin):
|
||||||
|
# JSON headers shouldn't be automatically set for POST with no data.
|
||||||
|
r = http('POST', httpbin.url + '/post')
|
||||||
|
assert HTTP_OK in r
|
||||||
|
assert '"Accept": "*/*"' in r
|
||||||
|
assert '"Content-Type": "application/json' not in r
|
||||||
|
|
||||||
|
def test_POST_with_data_auto_JSON_headers(self, httpbin):
|
||||||
|
r = http('POST', httpbin.url + '/post', 'a=b')
|
||||||
|
assert HTTP_OK in r
|
||||||
|
assert r.json['headers']['Accept'] == JSON_ACCEPT
|
||||||
|
assert r.json['headers']['Content-Type'] == 'application/json'
|
||||||
|
|
||||||
|
def test_GET_with_data_auto_JSON_headers(self, httpbin):
|
||||||
|
# JSON headers should automatically be set also for GET with data.
|
||||||
|
r = http('POST', httpbin.url + '/post', 'a=b')
|
||||||
|
assert HTTP_OK in r
|
||||||
|
assert r.json['headers']['Accept'] == JSON_ACCEPT
|
||||||
|
assert r.json['headers']['Content-Type'] == 'application/json'
|
||||||
|
|
||||||
|
def test_POST_explicit_JSON_auto_JSON_accept(self, httpbin):
|
||||||
|
r = http('--json', 'POST', httpbin.url + '/post')
|
||||||
|
assert HTTP_OK in r
|
||||||
|
assert r.json['headers']['Accept'] == JSON_ACCEPT
|
||||||
|
# Make sure Content-Type gets set even with no data.
|
||||||
|
# https://github.com/jakubroztocil/httpie/issues/137
|
||||||
|
assert 'application/json' in r.json['headers']['Content-Type']
|
||||||
|
|
||||||
|
def test_GET_explicit_JSON_explicit_headers(self, httpbin):
|
||||||
|
r = http('--json', 'GET', httpbin.url + '/headers',
|
||||||
|
'Accept:application/xml',
|
||||||
|
'Content-Type:application/xml')
|
||||||
|
assert HTTP_OK in r
|
||||||
|
assert '"Accept": "application/xml"' in r
|
||||||
|
assert '"Content-Type": "application/xml"' in r
|
||||||
|
|
||||||
|
def test_POST_form_auto_Content_Type(self, httpbin):
|
||||||
|
r = http('--form', 'POST', httpbin.url + '/post')
|
||||||
|
assert HTTP_OK in r
|
||||||
|
assert '"Content-Type": "application/x-www-form-urlencoded' in r
|
||||||
|
|
||||||
|
def test_POST_form_Content_Type_override(self, httpbin):
|
||||||
|
r = http('--form', 'POST', httpbin.url + '/post',
|
||||||
|
'Content-Type:application/xml')
|
||||||
|
assert HTTP_OK in r
|
||||||
|
assert '"Content-Type": "application/xml"' in r
|
||||||
|
|
||||||
|
def test_print_only_body_when_stdout_redirected_by_default(self, httpbin):
|
||||||
|
env = MockEnvironment(stdin_isatty=True, stdout_isatty=False)
|
||||||
|
r = http('GET', httpbin.url + '/get', env=env)
|
||||||
|
assert 'HTTP/' not in r
|
||||||
|
|
||||||
|
def test_print_overridable_when_stdout_redirected(self, httpbin):
|
||||||
|
env = MockEnvironment(stdin_isatty=True, stdout_isatty=False)
|
||||||
|
r = http('--print=h', 'GET', httpbin.url + '/get', env=env)
|
||||||
|
assert HTTP_OK in r
|
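The defaults being pinned down here: in JSON mode (the default once there is data), HTTPie sends an Accept header defined as JSON_ACCEPT in httpie.client plus Content-Type: application/json, while --form switches Content-Type to urlencoded. The equivalent request spelled out with plain requests; base_url is a stand-in and the Accept literal is only an approximation of JSON_ACCEPT.

import json

import requests

base_url = 'http://127.0.0.1:8080'          # hypothetical httpbin address
headers = {
    'Accept': 'application/json, */*',      # approximation of httpie.client.JSON_ACCEPT
    'Content-Type': 'application/json',
}
r = requests.post(base_url + '/post',
                  data=json.dumps({'a': 'b'}),
                  headers=headers)
assert r.json()['json'] == {'a': 'b'}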
39  tests/test_docs.py  Normal file
@@ -0,0 +1,39 @@
import os
import fnmatch
import subprocess

import pytest

from utils import TESTS_ROOT


def has_docutils():
    try:
        # noinspection PyUnresolvedReferences
        import docutils
        return True
    except ImportError:
        return False


def rst_filenames():
    for root, dirnames, filenames in os.walk(os.path.dirname(TESTS_ROOT)):
        if '.tox' not in root:
            for filename in fnmatch.filter(filenames, '*.rst'):
                yield os.path.join(root, filename)


filenames = list(rst_filenames())
assert filenames


@pytest.mark.skipif(not has_docutils(), reason='docutils not installed')
@pytest.mark.parametrize('filename', filenames)
def test_rst_file_syntax(filename):
    p = subprocess.Popen(
        ['rst2pseudoxml.py', '--report=1', '--exit-status=1', filename],
        stderr=subprocess.PIPE,
        stdout=subprocess.PIPE
    )
    err = p.communicate()[1]
    assert p.returncode == 0, err.decode('utf8')
165  tests/test_downloads.py  Normal file
@@ -0,0 +1,165 @@
import os
|
||||||
|
import time
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
import mock
|
||||||
|
from requests.structures import CaseInsensitiveDict
|
||||||
|
|
||||||
|
from httpie.compat import urlopen
|
||||||
|
from httpie.downloads import (
|
||||||
|
parse_content_range, filename_from_content_disposition, filename_from_url,
|
||||||
|
get_unique_filename, ContentRangeError, Downloader,
|
||||||
|
)
|
||||||
|
from utils import http, MockEnvironment
|
||||||
|
|
||||||
|
|
||||||
|
class Response(object):
|
||||||
|
# noinspection PyDefaultArgument
|
||||||
|
def __init__(self, url, headers={}, status_code=200):
|
||||||
|
self.url = url
|
||||||
|
self.headers = CaseInsensitiveDict(headers)
|
||||||
|
self.status_code = status_code
|
||||||
|
|
||||||
|
|
||||||
|
class TestDownloadUtils:
|
||||||
|
def test_Content_Range_parsing(self):
|
||||||
|
parse = parse_content_range
|
||||||
|
|
||||||
|
assert parse('bytes 100-199/200', 100) == 200
|
||||||
|
assert parse('bytes 100-199/*', 100) == 200
|
||||||
|
|
||||||
|
# missing
|
||||||
|
pytest.raises(ContentRangeError, parse, None, 100)
|
||||||
|
|
||||||
|
# syntax error
|
||||||
|
pytest.raises(ContentRangeError, parse, 'beers 100-199/*', 100)
|
||||||
|
|
||||||
|
# unexpected range
|
||||||
|
pytest.raises(ContentRangeError, parse, 'bytes 100-199/*', 99)
|
||||||
|
|
||||||
|
# invalid instance-length
|
||||||
|
pytest.raises(ContentRangeError, parse, 'bytes 100-199/199', 100)
|
||||||
|
|
||||||
|
# invalid byte-range-resp-spec
|
||||||
|
pytest.raises(ContentRangeError, parse, 'bytes 100-99/199', 100)
|
||||||
|
|
||||||
|
# invalid byte-range-resp-spec
|
||||||
|
pytest.raises(ContentRangeError, parse, 'bytes 100-100/*', 100)
|
||||||
|
|
||||||
|
@pytest.mark.parametrize('header, expected_filename', [
|
||||||
|
('attachment; filename=hello-WORLD_123.txt', 'hello-WORLD_123.txt'),
|
||||||
|
('attachment; filename=".hello-WORLD_123.txt"', 'hello-WORLD_123.txt'),
|
||||||
|
('attachment; filename="white space.txt"', 'white space.txt'),
|
||||||
|
(r'attachment; filename="\"quotes\".txt"', '"quotes".txt'),
|
||||||
|
('attachment; filename=/etc/hosts', 'hosts'),
|
||||||
|
('attachment; filename=', None)
|
||||||
|
])
|
||||||
|
def test_Content_Disposition_parsing(self, header, expected_filename):
|
||||||
|
assert filename_from_content_disposition(header) == expected_filename
|
||||||
|
|
||||||
|
def test_filename_from_url(self):
|
||||||
|
assert 'foo.txt' == filename_from_url(
|
||||||
|
url='http://example.org/foo',
|
||||||
|
content_type='text/plain'
|
||||||
|
)
|
||||||
|
assert 'foo.html' == filename_from_url(
|
||||||
|
url='http://example.org/foo',
|
||||||
|
content_type='text/html; charset=utf8'
|
||||||
|
)
|
||||||
|
assert 'foo' == filename_from_url(
|
||||||
|
url='http://example.org/foo',
|
||||||
|
content_type=None
|
||||||
|
)
|
||||||
|
assert 'foo' == filename_from_url(
|
||||||
|
url='http://example.org/foo',
|
||||||
|
content_type='x-foo/bar'
|
||||||
|
)
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
'orig_name, unique_on_attempt, expected',
|
||||||
|
[
|
||||||
|
# Simple
|
||||||
|
('foo.bar', 0, 'foo.bar'),
|
||||||
|
('foo.bar', 1, 'foo.bar-1'),
|
||||||
|
('foo.bar', 10, 'foo.bar-10'),
|
||||||
|
# Trim
|
||||||
|
('A' * 20, 0, 'A' * 10),
|
||||||
|
('A' * 20, 1, 'A' * 8 + '-1'),
|
||||||
|
('A' * 20, 10, 'A' * 7 + '-10'),
|
||||||
|
# Trim before ext
|
||||||
|
('A' * 20 + '.txt', 0, 'A' * 6 + '.txt'),
|
||||||
|
('A' * 20 + '.txt', 1, 'A' * 4 + '.txt-1'),
|
||||||
|
# Trim at the end
|
||||||
|
('foo.' + 'A' * 20, 0, 'foo.' + 'A' * 6),
|
||||||
|
('foo.' + 'A' * 20, 1, 'foo.' + 'A' * 4 + '-1'),
|
||||||
|
('foo.' + 'A' * 20, 10, 'foo.' + 'A' * 3 + '-10'),
|
||||||
|
]
|
||||||
|
)
|
||||||
|
@mock.patch('httpie.downloads.get_filename_max_length')
|
||||||
|
def test_unique_filename(self, get_filename_max_length,
|
||||||
|
orig_name, unique_on_attempt,
|
||||||
|
expected):
|
||||||
|
|
||||||
|
def attempts(unique_on_attempt=0):
|
||||||
|
# noinspection PyUnresolvedReferences,PyUnusedLocal
|
||||||
|
def exists(filename):
|
||||||
|
if exists.attempt == unique_on_attempt:
|
||||||
|
return False
|
||||||
|
exists.attempt += 1
|
||||||
|
return True
|
||||||
|
|
||||||
|
exists.attempt = 0
|
||||||
|
return exists
|
||||||
|
|
||||||
|
get_filename_max_length.return_value = 10
|
||||||
|
|
||||||
|
actual = get_unique_filename(orig_name, attempts(unique_on_attempt))
|
||||||
|
assert expected == actual
|
||||||
|
|
||||||
|
|
||||||
|
class TestDownloads:
|
||||||
|
# TODO: more tests
|
||||||
|
|
||||||
|
def test_actual_download(self, httpbin_both, httpbin):
|
||||||
|
robots_txt = '/robots.txt'
|
||||||
|
body = urlopen(httpbin + robots_txt).read().decode()
|
||||||
|
env = MockEnvironment(stdin_isatty=True, stdout_isatty=False)
|
||||||
|
r = http('--download', httpbin_both.url + robots_txt, env=env)
|
||||||
|
assert 'Downloading' in r.stderr
|
||||||
|
assert '[K' in r.stderr
|
||||||
|
assert 'Done' in r.stderr
|
||||||
|
assert body == r
|
||||||
|
|
||||||
|
def test_download_with_Content_Length(self, httpbin_both):
|
||||||
|
devnull = open(os.devnull, 'w')
|
||||||
|
downloader = Downloader(output_file=devnull, progress_file=devnull)
|
||||||
|
downloader.start(Response(
|
||||||
|
url=httpbin_both.url + '/',
|
||||||
|
headers={'Content-Length': 10}
|
||||||
|
))
|
||||||
|
time.sleep(1.1)
|
||||||
|
downloader.chunk_downloaded(b'12345')
|
||||||
|
time.sleep(1.1)
|
||||||
|
downloader.chunk_downloaded(b'12345')
|
||||||
|
downloader.finish()
|
||||||
|
assert not downloader.interrupted
|
||||||
|
|
||||||
|
def test_download_no_Content_Length(self, httpbin_both):
|
||||||
|
devnull = open(os.devnull, 'w')
|
||||||
|
downloader = Downloader(output_file=devnull, progress_file=devnull)
|
||||||
|
downloader.start(Response(url=httpbin_both.url + '/'))
|
||||||
|
time.sleep(1.1)
|
||||||
|
downloader.chunk_downloaded(b'12345')
|
||||||
|
downloader.finish()
|
||||||
|
assert not downloader.interrupted
|
||||||
|
|
||||||
|
def test_download_interrupted(self, httpbin_both):
|
||||||
|
devnull = open(os.devnull, 'w')
|
||||||
|
downloader = Downloader(output_file=devnull, progress_file=devnull)
|
||||||
|
downloader.start(Response(
|
||||||
|
url=httpbin_both.url + '/',
|
||||||
|
headers={'Content-Length': 5}
|
||||||
|
))
|
||||||
|
downloader.chunk_downloaded(b'1234')
|
||||||
|
downloader.finish()
|
||||||
|
assert downloader.interrupted
|
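The helpers imported at the top of this file can be used directly; the inputs below mirror the parametrized cases in TestDownloadUtils.

from httpie.downloads import (
    parse_content_range,
    filename_from_content_disposition,
)

# Resuming after 100 bytes: the Content-Range header yields the total size.
total = parse_content_range('bytes 100-199/200', 100)
print(total)  # 200

print(filename_from_content_disposition(
    'attachment; filename="white space.txt"'))  # white space.txt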
49  tests/test_errors.py  Normal file
@@ -0,0 +1,49 @@
import mock
from pytest import raises
from requests import Request, Timeout
from requests.exceptions import ConnectionError

from httpie import ExitStatus
from httpie.core import main

error_msg = None


@mock.patch('httpie.core.get_response')
def test_error(get_response):
    def error(msg, *args, **kwargs):
        global error_msg
        error_msg = msg % args

    exc = ConnectionError('Connection aborted')
    exc.request = Request(method='GET', url='http://www.google.com')
    get_response.side_effect = exc
    ret = main(['--ignore-stdin', 'www.google.com'], custom_log_error=error)
    assert ret == ExitStatus.ERROR
    assert error_msg == (
        'ConnectionError: '
        'Connection aborted while doing GET request to URL: '
        'http://www.google.com')


@mock.patch('httpie.core.get_response')
def test_error_traceback(get_response):
    exc = ConnectionError('Connection aborted')
    exc.request = Request(method='GET', url='http://www.google.com')
    get_response.side_effect = exc
    with raises(ConnectionError):
        main(['--ignore-stdin', '--traceback', 'www.google.com'])


@mock.patch('httpie.core.get_response')
def test_timeout(get_response):
    def error(msg, *args, **kwargs):
        global error_msg
        error_msg = msg % args

    exc = Timeout('Request timed out')
    exc.request = Request(method='GET', url='http://www.google.com')
    get_response.side_effect = exc
    ret = main(['--ignore-stdin', 'www.google.com'], custom_log_error=error)
    assert ret == ExitStatus.ERROR_TIMEOUT
    assert error_msg == 'Request timed out (30s).'
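main() returns an ExitStatus value and routes request-level failures through a log-error callback, which is exactly what these tests capture via custom_log_error. Calling it programmatically looks like this; the URL is only an example.

from httpie import ExitStatus
from httpie.core import main


def log_error(msg, *args, **kwargs):
    print('httpie error:', msg % args)


status = main(['--ignore-stdin', 'example.org'], custom_log_error=log_error)
if status != ExitStatus.SUCCESS:
    print('request failed with exit status', status)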
74  tests/test_exit_status.py  Normal file
@@ -0,0 +1,74 @@
import mock

from httpie import ExitStatus
from utils import MockEnvironment, http, HTTP_OK


def test_keyboard_interrupt_during_arg_parsing_exit_status(httpbin):
    with mock.patch('httpie.cli.parser.parse_args',
                    side_effect=KeyboardInterrupt()):
        r = http('GET', httpbin.url + '/get', error_exit_ok=True)
        assert r.exit_status == ExitStatus.ERROR_CTRL_C


def test_keyboard_interrupt_in_program_exit_status(httpbin):
    with mock.patch('httpie.core.program',
                    side_effect=KeyboardInterrupt()):
        r = http('GET', httpbin.url + '/get', error_exit_ok=True)
        assert r.exit_status == ExitStatus.ERROR_CTRL_C


def test_ok_response_exits_0(httpbin):
    r = http('GET', httpbin.url + '/get')
    assert HTTP_OK in r
    assert r.exit_status == ExitStatus.SUCCESS


def test_error_response_exits_0_without_check_status(httpbin):
    r = http('GET', httpbin.url + '/status/500')
    assert '500 INTERNAL SERVER ERROR' in r
    assert r.exit_status == ExitStatus.SUCCESS
    assert not r.stderr


def test_timeout_exit_status(httpbin):

    r = http('--timeout=0.01', 'GET', httpbin.url + '/delay/0.5',
             error_exit_ok=True)
    assert r.exit_status == ExitStatus.ERROR_TIMEOUT


def test_3xx_check_status_exits_3_and_stderr_when_stdout_redirected(
        httpbin):
    env = MockEnvironment(stdout_isatty=False)
    r = http('--check-status', '--headers',
             'GET', httpbin.url + '/status/301',
             env=env, error_exit_ok=True)
    assert '301 MOVED PERMANENTLY' in r
    assert r.exit_status == ExitStatus.ERROR_HTTP_3XX
    assert '301 moved permanently' in r.stderr.lower()


def test_3xx_check_status_redirects_allowed_exits_0(httpbin):
    r = http('--check-status', '--follow',
             'GET', httpbin.url + '/status/301',
             error_exit_ok=True)
    # The redirect will be followed so 200 is expected.
    assert HTTP_OK in r
    assert r.exit_status == ExitStatus.SUCCESS


def test_4xx_check_status_exits_4(httpbin):
    r = http('--check-status', 'GET', httpbin.url + '/status/401',
             error_exit_ok=True)
    assert '401 UNAUTHORIZED' in r
    assert r.exit_status == ExitStatus.ERROR_HTTP_4XX
    # Also stderr should be empty since stdout isn't redirected.
    assert not r.stderr


def test_5xx_check_status_exits_5(httpbin):
    r = http('--check-status', 'GET', httpbin.url + '/status/500',
             error_exit_ok=True)
    assert '500 INTERNAL SERVER ERROR' in r
    assert r.exit_status == ExitStatus.ERROR_HTTP_5XX
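With --check-status, the response class maps to the process exit code (3 for redirects unless --follow, 4 for client errors, 5 for server errors), which is what the assertions above encode via ExitStatus. A sketch of a wrapper script using those codes; the URL is illustrative.

import subprocess

result = subprocess.run(
    ['http', '--check-status', '--ignore-stdin',
     'http://127.0.0.1:8080/status/503'],
    stdout=subprocess.DEVNULL,
    stderr=subprocess.DEVNULL,
)
if result.returncode == 5:
    print('server-side failure (HTTP 5xx)')
elif result.returncode == 4:
    print('client-side failure (HTTP 4xx)')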
114  tests/test_httpie.py  Normal file
@@ -0,0 +1,114 @@
"""High-level tests."""
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from httpie.input import ParseError
|
||||||
|
from utils import MockEnvironment, http, HTTP_OK
|
||||||
|
from fixtures import FILE_PATH, FILE_CONTENT
|
||||||
|
|
||||||
|
import httpie
|
||||||
|
|
||||||
|
|
||||||
|
def test_debug():
|
||||||
|
r = http('--debug')
|
||||||
|
assert r.exit_status == httpie.ExitStatus.SUCCESS
|
||||||
|
assert 'HTTPie %s' % httpie.__version__ in r.stderr
|
||||||
|
|
||||||
|
|
||||||
|
def test_help():
|
||||||
|
r = http('--help', error_exit_ok=True)
|
||||||
|
assert r.exit_status == httpie.ExitStatus.SUCCESS
|
||||||
|
assert 'https://github.com/jakubroztocil/httpie/issues' in r
|
||||||
|
|
||||||
|
|
||||||
|
def test_version():
|
||||||
|
r = http('--version', error_exit_ok=True)
|
||||||
|
assert r.exit_status == httpie.ExitStatus.SUCCESS
|
||||||
|
# FIXME: py3 has version in stdout, py2 in stderr
|
||||||
|
assert httpie.__version__ == r.stderr.strip() + r.strip()
|
||||||
|
|
||||||
|
|
||||||
|
def test_GET(httpbin_both):
|
||||||
|
r = http('GET', httpbin_both + '/get')
|
||||||
|
assert HTTP_OK in r
|
||||||
|
|
||||||
|
|
||||||
|
def test_DELETE(httpbin_both):
|
||||||
|
r = http('DELETE', httpbin_both + '/delete')
|
||||||
|
assert HTTP_OK in r
|
||||||
|
|
||||||
|
|
||||||
|
def test_PUT(httpbin_both):
|
||||||
|
r = http('PUT', httpbin_both + '/put', 'foo=bar')
|
||||||
|
assert HTTP_OK in r
|
||||||
|
assert r.json['json']['foo'] == 'bar'
|
||||||
|
|
||||||
|
|
||||||
|
def test_POST_JSON_data(httpbin_both):
|
||||||
|
r = http('POST', httpbin_both + '/post', 'foo=bar')
|
||||||
|
assert HTTP_OK in r
|
||||||
|
assert r.json['json']['foo'] == 'bar'
|
||||||
|
|
||||||
|
|
||||||
|
def test_POST_form(httpbin_both):
|
||||||
|
r = http('--form', 'POST', httpbin_both + '/post', 'foo=bar')
|
||||||
|
assert HTTP_OK in r
|
||||||
|
assert '"foo": "bar"' in r
|
||||||
|
|
||||||
|
|
||||||
|
def test_POST_form_multiple_values(httpbin_both):
|
||||||
|
r = http('--form', 'POST', httpbin_both + '/post', 'foo=bar', 'foo=baz')
|
||||||
|
assert HTTP_OK in r
|
||||||
|
assert r.json['form'] == {'foo': ['bar', 'baz']}
|
||||||
|
|
||||||
|
|
||||||
|
def test_POST_stdin(httpbin_both):
|
||||||
|
with open(FILE_PATH) as f:
|
||||||
|
env = MockEnvironment(stdin=f, stdin_isatty=False)
|
||||||
|
r = http('--form', 'POST', httpbin_both + '/post', env=env)
|
||||||
|
assert HTTP_OK in r
|
||||||
|
assert FILE_CONTENT in r
|
||||||
|
|
||||||
|
|
||||||
|
def test_headers(httpbin_both):
|
||||||
|
r = http('GET', httpbin_both + '/headers', 'Foo:bar')
|
||||||
|
assert HTTP_OK in r
|
||||||
|
assert '"User-Agent": "HTTPie' in r, r
|
||||||
|
assert '"Foo": "bar"' in r
|
||||||
|
|
||||||
|
|
||||||
|
def test_headers_unset(httpbin_both):
|
||||||
|
r = http('GET', httpbin_both + '/headers')
|
||||||
|
assert 'Accept' in r.json['headers'] # default Accept present
|
||||||
|
|
||||||
|
r = http('GET', httpbin_both + '/headers', 'Accept:')
|
||||||
|
assert 'Accept' not in r.json['headers'] # default Accept unset
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.skip('unimplemented')
|
||||||
|
def test_unset_host_header(httpbin_both):
|
||||||
|
r = http('GET', httpbin_both + '/headers')
|
||||||
|
assert 'Host' in r.json['headers'] # default Host present
|
||||||
|
|
||||||
|
r = http('GET', httpbin_both + '/headers', 'Host:')
|
||||||
|
assert 'Host' not in r.json['headers'] # default Host unset
|
||||||
|
|
||||||
|
|
||||||
|
def test_headers_empty_value(httpbin_both):
|
||||||
|
r = http('GET', httpbin_both + '/headers')
|
||||||
|
assert r.json['headers']['Accept'] # default Accept has value
|
||||||
|
|
||||||
|
r = http('GET', httpbin_both + '/headers', 'Accept;')
|
||||||
|
assert r.json['headers']['Accept'] == '' # Accept has no value
|
||||||
|
|
||||||
|
|
||||||
|
def test_headers_empty_value_with_value_gives_error(httpbin):
|
||||||
|
with pytest.raises(ParseError):
|
||||||
|
http('GET', httpbin + '/headers', 'Accept;SYNTAX_ERROR')
|
||||||
|
|
||||||
|
|
||||||
|
def test_json_input_preserve_order(httpbin_both):
|
||||||
|
r = http('PATCH', httpbin_both + '/patch',
|
||||||
|
'order:={"map":{"1":"first","2":"second"}}')
|
||||||
|
assert HTTP_OK in r
|
||||||
|
assert r.json['data'] == \
|
||||||
|
'{"order": {"map": {"1": "first", "2": "second"}}}'
|
tests/test_output.py — new file (173 lines)
@@ -0,0 +1,173 @@

import os
from tempfile import gettempdir

import pytest

from utils import MockEnvironment, http, HTTP_OK, COLOR, CRLF
from httpie import ExitStatus
from httpie.compat import urlopen
from httpie.output.formatters.colors import get_lexer


@pytest.mark.parametrize('stdout_isatty', [True, False])
def test_output_option(httpbin, stdout_isatty):
    output_filename = os.path.join(gettempdir(), test_output_option.__name__)
    url = httpbin + '/robots.txt'

    r = http('--output', output_filename, url,
             env=MockEnvironment(stdout_isatty=stdout_isatty))
    assert r == ''

    expected_body = urlopen(url).read().decode()
    with open(output_filename, 'r') as f:
        actual_body = f.read()

    assert actual_body == expected_body


class TestVerboseFlag:
    def test_verbose(self, httpbin):
        r = http('--verbose',
                 'GET', httpbin.url + '/get', 'test-header:__test__')
        assert HTTP_OK in r
        assert r.count('__test__') == 2

    def test_verbose_form(self, httpbin):
        # https://github.com/jakubroztocil/httpie/issues/53
        r = http('--verbose', '--form', 'POST', httpbin.url + '/post',
                 'A=B', 'C=D')
        assert HTTP_OK in r
        assert 'A=B&C=D' in r

    def test_verbose_json(self, httpbin):
        r = http('--verbose',
                 'POST', httpbin.url + '/post', 'foo=bar', 'baz=bar')
        assert HTTP_OK in r
        assert '"baz": "bar"' in r

    def test_verbose_implies_all(self, httpbin):
        r = http('--verbose', '--follow', httpbin + '/redirect/1')
        assert 'GET /redirect/1 HTTP/1.1' in r
        assert 'HTTP/1.1 302 FOUND' in r
        assert 'GET /get HTTP/1.1' in r
        assert HTTP_OK in r


class TestColors:

    @pytest.mark.parametrize(
        argnames=['mime', 'explicit_json', 'body', 'expected_lexer_name'],
        argvalues=[
            ('application/json', False, None, 'JSON'),
            ('application/json+foo', False, None, 'JSON'),
            ('application/foo+json', False, None, 'JSON'),
            ('application/json-foo', False, None, 'JSON'),
            ('application/x-json', False, None, 'JSON'),
            ('foo/json', False, None, 'JSON'),
            ('foo/json+bar', False, None, 'JSON'),
            ('foo/bar+json', False, None, 'JSON'),
            ('foo/json-foo', False, None, 'JSON'),
            ('foo/x-json', False, None, 'JSON'),
            ('application/vnd.comverge.grid+hal+json', False, None, 'JSON'),
            ('text/plain', True, '{}', 'JSON'),
            ('text/plain', True, 'foo', 'Text only'),
        ]
    )
    def test_get_lexer(self, mime, explicit_json, body, expected_lexer_name):
        lexer = get_lexer(mime, body=body, explicit_json=explicit_json)
        assert lexer is not None
        assert lexer.name == expected_lexer_name

    def test_get_lexer_not_found(self):
        assert get_lexer('xxx/yyy') is None


class TestPrettyOptions:
    """Test the --pretty flag handling."""

    def test_pretty_enabled_by_default(self, httpbin):
        env = MockEnvironment(colors=256)
        r = http('GET', httpbin.url + '/get', env=env)
        assert COLOR in r

    def test_pretty_enabled_by_default_unless_stdout_redirected(self, httpbin):
        r = http('GET', httpbin.url + '/get')
        assert COLOR not in r

    def test_force_pretty(self, httpbin):
        env = MockEnvironment(stdout_isatty=False, colors=256)
        r = http('--pretty=all', 'GET', httpbin.url + '/get', env=env)
        assert COLOR in r

    def test_force_ugly(self, httpbin):
        r = http('--pretty=none', 'GET', httpbin.url + '/get')
        assert COLOR not in r

    def test_subtype_based_pygments_lexer_match(self, httpbin):
        """Test that media subtype is used if type/subtype doesn't
        match any lexer.

        """
        env = MockEnvironment(colors=256)
        r = http('--print=B', '--pretty=all', httpbin.url + '/post',
                 'Content-Type:text/foo+json', 'a=b', env=env)
        assert COLOR in r

    def test_colors_option(self, httpbin):
        env = MockEnvironment(colors=256)
        r = http('--print=B', '--pretty=colors',
                 'GET', httpbin.url + '/get', 'a=b',
                 env=env)
        # Tests that the JSON data isn't formatted.
        assert not r.strip().count('\n')
        assert COLOR in r

    def test_format_option(self, httpbin):
        env = MockEnvironment(colors=256)
        r = http('--print=B', '--pretty=format',
                 'GET', httpbin.url + '/get', 'a=b',
                 env=env)
        # Tests that the JSON data is formatted.
        assert r.strip().count('\n') == 2
        assert COLOR not in r


class TestLineEndings:
    """
    Test that CRLF is properly used in headers
    and as the headers/body separator.

    """
    def _validate_crlf(self, msg):
        lines = iter(msg.splitlines(True))
        for header in lines:
            if header == CRLF:
                break
            assert header.endswith(CRLF), repr(header)
        else:
            assert 0, 'CRLF between headers and body not found in %r' % msg
        body = ''.join(lines)
        assert CRLF not in body
        return body

    def test_CRLF_headers_only(self, httpbin):
        r = http('--headers', 'GET', httpbin.url + '/get')
        body = self._validate_crlf(r)
        assert not body, 'Garbage after headers: %r' % r

    def test_CRLF_ugly_response(self, httpbin):
        r = http('--pretty=none', 'GET', httpbin.url + '/get')
        self._validate_crlf(r)

    def test_CRLF_formatted_response(self, httpbin):
        r = http('--pretty=format', 'GET', httpbin.url + '/get')
        assert r.exit_status == ExitStatus.SUCCESS
        self._validate_crlf(r)

    def test_CRLF_ugly_request(self, httpbin):
        r = http('--pretty=none', '--print=HB', 'GET', httpbin.url + '/get')
        self._validate_crlf(r)

    def test_CRLF_formatted_request(self, httpbin):
        r = http('--pretty=format', '--print=HB', 'GET', httpbin.url + '/get')
        self._validate_crlf(r)
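Editor's note (not part of the diff): the `_validate_crlf` helper above walks the raw message line by line, requiring every header line to end with CRLF until the bare CRLF separator line is reached, and requiring that no CRLF appears in the body. A minimal standalone sketch of that same check, run on a hypothetical raw message string, assuming nothing beyond the standard library:

CRLF = '\r\n'


def validate_crlf(msg):
    # keepends=True so each line keeps its CRLF and the separator line is visible
    lines = iter(msg.splitlines(True))
    for header in lines:
        if header == CRLF:
            break  # a bare CRLF line marks the end of the headers
        assert header.endswith(CRLF), repr(header)
    else:
        assert 0, 'CRLF between headers and body not found in %r' % msg
    body = ''.join(lines)
    assert CRLF not in body
    return body


# Hypothetical message, used only to illustrate the check.
sample = 'HTTP/1.1 200 OK\r\nContent-Type: text/plain\r\n\r\nhello\n'
assert validate_crlf(sample) == 'hello\n'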
tests/test_redirects.py — new file (47 lines)
@@ -0,0 +1,47 @@

"""High-level tests."""
import pytest

from httpie import ExitStatus
from utils import http, HTTP_OK


def test_follow_all_redirects_shown(httpbin):
    r = http('--follow', '--all', httpbin.url + '/redirect/2')
    assert r.count('HTTP/1.1') == 3
    assert r.count('HTTP/1.1 302 FOUND', 2)
    assert HTTP_OK in r


@pytest.mark.parametrize('follow_flag', ['--follow', '-F'])
def test_follow_without_all_redirects_hidden(httpbin, follow_flag):
    r = http(follow_flag, httpbin.url + '/redirect/2')
    assert r.count('HTTP/1.1') == 1
    assert HTTP_OK in r


def test_follow_all_output_options_used_for_redirects(httpbin):
    r = http('--check-status',
             '--follow',
             '--all',
             '--print=H',
             httpbin.url + '/redirect/2')
    assert r.count('GET /') == 3
    assert HTTP_OK not in r


def test_follow_redirect_output_options(httpbin):
    r = http('--check-status',
             '--follow',
             '--all',
             '--print=h',
             '--history-print=H',
             httpbin.url + '/redirect/2')
    assert r.count('GET /') == 2
    assert 'HTTP/1.1 302 FOUND' not in r
    assert HTTP_OK in r


def test_max_redirects(httpbin):
    r = http('--max-redirects=1', '--follow', httpbin.url + '/redirect/3',
             error_exit_ok=True)
    assert r.exit_status == ExitStatus.ERROR_TOO_MANY_REDIRECTS
tests/test_regressions.py — new file (27 lines)
@@ -0,0 +1,27 @@

"""Miscellaneous regression tests"""
import pytest

from utils import http, HTTP_OK
from httpie.compat import is_windows


def test_Host_header_overwrite(httpbin):
    """
    https://github.com/jakubroztocil/httpie/issues/235

    """
    host = 'httpbin.org'
    url = httpbin.url + '/get'
    r = http('--print=hH', url, 'host:{0}'.format(host))
    assert HTTP_OK in r
    assert r.lower().count('host:') == 1
    assert 'host: {0}'.format(host) in r


@pytest.mark.skipif(is_windows, reason='Unix-only')
def test_output_devnull(httpbin):
    """
    https://github.com/jakubroztocil/httpie/issues/252

    """
    http('--output=/dev/null', httpbin + '/get')
tests/test_sessions.py — new file (187 lines)
@@ -0,0 +1,187 @@

# coding=utf-8
import os
import shutil
import sys
from tempfile import gettempdir

import pytest

from httpie.plugins.builtin import HTTPBasicAuth
from utils import MockEnvironment, mk_config_dir, http, HTTP_OK
from fixtures import UNICODE


class SessionTestBase(object):

    def start_session(self, httpbin):
        """Create and reuse a unique config dir for each test."""
        self.config_dir = mk_config_dir()

    def teardown_method(self, method):
        shutil.rmtree(self.config_dir)

    def env(self):
        """
        Return an environment.

        Each environment created within a test method
        will share the same config_dir. This is necessary
        for session files to be reused.

        """
        return MockEnvironment(config_dir=self.config_dir)


class TestSessionFlow(SessionTestBase):
    """
    These tests start with an existing session created in `setup_method()`.

    """

    def start_session(self, httpbin):
        """
        Start a full-blown session with a custom request header,
        authorization, and response cookies.

        """
        super(TestSessionFlow, self).start_session(httpbin)
        r1 = http('--follow', '--session=test', '--auth=username:password',
                  'GET', httpbin.url + '/cookies/set?hello=world',
                  'Hello:World',
                  env=self.env())
        assert HTTP_OK in r1

    def test_session_created_and_reused(self, httpbin):
        self.start_session(httpbin)
        # Verify that the session created in setup_method() has been used.
        r2 = http('--session=test',
                  'GET', httpbin.url + '/get', env=self.env())
        assert HTTP_OK in r2
        assert r2.json['headers']['Hello'] == 'World'
        assert r2.json['headers']['Cookie'] == 'hello=world'
        assert 'Basic ' in r2.json['headers']['Authorization']

    def test_session_update(self, httpbin):
        self.start_session(httpbin)
        # Get a response to a request from the original session.
        r2 = http('--session=test', 'GET', httpbin.url + '/get',
                  env=self.env())
        assert HTTP_OK in r2

        # Make a request modifying the session data.
        r3 = http('--follow', '--session=test', '--auth=username:password2',
                  'GET', httpbin.url + '/cookies/set?hello=world2',
                  'Hello:World2',
                  env=self.env())
        assert HTTP_OK in r3

        # Get a response to a request from the updated session.
        r4 = http('--session=test', 'GET', httpbin.url + '/get',
                  env=self.env())
        assert HTTP_OK in r4
        assert r4.json['headers']['Hello'] == 'World2'
        assert r4.json['headers']['Cookie'] == 'hello=world2'
        assert (r2.json['headers']['Authorization']
                != r4.json['headers']['Authorization'])

    def test_session_read_only(self, httpbin):
        self.start_session(httpbin)
        # Get a response from the original session.
        r2 = http('--session=test', 'GET', httpbin.url + '/get',
                  env=self.env())
        assert HTTP_OK in r2

        # Make a request modifying the session data but
        # with --session-read-only.
        r3 = http('--follow', '--session-read-only=test',
                  '--auth=username:password2', 'GET',
                  httpbin.url + '/cookies/set?hello=world2', 'Hello:World2',
                  env=self.env())
        assert HTTP_OK in r3

        # Get a response from the updated session.
        r4 = http('--session=test', 'GET', httpbin.url + '/get',
                  env=self.env())
        assert HTTP_OK in r4

        # Origin can differ on Travis.
        del r2.json['origin'], r4.json['origin']
        # Different for each request.

        # Should be the same as before r3.
        assert r2.json == r4.json


class TestSession(SessionTestBase):
    """Stand-alone session tests."""

    def test_session_ignored_header_prefixes(self, httpbin):
        self.start_session(httpbin)
        r1 = http('--session=test', 'GET', httpbin.url + '/get',
                  'Content-Type: text/plain',
                  'If-Unmodified-Since: Sat, 29 Oct 1994 19:43:31 GMT',
                  env=self.env())
        assert HTTP_OK in r1
        r2 = http('--session=test', 'GET', httpbin.url + '/get',
                  env=self.env())
        assert HTTP_OK in r2
        assert 'Content-Type' not in r2.json['headers']
        assert 'If-Unmodified-Since' not in r2.json['headers']

    def test_session_by_path(self, httpbin):
        self.start_session(httpbin)
        session_path = os.path.join(self.config_dir, 'session-by-path.json')
        r1 = http('--session=' + session_path, 'GET', httpbin.url + '/get',
                  'Foo:Bar', env=self.env())
        assert HTTP_OK in r1

        r2 = http('--session=' + session_path, 'GET', httpbin.url + '/get',
                  env=self.env())
        assert HTTP_OK in r2
        assert r2.json['headers']['Foo'] == 'Bar'

    @pytest.mark.skipif(
        sys.version_info >= (3,),
        reason="This test fails intermittently on Python 3 - "
               "see https://github.com/jakubroztocil/httpie/issues/282")
    def test_session_unicode(self, httpbin):
        self.start_session(httpbin)

        r1 = http('--session=test', u'--auth=test:' + UNICODE,
                  'GET', httpbin.url + '/get', u'Test:%s' % UNICODE,
                  env=self.env())
        assert HTTP_OK in r1

        r2 = http('--session=test', '--verbose', 'GET',
                  httpbin.url + '/get', env=self.env())
        assert HTTP_OK in r2

        # FIXME: Authorization *sometimes* is not present on Python3
        assert (r2.json['headers']['Authorization']
                == HTTPBasicAuth.make_header(u'test', UNICODE))
        # httpbin doesn't interpret utf8 headers
        assert UNICODE in r2

    def test_session_default_header_value_overwritten(self, httpbin):
        self.start_session(httpbin)
        # https://github.com/jakubroztocil/httpie/issues/180
        r1 = http('--session=test',
                  httpbin.url + '/headers', 'User-Agent:custom',
                  env=self.env())
        assert HTTP_OK in r1
        assert r1.json['headers']['User-Agent'] == 'custom'

        r2 = http('--session=test', httpbin.url + '/headers', env=self.env())
        assert HTTP_OK in r2
        assert r2.json['headers']['User-Agent'] == 'custom'

    def test_download_in_session(self, httpbin):
        # https://github.com/jakubroztocil/httpie/issues/412
        self.start_session(httpbin)
        cwd = os.getcwd()
        os.chdir(gettempdir())
        try:
            http('--session=test', '--download',
                 httpbin.url + '/get', env=self.env())
        finally:
            os.chdir(cwd)
tests/test_ssl.py — new file (115 lines)
@@ -0,0 +1,115 @@

import os

import pytest
import pytest_httpbin.certs
import requests.exceptions

from httpie import ExitStatus
from httpie.input import SSL_VERSION_ARG_MAPPING
from utils import HTTP_OK, TESTS_ROOT, http


try:
    # Handle OpenSSL errors, if installed.
    # See <https://github.com/jakubroztocil/httpie/issues/729>
    # noinspection PyUnresolvedReferences
    import OpenSSL.SSL
    ssl_errors = (
        requests.exceptions.SSLError,
        OpenSSL.SSL.Error,
    )
except ImportError:
    ssl_errors = (
        requests.exceptions.SSLError,
    )


CLIENT_CERT = os.path.join(TESTS_ROOT, 'client_certs', 'client.crt')
CLIENT_KEY = os.path.join(TESTS_ROOT, 'client_certs', 'client.key')
CLIENT_PEM = os.path.join(TESTS_ROOT, 'client_certs', 'client.pem')
# FIXME:
# We test against a local httpbin instance which uses a self-signed cert.
# Requests without --verify=<CA_BUNDLE> will fail with a verification error.
# See: https://github.com/kevin1024/pytest-httpbin#https-support
CA_BUNDLE = pytest_httpbin.certs.where()


@pytest.mark.parametrize('ssl_version', SSL_VERSION_ARG_MAPPING.keys())
def test_ssl_version(httpbin_secure, ssl_version):
    try:
        r = http(
            '--ssl', ssl_version,
            httpbin_secure + '/get'
        )
        assert HTTP_OK in r
    except ssl_errors as e:
        if ssl_version == 'ssl3':
            # pytest-httpbin doesn't support ssl3
            assert 'SSLV3_ALERT_HANDSHAKE_FAILURE' in str(e)
        else:
            raise


class TestClientCert:

    def test_cert_and_key(self, httpbin_secure):
        r = http(httpbin_secure + '/get',
                 '--cert', CLIENT_CERT,
                 '--cert-key', CLIENT_KEY)
        assert HTTP_OK in r

    def test_cert_pem(self, httpbin_secure):
        r = http(httpbin_secure + '/get',
                 '--cert', CLIENT_PEM)
        assert HTTP_OK in r

    def test_cert_file_not_found(self, httpbin_secure):
        r = http(httpbin_secure + '/get',
                 '--cert', '/__not_found__',
                 error_exit_ok=True)
        assert r.exit_status == ExitStatus.ERROR
        assert 'No such file or directory' in r.stderr

    def test_cert_file_invalid(self, httpbin_secure):
        with pytest.raises(ssl_errors):
            http(httpbin_secure + '/get',
                 '--cert', __file__)

    def test_cert_ok_but_missing_key(self, httpbin_secure):
        with pytest.raises(ssl_errors):
            http(httpbin_secure + '/get',
                 '--cert', CLIENT_CERT)


class TestServerCert:

    def test_verify_no_OK(self, httpbin_secure):
        r = http(httpbin_secure.url + '/get', '--verify=no')
        assert HTTP_OK in r

    @pytest.mark.parametrize('verify_value', ['false', 'fALse'])
    def test_verify_false_OK(self, httpbin_secure, verify_value):
        r = http(httpbin_secure.url + '/get', '--verify', verify_value)
        assert HTTP_OK in r

    def test_verify_custom_ca_bundle_path(
        self, httpbin_secure_untrusted
    ):
        r = http(httpbin_secure_untrusted + '/get', '--verify', CA_BUNDLE)
        assert HTTP_OK in r

    def test_self_signed_server_cert_by_default_raises_ssl_error(
        self,
        httpbin_secure_untrusted
    ):
        with pytest.raises(ssl_errors):
            http(httpbin_secure_untrusted.url + '/get')

    def test_verify_custom_ca_bundle_invalid_path(self, httpbin_secure):
        # since 2.14.0 requests raises IOError
        with pytest.raises(ssl_errors + (IOError,)):
            http(httpbin_secure.url + '/get', '--verify', '/__not_found__')

    def test_verify_custom_ca_bundle_invalid_bundle(self, httpbin_secure):
        with pytest.raises(ssl_errors):
            http(httpbin_secure.url + '/get', '--verify', __file__)
tests/test_stream.py — new file (44 lines)
@@ -0,0 +1,44 @@

import pytest

from httpie.compat import is_windows
from httpie.output.streams import BINARY_SUPPRESSED_NOTICE
from utils import http, MockEnvironment
from fixtures import BIN_FILE_CONTENT, BIN_FILE_PATH


# GET because httpbin 500s with binary POST body.


@pytest.mark.skipif(is_windows,
                    reason='Pretty redirect not supported under Windows')
def test_pretty_redirected_stream(httpbin):
    """Test that --stream works with prettified redirected output."""
    with open(BIN_FILE_PATH, 'rb') as f:
        env = MockEnvironment(colors=256, stdin=f,
                              stdin_isatty=False,
                              stdout_isatty=False)
        r = http('--verbose', '--pretty=all', '--stream', 'GET',
                 httpbin.url + '/get', env=env)
    assert BINARY_SUPPRESSED_NOTICE.decode() in r


def test_encoded_stream(httpbin):
    """Test that --stream works with non-prettified
    redirected terminal output."""
    with open(BIN_FILE_PATH, 'rb') as f:
        env = MockEnvironment(stdin=f, stdin_isatty=False)
        r = http('--pretty=none', '--stream', '--verbose', 'GET',
                 httpbin.url + '/get', env=env)
    assert BINARY_SUPPRESSED_NOTICE.decode() in r


def test_redirected_stream(httpbin):
    """Test that --stream works with non-prettified
    redirected terminal output."""
    with open(BIN_FILE_PATH, 'rb') as f:
        env = MockEnvironment(stdout_isatty=False,
                              stdin_isatty=False,
                              stdin=f)
        r = http('--pretty=none', '--stream', '--verbose', 'GET',
                 httpbin.url + '/get', env=env)
    assert BIN_FILE_CONTENT in r
tests/test_unicode.py — new file (97 lines)
@@ -0,0 +1,97 @@

# coding=utf-8
"""
Various unicode handling related tests.

"""
from utils import http, HTTP_OK
from fixtures import UNICODE


def test_unicode_headers(httpbin):
    # httpbin doesn't interpret utf8 headers
    r = http(httpbin.url + '/headers', u'Test:%s' % UNICODE)
    assert HTTP_OK in r


def test_unicode_headers_verbose(httpbin):
    # httpbin doesn't interpret utf8 headers
    r = http('--verbose', httpbin.url + '/headers', u'Test:%s' % UNICODE)
    assert HTTP_OK in r
    assert UNICODE in r


def test_unicode_form_item(httpbin):
    r = http('--form', 'POST', httpbin.url + '/post', u'test=%s' % UNICODE)
    assert HTTP_OK in r
    assert r.json['form'] == {'test': UNICODE}


def test_unicode_form_item_verbose(httpbin):
    r = http('--verbose', '--form',
             'POST', httpbin.url + '/post', u'test=%s' % UNICODE)
    assert HTTP_OK in r
    assert UNICODE in r


def test_unicode_json_item(httpbin):
    r = http('--json', 'POST', httpbin.url + '/post', u'test=%s' % UNICODE)
    assert HTTP_OK in r
    assert r.json['json'] == {'test': UNICODE}


def test_unicode_json_item_verbose(httpbin):
    r = http('--verbose', '--json',
             'POST', httpbin.url + '/post', u'test=%s' % UNICODE)
    assert HTTP_OK in r
    assert UNICODE in r


def test_unicode_raw_json_item(httpbin):
    r = http('--json', 'POST', httpbin.url + '/post',
             u'test:={ "%s" : [ "%s" ] }' % (UNICODE, UNICODE))
    assert HTTP_OK in r
    assert r.json['json'] == {'test': {UNICODE: [UNICODE]}}


def test_unicode_raw_json_item_verbose(httpbin):
    r = http('--json', 'POST', httpbin.url + '/post',
             u'test:={ "%s" : [ "%s" ] }' % (UNICODE, UNICODE))
    assert HTTP_OK in r
    assert r.json['json'] == {'test': {UNICODE: [UNICODE]}}


def test_unicode_url_query_arg_item(httpbin):
    r = http(httpbin.url + '/get', u'test==%s' % UNICODE)
    assert HTTP_OK in r
    assert r.json['args'] == {'test': UNICODE}, r


def test_unicode_url_query_arg_item_verbose(httpbin):
    r = http('--verbose', httpbin.url + '/get', u'test==%s' % UNICODE)
    assert HTTP_OK in r
    assert UNICODE in r


def test_unicode_url(httpbin):
    r = http(httpbin.url + u'/get?test=' + UNICODE)
    assert HTTP_OK in r
    assert r.json['args'] == {'test': UNICODE}

# def test_unicode_url_verbose(self):
#     r = http(httpbin.url + '--verbose', u'/get?test=' + UNICODE)
#     assert HTTP_OK in r


def test_unicode_basic_auth(httpbin):
    # it doesn't really authenticate us because httpbin
    # doesn't interpret the utf8-encoded auth
    http('--verbose', '--auth', u'test:%s' % UNICODE,
         httpbin.url + u'/basic-auth/test/' + UNICODE)


def test_unicode_digest_auth(httpbin):
    # it doesn't really authenticate us because httpbin
    # doesn't interpret the utf8-encoded auth
    http('--auth-type=digest',
         '--auth', u'test:%s' % UNICODE,
         httpbin.url + u'/digest-auth/auth/test/' + UNICODE)
tests/test_uploads.py — new file (75 lines)
@@ -0,0 +1,75 @@

import os

import pytest

from httpie.input import ParseError
from utils import MockEnvironment, http, HTTP_OK
from fixtures import FILE_PATH_ARG, FILE_PATH, FILE_CONTENT


class TestMultipartFormDataFileUpload:

    def test_non_existent_file_raises_parse_error(self, httpbin):
        with pytest.raises(ParseError):
            http('--form',
                 'POST', httpbin.url + '/post', 'foo@/__does_not_exist__')

    def test_upload_ok(self, httpbin):
        r = http('--form', '--verbose', 'POST', httpbin.url + '/post',
                 'test-file@%s' % FILE_PATH_ARG, 'foo=bar')
        assert HTTP_OK in r
        assert 'Content-Disposition: form-data; name="foo"' in r
        assert 'Content-Disposition: form-data; name="test-file";' \
               ' filename="%s"' % os.path.basename(FILE_PATH) in r
        assert FILE_CONTENT in r
        assert '"foo": "bar"' in r
        assert 'Content-Type: text/plain' in r

    def test_upload_multiple_fields_with_the_same_name(self, httpbin):
        r = http('--form', '--verbose', 'POST', httpbin.url + '/post',
                 'test-file@%s' % FILE_PATH_ARG,
                 'test-file@%s' % FILE_PATH_ARG)
        assert HTTP_OK in r
        assert r.count('Content-Disposition: form-data; name="test-file";'
                       ' filename="%s"' % os.path.basename(FILE_PATH)) == 2
        # Should be 4, but is 3 because httpbin
        # doesn't seem to support file field lists
        assert r.count(FILE_CONTENT) in [3, 4]
        assert r.count('Content-Type: text/plain') == 2


class TestRequestBodyFromFilePath:
    """
    `http URL @file'

    """

    def test_request_body_from_file_by_path(self, httpbin):
        r = http('--verbose',
                 'POST', httpbin.url + '/post', '@' + FILE_PATH_ARG)
        assert HTTP_OK in r
        assert FILE_CONTENT in r, r
        assert '"Content-Type": "text/plain"' in r

    def test_request_body_from_file_by_path_with_explicit_content_type(
            self, httpbin):
        r = http('--verbose',
                 'POST', httpbin.url + '/post', '@' + FILE_PATH_ARG,
                 'Content-Type:text/plain; charset=utf8')
        assert HTTP_OK in r
        assert FILE_CONTENT in r
        assert 'Content-Type: text/plain; charset=utf8' in r

    def test_request_body_from_file_by_path_no_field_name_allowed(
            self, httpbin):
        env = MockEnvironment(stdin_isatty=True)
        r = http('POST', httpbin.url + '/post', 'field-name@' + FILE_PATH_ARG,
                 env=env, error_exit_ok=True)
        assert 'perhaps you meant --form?' in r.stderr

    def test_request_body_from_file_by_path_no_data_items_allowed(
            self, httpbin):
        env = MockEnvironment(stdin_isatty=False)
        r = http('POST', httpbin.url + '/post', '@' + FILE_PATH_ARG, 'foo=bar',
                 env=env, error_exit_ok=True)
        assert 'cannot be mixed' in r.stderr
tests/test_windows.py — new file (31 lines)
@@ -0,0 +1,31 @@

import os
import tempfile

import pytest
from httpie.context import Environment

from utils import MockEnvironment, http
from httpie.compat import is_windows


@pytest.mark.skipif(not is_windows, reason='windows-only')
class TestWindowsOnly:

    @pytest.mark.skipif(True,
                        reason='this test for some reason kills the process')
    def test_windows_colorized_output(self, httpbin):
        # Spits out the colorized output.
        http(httpbin.url + '/get', env=Environment())


class TestFakeWindows:
    def test_output_file_pretty_not_allowed_on_windows(self, httpbin):
        env = MockEnvironment(is_windows=True)
        output_file = os.path.join(
            tempfile.gettempdir(),
            self.test_output_file_pretty_not_allowed_on_windows.__name__
        )
        r = http('--output', output_file,
                 '--pretty=all', 'GET', httpbin.url + '/get',
                 env=env, error_exit_ok=True)
        assert 'Only terminal output can be colorized on Windows' in r.stderr
tests/tests.py — 1678 changed lines (file diff suppressed because it is too large)
tests/utils.py — new file (254 lines)
@@ -0,0 +1,254 @@

# coding=utf-8
"""Utilities for HTTPie test suite."""
import os
import sys
import time
import json
import tempfile

from httpie import ExitStatus, EXIT_STATUS_LABELS
from httpie.context import Environment
from httpie.core import main
from httpie.compat import bytes, str


TESTS_ROOT = os.path.abspath(os.path.dirname(__file__))
CRLF = '\r\n'
COLOR = '\x1b['
HTTP_OK = '200 OK'
HTTP_OK_COLOR = (
    'HTTP\x1b[39m\x1b[38;5;245m/\x1b[39m\x1b'
    '[38;5;37m1.1\x1b[39m\x1b[38;5;245m \x1b[39m\x1b[38;5;37m200'
    '\x1b[39m\x1b[38;5;245m \x1b[39m\x1b[38;5;136mOK'
)


def mk_config_dir():
    dirname = tempfile.mkdtemp(prefix='httpie_config_')
    return dirname


def add_auth(url, auth):
    proto, rest = url.split('://', 1)
    return proto + '://' + auth + '@' + rest


class MockEnvironment(Environment):
    """Environment subclass with reasonable defaults for testing."""
    colors = 0
    stdin_isatty = True
    stdout_isatty = True
    is_windows = False

    def __init__(self, **kwargs):
        if 'stdout' not in kwargs:
            kwargs['stdout'] = tempfile.TemporaryFile(
                mode='w+b',
                prefix='httpie_stdout'
            )
        if 'stderr' not in kwargs:
            kwargs['stderr'] = tempfile.TemporaryFile(
                mode='w+t',
                prefix='httpie_stderr'
            )
        super(MockEnvironment, self).__init__(**kwargs)
        self._delete_config_dir = False

    @property
    def config(self):
        if not self.config_dir.startswith(tempfile.gettempdir()):
            self.config_dir = mk_config_dir()
            self._delete_config_dir = True
        return super(MockEnvironment, self).config

    def cleanup(self):
        if self._delete_config_dir:
            assert self.config_dir.startswith(tempfile.gettempdir())
            from shutil import rmtree
            rmtree(self.config_dir)

    def __del__(self):
        try:
            self.cleanup()
        except Exception:
            pass


class BaseCLIResponse(object):
    """
    Represents the result of simulated `$ http' invocation via `http()`.

    Holds and provides access to:

        - stdout output: print(self)
        - stderr output: print(self.stderr)
        - exit_status output: print(self.exit_status)

    """
    stderr = None
    json = None
    exit_status = None


class BytesCLIResponse(bytes, BaseCLIResponse):
    """
    Used as a fallback when a StrCLIResponse cannot be used.

    E.g. when the output contains binary data or when it is colorized.

    `.json` will always be None.

    """


class StrCLIResponse(str, BaseCLIResponse):

    @property
    def json(self):
        """
        Return the deserialized JSON body, if one is included in the output
        and is parseable.

        """
        if not hasattr(self, '_json'):
            self._json = None
            # De-serialize JSON body if possible.
            if COLOR in self:
                # Colorized output cannot be parsed.
                pass
            elif self.strip().startswith('{'):
                # Looks like JSON body.
                self._json = json.loads(self)
            elif (self.count('Content-Type:') == 1
                    and 'application/json' in self):
                # Looks like a whole JSON HTTP message,
                # try to extract its body.
                try:
                    j = self.strip()[self.strip().rindex('\r\n\r\n'):]
                except ValueError:
                    pass
                else:
                    try:
                        self._json = json.loads(j)
                    except ValueError:
                        pass
        return self._json


class ExitStatusError(Exception):
    pass


def http(*args, **kwargs):
    # noinspection PyUnresolvedReferences
    """
    Run HTTPie and capture stderr/out and exit status.

    Invoke `httpie.core.main()` with `args` and `kwargs`,
    and return a `CLIResponse` subclass instance.

    The return value is either a `StrCLIResponse`, or `BytesCLIResponse`
    if unable to decode the output.

    The response has the following attributes:

        `stdout` is represented by the instance itself (print r)
        `stderr`: text written to stderr
        `exit_status`: the exit status
        `json`: decoded JSON (if possible) or `None`

    Exceptions are propagated.

    If you pass ``error_exit_ok=True``, then error exit statuses
    won't result in an exception.

    Example:

    $ http --auth=user:password GET httpbin.org/basic-auth/user/password

        >>> httpbin = getfixture('httpbin')
        >>> r = http('-a', 'user:pw', httpbin.url + '/basic-auth/user/pw')
        >>> type(r) == StrCLIResponse
        True
        >>> r.exit_status
        0
        >>> r.stderr
        ''
        >>> 'HTTP/1.1 200 OK' in r
        True
        >>> r.json == {'authenticated': True, 'user': 'user'}
        True

    """
    error_exit_ok = kwargs.pop('error_exit_ok', False)
    env = kwargs.get('env')
    if not env:
        env = kwargs['env'] = MockEnvironment()

    stdout = env.stdout
    stderr = env.stderr

    args = list(args)
    args_with_config_defaults = args + env.config.default_options
    add_to_args = []
    if '--debug' not in args_with_config_defaults:
        if not error_exit_ok and '--traceback' not in args_with_config_defaults:
            add_to_args.append('--traceback')
        if not any('--timeout' in arg for arg in args_with_config_defaults):
            add_to_args.append('--timeout=3')
    args = add_to_args + args

    def dump_stderr():
        stderr.seek(0)
        sys.stderr.write(stderr.read())

    try:
        try:
            exit_status = main(args=args, **kwargs)
            if '--download' in args:
                # Let the progress reporter thread finish.
                time.sleep(.5)
        except SystemExit:
            if error_exit_ok:
                exit_status = ExitStatus.ERROR
            else:
                dump_stderr()
                raise
        except Exception:
            stderr.seek(0)
            sys.stderr.write(stderr.read())
            raise
        else:
            if not error_exit_ok and exit_status != ExitStatus.SUCCESS:
                dump_stderr()
                raise ExitStatusError(
                    'httpie.core.main() unexpectedly returned'
                    ' a non-zero exit status: {0} ({1})'.format(
                        exit_status,
                        EXIT_STATUS_LABELS[exit_status]
                    )
                )

        stdout.seek(0)
        stderr.seek(0)
        output = stdout.read()
        try:
            output = output.decode('utf8')
        except UnicodeDecodeError:
            # noinspection PyArgumentList
            r = BytesCLIResponse(output)
        else:
            # noinspection PyArgumentList
            r = StrCLIResponse(output)
        r.stderr = stderr.read()
        r.exit_status = exit_status

        if r.exit_status != ExitStatus.SUCCESS:
            sys.stderr.write(r.stderr)

        return r

    finally:
        stdout.close()
        stderr.close()
        env.cleanup()
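Editor's note (not part of the diff): these helpers are what the per-feature test modules above are written against; a test calls `http()` with CLI arguments, optionally passes a `MockEnvironment` to control TTY and config behaviour, and then inspects the returned response object. A minimal sketch of that pattern, using only names defined in this file and the `httpbin` fixture used throughout the suite (the test name itself is illustrative):

def test_sketch_get(httpbin):
    # http() runs httpie.core.main() and captures stdout, stderr, and the exit status.
    r = http('GET', httpbin.url + '/get')
    assert HTTP_OK in r                          # stdout is the response object itself
    assert r.exit_status == ExitStatus.SUCCESS   # numeric exit status from main()
    assert 'Accept' in r.json['headers']         # body deserialized via StrCLIResponse.json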
tox.ini — 27 changed lines
@@ -1,13 +1,26 @@
 # Tox (http://tox.testrun.org/) is a tool for running tests
-# in multiple virtualenvs. This configuration file will run the
-# test suite on all supported python versions. To use it, "pip install tox"
-# and then run "tox" from this directory.
+# in multiple virtualenvs. See ./CONTRIBUTING.rst
 
 [tox]
-envlist = py26, py27, py33, pypy
+# pypy3 currently fails because of a Flask issue
+envlist = py27, py37, pypy
 
 [testenv]
-commands = {envpython} setup.py test
-
-[testenv:py26]
-deps = argparse
+deps =
+    mock
+    pytest
+    pytest-httpbin>=0.0.6
+
+commands =
+    # NOTE: the order of the directories in posargs seems to matter.
+    # When changed, many ImportMismatchError exceptions occur.
+    py.test \
+        --verbose \
+        --doctest-modules \
+        {posargs:./httpie ./tests}
+
+[testenv:py27-osx-builtin]
+basepython = /usr/bin/python2.7
Block a user