Mirror of https://github.com/httpie/cli.git (synced 2025-08-14 01:08:22 +02:00)
Compare commits
389 Commits
SHA1 | | |
---|---|---|
4eaa4d67c5 | |||
9764cc74a4 | |||
778360cde1 | |||
60a7ed4e7b | |||
185af7c9f1 | |||
7e9e7c783f | |||
6039bd8582 | |||
e7d8b9cece | |||
a62391e789 | |||
41666d897f | |||
71008bbedb | |||
85110643e7 | |||
fdd486415a | |||
6c501d23c3 | |||
d10e108b5f | |||
8618f12fce | |||
dac0d716c1 | |||
0340f8caf5 | |||
d7caeaf372 | |||
54c8612452 | |||
4ff22defe4 | |||
c50e287c57 | |||
6bd6648545 | |||
6633b5ae9b | |||
c6cbc7dfa5 | |||
11399dde76 | |||
da47e37c44 | |||
a66af2497a | |||
a94d6d807c | |||
de13423839 | |||
04d05a8abd | |||
2f8d7f77bd | |||
aee77a23af | |||
64c31d554a | |||
41c251ec7c | |||
147a066dbe | |||
b7300c1096 | |||
5d4e7a9a18 | |||
5717fb1ad5 | |||
9c38da96b0 | |||
e5bda98ee7 | |||
c8d70e8c0b | |||
ae6f57dc76 | |||
b4c94e0f26 | |||
7ceb313ccf | |||
3228e74df5 | |||
dc4309771e | |||
07a0359316 | |||
2d55c01c7e | |||
1470ca0c77 | |||
9857693ebf | |||
da03a0656e | |||
61f1ffd0eb | |||
9792513c68 | |||
350f973f70 | |||
8d35a12d27 | |||
8374a9ed83 | |||
a61f9e1114 | |||
611b278b63 | |||
175e36da6b | |||
19e1e26d97 | |||
9b5aedb02d | |||
3865fabf09 | |||
355befcbfc | |||
fc7a349d36 | |||
06ef27c576 | |||
0e556ec3a8 | |||
464b5b4c1d | |||
264d45cdf5 | |||
0ff0874fa3 | |||
39314887c4 | |||
f9a488d47e | |||
0001297f41 | |||
e2d43c14ce | |||
a3a08a9a22 | |||
7cbdf2c608 | |||
1274d869f6 | |||
611bcdaab1 | |||
fc45bf0fe3 | |||
56c4ba1794 | |||
8f83bfe767 | |||
a32ad344dd | |||
c53fbe5ae3 | |||
070ba9fa5a | |||
82ee071362 | |||
97dffb35a2 | |||
18af03ac18 | |||
904dd4107a | |||
8efabc86e6 | |||
cc20488f49 | |||
b918972862 | |||
84c7327057 | |||
e944dbd7fa | |||
157f3a1840 | |||
61dbadb730 | |||
7be25d0751 | |||
5d5a8b4091 | |||
bb36897054 | |||
173e622567 | |||
3426030370 | |||
d014498713 | |||
5414d1853e | |||
1ac8f69651 | |||
3c07a25326 | |||
cf78a12e46 | |||
0f1e098cc4 | |||
0401d7b31c | |||
795627f965 | |||
21cc008cb2 | |||
77b8c37cb0 | |||
db685d58b5 | |||
44ae67685d | |||
6922a0c912 | |||
2afdc958c6 | |||
57b1baf1d1 | |||
1828da6a50 | |||
0629f2ff42 | |||
d71b7eee81 | |||
9883a46575 | |||
2409077a6d | |||
02971b938d | |||
f7e77efe4b | |||
5d8bd0da7c | |||
3c6e7c73fe | |||
d64c0ee415 | |||
311a5ede70 | |||
f64c90010f | |||
8456ddb27c | |||
cf254680b7 | |||
42c4a7596b | |||
1573058811 | |||
51bc8fb2c6 | |||
a69d6f44fd | |||
507cd6e255 | |||
759e4400d0 | |||
8cb1af7376 | |||
2f8d330b57 | |||
32d8b481e9 | |||
75f1e02215 | |||
70ba84dc48 | |||
5a5b42340f | |||
299250b3c3 | |||
6925d930da | |||
c1948f8340 | |||
b80ba040ac | |||
b7754f92ce | |||
e4e40e5b06 | |||
d12af4a569 | |||
c431ed7728 | |||
16ef08a159 | |||
100872b5cf | |||
664cebfbcc | |||
743f9738a3 | |||
69445c106c | |||
1813cf6156 | |||
a23b0e39e5 | |||
06dec4e6c6 | |||
ce185bd0fa | |||
1e1dbfeba0 | |||
5a908aa411 | |||
6cd934d1b8 | |||
d32c8cab12 | |||
5ce7c190e9 | |||
1aa1366f99 | |||
2c7f24e3e5 | |||
c90d039a0b | |||
ae22d4e754 | |||
69e1067a2c | |||
7e38f9ccf0 | |||
d546081340 | |||
6421c145d9 | |||
61e7cd786e | |||
4bd2e622a5 | |||
a4a1e8d43b | |||
ebf2139fd5 | |||
6c84cebed4 | |||
10246366da | |||
a448b0d928 | |||
0541490dda | |||
3704db9b6d | |||
d1665b08d2 | |||
1a4e0c2646 | |||
0d480139e4 | |||
9931747901 | |||
8891afa3b7 | |||
4f493d51f8 | |||
cf937b6b79 | |||
14677bd25d | |||
49e71d252f | |||
d6f25b1017 | |||
a434cddd42 | |||
55d7af86fd | |||
978aace86c | |||
ecdeffe7c8 | |||
9500ce136a | |||
93d07cfe57 | |||
5945845420 | |||
3ee5b49256 | |||
bb024757b6 | |||
d35864e79d | |||
8a106781be | |||
23dd80563f | |||
2bab69d9fb | |||
826489950d | |||
b86598886e | |||
c240162cab | |||
26e29612f2 | |||
37200eb055 | |||
9c68d7dd87 | |||
7ee519ef46 | |||
c4627cc882 | |||
492687b0da | |||
caeef2fb7c | |||
aae596d472 | |||
cb51faec51 | |||
c2a0cef76e | |||
493e98c833 | |||
ca02e51420 | |||
cd085cbc0d | |||
27d57ce773 | |||
4c4efff56a | |||
a53505f26e | |||
165dc36f8d | |||
5df3a91619 | |||
7dbceafc01 | |||
d62d6a77d1 | |||
0a81facccf | |||
3e20ade645 | |||
0c47094109 | |||
defe4bc76d | |||
afee6a7970 | |||
7b676dd583 | |||
5af0874ed3 | |||
e11a2d1346 | |||
b2044fc18d | |||
d9a2d665ad | |||
e83e275dff | |||
4a99495466 | |||
495f67229a | |||
45b9bae3dc | |||
774ff148cd | |||
70a78249c1 | |||
fc85988368 | |||
83bd8059de | |||
3af5f1f305 | |||
4351650691 | |||
770976a66e | |||
29b692d597 | |||
8936d1b71e | |||
4f32b76223 | |||
c9d770017e | |||
cdf691c212 | |||
684a4708d7 | |||
5754e33a75 | |||
14fe7dbb27 | |||
3a6ac7d126 | |||
e9080e6b22 | |||
c73858b9c3 | |||
7340b2b64d | |||
8d246415fd | |||
381dd4f619 | |||
e6bad645ed | |||
6e9cd139a6 | |||
deee2dffd0 | |||
c3be722188 | |||
a7e5228712 | |||
5d628756ab | |||
364edc4bd8 | |||
ce5ca6c480 | |||
4b524e6a8c | |||
e4a3ce8b9d | |||
348cc7d5c5 | |||
ab3ea24630 | |||
cd5116705c | |||
38bc578744 | |||
1bc54d4cb4 | |||
fe8b547cc7 | |||
5aa9ed795e | |||
c82d9b629f | |||
e8b22d8b51 | |||
585cc0c039 | |||
615d887513 | |||
89faec994a | |||
490eeaa650 | |||
f1ab816ecd | |||
6e2c31a5a9 | |||
0608b5869f | |||
fcc3aaf873 | |||
dcd6b63e45 | |||
ab2bda3ffe | |||
7390869cd6 | |||
0af486d1b7 | |||
6cb822255d | |||
f202f338a4 | |||
f0058eeaee | |||
a23b636a63 | |||
fc497daf7d | |||
b48ba74ce2 | |||
9bae27354e | |||
d9b3a16fa6 | |||
f031b8cc8b | |||
2dbafe27ed | |||
3affc245c4 | |||
85da430d16 | |||
a42b275ae2 | |||
37fa67cd3c | |||
0df4db7bb4 | |||
374c371ef1 | |||
64c81fc2ec | |||
0252c2642e | |||
b53ace480a | |||
79b0f65fef | |||
ed6156084f | |||
92fe452f92 | |||
0169151aa3 | |||
525449f044 | |||
3c4a5e7304 | |||
d9aadeef51 | |||
2bb54da368 | |||
3fa583e591 | |||
b7767b3c62 | |||
a5d9a839e5 | |||
2ffd8d9d9b | |||
7f80408945 | |||
3ec5c4a643 | |||
3909a436a9 | |||
a77f660ba7 | |||
548857f35a | |||
8741438484 | |||
3176785a5f | |||
c8fd4c2d6e | |||
99f8a8c23d | |||
f866778421 | |||
5a4392076a | |||
bece3c77bb | |||
c946b3d34f | |||
45e8e4e4ea | |||
bd3208cf24 | |||
4dffac7a25 | |||
a34b3d9d87 | |||
30624e66ec | |||
d603502960 | |||
09cd85918e | |||
b947d4826a | |||
e8ef5a783f | |||
82a224a658 | |||
9da5c41704 | |||
224519e0e2 | |||
aba3b1ec01 | |||
466df77b6b | |||
3ea75a3577 | |||
3e24827f4d | |||
1dc67a6a38 | |||
a5713f7190 | |||
0f654388fc | |||
63df735fef | |||
2579827418 | |||
9bd8b4e8f7 | |||
d998013655 | |||
ced9212c1f | |||
07da8ea852 | |||
8e04a24b90 | |||
8512a630f9 | |||
2da2cec83c | |||
a4d8f1f22e | |||
5ec954c03d | |||
2deaccf2d1 | |||
46c4f4e225 | |||
2d16494845 | |||
bb4f101c1e | |||
82081c889b | |||
05fc9c480a | |||
e93de1fbe7 | |||
a969013bdd | |||
65601f09b2 | |||
0f439a5dab | |||
b3d2c1876e | |||
c297af0012 | |||
f27b626a96 | |||
c1d5a4a109 | |||
db3016a602 | |||
4dd9dbd314 | |||
29df4cd4f3 | |||
4d299a5531 | |||
add6601009 | |||
fa96041ec8 | |||
3dccb2e325 | |||
0a0de1755e | |||
c5ca9d248e |

@@ -1,4 +1,4 @@
-# http://editorconfig.org
+# https://editorconfig.org
 root = true
 
 [*]

12  .github/FUNDING.yml  (vendored, deleted)
@@ -1,12 +0,0 @@
# These are supported funding model platforms

github: # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2]
patreon: # Replace with a single Patreon username
open_collective: # Replace with a single Open Collective username
ko_fi: # Replace with a single Ko-fi username
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
liberapay: # Replace with a single Liberapay username
issuehunt: # Replace with a single IssueHunt username
otechie: # Replace with a single Otechie username
custom: https://paypal.me/roztocil

40  .github/ISSUE_TEMPLATE/bug_report.md  (vendored, new file)
@@ -0,0 +1,40 @@
---
name: Bug report
about: Report a possible bug in HTTPie
title: ''
labels: "new, bug"
assignees: ''

---

**Checklist**

- [ ] I've searched for similar issues.
- [ ] I'm using the latest version of HTTPie.

---

**What are the steps to reproduce the problem?**

1.
2.
3.

**What is the expected result?**

**What happens instead?**

**Debug output**

Please re-run the command with `--debug`, then copy the entire command & output and paste both below:

```
$ http --debug <COMPLETE ARGUMENT LIST THAT TRIGGERS THE ERROR>
<COMPLETE OUTPUT>
```

**Provide any additional information, screenshots, or code examples below:**

24  .github/ISSUE_TEMPLATE/feature_request.md  (vendored, new file)
@@ -0,0 +1,24 @@
---
name: Feature request
about: Suggest an enhancement for HTTPie
title: ''
labels: "new, enhancement"
assignees: ''

---

**Checklist**

- [ ] I've searched for similar feature requests.

---

**What enhancement would you like to see?**

**What problem does it solve?**

E.g. “I'm always frustrated when [...]”, “I’m trying to do […] so that […]”.

**Provide any additional information, screenshots, or code examples below:**

10  .github/ISSUE_TEMPLATE/other.md  (vendored, new file)
@@ -0,0 +1,10 @@
---
name: Other
about: Anything else that isn't a feature or a bug
title: ''
labels: "new"
assignees: ''

---

If you have a general question, please consider asking on Discord: https://httpie.io/chat

9  .github/dependabot.yml  (vendored, new file)
@@ -0,0 +1,9 @@
version: 2
updates:
  # GitHub Actions
  - package-ecosystem: github-actions
    directory: /
    schedule:
      interval: daily
    assignees:
      - BoboTiG

35  .github/workflows/build.yml  (vendored, new file)
@@ -0,0 +1,35 @@
name: Build
on: [push, pull_request]
jobs:
  extras:
    # Run coverage and extra tests only once
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-python@v2
        with:
          python-version: 3.9
      - run: python -m pip install --upgrade pip setuptools wheel
      - run: make install
      - run: make codestyle
      - run: make test-cover
      - run: make codecov-upload
        env:
          CODECOV_TOKEN: ${{ secrets.CODECOV_REPO_TOKEN }}
      - run: make test-dist
  test:
    # Run core HTTPie tests everywhere
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, macOS-latest, windows-latest]
        python-version: [3.6, 3.7, 3.8, 3.9, "3.10.0-rc.1"]
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}
      - run: python -m pip install --upgrade pip setuptools wheel
      - run: python -m pip install --upgrade '.[dev]'
      - run: python -m pytest --verbose ./httpie ./tests
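
The matrix job above boils down to a dev install plus pytest, and the extras job to a handful of Makefile targets. A minimal sketch for reproducing the same checks locally, assuming a checkout of the repository:

```bash
# Same steps the "test" job runs on every OS/Python combination
python -m pip install --upgrade pip setuptools wheel
python -m pip install --upgrade '.[dev]'
python -m pytest --verbose ./httpie ./tests

# The "extras" job additionally exercises these Makefile targets
# (make codecov-upload is CI-only: it needs CODECOV_TOKEN)
make install
make codestyle
make test-cover
make test-dist
```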

145  .gitignore  (vendored)
@@ -1,13 +1,146 @@
 .DS_Store
 .idea/
-__pycache__/
-dist/
-build/
 *.egg-info
 .cache/
-.tox/
-.coverage
 *.pyc
-*.egg
 htmlcov
+
+
+##############################################################################
+# The below is GitHub template for Python project. gitignore.
+# <https://github.com/github/gitignore/blob/master/Python.gitignore>
+##############################################################################
+
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+pip-wheel-metadata/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
 .pytest_cache/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+.python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# Packit
+/httpie.spec
+/httpie-*.rpm
+/httpie-*.tar.gz

21  .packit.yaml  (new file)
@@ -0,0 +1,21 @@
# See the documentation for more information:
# https://packit.dev/docs/configuration/
specfile_path: httpie.spec
actions:
  # the current Fedora Rawhide specfile has some patches
  # so we get it from @hroncok's (= churchyard in Fedora) fork for now
  # once we have a new release, we'll use: https://src.fedoraproject.org/rpms/httpie/raw/rawhide/f/httpie.spec
  post-upstream-clone: "wget https://src.fedoraproject.org/fork/churchyard/rpms/httpie/raw/packit/f/httpie.spec -O httpie.spec"
jobs:
  - job: copr_build
    trigger: pull_request
    metadata:
      targets:
        - fedora-all
      additional_repos:
        - "https://kojipkgs.fedoraproject.org/repos/f$releasever-build/latest/$basearch/"
  - job: propose_downstream
    trigger: release
    metadata:
      dist_git_branches:
        - rawhide

96  .travis.yml  (deleted)
@@ -1,96 +0,0 @@
# <https://travis-ci.org/jakubroztocil/httpie>
sudo: false
language: python
os:
  - linux
env:
  global:
    - NEWEST_PYTHON=3.7
python:
  # <https://docs.travis-ci.com/user/languages/python/>
  - 2.7

  # Python 3.4 fails installing packages
  # <https://travis-ci.org/jakubroztocil/httpie/jobs/403263566#L636>
  # - 3.4

  - 3.5
  - 3.6
  # - 3.7 # is done in the matrix below as described in travis-ci/travis-ci#9069
  - pypy

  # pypy3 currently fails because of a Flask issue
  # - pypy3

cache: pip
matrix:
  include:
    # Add manually defined OS X builds
    # <https://docs.travis-ci.com/user/multi-os/#Python-example-(unsupported-languages)>
    - os: osx
      language: generic
      env:
        # Stock OSX Python
        - TOXENV=py27-osx-builtin
        - BREW_PYTHON_PACKAGE=
    - os: osx
      language: generic
      env:
        # Latest Python 2.7 from Homebrew
        - TOXENV=py27
        - BREW_PYTHON_PACKAGE=python@2
    - os: osx
      language: generic
      env:
        # Latest Python 3.x from Homebrew
        - TOXENV=py37 # <= needs to be kept up-to-date to reflect latest minor version
        - BREW_PYTHON_PACKAGE=python@3
    # Travis Python 3.7 must run sudo on
    - os: linux
      python: 3.7
      env: TOXENV=py37
      sudo: true   # Required for Python 3.7
      dist: xenial # Required for Python 3.7
    # Add a codestyle-only build
    - os: linux
      python: 3.6
      env: CODESTYLE_ONLY=true
install:
  - |
    if [[ $TRAVIS_OS_NAME == 'osx' ]]; then
      if [[ -n "$BREW_PYTHON_PACKAGE" ]]; then
        brew update
        if ! brew list --versions "$BREW_PYTHON_PACKAGE" >/dev/null; then
          brew install "$BREW_PYTHON_PACKAGE"
        elif ! brew outdated "$BREW_PYTHON_PACKAGE"; then
          brew upgrade "$BREW_PYTHON_PACKAGE"
        fi
      fi
      sudo pip2 install tox
    fi
script:
  - |
    if [[ $TRAVIS_OS_NAME == 'linux' ]]; then
      if [[ $CODESTYLE_ONLY ]]; then
        make pycodestyle
      else
        make test
      fi
    else
      PATH="/usr/local/bin:$PATH" tox -e "$TOXENV"
    fi
after_success:
  - |
    if [[ $TRAVIS_PYTHON_VERSION == $NEWEST_PYTHON && $TRAVIS_OS_NAME == 'linux' ]]; then
      make coveralls
    fi
notifications:
  webhooks:
    # options: [always|never|change] default: always
    on_success: always
    on_failure: always
    on_start: always
    urls:
      # https://gitter.im/jkbrzt/httpie
      - https://webhooks.gitter.im/e/c42fcd359a110d02830b

41  AUTHORS.md  (new file)
@@ -0,0 +1,41 @@
# HTTPie authors

- [Jakub Roztocil](https://github.com/jakubroztocil)

## Patches, features, ideas

[Complete list of contributors on GitHub](https://github.com/httpie/httpie/graphs/contributors)

- [Cláudia T. Delgado](https://github.com/claudiatd)
- [Hank Gay](https://github.com/gthank)
- [Jake Basile](https://github.com/jakebasile)
- [Vladimir Berkutov](https://github.com/dair-targ)
- [Jakob Kramer](https://github.com/gandaro)
- [Chris Faulkner](https://github.com/faulkner)
- [Alen Mujezinovic](https://github.com/flashingpumpkin)
- [Praful Mathur](https://github.com/tictactix)
- [Marc Abramowitz](https://github.com/msabramo)
- [Ismail Badawi](https://github.com/isbadawi)
- [Laurent Bachelier](https://github.com/laurentb)
- [Isman Firmansyah](https://github.com/iromli)
- [Simon Olofsson](https://github.com/simono)
- [Churkin Oleg](https://github.com/Bahus)
- [Jökull Sólberg Auðunsson](https://github.com/jokull)
- [Matthew M. Boedicker](https://github.com/mmb)
- [marblar](https://github.com/marblar)
- [Tomek Wójcik](https://github.com/tomekwojcik)
- [Davey Shafik](https://github.com/dshafik)
- [cido](https://github.com/cido)
- [Justin Bonnar](https://github.com/jargonjustin)
- [Nathan LaFreniere](https://github.com/nlf)
- [Matthias Lehmann](https://github.com/matleh)
- [Dennis Brakhane](https://github.com/brakhane)
- [Matt Layman](https://github.com/mblayman)
- [Edward Yang](https://github.com/honorabrutroll)
- [Aleksandr Vinokurov](https://github.com/aleksandr-vin)
- [Jeff Byrnes](https://github.com/jeffbyrnes)
- [Denis Belavin](https://github.com/LuckyDenis)
- [Mickaël Schoentgen](https://github.com/BoboTiG)
- [Elena Lape](https://github.com/elenalape)
- [Rohit Sehgal](https://github.com/r0hi7)
- [Bartłomiej Jacak](https://github.com/bartekjacak)

40  AUTHORS.rst  (deleted)
@@ -1,40 +0,0 @@
==============
HTTPie authors
==============

* `Jakub Roztocil <https://github.com/jakubroztocil>`_


Patches and ideas
-----------------

`Complete list of contributors on GitHub <https://github.com/jakubroztocil/httpie/graphs/contributors>`_

* `Cláudia T. Delgado <https://github.com/claudiatd>`_ (logo)
* `Hank Gay <https://github.com/gthank>`_
* `Jake Basile <https://github.com/jakebasile>`_
* `Vladimir Berkutov <https://github.com/dair-targ>`_
* `Jakob Kramer <https://github.com/gandaro>`_
* `Chris Faulkner <https://github.com/faulkner>`_
* `Alen Mujezinovic <https://github.com/flashingpumpkin>`_
* `Praful Mathur <https://github.com/tictactix>`_
* `Marc Abramowitz <https://github.com/msabramo>`_
* `Ismail Badawi <https://github.com/isbadawi>`_
* `Laurent Bachelier <https://github.com/laurentb>`_
* `Isman Firmansyah <https://github.com/iromli>`_
* `Simon Olofsson <https://github.com/simono>`_
* `Churkin Oleg <https://github.com/Bahus>`_
* `Jökull Sólberg Auðunsson <https://github.com/jokull>`_
* `Matthew M. Boedicker <https://github.com/mmb>`_
* `marblar <https://github.com/marblar>`_
* `Tomek Wójcik <https://github.com/tomekwojcik>`_
* `Davey Shafik <https://github.com/dshafik>`_
* `cido <https://github.com/cido>`_
* `Justin Bonnar <https://github.com/jargonjustin>`_
* `Nathan LaFreniere <https://github.com/nlf>`_
* `Matthias Lehmann <https://github.com/matleh>`_
* `Dennis Brakhane <https://github.com/brakhane>`_
* `Matt Layman <https://github.com/mblayman>`_
* `Edward Yang <https://github.com/honorabrutroll>`_

367  CHANGELOG.md  (new file)
@@ -0,0 +1,367 @@
# Change Log

This document records all notable changes to [HTTPie](https://httpie.io).
This project adheres to [Semantic Versioning](https://semver.org/).

## [2.5.0](https://github.com/httpie/httpie/compare/2.4.0...2.5.0) (2021-09-06)

- Added `--raw` to allow specifying the raw request body without extra processing as
  an alternative to `stdin`. ([#534](https://github.com/httpie/httpie/issues/534))
- Added support for XML formatting. ([#1129](https://github.com/httpie/httpie/issues/1129))
- Added internal support for file-like object responses to improve adapter plugin support. ([#1094](https://github.com/httpie/httpie/issues/1094))
- Fixed `--continue --download` with a single byte to be downloaded left. ([#1032](https://github.com/httpie/httpie/issues/1032))
- Fixed `--verbose` HTTP 307 redirects with streamed request body. ([#1088](https://github.com/httpie/httpie/issues/1088))
- Fixed handling of session files with `Cookie:` followed by other headers. ([#1126](https://github.com/httpie/httpie/issues/1126))
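
The 2.5.0 `--raw` option takes the body directly from the command line instead of `stdin`. A minimal sketch (pie.dev is the example service used elsewhere in this changelog; the payload is illustrative):

```bash
# Send a literal string as the request body, with no data-item processing
http --raw 'raw request body' POST pie.dev/post

# Roughly equivalent stdin-based form that it complements
echo 'raw request body' | http POST pie.dev/post
```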

## [2.4.0](https://github.com/httpie/httpie/compare/2.3.0...2.4.0) (2021-02-06)

- Added support for `--session` cookie expiration based on `Set-Cookie: max-age=<n>`. ([#1029](https://github.com/httpie/httpie/issues/1029))
- Show a `--check-status` warning with `--quiet` as well, not only when the output is redirected. ([#1026](https://github.com/httpie/httpie/issues/1026))
- Fixed upload with `--session` ([#1020](https://github.com/httpie/httpie/issues/1020)).
- Fixed a missing blank line between request and response ([#1006](https://github.com/httpie/httpie/issues/1006)).
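
Since 2.4.0 the `--check-status` warning also surfaces when `--quiet` suppresses normal output. A short sketch of how the exit status can drive a script (the URL is just an example):

```bash
# Exit status mirrors the HTTP status class (3/4/5), so failures are scriptable
http --quiet --check-status pie.dev/status/500 || echo "request failed with status $?"
```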

## [2.3.0](https://github.com/httpie/httpie/compare/2.2.0...2.3.0) (2020-10-25)

- Added support for streamed uploads ([#201](https://github.com/httpie/httpie/issues/201)).
- Added support for multipart upload streaming ([#684](https://github.com/httpie/httpie/issues/684)).
- Added support for body-from-file upload streaming (`http pie.dev/post @file`).
- Added `--chunked` to enable chunked transfer encoding ([#753](https://github.com/httpie/httpie/issues/753)).
- Added `--multipart` to allow `multipart/form-data` encoding for non-file `--form` requests as well.
- Added support for preserving field order in multipart requests ([#903](https://github.com/httpie/httpie/issues/903)).
- Added `--boundary` to allow a custom boundary string for `multipart/form-data` requests.
- Added support for combining cookies specified on the CLI and in a session file ([#932](https://github.com/httpie/httpie/issues/932)).
- Added out of the box SOCKS support with no extra installation ([#904](https://github.com/httpie/httpie/issues/904)).
- Added `--quiet, -q` flag to enforce silent behaviour.
- Fixed the handling of invalid `expires` dates in `Set-Cookie` headers ([#963](https://github.com/httpie/httpie/issues/963)).
- Removed Tox testing entirely ([#943](https://github.com/httpie/httpie/issues/943)).
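
A hedged sketch of how the 2.3.0 upload options combine on the command line (file names are placeholders):

```bash
# Stream the request body from a file
http POST pie.dev/post @upload.bin

# Same upload, but with chunked transfer encoding
http --chunked POST pie.dev/post @upload.bin

# Force multipart/form-data for a non-file form request, with a custom boundary
http --form --multipart --boundary=XoXo POST pie.dev/post name=HTTPie
```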

## [2.2.0](https://github.com/httpie/httpie/compare/2.1.0...2.2.0) (2020-06-18)

- Added support for custom content types for uploaded files ([#668](https://github.com/httpie/httpie/issues/668)).
- Added support for `$XDG_CONFIG_HOME` ([#920](https://github.com/httpie/httpie/issues/920)).
- Added support for `Set-Cookie`-triggered cookie expiration ([#853](https://github.com/httpie/httpie/issues/853)).
- Added `--format-options` to allow disabling sorting, etc. ([#128](https://github.com/httpie/httpie/issues/128))
- Added `--sorted` and `--unsorted` shortcuts for (un)setting all sorting-related `--format-options`. ([#128](https://github.com/httpie/httpie/issues/128))
- Added `--ciphers` to allow configuring OpenSSL ciphers ([#870](https://github.com/httpie/httpie/issues/870)).
- Added `netrc` support for auth plugins. Enabled for `--auth-type=basic`
  and `digest`, 3rd parties may opt in ([#718](https://github.com/httpie/httpie/issues/718), [#719](https://github.com/httpie/httpie/issues/719), [#852](https://github.com/httpie/httpie/issues/852), [#934](https://github.com/httpie/httpie/issues/934)).
- Fixed built-in plugins-related circular imports ([#925](https://github.com/httpie/httpie/issues/925)).
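
Two of the 2.2.0 additions in action; a small sketch where the file name is a placeholder and the item is quoted because of the `;`:

```bash
# Upload a file with an explicit content type
http --form POST pie.dev/post 'cv@document.pdf;type=application/pdf'

# Keep headers and JSON keys in their original order
# (the shortcut for unsetting all sorting-related --format-options)
http --unsorted pie.dev/get
```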

## [2.1.0](https://github.com/httpie/httpie/compare/2.0.0...2.1.0) (2020-04-18)

- Added `--path-as-is` to bypass dot segment (`/../` or `/./`)
  URL squashing ([#895](https://github.com/httpie/httpie/issues/895)).
- Changed the default `Accept` header value for JSON requests from
  `application/json, */*` to `application/json, */*;q=0.5`
  to clearly indicate preference ([#488](https://github.com/httpie/httpie/issues/488)).
- Fixed `--form` file upload mixed with redirected `stdin` error handling
  ([#840](https://github.com/httpie/httpie/issues/840)).
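
`--path-as-is` sends the path exactly as typed instead of squashing dot segments first; a minimal illustration:

```bash
# Normally /a/../b would be normalised to /b before sending; this keeps it verbatim
http --path-as-is pie.dev/a/../b
```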

## [2.0.0](https://github.com/httpie/httpie/compare/1.0.3...2.0.0) (2020-01-12)

- Removed Python 2.7 support ([EOL Jan 2020](https://www.python.org/doc/sunset-python-2/)).
- Added `--offline` to allow building an HTTP request and printing it but not
  actually sending it over the network.
- Replaced the old collect-all-then-process handling of HTTP communication
  with one-by-one processing of each HTTP request or response as they become
  available. This means that you can see headers immediately,
  see what is being sent even if the request fails, etc.
- Removed automatic config file creation to avoid concurrency issues.
- Removed the default 30-second connection `--timeout` limit.
- Removed Python’s default limit of 100 response headers.
- Added `--max-headers` to allow setting the max header limit.
- Added `--compress` to allow request body compression.
- Added `--ignore-netrc` to allow bypassing credentials from `.netrc`.
- Added `https` alias command with `https://` as the default scheme.
- Added `$ALL_PROXY` documentation.
- Added type annotations throughout the codebase.
- Added `tests/` to the PyPI package for the convenience of
  downstream package maintainers.
- Fixed an error when `stdin` was a closed fd.
- Improved `--debug` output formatting.
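
A few of the 2.0.0 additions are easy to try from a shell; a brief sketch:

```bash
# Build and print the request without sending anything over the network
http --offline POST pie.dev/post name=HTTPie

# The new `https` alias defaults the scheme to https://
https example.org

# Compress the request body before sending it
http --compress POST pie.dev/post name=HTTPie
```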

## [1.0.3](https://github.com/httpie/httpie/compare/1.0.2...1.0.3) (2019-08-26)

- Fixed CVE-2019-10751 — the way the output filename is generated for
  `--download` requests without `--output` resulting in a redirect has
  been changed to only consider the initial URL as the base for the generated
  filename, and not the final one. This fixes a potential security issue under
  the following scenario:

  1. A `--download` request with no explicit `--output` is made (e.g.,
     `$ http -d example.org/file.txt`), instructing httpie to
     [generate the output filename](https://httpie.org/doc#downloaded-filename)
     from the `Content-Disposition` response header, or from the URL if the header
     is not provided.
  2. The server handling the request has been modified by an attacker and
     instead of the expected response the URL returns a redirect to another
     URL, e.g., `attacker.example.org/.bash_profile`, whose response does
     not provide a `Content-Disposition` header (i.e., the base for the
     generated filename becomes `.bash_profile` instead of `file.txt`).
  3. Your current directory doesn’t already contain `.bash_profile`
     (i.e., no unique suffix is added to the generated filename).
  4. You don’t notice the potentially unexpected output filename
     as reported by httpie in the console output
     (e.g., `Downloading 100.00 B to ".bash_profile"`).

  Reported by Raul Onitza and Giulio Comi.
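
The CVE only concerns filenames generated from the response; passing an explicit `--output` sidesteps the scenario entirely (the URL is illustrative):

```bash
# Pick the destination yourself instead of deriving it from a possibly redirected response
http --download --output file.txt example.org/file.txt
```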

## [1.0.2](https://github.com/httpie/httpie/compare/1.0.1...1.0.2) (2018-11-14)

- Fixed tests for installation with pyOpenSSL.

## [1.0.1](https://github.com/httpie/httpie/compare/1.0.0...1.0.1) (2018-11-14)

- Removed external URL calls from tests.

## [1.0.0](https://github.com/httpie/httpie/compare/0.9.9...1.0.0) (2018-11-02)

- Added `--style=auto` which follows the terminal ANSI color styles.
- Added support for selecting TLS 1.3 via `--ssl=tls1.3`
  (available once implemented in upstream libraries).
- Added `true`/`false` as valid values for `--verify`
  (in addition to `yes`/`no`) and the boolean value is case-insensitive.
- Changed the default `--style` from `solarized` to `auto` (on Windows it stays `fruity`).
- Fixed default headers being incorrectly case-sensitive.
- Removed Python 2.6 support.

## [0.9.9](https://github.com/httpie/httpie/compare/0.9.8...0.9.9) (2016-12-08)

- Fixed README.

## [0.9.8](https://github.com/httpie/httpie/compare/0.9.6...0.9.8) (2016-12-08)

- Extended auth plugin API.
- Added exit status code `7` for plugin errors.
- Added support for `curses`-less Python installations.
- Fixed `REQUEST_ITEM` arg incorrectly being reported as required.
- Improved `CTRL-C` interrupt handling.
- Added the standard exit status code `130` for keyboard interrupts.

## [0.9.6](https://github.com/httpie/httpie/compare/0.9.4...0.9.6) (2016-08-13)

- Added Python 3 as a dependency for Homebrew installations
  to ensure some of the newer HTTP features work out of the box
  for macOS users (starting with HTTPie 0.9.4).
- Added the ability to unset a request header with `Header:`, and send an
  empty value with `Header;`.
- Added `--default-scheme <URL_SCHEME>` to enable things like
  `$ alias https='http --default-scheme=https'`.
- Added `-I` as a shortcut for `--ignore-stdin`.
- Added fish shell completion (located in `extras/httpie-completion.fish`
  in the GitHub repo).
- Updated `requests` to 2.10.0 so that SOCKS support can be added via
  `pip install requests[socks]`.
- Changed the default JSON `Accept` header from `application/json`
  to `application/json, */*`.
- Changed the pre-processing of request HTTP headers so that any leading
  and trailing whitespace is removed.
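
Header unsetting and empty headers hang off the trailing punctuation of the header item; quote the `;` form so the shell does not treat it as a command separator:

```bash
# Suppress a default header entirely
http pie.dev/headers Accept:

# Send X-Empty with an empty value
http pie.dev/headers 'X-Empty;'
```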

## [0.9.4](https://github.com/httpie/httpie/compare/0.9.3...0.9.4) (2016-07-01)

- Added `Content-Type` of files uploaded in `multipart/form-data` requests
- Added `--ssl=<PROTOCOL>` to specify the desired SSL/TLS protocol version
  to use for HTTPS requests.
- Added JSON detection with `--json, -j` to work around incorrect
  `Content-Type`
- Added `--all` to show intermediate responses such as redirects (with `--follow`)
- Added `--history-print, -P WHAT` to specify formatting of intermediate responses
- Added `--max-redirects=N` (default 30)
- Added `-A` as short name for `--auth-type`
- Added `-F` as short name for `--follow`
- Removed the `implicit_content_type` config option
  (use `"default_options": ["--form"]` instead)
- Redirected `stdout` doesn't trigger an error anymore when `--output FILE`
  is set
- Changed the default `--style` back to `solarized` for better support
  of light and dark terminals
- Improved `--debug` output
- Fixed `--session` when used with `--download`
- Fixed `--download` to trim too long filenames before saving the file
- Fixed the handling of `Content-Type` with multiple `+subtype` parts
- Removed the XML formatter as the implementation suffered from multiple issues

## [0.9.3](https://github.com/httpie/httpie/compare/0.9.2...0.9.3) (2016-01-01)

- Changed the default color `--style` from `solarized` to `monokai`
- Added basic Bash autocomplete support (needs to be installed manually)
- Added request details to connection error messages
- Fixed `'requests.packages.urllib3' has no attribute 'disable_warnings'`
  errors that occurred in some installations
- Fixed colors and formatting on Windows
- Fixed `--auth` prompt on Windows

## [0.9.2](https://github.com/httpie/httpie/compare/0.9.1...0.9.2) (2015-02-24)

- Fixed compatibility with Requests 2.5.1
- Changed the default JSON `Content-Type` to `application/json` as UTF-8
  is the default JSON encoding

## [0.9.1](https://github.com/httpie/httpie/compare/0.9.0...0.9.1) (2015-02-07)

- Added support for Requests transport adapter plugins
  (see [httpie-unixsocket](https://github.com/httpie/httpie-unixsocket)
  and [httpie-http2](https://github.com/httpie/httpie-http2))

## [0.9.0](https://github.com/httpie/httpie/compare/0.8.0...0.9.0) (2015-01-31)

- Added `--cert` and `--cert-key` parameters to specify a client side
  certificate and private key for SSL
- Improved unicode support
- Improved terminal color depth detection via `curses`
- To make it easier to deal with Windows paths in request items, `\`
  now only escapes special characters (the ones that are used as key-value
  separators by HTTPie)
- Switched from `unittest` to `pytest`
- Added Python `wheel` support
- Various test suite improvements
- Added `CONTRIBUTING`
- Fixed `User-Agent` overwriting when used within a session
- Fixed handling of empty passwords in URL credentials
- Fixed multiple file uploads with the same form field name
- Fixed `--output=/dev/null` on Linux
- Miscellaneous bugfixes

## [0.8.0](https://github.com/httpie/httpie/compare/0.7.1...0.8.0) (2014-01-25)

- Added `field=@file.txt` and `field:=@file.json` for embedding
  the contents of text and JSON files into request data
- Added curl-style shorthand for localhost
- Fixed request `Host` header value output so that it doesn't contain
  credentials, if included in the URL
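
The `@` file-embedding items and the curl-style localhost shorthand look like this in practice (file names are placeholders):

```bash
# Shorthand for http://localhost:3000/api
http :3000/api

# Embed file contents into the request data: as a text field, and as parsed JSON
http POST pie.dev/post note=@note.txt settings:=@settings.json
```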

## [0.7.1](https://github.com/httpie/httpie/compare/0.6.0...0.7.1) (2013-09-24)

- Added `--ignore-stdin`
- Added support for auth plugins
- Improved `--help` output
- Improved `Content-Disposition` parsing for `--download` mode
- Updated to Requests 2.0.0

## [0.6.0](https://github.com/httpie/httpie/compare/0.5.1...0.6.0) (2013-06-03)

- XML data is now formatted
- `--session` and `--session-read-only` now also accept paths to
  session files (e.g. `http --session=/tmp/session.json example.org`)

## [0.5.1](https://github.com/httpie/httpie/compare/0.5.0...0.5.1) (2013-05-13)

- `Content-*` and `If-*` request headers are not stored in sessions
  anymore as they are request-specific

## [0.5.0](https://github.com/httpie/httpie/compare/0.4.1...0.5.0) (2013-04-27)

- Added a download mode via `--download`
- Fixed miscellaneous bugs

## [0.4.1](https://github.com/httpie/httpie/compare/0.4.0...0.4.1) (2013-02-26)

- Fixed `setup.py`

## [0.4.0](https://github.com/httpie/httpie/compare/0.3.0...0.4.0) (2013-02-22)

- Added Python 3.3 compatibility
- Added Requests >= v1.0.4 compatibility
- Added support for credentials in URL
- Added `--no-option` for every `--option` to be config-friendly
- Mutually exclusive arguments can be specified multiple times. The
  last value is used

## [0.3.0](https://github.com/httpie/httpie/compare/0.2.7...0.3.0) (2012-09-21)

- Allow output redirection on Windows
- Added configuration file
- Added persistent session support
- Renamed `--allow-redirects` to `--follow`
- Improved the usability of `http --help`
- Fixed installation on Windows with Python 3
- Fixed colorized output on Windows with Python 3
- CRLF HTTP header field separation in the output
- Added exit status code `2` for timed-out requests
- Added the option to separate colorizing and formatting
  (`--pretty=all`, `--pretty=colors` and `--pretty=format`);
  `--ugly` has been removed in favor of `--pretty=none`

## [0.2.7](https://github.com/httpie/httpie/compare/0.2.5...0.2.7) (2012-08-07)

- Added compatibility with Requests 0.13.6
- Added streamed terminal output. `--stream, -S` can be used to enable
  streaming also with `--pretty` and to ensure a more frequent output
  flushing
- Added support for efficient large file downloads
- Sort headers by name (unless `--pretty=none`)
- Response body is fetched only when needed (e.g., not with `--headers`)
- Improved content type matching
- Updated Solarized color scheme
- Windows: Added `--output FILE` to store output into a file
  (piping results in corrupted data on Windows)
- Proper handling of binary requests and responses
- Fixed printing of `multipart/form-data` requests
- Renamed `--traceback` to `--debug`

## [0.2.6](https://github.com/httpie/httpie/compare/0.2.5...0.2.6) (2012-07-26)

- The short option for `--headers` is now `-h` (`-t` has been
  removed; for usage, use `--help`)
- Form data and URL parameters can have multiple fields with the same name
  (e.g., `http -f url a=1 a=2`)
- Added `--check-status` to exit with an error on HTTP 3xx, 4xx and
  5xx (3, 4, and 5, respectively)
- If the output is piped to another program or redirected to a file,
  the default behaviour is to only print the response body
  (It can still be overwritten via the `--print` flag.)
- Improved highlighting of HTTP headers
- Added query string parameters (`param==value`)
- Added support for terminal colors under Windows
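
Query-string items and repeated form fields from 0.2.6 remain the canonical syntax; a quick sketch:

```bash
# '==' items become URL query parameters
http pie.dev/get search==httpie page==2

# Form fields with the same name are all sent
http -f POST pie.dev/post tag=cli tag=http
```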

## [0.2.5](https://github.com/httpie/httpie/compare/0.2.2...0.2.5) (2012-07-17)

- Unicode characters in prettified JSON now don't get escaped for
  improved readability
- `--auth` now prompts for a password if only a username is provided
- Added support for request payloads from a file path with automatic
  `Content-Type` (`http URL @/path`)
- Fixed missing query string when displaying the request headers via
  `--verbose`
- Fixed `Content-Type` for requests with no data

## [0.2.2](https://github.com/httpie/httpie/compare/0.2.1...0.2.2) (2012-06-24)

- The `METHOD` positional argument can now be omitted (defaults to
  `GET`, or to `POST` with data)
- Fixed `--verbose --form`
- Added support for Tox

## [0.2.1](https://github.com/httpie/httpie/compare/0.2.0...0.2.1) (2012-06-13)

- Added compatibility with `requests-0.12.1`
- Dropped custom JSON and HTTP lexers in favor of the ones newly included
  in `pygments-1.5`

## [0.2.0](https://github.com/httpie/httpie/compare/0.1.6...0.2.0) (2012-04-25)

- Added Python 3 support
- Added the ability to print the HTTP request as well as the response
  (see `--print` and `--verbose`)
- Added support for Digest authentication
- Added file upload support
  (`http -f POST file_field_name@/path/to/file`)
- Improved syntax highlighting for JSON
- Added support for field name escaping
- Many bug fixes

## [0.1.6](https://github.com/httpie/httpie/compare/0.1.5...0.1.6) (2012-03-04)

- Fixed `setup.py`

## [0.1.5](https://github.com/httpie/httpie/compare/0.1.4...0.1.5) (2012-03-04)

- Many improvements and bug fixes

## [0.1.4](https://github.com/httpie/httpie/compare/b966efa...0.1.4) (2012-02-28)

- Many improvements and bug fixes

## [0.1.0](https://github.com/httpie/httpie/commit/b966efa) (2012-02-25)

- Initial public release
386
CHANGELOG.rst
386
CHANGELOG.rst
@ -1,386 +0,0 @@
|
|||||||
==========
|
|
||||||
Change Log
|
|
||||||
==========
|
|
||||||
|
|
||||||
This document records all notable changes to `HTTPie <http://httpie.org>`_.
|
|
||||||
This project adheres to `Semantic Versioning <http://semver.org/>`_.
|
|
||||||
|
|
||||||
|
|
||||||
`1.0.3`_ (2019-08-26)
|
|
||||||
-------------------------
|
|
||||||
|
|
||||||
* Fixed CVE-2019-10751 — the way the output filename is generated for
|
|
||||||
``--download`` requests without ``--output`` resulting in a redirect has
|
|
||||||
been changed to only consider the initial URL as the base for the generated
|
|
||||||
filename, and not the final one. This fixes a potential security issue under
|
|
||||||
the following scenario:
|
|
||||||
|
|
||||||
1. A ``--download`` request with no explicit ``--output`` is made (e.g.,
|
|
||||||
``$ http -d example.org/file.txt``), instructing httpie to
|
|
||||||
`generate the output filename <https://httpie.org/doc#downloaded-file-name>`_
|
|
||||||
from the ``Content-Disposition`` response, or from the URL if the header
|
|
||||||
is not provided.
|
|
||||||
2. The server handling the request has been modified by an attacker and
|
|
||||||
instead of the expected response the URL returns a redirect to another
|
|
||||||
URL, e.g., ``attacker.example.org/.bash_profile``, whose response does
|
|
||||||
not provide a ``Content-Disposition`` header (i.e., the base for the
|
|
||||||
generated filename becomes ``.bash_profile`` instead of ``file.txt``).
|
|
||||||
3. Your current directory doesn’t already contain ``.bash_profile``
|
|
||||||
(i.e., no unique suffix is added to the generated filename).
|
|
||||||
4. You don’t notice the potentially unexpected output filename
|
|
||||||
as reported by httpie in the console output
|
|
||||||
(e.g., ``Downloading 100.00 B to ".bash_profile"``).
|
|
||||||
|
|
||||||
Reported by Raul Onitza and Giulio Comi.
|
|
||||||
|
|
||||||
|
|
||||||
`1.0.2`_ (2018-11-14)
|
|
||||||
-------------------------
|
|
||||||
|
|
||||||
* Fixed tests for installation with pyOpenSSL.
|
|
||||||
|
|
||||||
|
|
||||||
`1.0.1`_ (2018-11-14)
|
|
||||||
-------------------------
|
|
||||||
|
|
||||||
* Removed external URL calls from tests.
|
|
||||||
|
|
||||||
|
|
||||||
`1.0.0`_ (2018-11-02)
|
|
||||||
-------------------------
|
|
||||||
|
|
||||||
* Added ``--style=auto`` which follows the terminal ANSI color styles.
|
|
||||||
* Added support for selecting TLS 1.3 via ``--ssl=tls1.3``
|
|
||||||
(available once implemented in upstream libraries).
|
|
||||||
* Added ``true``/``false`` as valid values for ``--verify``
|
|
||||||
(in addition to ``yes``/``no``) and the boolean value is case-insensitive.
|
|
||||||
* Changed the default ``--style`` from ``solarized`` to ``auto`` (on Windows it stays ``fruity``).
|
|
||||||
* Fixed default headers being incorrectly case-sensitive.
|
|
||||||
* Removed Python 2.6 support.
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
`0.9.9`_ (2016-12-08)
|
|
||||||
---------------------
|
|
||||||
|
|
||||||
* Fixed README.
|
|
||||||
|
|
||||||
|
|
||||||
`0.9.8`_ (2016-12-08)
|
|
||||||
---------------------
|
|
||||||
|
|
||||||
* Extended auth plugin API.
|
|
||||||
* Added exit status code ``7`` for plugin errors.
|
|
||||||
* Added support for ``curses``-less Python installations.
|
|
||||||
* Fixed ``REQUEST_ITEM`` arg incorrectly being reported as required.
|
|
||||||
* Improved ``CTRL-C`` interrupt handling.
|
|
||||||
* Added the standard exit status code ``130`` for keyboard interrupts.
|
|
||||||
|
|
||||||
|
|
||||||
`0.9.6`_ (2016-08-13)
|
|
||||||
---------------------
|
|
||||||
|
|
||||||
* Added Python 3 as a dependency for Homebrew installations
|
|
||||||
to ensure some of the newer HTTP features work out of the box
|
|
||||||
for macOS users (starting with HTTPie 0.9.4.).
|
|
||||||
* Added the ability to unset a request header with ``Header:``, and send an
|
|
||||||
empty value with ``Header;``.
|
|
||||||
* Added ``--default-scheme <URL_SCHEME>`` to enable things like
|
|
||||||
``$ alias https='http --default-scheme=https``.
|
|
||||||
* Added ``-I`` as a shortcut for ``--ignore-stdin``.
|
|
||||||
* Added fish shell completion (located in ``extras/httpie-completion.fish``
|
|
||||||
in the Github repo).
|
|
||||||
* Updated ``requests`` to 2.10.0 so that SOCKS support can be added via
|
|
||||||
``pip install requests[socks]``.
|
|
||||||
* Changed the default JSON ``Accept`` header from ``application/json``
|
|
||||||
to ``application/json, */*``.
|
|
||||||
* Changed the pre-processing of request HTTP headers so that any leading
|
|
||||||
and trailing whitespace is removed.
|
|
||||||
|
|
||||||
|
|
||||||
`0.9.4`_ (2016-07-01)
|
|
||||||
---------------------
|
|
||||||
|
|
||||||
* Added ``Content-Type`` of files uploaded in ``multipart/form-data`` requests
|
|
||||||
* Added ``--ssl=<PROTOCOL>`` to specify the desired SSL/TLS protocol version
|
|
||||||
to use for HTTPS requests.
|
|
||||||
* Added JSON detection with ``--json, -j`` to work around incorrect
|
|
||||||
``Content-Type``
|
|
||||||
* Added ``--all`` to show intermediate responses such as redirects (with ``--follow``)
|
|
||||||
* Added ``--history-print, -P WHAT`` to specify formatting of intermediate responses
|
|
||||||
* Added ``--max-redirects=N`` (default 30)
|
|
||||||
* Added ``-A`` as short name for ``--auth-type``
|
|
||||||
* Added ``-F`` as short name for ``--follow``
|
|
||||||
* Removed the ``implicit_content_type`` config option
|
|
||||||
(use ``"default_options": ["--form"]`` instead)
|
|
||||||
* Redirected ``stdout`` doesn't trigger an error anymore when ``--output FILE``
|
|
||||||
is set
|
|
||||||
* Changed the default ``--style`` back to ``solarized`` for better support
|
|
||||||
of light and dark terminals
|
|
||||||
* Improved ``--debug`` output
|
|
||||||
* Fixed ``--session`` when used with ``--download``
|
|
||||||
* Fixed ``--download`` to trim too long filenames before saving the file
|
|
||||||
* Fixed the handling of ``Content-Type`` with multiple ``+subtype`` parts
|
|
||||||
* Removed the XML formatter as the implementation suffered from multiple issues
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
`0.9.3`_ (2016-01-01)
|
|
||||||
---------------------
|
|
||||||
|
|
||||||
* Changed the default color ``--style`` from ``solarized`` to ``monokai``
|
|
||||||
* Added basic Bash autocomplete support (need to be installed manually)
|
|
||||||
* Added request details to connection error messages
|
|
||||||
* Fixed ``'requests.packages.urllib3' has no attribute 'disable_warnings'``
|
|
||||||
errors that occurred in some installations
|
|
||||||
* Fixed colors and formatting on Windows
|
|
||||||
* Fixed ``--auth`` prompt on Windows
|
|
||||||
|
|
||||||
|
|
||||||
`0.9.2`_ (2015-02-24)
|
|
||||||
---------------------
|
|
||||||
|
|
||||||
* Fixed compatibility with Requests 2.5.1
|
|
||||||
* Changed the default JSON ``Content-Type`` to ``application/json`` as UTF-8
|
|
||||||
is the default JSON encoding
|
|
||||||
|
|
||||||
|
|
||||||
`0.9.1`_ (2015-02-07)
|
|
||||||
---------------------
|
|
||||||
|
|
||||||
* Added support for Requests transport adapter plugins
|
|
||||||
(see `httpie-unixsocket <https://github.com/httpie/httpie-unixsocket>`_
|
|
||||||
and `httpie-http2 <https://github.com/httpie/httpie-http2>`_)
|
|
||||||
|
|
||||||
|
|
||||||
`0.9.0`_ (2015-01-31)
|
|
||||||
---------------------
|
|
||||||
|
|
||||||
* Added ``--cert`` and ``--cert-key`` parameters to specify a client side
|
|
||||||
certificate and private key for SSL
|
|
||||||
* Improved unicode support
|
|
||||||
* Improved terminal color depth detection via ``curses``
|
|
||||||
* To make it easier to deal with Windows paths in request items, ``\``
|
|
||||||
now only escapes special characters (the ones that are used as key-value
|
|
||||||
separators by HTTPie)
|
|
||||||
* Switched from ``unittest`` to ``pytest``
|
|
||||||
* Added Python `wheel` support
|
|
||||||
* Various test suite improvements
|
|
||||||
* Added ``CONTRIBUTING``
|
|
||||||
* Fixed ``User-Agent`` overwriting when used within a session
|
|
||||||
* Fixed handling of empty passwords in URL credentials
|
|
||||||
* Fixed multiple file uploads with the same form field name
|
|
||||||
* Fixed ``--output=/dev/null`` on Linux
|
|
||||||
* Miscellaneous bugfixes
|
|
||||||
|
|
||||||
|
|
||||||
`0.8.0`_ (2014-01-25)
|
|
||||||
---------------------
|
|
||||||
|
|
||||||
* Added ``field=@file.txt`` and ``field:=@file.json`` for embedding
|
|
||||||
the contents of text and JSON files into request data
|
|
||||||
* Added curl-style shorthand for localhost
|
|
||||||
* Fixed request ``Host`` header value output so that it doesn't contain
|
|
||||||
credentials, if included in the URL
|
|
||||||
|
|
||||||
|
|
||||||
`0.7.1`_ (2013-09-24)
|
|
||||||
---------------------
|
|
||||||
|
|
||||||
* Added ``--ignore-stdin``
|
|
||||||
* Added support for auth plugins
|
|
||||||
* Improved ``--help`` output
|
|
||||||
* Improved ``Content-Disposition`` parsing for ``--download`` mode
|
|
||||||
* Update to Requests 2.0.0
|
|
||||||
|
|
||||||
|
|
||||||
`0.6.0`_ (2013-06-03)
|
|
||||||
---------------------
|
|
||||||
|
|
||||||
* XML data is now formatted
|
|
||||||
* ``--session`` and ``--session-read-only`` now also accept paths to
|
|
||||||
session files (eg. ``http --session=/tmp/session.json example.org``)
|
|
||||||
|
|
||||||
|
|
||||||
`0.5.1`_ (2013-05-13)
|
|
||||||
---------------------
|
|
||||||
|
|
||||||
* ``Content-*`` and ``If-*`` request headers are not stored in sessions
|
|
||||||
anymore as they are request-specific
|
|
||||||
|
|
||||||
|
|
||||||
`0.5.0`_ (2013-04-27)
|
|
||||||
---------------------
|
|
||||||
|
|
||||||
* Added a download mode via ``--download``
|
|
||||||
* Fixes miscellaneous bugs


`0.4.1`_ (2013-02-26)
---------------------

* Fixed ``setup.py``


`0.4.0`_ (2013-02-22)
---------------------

* Added Python 3.3 compatibility
* Added Requests >= v1.0.4 compatibility
* Added support for credentials in URL
* Added ``--no-option`` for every ``--option``, to be config-friendly
* Mutually exclusive arguments can be specified multiple times; the
  last value is used


`0.3.0`_ (2012-09-21)
---------------------

* Allow output redirection on Windows
* Added configuration file
* Added persistent session support
* Renamed ``--allow-redirects`` to ``--follow``
* Improved the usability of ``http --help``
* Fixed installation on Windows with Python 3
* Fixed colorized output on Windows with Python 3
* CRLF HTTP header field separation in the output
* Added exit status code ``2`` for timed-out requests
* Added the option to separate colorizing and formatting
  (``--pretty=all``, ``--pretty=colors`` and ``--pretty=format``);
  ``--ugly`` has been removed in favor of ``--pretty=none``


`0.2.7`_ (2012-08-07)
---------------------

* Added compatibility with Requests 0.13.6
* Added streamed terminal output. ``--stream, -S`` can be used to enable
  streaming also with ``--pretty`` and to ensure more frequent output
  flushing
* Added support for efficient large file downloads
* Sort headers by name (unless ``--pretty=none``)
* Response body is fetched only when needed (e.g., not with ``--headers``)
* Improved content type matching
* Updated Solarized color scheme
* Windows: added ``--output FILE`` to store output in a file
  (piping results in corrupted data on Windows)
* Proper handling of binary requests and responses
* Fixed printing of ``multipart/form-data`` requests
* Renamed ``--traceback`` to ``--debug``


`0.2.6`_ (2012-07-26)
---------------------

* The short option for ``--headers`` is now ``-h`` (``-t`` has been
  removed; for usage, see ``--help``)
* Form data and URL parameters can have multiple fields with the same name
  (e.g., ``http -f url a=1 a=2``)
* Added ``--check-status`` to exit with an error on HTTP 3xx, 4xx and
  5xx (exit status 3, 4, and 5, respectively); see the example after this list
* If the output is piped to another program or redirected to a file,
  the default behaviour is to print only the response body
  (it can still be overridden via the ``--print`` flag)
* Improved highlighting of HTTP headers
* Added query string parameters (``param==value``)
* Added support for terminal colors under Windows
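
Two small illustrations of the 0.2.6 features above; the URLs are placeholders:

.. code-block:: bash

    # param==value items are sent as query-string parameters
    $ http example.org/search q==httpie page==2

    # With --check-status, 3xx/4xx/5xx responses map to exit codes 3/4/5
    $ http --check-status example.org/missing || echo "request failed with exit status $?"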


`0.2.5`_ (2012-07-17)
---------------------

* Unicode characters in prettified JSON no longer get escaped, for
  improved readability
* ``--auth`` now prompts for a password if only a username is provided
* Added support for request payloads from a file path with automatic
  ``Content-Type`` (``http URL @/path``); see the sketch below
* Fixed missing query string when displaying the request headers via
  ``--verbose``
* Fixed ``Content-Type`` for requests with no data
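
A sketch of the 0.2.5 additions above; the file name, URL, and username are illustrative only:

.. code-block:: bash

    # Send a file as the request body; Content-Type is inferred from the file
    $ http POST example.org/upload @data.json

    # With only a username given, HTTPie prompts for the password
    $ http -a alice example.org/protected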


`0.2.2`_ (2012-06-24)
---------------------

* The ``METHOD`` positional argument can now be omitted (defaults to
  ``GET``, or to ``POST`` with data)
* Fixed ``--verbose --form``
* Added support for Tox


`0.2.1`_ (2012-06-13)
---------------------

* Added compatibility with ``requests-0.12.1``
* Dropped custom JSON and HTTP lexers in favor of the ones newly included
  in ``pygments-1.5``


`0.2.0`_ (2012-04-25)
---------------------

* Added Python 3 support
* Added the ability to print the HTTP request as well as the response
  (see ``--print`` and ``--verbose``)
* Added support for Digest authentication
* Added file upload support
  (``http -f POST file_field_name@/path/to/file``)
* Improved syntax highlighting for JSON
* Added support for field name escaping
* Many bug fixes


`0.1.6`_ (2012-03-04)
---------------------

* Fixed ``setup.py``


`0.1.5`_ (2012-03-04)
---------------------

* Many improvements and bug fixes


`0.1.4`_ (2012-02-28)
---------------------

* Many improvements and bug fixes


`0.1.0`_ (2012-02-25)
---------------------

* Initial public release


.. _`0.1.0`: https://github.com/jakubroztocil/httpie/commit/b966efa
.. _0.1.4: https://github.com/jakubroztocil/httpie/compare/b966efa...0.1.4
.. _0.1.5: https://github.com/jakubroztocil/httpie/compare/0.1.4...0.1.5
.. _0.1.6: https://github.com/jakubroztocil/httpie/compare/0.1.5...0.1.6
.. _0.2.0: https://github.com/jakubroztocil/httpie/compare/0.1.6...0.2.0
.. _0.2.1: https://github.com/jakubroztocil/httpie/compare/0.2.0...0.2.1
.. _0.2.2: https://github.com/jakubroztocil/httpie/compare/0.2.1...0.2.2
.. _0.2.5: https://github.com/jakubroztocil/httpie/compare/0.2.2...0.2.5
.. _0.2.6: https://github.com/jakubroztocil/httpie/compare/0.2.5...0.2.6
.. _0.2.7: https://github.com/jakubroztocil/httpie/compare/0.2.5...0.2.7
.. _0.3.0: https://github.com/jakubroztocil/httpie/compare/0.2.7...0.3.0
.. _0.4.0: https://github.com/jakubroztocil/httpie/compare/0.3.0...0.4.0
.. _0.4.1: https://github.com/jakubroztocil/httpie/compare/0.4.0...0.4.1
.. _0.5.0: https://github.com/jakubroztocil/httpie/compare/0.4.1...0.5.0
.. _0.5.1: https://github.com/jakubroztocil/httpie/compare/0.5.0...0.5.1
.. _0.6.0: https://github.com/jakubroztocil/httpie/compare/0.5.1...0.6.0
.. _0.7.1: https://github.com/jakubroztocil/httpie/compare/0.6.0...0.7.1
.. _0.8.0: https://github.com/jakubroztocil/httpie/compare/0.7.1...0.8.0
.. _0.9.0: https://github.com/jakubroztocil/httpie/compare/0.8.0...0.9.0
.. _0.9.1: https://github.com/jakubroztocil/httpie/compare/0.9.0...0.9.1
.. _0.9.2: https://github.com/jakubroztocil/httpie/compare/0.9.1...0.9.2
.. _0.9.3: https://github.com/jakubroztocil/httpie/compare/0.9.2...0.9.3
.. _0.9.4: https://github.com/jakubroztocil/httpie/compare/0.9.3...0.9.4
.. _0.9.6: https://github.com/jakubroztocil/httpie/compare/0.9.4...0.9.6
.. _0.9.8: https://github.com/jakubroztocil/httpie/compare/0.9.6...0.9.8
.. _0.9.9: https://github.com/jakubroztocil/httpie/compare/0.9.8...0.9.9
.. _1.0.0: https://github.com/jakubroztocil/httpie/compare/0.9.9...1.0.0
.. _1.0.1: https://github.com/jakubroztocil/httpie/compare/1.0.0...1.0.1
.. _1.0.2: https://github.com/jakubroztocil/httpie/compare/1.0.1...1.0.2
.. _1.0.3: https://github.com/jakubroztocil/httpie/compare/1.0.2...1.0.3

CODE_OF_CONDUCT.md (new file, 74 lines)

# Contributor Covenant Code of Conduct

## Our Pledge

In the interest of fostering an open and welcoming environment, we as
contributors and maintainers pledge to making participation in our project and
our community a harassment-free experience for everyone, regardless of age, body
size, disability, ethnicity, sex characteristics, gender identity and expression,
level of experience, education, socio-economic status, nationality, personal
appearance, race, religion, or sexual identity and orientation.

## Our Standards

Examples of behavior that contributes to creating a positive environment
include:

- Using welcoming and inclusive language
- Being respectful of differing viewpoints and experiences
- Gracefully accepting constructive criticism
- Focusing on what is best for the community
- Showing empathy towards other community members

Examples of unacceptable behavior by participants include:

- The use of sexualized language or imagery and unwelcome sexual attention or
  advances
- Trolling, insulting/derogatory comments, and personal or political attacks
- Public or private harassment
- Publishing others' private information, such as a physical or electronic
  address, without explicit permission
- Other conduct which could reasonably be considered inappropriate in a
  professional setting

## Our Responsibilities

Project maintainers are responsible for clarifying the standards of acceptable
behavior and are expected to take appropriate and fair corrective action in
response to any instances of unacceptable behavior.

Project maintainers have the right and responsibility to remove, edit, or
reject comments, commits, code, wiki edits, issues, and other contributions
that are not aligned to this Code of Conduct, or to ban temporarily or
permanently any contributor for other behaviors that they deem inappropriate,
threatening, offensive, or harmful.

## Scope

This Code of Conduct applies both within project spaces and in public spaces
when an individual is representing the project or its community. Examples of
representing a project or community include using an official project e-mail
address, posting via an official social media account, or acting as an appointed
representative at an online or offline event. Representation of a project may be
further defined and clarified by project maintainers.

## Enforcement

Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported by contacting the project team at jakub@roztocil.co. All
complaints will be reviewed and investigated and will result in a response that
is deemed necessary and appropriate to the circumstances. The project team is
obligated to maintain confidentiality with regard to the reporter of an incident.
Further details of specific enforcement policies may be posted separately.

Project maintainers who do not follow or enforce the Code of Conduct in good
faith may face temporary or permanent repercussions as determined by other
members of the project's leadership.

## Attribution

This Code of Conduct is adapted from the [Contributor Covenant](https://www.contributor-covenant.org),
version 1.4, available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html

For answers to common questions about this code of conduct, see
https://www.contributor-covenant.org/faq

CONTRIBUTING.md (new file, 196 lines)

# Contributing to HTTPie

Bug reports and code and documentation patches are welcome. You can
also help this project by using the development version of HTTPie
and reporting any bugs you might encounter.

## 1. Reporting bugs

**It's important that you provide the full command argument list
as well as the output of the failing command.**

Use the `--debug` flag and copy&paste both the command and its output
to your bug report, e.g.:

```bash
$ http --debug <COMPLETE ARGUMENT LIST THAT TRIGGERS THE ERROR>
<COMPLETE OUTPUT>
```

## 2. Contributing Code and Docs

Before working on a new feature or a bug, please browse [existing issues](https://github.com/httpie/httpie/issues)
to see whether it has previously been discussed.

If your change alters HTTPie’s behaviour or interface, it's a good idea to
discuss it before you start working on it.

If you are fixing an issue, the first step should be to create a test case that
reproduces the incorrect behaviour. That will also help you to build an
understanding of the issue at hand.

**Pull requests introducing code changes without tests
will generally not get merged. The same goes for PRs changing HTTPie’s
behaviour and not providing documentation.**

Conversely, PRs consisting of documentation improvements or tests
for existing-yet-previously-untested behavior will very likely be merged.
Therefore, docs and tests improvements are a great candidate for your first
contribution.

Consider also adding a [CHANGELOG](https://github.com/httpie/httpie/blob/master/CHANGELOG.md) entry for your changes.

### Development Environment

#### Getting the code

Go to https://github.com/httpie/httpie and fork the project repository.

```bash
# Clone your fork
$ git clone git@github.com:<YOU>/httpie.git

# Enter the project directory
$ cd httpie

# Create a branch for your changes
$ git checkout -b my_topical_branch
```

#### Setup

The [Makefile](https://github.com/httpie/httpie/blob/master/Makefile) contains a bunch of tasks to get you started. Just run
the following command, which:

- creates an isolated Python virtual environment inside `./venv`
  (via the standard library [venv](https://docs.python.org/3/library/venv.html) tool);
- installs all dependencies and also installs HTTPie
  (in editable mode so that the `http` command will point to your
  working copy);
- and runs the tests (the same as running `make install test`).

```bash
$ make
```

#### Python virtual environment

Activate the Python virtual environment (created via the `make install`
task during [setup](#setup)) for your active shell session using the following command:

```bash
$ source venv/bin/activate
```

(If you use `virtualenvwrapper`, you can also use `workon httpie` to
activate the environment; we have created a symlink for you. It’s a bit of
a hack but it works™.)

You should now see `(httpie)` next to your shell prompt, and
the `http` command should point to your development copy:

```
(httpie) ~/Code/httpie $ which http
/Users/<user>/Code/httpie/venv/bin/http
(httpie) ~/Code/httpie $ http --version
2.0.0-dev
```

(By the way, you don’t need to activate the virtual environment if you just
want to run some of the `make` tasks. You can also invoke the development
version of HTTPie directly with `./venv/bin/http` without having to activate
the environment first. The same goes for `./venv/bin/pytest`, etc.)

### Making Changes

Please make sure your changes conform to [Style Guide for Python Code](https://python.org/dev/peps/pep-0008/) (PEP8)
and that `make pycodestyle` passes.
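
If `make` is not available in your environment, the command below is a rough equivalent of what the Makefile's `codestyle` target runs (a sketch, assuming `flake8` is installed in your virtual environment):

```bash
# Roughly what `make codestyle` / `make pycodestyle` does
$ python -m flake8 httpie/ tests/ extras/ *.py
```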

### Testing & CI

Please add tests for any new features and bug fixes.

When you open a Pull Request, [GitHub Actions](https://github.com/httpie/httpie/actions) will automatically run HTTPie’s [test suite](https://github.com/httpie/httpie/tree/master/tests) against your code, so please make sure all checks pass.

#### Running tests locally

HTTPie uses the [pytest](https://pytest.org/) runner.

```bash
# Run tests on the current Python interpreter
$ make test

# Run tests with coverage
$ make test-cover

# Test PEP8 compliance
$ make codestyle

# Run extended tests: code, .md file syntax, packaging, etc.
$ make test-all
```

#### Running specific tests

After you have activated your virtual environment (see [setup](#setup)), you
can run specific tests from the terminal:

```bash
# Run specific tests on the current Python
$ python -m pytest tests/test_uploads.py
$ python -m pytest tests/test_uploads.py::TestMultipartFormDataFileUpload
$ python -m pytest tests/test_uploads.py::TestMultipartFormDataFileUpload::test_upload_ok
```
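
You can also select tests by keyword expression instead of by path; this is standard `pytest` behaviour, shown here only as a convenience:

```bash
# Run every test whose name matches the expression
$ python -m pytest -k "upload and ok" tests/
```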

See [Makefile](https://github.com/httpie/httpie/blob/master/Makefile) for additional development utilities.

#### Windows

If you are on a Windows machine and not able to run `make`,
follow these steps for a basic setup. As a prerequisite, you need
Python 3.6+ installed.

Create a virtual environment and activate it:

```powershell
C:\> python -m venv --prompt httpie venv
C:\> venv\Scripts\activate
```

Install HTTPie in editable mode with all the dependencies:

```powershell
C:\> python -m pip install --upgrade -e . -r requirements-dev.txt
```

You should now see `(httpie)` next to your shell prompt, and
the `http` command should point to your development copy:

```powershell
# In PowerShell:
(httpie) PS C:\Users\ovezovs\httpie> Get-Command http

CommandType  Name      Version  Source
-----------  ----      -------  ------
Application  http.exe  0.0.0.0  C:\Users\ovezovs\httpie\venv\Scripts\http.exe
```

```bash
# In CMD:
(httpie) C:\Users\ovezovs\httpie> where http
C:\Users\ovezovs\httpie\venv\Scripts\http.exe
C:\Users\ovezovs\AppData\Local\Programs\Python\Python38-32\Scripts\http.exe

(httpie) C:\Users\ovezovs\httpie> http --version
2.3.0-dev
```

Use `pytest` to run tests locally with an active virtual environment:

```bash
# Run all tests
$ python -m pytest
```

______________________________________________________________________

Finally, feel free to add yourself to [AUTHORS](https://github.com/httpie/httpie/blob/master/AUTHORS.md)!

CONTRIBUTING.rst (deleted file, 117 lines)

@@ -1,117 +0,0 @@
######################
Contributing to HTTPie
######################

Bug reports and code and documentation patches are welcome. You can
help this project also by using the development version of HTTPie
and by reporting any bugs you might encounter.

1. Reporting bugs
=================

**It's important that you provide the full command argument list
as well as the output of the failing command.**
Use the ``--debug`` flag and copy&paste both the command and its output
to your bug report, e.g.:

.. code-block:: bash

    $ http --debug [COMPLETE ARGUMENT LIST THAT TRIGGERS THE ERROR]
    [COMPLETE OUTPUT]


2. Contributing Code and Docs
=============================

Before working on a new feature or a bug, please browse `existing issues`_
to see whether it has been previously discussed. If the change in question
is a bigger one, it's always good to discuss before you start working on
it.


Creating Development Environment
--------------------------------

Go to https://github.com/jakubroztocil/httpie and fork the project repository.

.. code-block:: bash

    git clone https://github.com/<YOU>/httpie

    cd httpie

    git checkout -b my_topical_branch

    # (Recommended: create a new virtualenv)

    # Install dev. requirements and also HTTPie (in editable mode
    # so that the `http' command will point to your working copy):
    make init


Making Changes
--------------

Please make sure your changes conform to `Style Guide for Python Code`_ (PEP8)
and that ``make pycodestyle`` passes.


Testing
-------

Before opening a pull request, please make sure the `test suite`_ passes
in all of the `supported Python environments`_. You should also add tests
for any new features and bug fixes.

HTTPie uses `pytest`_ and `Tox`_ for testing.


Running all tests:
******************

.. code-block:: bash

    # Run all tests on the current Python interpreter with coverage
    make test

    # Run all tests in all of the supported and available Pythons via Tox
    make test-tox

    # Run all tests for code as well as packaging, etc.
    make test-all

    # Test PEP8 compliance
    make pycodestyle


Running specific tests:
***********************

.. code-block:: bash

    # Run specific tests on the current Python
    py.test tests/test_uploads.py
    py.test tests/test_uploads.py::TestMultipartFormDataFileUpload
    py.test tests/test_uploads.py::TestMultipartFormDataFileUpload::test_upload_ok

    # Run specific tests on all Pythons via Tox
    # (change to `tox -e py37' to limit Python version)
    tox -- tests/test_uploads.py --verbose
    tox -- tests/test_uploads.py::TestMultipartFormDataFileUpload --verbose
    tox -- tests/test_uploads.py::TestMultipartFormDataFileUpload::test_upload_ok --verbose

-----

See `Makefile`_ for additional development utilities.
Don't forget to add yourself to `AUTHORS`_!


.. _Tox: http://tox.testrun.org
.. _supported Python environments: https://github.com/jakubroztocil/httpie/blob/master/tox.ini
.. _existing issues: https://github.com/jakubroztocil/httpie/issues?state=open
.. _AUTHORS: https://github.com/jakubroztocil/httpie/blob/master/AUTHORS.rst
.. _Makefile: https://github.com/jakubroztocil/httpie/blob/master/Makefile
.. _pytest: http://pytest.org/
.. _Style Guide for Python Code: http://python.org/dev/peps/pep-0008/
.. _test suite: https://github.com/jakubroztocil/httpie/tree/master/tests

LICENSE (diff: 10 lines)

@@ -1,4 +1,4 @@
-Copyright © 2012-2019 Jakub Roztocil <jakub@roztocil.co>
+Copyright © 2012-2021 Jakub Roztocil <jakub@roztocil.co>

 Redistribution and use in source and binary forms, with or without
 modification, are permitted provided that the following conditions are met:
@@ -10,14 +10,14 @@ modification, are permitted provided that the following conditions are met:
    notice, this list of conditions and the following disclaimer in the
    documentation and/or other materials provided with the distribution.

-3. Neither the name of The author nor the names of its contributors may
-   be used to endorse or promote products derived from this software
+3. Neither the name of the copyright holder nor the names of its contributors
+   may be used to endorse or promote products derived from this software
    without specific prior written permission.

-THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS "AS IS" AND
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-DISCLAIMED. IN NO EVENT SHALL THE AUTHOR AND CONTRIBUTORS BE LIABLE FOR
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
 ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON

MANIFEST.in (diff: 10 lines)

@@ -1,4 +1,8 @@
 include LICENSE
-include README.rst
-include CHANGELOG.rst
-include AUTHORS.rst
+include README.md
+include CHANGELOG.md
+include AUTHORS.md
+include docs/README.md
+
+# <https://github.com/httpie/httpie/issues/182>
+recursive-include tests/ *

Makefile (diff: 174 lines)

@@ -1,45 +1,93 @@
 ###############################################################################
-# See ./CONTRIBUTING.rst
+# See ./CONTRIBUTING.md
 ###############################################################################

+.PHONY: build
+
+ROOT_DIR:=$(shell dirname $(realpath $(firstword $(MAKEFILE_LIST))))
 VERSION=$(shell grep __version__ httpie/__init__.py)
-REQUIREMENTS="requirements-dev.txt"
-TAG="\n\n\033[0;32m\#\#\# "
-END=" \#\#\# \033[0m\n"
+H1="\n\n\033[0;32m\#\#\# "
+H1END=" \#\#\# \033[0m\n"


-all: test
+# Only used to create our venv.
+SYSTEM_PYTHON=python3

+VENV_ROOT=venv
+VENV_BIN=$(VENV_ROOT)/bin
+VENV_PIP=$(VENV_BIN)/pip3
+VENV_PYTHON=$(VENV_BIN)/python


-init: uninstall-httpie
-    @echo $(TAG)Installing dev requirements$(END)
-    pip install --upgrade -r $(REQUIREMENTS)
-
-    @echo $(TAG)Installing HTTPie$(END)
-    pip install --upgrade --editable .
+export PATH := $(VENV_BIN):$(PATH)
+
+all: uninstall-httpie install test
+
+install: venv
+    @echo $(H1)Installing dev requirements$(H1END)
+    $(VENV_PIP) install --upgrade --editable '.[dev]'
+
+    @echo $(H1)Installing HTTPie$(H1END)
+    $(VENV_PIP) install --upgrade --editable .
     @echo

 clean:
-    @echo $(TAG)Cleaning up$(END)
-    rm -rf .tox *.egg dist build .coverage .cache .pytest_cache httpie.egg-info
-    find . -name '__pycache__' -delete -print -o -name '*.pyc' -delete -print
+    @echo $(H1)Cleaning up$(H1END)
+    rm -rf $(VENV_ROOT)
+    # Remove symlink for virtualenvwrapper, if we’ve created one.
+    [ -n "$(WORKON_HOME)" -a -L "$(WORKON_HOME)/httpie" -a -f "$(WORKON_HOME)/httpie" ] && rm $(WORKON_HOME)/httpie || true
+    rm -rf *.egg dist build .coverage .cache .pytest_cache httpie.egg-info
+    find . -name '__pycache__' -delete -o -name '*.pyc' -delete
     @echo

+
+venv:
+    @echo $(H1)Creating a Python environment $(VENV_ROOT) $(H1END)
+
+    $(SYSTEM_PYTHON) -m venv --prompt httpie $(VENV_ROOT)
+
+    @echo
+    @echo done.
+    @echo
+    @echo To active it manually, run:
+    @echo
+    @echo "    source $(VENV_BIN)/activate"
+    @echo
+    @echo '(learn more: https://docs.python.org/3/library/venv.html)'
+    @echo
+    @if [ -n "$(WORKON_HOME)" ]; then \
+        echo $(ROOT_DIR) > $(VENV_ROOT)/.project; \
+        if [ ! -d $(WORKON_HOME)/httpie -a ! -L $(WORKON_HOME)/httpie ]; then \
+            ln -s $(ROOT_DIR)/$(VENV_ROOT) $(WORKON_HOME)/httpie ; \
+            echo ''; \
+            echo 'Since you use virtualenvwrapper, we created a symlink'; \
+            echo 'so you can also use "workon httpie" to activate the venv.'; \
+            echo ''; \
+        fi; \
+    fi

 ###############################################################################
 # Testing
 ###############################################################################

-test: init
-    @echo $(TAG)Running tests on the current Python interpreter with coverage $(END)
-    py.test --cov ./httpie --cov ./tests --doctest-modules --verbose ./httpie ./tests
+test:
+    @echo $(H1)Running tests$(HEADER_EXTRA)$(H1END)
+    $(VENV_BIN)/python -m pytest $(COV) ./httpie $(COV) ./tests --doctest-modules --verbose ./httpie ./tests
     @echo

+test-cover: COV=--cov
+test-cover: HEADER_EXTRA=' (with coverage)'
+test-cover: test

 # test-all is meant to test everything — even this Makefile
-test-all: uninstall-all clean init test test-tox test-dist pycodestyle
+test-all: clean install test test-dist codestyle
     @echo

@@ -47,37 +95,43 @@ test-dist: test-sdist test-bdist-wheel
     @echo

-test-tox: init
-    @echo $(TAG)Running tests on all Pythons via Tox$(END)
-    tox
+test-sdist: clean venv
+    @echo $(H1)Testing sdist build an installation$(H1END)
+    $(VENV_PYTHON) setup.py sdist
+    $(VENV_PIP) install --force-reinstall --upgrade dist/*.gz
+    $(VENV_BIN)/http --version
     @echo

-test-sdist: clean uninstall-httpie
-    @echo $(TAG)Testing sdist build an installation$(END)
-    python setup.py sdist
-    pip install --force-reinstall --upgrade dist/*.gz
-    which http
+test-bdist-wheel: clean venv
+    @echo $(H1)Testing wheel build an installation$(H1END)
+    $(VENV_PIP) install wheel
+    $(VENV_PYTHON) setup.py bdist_wheel
+    $(VENV_PIP) install --force-reinstall --upgrade dist/*.whl
+    $(VENV_BIN)/http --version
     @echo

-test-bdist-wheel: clean uninstall-httpie
-    @echo $(TAG)Testing wheel build an installation$(END)
-    python setup.py bdist_wheel
-    pip install --force-reinstall --upgrade dist/*.whl
-    which http
+twine-check:
+    twine check dist/*
+
+# Kept for convenience, "make codestyle" is preferred though
+pycodestyle: codestyle
+
+codestyle:
+    @echo $(H1)Running flake8$(H1END)
+    @[ -f $(VENV_BIN)/flake8 ] || $(VENV_PIP) install --upgrade --editable '.[dev]'
+    $(VENV_BIN)/flake8 httpie/ tests/ extras/ *.py
     @echo

-pycodestyle:
-    which pycodestyle || pip install pycodestyle
-    pycodestyle
-    @echo
-
-coveralls:
-    which coveralls || pip install python-coveralls
-    coveralls
+codecov-upload:
+    @echo $(H1)Running codecov$(H1END)
+    @[ -f $(VENV_BIN)/codecov ] || $(VENV_PIP) install codecov
+    # $(VENV_BIN)/codecov --required
+    $(VENV_BIN)/codecov
     @echo

@@ -86,15 +140,21 @@ coveralls:
 ###############################################################################

+build:
+    rm -rf build/
+    $(VENV_PYTHON) setup.py sdist bdist_wheel
+
 publish: test-all publish-no-test

 publish-no-test:
-    @echo $(TAG)Testing wheel build an installation$(END)
+    @echo $(H1)Testing wheel build an installation$(H1END)
     @echo "$(VERSION)"
     @echo "$(VERSION)" | grep -q "dev" && echo '!!!Not publishing dev version!!!' && exit 1 || echo ok
-    python setup.py sdist bdist_wheel
-    twine upload dist/*
+    make build
+    make twine-check
+    $(VENV_BIN)/twine upload --repository=httpie dist/*
     @echo

@@ -104,40 +164,16 @@ publish-no-test:
 ###############################################################################

 uninstall-httpie:
-    @echo $(TAG)Uninstalling httpie$(END)
-    - pip uninstall --yes httpie &2>/dev/null
+    @echo $(H1)Uninstalling httpie$(H1END)
+    - $(VENV_PIP) uninstall --yes httpie &2>/dev/null

     @echo "Verifying…"
-    cd .. && ! python -m httpie --version &2>/dev/null
+    cd .. && ! $(VENV_PYTHON) -m httpie --version &2>/dev/null

     @echo "Done"
     @echo

-
-uninstall-all: uninstall-httpie
-
-    @echo $(TAG)Uninstalling httpie requirements$(END)
-    - pip uninstall --yes pygments requests
-
-    @echo $(TAG)Uninstalling development requirements$(END)
-    - pip uninstall --yes -r $(REQUIREMENTS)
-
-
-###############################################################################
-# Docs
-###############################################################################
-
-pdf:
-    # NOTE: rst2pdf needs to be installed manually and against a Python 2
-    @echo "Converting README.rst to PDF…"
-    rst2pdf \
-        --strip-elements-with-class=no-pdf \
-        README.rst \
-        -o README.pdf
-    @echo "Done"
-    @echo
-
 ###############################################################################
 # Homebrew
 ###############################################################################

README.md (new file, 84 lines)

<br/>
<a href="https://httpie.io" target="blank_">
    <img height="100" alt="HTTPie" src="https://raw.githubusercontent.com/httpie/httpie/master/docs/httpie-logo.svg" />
</a>
<br/>

# HTTPie: human-friendly CLI HTTP client for the API era

HTTPie (pronounced _aitch-tee-tee-pie_) is a command-line HTTP client.
Its goal is to make CLI interaction with web services as human-friendly as possible.
HTTPie is designed for testing, debugging, and generally interacting with APIs & HTTP servers.
The `http` & `https` commands allow for creating and sending arbitrary HTTP requests.
They use simple and natural syntax and provide formatted and colorized output.

[Documentation](https://httpie.org/docs) ·
[PyPI](https://pypi.python.org/pypi/httpie) ·
[Build status](https://github.com/httpie/httpie/actions) ·
[Coverage](https://codecov.io/gh/httpie/httpie) ·
[Twitter](https://twitter.com/httpie) ·
[Chat](https://httpie.io/chat)

<img src="https://raw.githubusercontent.com/httpie/httpie/master/docs/httpie-animation.gif" alt="HTTPie in action" width="100%"/>

## Getting started

- [Installation instructions →](https://httpie.io/docs#installation)
- [Full documentation →](https://httpie.io/docs)

## Features

- Expressive and intuitive syntax
- Formatted and colorized terminal output
- Built-in JSON support
- Forms and file uploads
- HTTPS, proxies, and authentication
- Arbitrary request data
- Custom headers
- Persistent sessions
- `wget`-like downloads

[See all features →](https://httpie.io/docs)

## Examples

Hello World:

```
$ https httpie.io/hello
```

Custom [HTTP method](https://httpie.io/docs#http-method), [HTTP headers](https://httpie.io/docs#http-headers) and [JSON](https://httpie.io/docs#json) data:

```
$ http PUT pie.dev/put X-API-Token:123 name=John
```

Build and print a request without sending it using [offline mode](https://httpie.io/docs#offline-mode):

```
$ http --offline pie.dev/post hello=offline
```

Use [GitHub API](https://developer.github.com/v3/issues/comments/#create-a-comment) to post a comment on an [Issue](https://github.com/httpie/httpie/issues/83) with [authentication](https://httpie.io/docs#authentication):

```
$ http -a USERNAME POST https://api.github.com/repos/httpie/httpie/issues/83/comments body='HTTPie is awesome! :heart:'
```
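
Two more illustrative examples based on the feature list above, a `wget`-like download and a persistent session (the session file name and custom header are placeholders):

```
$ http --download httpie.io/hello

$ http --session=./session.json pie.dev/headers X-Demo-Header:value
```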

[See more examples →](https://httpie.io/docs#examples)

## Community & support

- Visit the [HTTPie website](https://httpie.io) for full documentation and useful links.
- Join our [Discord server](https://httpie.io/chat) to ask questions, discuss features, and chat about APIs in general.
- Tweet at [@httpie](https://twitter.com/httpie) on Twitter.
- Use [StackOverflow](https://stackoverflow.com/questions/tagged/httpie) to ask questions and include the `httpie` tag.
- Create [GitHub Issues](https://github.com/httpie/httpie/issues) for bug reports and feature requests.
- Subscribe to the [HTTPie newsletter](https://httpie.io) for occasional updates.

## Contributing

Have a look through existing [Issues](https://github.com/httpie/httpie/issues) and [Pull Requests](https://github.com/httpie/httpie/pulls) that you could help with. If you'd like to request a feature or report a bug, please [create a GitHub Issue](https://github.com/httpie/httpie/issues) using one of the templates provided.

[See contribution guide →](https://github.com/httpie/httpie/blob/master/CONTRIBUTING.md)

README.rst (1753 lines; file diff suppressed because it is too large)

docs/README.md (new file, 1683 lines; file diff suppressed because it is too large)

(binary image file: 1019 KiB before and after; rendered preview omitted)

docs/httpie-logo.svg (new file, 1 line):
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 1635.31 470"><defs><style>.cls-1{fill:#4b78e6;}</style></defs><g id="Layer_2" data-name="Layer 2"><g id="Layer_1-2" data-name="Layer 1"><path class="cls-1" d="M1322.19,73.91h0a36.56,36.56,0,0,1,36.56-36.29h3.41a36.56,36.56,0,0,1,36.56,36.83h0a36.56,36.56,0,0,1-36.56,36.29h-3.41A36.56,36.56,0,0,1,1322.19,73.91Zm6.16,276.93V142.35a7.94,7.94,0,0,1,8-7.94h48.32a7.93,7.93,0,0,1,7.94,7.94V350.84a7.94,7.94,0,0,1-7.94,7.94H1336.3A8,8,0,0,1,1328.35,350.84Z"/><path class="cls-1" d="M1635.31,233.34c0-61.06-33.28-105.09-101.71-105.09-72.17,0-114.82,45.45-114.82,123.08,0,74.79,46.86,113.6,113.89,113.6,56.83,0,85.93-27.17,98.33-63.86a8,8,0,0,0-5.34-10.28l-40.32-11.39a8,8,0,0,0-9.54,4.73c-5.77,14.37-16.57,25.42-42.2,25.42-29.32,0-46.06-13.62-50.74-44.29a7.17,7.17,0,0,0,.81.08h143.7a8,8,0,0,0,7.94-7.94V242.23c0-.09,0-.18,0-.28C1635.31,239.17,1635.31,236.36,1635.31,233.34Zm-103.58-51.6c28.59,0,43.12,15.15,45,45H1483C1487.21,195,1503.61,181.74,1531.73,181.74Z"/><path class="cls-1" d="M581.91,358.75H533.56a7.93,7.93,0,0,1-7.94-7.94V76.39a7.93,7.93,0,0,1,7.94-7.94h48.35a7.93,7.93,0,0,1,7.94,7.94v84.66a6,6,0,0,0,11.22,2.77c13.45-25.56,34.68-35.33,60.42-35.69,38.66-.55,70,31.45,70,70.12V350.81a7.94,7.94,0,0,1-7.94,7.94H675.63a7.94,7.94,0,0,1-7.94-7.94V227.1c0-23.21-10.32-40.73-37-40.73-25.79,0-40.8,15.15-40.8,40.73V350.81A7.93,7.93,0,0,1,581.91,358.75Z"/><path class="cls-1" d="M1052.84,306.12a7.94,7.94,0,0,0-9.77-6.78c-6.47,1.55-13.73,3.05-20.35,3.05-19.23,0-25.79-8.52-25.79-26.52V188.26h50.67a7.94,7.94,0,0,0,7.94-7.94v-38.1a8,8,0,0,0-7.94-7.95H996.93V85.86A7.94,7.94,0,0,0,989,77.92H941.1a7.93,7.93,0,0,0-7.94,7.94v48.41H842.67V85.86a7.94,7.94,0,0,0-7.94-7.94H786.84a7.93,7.93,0,0,0-7.94,7.94v48.41H761.05a7.94,7.94,0,0,0-7.94,7.95v38.1a7.93,7.93,0,0,0,7.94,7.94H778.9v99.93c0,42.62,21.57,77.19,73.15,77.19,21.16,0,32.43-2.5,46.08-6.56a8,8,0,0,0,5.65-8.56l-5.2-44.14a7.94,7.94,0,0,0-9.77-6.78c-6.47,1.55-13.73,3.05-20.35,3.05-19.23,0-25.79-8.52-25.79-26.52V188.26h90.49v99.93c0,42.62,21.57,77.19,73.14,77.19,21.17,0,32.44-2.5,46.09-6.56a8,8,0,0,0,5.65-8.56Z"/><path class="cls-1" d="M1219.14,365.27c-28.49,0-49.51-10.92-62.87-35.86a6,6,0,0,0-11.19,2.84v82.83a7.93,7.93,0,0,1-7.94,7.94h-48.32a7.94,7.94,0,0,1-8-7.94V142.21a8,8,0,0,1,8-7.94h48.32a7.94,7.94,0,0,1,7.94,7.94v18.95c0,6.13,8.21,8.3,11.15,2.92,13.74-25.16,35.63-36,64.31-36,53.43,0,81.08,44,81.08,116.92C1301.62,320.78,1273,365.27,1219.14,365.27Zm19.21-119.76c0-37.39-14.06-59.17-46.4-59.17-29.53,0-46.87,20.35-46.87,57.28v4.26c0,36.45,17.34,59.17,46.87,59.17C1223.82,307.05,1238.35,284.33,1238.35,245.51Z"/><path class="cls-1" d="M394.41,102.12C394,45.31,346.61,0,289.8,0H104.69C48.31,0,1.09,44.6,0,101A103.07,103.07,0,0,0,103,205.92h82.7a6,6,0,0,1,2.39,11.42L61.31,272.91A103.09,103.09,0,0,0,0,367.83C.43,424.65,47.79,470,104.62,470H148c57.23,0,104.88-45.9,104.79-103.13a103.1,103.1,0,0,0-64.49-95.31,5.94,5.94,0,0,1-.1-10.94l145-63.58A103.08,103.08,0,0,0,394.41,102.12Z"/></g></g></svg>

@@ -12,23 +12,39 @@ import hashlib
 import requests


+VERSIONS = {
+    # By default, we use the latest packages. But sometimes Requests has a maximum supported versions.
+    # Take a look here before making a release: <https://github.com/psf/requests/blob/master/setup.py>
+    'idna': '2.10',
+}
+
 PACKAGES = [
     'httpie',
     'Pygments',
     'requests',
+    'requests-toolbelt',
     'certifi',
     'urllib3',
     'idna',
     'chardet',
     'PySocks',
+    'defusedxml',
 ]


 def get_package_meta(package_name):
-    api_url = 'https://pypi.python.org/pypi/{}/json'.format(package_name)
+    api_url = f'https://pypi.org/pypi/{package_name}/json'
     resp = requests.get(api_url).json()
     hasher = hashlib.sha256()
-    for release in resp['urls']:
+    version = VERSIONS.get(package_name)
+    if package_name not in VERSIONS:
+        # Latest version
+        release_bundle = resp['urls']
+    else:
+        release_bundle = resp['releases'][version]
+
+    for release in release_bundle:
         download_url = release['url']
         if download_url.endswith('.tar.gz'):
             hasher.update(requests.get(download_url).content)
@@ -38,8 +54,7 @@ def get_package_meta(package_name):
             'sha256': hasher.hexdigest(),
         }
     else:
-        raise RuntimeError(
-            '{}: download not found: {}'.format(package_name, resp))
+        raise RuntimeError(f'{package_name}: download not found: {resp}')


 def main():
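
The script above reads the PyPI JSON endpoint `https://pypi.org/pypi/<package>/json` and hashes the `.tar.gz` downloads listed under its `urls`/`releases` keys; you can inspect that payload manually with HTTPie itself:

```bash
# The same endpoint the script queries (idna is one of the listed packages)
$ https pypi.org/pypi/idna/json
```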

@@ -1,11 +1,8 @@
-#!/usr/bin/env bash
-
-
 _http_complete() {
     local cur_word=${COMP_WORDS[COMP_CWORD]}
     local prev_word=${COMP_WORDS[COMP_CWORD - 1]}

     if [[ "$cur_word" == -* ]]; then
         _http_complete_options "$cur_word"
     fi
 }
@@ -18,6 +15,6 @@ _http_complete_options() {
     -v --verbose -h --headers -b --body -S --stream -o --output -d --download
     -c --continue --session --session-read-only -a --auth --auth-type --proxy
     --follow --verify --cert --cert-key --timeout --check-status --ignore-stdin
-    --help --version --traceback --debug"
+    --help --version --traceback --debug --raw"
     COMPREPLY=( $( compgen -W "$options" -- "$cur_word" ) )
 }
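
To try the completion function above in your current shell, you can source the script (the `extras/httpie-completion.bash` path is an assumption, not shown in this diff) and then complete `http` options with Tab:

```bash
# Assumed location of the script shown above
$ source extras/httpie-completion.bash
# then type `http --` and press Tab to list the available options
```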

@@ -1,59 +1,137 @@
-function __fish_httpie_auth_types
-    echo "basic"\t"Basic HTTP auth"
-    echo "digest"\t"Digest HTTP auth"
-end
-
 function __fish_httpie_styles
-    echo "autumn"
-    echo "borland"
-    echo "bw"
-    echo "colorful"
-    echo "default"
-    echo "emacs"
-    echo "friendly"
-    echo "fruity"
-    echo "igor"
-    echo "manni"
-    echo "monokai"
-    echo "murphy"
-    echo "native"
-    echo "paraiso-dark"
-    echo "paraiso-light"
-    echo "pastie"
-    echo "perldoc"
-    echo "rrt"
-    echo "solarized"
-    echo "tango"
-    echo "trac"
-    echo "vim"
-    echo "vs"
-    echo "xcode"
+    echo "
+abap
+algol
+algol_nu
+arduino
+auto
+autumn
+borland
+bw
+colorful
+default
+emacs
+friendly
+fruity
+gruvbox-dark
+gruvbox-light
+igor
+inkpot
+lovelace
+manni
+material
+monokai
+murphy
+native
+paraiso-dark
+paraiso-light
+pastie
+perldoc
+rainbow_dash
+rrt
+sas
+solarized
+solarized-dark
+solarized-light
+stata
+stata-dark
+stata-light
+tango
+trac
+vim
+vs
+xcode
+zenburn"
 end

-complete -x -c http -s s -l style -d 'Output coloring style (default is "monokai")' -A -a '(__fish_httpie_styles)'
-complete -c http -s f -l form -d 'Data items from the command line are serialized as form fields'
-complete -c http -s j -l json -d '(default) Data items from the command line are serialized as a JSON object'
-complete -x -c http -l pretty -d 'Controls output processing' -a "all colors format none" -A
-complete -x -c http -s p -l print -d 'String specifying what the output should contain'
-complete -c http -s v -l verbose -d 'Print the whole request as well as the response'
-complete -c http -s h -l headers -d 'Print only the response headers'
-complete -c http -s b -l body -d 'Print only the response body'
-complete -c http -s S -l stream -d 'Always stream the output by line'
-complete -c http -s o -l output -d 'Save output to FILE'
-complete -c http -s d -l download -d 'Do not print the response body to stdout'
-complete -c http -s c -l continue -d 'Resume an interrupted download'
-complete -x -c http -l session -d 'Create, or reuse and update a session'
-complete -x -c http -s a -l auth -d 'If only the username is provided (-a username), HTTPie will prompt for the password'
-complete -x -c http -l auth-type -d 'The authentication mechanism to be used' -a '(__fish_httpie_auth_types)' -A
-complete -x -c http -l proxy -d 'String mapping protocol to the URL of the proxy'
-complete -c http -l follow -d 'Allow full redirects'
-complete -x -c http -l verify -d 'SSL cert verification'
-complete -c http -l cert -d 'SSL cert'
-complete -c http -l cert-key -d 'Private SSL cert key'
-complete -x -c http -l timeout -d 'Connection timeout in seconds'
-complete -c http -l check-status -d 'Error with non-200 HTTP status code'
-complete -c http -l ignore-stdin -d 'Do not attempt to read stdin'
-complete -c http -l help -d 'Show help'
-complete -c http -l version -d 'Show version'
-complete -c http -l traceback -d 'Prints exception traceback should one occur'
-complete -c http -l debug -d 'Show debugging information'
+function __fish_httpie_auth_types
+    echo -e "basic\tBasic HTTP auth"
+    echo -e "digest\tDigest HTTP auth"
+end
+
+function __fish_http_verify_options
+    echo -e "yes\tEnable cert verification"
+    echo -e "no\tDisable cert verification"
+end
+
+
+# Predefined Content Types
+
+complete -c http -s j -l json -d 'Data items are serialized as a JSON object'
+complete -c http -s f -l form -d 'Data items are serialized as form fields'
+complete -c http -l multipart -d 'Always sends a multipart/form-data request'
+complete -c http -l boundary -x -d 'Custom boundary string for multipart/form-data requests'
+complete -c http -l raw -x -d 'Pass raw request data without extra processing'
+
+
+# Content Processing Options
+
+complete -c http -s x -l compress -d 'Content compressed with Deflate algorithm'
+
+
+# Output Processing
+
+complete -c http -l pretty -xa "all colors format none" -d 'Controls output processing'
+complete -c http -s s -l style -xa "(__fish_httpie_styles)" -d 'Output coloring style'
+complete -c http -l unsorted -d 'Disables all sorting while formatting output'
+complete -c http -l sorted -d 'Re-enables all sorting options while formatting output'
+complete -c http -l format-options -x -d 'Controls output formatting'
+
+
+# Output Options
+
+complete -c http -s p -l print -x -d 'String specifying what the output should contain'
+complete -c http -s h -l headers -d 'Print only the response headers'
+complete -c http -s b -l body -d 'Print only the response body'
+complete -c http -s v -l verbose -d 'Print the whole request as well as the response'
+complete -c http -l all -d 'Show any intermediary requests/responses'
+complete -c http -s P -l history-print -x -d 'The same as --print but applies only to intermediary requests/responses'
+complete -c http -s S -l stream -d 'Always stream the response body by line'
+complete -c http -s o -l output -F -d 'Save output to FILE'
+complete -c http -s d -l download -d 'Download a file'
+complete -c http -s c -l continue -d 'Resume an interrupted download'
+complete -c http -s q -l quiet -d 'Do not print to stdout or stderr'
+
+
+# Sessions
+
+complete -c http -l session -F -d 'Create, or reuse and update a session'
+complete -c http -l session-read-only -F -d 'Create or read a session without updating it'
+
+
+# Authentication
+
+complete -c http -s a -l auth -x -d 'Username and password for authentication'
+complete -c http -s A -l auth-type -xa "(__fish_httpie_auth_types)" -d 'The authentication mechanism to be used'
+complete -c http -l ignore-netrc -d 'Ignore credentials from .netrc'
+
+
+# Network
+
+complete -c http -l offline -d 'Build the request and print it but don\'t actually send it'
+complete -c http -l proxy -x -d 'String mapping protocol to the URL of the proxy'
+complete -c http -s F -l follow -d 'Follow 30x Location redirects'
+complete -c http -l max-redirects -x -d 'Set maximum number of redirects'
+complete -c http -l max-headers -x -d 'Maximum number of response headers to be read before giving up'
+complete -c http -l timeout -x -d 'Connection timeout in seconds'
+complete -c http -l check-status -d 'Error with non-200 HTTP status code'
+complete -c http -l path-as-is -d 'Bypass dot segment URL squashing'
+complete -c http -l chunked -d ''
+
+
+# SSL
+
+complete -c http -l verify -xa "(__fish_http_verify_options)" -d 'Enable/disable cert verification'
+complete -c http -l ssl -x -d 'Desired protocol version to use'
+complete -c http -l ciphers -x -d 'String in the OpenSSL cipher list format'
+complete -c http -l cert -F -d 'Client side SSL certificate'
+complete -c http -l cert-key -F -d 'Private key to use with SSL'
+
+
+# Troubleshooting
+
+complete -c http -s I -l ignore-stdin -d 'Do not attempt to read stdin'
+complete -c http -l help -d 'Show help'
+complete -c http -l version -d 'Show version'
+complete -c http -l traceback -d 'Prints exception traceback should one occur'
+complete -c http -l default-scheme -x -d 'The default scheme to use'
+complete -c http -l debug -d 'Show debugging output'
@@ -8,53 +8,61 @@ class Httpie < Formula
   include Language::Python::Virtualenv
 
   desc "User-friendly cURL replacement (command-line HTTP client)"
-  homepage "https://httpie.org/"
-  url "https://files.pythonhosted.org/packages/09/8d/581ef7bd9a09dc30b621638a4fa805a2073bbfb45fa06ed37f998f172419/httpie-1.0.2.tar.gz"
-  sha256 "fc676c85febdf3d80abc1ef6fa71ec3764d8b838806a7ae4e55e5e5aa014a2ab"
-  head "https://github.com/jakubroztocil/httpie.git"
+  homepage "https://httpie.io/"
+  url "https://files.pythonhosted.org/packages/17/3a/90fb6702e600f5ba7d38d147bbc0b0a1e47159e3e244737319c98c140420/httpie-2.4.0.tar.gz"
+  sha256 "4d1bf5779cf6c9007351cfcaa20bd19947267dc026af09246db6006a8927d8c6"
+  license "BSD-3-Clause"
+  head "https://github.com/httpie/httpie.git"
 
   bottle do
-    cellar :any_skip_relocation
-    sha256 "158258be68ac93de13860be2bef02da6fd8b68aa24b2e6609bcff1ec3f93e7a0" => :mojave
-    sha256 "54352116b6fa2c3bd65f26136fdcb57986dbff8a52de5febf7aea59c126d29e1" => :high_sierra
-    sha256 "9cce71768fe388808e11b26d651b44a6b54219f5406845b4273b5099f5c1f76f" => :sierra
+    rebuild 1
+    sha256 cellar: :any_skip_relocation, arm64_big_sur: "a01ce8767f6ea88eb8e7894347ba64eb29294053a8ee91eed44dfaf0ab5e7ea2"
+    sha256 cellar: :any_skip_relocation, big_sur: "bdffeff349595ed3c528ed791d568e308b0877246b49e05e867143ba3415a70f"
+    sha256 cellar: :any_skip_relocation, catalina: "ba0627d70f0ee49c64677f5554881ebd56371f47d45196b6564680089ce69152"
+    sha256 cellar: :any_skip_relocation, mojave: "0b87901e88bdcf53c55c5138677087b4621c5aaf1fca67b53b730d5a2fd5a40a"
+    sha256 cellar: :any_skip_relocation, high_sierra: "87e7348b6fb40fd8e4f7597937952469601962189e62d321b8cb4fa421e035ef"
   end
 
-  depends_on "python"
+  depends_on "python@3.9"
 
   resource "Pygments" do
-    url "https://files.pythonhosted.org/packages/64/69/413708eaf3a64a6abb8972644e0f20891a55e621c6759e2c3f3891e05d63/Pygments-2.3.1.tar.gz"
-    sha256 "5ffada19f6203563680669ee7f53b64dabbeb100eb51b61996085e99c03b284a"
+    url "https://files.pythonhosted.org/packages/e1/86/8059180e8217299079d8719c6e23d674aadaba0b1939e25e0cc15dcf075b/Pygments-2.7.4.tar.gz"
+    sha256 "df49d09b498e83c1a73128295860250b0b7edd4c723a32e9bc0d295c7c2ec337"
   end
 
   resource "requests" do
-    url "https://files.pythonhosted.org/packages/52/2c/514e4ac25da2b08ca5a464c50463682126385c4272c18193876e91f4bc38/requests-2.21.0.tar.gz"
-    sha256 "502a824f31acdacb3a35b6690b5fbf0bc41d63a24a45c4004352b0242707598e"
+    url "https://files.pythonhosted.org/packages/6b/47/c14abc08432ab22dc18b9892252efaf005ab44066de871e72a38d6af464b/requests-2.25.1.tar.gz"
+    sha256 "27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804"
   end
 
+  resource "requests-toolbelt" do
+    url "https://files.pythonhosted.org/packages/28/30/7bf7e5071081f761766d46820e52f4b16c8a08fef02d2eb4682ca7534310/requests-toolbelt-0.9.1.tar.gz"
+    sha256 "968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0"
+  end
+
   resource "certifi" do
-    url "https://files.pythonhosted.org/packages/55/54/3ce77783acba5979ce16674fc98b1920d00b01d337cfaaf5db22543505ed/certifi-2018.11.29.tar.gz"
-    sha256 "47f9c83ef4c0c621eaef743f133f09fa8a74a9b75f037e8624f83bd1b6626cb7"
+    url "https://files.pythonhosted.org/packages/06/a9/cd1fd8ee13f73a4d4f491ee219deeeae20afefa914dfb4c130cfc9dc397a/certifi-2020.12.5.tar.gz"
+    sha256 "1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c"
   end
 
   resource "urllib3" do
-    url "https://files.pythonhosted.org/packages/b1/53/37d82ab391393565f2f831b8eedbffd57db5a718216f82f1a8b4d381a1c1/urllib3-1.24.1.tar.gz"
-    sha256 "de9529817c93f27c8ccbfead6985011db27bd0ddfcdb2d86f3f663385c6a9c22"
+    url "https://files.pythonhosted.org/packages/d7/8d/7ee68c6b48e1ec8d41198f694ecdc15f7596356f2ff8e6b1420300cf5db3/urllib3-1.26.3.tar.gz"
+    sha256 "de3eedaad74a2683334e282005cd8d7f22f4d55fa690a2a1020a416cb0a47e73"
   end
 
   resource "idna" do
-    url "https://files.pythonhosted.org/packages/ad/13/eb56951b6f7950cadb579ca166e448ba77f9d24efc03edd7e55fa57d04b7/idna-2.8.tar.gz"
-    sha256 "c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407"
+    url "https://files.pythonhosted.org/packages/ea/b7/e0e3c1c467636186c39925827be42f16fee389dc404ac29e930e9136be70/idna-2.10.tar.gz"
+    sha256 "b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6"
   end
 
   resource "chardet" do
-    url "https://files.pythonhosted.org/packages/fc/bb/a5768c230f9ddb03acc9ef3f0d4a3cf93462473795d18e9535498c8f929d/chardet-3.0.4.tar.gz"
-    sha256 "84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae"
+    url "https://files.pythonhosted.org/packages/ee/2d/9cdc2b527e127b4c9db64b86647d567985940ac3698eeabc7ffaccb4ea61/chardet-4.0.0.tar.gz"
+    sha256 "0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa"
   end
 
   resource "PySocks" do
-    url "https://files.pythonhosted.org/packages/53/12/6bf1d764f128636cef7408e8156b7235b150ea31650d0260969215bb8e7d/PySocks-1.6.8.tar.gz"
-    sha256 "3fe52c55890a248676fd69dc9e3c4e811718b777834bcaab7a8125cf9deac672"
+    url "https://files.pythonhosted.org/packages/bd/11/293dd436aea955d45fc4e8a35b6ae7270f5b8e00b53cf6c024c83b657a11/PySocks-1.7.1.tar.gz"
+    sha256 "3f8804571ebe159c380ac6de37643bb4685970655d3bba243530d6558b799aa0"
   end
 
   def install
@@ -62,7 +70,7 @@ class Httpie < Formula
   end
 
   test do
-    raw_url = "https://raw.githubusercontent.com/Homebrew/homebrew-core/master/Formula/httpie.rb"
+    raw_url = "https://raw.githubusercontent.com/Homebrew/homebrew-core/HEAD/Formula/httpie.rb"
     assert_match "PYTHONPATH", shell_output("#{bin}/http --ignore-stdin #{raw_url}")
   end
 end
httpie.png: binary image changed; content not shown (previous size: 681 KiB).
httpie/__init__.py
@@ -1,32 +1,8 @@
 """
-HTTPie - a CLI, cURL-like tool for humans.
+HTTPie: command-line HTTP client for the API era.
 
 """
-__version__ = '1.0.3'
+
+__version__ = '2.5.0'
 __author__ = 'Jakub Roztocil'
 __licence__ = 'BSD'
-
-
-class ExitStatus:
-    """Program exit code constants."""
-    SUCCESS = 0
-    ERROR = 1
-    PLUGIN_ERROR = 7
-
-    # 128+2 SIGINT <http://www.tldp.org/LDP/abs/html/exitcodes.html>
-    ERROR_CTRL_C = 130
-
-    ERROR_TIMEOUT = 2
-    ERROR_TOO_MANY_REDIRECTS = 6
-
-    # Used only when requested with --check-status:
-    ERROR_HTTP_3XX = 3
-    ERROR_HTTP_4XX = 4
-    ERROR_HTTP_5XX = 5
-
-
-EXIT_STATUS_LABELS = {
-    value: key
-    for key, value in ExitStatus.__dict__.items()
-    if key.isupper()
-}
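The ExitStatus constants removed here do not go away: the httpie/__main__.py diff that follows imports them with `from httpie.status import ExitStatus` and returns `exit_status.value`, which suggests they now live in a dedicated httpie/status.py module as an enum. A minimal sketch of that module under this assumption; the file itself is not part of this excerpt, so its exact shape is inferred from the removed class above:

    # Hypothetical sketch of httpie/status.py -- inferred, not shown in this diff.
    import enum

    class ExitStatus(enum.IntEnum):
        """Program exit status code constants."""
        SUCCESS = 0
        ERROR = 1
        ERROR_TIMEOUT = 2
        ERROR_HTTP_3XX = 3           # only with --check-status
        ERROR_HTTP_4XX = 4           # only with --check-status
        ERROR_HTTP_5XX = 5           # only with --check-status
        ERROR_TOO_MANY_REDIRECTS = 6
        PLUGIN_ERROR = 7
        ERROR_CTRL_C = 130           # 128 + 2 (SIGINT)

    print(ExitStatus.ERROR_CTRL_C.value)  # 130 -- what main() below returns on Ctrl-C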
httpie/__main__.py
@@ -1,18 +1,19 @@
-#!/usr/bin/env python
 """The main entry point. Invoke as `http' or `python -m httpie'.
 
 """
-import sys
 
 
 def main():
     try:
-        from .core import main
-        sys.exit(main())
+        from httpie.core import main
+        exit_status = main()
     except KeyboardInterrupt:
-        from . import ExitStatus
-        sys.exit(ExitStatus.ERROR_CTRL_C)
+        from httpie.status import ExitStatus
+        exit_status = ExitStatus.ERROR_CTRL_C
+
+    return exit_status.value
 
 
-if __name__ == '__main__':
-    main()
+if __name__ == '__main__':  # pragma: nocover
+    import sys
+    sys.exit(main())
httpie/cli/__init__.py (new file, 0 lines)

httpie/cli/argparser.py (new file, 463 lines)
@@ -0,0 +1,463 @@
|
|||||||
|
import argparse
|
||||||
|
import errno
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
from argparse import RawDescriptionHelpFormatter
|
||||||
|
from textwrap import dedent
|
||||||
|
from urllib.parse import urlsplit
|
||||||
|
|
||||||
|
from requests.utils import get_netrc_auth
|
||||||
|
|
||||||
|
from .argtypes import (
|
||||||
|
AuthCredentials, KeyValueArgType, PARSED_DEFAULT_FORMAT_OPTIONS,
|
||||||
|
parse_auth,
|
||||||
|
parse_format_options,
|
||||||
|
)
|
||||||
|
from .constants import (
|
||||||
|
HTTP_GET, HTTP_POST, OUTPUT_OPTIONS, OUTPUT_OPTIONS_DEFAULT,
|
||||||
|
OUTPUT_OPTIONS_DEFAULT_OFFLINE, OUTPUT_OPTIONS_DEFAULT_STDOUT_REDIRECTED,
|
||||||
|
OUT_RESP_BODY, PRETTY_MAP, PRETTY_STDOUT_TTY_ONLY, RequestType,
|
||||||
|
SEPARATOR_CREDENTIALS,
|
||||||
|
SEPARATOR_GROUP_ALL_ITEMS, SEPARATOR_GROUP_DATA_ITEMS, URL_SCHEME_RE,
|
||||||
|
)
|
||||||
|
from .exceptions import ParseError
|
||||||
|
from .requestitems import RequestItems
|
||||||
|
from ..context import Environment
|
||||||
|
from ..plugins.registry import plugin_manager
|
||||||
|
from ..utils import ExplicitNullAuth, get_content_type
|
||||||
|
|
||||||
|
|
||||||
|
class HTTPieHelpFormatter(RawDescriptionHelpFormatter):
|
||||||
|
"""A nicer help formatter.
|
||||||
|
|
||||||
|
Help for arguments can be indented and contain new lines.
|
||||||
|
It will be de-dented and arguments in the help
|
||||||
|
will be separated by a blank line for better readability.
|
||||||
|
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, max_help_position=6, *args, **kwargs):
|
||||||
|
# A smaller indent for args help.
|
||||||
|
kwargs['max_help_position'] = max_help_position
|
||||||
|
super().__init__(*args, **kwargs)
|
||||||
|
|
||||||
|
def _split_lines(self, text, width):
|
||||||
|
text = dedent(text).strip() + '\n\n'
|
||||||
|
return text.splitlines()
|
||||||
|
|
||||||
|
|
||||||
|
# TODO: refactor and design type-annotated data structures
|
||||||
|
# for raw args + parsed args and keep things immutable.
|
||||||
|
class HTTPieArgumentParser(argparse.ArgumentParser):
|
||||||
|
"""Adds additional logic to `argparse.ArgumentParser`.
|
||||||
|
|
||||||
|
Handles all input (CLI args, file args, stdin), applies defaults,
|
||||||
|
and performs extra validation.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, *args, formatter_class=HTTPieHelpFormatter, **kwargs):
|
||||||
|
kwargs['add_help'] = False
|
||||||
|
super().__init__(*args, formatter_class=formatter_class, **kwargs)
|
||||||
|
self.env = None
|
||||||
|
self.args = None
|
||||||
|
self.has_stdin_data = False
|
||||||
|
self.has_input_data = False
|
||||||
|
|
||||||
|
# noinspection PyMethodOverriding
|
||||||
|
def parse_args(
|
||||||
|
self,
|
||||||
|
env: Environment,
|
||||||
|
args=None,
|
||||||
|
namespace=None
|
||||||
|
) -> argparse.Namespace:
|
||||||
|
self.env = env
|
||||||
|
self.args, no_options = super().parse_known_args(args, namespace)
|
||||||
|
if self.args.debug:
|
||||||
|
self.args.traceback = True
|
||||||
|
self.has_stdin_data = (
|
||||||
|
self.env.stdin
|
||||||
|
and not self.args.ignore_stdin
|
||||||
|
and not self.env.stdin_isatty
|
||||||
|
)
|
||||||
|
self.has_input_data = self.has_stdin_data or self.args.raw is not None
|
||||||
|
# Arguments processing and environment setup.
|
||||||
|
self._apply_no_options(no_options)
|
||||||
|
self._process_request_type()
|
||||||
|
self._process_download_options()
|
||||||
|
self._setup_standard_streams()
|
||||||
|
self._process_output_options()
|
||||||
|
self._process_pretty_options()
|
||||||
|
self._process_format_options()
|
||||||
|
self._guess_method()
|
||||||
|
self._parse_items()
|
||||||
|
self._process_url()
|
||||||
|
self._process_auth()
|
||||||
|
|
||||||
|
if self.args.raw is not None:
|
||||||
|
self._body_from_input(self.args.raw)
|
||||||
|
elif self.has_stdin_data:
|
||||||
|
self._body_from_file(self.env.stdin)
|
||||||
|
|
||||||
|
if self.args.compress:
|
||||||
|
# TODO: allow --compress with --chunked / --multipart
|
||||||
|
if self.args.chunked:
|
||||||
|
self.error('cannot combine --compress and --chunked')
|
||||||
|
if self.args.multipart:
|
||||||
|
self.error('cannot combine --compress and --multipart')
|
||||||
|
|
||||||
|
return self.args
|
||||||
|
|
||||||
|
def _process_request_type(self):
|
||||||
|
request_type = self.args.request_type
|
||||||
|
self.args.json = request_type is RequestType.JSON
|
||||||
|
self.args.multipart = request_type is RequestType.MULTIPART
|
||||||
|
self.args.form = request_type in {
|
||||||
|
RequestType.FORM,
|
||||||
|
RequestType.MULTIPART,
|
||||||
|
}
|
||||||
|
|
||||||
|
def _process_url(self):
|
||||||
|
if not URL_SCHEME_RE.match(self.args.url):
|
||||||
|
if os.path.basename(self.env.program_name) == 'https':
|
||||||
|
scheme = 'https://'
|
||||||
|
else:
|
||||||
|
scheme = self.args.default_scheme + '://'
|
||||||
|
|
||||||
|
# See if we're using curl style shorthand for localhost (:3000/foo)
|
||||||
|
shorthand = re.match(r'^:(?!:)(\d*)(/?.*)$', self.args.url)
|
||||||
|
if shorthand:
|
||||||
|
port = shorthand.group(1)
|
||||||
|
rest = shorthand.group(2)
|
||||||
|
self.args.url = scheme + 'localhost'
|
||||||
|
if port:
|
||||||
|
self.args.url += ':' + port
|
||||||
|
self.args.url += rest
|
||||||
|
else:
|
||||||
|
self.args.url = scheme + self.args.url
|
||||||
|
|
||||||
|
# noinspection PyShadowingBuiltins
|
||||||
|
def _print_message(self, message, file=None):
|
||||||
|
# Sneak in our stderr/stdout.
|
||||||
|
file = {
|
||||||
|
sys.stdout: self.env.stdout,
|
||||||
|
sys.stderr: self.env.stderr,
|
||||||
|
None: self.env.stderr
|
||||||
|
}.get(file, file)
|
||||||
|
if not hasattr(file, 'buffer') and isinstance(message, str):
|
||||||
|
message = message.encode(self.env.stdout_encoding)
|
||||||
|
super()._print_message(message, file)
|
||||||
|
|
||||||
|
def _setup_standard_streams(self):
|
||||||
|
"""
|
||||||
|
Modify `env.stdout` and `env.stdout_isatty` based on args, if needed.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
self.args.output_file_specified = bool(self.args.output_file)
|
||||||
|
if self.args.download:
|
||||||
|
# FIXME: Come up with a cleaner solution.
|
||||||
|
if not self.args.output_file and not self.env.stdout_isatty:
|
||||||
|
# Use stdout as the download output file.
|
||||||
|
self.args.output_file = self.env.stdout
|
||||||
|
# With `--download`, we write everything that would normally go to
|
||||||
|
# `stdout` to `stderr` instead. Let's replace the stream so that
|
||||||
|
# we don't have to use many `if`s throughout the codebase.
|
||||||
|
# The response body will be treated separately.
|
||||||
|
self.env.stdout = self.env.stderr
|
||||||
|
self.env.stdout_isatty = self.env.stderr_isatty
|
||||||
|
|
||||||
|
elif self.args.output_file:
|
||||||
|
# When not `--download`ing, then `--output` simply replaces
|
||||||
|
# `stdout`. The file is opened for appending, which isn't what
|
||||||
|
# we want in this case.
|
||||||
|
self.args.output_file.seek(0)
|
||||||
|
try:
|
||||||
|
self.args.output_file.truncate()
|
||||||
|
except OSError as e:
|
||||||
|
if e.errno == errno.EINVAL:
|
||||||
|
# E.g. /dev/null on Linux.
|
||||||
|
pass
|
||||||
|
else:
|
||||||
|
raise
|
||||||
|
self.env.stdout = self.args.output_file
|
||||||
|
self.env.stdout_isatty = False
|
||||||
|
|
||||||
|
if self.args.quiet:
|
||||||
|
self.env.stderr = self.env.devnull
|
||||||
|
if not (self.args.output_file_specified and not self.args.download):
|
||||||
|
self.env.stdout = self.env.devnull
|
||||||
|
|
||||||
|
def _process_auth(self):
|
||||||
|
# TODO: refactor & simplify this method.
|
||||||
|
self.args.auth_plugin = None
|
||||||
|
default_auth_plugin = plugin_manager.get_auth_plugins()[0]
|
||||||
|
auth_type_set = self.args.auth_type is not None
|
||||||
|
url = urlsplit(self.args.url)
|
||||||
|
|
||||||
|
if self.args.auth is None and not auth_type_set:
|
||||||
|
if url.username is not None:
|
||||||
|
# Handle http://username:password@hostname/
|
||||||
|
username = url.username
|
||||||
|
password = url.password or ''
|
||||||
|
self.args.auth = AuthCredentials(
|
||||||
|
key=username,
|
||||||
|
value=password,
|
||||||
|
sep=SEPARATOR_CREDENTIALS,
|
||||||
|
orig=SEPARATOR_CREDENTIALS.join([username, password])
|
||||||
|
)
|
||||||
|
|
||||||
|
if self.args.auth is not None or auth_type_set:
|
||||||
|
if not self.args.auth_type:
|
||||||
|
self.args.auth_type = default_auth_plugin.auth_type
|
||||||
|
plugin = plugin_manager.get_auth_plugin(self.args.auth_type)()
|
||||||
|
|
||||||
|
if (not self.args.ignore_netrc
|
||||||
|
and self.args.auth is None
|
||||||
|
and plugin.netrc_parse):
|
||||||
|
# Only the host is needed, so it’s OK that the URL isn’t finalized yet.
|
||||||
|
netrc_credentials = get_netrc_auth(self.args.url)
|
||||||
|
if netrc_credentials:
|
||||||
|
self.args.auth = AuthCredentials(
|
||||||
|
key=netrc_credentials[0],
|
||||||
|
value=netrc_credentials[1],
|
||||||
|
sep=SEPARATOR_CREDENTIALS,
|
||||||
|
orig=SEPARATOR_CREDENTIALS.join(netrc_credentials)
|
||||||
|
)
|
||||||
|
|
||||||
|
if plugin.auth_require and self.args.auth is None:
|
||||||
|
self.error('--auth required')
|
||||||
|
|
||||||
|
plugin.raw_auth = self.args.auth
|
||||||
|
self.args.auth_plugin = plugin
|
||||||
|
already_parsed = isinstance(self.args.auth, AuthCredentials)
|
||||||
|
|
||||||
|
if self.args.auth is None or not plugin.auth_parse:
|
||||||
|
self.args.auth = plugin.get_auth()
|
||||||
|
else:
|
||||||
|
if already_parsed:
|
||||||
|
# from the URL
|
||||||
|
credentials = self.args.auth
|
||||||
|
else:
|
||||||
|
credentials = parse_auth(self.args.auth)
|
||||||
|
|
||||||
|
if (not credentials.has_password()
|
||||||
|
and plugin.prompt_password):
|
||||||
|
if self.args.ignore_stdin:
|
||||||
|
# Non-tty stdin read by now
|
||||||
|
self.error(
|
||||||
|
'Unable to prompt for passwords because'
|
||||||
|
' --ignore-stdin is set.'
|
||||||
|
)
|
||||||
|
credentials.prompt_password(url.netloc)
|
||||||
|
self.args.auth = plugin.get_auth(
|
||||||
|
username=credentials.key,
|
||||||
|
password=credentials.value,
|
||||||
|
)
|
||||||
|
if not self.args.auth and self.args.ignore_netrc:
|
||||||
|
# Set a no-op auth to force requests to ignore .netrc
|
||||||
|
# <https://github.com/psf/requests/issues/2773#issuecomment-174312831>
|
||||||
|
self.args.auth = ExplicitNullAuth()
|
||||||
|
|
||||||
|
def _apply_no_options(self, no_options):
|
||||||
|
"""For every `--no-OPTION` in `no_options`, set `args.OPTION` to
|
||||||
|
its default value. This allows for un-setting of options, e.g.,
|
||||||
|
specified in config.
|
||||||
|
|
||||||
|
"""
|
||||||
|
invalid = []
|
||||||
|
|
||||||
|
for option in no_options:
|
||||||
|
if not option.startswith('--no-'):
|
||||||
|
invalid.append(option)
|
||||||
|
continue
|
||||||
|
|
||||||
|
# --no-option => --option
|
||||||
|
inverted = '--' + option[5:]
|
||||||
|
for action in self._actions:
|
||||||
|
if inverted in action.option_strings:
|
||||||
|
setattr(self.args, action.dest, action.default)
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
invalid.append(option)
|
||||||
|
|
||||||
|
if invalid:
|
||||||
|
self.error(f'unrecognized arguments: {" ".join(invalid)}')
|
||||||
|
|
||||||
|
def _body_from_file(self, fd):
|
||||||
|
"""Read the data from a file-like object.
|
||||||
|
|
||||||
|
Bytes are always read.
|
||||||
|
|
||||||
|
"""
|
||||||
|
self._ensure_one_data_source(self.args.data, self.args.files)
|
||||||
|
self.args.data = getattr(fd, 'buffer', fd)
|
||||||
|
|
||||||
|
def _body_from_input(self, data):
|
||||||
|
"""Read the data from the CLI.
|
||||||
|
|
||||||
|
"""
|
||||||
|
self._ensure_one_data_source(self.has_stdin_data, self.args.data,
|
||||||
|
self.args.files)
|
||||||
|
self.args.data = data.encode()
|
||||||
|
|
||||||
|
def _ensure_one_data_source(self, *other_sources):
|
||||||
|
"""There can only be one source of input request data.
|
||||||
|
|
||||||
|
"""
|
||||||
|
if any(other_sources):
|
||||||
|
self.error('Request body (from stdin, --raw or a file) and request '
|
||||||
|
'data (key=value) cannot be mixed. Pass '
|
||||||
|
'--ignore-stdin to let key/value take priority. '
|
||||||
|
'See https://httpie.org/doc#scripting for details.')
|
||||||
|
|
||||||
|
def _guess_method(self):
|
||||||
|
"""Set `args.method` if not specified to either POST or GET
|
||||||
|
based on whether the request has data or not.
|
||||||
|
|
||||||
|
"""
|
||||||
|
if self.args.method is None:
|
||||||
|
# Invoked as `http URL'.
|
||||||
|
assert not self.args.request_items
|
||||||
|
if self.has_input_data:
|
||||||
|
self.args.method = HTTP_POST
|
||||||
|
else:
|
||||||
|
self.args.method = HTTP_GET
|
||||||
|
|
||||||
|
# FIXME: False positive, e.g., "localhost" matches but is a valid URL.
|
||||||
|
elif not re.match('^[a-zA-Z]+$', self.args.method):
|
||||||
|
# Invoked as `http URL item+'. The URL is now in `args.method`
|
||||||
|
# and the first ITEM is now incorrectly in `args.url`.
|
||||||
|
try:
|
||||||
|
# Parse the URL as an ITEM and store it as the first ITEM arg.
|
||||||
|
self.args.request_items.insert(0, KeyValueArgType(
|
||||||
|
*SEPARATOR_GROUP_ALL_ITEMS).__call__(self.args.url))
|
||||||
|
|
||||||
|
except argparse.ArgumentTypeError as e:
|
||||||
|
if self.args.traceback:
|
||||||
|
raise
|
||||||
|
self.error(e.args[0])
|
||||||
|
|
||||||
|
else:
|
||||||
|
# Set the URL correctly
|
||||||
|
self.args.url = self.args.method
|
||||||
|
# Infer the method
|
||||||
|
has_data = (
|
||||||
|
self.has_input_data
|
||||||
|
or any(
|
||||||
|
item.sep in SEPARATOR_GROUP_DATA_ITEMS
|
||||||
|
for item in self.args.request_items)
|
||||||
|
)
|
||||||
|
self.args.method = HTTP_POST if has_data else HTTP_GET
|
||||||
|
|
||||||
|
def _parse_items(self):
|
||||||
|
"""
|
||||||
|
Parse `args.request_items` into `args.headers`, `args.data`,
|
||||||
|
`args.params`, and `args.files`.
|
||||||
|
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
request_items = RequestItems.from_args(
|
||||||
|
request_item_args=self.args.request_items,
|
||||||
|
as_form=self.args.form,
|
||||||
|
)
|
||||||
|
except ParseError as e:
|
||||||
|
if self.args.traceback:
|
||||||
|
raise
|
||||||
|
self.error(e.args[0])
|
||||||
|
else:
|
||||||
|
self.args.headers = request_items.headers
|
||||||
|
self.args.data = request_items.data
|
||||||
|
self.args.files = request_items.files
|
||||||
|
self.args.params = request_items.params
|
||||||
|
self.args.multipart_data = request_items.multipart_data
|
||||||
|
|
||||||
|
if self.args.files and not self.args.form:
|
||||||
|
# `http url @/path/to/file`
|
||||||
|
request_file = None
|
||||||
|
for key, file in self.args.files.items():
|
||||||
|
if key != '':
|
||||||
|
self.error(
|
||||||
|
'Invalid file fields (perhaps you meant --form?):'
|
||||||
|
f' {",".join(self.args.files.keys())}')
|
||||||
|
if request_file is not None:
|
||||||
|
self.error("Can't read request from multiple files")
|
||||||
|
request_file = file
|
||||||
|
|
||||||
|
fn, fd, ct = request_file
|
||||||
|
self.args.files = {}
|
||||||
|
|
||||||
|
self._body_from_file(fd)
|
||||||
|
|
||||||
|
if 'Content-Type' not in self.args.headers:
|
||||||
|
content_type = get_content_type(fn)
|
||||||
|
if content_type:
|
||||||
|
self.args.headers['Content-Type'] = content_type
|
||||||
|
|
||||||
|
def _process_output_options(self):
|
||||||
|
"""Apply defaults to output options, or validate the provided ones.
|
||||||
|
|
||||||
|
The default output options are stdout-type-sensitive.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
def check_options(value, option):
|
||||||
|
unknown = set(value) - OUTPUT_OPTIONS
|
||||||
|
if unknown:
|
||||||
|
self.error(f'Unknown output options: {option}={",".join(unknown)}')
|
||||||
|
|
||||||
|
if self.args.verbose:
|
||||||
|
self.args.all = True
|
||||||
|
|
||||||
|
if self.args.output_options is None:
|
||||||
|
if self.args.verbose:
|
||||||
|
self.args.output_options = ''.join(OUTPUT_OPTIONS)
|
||||||
|
elif self.args.offline:
|
||||||
|
self.args.output_options = OUTPUT_OPTIONS_DEFAULT_OFFLINE
|
||||||
|
elif not self.env.stdout_isatty:
|
||||||
|
self.args.output_options = OUTPUT_OPTIONS_DEFAULT_STDOUT_REDIRECTED
|
||||||
|
else:
|
||||||
|
self.args.output_options = OUTPUT_OPTIONS_DEFAULT
|
||||||
|
|
||||||
|
if self.args.output_options_history is None:
|
||||||
|
self.args.output_options_history = self.args.output_options
|
||||||
|
|
||||||
|
check_options(self.args.output_options, '--print')
|
||||||
|
check_options(self.args.output_options_history, '--history-print')
|
||||||
|
|
||||||
|
if self.args.download and OUT_RESP_BODY in self.args.output_options:
|
||||||
|
# Response body is always downloaded with --download and it goes
|
||||||
|
# through a different routine, so we remove it.
|
||||||
|
self.args.output_options = str(
|
||||||
|
set(self.args.output_options) - set(OUT_RESP_BODY))
|
||||||
|
|
||||||
|
def _process_pretty_options(self):
|
||||||
|
if self.args.prettify == PRETTY_STDOUT_TTY_ONLY:
|
||||||
|
self.args.prettify = PRETTY_MAP[
|
||||||
|
'all' if self.env.stdout_isatty else 'none']
|
||||||
|
elif (self.args.prettify and self.env.is_windows
|
||||||
|
and self.args.output_file):
|
||||||
|
self.error('Only terminal output can be colorized on Windows.')
|
||||||
|
else:
|
||||||
|
# noinspection PyTypeChecker
|
||||||
|
self.args.prettify = PRETTY_MAP[self.args.prettify]
|
||||||
|
|
||||||
|
def _process_download_options(self):
|
||||||
|
if self.args.offline:
|
||||||
|
self.args.download = False
|
||||||
|
self.args.download_resume = False
|
||||||
|
return
|
||||||
|
if not self.args.download:
|
||||||
|
if self.args.download_resume:
|
||||||
|
self.error('--continue only works with --download')
|
||||||
|
if self.args.download_resume and not (
|
||||||
|
self.args.download and self.args.output_file):
|
||||||
|
self.error('--continue requires --output to be specified')
|
||||||
|
|
||||||
|
def _process_format_options(self):
|
||||||
|
parsed_options = PARSED_DEFAULT_FORMAT_OPTIONS
|
||||||
|
for options_group in self.args.format_options or []:
|
||||||
|
parsed_options = parse_format_options(options_group, defaults=parsed_options)
|
||||||
|
self.args.format_options = parsed_options
|
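The `_process_format_options` method above folds every `--format-options` occurrence over the built-in defaults, so later options override earlier ones. A minimal usage sketch of that folding, reusing `parse_format_options` and `PARSED_DEFAULT_FORMAT_OPTIONS` as they are defined in httpie/cli/argtypes.py further down in this diff:

    # Sketch: fold two --format-options values over the defaults.
    from httpie.cli.argtypes import PARSED_DEFAULT_FORMAT_OPTIONS, parse_format_options

    parsed = PARSED_DEFAULT_FORMAT_OPTIONS
    for group in ['json.indent:2', 'json.sort_keys:false']:  # e.g. two --format-options flags
        parsed = parse_format_options(group, defaults=parsed)

    print(parsed['json'])  # {'format': True, 'indent': 2, 'sort_keys': False}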
httpie/cli/argtypes.py (new file, 244 lines)
@@ -0,0 +1,244 @@
|
import argparse
|
||||||
|
import getpass
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
from copy import deepcopy
|
||||||
|
from typing import List, Optional, Union
|
||||||
|
|
||||||
|
from .constants import DEFAULT_FORMAT_OPTIONS, SEPARATOR_CREDENTIALS
|
||||||
|
from ..sessions import VALID_SESSION_NAME_PATTERN
|
||||||
|
|
||||||
|
|
||||||
|
class KeyValueArg:
|
||||||
|
"""Base key-value pair parsed from CLI."""
|
||||||
|
|
||||||
|
def __init__(self, key: str, value: Optional[str], sep: str, orig: str):
|
||||||
|
self.key = key
|
||||||
|
self.value = value
|
||||||
|
self.sep = sep
|
||||||
|
self.orig = orig
|
||||||
|
|
||||||
|
def __eq__(self, other: 'KeyValueArg'):
|
||||||
|
return self.__dict__ == other.__dict__
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
return repr(self.__dict__)
|
||||||
|
|
||||||
|
|
||||||
|
class SessionNameValidator:
|
||||||
|
|
||||||
|
def __init__(self, error_message: str):
|
||||||
|
self.error_message = error_message
|
||||||
|
|
||||||
|
def __call__(self, value: str) -> str:
|
||||||
|
# Session name can be a path or just a name.
|
||||||
|
if (os.path.sep not in value
|
||||||
|
and not VALID_SESSION_NAME_PATTERN.search(value)):
|
||||||
|
raise argparse.ArgumentError(None, self.error_message)
|
||||||
|
return value
|
||||||
|
|
||||||
|
|
||||||
|
class Escaped(str):
|
||||||
|
"""Represents an escaped character."""
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
return f"Escaped({repr(str(self))})"
|
||||||
|
|
||||||
|
|
||||||
|
class KeyValueArgType:
|
||||||
|
"""A key-value pair argument type used with `argparse`.
|
||||||
|
|
||||||
|
Parses a key-value arg and constructs a `KeyValueArg` instance.
|
||||||
|
Used for headers, form data, and other key-value pair types.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
key_value_class = KeyValueArg
|
||||||
|
|
||||||
|
def __init__(self, *separators: str):
|
||||||
|
self.separators = separators
|
||||||
|
self.special_characters = set('\\')
|
||||||
|
for separator in separators:
|
||||||
|
self.special_characters.update(separator)
|
||||||
|
|
||||||
|
def __call__(self, s: str) -> KeyValueArg:
|
||||||
|
"""Parse raw string arg and return `self.key_value_class` instance.
|
||||||
|
|
||||||
|
The best of `self.separators` is determined (first found, longest).
|
||||||
|
Back slash escaped characters aren't considered as separators
|
||||||
|
(or parts thereof). Literal back slash characters have to be escaped
|
||||||
|
as well (r'\\').
|
||||||
|
|
||||||
|
"""
|
||||||
|
tokens = self.tokenize(s)
|
||||||
|
|
||||||
|
# Sorting by length ensures that the longest one will be
|
||||||
|
# chosen as it will overwrite any shorter ones starting
|
||||||
|
# at the same position in the `found` dictionary.
|
||||||
|
separators = sorted(self.separators, key=len)
|
||||||
|
|
||||||
|
for i, token in enumerate(tokens):
|
||||||
|
|
||||||
|
if isinstance(token, Escaped):
|
||||||
|
continue
|
||||||
|
|
||||||
|
found = {}
|
||||||
|
for sep in separators:
|
||||||
|
pos = token.find(sep)
|
||||||
|
if pos != -1:
|
||||||
|
found[pos] = sep
|
||||||
|
|
||||||
|
if found:
|
||||||
|
# Starting first, longest separator found.
|
||||||
|
sep = found[min(found.keys())]
|
||||||
|
|
||||||
|
key, value = token.split(sep, 1)
|
||||||
|
|
||||||
|
# Any preceding tokens are part of the key.
|
||||||
|
key = ''.join(tokens[:i]) + key
|
||||||
|
|
||||||
|
# Any following tokens are part of the value.
|
||||||
|
value += ''.join(tokens[i + 1:])
|
||||||
|
|
||||||
|
break
|
||||||
|
|
||||||
|
else:
|
||||||
|
raise argparse.ArgumentTypeError(f'{s!r} is not a valid value')
|
||||||
|
|
||||||
|
return self.key_value_class(key=key, value=value, sep=sep, orig=s)
|
||||||
|
|
||||||
|
def tokenize(self, s: str) -> List[Union[str, Escaped]]:
|
||||||
|
r"""Tokenize the raw arg string
|
||||||
|
|
||||||
|
There are only two token types - strings and escaped characters:
|
||||||
|
|
||||||
|
>>> KeyValueArgType('=').tokenize(r'foo\=bar\\baz')
|
||||||
|
['foo', Escaped('='), 'bar', Escaped('\\'), 'baz']
|
||||||
|
|
||||||
|
"""
|
||||||
|
tokens = ['']
|
||||||
|
characters = iter(s)
|
||||||
|
for char in characters:
|
||||||
|
if char == '\\':
|
||||||
|
char = next(characters, '')
|
||||||
|
if char not in self.special_characters:
|
||||||
|
tokens[-1] += '\\' + char
|
||||||
|
else:
|
||||||
|
tokens.extend([Escaped(char), ''])
|
||||||
|
else:
|
||||||
|
tokens[-1] += char
|
||||||
|
return tokens
|
||||||
|
|
||||||
|
|
||||||
|
class AuthCredentials(KeyValueArg):
|
||||||
|
"""Represents parsed credentials."""
|
||||||
|
|
||||||
|
def has_password(self) -> bool:
|
||||||
|
return self.value is not None
|
||||||
|
|
||||||
|
def prompt_password(self, host: str):
|
||||||
|
prompt_text = f'http: password for {self.key}@{host}: '
|
||||||
|
try:
|
||||||
|
self.value = self._getpass(prompt_text)
|
||||||
|
except (EOFError, KeyboardInterrupt):
|
||||||
|
sys.stderr.write('\n')
|
||||||
|
sys.exit(0)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _getpass(prompt):
|
||||||
|
# To allow easy mocking.
|
||||||
|
return getpass.getpass(str(prompt))
|
||||||
|
|
||||||
|
|
||||||
|
class AuthCredentialsArgType(KeyValueArgType):
|
||||||
|
"""A key-value arg type that parses credentials."""
|
||||||
|
|
||||||
|
key_value_class = AuthCredentials
|
||||||
|
|
||||||
|
def __call__(self, s):
|
||||||
|
"""Parse credentials from `s`.
|
||||||
|
|
||||||
|
("username" or "username:password").
|
||||||
|
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
return super().__call__(s)
|
||||||
|
except argparse.ArgumentTypeError:
|
||||||
|
# No password provided, will prompt for it later.
|
||||||
|
return self.key_value_class(
|
||||||
|
key=s,
|
||||||
|
value=None,
|
||||||
|
sep=SEPARATOR_CREDENTIALS,
|
||||||
|
orig=s
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
parse_auth = AuthCredentialsArgType(SEPARATOR_CREDENTIALS)
|
||||||
|
|
||||||
|
|
||||||
|
def readable_file_arg(filename):
|
||||||
|
try:
|
||||||
|
with open(filename, 'rb'):
|
||||||
|
return filename
|
||||||
|
except OSError as ex:
|
||||||
|
raise argparse.ArgumentTypeError(f'{ex.filename}: {ex.strerror}')
|
||||||
|
|
||||||
|
|
||||||
|
def parse_format_options(s: str, defaults: Optional[dict]) -> dict:
|
||||||
|
"""
|
||||||
|
Parse `s` and update `defaults` with the parsed values.
|
||||||
|
|
||||||
|
>>> parse_format_options(
|
||||||
|
... defaults={'json': {'indent': 4, 'sort_keys': True}},
|
||||||
|
... s='json.indent:2,json.sort_keys:False',
|
||||||
|
... )
|
||||||
|
{'json': {'indent': 2, 'sort_keys': False}}
|
||||||
|
|
||||||
|
"""
|
||||||
|
value_map = {
|
||||||
|
'true': True,
|
||||||
|
'false': False,
|
||||||
|
}
|
||||||
|
options = deepcopy(defaults or {})
|
||||||
|
for option in s.split(','):
|
||||||
|
try:
|
||||||
|
path, value = option.lower().split(':')
|
||||||
|
section, key = path.split('.')
|
||||||
|
except ValueError:
|
||||||
|
raise argparse.ArgumentTypeError(f'invalid option {option!r}')
|
||||||
|
|
||||||
|
if value in value_map:
|
||||||
|
parsed_value = value_map[value]
|
||||||
|
else:
|
||||||
|
if value.isnumeric():
|
||||||
|
parsed_value = int(value)
|
||||||
|
else:
|
||||||
|
parsed_value = value
|
||||||
|
|
||||||
|
if defaults is None:
|
||||||
|
options.setdefault(section, {})
|
||||||
|
else:
|
||||||
|
try:
|
||||||
|
default_value = defaults[section][key]
|
||||||
|
except KeyError:
|
||||||
|
raise argparse.ArgumentTypeError(
|
||||||
|
f'invalid key {path!r}')
|
||||||
|
|
||||||
|
default_type, parsed_type = type(default_value), type(parsed_value)
|
||||||
|
if parsed_type is not default_type:
|
||||||
|
raise argparse.ArgumentTypeError(
|
||||||
|
'invalid value'
|
||||||
|
f' {value!r} in {option!r}'
|
||||||
|
f' (expected {default_type.__name__}'
|
||||||
|
f' got {parsed_type.__name__})'
|
||||||
|
)
|
||||||
|
|
||||||
|
options[section][key] = parsed_value
|
||||||
|
|
||||||
|
return options
|
||||||
|
|
||||||
|
|
||||||
|
PARSED_DEFAULT_FORMAT_OPTIONS = parse_format_options(
|
||||||
|
s=','.join(DEFAULT_FORMAT_OPTIONS),
|
||||||
|
defaults=None,
|
||||||
|
)
|
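A brief usage sketch of the `parse_auth` helper defined above; the credentials here are made up purely for illustration:

    # Sketch: how parse_auth / AuthCredentials behave.
    from httpie.cli.argtypes import parse_auth

    creds = parse_auth('alice:s3cret')        # username:password
    print(creds.key, creds.has_password())    # alice True

    creds = parse_auth('alice')               # no password -> prompted for later
    print(creds.value, creds.has_password())  # None False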
httpie/cli/constants.py (new file, 113 lines)
@@ -0,0 +1,113 @@
"""Parsing and processing of CLI input (args, auth credentials, files, stdin).

"""
import enum
import re


URL_SCHEME_RE = re.compile(r'^[a-z][a-z0-9.+-]*://', re.IGNORECASE)

HTTP_POST = 'POST'
HTTP_GET = 'GET'

# Various separators used in args
SEPARATOR_HEADER = ':'
SEPARATOR_HEADER_EMPTY = ';'
SEPARATOR_CREDENTIALS = ':'
SEPARATOR_PROXY = ':'
SEPARATOR_DATA_STRING = '='
SEPARATOR_DATA_RAW_JSON = ':='
SEPARATOR_FILE_UPLOAD = '@'
SEPARATOR_FILE_UPLOAD_TYPE = ';type='  # in already parsed file upload path only
SEPARATOR_DATA_EMBED_FILE_CONTENTS = '=@'
SEPARATOR_DATA_EMBED_RAW_JSON_FILE = ':=@'
SEPARATOR_QUERY_PARAM = '=='

# Separators that become request data
SEPARATOR_GROUP_DATA_ITEMS = frozenset({
    SEPARATOR_DATA_STRING,
    SEPARATOR_DATA_RAW_JSON,
    SEPARATOR_FILE_UPLOAD,
    SEPARATOR_DATA_EMBED_FILE_CONTENTS,
    SEPARATOR_DATA_EMBED_RAW_JSON_FILE
})

SEPARATORS_GROUP_MULTIPART = frozenset({
    SEPARATOR_DATA_STRING,
    SEPARATOR_DATA_EMBED_FILE_CONTENTS,
    SEPARATOR_FILE_UPLOAD,
})

# Separators for items whose value is a filename to be embedded
SEPARATOR_GROUP_DATA_EMBED_ITEMS = frozenset({
    SEPARATOR_DATA_EMBED_FILE_CONTENTS,
    SEPARATOR_DATA_EMBED_RAW_JSON_FILE,
})

# Separators for raw JSON items
SEPARATOR_GROUP_RAW_JSON_ITEMS = frozenset([
    SEPARATOR_DATA_RAW_JSON,
    SEPARATOR_DATA_EMBED_RAW_JSON_FILE,
])

# Separators allowed in ITEM arguments
SEPARATOR_GROUP_ALL_ITEMS = frozenset({
    SEPARATOR_HEADER,
    SEPARATOR_HEADER_EMPTY,
    SEPARATOR_QUERY_PARAM,
    SEPARATOR_DATA_STRING,
    SEPARATOR_DATA_RAW_JSON,
    SEPARATOR_FILE_UPLOAD,
    SEPARATOR_DATA_EMBED_FILE_CONTENTS,
    SEPARATOR_DATA_EMBED_RAW_JSON_FILE,
})

# Output options
OUT_REQ_HEAD = 'H'
OUT_REQ_BODY = 'B'
OUT_RESP_HEAD = 'h'
OUT_RESP_BODY = 'b'

OUTPUT_OPTIONS = frozenset({
    OUT_REQ_HEAD,
    OUT_REQ_BODY,
    OUT_RESP_HEAD,
    OUT_RESP_BODY
})

# Pretty
PRETTY_MAP = {
    'all': ['format', 'colors'],
    'colors': ['colors'],
    'format': ['format'],
    'none': []
}
PRETTY_STDOUT_TTY_ONLY = object()


DEFAULT_FORMAT_OPTIONS = [
    'headers.sort:true',
    'json.format:true',
    'json.indent:4',
    'json.sort_keys:true',
    'xml.format:true',
    'xml.indent:2',
]
SORTED_FORMAT_OPTIONS = [
    'headers.sort:true',
    'json.sort_keys:true',
]
SORTED_FORMAT_OPTIONS_STRING = ','.join(SORTED_FORMAT_OPTIONS)
UNSORTED_FORMAT_OPTIONS_STRING = ','.join(
    option.replace('true', 'false') for option in SORTED_FORMAT_OPTIONS)

# Defaults
OUTPUT_OPTIONS_DEFAULT = OUT_RESP_HEAD + OUT_RESP_BODY
OUTPUT_OPTIONS_DEFAULT_STDOUT_REDIRECTED = OUT_RESP_BODY
OUTPUT_OPTIONS_DEFAULT_OFFLINE = OUT_REQ_HEAD + OUT_REQ_BODY


class RequestType(enum.Enum):
    FORM = enum.auto()
    MULTIPART = enum.auto()
    JSON = enum.auto()
@ -1,86 +1,65 @@
|
|||||||
"""CLI arguments definition.
|
"""
|
||||||
|
CLI arguments definition.
|
||||||
NOTE: the CLI interface may change before reaching v1.0.
|
|
||||||
|
|
||||||
"""
|
"""
|
||||||
# noinspection PyCompatibility
|
from argparse import (FileType, OPTIONAL, SUPPRESS, ZERO_OR_MORE)
|
||||||
from argparse import (
|
|
||||||
RawDescriptionHelpFormatter, FileType,
|
|
||||||
OPTIONAL, ZERO_OR_MORE, SUPPRESS
|
|
||||||
)
|
|
||||||
from textwrap import dedent, wrap
|
from textwrap import dedent, wrap
|
||||||
|
|
||||||
from httpie import __doc__, __version__
|
from .. import __doc__, __version__
|
||||||
from httpie.input import (
|
from .argparser import HTTPieArgumentParser
|
||||||
HTTPieArgumentParser, KeyValueArgType,
|
from .argtypes import (
|
||||||
SEP_PROXY, SEP_GROUP_ALL_ITEMS,
|
KeyValueArgType, SessionNameValidator,
|
||||||
OUT_REQ_HEAD, OUT_REQ_BODY, OUT_RESP_HEAD,
|
readable_file_arg,
|
||||||
OUT_RESP_BODY, OUTPUT_OPTIONS,
|
|
||||||
OUTPUT_OPTIONS_DEFAULT, PRETTY_MAP,
|
|
||||||
PRETTY_STDOUT_TTY_ONLY, SessionNameValidator,
|
|
||||||
readable_file_arg, SSL_VERSION_ARG_MAPPING
|
|
||||||
)
|
)
|
||||||
from httpie.output.formatters.colors import (
|
from .constants import (
|
||||||
AVAILABLE_STYLES, DEFAULT_STYLE, AUTO_STYLE
|
DEFAULT_FORMAT_OPTIONS, OUTPUT_OPTIONS,
|
||||||
|
OUTPUT_OPTIONS_DEFAULT, OUT_REQ_BODY, OUT_REQ_HEAD,
|
||||||
|
OUT_RESP_BODY, OUT_RESP_HEAD, PRETTY_MAP, PRETTY_STDOUT_TTY_ONLY,
|
||||||
|
RequestType, SEPARATOR_GROUP_ALL_ITEMS, SEPARATOR_PROXY,
|
||||||
|
SORTED_FORMAT_OPTIONS_STRING,
|
||||||
|
UNSORTED_FORMAT_OPTIONS_STRING,
|
||||||
)
|
)
|
||||||
from httpie.plugins import plugin_manager
|
from ..output.formatters.colors import (
|
||||||
from httpie.plugins.builtin import BuiltinAuthPlugin
|
AUTO_STYLE, AVAILABLE_STYLES, DEFAULT_STYLE,
|
||||||
from httpie.sessions import DEFAULT_SESSIONS_DIR
|
)
|
||||||
|
from ..plugins.builtin import BuiltinAuthPlugin
|
||||||
|
from ..plugins.registry import plugin_manager
|
||||||
class HTTPieHelpFormatter(RawDescriptionHelpFormatter):
|
from ..sessions import DEFAULT_SESSIONS_DIR
|
||||||
"""A nicer help formatter.
|
from ..ssl import AVAILABLE_SSL_VERSION_ARG_MAPPING, DEFAULT_SSL_CIPHERS
|
||||||
|
|
||||||
Help for arguments can be indented and contain new lines.
|
|
||||||
It will be de-dented and arguments in the help
|
|
||||||
will be separated by a blank line for better readability.
|
|
||||||
|
|
||||||
|
|
||||||
"""
|
|
||||||
def __init__(self, max_help_position=6, *args, **kwargs):
|
|
||||||
# A smaller indent for args help.
|
|
||||||
kwargs['max_help_position'] = max_help_position
|
|
||||||
super(HTTPieHelpFormatter, self).__init__(*args, **kwargs)
|
|
||||||
|
|
||||||
def _split_lines(self, text, width):
|
|
||||||
text = dedent(text).strip() + '\n\n'
|
|
||||||
return text.splitlines()
|
|
||||||
|
|
||||||
|
|
||||||
parser = HTTPieArgumentParser(
|
parser = HTTPieArgumentParser(
|
||||||
prog='http',
|
prog='http',
|
||||||
formatter_class=HTTPieHelpFormatter,
|
description=f'{__doc__.strip()} <https://httpie.org>',
|
||||||
description='%s <http://httpie.org>' % __doc__.strip(),
|
epilog=dedent('''
|
||||||
epilog=dedent("""
|
|
||||||
For every --OPTION there is also a --no-OPTION that reverts OPTION
|
For every --OPTION there is also a --no-OPTION that reverts OPTION
|
||||||
to its default value.
|
to its default value.
|
||||||
|
|
||||||
Suggestions and bug reports are greatly appreciated:
|
Suggestions and bug reports are greatly appreciated:
|
||||||
|
|
||||||
https://github.com/jakubroztocil/httpie/issues
|
https://github.com/httpie/httpie/issues
|
||||||
|
|
||||||
"""),
|
'''),
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
#######################################################################
|
#######################################################################
|
||||||
# Positional arguments.
|
# Positional arguments.
|
||||||
#######################################################################
|
#######################################################################
|
||||||
|
|
||||||
positional = parser.add_argument_group(
|
positional = parser.add_argument_group(
|
||||||
title='Positional Arguments',
|
title='Positional Arguments',
|
||||||
description=dedent("""
|
description=dedent('''
|
||||||
These arguments come after any flags and in the order they are listed here.
|
These arguments come after any flags and in the order they are listed here.
|
||||||
Only URL is required.
|
Only URL is required.
|
||||||
|
|
||||||
""")
|
''')
|
||||||
)
|
)
|
||||||
positional.add_argument(
|
positional.add_argument(
|
||||||
'method',
|
dest='method',
|
||||||
metavar='METHOD',
|
metavar='METHOD',
|
||||||
nargs=OPTIONAL,
|
nargs=OPTIONAL,
|
||||||
default=None,
|
default=None,
|
||||||
help="""
|
help='''
|
||||||
The HTTP method to be used for the request (GET, POST, PUT, DELETE, ...).
|
The HTTP method to be used for the request (GET, POST, PUT, DELETE, ...).
|
||||||
|
|
||||||
This argument can be omitted in which case HTTPie will use POST if there
|
This argument can be omitted in which case HTTPie will use POST if there
|
||||||
@ -89,12 +68,12 @@ positional.add_argument(
|
|||||||
$ http example.org # => GET
|
$ http example.org # => GET
|
||||||
$ http example.org hello=world # => POST
|
$ http example.org hello=world # => POST
|
||||||
|
|
||||||
"""
|
'''
|
||||||
)
|
)
|
||||||
positional.add_argument(
|
positional.add_argument(
|
||||||
'url',
|
dest='url',
|
||||||
metavar='URL',
|
metavar='URL',
|
||||||
help="""
|
help='''
|
||||||
The scheme defaults to 'http://' if the URL does not include one.
|
The scheme defaults to 'http://' if the URL does not include one.
|
||||||
(You can override this with: --default-scheme=https)
|
(You can override this with: --default-scheme=https)
|
||||||
|
|
||||||
@ -103,15 +82,15 @@ positional.add_argument(
|
|||||||
$ http :3000 # => http://localhost:3000
|
$ http :3000 # => http://localhost:3000
|
||||||
$ http :/foo # => http://localhost/foo
|
$ http :/foo # => http://localhost/foo
|
||||||
|
|
||||||
"""
|
'''
|
||||||
)
|
)
|
||||||
positional.add_argument(
|
positional.add_argument(
|
||||||
'items',
|
dest='request_items',
|
||||||
metavar='REQUEST_ITEM',
|
metavar='REQUEST_ITEM',
|
||||||
nargs=ZERO_OR_MORE,
|
nargs=ZERO_OR_MORE,
|
||||||
default=None,
|
default=None,
|
||||||
type=KeyValueArgType(*SEP_GROUP_ALL_ITEMS),
|
type=KeyValueArgType(*SEPARATOR_GROUP_ALL_ITEMS),
|
||||||
help=r"""
|
help=r'''
|
||||||
Optional key-value pairs to be included in the request. The separator used
|
Optional key-value pairs to be included in the request. The separator used
|
||||||
determines the type:
|
determines the type:
|
||||||
|
|
||||||
@ -132,9 +111,10 @@ positional.add_argument(
|
|||||||
|
|
||||||
awesome:=true amount:=42 colors:='["red", "green", "blue"]'
|
awesome:=true amount:=42 colors:='["red", "green", "blue"]'
|
||||||
|
|
||||||
'@' Form file fields (only with --form, -f):
|
'@' Form file fields (only with --form or --multipart):
|
||||||
|
|
||||||
cs@~/Documents/CV.pdf
|
cv@~/Documents/CV.pdf
|
||||||
|
cv@'~/Documents/CV.pdf;type=application/pdf'
|
||||||
|
|
||||||
'=@' A data field like '=', but takes a file path and embeds its content:
|
'=@' A data field like '=', but takes a file path and embeds its content:
|
||||||
|
|
||||||
@ -148,10 +128,9 @@ positional.add_argument(
|
|||||||
|
|
||||||
field-name-with\:colon=value
|
field-name-with\:colon=value
|
||||||
|
|
||||||
"""
|
'''
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
#######################################################################
|
#######################################################################
|
||||||
# Content type.
|
# Content type.
|
||||||
#######################################################################
|
#######################################################################
|
||||||
@ -163,28 +142,93 @@ content_type = parser.add_argument_group(
|
|||||||
|
|
||||||
content_type.add_argument(
|
content_type.add_argument(
|
||||||
'--json', '-j',
|
'--json', '-j',
|
||||||
action='store_true',
|
action='store_const',
|
||||||
help="""
|
const=RequestType.JSON,
|
||||||
|
dest='request_type',
|
||||||
|
help='''
|
||||||
(default) Data items from the command line are serialized as a JSON object.
|
(default) Data items from the command line are serialized as a JSON object.
|
||||||
The Content-Type and Accept headers are set to application/json
|
The Content-Type and Accept headers are set to application/json
|
||||||
(if not specified).
|
(if not specified).
|
||||||
|
|
||||||
"""
|
'''
|
||||||
)
|
)
|
||||||
content_type.add_argument(
|
content_type.add_argument(
|
||||||
'--form', '-f',
|
'--form', '-f',
|
||||||
action='store_true',
|
action='store_const',
|
||||||
help="""
|
const=RequestType.FORM,
|
||||||
|
dest='request_type',
|
||||||
|
help='''
|
||||||
Data items from the command line are serialized as form fields.
|
Data items from the command line are serialized as form fields.
|
||||||
|
|
||||||
The Content-Type is set to application/x-www-form-urlencoded (if not
|
The Content-Type is set to application/x-www-form-urlencoded (if not
|
||||||
specified). The presence of any file fields results in a
|
specified). The presence of any file fields results in a
|
||||||
multipart/form-data request.
|
multipart/form-data request.
|
||||||
|
|
||||||
"""
|
'''
|
||||||
|
)
|
||||||
|
content_type.add_argument(
|
||||||
|
'--multipart',
|
||||||
|
action='store_const',
|
||||||
|
const=RequestType.MULTIPART,
|
||||||
|
dest='request_type',
|
||||||
|
help='''
|
||||||
|
Similar to --form, but always sends a multipart/form-data
|
||||||
|
request (i.e., even without files).
|
||||||
|
|
||||||
|
'''
|
||||||
|
)
|
||||||
|
content_type.add_argument(
|
||||||
|
'--boundary',
|
||||||
|
help='''
|
||||||
|
Specify a custom boundary string for multipart/form-data requests.
|
||||||
|
Has effect only together with --form.
|
||||||
|
|
||||||
|
'''
|
||||||
|
)
|
||||||
|
content_type.add_argument(
|
||||||
|
'--raw',
|
||||||
|
help='''
|
||||||
|
This option allows you to pass raw request data without extra processing
|
||||||
|
(as opposed to the structured request items syntax):
|
||||||
|
|
||||||
|
$ http --raw='data' pie.dev/post
|
||||||
|
|
||||||
|
You can achieve the same by piping the data via stdin:
|
||||||
|
|
||||||
|
$ echo data | http pie.dev/post
|
||||||
|
|
||||||
|
Or have HTTPie load the raw data from a file:
|
||||||
|
|
||||||
|
$ http pie.dev/post @data.txt
|
||||||
|
|
||||||
|
|
||||||
|
'''
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
#######################################################################
|
||||||
|
# Content processing.
|
||||||
|
#######################################################################
|
||||||
|
|
||||||
|
content_processing = parser.add_argument_group(
|
||||||
|
title='Content Processing Options',
|
||||||
|
description=None
|
||||||
|
)
|
||||||
|
|
||||||
|
content_processing.add_argument(
|
||||||
|
'--compress', '-x',
|
||||||
|
action='count',
|
||||||
|
default=0,
|
||||||
|
help='''
|
||||||
|
Content compressed (encoded) with Deflate algorithm.
|
||||||
|
The Content-Encoding header is set to deflate.
|
||||||
|
|
||||||
|
Compression is skipped if it appears that compression ratio is
|
||||||
|
negative. Compression can be forced by repeating the argument.
|
||||||
|
|
||||||
|
'''
|
||||||
|
)
|
||||||
|
|
||||||
#######################################################################
|
#######################################################################
|
||||||
# Output processing
|
# Output processing
|
||||||
#######################################################################
|
#######################################################################
|
||||||
@ -196,12 +240,12 @@ output_processing.add_argument(
|
|||||||
dest='prettify',
|
dest='prettify',
|
||||||
default=PRETTY_STDOUT_TTY_ONLY,
|
default=PRETTY_STDOUT_TTY_ONLY,
|
||||||
choices=sorted(PRETTY_MAP.keys()),
|
choices=sorted(PRETTY_MAP.keys()),
|
||||||
help="""
|
help='''
|
||||||
Controls output processing. The value can be "none" to not prettify
|
Controls output processing. The value can be "none" to not prettify
|
||||||
the output (default for redirected output), "all" to apply both colors
|
the output (default for redirected output), "all" to apply both colors
|
||||||
and formatting (default for terminal output), "colors", or "format".
|
and formatting (default for terminal output), "colors", or "format".
|
||||||
|
|
||||||
"""
|
'''
|
||||||
)
|
)
|
||||||
output_processing.add_argument(
|
output_processing.add_argument(
|
||||||
'--style', '-s',
|
'--style', '-s',
|
||||||
@ -209,10 +253,10 @@ output_processing.add_argument(
|
|||||||
metavar='STYLE',
|
metavar='STYLE',
|
||||||
default=DEFAULT_STYLE,
|
default=DEFAULT_STYLE,
|
||||||
choices=AVAILABLE_STYLES,
|
choices=AVAILABLE_STYLES,
|
||||||
help="""
|
help='''
|
||||||
Output coloring style (default is "{default}"). One of:
|
Output coloring style (default is "{default}"). It can be One of:
|
||||||
|
|
||||||
{available_styles}
|
{available_styles}
|
||||||
|
|
||||||
The "{auto_style}" style follows your terminal's ANSI color styles.
|
The "{auto_style}" style follows your terminal's ANSI color styles.
|
||||||
|
|
||||||
@ -220,16 +264,75 @@ output_processing.add_argument(
|
|||||||
$TERM environment variable is set to "xterm-256color" or similar
|
$TERM environment variable is set to "xterm-256color" or similar
|
||||||
(e.g., via `export TERM=xterm-256color' in your ~/.bashrc).
|
(e.g., via `export TERM=xterm-256color' in your ~/.bashrc).
|
||||||
|
|
||||||
""".format(
|
'''.format(
|
||||||
default=DEFAULT_STYLE,
|
default=DEFAULT_STYLE,
|
||||||
available_styles='\n'.join(
|
available_styles='\n'.join(
|
||||||
'{0}{1}'.format(8 * ' ', line.strip())
|
f' {line.strip()}'
|
||||||
for line in wrap(', '.join(sorted(AVAILABLE_STYLES)), 60)
|
for line in wrap(', '.join(sorted(AVAILABLE_STYLES)), 60)
|
||||||
).rstrip(),
|
).strip(),
|
||||||
auto_style=AUTO_STYLE,
|
auto_style=AUTO_STYLE,
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
_sorted_kwargs = {
|
||||||
|
'action': 'append_const',
|
||||||
|
'const': SORTED_FORMAT_OPTIONS_STRING,
|
||||||
|
'dest': 'format_options'
|
||||||
|
}
|
||||||
|
_unsorted_kwargs = {
|
||||||
|
'action': 'append_const',
|
||||||
|
'const': UNSORTED_FORMAT_OPTIONS_STRING,
|
||||||
|
'dest': 'format_options'
|
||||||
|
}
|
||||||
|
# The closest approx. of the documented resetting to default via --no-<option>.
|
||||||
|
# We hide them from the doc because they act only as low-level aliases here.
|
||||||
|
output_processing.add_argument('--no-unsorted', **_sorted_kwargs, help=SUPPRESS)
|
||||||
|
output_processing.add_argument('--no-sorted', **_unsorted_kwargs, help=SUPPRESS)
|
||||||
|
|
||||||
|
output_processing.add_argument(
|
||||||
|
'--unsorted',
|
||||||
|
**_unsorted_kwargs,
|
||||||
|
help=f'''
|
||||||
|
Disables all sorting while formatting output. It is a shortcut for:
|
||||||
|
|
||||||
|
--format-options={UNSORTED_FORMAT_OPTIONS_STRING}
|
||||||
|
|
||||||
|
'''
|
||||||
|
)
|
||||||
|
output_processing.add_argument(
|
||||||
|
'--sorted',
|
||||||
|
**_sorted_kwargs,
|
||||||
|
help=f'''
|
||||||
|
Re-enables all sorting options while formatting output. It is a shortcut for:
|
||||||
|
|
||||||
|
--format-options={SORTED_FORMAT_OPTIONS_STRING}
|
||||||
|
|
||||||
|
'''
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
output_processing.add_argument(
|
||||||
|
'--format-options',
|
||||||
|
action='append',
|
||||||
|
help='''
|
||||||
|
Controls output formatting. Only relevant when formatting is enabled
|
||||||
|
through (explicit or implied) --pretty=all or --pretty=format.
|
||||||
|
The following are the default options:
|
||||||
|
|
||||||
|
{option_list}
|
||||||
|
|
||||||
|
You may use this option multiple times, as well as specify multiple
|
||||||
|
comma-separated options at the same time. For example, this modifies the
|
||||||
|
settings to disable the sorting of JSON keys, and sets the indent size to 2:
|
||||||
|
|
||||||
|
--format-options json.sort_keys:false,json.indent:2
|
||||||
|
|
||||||
|
This is something you will typically put into your config file.
|
||||||
|
|
||||||
|
'''.format(
|
||||||
|
option_list='\n'.join(
|
||||||
|
f' {option}' for option in DEFAULT_FORMAT_OPTIONS).strip()
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
#######################################################################
|
#######################################################################
|
||||||
# Output options
|
# Output options
|
||||||
@ -240,93 +343,83 @@ output_options.add_argument(
|
|||||||
'--print', '-p',
|
'--print', '-p',
|
||||||
dest='output_options',
|
dest='output_options',
|
||||||
metavar='WHAT',
|
metavar='WHAT',
|
||||||
help="""
|
help=f'''
|
||||||
String specifying what the output should contain:
|
String specifying what the output should contain:
|
||||||
|
|
||||||
'{req_head}' request headers
|
'{OUT_REQ_HEAD}' request headers
|
||||||
'{req_body}' request body
|
'{OUT_REQ_BODY}' request body
|
||||||
'{res_head}' response headers
|
'{OUT_RESP_HEAD}' response headers
|
||||||
'{res_body}' response body
|
'{OUT_RESP_BODY}' response body
|
||||||
|
|
||||||
The default behaviour is '{default}' (i.e., the response headers and body
|
The default behaviour is '{OUTPUT_OPTIONS_DEFAULT}' (i.e., the response
|
||||||
is printed), if standard output is not redirected. If the output is piped
|
headers and body are printed), if standard output is not redirected.
|
||||||
to another program or to a file, then only the response body is printed
|
If the output is piped to another program or to a file, then only the
|
||||||
by default.
|
response body is printed by default.
|
||||||
|
|
||||||
"""
|
'''
|
||||||
.format(
|
|
||||||
req_head=OUT_REQ_HEAD,
|
|
||||||
req_body=OUT_REQ_BODY,
|
|
||||||
res_head=OUT_RESP_HEAD,
|
|
||||||
res_body=OUT_RESP_BODY,
|
|
||||||
default=OUTPUT_OPTIONS_DEFAULT,
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
output_options.add_argument(
|
output_options.add_argument(
|
||||||
'--headers', '-h',
|
'--headers', '-h',
|
||||||
dest='output_options',
|
dest='output_options',
|
||||||
action='store_const',
|
action='store_const',
|
||||||
const=OUT_RESP_HEAD,
|
const=OUT_RESP_HEAD,
|
||||||
help="""
|
help=f'''
|
||||||
Print only the response headers. Shortcut for --print={0}.
|
Print only the response headers. Shortcut for --print={OUT_RESP_HEAD}.
|
||||||
|
|
||||||
"""
|
'''
|
||||||
.format(OUT_RESP_HEAD)
|
|
||||||
)
|
)
|
||||||
output_options.add_argument(
|
output_options.add_argument(
|
||||||
'--body', '-b',
|
'--body', '-b',
|
||||||
dest='output_options',
|
dest='output_options',
|
||||||
action='store_const',
|
action='store_const',
|
||||||
const=OUT_RESP_BODY,
|
const=OUT_RESP_BODY,
|
||||||
help="""
|
help=f'''
|
||||||
Print only the response body. Shortcut for --print={0}.
|
Print only the response body. Shortcut for --print={OUT_RESP_BODY}.
|
||||||
|
|
||||||
"""
|
'''
|
||||||
.format(OUT_RESP_BODY)
|
|
||||||
)
|
)
|
||||||
|
|
||||||
output_options.add_argument(
|
output_options.add_argument(
|
||||||
'--verbose', '-v',
|
'--verbose', '-v',
|
||||||
dest='verbose',
|
dest='verbose',
|
||||||
action='store_true',
|
action='store_true',
|
||||||
help="""
|
help=f'''
|
||||||
Verbose output. Print the whole request as well as the response. Also print
|
Verbose output. Print the whole request as well as the response. Also print
|
||||||
any intermediary requests/responses (such as redirects).
|
any intermediary requests/responses (such as redirects).
|
||||||
It's a shortcut for: --all --print={0}
|
It's a shortcut for: --all --print={''.join(OUTPUT_OPTIONS)}
|
||||||
|
|
||||||
"""
|
'''
|
||||||
.format(''.join(OUTPUT_OPTIONS))
|
|
||||||
)
|
)
|
||||||
output_options.add_argument(
|
output_options.add_argument(
|
||||||
'--all',
|
'--all',
|
||||||
default=False,
|
default=False,
|
||||||
action='store_true',
|
action='store_true',
|
||||||
help="""
|
help='''
|
||||||
By default, only the final request/response is shown. Use this flag to show
|
By default, only the final request/response is shown. Use this flag to show
|
||||||
any intermediary requests/responses as well. Intermediary requests include
|
any intermediary requests/responses as well. Intermediary requests include
|
||||||
followed redirects (with --follow), the first unauthorized request when
|
followed redirects (with --follow), the first unauthorized request when
|
||||||
Digest auth is used (--auth=digest), etc.
|
Digest auth is used (--auth=digest), etc.
|
||||||
|
|
||||||
"""
|
'''
|
||||||
)
|
)
|
||||||
output_options.add_argument(
|
output_options.add_argument(
|
||||||
'--history-print', '-P',
|
'--history-print', '-P',
|
||||||
dest='output_options_history',
|
dest='output_options_history',
|
||||||
metavar='WHAT',
|
metavar='WHAT',
|
||||||
help="""
|
help='''
|
||||||
The same as --print, -p but applies only to intermediary requests/responses
|
The same as --print, -p but applies only to intermediary requests/responses
|
||||||
(such as redirects) when their inclusion is enabled with --all. If this
|
(such as redirects) when their inclusion is enabled with --all. If this
|
||||||
option is not specified, then they are formatted the same way as the final
|
option is not specified, then they are formatted the same way as the final
|
||||||
response.
|
response.
|
||||||
|
|
||||||
"""
|
'''
|
||||||
)
|
)
|
||||||
output_options.add_argument(
|
output_options.add_argument(
|
||||||
'--stream', '-S',
|
'--stream', '-S',
|
||||||
action='store_true',
|
action='store_true',
|
||||||
default=False,
|
default=False,
|
||||||
help="""
|
help='''
|
||||||
Always stream the output by line, i.e., behave like `tail -f'.
|
Always stream the response body by line, i.e., behave like `tail -f'.
|
||||||
|
|
||||||
Without --stream and with --pretty (either set or implied),
|
Without --stream and with --pretty (either set or implied),
|
||||||
HTTPie fetches the whole response before it outputs the processed data.
|
HTTPie fetches the whole response before it outputs the processed data.
|
||||||
@ -337,19 +430,19 @@ output_options.add_argument(
|
|||||||
It is also useful without --pretty: it ensures that the output is flushed
|
It is also useful without --pretty: it ensures that the output is flushed
|
||||||
more often and in smaller chunks.
|
more often and in smaller chunks.
|
||||||
|
|
||||||
"""
|
'''
|
||||||
)
|
)
|
||||||
output_options.add_argument(
|
output_options.add_argument(
|
||||||
'--output', '-o',
|
'--output', '-o',
|
||||||
type=FileType('a+b'),
|
type=FileType('a+b'),
|
||||||
dest='output_file',
|
dest='output_file',
|
||||||
metavar='FILE',
|
metavar='FILE',
|
||||||
help="""
|
help='''
|
||||||
Save output to FILE instead of stdout. If --download is also set, then only
|
Save output to FILE instead of stdout. If --download is also set, then only
|
||||||
the response body is saved to FILE. Other parts of the HTTP exchange are
|
the response body is saved to FILE. Other parts of the HTTP exchange are
|
||||||
printed to stderr.
|
printed to stderr.
|
||||||
|
|
||||||
"""
|
'''
|
||||||
|
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -357,12 +450,12 @@ output_options.add_argument(
|
|||||||
'--download', '-d',
|
'--download', '-d',
|
||||||
action='store_true',
|
action='store_true',
|
||||||
default=False,
|
default=False,
|
||||||
help="""
|
help='''
|
||||||
Do not print the response body to stdout. Rather, download it and store it
|
Do not print the response body to stdout. Rather, download it and store it
|
||||||
in a file. The filename is guessed unless specified with --output
|
in a file. The filename is guessed unless specified with --output
|
||||||
[filename]. This action is similar to the default behaviour of wget.
|
[filename]. This action is similar to the default behaviour of wget.
|
||||||
|
|
||||||
"""
|
'''
|
||||||
)
|
)
|
||||||
|
|
||||||
output_options.add_argument(
|
output_options.add_argument(
|
||||||
@ -370,20 +463,30 @@ output_options.add_argument(
|
|||||||
dest='download_resume',
|
dest='download_resume',
|
||||||
action='store_true',
|
action='store_true',
|
||||||
default=False,
|
default=False,
|
||||||
help="""
|
help='''
|
||||||
Resume an interrupted download. Note that the --output option needs to be
|
Resume an interrupted download. Note that the --output option needs to be
|
||||||
specified as well.
|
specified as well.
|
||||||
|
|
||||||
"""
|
'''
|
||||||
)
|
)
|
||||||
|
|
||||||
|
+output_options.add_argument(
+    '--quiet', '-q',
+    action='store_true',
+    default=False,
+    help='''
+    Do not print to stdout or stderr.
+    stdout is still redirected if --output is specified.
+    This flag does not affect the download behaviour beyond suppressing
+    terminal output.
+    '''
+)
|
|
||||||
#######################################################################
|
#######################################################################
|
||||||
# Sessions
|
# Sessions
|
||||||
#######################################################################
|
#######################################################################
|
||||||
|
|
||||||
sessions = parser.add_argument_group(title='Sessions')\
|
sessions = parser.add_argument_group(title='Sessions') \
|
||||||
.add_mutually_exclusive_group(required=False)
|
.add_mutually_exclusive_group(required=False)
|
||||||
|
|
||||||
session_name_validator = SessionNameValidator(
|
session_name_validator = SessionNameValidator(
|
||||||
'Session name contains invalid characters.'
|
'Session name contains invalid characters.'
|
||||||
@ -393,27 +496,26 @@ sessions.add_argument(
|
|||||||
'--session',
|
'--session',
|
||||||
metavar='SESSION_NAME_OR_PATH',
|
metavar='SESSION_NAME_OR_PATH',
|
||||||
type=session_name_validator,
|
type=session_name_validator,
|
||||||
help="""
|
help=f'''
|
||||||
Create, or reuse and update a session. Within a session, custom headers,
|
Create, or reuse and update a session. Within a session, custom headers,
|
||||||
auth credentials, as well as any cookies sent by the server, persist between
|
auth credentials, as well as any cookies sent by the server, persist between
|
||||||
requests.
|
requests.
|
||||||
|
|
||||||
Session files are stored in:
|
Session files are stored in:
|
||||||
|
|
||||||
{session_dir}/<HOST>/<SESSION_NAME>.json.
|
{DEFAULT_SESSIONS_DIR}/<HOST>/<SESSION_NAME>.json.
|
||||||
|
|
||||||
"""
|
'''
|
||||||
.format(session_dir=DEFAULT_SESSIONS_DIR)
|
|
||||||
)
|
)
|
||||||
sessions.add_argument(
|
sessions.add_argument(
|
||||||
'--session-read-only',
|
'--session-read-only',
|
||||||
metavar='SESSION_NAME_OR_PATH',
|
metavar='SESSION_NAME_OR_PATH',
|
||||||
type=session_name_validator,
|
type=session_name_validator,
|
||||||
help="""
|
help='''
|
||||||
Create or read a session without updating it from the request/response
|
Create or read a session without updating it from the request/response
|
||||||
exchange.
|
exchange.
|
||||||
|
|
||||||
"""
|
'''
|
||||||
)
|
)
|
||||||
|
|
||||||
#######################################################################
|
#######################################################################
|
||||||
@ -426,15 +528,15 @@ auth.add_argument(
|
|||||||
'--auth', '-a',
|
'--auth', '-a',
|
||||||
default=None,
|
default=None,
|
||||||
metavar='USER[:PASS]',
|
metavar='USER[:PASS]',
|
||||||
help="""
|
help='''
|
||||||
If only the username is provided (-a username), HTTPie will prompt
|
If only the username is provided (-a username), HTTPie will prompt
|
||||||
for the password.
|
for the password.
|
||||||
|
|
||||||
""",
|
''',
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
class _AuthTypeLazyChoices(object):
|
class _AuthTypeLazyChoices:
|
||||||
# Needed for plugin testing
|
# Needed for plugin testing
|
||||||
|
|
||||||
def __contains__(self, item):
|
def __contains__(self, item):
|
||||||
@ -449,19 +551,18 @@ auth.add_argument(
|
|||||||
'--auth-type', '-A',
|
'--auth-type', '-A',
|
||||||
choices=_AuthTypeLazyChoices(),
|
choices=_AuthTypeLazyChoices(),
|
||||||
default=None,
|
default=None,
|
||||||
help="""
|
help='''
|
||||||
The authentication mechanism to be used. Defaults to "{default}".
|
The authentication mechanism to be used. Defaults to "{default}".
|
||||||
|
|
||||||
{types}
|
{types}
|
||||||
|
|
||||||
"""
|
'''.format(default=_auth_plugins[0].auth_type, types='\n '.join(
|
||||||
.format(default=_auth_plugins[0].auth_type, types='\n '.join(
|
|
||||||
'"{type}": {name}{package}{description}'.format(
|
'"{type}": {name}{package}{description}'.format(
|
||||||
type=plugin.auth_type,
|
type=plugin.auth_type,
|
||||||
name=plugin.name,
|
name=plugin.name,
|
||||||
package=(
|
package=(
|
||||||
'' if issubclass(plugin, BuiltinAuthPlugin)
|
'' if issubclass(plugin, BuiltinAuthPlugin)
|
||||||
else ' (provided by %s)' % plugin.package_name
|
else f' (provided by {plugin.package_name})'
|
||||||
),
|
),
|
||||||
description=(
|
description=(
|
||||||
'' if not plugin.description else
|
'' if not plugin.description else
|
||||||
@ -471,7 +572,15 @@ auth.add_argument(
|
|||||||
for plugin in _auth_plugins
|
for plugin in _auth_plugins
|
||||||
)),
|
)),
|
||||||
)
|
)
|
||||||
|
+auth.add_argument(
+    '--ignore-netrc',
+    default=False,
+    action='store_true',
+    help='''
+    Ignore credentials from .netrc.
+
+    ''',
+)
|
|
||||||
#######################################################################
|
#######################################################################
|
||||||
# Network
|
# Network
|
||||||
@ -479,55 +588,79 @@ auth.add_argument(
|
|||||||
|
|
||||||
network = parser.add_argument_group(title='Network')
|
network = parser.add_argument_group(title='Network')
|
||||||
|
|
||||||
|
+network.add_argument(
+    '--offline',
+    default=False,
+    action='store_true',
+    help='''
+    Build the request and print it but don’t actually send it.
+    '''
+)
network.add_argument(
|
network.add_argument(
|
||||||
'--proxy',
|
'--proxy',
|
||||||
default=[],
|
default=[],
|
||||||
action='append',
|
action='append',
|
||||||
metavar='PROTOCOL:PROXY_URL',
|
metavar='PROTOCOL:PROXY_URL',
|
||||||
type=KeyValueArgType(SEP_PROXY),
|
type=KeyValueArgType(SEPARATOR_PROXY),
|
||||||
help="""
|
help='''
|
||||||
String mapping protocol to the URL of the proxy
|
String mapping protocol to the URL of the proxy
|
||||||
(e.g. http:http://foo.bar:3128). You can specify multiple proxies with
|
(e.g. http:http://foo.bar:3128). You can specify multiple proxies with
|
||||||
different protocols.
|
different protocols. The environment variables $ALL_PROXY, $HTTP_PROXY,
|
||||||
|
and $HTTPS_PROXY are supported as well.
|
||||||
|
|
||||||
"""
|
'''
|
||||||
)
|
)
|
||||||
network.add_argument(
|
network.add_argument(
|
||||||
'--follow', '-F',
|
'--follow', '-F',
|
||||||
default=False,
|
default=False,
|
||||||
action='store_true',
|
action='store_true',
|
||||||
help="""
|
help='''
|
||||||
Follow 30x Location redirects.
|
Follow 30x Location redirects.
|
||||||
|
|
||||||
"""
|
'''
|
||||||
)
|
)
|
||||||
|
|
||||||
network.add_argument(
|
network.add_argument(
|
||||||
'--max-redirects',
|
'--max-redirects',
|
||||||
type=int,
|
type=int,
|
||||||
default=30,
|
default=30,
|
||||||
help="""
|
help='''
|
||||||
By default, requests have a limit of 30 redirects (works with --follow).
|
By default, requests have a limit of 30 redirects (works with --follow).
|
||||||
|
|
||||||
"""
|
'''
|
||||||
|
)
|
||||||
|
|
||||||
|
+network.add_argument(
+    '--max-headers',
+    type=int,
+    default=0,
+    help='''
+    The maximum number of response headers to be read before giving up
+    (default 0, i.e., no limit).
+
+    '''
+)
)
|
)
|
||||||
|
|
||||||
network.add_argument(
|
network.add_argument(
|
||||||
'--timeout',
|
'--timeout',
|
||||||
type=float,
|
type=float,
|
||||||
default=30,
|
default=0,
|
||||||
metavar='SECONDS',
|
metavar='SECONDS',
|
||||||
help="""
|
help='''
|
||||||
The connection timeout of the request in seconds. The default value is
|
The connection timeout of the request in seconds.
|
||||||
30 seconds.
|
The default value is 0, i.e., there is no timeout limit.
|
||||||
|
This is not a time limit on the entire response download;
|
||||||
|
rather, an error is reported if the server has not issued a response for
|
||||||
|
timeout seconds (more precisely, if no bytes have been received on
|
||||||
|
the underlying socket for timeout seconds).
|
||||||
|
|
||||||
"""
|
'''
|
||||||
)
|
)
|
||||||
network.add_argument(
|
network.add_argument(
|
||||||
'--check-status',
|
'--check-status',
|
||||||
default=False,
|
default=False,
|
||||||
action='store_true',
|
action='store_true',
|
||||||
help="""
|
help='''
|
||||||
By default, HTTPie exits with 0 when no network or other fatal errors
|
By default, HTTPie exits with 0 when no network or other fatal errors
|
||||||
occur. This flag instructs HTTPie to also check the HTTP status code and
|
occur. This flag instructs HTTPie to also check the HTTP status code and
|
||||||
exit with an error if the status indicates one.
|
exit with an error if the status indicates one.
|
||||||
@ -537,9 +670,26 @@ network.add_argument(
|
|||||||
3xx (Redirect) and --follow hasn't been set, then the exit status is 3.
|
3xx (Redirect) and --follow hasn't been set, then the exit status is 3.
|
||||||
Also an error message is written to stderr if stdout is redirected.
|
Also an error message is written to stderr if stdout is redirected.
|
||||||
|
|
||||||
"""
|
'''
|
||||||
|
)
|
||||||
|
+network.add_argument(
+    '--path-as-is',
+    default=False,
+    action='store_true',
+    help='''
+    Bypass dot segment (/../ or /./) URL squashing.
+
+    '''
+)
)
|
)
|
||||||
|
|
||||||
|
+network.add_argument(
+    '--chunked',
+    default=False,
+    action='store_true',
+    help="""
+    Enable streaming via chunked transfer encoding.
+    """
+)
|
|
||||||
#######################################################################
|
#######################################################################
|
||||||
# SSL
|
# SSL
|
||||||
@ -549,47 +699,58 @@ ssl = parser.add_argument_group(title='SSL')
|
|||||||
ssl.add_argument(
|
ssl.add_argument(
|
||||||
'--verify',
|
'--verify',
|
||||||
default='yes',
|
default='yes',
|
||||||
help="""
|
help='''
|
||||||
Set to "no" (or "false") to skip checking the host's SSL certificate.
|
Set to "no" (or "false") to skip checking the host's SSL certificate.
|
||||||
Defaults to "yes" ("true"). You can also pass the path to a CA_BUNDLE file
|
Defaults to "yes" ("true"). You can also pass the path to a CA_BUNDLE file
|
||||||
for private certs. (Or you can set the REQUESTS_CA_BUNDLE environment
|
for private certs. (Or you can set the REQUESTS_CA_BUNDLE environment
|
||||||
variable instead.)
|
variable instead.)
|
||||||
"""
|
'''
|
||||||
)
|
)
|
||||||
ssl.add_argument(
|
ssl.add_argument(
|
||||||
'--ssl', # TODO: Maybe something more general, such as --secure-protocol?
|
'--ssl',
|
||||||
dest='ssl_version',
|
dest='ssl_version',
|
||||||
choices=list(sorted(SSL_VERSION_ARG_MAPPING.keys())),
|
choices=sorted(AVAILABLE_SSL_VERSION_ARG_MAPPING.keys()),
|
||||||
help="""
|
help='''
|
||||||
The desired protocol version to use. This will default to
|
The desired protocol version to use. This will default to
|
||||||
SSL v2.3 which will negotiate the highest protocol that both
|
SSL v2.3 which will negotiate the highest protocol that both
|
||||||
the server and your installation of OpenSSL support. Available protocols
|
the server and your installation of OpenSSL support. Available protocols
|
||||||
may vary depending on OpenSSL installation (only the supported ones
|
may vary depending on OpenSSL installation (only the supported ones
|
||||||
are shown here).
|
are shown here).
|
||||||
|
|
||||||
"""
|
'''
|
||||||
|
)
|
||||||
|
ssl.add_argument(
|
||||||
|
'--ciphers',
|
||||||
|
help=f'''
|
||||||
|
|
||||||
|
A string in the OpenSSL cipher list format. By default, the following
|
||||||
|
is used:
|
||||||
|
|
||||||
|
{DEFAULT_SSL_CIPHERS}
|
||||||
|
|
||||||
|
'''
|
||||||
)
|
)
|
||||||
ssl.add_argument(
|
ssl.add_argument(
|
||||||
'--cert',
|
'--cert',
|
||||||
default=None,
|
default=None,
|
||||||
type=readable_file_arg,
|
type=readable_file_arg,
|
||||||
help="""
|
help='''
|
||||||
You can specify a local cert to use as client side SSL certificate.
|
You can specify a local cert to use as client side SSL certificate.
|
||||||
This file may either contain both private key and certificate or you may
|
This file may either contain both private key and certificate or you may
|
||||||
specify --cert-key separately.
|
specify --cert-key separately.
|
||||||
|
|
||||||
"""
|
'''
|
||||||
)
|
)
|
||||||
|
|
||||||
ssl.add_argument(
|
ssl.add_argument(
|
||||||
'--cert-key',
|
'--cert-key',
|
||||||
default=None,
|
default=None,
|
||||||
type=readable_file_arg,
|
type=readable_file_arg,
|
||||||
help="""
|
help='''
|
||||||
The private key to use with SSL. Only needed if --cert is given and the
|
The private key to use with SSL. Only needed if --cert is given and the
|
||||||
certificate file does not contain the private key.
|
certificate file does not contain the private key.
|
||||||
|
|
||||||
"""
|
'''
|
||||||
)
|
)
|
||||||
|
|
||||||
#######################################################################
|
#######################################################################
|
||||||
@ -602,53 +763,53 @@ troubleshooting.add_argument(
|
|||||||
'--ignore-stdin', '-I',
|
'--ignore-stdin', '-I',
|
||||||
action='store_true',
|
action='store_true',
|
||||||
default=False,
|
default=False,
|
||||||
help="""
|
help='''
|
||||||
Do not attempt to read stdin.
|
Do not attempt to read stdin.
|
||||||
|
|
||||||
"""
|
'''
|
||||||
)
|
)
|
||||||
troubleshooting.add_argument(
|
troubleshooting.add_argument(
|
||||||
'--help',
|
'--help',
|
||||||
action='help',
|
action='help',
|
||||||
default=SUPPRESS,
|
default=SUPPRESS,
|
||||||
help="""
|
help='''
|
||||||
Show this help message and exit.
|
Show this help message and exit.
|
||||||
|
|
||||||
"""
|
'''
|
||||||
)
|
)
|
||||||
troubleshooting.add_argument(
|
troubleshooting.add_argument(
|
||||||
'--version',
|
'--version',
|
||||||
action='version',
|
action='version',
|
||||||
version=__version__,
|
version=__version__,
|
||||||
help="""
|
help='''
|
||||||
Show version and exit.
|
Show version and exit.
|
||||||
|
|
||||||
"""
|
'''
|
||||||
)
|
)
|
||||||
troubleshooting.add_argument(
|
troubleshooting.add_argument(
|
||||||
'--traceback',
|
'--traceback',
|
||||||
action='store_true',
|
action='store_true',
|
||||||
default=False,
|
default=False,
|
||||||
help="""
|
help='''
|
||||||
Prints the exception traceback should one occur.
|
Prints the exception traceback should one occur.
|
||||||
|
|
||||||
"""
|
'''
|
||||||
)
|
)
|
||||||
troubleshooting.add_argument(
|
troubleshooting.add_argument(
|
||||||
'--default-scheme',
|
'--default-scheme',
|
||||||
default="http",
|
default="http",
|
||||||
help="""
|
help='''
|
||||||
The default scheme to use if not specified in the URL.
|
The default scheme to use if not specified in the URL.
|
||||||
|
|
||||||
"""
|
'''
|
||||||
)
|
)
|
||||||
troubleshooting.add_argument(
|
troubleshooting.add_argument(
|
||||||
'--debug',
|
'--debug',
|
||||||
action='store_true',
|
action='store_true',
|
||||||
default=False,
|
default=False,
|
||||||
help="""
|
help='''
|
||||||
Prints the exception traceback should one occur, as well as other
|
Prints the exception traceback should one occur, as well as other
|
||||||
information useful for debugging HTTPie itself and for reporting bugs.
|
information useful for debugging HTTPie itself and for reporting bugs.
|
||||||
|
|
||||||
"""
|
'''
|
||||||
)
|
)
|
58
httpie/cli/dicts.py
Normal file
@ -0,0 +1,58 @@
|
|||||||
|
+from collections import OrderedDict
+
+from requests.structures import CaseInsensitiveDict
+
+
+class RequestHeadersDict(CaseInsensitiveDict):
+    """
+    Headers are case-insensitive and multiple values are currently not supported.
+
+    """
+
+
+class RequestJSONDataDict(OrderedDict):
+    pass
+
+
+class MultiValueOrderedDict(OrderedDict):
+    """Multi-value dict for URL parameters and form data."""
+
+    def __setitem__(self, key, value):
+        """
+        If `key` is assigned more than once, `self[key]` holds a
+        `list` of all the values.
+
+        This allows having multiple fields with the same name in form
+        data and URL params.
+
+        """
+        assert not isinstance(value, list)
+        if key not in self:
+            super().__setitem__(key, value)
+        else:
+            if not isinstance(self[key], list):
+                super().__setitem__(key, [self[key]])
+            self[key].append(value)
+
+    def items(self):
+        for key, values in super().items():
+            if not isinstance(values, list):
+                values = [values]
+            for value in values:
+                yield key, value
+
+
+class RequestQueryParamsDict(MultiValueOrderedDict):
+    pass
+
+
+class RequestDataDict(MultiValueOrderedDict):
+    pass
+
+
+class MultipartRequestDataDict(MultiValueOrderedDict):
+    pass
+
+
+class RequestFilesDict(RequestDataDict):
+    pass
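The multi-value behaviour of MultiValueOrderedDict above can be seen with a short illustrative snippet (not part of the file): repeated keys accumulate into a list, and items() flattens them back out in insertion order.

d = MultiValueOrderedDict()
d['a'] = '1'
d['b'] = '2'
d['a'] = '3'   # a second assignment turns the stored value into a list

assert d['a'] == ['1', '3']
assert list(d.items()) == [('a', '1'), ('a', '3'), ('b', '2')]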
|
2
httpie/cli/exceptions.py
Normal file
2
httpie/cli/exceptions.py
Normal file
@ -0,0 +1,2 @@
|
|||||||
|
class ParseError(Exception):
|
||||||
|
pass
|
155
httpie/cli/requestitems.py
Normal file
@ -0,0 +1,155 @@
|
|||||||
|
import os
|
||||||
|
from typing import Callable, Dict, IO, List, Optional, Tuple, Union
|
||||||
|
|
||||||
|
from .argtypes import KeyValueArg
|
||||||
|
from .constants import (
|
||||||
|
SEPARATORS_GROUP_MULTIPART, SEPARATOR_DATA_EMBED_FILE_CONTENTS,
|
||||||
|
SEPARATOR_DATA_EMBED_RAW_JSON_FILE,
|
||||||
|
SEPARATOR_DATA_RAW_JSON, SEPARATOR_DATA_STRING, SEPARATOR_FILE_UPLOAD,
|
||||||
|
SEPARATOR_FILE_UPLOAD_TYPE, SEPARATOR_HEADER, SEPARATOR_HEADER_EMPTY,
|
||||||
|
SEPARATOR_QUERY_PARAM,
|
||||||
|
)
|
||||||
|
from .dicts import (
|
||||||
|
MultipartRequestDataDict, RequestDataDict, RequestFilesDict,
|
||||||
|
RequestHeadersDict, RequestJSONDataDict,
|
||||||
|
RequestQueryParamsDict,
|
||||||
|
)
|
||||||
|
from .exceptions import ParseError
|
||||||
|
from ..utils import get_content_type, load_json_preserve_order
|
||||||
|
|
||||||
|
|
||||||
|
class RequestItems:
|
||||||
|
|
||||||
|
def __init__(self, as_form=False):
|
||||||
|
self.headers = RequestHeadersDict()
|
||||||
|
self.data = RequestDataDict() if as_form else RequestJSONDataDict()
|
||||||
|
self.files = RequestFilesDict()
|
||||||
|
self.params = RequestQueryParamsDict()
|
||||||
|
# To preserve the order of fields in file upload multipart requests.
|
||||||
|
self.multipart_data = MultipartRequestDataDict()
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def from_args(
|
||||||
|
cls,
|
||||||
|
request_item_args: List[KeyValueArg],
|
||||||
|
as_form=False,
|
||||||
|
) -> 'RequestItems':
|
||||||
|
instance = cls(as_form=as_form)
|
||||||
|
rules: Dict[str, Tuple[Callable, dict]] = {
|
||||||
|
SEPARATOR_HEADER: (
|
||||||
|
process_header_arg,
|
||||||
|
instance.headers,
|
||||||
|
),
|
||||||
|
SEPARATOR_HEADER_EMPTY: (
|
||||||
|
process_empty_header_arg,
|
||||||
|
instance.headers,
|
||||||
|
),
|
||||||
|
SEPARATOR_QUERY_PARAM: (
|
||||||
|
process_query_param_arg,
|
||||||
|
instance.params,
|
||||||
|
),
|
||||||
|
SEPARATOR_FILE_UPLOAD: (
|
||||||
|
process_file_upload_arg,
|
||||||
|
instance.files,
|
||||||
|
),
|
||||||
|
SEPARATOR_DATA_STRING: (
|
||||||
|
process_data_item_arg,
|
||||||
|
instance.data,
|
||||||
|
),
|
||||||
|
SEPARATOR_DATA_EMBED_FILE_CONTENTS: (
|
||||||
|
process_data_embed_file_contents_arg,
|
||||||
|
instance.data,
|
||||||
|
),
|
||||||
|
SEPARATOR_DATA_RAW_JSON: (
|
||||||
|
process_data_raw_json_embed_arg,
|
||||||
|
instance.data,
|
||||||
|
),
|
||||||
|
SEPARATOR_DATA_EMBED_RAW_JSON_FILE: (
|
||||||
|
process_data_embed_raw_json_file_arg,
|
||||||
|
instance.data,
|
||||||
|
),
|
||||||
|
}
|
||||||
|
|
||||||
|
for arg in request_item_args:
|
||||||
|
processor_func, target_dict = rules[arg.sep]
|
||||||
|
value = processor_func(arg)
|
||||||
|
target_dict[arg.key] = value
|
||||||
|
|
||||||
|
if arg.sep in SEPARATORS_GROUP_MULTIPART:
|
||||||
|
instance.multipart_data[arg.key] = value
|
||||||
|
|
||||||
|
return instance
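A small sketch of how one parsed item flows through from_args() above. KeyValueArg itself is not shown in this diff, so a namedtuple stand-in with the attributes used here (key, value, sep, orig) is assumed:

from collections import namedtuple

FakeArg = namedtuple('FakeArg', ['key', 'value', 'sep', 'orig'])

# A header item is routed by its separator to process_header_arg() and
# stored on .headers (SEPARATOR_HEADER is the header separator constant
# imported above).
arg = FakeArg(key='X-API-Token', value='123',
              sep=SEPARATOR_HEADER, orig='X-API-Token:123')
items = RequestItems.from_args([arg])
assert items.headers['X-API-Token'] == '123'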
|
||||||
|
|
||||||
|
|
||||||
|
JSONType = Union[str, bool, int, list, dict]
|
||||||
|
|
||||||
|
|
||||||
|
def process_header_arg(arg: KeyValueArg) -> Optional[str]:
|
||||||
|
return arg.value or None
|
||||||
|
|
||||||
|
|
||||||
|
def process_empty_header_arg(arg: KeyValueArg) -> str:
|
||||||
|
if not arg.value:
|
||||||
|
return arg.value
|
||||||
|
raise ParseError(
|
||||||
|
f'Invalid item {arg.orig!r} (to specify an empty header use `Header;`)'
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def process_query_param_arg(arg: KeyValueArg) -> str:
|
||||||
|
return arg.value
|
||||||
|
|
||||||
|
|
||||||
|
def process_file_upload_arg(arg: KeyValueArg) -> Tuple[str, IO, str]:
|
||||||
|
parts = arg.value.split(SEPARATOR_FILE_UPLOAD_TYPE)
|
||||||
|
filename = parts[0]
|
||||||
|
mime_type = parts[1] if len(parts) > 1 else None
|
||||||
|
try:
|
||||||
|
f = open(os.path.expanduser(filename), 'rb')
|
||||||
|
except OSError as e:
|
||||||
|
raise ParseError(f'{arg.orig!r}: {e}')
|
||||||
|
return (
|
||||||
|
os.path.basename(filename),
|
||||||
|
f,
|
||||||
|
mime_type or get_content_type(filename),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def process_data_item_arg(arg: KeyValueArg) -> str:
|
||||||
|
return arg.value
|
||||||
|
|
||||||
|
|
||||||
|
def process_data_embed_file_contents_arg(arg: KeyValueArg) -> str:
|
||||||
|
return load_text_file(arg)
|
||||||
|
|
||||||
|
|
||||||
|
def process_data_embed_raw_json_file_arg(arg: KeyValueArg) -> JSONType:
|
||||||
|
contents = load_text_file(arg)
|
||||||
|
value = load_json(arg, contents)
|
||||||
|
return value
|
||||||
|
|
||||||
|
|
||||||
|
def process_data_raw_json_embed_arg(arg: KeyValueArg) -> JSONType:
|
||||||
|
value = load_json(arg, arg.value)
|
||||||
|
return value
|
||||||
|
|
||||||
|
|
||||||
|
def load_text_file(item: KeyValueArg) -> str:
|
||||||
|
path = item.value
|
||||||
|
try:
|
||||||
|
with open(os.path.expanduser(path), 'rb') as f:
|
||||||
|
return f.read().decode()
|
||||||
|
except OSError as e:
|
||||||
|
raise ParseError(f'{item.orig!r}: {e}')
|
||||||
|
except UnicodeDecodeError:
|
||||||
|
raise ParseError(
|
||||||
|
f'{item.orig!r}: cannot embed the content of {item.value!r},'
|
||||||
|
' not a UTF-8 or ASCII-encoded text file'
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def load_json(arg: KeyValueArg, contents: str) -> JSONType:
|
||||||
|
try:
|
||||||
|
return load_json_preserve_order(contents)
|
||||||
|
except ValueError as e:
|
||||||
|
raise ParseError(f'{arg.orig!r}: {e}')
|
360
httpie/client.py
@ -1,113 +1,201 @@
|
|||||||
|
import argparse
|
||||||
|
import http.client
|
||||||
import json
|
import json
|
||||||
import sys
|
import sys
|
||||||
|
from contextlib import contextmanager
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Callable, Iterable, Union
|
||||||
|
from urllib.parse import urlparse, urlunparse
|
||||||
|
|
||||||
import requests
|
import requests
|
||||||
from requests.adapters import HTTPAdapter
|
# noinspection PyPackageRequirements
|
||||||
from requests.structures import CaseInsensitiveDict
|
import urllib3
|
||||||
|
from . import __version__
|
||||||
from httpie import sessions
|
from .cli.dicts import RequestHeadersDict
|
||||||
from httpie import __version__
|
from .constants import UTF8
|
||||||
from httpie.compat import str
|
from .plugins.registry import plugin_manager
|
||||||
from httpie.input import SSL_VERSION_ARG_MAPPING
|
from .sessions import get_httpie_session
|
||||||
from httpie.plugins import plugin_manager
|
from .ssl import AVAILABLE_SSL_VERSION_ARG_MAPPING, HTTPieHTTPSAdapter
|
||||||
from httpie.utils import repr_dict_nice
|
from .uploads import (
|
||||||
|
compress_request, prepare_request_body,
|
||||||
try:
|
get_multipart_data_and_content_type,
|
||||||
# https://urllib3.readthedocs.io/en/latest/security.html
|
)
|
||||||
# noinspection PyPackageRequirements
|
from .utils import get_expired_cookies, repr_dict
|
||||||
import urllib3
|
|
||||||
urllib3.disable_warnings()
|
|
||||||
except (ImportError, AttributeError):
|
|
||||||
# In some rare cases, the user may have an old version of the requests
|
|
||||||
# or urllib3, and there is no method called "disable_warnings." In these
|
|
||||||
# cases, we don't need to call the method.
|
|
||||||
# They may get some noisy output but execution shouldn't die. Move on.
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
FORM_CONTENT_TYPE = 'application/x-www-form-urlencoded; charset=utf-8'
|
urllib3.disable_warnings()
|
||||||
|
|
||||||
|
FORM_CONTENT_TYPE = f'application/x-www-form-urlencoded; charset={UTF8}'
|
||||||
JSON_CONTENT_TYPE = 'application/json'
|
JSON_CONTENT_TYPE = 'application/json'
|
||||||
JSON_ACCEPT = '{0}, */*'.format(JSON_CONTENT_TYPE)
|
JSON_ACCEPT = f'{JSON_CONTENT_TYPE}, */*;q=0.5'
|
||||||
DEFAULT_UA = 'HTTPie/%s' % __version__
|
DEFAULT_UA = f'HTTPie/{__version__}'
|
||||||
|
|
||||||
|
|
||||||
class HTTPieHTTPAdapter(HTTPAdapter):
|
def collect_messages(
|
||||||
|
args: argparse.Namespace,
|
||||||
|
config_dir: Path,
|
||||||
|
request_body_read_callback: Callable[[bytes], None] = None,
|
||||||
|
) -> Iterable[Union[requests.PreparedRequest, requests.Response]]:
|
||||||
|
httpie_session = None
|
||||||
|
httpie_session_headers = None
|
||||||
|
if args.session or args.session_read_only:
|
||||||
|
httpie_session = get_httpie_session(
|
||||||
|
config_dir=config_dir,
|
||||||
|
session_name=args.session or args.session_read_only,
|
||||||
|
host=args.headers.get('Host'),
|
||||||
|
url=args.url,
|
||||||
|
)
|
||||||
|
httpie_session_headers = httpie_session.headers
|
||||||
|
|
||||||
def __init__(self, ssl_version=None, **kwargs):
|
request_kwargs = make_request_kwargs(
|
||||||
self._ssl_version = ssl_version
|
args=args,
|
||||||
super(HTTPieHTTPAdapter, self).__init__(**kwargs)
|
base_headers=httpie_session_headers,
|
||||||
|
request_body_read_callback=request_body_read_callback
|
||||||
def init_poolmanager(self, *args, **kwargs):
|
|
||||||
kwargs['ssl_version'] = self._ssl_version
|
|
||||||
super(HTTPieHTTPAdapter, self).init_poolmanager(*args, **kwargs)
|
|
||||||
|
|
||||||
|
|
||||||
def get_requests_session(ssl_version):
|
|
||||||
requests_session = requests.Session()
|
|
||||||
requests_session.mount(
|
|
||||||
'https://',
|
|
||||||
HTTPieHTTPAdapter(ssl_version=ssl_version)
|
|
||||||
)
|
)
|
||||||
for cls in plugin_manager.get_transport_plugins():
|
send_kwargs = make_send_kwargs(args)
|
||||||
transport_plugin = cls()
|
send_kwargs_mergeable_from_env = make_send_kwargs_mergeable_from_env(args)
|
||||||
requests_session.mount(prefix=transport_plugin.prefix,
|
requests_session = build_requests_session(
|
||||||
adapter=transport_plugin.get_adapter())
|
ssl_version=args.ssl_version,
|
||||||
|
ciphers=args.ciphers,
|
||||||
|
verify=bool(send_kwargs_mergeable_from_env['verify'])
|
||||||
|
)
|
||||||
|
|
||||||
|
if httpie_session:
|
||||||
|
httpie_session.update_headers(request_kwargs['headers'])
|
||||||
|
requests_session.cookies = httpie_session.cookies
|
||||||
|
if args.auth_plugin:
|
||||||
|
# Save auth from CLI to HTTPie session.
|
||||||
|
httpie_session.auth = {
|
||||||
|
'type': args.auth_plugin.auth_type,
|
||||||
|
'raw_auth': args.auth_plugin.raw_auth,
|
||||||
|
}
|
||||||
|
elif httpie_session.auth:
|
||||||
|
# Apply auth from HTTPie session
|
||||||
|
request_kwargs['auth'] = httpie_session.auth
|
||||||
|
|
||||||
|
if args.debug:
|
||||||
|
# TODO: reflect the split between request and send kwargs.
|
||||||
|
dump_request(request_kwargs)
|
||||||
|
|
||||||
|
request = requests.Request(**request_kwargs)
|
||||||
|
prepared_request = requests_session.prepare_request(request)
|
||||||
|
if args.path_as_is:
|
||||||
|
prepared_request.url = ensure_path_as_is(
|
||||||
|
orig_url=args.url,
|
||||||
|
prepped_url=prepared_request.url,
|
||||||
|
)
|
||||||
|
if args.compress and prepared_request.body:
|
||||||
|
compress_request(
|
||||||
|
request=prepared_request,
|
||||||
|
always=args.compress > 1,
|
||||||
|
)
|
||||||
|
response_count = 0
|
||||||
|
expired_cookies = []
|
||||||
|
while prepared_request:
|
||||||
|
yield prepared_request
|
||||||
|
if not args.offline:
|
||||||
|
send_kwargs_merged = requests_session.merge_environment_settings(
|
||||||
|
url=prepared_request.url,
|
||||||
|
**send_kwargs_mergeable_from_env,
|
||||||
|
)
|
||||||
|
with max_headers(args.max_headers):
|
||||||
|
response = requests_session.send(
|
||||||
|
request=prepared_request,
|
||||||
|
**send_kwargs_merged,
|
||||||
|
**send_kwargs,
|
||||||
|
)
|
||||||
|
|
||||||
|
expired_cookies += get_expired_cookies(
|
||||||
|
response.headers.get('Set-Cookie', '')
|
||||||
|
)
|
||||||
|
|
||||||
|
response_count += 1
|
||||||
|
if response.next:
|
||||||
|
if args.max_redirects and response_count == args.max_redirects:
|
||||||
|
raise requests.TooManyRedirects
|
||||||
|
if args.follow:
|
||||||
|
prepared_request = response.next
|
||||||
|
if args.all:
|
||||||
|
yield response
|
||||||
|
continue
|
||||||
|
yield response
|
||||||
|
break
|
||||||
|
|
||||||
|
if httpie_session:
|
||||||
|
if httpie_session.is_new() or not args.session_read_only:
|
||||||
|
httpie_session.cookies = requests_session.cookies
|
||||||
|
httpie_session.remove_cookies(
|
||||||
|
# TODO: take path & domain into account?
|
||||||
|
cookie['name'] for cookie in expired_cookies
|
||||||
|
)
|
||||||
|
httpie_session.save()
|
||||||
|
|
||||||
|
|
||||||
|
# noinspection PyProtectedMember
|
||||||
|
@contextmanager
|
||||||
|
def max_headers(limit):
|
||||||
|
# <https://github.com/httpie/httpie/issues/802>
|
||||||
|
# noinspection PyUnresolvedReferences
|
||||||
|
orig = http.client._MAXHEADERS
|
||||||
|
http.client._MAXHEADERS = limit or float('Inf')
|
||||||
|
try:
|
||||||
|
yield
|
||||||
|
finally:
|
||||||
|
http.client._MAXHEADERS = orig
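The context manager above works by temporarily patching http.client's module-level _MAXHEADERS limit; a minimal usage sketch:

# Lift the response-header limit only for the duration of one send;
# a limit of 0 is treated as "no limit" (float('Inf')) by max_headers() above.
with max_headers(0):
    pass  # e.g. requests_session.send(...) would run here

# On exit the original http.client._MAXHEADERS value is restored.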
|
||||||
|
|
||||||
|
|
||||||
|
def build_requests_session(
|
||||||
|
verify: bool,
|
||||||
|
ssl_version: str = None,
|
||||||
|
ciphers: str = None,
|
||||||
|
) -> requests.Session:
|
||||||
|
requests_session = requests.Session()
|
||||||
|
|
||||||
|
# Install our adapter.
|
||||||
|
https_adapter = HTTPieHTTPSAdapter(
|
||||||
|
ciphers=ciphers,
|
||||||
|
verify=verify,
|
||||||
|
ssl_version=(
|
||||||
|
AVAILABLE_SSL_VERSION_ARG_MAPPING[ssl_version]
|
||||||
|
if ssl_version else None
|
||||||
|
),
|
||||||
|
)
|
||||||
|
requests_session.mount('https://', https_adapter)
|
||||||
|
|
||||||
|
# Install adapters from plugins.
|
||||||
|
for plugin_cls in plugin_manager.get_transport_plugins():
|
||||||
|
transport_plugin = plugin_cls()
|
||||||
|
requests_session.mount(
|
||||||
|
prefix=transport_plugin.prefix,
|
||||||
|
adapter=transport_plugin.get_adapter(),
|
||||||
|
)
|
||||||
|
|
||||||
return requests_session
|
return requests_session
|
||||||
|
|
||||||
|
|
||||||
def get_response(args, config_dir):
|
def dump_request(kwargs: dict):
|
||||||
"""Send the request and return a `request.Response`."""
|
sys.stderr.write(
|
||||||
|
f'\n>>> requests.request(**{repr_dict(kwargs)})\n\n')
|
||||||
ssl_version = None
|
|
||||||
if args.ssl_version:
|
|
||||||
ssl_version = SSL_VERSION_ARG_MAPPING[args.ssl_version]
|
|
||||||
|
|
||||||
requests_session = get_requests_session(ssl_version)
|
|
||||||
requests_session.max_redirects = args.max_redirects
|
|
||||||
|
|
||||||
if not args.session and not args.session_read_only:
|
|
||||||
kwargs = get_requests_kwargs(args)
|
|
||||||
if args.debug:
|
|
||||||
dump_request(kwargs)
|
|
||||||
response = requests_session.request(**kwargs)
|
|
||||||
else:
|
|
||||||
response = sessions.get_response(
|
|
||||||
requests_session=requests_session,
|
|
||||||
args=args,
|
|
||||||
config_dir=config_dir,
|
|
||||||
session_name=args.session or args.session_read_only,
|
|
||||||
read_only=bool(args.session_read_only),
|
|
||||||
)
|
|
||||||
|
|
||||||
return response
|
|
||||||
|
|
||||||
|
|
||||||
def dump_request(kwargs):
|
def finalize_headers(headers: RequestHeadersDict) -> RequestHeadersDict:
|
||||||
sys.stderr.write('\n>>> requests.request(**%s)\n\n'
|
final_headers = RequestHeadersDict()
|
||||||
% repr_dict_nice(kwargs))
|
|
||||||
|
|
||||||
|
|
||||||
def finalize_headers(headers):
|
|
||||||
final_headers = {}
|
|
||||||
for name, value in headers.items():
|
for name, value in headers.items():
|
||||||
if value is not None:
|
if value is not None:
|
||||||
|
# “leading or trailing LWS MAY be removed without
|
||||||
# >leading or trailing LWS MAY be removed without
|
# changing the semantics of the field value”
|
||||||
# >changing the semantics of the field value"
|
# <https://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html>
|
||||||
# -https://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html
|
|
||||||
# Also, requests raises `InvalidHeader` for leading spaces.
|
# Also, requests raises `InvalidHeader` for leading spaces.
|
||||||
value = value.strip()
|
value = value.strip()
|
||||||
|
|
||||||
if isinstance(value, str):
|
if isinstance(value, str):
|
||||||
# See: https://github.com/jakubroztocil/httpie/issues/212
|
# See <https://github.com/httpie/httpie/issues/212>
|
||||||
value = value.encode('utf8')
|
value = value.encode()
|
||||||
|
|
||||||
final_headers[name] = value
|
final_headers[name] = value
|
||||||
return final_headers
|
return final_headers
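An illustrative check of the header finalisation above (the header names are made up): string values are stripped and encoded to bytes, while a None value passes through untouched.

headers = finalize_headers(RequestHeadersDict({
    'User-Agent': '  HTTPie  ',
    'X-Removed': None,
}))
assert headers['User-Agent'] == b'HTTPie'
assert headers['X-Removed'] is None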
|
||||||
|
|
||||||
|
|
||||||
def get_default_headers(args):
|
def make_default_headers(args: argparse.Namespace) -> RequestHeadersDict:
|
||||||
default_headers = CaseInsensitiveDict({
|
default_headers = RequestHeadersDict({
|
||||||
'User-Agent': DEFAULT_UA
|
'User-Agent': DEFAULT_UA
|
||||||
})
|
})
|
||||||
|
|
||||||
@ -124,11 +212,42 @@ def get_default_headers(args):
|
|||||||
return default_headers
|
return default_headers
|
||||||
|
|
||||||
|
|
||||||
def get_requests_kwargs(args, base_headers=None):
|
def make_send_kwargs(args: argparse.Namespace) -> dict:
|
||||||
|
return {
|
||||||
|
'timeout': args.timeout or None,
|
||||||
|
'allow_redirects': False,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def make_send_kwargs_mergeable_from_env(args: argparse.Namespace) -> dict:
|
||||||
|
cert = None
|
||||||
|
if args.cert:
|
||||||
|
cert = args.cert
|
||||||
|
if args.cert_key:
|
||||||
|
cert = cert, args.cert_key
|
||||||
|
return {
|
||||||
|
'proxies': {p.key: p.value for p in args.proxy},
|
||||||
|
'stream': True,
|
||||||
|
'verify': {
|
||||||
|
'yes': True,
|
||||||
|
'true': True,
|
||||||
|
'no': False,
|
||||||
|
'false': False,
|
||||||
|
}.get(args.verify.lower(), args.verify),
|
||||||
|
'cert': cert,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def make_request_kwargs(
|
||||||
|
args: argparse.Namespace,
|
||||||
|
base_headers: RequestHeadersDict = None,
|
||||||
|
request_body_read_callback=lambda chunk: chunk
|
||||||
|
) -> dict:
|
||||||
"""
|
"""
|
||||||
Translate our `args` into `requests.request` keyword arguments.
|
Translate our `args` into `requests.Request` keyword arguments.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
files = args.files
|
||||||
# Serialize JSON data, if needed.
|
# Serialize JSON data, if needed.
|
||||||
data = args.data
|
data = args.data
|
||||||
auto_json = data and not args.form
|
auto_json = data and not args.form
|
||||||
@ -141,37 +260,60 @@ def get_requests_kwargs(args, base_headers=None):
|
|||||||
data = ''
|
data = ''
|
||||||
|
|
||||||
# Finalize headers.
|
# Finalize headers.
|
||||||
headers = get_default_headers(args)
|
headers = make_default_headers(args)
|
||||||
if base_headers:
|
if base_headers:
|
||||||
headers.update(base_headers)
|
headers.update(base_headers)
|
||||||
headers.update(args.headers)
|
headers.update(args.headers)
|
||||||
|
if args.offline and args.chunked and 'Transfer-Encoding' not in headers:
|
||||||
|
# When online, we let requests set the header instead to be able more
|
||||||
|
# easily verify chunking is taking place.
|
||||||
|
headers['Transfer-Encoding'] = 'chunked'
|
||||||
headers = finalize_headers(headers)
|
headers = finalize_headers(headers)
|
||||||
|
|
||||||
cert = None
|
if (args.form and files) or args.multipart:
|
||||||
if args.cert:
|
data, headers['Content-Type'] = get_multipart_data_and_content_type(
|
||||||
cert = args.cert
|
data=args.multipart_data,
|
||||||
if args.cert_key:
|
boundary=args.boundary,
|
||||||
cert = cert, args.cert_key
|
content_type=args.headers.get('Content-Type'),
|
||||||
|
)
|
||||||
|
|
||||||
kwargs = {
|
return {
|
||||||
'stream': True,
|
|
||||||
'method': args.method.lower(),
|
'method': args.method.lower(),
|
||||||
'url': args.url,
|
'url': args.url,
|
||||||
'headers': headers,
|
'headers': headers,
|
||||||
'data': data,
|
'data': prepare_request_body(
|
||||||
'verify': {
|
body=data,
|
||||||
'yes': True,
|
body_read_callback=request_body_read_callback,
|
||||||
'true': True,
|
chunked=args.chunked,
|
||||||
'no': False,
|
offline=args.offline,
|
||||||
'false': False,
|
content_length_header_value=headers.get('Content-Length'),
|
||||||
}.get(args.verify.lower(), args.verify),
|
),
|
||||||
'cert': cert,
|
|
||||||
'timeout': args.timeout,
|
|
||||||
'auth': args.auth,
|
'auth': args.auth,
|
||||||
'proxies': {p.key: p.value for p in args.proxy},
|
'params': args.params.items(),
|
||||||
'files': args.files,
|
|
||||||
'allow_redirects': args.follow,
|
|
||||||
'params': args.params,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return kwargs
|
|
||||||
|
def ensure_path_as_is(orig_url: str, prepped_url: str) -> str:
|
||||||
|
"""
|
||||||
|
Handle `--path-as-is` by replacing the path component of the prepared
|
||||||
|
URL with the path component from the original URL. Other parts stay
|
||||||
|
untouched because other (welcome) processing on the URL might have
|
||||||
|
taken place.
|
||||||
|
|
||||||
|
<https://github.com/httpie/httpie/issues/895>
|
||||||
|
|
||||||
|
|
||||||
|
<https://ec.haxx.se/http/http-basics#path-as-is>
|
||||||
|
<https://curl.haxx.se/libcurl/c/CURLOPT_PATH_AS_IS.html>
|
||||||
|
|
||||||
|
>>> ensure_path_as_is('http://foo/../', 'http://foo/?foo=bar')
|
||||||
|
'http://foo/../?foo=bar'
|
||||||
|
|
||||||
|
"""
|
||||||
|
parsed_orig, parsed_prepped = urlparse(orig_url), urlparse(prepped_url)
|
||||||
|
final_dict = {
|
||||||
|
# noinspection PyProtectedMember
|
||||||
|
**parsed_prepped._asdict(),
|
||||||
|
'path': parsed_orig.path,
|
||||||
|
}
|
||||||
|
return urlunparse(tuple(final_dict.values()))
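Beyond the doctest above, a usage sketch with a hypothetical URL shows the intent: requests squashes the dot segments while preparing the request, and ensure_path_as_is() restores the original path without touching the query.

original = 'http://example.org/a/../b?x=1'
prepared = 'http://example.org/b?x=1'    # what requests would have prepared
assert ensure_path_as_is(original, prepared) == 'http://example.org/a/../b?x=1'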
|
||||||
|
@ -1,39 +1,4 @@
|
|||||||
"""
|
|
||||||
Python 2.7, and 3.x compatibility.
|
|
||||||
|
|
||||||
"""
|
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
|
|
||||||
is_py2 = sys.version_info[0] == 2
|
|
||||||
is_py27 = sys.version_info[:2] == (2, 7)
|
|
||||||
is_py3 = sys.version_info[0] == 3
|
|
||||||
is_pypy = 'pypy' in sys.version.lower()
|
|
||||||
is_windows = 'win32' in str(sys.platform).lower()
|
is_windows = 'win32' in str(sys.platform).lower()
|
||||||
|
|
||||||
|
|
||||||
if is_py2:
|
|
||||||
# noinspection PyShadowingBuiltins
|
|
||||||
bytes = str
|
|
||||||
# noinspection PyUnresolvedReferences,PyShadowingBuiltins
|
|
||||||
str = unicode
|
|
||||||
elif is_py3:
|
|
||||||
# noinspection PyShadowingBuiltins
|
|
||||||
str = str
|
|
||||||
# noinspection PyShadowingBuiltins
|
|
||||||
bytes = bytes
|
|
||||||
|
|
||||||
|
|
||||||
try: # pragma: no cover
|
|
||||||
# noinspection PyUnresolvedReferences,PyCompatibility
|
|
||||||
from urllib.parse import urlsplit
|
|
||||||
except ImportError: # pragma: no cover
|
|
||||||
# noinspection PyUnresolvedReferences,PyCompatibility
|
|
||||||
from urlparse import urlsplit
|
|
||||||
|
|
||||||
try: # pragma: no cover
|
|
||||||
# noinspection PyCompatibility
|
|
||||||
from urllib.request import urlopen
|
|
||||||
except ImportError: # pragma: no cover
|
|
||||||
# noinspection PyCompatibility,PyUnresolvedReferences
|
|
||||||
from urllib2 import urlopen
|
|
||||||
|
158
httpie/config.py
@ -1,59 +1,97 @@
|
|||||||
import os
|
|
||||||
import json
|
import json
|
||||||
import errno
|
import os
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Union
|
||||||
|
|
||||||
from httpie import __version__
|
from . import __version__
|
||||||
from httpie.compat import is_windows
|
from .compat import is_windows
|
||||||
|
from .constants import UTF8
|
||||||
|
|
||||||
|
|
||||||
DEFAULT_CONFIG_DIR = str(os.environ.get(
|
ENV_XDG_CONFIG_HOME = 'XDG_CONFIG_HOME'
|
||||||
'HTTPIE_CONFIG_DIR',
|
ENV_HTTPIE_CONFIG_DIR = 'HTTPIE_CONFIG_DIR'
|
||||||
os.path.expanduser('~/.httpie') if not is_windows else
|
DEFAULT_CONFIG_DIRNAME = 'httpie'
|
||||||
os.path.expandvars(r'%APPDATA%\\httpie')
|
DEFAULT_RELATIVE_XDG_CONFIG_HOME = Path('.config')
|
||||||
))
|
DEFAULT_RELATIVE_LEGACY_CONFIG_DIR = Path('.httpie')
|
||||||
|
DEFAULT_WINDOWS_CONFIG_DIR = Path(
|
||||||
|
os.path.expandvars('%APPDATA%')) / DEFAULT_CONFIG_DIRNAME
|
||||||
|
|
||||||
|
|
||||||
|
def get_default_config_dir() -> Path:
|
||||||
|
"""
|
||||||
|
Return the path to the httpie configuration directory.
|
||||||
|
|
||||||
|
This directory isn't guaranteed to exist, nor are any of its
|
||||||
|
ancestors (only the legacy ~/.httpie, if returned, is guaranteed to exist).
|
||||||
|
|
||||||
|
XDG Base Directory Specification support:
|
||||||
|
|
||||||
|
<https://wiki.archlinux.org/index.php/XDG_Base_Directory>
|
||||||
|
|
||||||
|
$XDG_CONFIG_HOME is supported; $XDG_CONFIG_DIRS is not
|
||||||
|
|
||||||
|
"""
|
||||||
|
# 1. explicitly set through env
|
||||||
|
env_config_dir = os.environ.get(ENV_HTTPIE_CONFIG_DIR)
|
||||||
|
if env_config_dir:
|
||||||
|
return Path(env_config_dir)
|
||||||
|
|
||||||
|
# 2. Windows
|
||||||
|
if is_windows:
|
||||||
|
return DEFAULT_WINDOWS_CONFIG_DIR
|
||||||
|
|
||||||
|
home_dir = Path.home()
|
||||||
|
|
||||||
|
# 3. legacy ~/.httpie
|
||||||
|
legacy_config_dir = home_dir / DEFAULT_RELATIVE_LEGACY_CONFIG_DIR
|
||||||
|
if legacy_config_dir.exists():
|
||||||
|
return legacy_config_dir
|
||||||
|
|
||||||
|
# 4. XDG
|
||||||
|
xdg_config_home_dir = os.environ.get(
|
||||||
|
ENV_XDG_CONFIG_HOME, # 4.1. explicit
|
||||||
|
home_dir / DEFAULT_RELATIVE_XDG_CONFIG_HOME # 4.2. default
|
||||||
|
)
|
||||||
|
return Path(xdg_config_home_dir) / DEFAULT_CONFIG_DIRNAME
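The resolution order implemented above can be exercised directly. A small sketch with hypothetical paths, assuming a non-Windows system with no legacy ~/.httpie directory:

import os
from pathlib import Path

# 1. HTTPIE_CONFIG_DIR always wins when set.
os.environ['HTTPIE_CONFIG_DIR'] = '/tmp/httpie-config'
assert get_default_config_dir() == Path('/tmp/httpie-config')
del os.environ['HTTPIE_CONFIG_DIR']

# 2. Otherwise $XDG_CONFIG_HOME (default ~/.config) is used,
#    suffixed with the 'httpie' directory name.
os.environ['XDG_CONFIG_HOME'] = '/tmp/xdg'
assert get_default_config_dir() == Path('/tmp/xdg/httpie')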
|
||||||
|
|
||||||
|
|
||||||
|
DEFAULT_CONFIG_DIR = get_default_config_dir()
|
||||||
|
|
||||||
|
|
||||||
|
class ConfigFileError(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
class BaseConfigDict(dict):
|
class BaseConfigDict(dict):
|
||||||
|
|
||||||
name = None
|
name = None
|
||||||
helpurl = None
|
helpurl = None
|
||||||
about = None
|
about = None
|
||||||
|
|
||||||
def __getattr__(self, item):
|
def __init__(self, path: Path):
|
||||||
return self[item]
|
super().__init__()
|
||||||
|
self.path = path
|
||||||
|
|
||||||
def _get_path(self):
|
def ensure_directory(self):
|
||||||
"""Return the config file path without side-effects."""
|
self.path.parent.mkdir(mode=0o700, parents=True, exist_ok=True)
|
||||||
raise NotImplementedError()
|
|
||||||
|
|
||||||
@property
|
def is_new(self) -> bool:
|
||||||
def path(self):
|
return not self.path.exists()
|
||||||
"""Return the config file path creating basedir, if needed."""
|
|
||||||
path = self._get_path()
|
|
||||||
try:
|
|
||||||
os.makedirs(os.path.dirname(path), mode=0o700)
|
|
||||||
except OSError as e:
|
|
||||||
if e.errno != errno.EEXIST:
|
|
||||||
raise
|
|
||||||
return path
|
|
||||||
|
|
||||||
def is_new(self):
|
|
||||||
return not os.path.exists(self._get_path())
|
|
||||||
|
|
||||||
def load(self):
|
def load(self):
|
||||||
|
config_type = type(self).__name__.lower()
|
||||||
try:
|
try:
|
||||||
with open(self.path, 'rt') as f:
|
with self.path.open(encoding=UTF8) as f:
|
||||||
try:
|
try:
|
||||||
data = json.load(f)
|
data = json.load(f)
|
||||||
except ValueError as e:
|
except ValueError as e:
|
||||||
raise ValueError(
|
raise ConfigFileError(
|
||||||
'Invalid %s JSON: %s [%s]' %
|
f'invalid {config_type} file: {e} [{self.path}]'
|
||||||
(type(self).__name__, str(e), self.path)
|
|
||||||
)
|
)
|
||||||
self.update(data)
|
self.update(data)
|
||||||
except IOError as e:
|
except FileNotFoundError:
|
||||||
if e.errno != errno.ENOENT:
|
pass
|
||||||
raise
|
except OSError as e:
|
||||||
|
raise ConfigFileError(f'cannot read {config_type} file: {e}')
|
||||||
|
|
||||||
def save(self):
|
def save(self):
|
||||||
self['__meta__'] = {
|
self['__meta__'] = {
|
||||||
@ -65,48 +103,28 @@ class BaseConfigDict(dict):
|
|||||||
if self.about:
|
if self.about:
|
||||||
self['__meta__']['about'] = self.about
|
self['__meta__']['about'] = self.about
|
||||||
|
|
||||||
with open(self.path, 'w') as f:
|
self.ensure_directory()
|
||||||
json.dump(self, f, indent=4, sort_keys=True, ensure_ascii=True)
|
|
||||||
f.write('\n')
|
|
||||||
|
|
||||||
def delete(self):
|
json_string = json.dumps(
|
||||||
try:
|
obj=self,
|
||||||
os.unlink(self.path)
|
indent=4,
|
||||||
except OSError as e:
|
sort_keys=True,
|
||||||
if e.errno != errno.ENOENT:
|
ensure_ascii=True,
|
||||||
raise
|
)
|
||||||
|
self.path.write_text(json_string + '\n', encoding=UTF8)
|
||||||
|
|
||||||
|
|
||||||
class Config(BaseConfigDict):
|
class Config(BaseConfigDict):
|
||||||
|
FILENAME = 'config.json'
|
||||||
name = 'config'
|
|
||||||
helpurl = 'https://httpie.org/doc#config'
|
|
||||||
about = 'HTTPie configuration file'
|
|
||||||
|
|
||||||
DEFAULTS = {
|
DEFAULTS = {
|
||||||
'default_options': []
|
'default_options': []
|
||||||
}
|
}
|
||||||
|
|
||||||
def __init__(self, directory=DEFAULT_CONFIG_DIR):
|
def __init__(self, directory: Union[str, Path] = DEFAULT_CONFIG_DIR):
|
||||||
super(Config, self).__init__()
|
self.directory = Path(directory)
|
||||||
|
super().__init__(path=self.directory / self.FILENAME)
|
||||||
self.update(self.DEFAULTS)
|
self.update(self.DEFAULTS)
|
||||||
self.directory = directory
|
|
||||||
|
|
||||||
def load(self):
|
@property
|
||||||
super(Config, self).load()
|
def default_options(self) -> list:
|
||||||
self._migrate_implicit_content_type()
|
return self['default_options']
|
||||||
|
|
||||||
def _get_path(self):
|
|
||||||
return os.path.join(self.directory, self.name + '.json')
|
|
||||||
|
|
||||||
def _migrate_implicit_content_type(self):
|
|
||||||
"""Migrate the removed implicit_content_type config option"""
|
|
||||||
try:
|
|
||||||
implicit_content_type = self.pop('implicit_content_type')
|
|
||||||
except KeyError:
|
|
||||||
self.save()
|
|
||||||
else:
|
|
||||||
if implicit_content_type == 'form':
|
|
||||||
self['default_options'].insert(0, '--form')
|
|
||||||
self.save()
|
|
||||||
self.load()
|
|
||||||
|
2
httpie/constants.py
Normal file
2
httpie/constants.py
Normal file
@ -0,0 +1,2 @@
|
|||||||
|
# UTF-8 encoding name
|
||||||
|
UTF8 = 'utf-8'
|
httpie/context.py
@@ -1,16 +1,22 @@
 import sys
+import os
+from pathlib import Path
+from typing import IO, Optional


 try:
     import curses
 except ImportError:
     curses = None  # Compiled w/o curses

-from httpie.compat import is_windows
-from httpie.config import DEFAULT_CONFIG_DIR, Config
+from .compat import is_windows
+from .config import DEFAULT_CONFIG_DIR, Config, ConfigFileError
+from .constants import UTF8

-from httpie.utils import repr_dict_nice
+from .utils import repr_dict


-class Environment(object):
+class Environment:
     """
     Information about the execution context
     (standard streams, config directory, etc).
@@ -20,17 +26,18 @@ class Environment(object):
     is used by the test suite to simulate various scenarios.

     """
-    is_windows = is_windows
-    config_dir = DEFAULT_CONFIG_DIR
-    stdin = sys.stdin
-    stdin_isatty = stdin.isatty()
-    stdin_encoding = None
-    stdout = sys.stdout
-    stdout_isatty = stdout.isatty()
-    stdout_encoding = None
-    stderr = sys.stderr
-    stderr_isatty = stderr.isatty()
+    is_windows: bool = is_windows
+    config_dir: Path = DEFAULT_CONFIG_DIR
+    stdin: Optional[IO] = sys.stdin  # `None` when closed fd (#791)
+    stdin_isatty: bool = stdin.isatty() if stdin else False
+    stdin_encoding: str = None
+    stdout: IO = sys.stdout
+    stdout_isatty: bool = stdout.isatty()
+    stdout_encoding: str = None
+    stderr: IO = sys.stderr
+    stderr_isatty: bool = stderr.isatty()
     colors = 256
+    program_name: str = 'http'
     if not is_windows:
         if curses:
             try:
@@ -51,7 +58,7 @@ class Environment(object):
         )
         del colorama

-    def __init__(self, **kwargs):
+    def __init__(self, devnull=None, **kwargs):
         """
         Use keyword arguments to overwrite
         any of the class attributes for this instance.
@@ -60,40 +67,59 @@
         assert all(hasattr(type(self), attr) for attr in kwargs.keys())
         self.__dict__.update(**kwargs)

-        # Keyword arguments > stream.encoding > default utf8
-        if self.stdin_encoding is None:
+        # The original STDERR unaffected by --quiet’ing.
+        self._orig_stderr = self.stderr
+        self._devnull = devnull
+
+        # Keyword arguments > stream.encoding > default UTF-8
+        if self.stdin and self.stdin_encoding is None:
             self.stdin_encoding = getattr(
-                self.stdin, 'encoding', None) or 'utf8'
+                self.stdin, 'encoding', None) or UTF8
         if self.stdout_encoding is None:
             actual_stdout = self.stdout
             if is_windows:
                 # noinspection PyUnresolvedReferences
                 from colorama import AnsiToWin32
                 if isinstance(self.stdout, AnsiToWin32):
+                    # noinspection PyUnresolvedReferences
                     actual_stdout = self.stdout.wrapped
             self.stdout_encoding = getattr(
-                actual_stdout, 'encoding', None) or 'utf8'
-
-    @property
-    def config(self):
-        if not hasattr(self, '_config'):
-            self._config = Config(directory=self.config_dir)
-            if self._config.is_new():
-                self._config.save()
-            else:
-                self._config.load()
-        return self._config
+                actual_stdout, 'encoding', None) or UTF8

     def __str__(self):
         defaults = dict(type(self).__dict__)
         actual = dict(defaults)
         actual.update(self.__dict__)
         actual['config'] = self.config
-        return repr_dict_nice(dict(
-            (key, value)
+        return repr_dict({
+            key: value
             for key, value in actual.items()
-            if not key.startswith('_'))
-        )
+            if not key.startswith('_')
+        })

     def __repr__(self):
-        return '<{0} {1}>'.format(type(self).__name__, str(self))
+        return f'<{type(self).__name__} {self}>'
+
+    _config: Config = None
+
+    @property
+    def config(self) -> Config:
+        config = self._config
+        if not config:
+            self._config = config = Config(directory=self.config_dir)
+            if not config.is_new():
+                try:
+                    config.load()
+                except ConfigFileError as e:
+                    self.log_error(e, level='warning')
+        return config
+
+    @property
+    def devnull(self) -> IO:
+        if self._devnull is None:
+            self._devnull = open(os.devnull, 'w+')
+        return self._devnull
+
+    def log_error(self, msg, level='error'):
+        assert level in ['error', 'warning']
+        self._orig_stderr.write(f'\n{self.program_name}: {level}: {msg}\n\n')
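The net effect is that `Environment` can be fully faked in tests via keyword arguments and now owns error reporting (`log_error`) plus a lazily opened `devnull`. A rough sketch of how it is meant to be driven, assuming a non-Windows interpreter so the colorama branch stays dormant; the `io.StringIO` streams are illustrative stand-ins:

    import io
    from httpie.context import Environment

    env = Environment(stdout=io.StringIO(), stderr=io.StringIO())

    env.log_error('request failed', level='warning')
    print(env.stderr.getvalue())   # '\nhttp: warning: request failed\n\n'

    # StringIO has no .encoding, so the fallback from the new constants module kicks in:
    print(env.stdout_encoding)     # 'utf-8'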
httpie/core.py (354 changes)
@@ -1,175 +1,25 @@
-"""This module provides the main functionality of HTTPie.
-
-Invocation flow:
-
-    1. Read, validate and process the input (args, `stdin`).
-    2. Create and send a request.
-    3. Stream, and possibly process and format, the parts
-       of the request-response exchange selected by output options.
-    4. Simultaneously write to `stdout`
-    5. Exit.
-
-"""
-import sys
-import errno
-import platform
-
-import requests
-from requests import __version__ as requests_version
-from pygments import __version__ as pygments_version
-
-from httpie import __version__ as httpie_version, ExitStatus
-from httpie.compat import str, bytes, is_py3
-from httpie.client import get_response
-from httpie.downloads import Downloader
-from httpie.context import Environment
-from httpie.plugins import plugin_manager
-from httpie.output.streams import (
-    build_output_stream,
-    write_stream,
-    write_stream_with_colors_win_py3
-)
-
-
-def get_exit_status(http_status, follow=False):
-    """Translate HTTP status code to exit status code."""
-    if 300 <= http_status <= 399 and not follow:
-        # Redirect
-        return ExitStatus.ERROR_HTTP_3XX
-    elif 400 <= http_status <= 499:
-        # Client Error
-        return ExitStatus.ERROR_HTTP_4XX
-    elif 500 <= http_status <= 599:
-        # Server Error
-        return ExitStatus.ERROR_HTTP_5XX
-    else:
-        return ExitStatus.SUCCESS
-
-
-def print_debug_info(env):
-    env.stderr.writelines([
-        'HTTPie %s\n' % httpie_version,
-        'Requests %s\n' % requests_version,
-        'Pygments %s\n' % pygments_version,
-        'Python %s\n%s\n' % (sys.version, sys.executable),
-        '%s %s' % (platform.system(), platform.release()),
-    ])
-    env.stderr.write('\n\n')
-    env.stderr.write(repr(env))
-    env.stderr.write('\n')
-
-
-def decode_args(args, stdin_encoding):
-    """
-    Convert all bytes args to str
-    by decoding them using stdin encoding.
-
-    """
-    return [
-        arg.decode(stdin_encoding)
-        if type(arg) == bytes else arg
-        for arg in args
-    ]
-
-
-def program(args, env, log_error):
-    """
-    The main program without error handling
-
-    :param args: parsed args (argparse.Namespace)
-    :type env: Environment
-    :param log_error: error log function
-    :return: status code
-
-    """
-    exit_status = ExitStatus.SUCCESS
-    downloader = None
-    show_traceback = args.debug or args.traceback
-
-    try:
-        if args.download:
-            args.follow = True  # --download implies --follow.
-            downloader = Downloader(
-                output_file=args.output_file,
-                progress_file=env.stderr,
-                resume=args.download_resume
-            )
-            downloader.pre_request(args.headers)
-
-        final_response = get_response(args, config_dir=env.config.directory)
-        if args.all:
-            responses = final_response.history + [final_response]
-        else:
-            responses = [final_response]
-
-        for response in responses:
-
-            if args.check_status or downloader:
-                exit_status = get_exit_status(
-                    http_status=response.status_code,
-                    follow=args.follow
-                )
-                if not env.stdout_isatty and exit_status != ExitStatus.SUCCESS:
-                    log_error(
-                        'HTTP %s %s', response.raw.status, response.raw.reason,
-                        level='warning'
-                    )
-
-            write_stream_kwargs = {
-                'stream': build_output_stream(
-                    args=args,
-                    env=env,
-                    request=response.request,
-                    response=response,
-                    output_options=(
-                        args.output_options
-                        if response is final_response
-                        else args.output_options_history
-                    )
-                ),
-                # NOTE: `env.stdout` will in fact be `stderr` with `--download`
-                'outfile': env.stdout,
-                'flush': env.stdout_isatty or args.stream
-            }
-            try:
-                if env.is_windows and is_py3 and 'colors' in args.prettify:
-                    write_stream_with_colors_win_py3(**write_stream_kwargs)
-                else:
-                    write_stream(**write_stream_kwargs)
-            except IOError as e:
-                if not show_traceback and e.errno == errno.EPIPE:
-                    # Ignore broken pipes unless --traceback.
-                    env.stderr.write('\n')
-                else:
-                    raise
-
-        if downloader and exit_status == ExitStatus.SUCCESS:
-            # Last response body download.
-            download_stream, download_to = downloader.start(final_response)
-            write_stream(
-                stream=download_stream,
-                outfile=download_to,
-                flush=False,
-            )
-            downloader.finish()
-            if downloader.interrupted:
-                exit_status = ExitStatus.ERROR
-                log_error('Incomplete download: size=%d; downloaded=%d' % (
-                    downloader.status.total_size,
-                    downloader.status.downloaded
-                ))
-        return exit_status
-
-    finally:
-        if downloader and not downloader.finished:
-            downloader.failed()
-
-        if (not isinstance(args, list) and args.output_file
-                and args.output_file_specified):
-            args.output_file.close()
-
-
-def main(args=sys.argv[1:], env=Environment(), custom_log_error=None):
+import argparse
+import os
+import platform
+import sys
+from typing import List, Optional, Tuple, Union
+
+import requests
+from pygments import __version__ as pygments_version
+from requests import __version__ as requests_version
+
+from . import __version__ as httpie_version
+from .cli.constants import OUT_REQ_BODY, OUT_REQ_HEAD, OUT_RESP_BODY, OUT_RESP_HEAD
+from .client import collect_messages
+from .context import Environment
+from .downloads import Downloader
+from .output.writer import write_message, write_stream, MESSAGE_SEPARATOR_BYTES
+from .plugins.registry import plugin_manager
+from .status import ExitStatus, http_status_to_exit_status
+
+
+# noinspection PyDefaultArgument
+def main(args: List[Union[str, bytes]] = sys.argv, env=Environment()) -> ExitStatus:
     """
     The main function.

@@ -179,23 +29,16 @@ def main(args=sys.argv[1:], env=Environment(), custom_log_error=None):
     Return exit status code.

     """
-    args = decode_args(args, env.stdin_encoding)
+    program_name, *args = args
+    env.program_name = os.path.basename(program_name)
+    args = decode_raw_args(args, env.stdin_encoding)
     plugin_manager.load_installed_plugins()

-    def log_error(msg, *args, **kwargs):
-        msg = msg % args
-        level = kwargs.get('level', 'error')
-        assert level in ['error', 'warning']
-        env.stderr.write('\nhttp: %s: %s\n' % (level, msg))
-
-    from httpie.cli import parser
+    from .cli.definition import parser

     if env.config.default_options:
         args = env.config.default_options + args

-    if custom_log_error:
-        log_error = custom_log_error
-
     include_debug_info = '--debug' in args
     include_traceback = include_debug_info or '--traceback' in args

@@ -207,7 +50,10 @@ def main(args=sys.argv[1:], env=Environment(), custom_log_error=None):
     exit_status = ExitStatus.SUCCESS

     try:
-        parsed_args = parser.parse_args(args=args, env=env)
+        parsed_args = parser.parse_args(
+            args=args,
+            env=env,
+        )
     except KeyboardInterrupt:
         env.stderr.write('\n')
         if include_traceback:
@@ -224,7 +70,6 @@ def main(args=sys.argv[1:], env=Environment(), custom_log_error=None):
         exit_status = program(
             args=parsed_args,
             env=env,
-            log_error=log_error,
         )
     except KeyboardInterrupt:
         env.stderr.write('\n')
@@ -239,22 +84,163 @@ def main(args=sys.argv[1:], env=Environment(), custom_log_error=None):
         exit_status = ExitStatus.ERROR
     except requests.Timeout:
         exit_status = ExitStatus.ERROR_TIMEOUT
-        log_error('Request timed out (%ss).', parsed_args.timeout)
+        env.log_error(f'Request timed out ({parsed_args.timeout}s).')
     except requests.TooManyRedirects:
         exit_status = ExitStatus.ERROR_TOO_MANY_REDIRECTS
-        log_error('Too many redirects (--max-redirects=%s).',
-                  parsed_args.max_redirects)
+        env.log_error(
+            f'Too many redirects'
+            f' (--max-redirects={parsed_args.max_redirects}).'
+        )
     except Exception as e:
         # TODO: Further distinction between expected and unexpected errors.
         msg = str(e)
         if hasattr(e, 'request'):
             request = e.request
             if hasattr(request, 'url'):
-                msg += ' while doing %s request to URL: %s' % (
-                    request.method, request.url)
-        log_error('%s: %s', type(e).__name__, msg)
+                msg = (
+                    f'{msg} while doing a {request.method}'
+                    f' request to URL: {request.url}'
+                )
+        env.log_error(f'{type(e).__name__}: {msg}')
         if include_traceback:
             raise
         exit_status = ExitStatus.ERROR

     return exit_status
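Note the changed entry-point contract: `main()` now receives the full argv including the program name, splits it off (`program_name, *args = args`), and normalises any raw bytes with `decode_raw_args()` (defined further down in this hunk). A small illustration of the decoding step; the argument values are made up:

    from httpie.core import decode_raw_args

    argv = [b'GET', 'example.org', b'Accept:application/json']
    print(decode_raw_args(argv, stdin_encoding='utf-8'))
    # ['GET', 'example.org', 'Accept:application/json']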
+
+
+def get_output_options(
+    args: argparse.Namespace,
+    message: Union[requests.PreparedRequest, requests.Response]
+) -> Tuple[bool, bool]:
+    return {
+        requests.PreparedRequest: (
+            OUT_REQ_HEAD in args.output_options,
+            OUT_REQ_BODY in args.output_options,
+        ),
+        requests.Response: (
+            OUT_RESP_HEAD in args.output_options,
+            OUT_RESP_BODY in args.output_options,
+        ),
+    }[type(message)]
+
+
+def program(args: argparse.Namespace, env: Environment) -> ExitStatus:
+    """
+    The main program without error handling.
+
+    """
+    # TODO: Refactor and drastically simplify, especially so that the separator logic is elsewhere.
+    exit_status = ExitStatus.SUCCESS
+    downloader = None
+    initial_request: Optional[requests.PreparedRequest] = None
+    final_response: Optional[requests.Response] = None
+
+    def separate():
+        getattr(env.stdout, 'buffer', env.stdout).write(MESSAGE_SEPARATOR_BYTES)
+
+    def request_body_read_callback(chunk: bytes):
+        should_pipe_to_stdout = bool(
+            # Request body output desired
+            OUT_REQ_BODY in args.output_options
+            # & not `.read()` already pre-request (e.g., for compression)
+            and initial_request
+            # & non-EOF chunk
+            and chunk
+        )
+        if should_pipe_to_stdout:
+            msg = requests.PreparedRequest()
+            msg.is_body_upload_chunk = True
+            msg.body = chunk
+            msg.headers = initial_request.headers
+            write_message(requests_message=msg, env=env, args=args, with_body=True, with_headers=False)
+
+    try:
+        if args.download:
+            args.follow = True  # --download implies --follow.
+            downloader = Downloader(output_file=args.output_file, progress_file=env.stderr, resume=args.download_resume)
+            downloader.pre_request(args.headers)
+        messages = collect_messages(args=args, config_dir=env.config.directory,
+                                    request_body_read_callback=request_body_read_callback)
+        force_separator = False
+        prev_with_body = False
+
+        # Process messages as they’re generated
+        for message in messages:
+            is_request = isinstance(message, requests.PreparedRequest)
+            with_headers, with_body = get_output_options(args=args, message=message)
+            do_write_body = with_body
+            if prev_with_body and (with_headers or with_body) and (force_separator or not env.stdout_isatty):
+                # Separate after a previous message with body, if needed. See test_tokens.py.
+                separate()
+            force_separator = False
+            if is_request:
+                if not initial_request:
+                    initial_request = message
+                if with_body:
+                    is_streamed_upload = not isinstance(message.body, (str, bytes))
+                    do_write_body = not is_streamed_upload
+                    force_separator = is_streamed_upload and env.stdout_isatty
+            else:
+                final_response = message
+                if args.check_status or downloader:
+                    exit_status = http_status_to_exit_status(http_status=message.status_code, follow=args.follow)
+                    if exit_status != ExitStatus.SUCCESS and (not env.stdout_isatty or args.quiet):
+                        env.log_error(f'HTTP {message.raw.status} {message.raw.reason}', level='warning')
+            write_message(requests_message=message, env=env, args=args, with_headers=with_headers,
+                          with_body=do_write_body)
+            prev_with_body = with_body
+
+        # Cleanup
+        if force_separator:
+            separate()
+        if downloader and exit_status == ExitStatus.SUCCESS:
+            # Last response body download.
+            download_stream, download_to = downloader.start(
+                initial_url=initial_request.url,
+                final_response=final_response,
+            )
+            write_stream(stream=download_stream, outfile=download_to, flush=False)
+            downloader.finish()
+            if downloader.interrupted:
+                exit_status = ExitStatus.ERROR
+                env.log_error(
+                    f'Incomplete download: size={downloader.status.total_size};'
+                    f' downloaded={downloader.status.downloaded}'
+                )
+        return exit_status
+
+    finally:
+        if downloader and not downloader.finished:
+            downloader.failed()
+        if args.output_file and args.output_file_specified:
+            args.output_file.close()
+
+
+def print_debug_info(env: Environment):
+    env.stderr.writelines([
+        f'HTTPie {httpie_version}\n',
+        f'Requests {requests_version}\n',
+        f'Pygments {pygments_version}\n',
+        f'Python {sys.version}\n{sys.executable}\n',
+        f'{platform.system()} {platform.release()}',
+    ])
+    env.stderr.write('\n\n')
+    env.stderr.write(repr(env))
+    env.stderr.write('\n')
+
+
+def decode_raw_args(
+    args: List[Union[str, bytes]],
+    stdin_encoding: str
+) -> List[str]:
+    """
+    Convert all bytes args to str
+    by decoding them using stdin encoding.
+
+    """
+    return [
+        arg.decode(stdin_encoding)
+        if type(arg) is bytes else arg
+        for arg in args
+    ]
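`get_output_options()` folds the old per-response branching into a lookup keyed by message type: the `--print` string is checked for the request/response head/body letters. A quick illustration, assuming the same single-letter constants ('H', 'B', 'h', 'b') that the removed `httpie/input.py` shown further below defines:

    import argparse
    import requests
    from httpie.core import get_output_options

    args = argparse.Namespace(output_options='Hhb')           # e.g. --print=Hhb
    request = requests.Request('GET', 'https://example.org').prepare()

    # Request headers are wanted ('H' present), request body is not ('B' absent):
    print(get_output_options(args=args, message=request))     # (True, False)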
httpie/downloads.py
@@ -1,27 +1,26 @@
-# coding=utf-8
 """
 Download mode implementation.

 """
-from __future__ import division
+import mimetypes
 import os
 import re
 import sys
-import errno
-import mimetypes
 import threading
-from time import sleep, time
 from mailbox import Message
+from time import sleep, monotonic
+from typing import IO, Optional, Tuple
+from urllib.parse import urlsplit

-from httpie.output.streams import RawStream
-from httpie.models import HTTPResponse
-from httpie.utils import humanize_bytes
-from httpie.compat import urlsplit
+import requests
+
+from .models import HTTPResponse
+from .output.streams import RawStream
+from .utils import humanize_bytes


 PARTIAL_CONTENT = 206


 CLEAR_LINE = '\r\033[K'
 PROGRESS = (
     '{percentage: 6.2f} %'
@@ -38,11 +37,11 @@ class ContentRangeError(ValueError):
     pass


-def parse_content_range(content_range, resumed_from):
+def parse_content_range(content_range: str, resumed_from: int) -> int:
     """
     Parse and validate Content-Range header.

-    <http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html>
+    <https://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html>

     :param content_range: the value of a Content-Range response header
                           eg. "bytes 21010-47021/47022"
@@ -61,7 +60,7 @@ def parse_content_range(content_range, resumed_from):

     if not match:
         raise ContentRangeError(
-            'Invalid Content-Range format %r' % content_range)
+            f'Invalid Content-Range format {content_range!r}')

     content_range_dict = match.groupdict()
     first_byte_pos = int(content_range_dict['first_byte_pos'])
@@ -78,26 +77,27 @@ def parse_content_range(content_range, resumed_from):
     # last-byte-pos value, is invalid. The recipient of an invalid
     # byte-content-range- spec MUST ignore it and any content
     # transferred along with it."
-    if (first_byte_pos >= last_byte_pos
+    if (first_byte_pos > last_byte_pos
             or (instance_length is not None
                 and instance_length <= last_byte_pos)):
         raise ContentRangeError(
-            'Invalid Content-Range returned: %r' % content_range)
+            f'Invalid Content-Range returned: {content_range!r}')

     if (first_byte_pos != resumed_from
         or (instance_length is not None
             and last_byte_pos + 1 != instance_length)):
         # Not what we asked for.
         raise ContentRangeError(
-            'Unexpected Content-Range returned (%r)'
-            ' for the requested Range ("bytes=%d-")'
-            % (content_range, resumed_from)
+            f'Unexpected Content-Range returned ({content_range!r})'
+            f' for the requested Range ("bytes={resumed_from}-")'
         )

     return last_byte_pos + 1

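Besides the f-string conversion, the boundary check tightens from `>=` to `>`, so a single-byte range (`first_byte_pos == last_byte_pos`) is no longer rejected. A quick check of the happy path, reusing the example from the docstring:

    from httpie.downloads import parse_content_range

    # Server confirms it is resuming from byte 21010 of a 47022-byte body:
    next_free_byte = parse_content_range('bytes 21010-47021/47022', resumed_from=21010)
    print(next_free_byte)   # 47022 == last_byte_pos + 1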

-def filename_from_content_disposition(content_disposition):
+def filename_from_content_disposition(
+    content_disposition: str
+) -> Optional[str]:
     """
     Extract and validate filename from a Content-Disposition header.

@@ -107,7 +107,7 @@ def filename_from_content_disposition(content_disposition):
     """
     # attachment; filename=jakubroztocil-httpie-0.4.1-20-g40bd8f6.tar.gz

-    msg = Message('Content-Disposition: %s' % content_disposition)
+    msg = Message(f'Content-Disposition: {content_disposition}')
     filename = msg.get_filename()
     if filename:
         # Basic sanitation.
@@ -116,7 +116,7 @@ def filename_from_content_disposition(content_disposition):
         return filename


-def filename_from_url(url, content_type):
+def filename_from_url(url: str, content_type: Optional[str]) -> str:
     fn = urlsplit(url).path.rstrip('/')
     fn = os.path.basename(fn) if fn else 'index'
     if '.' not in fn and content_type:
@@ -127,7 +127,7 @@ def filename_from_url(url, content_type):
         else:
             ext = mimetypes.guess_extension(content_type)

-        if ext == '.htm':  # Python 3
+        if ext == '.htm':
             ext = '.html'

         if ext:
@@ -136,7 +136,7 @@ def filename_from_url(url, content_type):
     return fn


-def trim_filename(filename, max_len):
+def trim_filename(filename: str, max_len: int) -> str:
     if len(filename) > max_len:
         trim_by = len(filename) - max_len
         name, ext = os.path.splitext(filename)
@@ -147,32 +147,24 @@ def trim_filename(filename, max_len):
     return filename


-def get_filename_max_length(directory):
+def get_filename_max_length(directory: str) -> int:
     max_len = 255
-    try:
-        pathconf = os.pathconf
-    except AttributeError:
-        pass  # non-posix
-    else:
-        try:
-            max_len = pathconf(directory, 'PC_NAME_MAX')
-        except OSError as e:
-            if e.errno != errno.EINVAL:
-                raise
+    if hasattr(os, 'pathconf') and 'PC_NAME_MAX' in os.pathconf_names:
+        max_len = os.pathconf(directory, 'PC_NAME_MAX')
     return max_len


-def trim_filename_if_needed(filename, directory='.', extra=0):
+def trim_filename_if_needed(filename: str, directory='.', extra=0) -> str:
     max_len = get_filename_max_length(directory) - extra
     if len(filename) > max_len:
         filename = trim_filename(filename, max_len)
     return filename


-def get_unique_filename(filename, exists=os.path.exists):
+def get_unique_filename(filename: str, exists=os.path.exists) -> str:
     attempt = 0
     while True:
-        suffix = '-' + str(attempt) if attempt > 0 else ''
+        suffix = f'-{attempt}' if attempt > 0 else ''
         try_filename = trim_filename_if_needed(filename, extra=len(suffix))
         try_filename += suffix
         if not exists(try_filename):
@@ -180,14 +172,17 @@ def get_unique_filename(filename, exists=os.path.exists):
         attempt += 1


-class Downloader(object):
+class Downloader:

-    def __init__(self, output_file=None,
-                 resume=False, progress_file=sys.stderr):
+    def __init__(
+        self,
+        output_file: IO = None,
+        resume: bool = False,
+        progress_file: IO = sys.stderr
+    ):
         """
         :param resume: Should the download resume if partial download
                        already exists.
-        :type resume: bool

         :param output_file: The file to store response body in. If not
                             provided, it will be guessed from the response.
@@ -195,24 +190,21 @@ class Downloader(object):
         :param progress_file: Where to report download progress.

         """
+        self.finished = False
+        self.status = DownloadStatus()
         self._output_file = output_file
         self._resume = resume
         self._resumed_from = 0
-        self.finished = False
-
-        self.status = Status()
         self._progress_reporter = ProgressReporterThread(
             status=self.status,
             output=progress_file
         )

-    def pre_request(self, request_headers):
+    def pre_request(self, request_headers: dict):
         """Called just before the HTTP request is sent.

         Might alter `request_headers`.

-        :type request_headers: dict
-
         """
         # Ask the server not to encode the content so that we can resume, etc.
         request_headers['Accept-Encoding'] = 'identity'
@@ -221,16 +213,20 @@ class Downloader(object):
         if bytes_have:
             # Set ``Range`` header to resume the download
             # TODO: Use "If-Range: mtime" to make sure it's fresh?
-            request_headers['Range'] = 'bytes=%d-' % bytes_have
+            request_headers['Range'] = f'bytes={bytes_have}-'
             self._resumed_from = bytes_have

-    def start(self, final_response):
+    def start(
+        self,
+        initial_url: str,
+        final_response: requests.Response
+    ) -> Tuple[RawStream, IO]:
         """
         Initiate and return a stream for `response` body with progress
         callback attached. Can be called only once.

+        :param initial_url: The original requested URL
         :param final_response: Initiated response object with headers already fetched
-        :type final_response: requests.models.Response

         :return: RawStream, output_file

@@ -238,7 +234,7 @@ class Downloader(object):
         assert not self.status.time_started

         # FIXME: some servers still might sent Content-Encoding: gzip
-        # <https://github.com/jakubroztocil/httpie/issues/423>
+        # <https://github.com/httpie/httpie/issues/423>
         try:
             total_size = int(final_response.headers['Content-Length'])
         except (KeyError, ValueError, TypeError):
@@ -246,7 +242,9 @@ class Downloader(object):

         if not self._output_file:
             self._output_file = self._get_output_file_from_response(
-                final_response)
+                initial_url=initial_url,
+                final_response=final_response,
+            )
         else:
             # `--output, -o` provided
             if self._resume and final_response.status_code == PARTIAL_CONTENT:
@@ -260,7 +258,7 @@ class Downloader(object):
                 try:
                     self._output_file.seek(0)
                     self._output_file.truncate()
-                except IOError:
+                except OSError:
                     pass  # stdout

         self.status.started(
@@ -277,12 +275,8 @@ class Downloader(object):
         )

         self._progress_reporter.output.write(
-            'Downloading %sto "%s"\n' % (
-                (humanize_bytes(total_size) + ' '
-                 if total_size is not None
-                 else ''),
-                self._output_file.name
-            )
+            f'Downloading {humanize_bytes(total_size) + " " if total_size is not None else ""}'
+            f'to "{self._output_file.name}"\n'
         )
         self._progress_reporter.start()

@@ -297,46 +291,44 @@ class Downloader(object):
         self._progress_reporter.stop()

     @property
-    def interrupted(self):
+    def interrupted(self) -> bool:
         return (
             self.finished
             and self.status.total_size
             and self.status.total_size != self.status.downloaded
         )

-    def chunk_downloaded(self, chunk):
+    def chunk_downloaded(self, chunk: bytes):
         """
         A download progress callback.

         :param chunk: A chunk of response body data that has just
                       been downloaded and written to the output.
-        :type chunk: bytes

         """
         self.status.chunk_downloaded(len(chunk))

     @staticmethod
-    def _get_output_file_from_response(final_response):
+    def _get_output_file_from_response(
+        initial_url: str,
+        final_response: requests.Response,
+    ) -> IO:
         # Output file not specified. Pick a name that doesn't exist yet.
         filename = None
         if 'Content-Disposition' in final_response.headers:
             filename = filename_from_content_disposition(
                 final_response.headers['Content-Disposition'])
         if not filename:
-            initial_response = (
-                final_response.history[0] if final_response.history
-                else final_response
-            )
             filename = filename_from_url(
-                url=initial_response.url,
+                url=initial_url,
                 content_type=final_response.headers.get('Content-Type'),
             )
         unique_filename = get_unique_filename(filename)
         return open(unique_filename, mode='a+b')


-class Status(object):
-    """Holds details about the downland status."""
+class DownloadStatus:
+    """Holds details about the download status."""

     def __init__(self):
         self.downloaded = 0
@@ -349,7 +341,7 @@ class Status(object):
         assert self.time_started is None
         self.total_size = total_size
         self.downloaded = self.resumed_from = resumed_from
-        self.time_started = time()
+        self.time_started = monotonic()

     def chunk_downloaded(self, size):
         assert self.time_finished is None
@@ -362,7 +354,7 @@ class Status(object):
     def finished(self):
         assert self.time_started is not None
         assert self.time_finished is None
-        self.time_finished = time()
+        self.time_finished = monotonic()


 class ProgressReporterThread(threading.Thread):
@@ -372,13 +364,15 @@ class ProgressReporterThread(threading.Thread):
     Uses threading to periodically update the status (speed, ETA, etc.).

     """
-    def __init__(self, status, output, tick=.1, update_interval=1):
-        """
-
-        :type status: Status
-        :type output: file
-        """
-        super(ProgressReporterThread, self).__init__()
+    def __init__(
+        self,
+        status: DownloadStatus,
+        output: IO,
+        tick=.1,
+        update_interval=1
+    ):
+        super().__init__()
         self.status = status
         self.output = output
         self._tick = tick
@@ -386,7 +380,7 @@ class ProgressReporterThread(threading.Thread):
         self._spinner_pos = 0
         self._status_line = ''
         self._prev_bytes = 0
-        self._prev_time = time()
+        self._prev_time = monotonic()
         self._should_stop = threading.Event()

     def stop(self):
@@ -403,16 +397,11 @@ class ProgressReporterThread(threading.Thread):
             sleep(self._tick)

     def report_speed(self):
-        now = time()
+        now = monotonic()

         if now - self._prev_time >= self._update_interval:
             downloaded = self.status.downloaded
-            try:
-                speed = ((downloaded - self._prev_bytes)
-                         / (now - self._prev_time))
-            except ZeroDivisionError:
-                speed = 0
+            speed = ((downloaded - self._prev_bytes)
+                     / (now - self._prev_time))

             if not self.status.total_size:
                 self._status_line = PROGRESS_NO_CONTENT_LENGTH.format(
@@ -420,10 +409,9 @@ class ProgressReporterThread(threading.Thread):
                     speed=humanize_bytes(speed),
                 )
             else:
-                try:
-                    percentage = downloaded / self.status.total_size * 100
-                except ZeroDivisionError:
-                    percentage = 0
+                percentage = (downloaded / self.status.total_size * 100
+                              if self.status.total_size
+                              else 0)

                 if not speed:
                     eta = '-:--:--'
@@ -431,7 +419,7 @@ class ProgressReporterThread(threading.Thread):
                     s = int((self.status.total_size - downloaded) / speed)
                     h, s = divmod(s, 60 * 60)
                     m, s = divmod(s, 60)
-                    eta = '{0}:{1:0>2}:{2:0>2}'.format(h, m, s)
+                    eta = f'{h}:{m:0>2}:{s:0>2}'

                 self._status_line = PROGRESS.format(
                     percentage=percentage,
@@ -444,33 +432,20 @@ class ProgressReporterThread(threading.Thread):
             self._prev_bytes = downloaded

         self.output.write(
-            CLEAR_LINE
-            + ' '
-            + SPINNER[self._spinner_pos]
-            + ' '
-            + self._status_line
+            f'{CLEAR_LINE} {SPINNER[self._spinner_pos]} {self._status_line}'
         )
         self.output.flush()

-        self._spinner_pos = (self._spinner_pos + 1
-                             if self._spinner_pos + 1 != len(SPINNER)
-                             else 0)
+        self._spinner_pos = (self._spinner_pos + 1) % len(SPINNER)

     def sum_up(self):
         actually_downloaded = (
             self.status.downloaded - self.status.resumed_from)
         time_taken = self.status.time_finished - self.status.time_started
+        speed = actually_downloaded / time_taken if time_taken else actually_downloaded

         self.output.write(CLEAR_LINE)

-        try:
-            speed = actually_downloaded / time_taken
-        except ZeroDivisionError:
-            # Either time is 0 (not all systems provide `time.time`
-            # with a better precision than 1 second), and/or nothing
-            # has been downloaded.
-            speed = actually_downloaded
-
         self.output.write(SUMMARY.format(
             downloaded=humanize_bytes(actually_downloaded),
             total=(self.status.total_size
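When `--output` is not given, `_get_output_file_from_response()` now derives the file name from the originally requested URL (`initial_url`) rather than from the first response in the redirect history. The helpers it leans on are pure functions, so they are easy to poke at; a small sketch (the URL is illustrative, and the unique-name suffix depends on what already exists in the working directory):

    from httpie.downloads import filename_from_url, get_unique_filename

    name = filename_from_url('https://example.org/', content_type='text/html')
    print(name)                        # 'index.html' (extension guessed from Content-Type)
    print(get_unique_filename(name))   # 'index.html', or 'index.html-1' if already taken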
758
httpie/input.py
758
httpie/input.py
@ -1,758 +0,0 @@
|
|||||||
"""Parsing and processing of CLI input (args, auth credentials, files, stdin).
|
|
||||||
|
|
||||||
"""
|
|
||||||
import os
|
|
||||||
import ssl
|
|
||||||
import sys
|
|
||||||
import re
|
|
||||||
import errno
|
|
||||||
import mimetypes
|
|
||||||
import getpass
|
|
||||||
from io import BytesIO
|
|
||||||
from collections import namedtuple, Iterable, OrderedDict
|
|
||||||
# noinspection PyCompatibility
|
|
||||||
from argparse import ArgumentParser, ArgumentTypeError, ArgumentError
|
|
||||||
|
|
||||||
# TODO: Use MultiDict for headers once added to `requests`.
|
|
||||||
# https://github.com/jakubroztocil/httpie/issues/130
|
|
||||||
from httpie.plugins import plugin_manager
|
|
||||||
from requests.structures import CaseInsensitiveDict
|
|
||||||
|
|
||||||
from httpie.compat import urlsplit, str, is_pypy, is_py27
|
|
||||||
from httpie.sessions import VALID_SESSION_NAME_PATTERN
|
|
||||||
from httpie.utils import load_json_preserve_order
|
|
||||||
|
|
||||||
|
|
||||||
# ALPHA *( ALPHA / DIGIT / "+" / "-" / "." )
|
|
||||||
# <http://tools.ietf.org/html/rfc3986#section-3.1>
|
|
||||||
URL_SCHEME_RE = re.compile(r'^[a-z][a-z0-9.+-]*://', re.IGNORECASE)
|
|
||||||
|
|
||||||
HTTP_POST = 'POST'
|
|
||||||
HTTP_GET = 'GET'
|
|
||||||
|
|
||||||
|
|
||||||
# Various separators used in args
|
|
||||||
SEP_HEADERS = ':'
|
|
||||||
SEP_HEADERS_EMPTY = ';'
|
|
||||||
SEP_CREDENTIALS = ':'
|
|
||||||
SEP_PROXY = ':'
|
|
||||||
SEP_DATA = '='
|
|
||||||
SEP_DATA_RAW_JSON = ':='
|
|
||||||
SEP_FILES = '@'
|
|
||||||
SEP_DATA_EMBED_FILE = '=@'
|
|
||||||
SEP_DATA_EMBED_RAW_JSON_FILE = ':=@'
|
|
||||||
SEP_QUERY = '=='
|
|
||||||
|
|
||||||
# Separators that become request data
|
|
||||||
SEP_GROUP_DATA_ITEMS = frozenset([
|
|
||||||
SEP_DATA,
|
|
||||||
SEP_DATA_RAW_JSON,
|
|
||||||
SEP_FILES,
|
|
||||||
SEP_DATA_EMBED_FILE,
|
|
||||||
SEP_DATA_EMBED_RAW_JSON_FILE
|
|
||||||
])
|
|
||||||
|
|
||||||
# Separators for items whose value is a filename to be embedded
|
|
||||||
SEP_GROUP_DATA_EMBED_ITEMS = frozenset([
|
|
||||||
SEP_DATA_EMBED_FILE,
|
|
||||||
SEP_DATA_EMBED_RAW_JSON_FILE,
|
|
||||||
])
|
|
||||||
|
|
||||||
# Separators for raw JSON items
|
|
||||||
SEP_GROUP_RAW_JSON_ITEMS = frozenset([
|
|
||||||
SEP_DATA_RAW_JSON,
|
|
||||||
SEP_DATA_EMBED_RAW_JSON_FILE,
|
|
||||||
])
|
|
||||||
|
|
||||||
# Separators allowed in ITEM arguments
|
|
||||||
SEP_GROUP_ALL_ITEMS = frozenset([
|
|
||||||
SEP_HEADERS,
|
|
||||||
SEP_HEADERS_EMPTY,
|
|
||||||
SEP_QUERY,
|
|
||||||
SEP_DATA,
|
|
||||||
SEP_DATA_RAW_JSON,
|
|
||||||
SEP_FILES,
|
|
||||||
SEP_DATA_EMBED_FILE,
|
|
||||||
SEP_DATA_EMBED_RAW_JSON_FILE,
|
|
||||||
])
|
|
||||||
|
|
||||||
|
|
||||||
# Output options
|
|
||||||
OUT_REQ_HEAD = 'H'
|
|
||||||
OUT_REQ_BODY = 'B'
|
|
||||||
OUT_RESP_HEAD = 'h'
|
|
||||||
OUT_RESP_BODY = 'b'
|
|
||||||
|
|
||||||
OUTPUT_OPTIONS = frozenset([
|
|
||||||
OUT_REQ_HEAD,
|
|
||||||
OUT_REQ_BODY,
|
|
||||||
OUT_RESP_HEAD,
|
|
||||||
OUT_RESP_BODY
|
|
||||||
])
|
|
||||||
|
|
||||||
# Pretty
|
|
||||||
PRETTY_MAP = {
|
|
||||||
'all': ['format', 'colors'],
|
|
||||||
'colors': ['colors'],
|
|
||||||
'format': ['format'],
|
|
||||||
'none': []
|
|
||||||
}
|
|
||||||
PRETTY_STDOUT_TTY_ONLY = object()
|
|
||||||
|
|
||||||
|
|
||||||
# Defaults
|
|
||||||
OUTPUT_OPTIONS_DEFAULT = OUT_RESP_HEAD + OUT_RESP_BODY
|
|
||||||
OUTPUT_OPTIONS_DEFAULT_STDOUT_REDIRECTED = OUT_RESP_BODY
|
|
||||||
|
|
||||||
|
|
||||||
SSL_VERSION_ARG_MAPPING = {
|
|
||||||
'ssl2.3': 'PROTOCOL_SSLv23',
|
|
||||||
'ssl3': 'PROTOCOL_SSLv3',
|
|
||||||
'tls1': 'PROTOCOL_TLSv1',
|
|
||||||
'tls1.1': 'PROTOCOL_TLSv1_1',
|
|
||||||
'tls1.2': 'PROTOCOL_TLSv1_2',
|
|
||||||
'tls1.3': 'PROTOCOL_TLSv1_3',
|
|
||||||
}
|
|
||||||
SSL_VERSION_ARG_MAPPING = {
|
|
||||||
cli_arg: getattr(ssl, ssl_constant)
|
|
||||||
for cli_arg, ssl_constant in SSL_VERSION_ARG_MAPPING.items()
|
|
||||||
if hasattr(ssl, ssl_constant)
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
class HTTPieArgumentParser(ArgumentParser):
|
|
||||||
"""Adds additional logic to `argparse.ArgumentParser`.
|
|
||||||
|
|
||||||
Handles all input (CLI args, file args, stdin), applies defaults,
|
|
||||||
and performs extra validation.
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, *args, **kwargs):
|
|
||||||
kwargs['add_help'] = False
|
|
||||||
super(HTTPieArgumentParser, self).__init__(*args, **kwargs)
|
|
||||||
|
|
||||||
# noinspection PyMethodOverriding
|
|
||||||
def parse_args(self, env, args=None, namespace=None):
|
|
||||||
|
|
||||||
self.env = env
|
|
||||||
self.args, no_options = super(HTTPieArgumentParser, self)\
|
|
||||||
.parse_known_args(args, namespace)
|
|
||||||
|
|
||||||
if self.args.debug:
|
|
||||||
self.args.traceback = True
|
|
||||||
|
|
||||||
# Arguments processing and environment setup.
|
|
||||||
self._apply_no_options(no_options)
|
|
||||||
self._validate_download_options()
|
|
||||||
self._setup_standard_streams()
|
|
||||||
self._process_output_options()
|
|
||||||
self._process_pretty_options()
|
|
||||||
self._guess_method()
|
|
||||||
self._parse_items()
|
|
||||||
if not self.args.ignore_stdin and not env.stdin_isatty:
|
|
||||||
self._body_from_file(self.env.stdin)
|
|
||||||
if not URL_SCHEME_RE.match(self.args.url):
|
|
||||||
scheme = self.args.default_scheme + "://"
|
|
||||||
|
|
||||||
# See if we're using curl style shorthand for localhost (:3000/foo)
|
|
||||||
shorthand = re.match(r'^:(?!:)(\d*)(/?.*)$', self.args.url)
|
|
||||||
if shorthand:
|
|
||||||
port = shorthand.group(1)
|
|
||||||
rest = shorthand.group(2)
|
|
||||||
self.args.url = scheme + 'localhost'
|
|
||||||
if port:
|
|
||||||
self.args.url += ':' + port
|
|
||||||
self.args.url += rest
|
|
||||||
else:
|
|
||||||
self.args.url = scheme + self.args.url
|
|
||||||
self._process_auth()
|
|
||||||
|
|
||||||
return self.args
|
|
||||||
|
|
||||||
# noinspection PyShadowingBuiltins
|
|
||||||
def _print_message(self, message, file=None):
|
|
||||||
# Sneak in our stderr/stdout.
|
|
||||||
file = {
|
|
||||||
sys.stdout: self.env.stdout,
|
|
||||||
sys.stderr: self.env.stderr,
|
|
||||||
None: self.env.stderr
|
|
||||||
}.get(file, file)
|
|
||||||
if not hasattr(file, 'buffer') and isinstance(message, str):
|
|
||||||
message = message.encode(self.env.stdout_encoding)
|
|
||||||
super(HTTPieArgumentParser, self)._print_message(message, file)
|
|
||||||
|
|
||||||
def _setup_standard_streams(self):
|
|
||||||
"""
|
|
||||||
Modify `env.stdout` and `env.stdout_isatty` based on args, if needed.
|
|
||||||
|
|
||||||
"""
|
|
||||||
self.args.output_file_specified = bool(self.args.output_file)
|
|
||||||
if self.args.download:
|
|
||||||
# FIXME: Come up with a cleaner solution.
|
|
||||||
if not self.args.output_file and not self.env.stdout_isatty:
|
|
||||||
# Use stdout as the download output file.
|
|
||||||
self.args.output_file = self.env.stdout
|
|
||||||
# With `--download`, we write everything that would normally go to
|
|
||||||
# `stdout` to `stderr` instead. Let's replace the stream so that
|
|
||||||
# we don't have to use many `if`s throughout the codebase.
|
|
||||||
# The response body will be treated separately.
|
|
||||||
self.env.stdout = self.env.stderr
|
|
||||||
self.env.stdout_isatty = self.env.stderr_isatty
|
|
||||||
elif self.args.output_file:
|
|
||||||
# When not `--download`ing, then `--output` simply replaces
|
|
||||||
# `stdout`. The file is opened for appending, which isn't what
|
|
||||||
# we want in this case.
|
|
||||||
self.args.output_file.seek(0)
|
|
||||||
try:
|
|
||||||
self.args.output_file.truncate()
|
|
||||||
except IOError as e:
|
|
||||||
if e.errno == errno.EINVAL:
|
|
||||||
# E.g. /dev/null on Linux.
|
|
||||||
pass
|
|
||||||
else:
|
|
||||||
raise
|
|
||||||
self.env.stdout = self.args.output_file
|
|
||||||
self.env.stdout_isatty = False
|
|
||||||
|
|
||||||
def _process_auth(self):
|
|
||||||
# TODO: refactor
|
|
||||||
self.args.auth_plugin = None
|
|
||||||
default_auth_plugin = plugin_manager.get_auth_plugins()[0]
|
|
||||||
auth_type_set = self.args.auth_type is not None
|
|
||||||
url = urlsplit(self.args.url)
|
|
||||||
|
|
||||||
if self.args.auth is None and not auth_type_set:
|
|
||||||
if url.username is not None:
|
|
||||||
# Handle http://username:password@hostname/
|
|
||||||
username = url.username
|
|
||||||
password = url.password or ''
|
|
||||||
self.args.auth = AuthCredentials(
|
|
||||||
key=username,
|
|
||||||
value=password,
|
|
||||||
sep=SEP_CREDENTIALS,
|
|
||||||
orig=SEP_CREDENTIALS.join([username, password])
|
|
||||||
)
|
|
||||||
|
|
||||||
if self.args.auth is not None or auth_type_set:
|
|
||||||
if not self.args.auth_type:
|
|
||||||
self.args.auth_type = default_auth_plugin.auth_type
|
|
||||||
plugin = plugin_manager.get_auth_plugin(self.args.auth_type)()
|
|
||||||
|
|
||||||
if plugin.auth_require and self.args.auth is None:
|
|
||||||
self.error('--auth required')
|
|
||||||
|
|
||||||
plugin.raw_auth = self.args.auth
|
|
||||||
self.args.auth_plugin = plugin
|
|
||||||
already_parsed = isinstance(self.args.auth, AuthCredentials)
|
|
||||||
|
|
||||||
if self.args.auth is None or not plugin.auth_parse:
|
|
||||||
self.args.auth = plugin.get_auth()
|
|
||||||
else:
|
|
||||||
if already_parsed:
|
|
||||||
# from the URL
|
|
||||||
credentials = self.args.auth
|
|
||||||
else:
|
|
||||||
credentials = parse_auth(self.args.auth)
|
|
||||||
|
|
||||||
if (not credentials.has_password()
|
|
||||||
and plugin.prompt_password):
|
|
||||||
if self.args.ignore_stdin:
|
|
||||||
# Non-tty stdin read by now
|
|
||||||
self.error(
|
|
||||||
'Unable to prompt for passwords because'
|
|
||||||
' --ignore-stdin is set.'
|
|
||||||
)
|
|
||||||
credentials.prompt_password(url.netloc)
|
|
||||||
self.args.auth = plugin.get_auth(
|
|
||||||
username=credentials.key,
|
|
||||||
password=credentials.value,
|
|
||||||
)
|
|
||||||
|
|
||||||
def _apply_no_options(self, no_options):
|
|
||||||
"""For every `--no-OPTION` in `no_options`, set `args.OPTION` to
|
|
||||||
its default value. This allows for un-setting of options, e.g.,
|
|
||||||
specified in config.
|
|
||||||
|
|
||||||
"""
|
|
||||||
invalid = []
|
|
||||||
|
|
||||||
for option in no_options:
|
|
||||||
if not option.startswith('--no-'):
|
|
||||||
invalid.append(option)
|
|
||||||
continue
|
|
||||||
|
|
||||||
# --no-option => --option
|
|
||||||
inverted = '--' + option[5:]
|
|
||||||
for action in self._actions:
|
|
||||||
if inverted in action.option_strings:
|
|
||||||
setattr(self.args, action.dest, action.default)
|
|
||||||
break
|
|
||||||
else:
|
|
||||||
invalid.append(option)
|
|
||||||
|
|
||||||
if invalid:
|
|
||||||
msg = 'unrecognized arguments: %s'
|
|
||||||
self.error(msg % ' '.join(invalid))
|
|
||||||
|
|
||||||
def _body_from_file(self, fd):
|
|
||||||
"""There can only be one source of request data.
|
|
||||||
|
|
||||||
Bytes are always read.
|
|
||||||
|
|
||||||
"""
|
|
||||||
if self.args.data:
|
|
||||||
self.error('Request body (from stdin or a file) and request '
|
|
||||||
'data (key=value) cannot be mixed. Pass '
|
|
||||||
'--ignore-stdin to let key/value take priority.')
|
|
||||||
self.args.data = getattr(fd, 'buffer', fd).read()
|
|
||||||
|
|
||||||
def _guess_method(self):
|
|
||||||
"""Set `args.method` if not specified to either POST or GET
|
|
||||||
based on whether the request has data or not.
|
|
||||||
|
|
||||||
"""
|
|
||||||
if self.args.method is None:
|
|
||||||
# Invoked as `http URL'.
|
|
||||||
assert not self.args.items
|
|
||||||
if not self.args.ignore_stdin and not self.env.stdin_isatty:
|
|
||||||
self.args.method = HTTP_POST
|
|
||||||
else:
|
|
||||||
self.args.method = HTTP_GET
|
|
||||||
|
|
||||||
# FIXME: False positive, e.g., "localhost" matches but is a valid URL.
|
|
||||||
elif not re.match('^[a-zA-Z]+$', self.args.method):
|
|
||||||
# Invoked as `http URL item+'. The URL is now in `args.method`
|
|
||||||
# and the first ITEM is now incorrectly in `args.url`.
|
|
||||||
try:
|
|
||||||
# Parse the URL as an ITEM and store it as the first ITEM arg.
|
|
||||||
self.args.items.insert(0, KeyValueArgType(
|
|
||||||
*SEP_GROUP_ALL_ITEMS).__call__(self.args.url))
|
|
||||||
|
|
||||||
except ArgumentTypeError as e:
|
|
||||||
if self.args.traceback:
|
|
||||||
raise
|
|
||||||
self.error(e.args[0])
|
|
||||||
|
|
||||||
else:
|
|
||||||
# Set the URL correctly
|
|
||||||
self.args.url = self.args.method
|
|
||||||
# Infer the method
|
|
||||||
has_data = (
|
|
||||||
(not self.args.ignore_stdin and not self.env.stdin_isatty)
|
|
||||||
or any(
|
|
||||||
item.sep in SEP_GROUP_DATA_ITEMS
|
|
||||||
for item in self.args.items
|
|
||||||
)
|
|
||||||
)
|
|
||||||
self.args.method = HTTP_POST if has_data else HTTP_GET
|
|
||||||
|
|
||||||
    def _parse_items(self):
        """Parse `args.items` into `args.headers`, `args.data`, `args.params`,
        and `args.files`.

        """
        try:
            items = parse_items(
                items=self.args.items,
                data_class=ParamsDict if self.args.form else OrderedDict
            )
        except ParseError as e:
            if self.args.traceback:
                raise
            self.error(e.args[0])
        else:
            self.args.headers = items.headers
            self.args.data = items.data
            self.args.files = items.files
            self.args.params = items.params

        if self.args.files and not self.args.form:
            # `http url @/path/to/file`
            file_fields = list(self.args.files.keys())
            if file_fields != ['']:
                self.error(
                    'Invalid file fields (perhaps you meant --form?): %s'
                    % ','.join(file_fields))

            fn, fd, ct = self.args.files['']
            self.args.files = {}

            self._body_from_file(fd)

            if 'Content-Type' not in self.args.headers:
                content_type = get_content_type(fn)
                if content_type:
                    self.args.headers['Content-Type'] = content_type

    def _process_output_options(self):
        """Apply defaults to output options, or validate the provided ones.

        The default output options are stdout-type-sensitive.

        """
        def check_options(value, option):
            unknown = set(value) - OUTPUT_OPTIONS
            if unknown:
                self.error('Unknown output options: {0}={1}'.format(
                    option,
                    ','.join(unknown)
                ))

        if self.args.verbose:
            self.args.all = True

        if self.args.output_options is None:
            if self.args.verbose:
                self.args.output_options = ''.join(OUTPUT_OPTIONS)
            else:
                self.args.output_options = (
                    OUTPUT_OPTIONS_DEFAULT
                    if self.env.stdout_isatty
                    else OUTPUT_OPTIONS_DEFAULT_STDOUT_REDIRECTED
                )

        if self.args.output_options_history is None:
            self.args.output_options_history = self.args.output_options

        check_options(self.args.output_options, '--print')
        check_options(self.args.output_options_history, '--history-print')

        if self.args.download and OUT_RESP_BODY in self.args.output_options:
            # Response body is always downloaded with --download and it goes
            # through a different routine, so we remove it.
            self.args.output_options = str(
                set(self.args.output_options) - set(OUT_RESP_BODY))

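    # Illustrative sketch (not part of the original module): `--verbose`
    # selects every output part, otherwise the default depends on whether
    # stdout is a terminal (OUTPUT_OPTIONS_DEFAULT) or redirected
    # (OUTPUT_OPTIONS_DEFAULT_STDOUT_REDIRECTED). A letter outside
    # OUTPUT_OPTIONS, e.g. `--print=hx`, is rejected by `check_options()`
    # with an error along the lines of "Unknown output options: --print=x".
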
    def _process_pretty_options(self):
        if self.args.prettify == PRETTY_STDOUT_TTY_ONLY:
            self.args.prettify = PRETTY_MAP[
                'all' if self.env.stdout_isatty else 'none']
        elif (self.args.prettify and self.env.is_windows
                and self.args.output_file):
            self.error('Only terminal output can be colorized on Windows.')
        else:
            # noinspection PyTypeChecker
            self.args.prettify = PRETTY_MAP[self.args.prettify]

    def _validate_download_options(self):
        if not self.args.download:
            if self.args.download_resume:
                self.error('--continue only works with --download')
        if self.args.download_resume and not (
                self.args.download and self.args.output_file):
            self.error('--continue requires --output to be specified')


class ParseError(Exception):
    pass


class KeyValue(object):
    """Base key-value pair parsed from CLI."""

    def __init__(self, key, value, sep, orig):
        self.key = key
        self.value = value
        self.sep = sep
        self.orig = orig

    def __eq__(self, other):
        return self.__dict__ == other.__dict__

    def __repr__(self):
        return repr(self.__dict__)


class SessionNameValidator(object):

    def __init__(self, error_message):
        self.error_message = error_message

    def __call__(self, value):
        # Session name can be a path or just a name.
        if (os.path.sep not in value
                and not VALID_SESSION_NAME_PATTERN.search(value)):
            raise ArgumentError(None, self.error_message)
        return value


class KeyValueArgType(object):
    """A key-value pair argument type used with `argparse`.

    Parses a key-value arg and constructs a `KeyValue` instance.
    Used for headers, form data, and other key-value pair types.

    """

    key_value_class = KeyValue

    def __init__(self, *separators):
        self.separators = separators
        self.special_characters = set('\\')
        for separator in separators:
            self.special_characters.update(separator)

    def __call__(self, string):
        """Parse `string` and return `self.key_value_class()` instance.

        The best of `self.separators` is determined (first found, longest).
        Back slash escaped characters aren't considered as separators
        (or parts thereof). Literal back slash characters have to be escaped
        as well (r'\\').

        """

        class Escaped(str):
            """Represents an escaped character."""

        def tokenize(string):
            r"""Tokenize `string`. There are only two token types - strings
            and escaped characters:

                tokenize(r'foo\=bar\\baz')
                => ['foo', Escaped('='), 'bar', Escaped('\\'), 'baz']

            """
            tokens = ['']
            characters = iter(string)
            for char in characters:
                if char == '\\':
                    char = next(characters, '')
                    if char not in self.special_characters:
                        tokens[-1] += '\\' + char
                    else:
                        tokens.extend([Escaped(char), ''])
                else:
                    tokens[-1] += char
            return tokens

        tokens = tokenize(string)

        # Sorting by length ensures that the longest one will be
        # chosen as it will overwrite any shorter ones starting
        # at the same position in the `found` dictionary.
        separators = sorted(self.separators, key=len)

        for i, token in enumerate(tokens):

            if isinstance(token, Escaped):
                continue

            found = {}
            for sep in separators:
                pos = token.find(sep)
                if pos != -1:
                    found[pos] = sep

            if found:
                # Starting first, longest separator found.
                sep = found[min(found.keys())]

                key, value = token.split(sep, 1)

                # Any preceding tokens are part of the key.
                key = ''.join(tokens[:i]) + key

                # Any following tokens are part of the value.
                value += ''.join(tokens[i + 1:])

                break

        else:
            raise ArgumentTypeError(
                u'"%s" is not a valid value' % string)

        return self.key_value_class(
            key=key, value=value, sep=sep, orig=string)


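# --- Illustrative usage sketch (not part of the original module) ---
# `KeyValueArgType` is instantiated with the separator literals to recognise.
# httpie passes its SEP_* constants; the plain ':' and '=' below are used only
# to keep the example self-contained.
def _key_value_arg_type_example():
    arg_type = KeyValueArgType(':', '=')
    header = arg_type('X-API-Token:123')
    assert (header.key, header.value, header.sep) == ('X-API-Token', '123', ':')
    # A backslash escapes a separator so that it becomes part of the key.
    escaped = arg_type(r'weird\=name=value')
    assert (escaped.key, escaped.value) == ('weird=name', 'value')

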
class AuthCredentials(KeyValue):
    """Represents parsed credentials."""

    def _getpass(self, prompt):
        # To allow mocking.
        return getpass.getpass(str(prompt))

    def has_password(self):
        return self.value is not None

    def prompt_password(self, host):
        try:
            self.value = self._getpass(
                'http: password for %s@%s: ' % (self.key, host))
        except (EOFError, KeyboardInterrupt):
            sys.stderr.write('\n')
            sys.exit(0)


class AuthCredentialsArgType(KeyValueArgType):
    """A key-value arg type that parses credentials."""

    key_value_class = AuthCredentials

    def __call__(self, string):
        """Parse credentials from `string`.

        ("username" or "username:password").

        """
        try:
            return super(AuthCredentialsArgType, self).__call__(string)
        except ArgumentTypeError:
            # No password provided, will prompt for it later.
            return self.key_value_class(
                key=string,
                value=None,
                sep=SEP_CREDENTIALS,
                orig=string
            )


parse_auth = AuthCredentialsArgType(SEP_CREDENTIALS)


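# --- Illustrative usage sketch (not part of the original module) ---
# `parse_auth` accepts "username:password" or just "username" (assuming the
# usual ':' credentials separator defined by SEP_CREDENTIALS earlier in this
# module); with no password the value stays None and is prompted for later.
def _parse_auth_example():
    full = parse_auth('alice:secret')
    assert full.has_password() and full.value == 'secret'
    user_only = parse_auth('alice')
    assert user_only.key == 'alice' and not user_only.has_password()

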
class RequestItemsDict(OrderedDict):
    """Multi-value dict for URL parameters and form data."""

    if is_pypy and is_py27:
        # Manually set keys when initialized with an iterable as PyPy
        # doesn't call __setitem__ in such case (pypy3 does).
        def __init__(self, *args, **kwargs):
            if len(args) == 1 and isinstance(args[0], Iterable):
                super(RequestItemsDict, self).__init__(**kwargs)
                for k, v in args[0]:
                    self[k] = v
            else:
                super(RequestItemsDict, self).__init__(*args, **kwargs)

    # noinspection PyMethodOverriding
    def __setitem__(self, key, value):
        """ If `key` is assigned more than once, `self[key]` holds a
        `list` of all the values.

        This allows having multiple fields with the same name in form
        data and URL params.

        """
        assert not isinstance(value, list)
        if key not in self:
            super(RequestItemsDict, self).__setitem__(key, value)
        else:
            if not isinstance(self[key], list):
                super(RequestItemsDict, self).__setitem__(key, [self[key]])
            self[key].append(value)


class ParamsDict(RequestItemsDict):
    pass


class DataDict(RequestItemsDict):

    def items(self):
        for key, values in super(RequestItemsDict, self).items():
            if not isinstance(values, list):
                values = [values]
            for value in values:
                yield key, value


RequestItems = namedtuple('RequestItems',
                          ['headers', 'data', 'files', 'params'])


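# --- Illustrative usage sketch (not part of the original module) ---
# `RequestItemsDict` keeps every value when a key repeats, which is how
# repeated form fields and URL parameters survive parsing; `DataDict.items()`
# then flattens them back into individual pairs.
def _request_items_dict_example():
    form = DataDict()
    form['tag'] = 'a'
    form['tag'] = 'b'
    assert form['tag'] == ['a', 'b']
    assert list(form.items()) == [('tag', 'a'), ('tag', 'b')]

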
def get_content_type(filename):
    """
    Return the content type for ``filename`` in format appropriate
    for Content-Type headers, or ``None`` if the file type is unknown
    to ``mimetypes``.

    """
    mime, encoding = mimetypes.guess_type(filename, strict=False)
    if mime:
        content_type = mime
        if encoding:
            content_type = '%s; charset=%s' % (mime, encoding)
        return content_type


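# Illustrative sketch (not part of the original module): with a stock
# `mimetypes` database, `get_content_type('data.json')` returns
# 'application/json', while an unknown extension yields `None`. When
# `mimetypes` also reports an encoding (e.g. for 'logs.tar.gz'), the result
# carries it as a charset parameter, e.g. 'application/x-tar; charset=gzip'.

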
def parse_items(items,
                headers_class=CaseInsensitiveDict,
                data_class=OrderedDict,
                files_class=DataDict,
                params_class=ParamsDict):
    """Parse `KeyValue` `items` into `data`, `headers`, `files`,
    and `params`.

    """
    headers = []
    data = []
    files = []
    params = []
    for item in items:
        value = item.value
        if item.sep == SEP_HEADERS:
            if value == '':
                # No value => unset the header
                value = None
            target = headers
        elif item.sep == SEP_HEADERS_EMPTY:
            if item.value:
                raise ParseError(
                    'Invalid item "%s" '
                    '(to specify an empty header use `Header;`)'
                    % item.orig
                )
            target = headers
        elif item.sep == SEP_QUERY:
            target = params
        elif item.sep == SEP_FILES:
            try:
                with open(os.path.expanduser(value), 'rb') as f:
                    value = (os.path.basename(value),
                             BytesIO(f.read()),
                             get_content_type(value))
            except IOError as e:
                raise ParseError('"%s": %s' % (item.orig, e))
            target = files

        elif item.sep in SEP_GROUP_DATA_ITEMS:

            if item.sep in SEP_GROUP_DATA_EMBED_ITEMS:
                try:
                    with open(os.path.expanduser(value), 'rb') as f:
                        value = f.read().decode('utf8')
                except IOError as e:
                    raise ParseError('"%s": %s' % (item.orig, e))
                except UnicodeDecodeError:
                    raise ParseError(
                        '"%s": cannot embed the content of "%s",'
                        ' not a UTF8 or ASCII-encoded text file'
                        % (item.orig, item.value)
                    )

            if item.sep in SEP_GROUP_RAW_JSON_ITEMS:
                try:
                    value = load_json_preserve_order(value)
                except ValueError as e:
                    raise ParseError('"%s": %s' % (item.orig, e))
            target = data

        else:
            raise TypeError(item)

        target.append((item.key, value))

    return RequestItems(headers_class(headers),
                        data_class(data),
                        files_class(files),
                        params_class(params))


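# --- Illustrative usage sketch (not part of the original module) ---
# The separator literals below (':' for headers, '==' for URL params, '=' for
# data fields) are assumed to mirror the SEP_* constants defined earlier in
# this module; they are spelled out here only to keep the example readable.
def _parse_items_example():
    items = [
        KeyValue(key='X-API-Token', value='123', sep=':', orig='X-API-Token:123'),
        KeyValue(key='page', value='2', sep='==', orig='page==2'),
        KeyValue(key='name', value='John', sep='=', orig='name=John'),
    ]
    parsed = parse_items(items)
    assert parsed.headers['X-API-Token'] == '123'
    assert parsed.params['page'] == '2'
    assert parsed.data['name'] == 'John'

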
def readable_file_arg(filename):
    try:
        with open(filename, 'rb'):
            return filename
    except IOError as ex:
        raise ArgumentTypeError('%s: %s' % (filename, ex.args[1]))
@ -1,41 +1,46 @@
|
|||||||
from httpie.compat import urlsplit, str
|
from abc import ABCMeta, abstractmethod
|
||||||
|
from typing import Iterable, Optional
|
||||||
|
from urllib.parse import urlsplit
|
||||||
|
|
||||||
|
from .constants import UTF8
|
||||||
|
from .utils import split_cookies
|
||||||
|
|
||||||
|
|
||||||
class HTTPMessage(object):
|
class HTTPMessage(metaclass=ABCMeta):
|
||||||
"""Abstract class for HTTP messages."""
|
"""Abstract class for HTTP messages."""
|
||||||
|
|
||||||
def __init__(self, orig):
|
def __init__(self, orig):
|
||||||
self._orig = orig
|
self._orig = orig
|
||||||
|
|
||||||
def iter_body(self, chunk_size):
|
@abstractmethod
|
||||||
|
def iter_body(self, chunk_size: int) -> Iterable[bytes]:
|
||||||
"""Return an iterator over the body."""
|
"""Return an iterator over the body."""
|
||||||
raise NotImplementedError()
|
|
||||||
|
|
||||||
def iter_lines(self, chunk_size):
|
@abstractmethod
|
||||||
|
def iter_lines(self, chunk_size: int) -> Iterable[bytes]:
|
||||||
"""Return an iterator over the body yielding (`line`, `line_feed`)."""
|
"""Return an iterator over the body yielding (`line`, `line_feed`)."""
|
||||||
raise NotImplementedError()
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def headers(self):
|
@abstractmethod
|
||||||
|
def headers(self) -> str:
|
||||||
"""Return a `str` with the message's headers."""
|
"""Return a `str` with the message's headers."""
|
||||||
raise NotImplementedError()
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def encoding(self):
|
@abstractmethod
|
||||||
|
def encoding(self) -> Optional[str]:
|
||||||
"""Return a `str` with the message's encoding, if known."""
|
"""Return a `str` with the message's encoding, if known."""
|
||||||
raise NotImplementedError()
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def body(self):
|
def body(self) -> bytes:
|
||||||
"""Return a `bytes` with the message's body."""
|
"""Return a `bytes` with the message's body."""
|
||||||
raise NotImplementedError()
|
raise NotImplementedError()
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def content_type(self):
|
def content_type(self) -> str:
|
||||||
"""Return the message content type."""
|
"""Return the message content type."""
|
||||||
ct = self._orig.headers.get('Content-Type', '')
|
ct = self._orig.headers.get('Content-Type', '')
|
||||||
if not isinstance(ct, str):
|
if not isinstance(ct, str):
|
||||||
ct = ct.decode('utf8')
|
ct = ct.decode()
|
||||||
return ct
|
return ct
|
||||||
|
|
||||||
|
|
||||||
@ -51,36 +56,35 @@ class HTTPResponse(HTTPMessage):
|
|||||||
# noinspection PyProtectedMember
|
# noinspection PyProtectedMember
|
||||||
@property
|
@property
|
||||||
def headers(self):
|
def headers(self):
|
||||||
original = self._orig.raw._original_response
|
try:
|
||||||
|
raw_version = self._orig.raw._original_response.version
|
||||||
|
except AttributeError:
|
||||||
|
# Assume HTTP/1.1
|
||||||
|
raw_version = 11
|
||||||
version = {
|
version = {
|
||||||
9: '0.9',
|
9: '0.9',
|
||||||
10: '1.0',
|
10: '1.0',
|
||||||
11: '1.1',
|
11: '1.1',
|
||||||
20: '2',
|
20: '2',
|
||||||
}[original.version]
|
}[raw_version]
|
||||||
|
|
||||||
status_line = 'HTTP/{version} {status} {reason}'.format(
|
original = self._orig
|
||||||
version=version,
|
status_line = f'HTTP/{version} {original.status_code} {original.reason}'
|
||||||
status=original.status,
|
|
||||||
reason=original.reason
|
|
||||||
)
|
|
||||||
headers = [status_line]
|
headers = [status_line]
|
||||||
try:
|
headers.extend(
|
||||||
# `original.msg` is a `http.client.HTTPMessage` on Python 3
|
': '.join(header)
|
||||||
# `_headers` is a 2-tuple
|
for header in original.headers.items()
|
||||||
headers.extend(
|
if header[0] != 'Set-Cookie'
|
||||||
'%s: %s' % header for header in original.msg._headers)
|
)
|
||||||
except AttributeError:
|
headers.extend(
|
||||||
# and a `httplib.HTTPMessage` on Python 2.x
|
f'Set-Cookie: {cookie}'
|
||||||
# `headers` is a list of `name: val<CRLF>`.
|
for cookie in split_cookies(original.headers.get('Set-Cookie'))
|
||||||
headers.extend(h.strip() for h in original.msg.headers)
|
)
|
||||||
|
|
||||||
return '\r\n'.join(headers)
|
return '\r\n'.join(headers)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def encoding(self):
|
def encoding(self):
|
||||||
return self._orig.encoding or 'utf8'
|
return self._orig.encoding or UTF8
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def body(self):
|
def body(self):
|
||||||
@ -105,7 +109,7 @@ class HTTPRequest(HTTPMessage):
|
|||||||
request_line = '{method} {path}{query} HTTP/1.1'.format(
|
request_line = '{method} {path}{query} HTTP/1.1'.format(
|
||||||
method=self._orig.method,
|
method=self._orig.method,
|
||||||
path=url.path or '/',
|
path=url.path or '/',
|
||||||
query='?' + url.query if url.query else ''
|
query=f'?{url.query}' if url.query else ''
|
||||||
)
|
)
|
||||||
|
|
||||||
headers = dict(self._orig.headers)
|
headers = dict(self._orig.headers)
|
||||||
@ -113,29 +117,22 @@ class HTTPRequest(HTTPMessage):
|
|||||||
headers['Host'] = url.netloc.split('@')[-1]
|
headers['Host'] = url.netloc.split('@')[-1]
|
||||||
|
|
||||||
headers = [
|
headers = [
|
||||||
'%s: %s' % (
|
f'{name}: {value if isinstance(value, str) else value.decode()}'
|
||||||
name,
|
|
||||||
value if isinstance(value, str) else value.decode('utf8')
|
|
||||||
)
|
|
||||||
for name, value in headers.items()
|
for name, value in headers.items()
|
||||||
]
|
]
|
||||||
|
|
||||||
headers.insert(0, request_line)
|
headers.insert(0, request_line)
|
||||||
headers = '\r\n'.join(headers).strip()
|
headers = '\r\n'.join(headers).strip()
|
||||||
|
|
||||||
if isinstance(headers, bytes):
|
|
||||||
# Python < 3
|
|
||||||
headers = headers.decode('utf8')
|
|
||||||
return headers
|
return headers
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def encoding(self):
|
def encoding(self):
|
||||||
return 'utf8'
|
return UTF8
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def body(self):
|
def body(self):
|
||||||
body = self._orig.body
|
body = self._orig.body
|
||||||
if isinstance(body, str):
|
if isinstance(body, str):
|
||||||
# Happens with JSON/form request data parsed from the command line.
|
# Happens with JSON/form request data parsed from the command line.
|
||||||
body = body.encode('utf8')
|
body = body.encode()
|
||||||
return body or b''
|
return body or b''
|
||||||
|
@ -1,19 +1,21 @@
|
|||||||
from __future__ import absolute_import
|
|
||||||
import json
|
import json
|
||||||
|
from typing import Optional, Type
|
||||||
|
|
||||||
import pygments.lexer
|
import pygments.lexer
|
||||||
import pygments.token
|
|
||||||
import pygments.styles
|
|
||||||
import pygments.lexers
|
import pygments.lexers
|
||||||
import pygments.style
|
import pygments.style
|
||||||
|
import pygments.styles
|
||||||
|
import pygments.token
|
||||||
from pygments.formatters.terminal import TerminalFormatter
|
from pygments.formatters.terminal import TerminalFormatter
|
||||||
from pygments.formatters.terminal256 import Terminal256Formatter
|
from pygments.formatters.terminal256 import Terminal256Formatter
|
||||||
|
from pygments.lexer import Lexer
|
||||||
from pygments.lexers.special import TextLexer
|
from pygments.lexers.special import TextLexer
|
||||||
from pygments.lexers.text import HttpLexer as PygmentsHttpLexer
|
from pygments.lexers.text import HttpLexer as PygmentsHttpLexer
|
||||||
from pygments.util import ClassNotFound
|
from pygments.util import ClassNotFound
|
||||||
|
|
||||||
from httpie.compat import is_windows
|
from ...compat import is_windows
|
||||||
from httpie.plugins import FormatterPlugin
|
from ...context import Environment
|
||||||
|
from ...plugins import FormatterPlugin
|
||||||
|
|
||||||
|
|
||||||
AUTO_STYLE = 'auto' # Follows terminal ANSI color styles
|
AUTO_STYLE = 'auto' # Follows terminal ANSI color styles
|
||||||
@ -24,7 +26,6 @@ if is_windows:
|
|||||||
# great and fruity seems to give the best result there.
|
# great and fruity seems to give the best result there.
|
||||||
DEFAULT_STYLE = 'fruity'
|
DEFAULT_STYLE = 'fruity'
|
||||||
|
|
||||||
|
|
||||||
AVAILABLE_STYLES = set(pygments.styles.get_all_styles())
|
AVAILABLE_STYLES = set(pygments.styles.get_all_styles())
|
||||||
AVAILABLE_STYLES.add(SOLARIZED_STYLE)
|
AVAILABLE_STYLES.add(SOLARIZED_STYLE)
|
||||||
AVAILABLE_STYLES.add(AUTO_STYLE)
|
AVAILABLE_STYLES.add(AUTO_STYLE)
|
||||||
@ -40,9 +41,14 @@ class ColorFormatter(FormatterPlugin):
|
|||||||
"""
|
"""
|
||||||
group_name = 'colors'
|
group_name = 'colors'
|
||||||
|
|
||||||
def __init__(self, env, explicit_json=False,
|
def __init__(
|
||||||
color_scheme=DEFAULT_STYLE, **kwargs):
|
self,
|
||||||
super(ColorFormatter, self).__init__(**kwargs)
|
env: Environment,
|
||||||
|
explicit_json=False,
|
||||||
|
color_scheme=DEFAULT_STYLE,
|
||||||
|
**kwargs
|
||||||
|
):
|
||||||
|
super().__init__(**kwargs)
|
||||||
|
|
||||||
if not env.colors:
|
if not env.colors:
|
||||||
self.enabled = False
|
self.enabled = False
|
||||||
@ -63,14 +69,14 @@ class ColorFormatter(FormatterPlugin):
|
|||||||
self.formatter = formatter
|
self.formatter = formatter
|
||||||
self.http_lexer = http_lexer
|
self.http_lexer = http_lexer
|
||||||
|
|
||||||
def format_headers(self, headers):
|
def format_headers(self, headers: str) -> str:
|
||||||
return pygments.highlight(
|
return pygments.highlight(
|
||||||
code=headers,
|
code=headers,
|
||||||
lexer=self.http_lexer,
|
lexer=self.http_lexer,
|
||||||
formatter=self.formatter,
|
formatter=self.formatter,
|
||||||
).strip()
|
).strip()
|
||||||
|
|
||||||
def format_body(self, body, mime):
|
def format_body(self, body: str, mime: str) -> str:
|
||||||
lexer = self.get_lexer_for_body(mime, body)
|
lexer = self.get_lexer_for_body(mime, body)
|
||||||
if lexer:
|
if lexer:
|
||||||
body = pygments.highlight(
|
body = pygments.highlight(
|
||||||
@ -78,24 +84,31 @@ class ColorFormatter(FormatterPlugin):
|
|||||||
lexer=lexer,
|
lexer=lexer,
|
||||||
formatter=self.formatter,
|
formatter=self.formatter,
|
||||||
)
|
)
|
||||||
return body.strip()
|
return body
|
||||||
|
|
||||||
def get_lexer_for_body(self, mime, body):
|
def get_lexer_for_body(
|
||||||
|
self, mime: str,
|
||||||
|
body: str
|
||||||
|
) -> Optional[Type[Lexer]]:
|
||||||
return get_lexer(
|
return get_lexer(
|
||||||
mime=mime,
|
mime=mime,
|
||||||
explicit_json=self.explicit_json,
|
explicit_json=self.explicit_json,
|
||||||
body=body,
|
body=body,
|
||||||
)
|
)
|
||||||
|
|
||||||
def get_style_class(self, color_scheme):
|
@staticmethod
|
||||||
|
def get_style_class(color_scheme: str) -> Type[pygments.style.Style]:
|
||||||
try:
|
try:
|
||||||
return pygments.styles.get_style_by_name(color_scheme)
|
return pygments.styles.get_style_by_name(color_scheme)
|
||||||
except ClassNotFound:
|
except ClassNotFound:
|
||||||
return Solarized256Style
|
return Solarized256Style
|
||||||
|
|
||||||
|
|
||||||
def get_lexer(mime, explicit_json=False, body=''):
|
def get_lexer(
|
||||||
|
mime: str,
|
||||||
|
explicit_json=False,
|
||||||
|
body=''
|
||||||
|
) -> Optional[Type[Lexer]]:
|
||||||
# Build candidate mime type and lexer names.
|
# Build candidate mime type and lexer names.
|
||||||
mime_types, lexer_names = [mime], []
|
mime_types, lexer_names = [mime], []
|
||||||
type_, subtype = mime.split('/', 1)
|
type_, subtype = mime.split('/', 1)
|
||||||
@ -105,8 +118,8 @@ def get_lexer(mime, explicit_json=False, body=''):
|
|||||||
subtype_name, subtype_suffix = subtype.split('+', 1)
|
subtype_name, subtype_suffix = subtype.split('+', 1)
|
||||||
lexer_names.extend([subtype_name, subtype_suffix])
|
lexer_names.extend([subtype_name, subtype_suffix])
|
||||||
mime_types.extend([
|
mime_types.extend([
|
||||||
'%s/%s' % (type_, subtype_name),
|
f'{type_}/{subtype_name}',
|
||||||
'%s/%s' % (type_, subtype_suffix)
|
f'{type_}/{subtype_suffix}',
|
||||||
])
|
])
|
||||||
|
|
||||||
# As a last resort, if no lexer feels responsible, and
|
# As a last resort, if no lexer feels responsible, and
|
||||||
|
@@ -1,9 +1,13 @@
-from httpie.plugins import FormatterPlugin
+from ...plugins import FormatterPlugin


 class HeadersFormatter(FormatterPlugin):

-    def format_headers(self, headers):
+    def __init__(self, **kwargs):
+        super().__init__(**kwargs)
+        self.enabled = self.format_options['headers']['sort']
+
+    def format_headers(self, headers: str) -> str:
         """
         Sorts headers by name while retaining relative
         order of multiple headers with the same name.
@@ -1,15 +1,15 @@
-from __future__ import absolute_import
 import json

-from httpie.plugins import FormatterPlugin
-
-
-DEFAULT_INDENT = 4
+from ...plugins import FormatterPlugin


 class JSONFormatter(FormatterPlugin):

-    def format_body(self, body, mime):
+    def __init__(self, **kwargs):
+        super().__init__(**kwargs)
+        self.enabled = self.format_options['json']['format']
+
+    def format_body(self, body: str, mime: str) -> str:
         maybe_json = [
             'json',
             'javascript',
@@ -26,8 +26,8 @@ class JSONFormatter(FormatterPlugin):
                 # unicode escapes to improve readability.
                 body = json.dumps(
                     obj=obj,
-                    sort_keys=True,
+                    sort_keys=self.format_options['json']['sort_keys'],
                     ensure_ascii=False,
-                    indent=DEFAULT_INDENT
+                    indent=self.format_options['json']['indent']
                 )
         return body
httpie/output/formatters/xml.py (new file, 59 lines)
@@ -0,0 +1,59 @@
import sys
from typing import TYPE_CHECKING, Optional

from ...constants import UTF8
from ...plugins import FormatterPlugin

if TYPE_CHECKING:
    from xml.dom.minidom import Document


def parse_xml(data: str) -> 'Document':
    """Parse given XML `data` string into an appropriate :class:`~xml.dom.minidom.Document` object."""
    from defusedxml.minidom import parseString
    return parseString(data)


def pretty_xml(document: 'Document',
               encoding: Optional[str] = UTF8,
               indent: int = 2,
               standalone: Optional[bool] = None) -> str:
    """Render the given :class:`~xml.dom.minidom.Document` `document` into a prettified string."""
    kwargs = {
        'encoding': encoding or UTF8,
        'indent': ' ' * indent,
    }
    if standalone is not None and sys.version_info >= (3, 9):
        kwargs['standalone'] = standalone
    body = document.toprettyxml(**kwargs).decode()

    # Remove blank lines automatically added by `toprettyxml()`.
    return '\n'.join(line for line in body.splitlines() if line.strip())


class XMLFormatter(FormatterPlugin):

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.enabled = self.format_options['xml']['format']

    def format_body(self, body: str, mime: str):
        if 'xml' not in mime:
            return body

        from xml.parsers.expat import ExpatError
        from defusedxml.common import DefusedXmlException

        try:
            parsed_body = parse_xml(body)
        except ExpatError:
            pass  # Invalid XML, ignore.
        except DefusedXmlException:
            pass  # Unsafe XML, ignore.
        else:
            body = pretty_xml(parsed_body,
                              encoding=parsed_body.encoding,
                              indent=self.format_options['xml']['indent'],
                              standalone=parsed_body.standalone)

        return body
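# Illustrative usage sketch (not part of the file above); it assumes the
# third-party `defusedxml` package that `parse_xml()` imports is installed,
# and the exact XML declaration printed may vary by Python version.
def _pretty_xml_example():
    doc = parse_xml('<root><child>text</child></root>')
    print(pretty_xml(doc, indent=4))
    # Prints something like:
    # <?xml version="1.0" encoding="utf-8"?>
    # <root>
    #     <child>text</child>
    # </root>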
@@ -1,7 +1,9 @@
 import re
+from typing import Optional, List

-from httpie.plugins import plugin_manager
-from httpie.context import Environment
+from ..plugins import ConverterPlugin
+from ..plugins.registry import plugin_manager
+from ..context import Environment


 MIME_RE = re.compile(r'^[^/]+/[^/]+$')
@@ -11,19 +13,20 @@ def is_valid_mime(mime):
     return mime and MIME_RE.match(mime)


-class Conversion(object):
+class Conversion:

-    def get_converter(self, mime):
+    @staticmethod
+    def get_converter(mime: str) -> Optional[ConverterPlugin]:
         if is_valid_mime(mime):
             for converter_class in plugin_manager.get_converters():
                 if converter_class.supports(mime):
                     return converter_class(mime)


-class Formatting(object):
+class Formatting:
     """A delegate class that invokes the actual processors."""

-    def __init__(self, groups, env=Environment(), **kwargs):
+    def __init__(self, groups: List[str], env=Environment(), **kwargs):
         """
         :param groups: names of processor groups to be applied
         :param env: Environment
@@ -38,12 +41,12 @@ class Formatting:
             if p.enabled:
                 self.enabled_plugins.append(p)

-    def format_headers(self, headers):
+    def format_headers(self, headers: str) -> str:
         for p in self.enabled_plugins:
             headers = p.format_headers(headers)
         return headers

-    def format_body(self, content, mime):
+    def format_body(self, content: str, mime: str) -> str:
         if is_valid_mime(mime):
             for p in self.enabled_plugins:
                 content = p.format_body(content, mime)
@ -1,12 +1,11 @@
|
|||||||
|
from abc import ABCMeta, abstractmethod
|
||||||
from itertools import chain
|
from itertools import chain
|
||||||
from functools import partial
|
from typing import Callable, Iterable, Union
|
||||||
|
|
||||||
from httpie.compat import str
|
from ..context import Environment
|
||||||
from httpie.context import Environment
|
from ..constants import UTF8
|
||||||
from httpie.models import HTTPRequest, HTTPResponse
|
from ..models import HTTPMessage
|
||||||
from httpie.input import (OUT_REQ_BODY, OUT_REQ_HEAD,
|
from .processing import Conversion, Formatting
|
||||||
OUT_RESP_HEAD, OUT_RESP_BODY)
|
|
||||||
from httpie.output.processing import Formatting, Conversion
|
|
||||||
|
|
||||||
|
|
||||||
BINARY_SUPPRESSED_NOTICE = (
|
BINARY_SUPPRESSED_NOTICE = (
|
||||||
@ -17,119 +16,26 @@ BINARY_SUPPRESSED_NOTICE = (
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
class BinarySuppressedError(Exception):
|
class DataSuppressedError(Exception):
|
||||||
|
message = None
|
||||||
|
|
||||||
|
|
||||||
|
class BinarySuppressedError(DataSuppressedError):
|
||||||
"""An error indicating that the body is binary and won't be written,
|
"""An error indicating that the body is binary and won't be written,
|
||||||
e.g., for terminal output)."""
|
e.g., for terminal output)."""
|
||||||
|
|
||||||
message = BINARY_SUPPRESSED_NOTICE
|
message = BINARY_SUPPRESSED_NOTICE
|
||||||
|
|
||||||
|
|
||||||
def write_stream(stream, outfile, flush):
|
class BaseStream(metaclass=ABCMeta):
|
||||||
"""Write the output stream."""
|
|
||||||
try:
|
|
||||||
# Writing bytes so we use the buffer interface (Python 3).
|
|
||||||
buf = outfile.buffer
|
|
||||||
except AttributeError:
|
|
||||||
buf = outfile
|
|
||||||
|
|
||||||
for chunk in stream:
|
|
||||||
buf.write(chunk)
|
|
||||||
if flush:
|
|
||||||
outfile.flush()
|
|
||||||
|
|
||||||
|
|
||||||
def write_stream_with_colors_win_py3(stream, outfile, flush):
|
|
||||||
"""Like `write`, but colorized chunks are written as text
|
|
||||||
directly to `outfile` to ensure it gets processed by colorama.
|
|
||||||
Applies only to Windows with Python 3 and colorized terminal output.
|
|
||||||
|
|
||||||
"""
|
|
||||||
color = b'\x1b['
|
|
||||||
encoding = outfile.encoding
|
|
||||||
for chunk in stream:
|
|
||||||
if color in chunk:
|
|
||||||
outfile.write(chunk.decode(encoding))
|
|
||||||
else:
|
|
||||||
outfile.buffer.write(chunk)
|
|
||||||
if flush:
|
|
||||||
outfile.flush()
|
|
||||||
|
|
||||||
|
|
||||||
def build_output_stream(args, env, request, response, output_options):
|
|
||||||
"""Build and return a chain of iterators over the `request`-`response`
|
|
||||||
exchange each of which yields `bytes` chunks.
|
|
||||||
|
|
||||||
"""
|
|
||||||
req_h = OUT_REQ_HEAD in output_options
|
|
||||||
req_b = OUT_REQ_BODY in output_options
|
|
||||||
resp_h = OUT_RESP_HEAD in output_options
|
|
||||||
resp_b = OUT_RESP_BODY in output_options
|
|
||||||
req = req_h or req_b
|
|
||||||
resp = resp_h or resp_b
|
|
||||||
|
|
||||||
output = []
|
|
||||||
Stream = get_stream_type(env, args)
|
|
||||||
|
|
||||||
if req:
|
|
||||||
output.append(Stream(
|
|
||||||
msg=HTTPRequest(request),
|
|
||||||
with_headers=req_h,
|
|
||||||
with_body=req_b))
|
|
||||||
|
|
||||||
if req_b and resp:
|
|
||||||
# Request/Response separator.
|
|
||||||
output.append([b'\n\n'])
|
|
||||||
|
|
||||||
if resp:
|
|
||||||
output.append(Stream(
|
|
||||||
msg=HTTPResponse(response),
|
|
||||||
with_headers=resp_h,
|
|
||||||
with_body=resp_b))
|
|
||||||
|
|
||||||
if env.stdout_isatty and resp_b:
|
|
||||||
# Ensure a blank line after the response body.
|
|
||||||
# For terminal output only.
|
|
||||||
output.append([b'\n\n'])
|
|
||||||
|
|
||||||
return chain(*output)
|
|
||||||
|
|
||||||
|
|
||||||
def get_stream_type(env, args):
|
|
||||||
"""Pick the right stream type based on `env` and `args`.
|
|
||||||
Wrap it in a partial with the type-specific args so that
|
|
||||||
we don't need to think what stream we are dealing with.
|
|
||||||
|
|
||||||
"""
|
|
||||||
if not env.stdout_isatty and not args.prettify:
|
|
||||||
Stream = partial(
|
|
||||||
RawStream,
|
|
||||||
chunk_size=RawStream.CHUNK_SIZE_BY_LINE
|
|
||||||
if args.stream
|
|
||||||
else RawStream.CHUNK_SIZE
|
|
||||||
)
|
|
||||||
elif args.prettify:
|
|
||||||
Stream = partial(
|
|
||||||
PrettyStream if args.stream else BufferedPrettyStream,
|
|
||||||
env=env,
|
|
||||||
conversion=Conversion(),
|
|
||||||
formatting=Formatting(
|
|
||||||
env=env,
|
|
||||||
groups=args.prettify,
|
|
||||||
color_scheme=args.style,
|
|
||||||
explicit_json=args.json,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
Stream = partial(EncodedStream, env=env)
|
|
||||||
|
|
||||||
return Stream
|
|
||||||
|
|
||||||
|
|
||||||
class BaseStream(object):
|
|
||||||
"""Base HTTP message output stream class."""
|
"""Base HTTP message output stream class."""
|
||||||
|
|
||||||
def __init__(self, msg, with_headers=True, with_body=True,
|
def __init__(
|
||||||
on_body_chunk_downloaded=None):
|
self,
|
||||||
|
msg: HTTPMessage,
|
||||||
|
with_headers=True,
|
||||||
|
with_body=True,
|
||||||
|
on_body_chunk_downloaded: Callable[[bytes], None] = None
|
||||||
|
):
|
||||||
"""
|
"""
|
||||||
:param msg: a :class:`models.HTTPMessage` subclass
|
:param msg: a :class:`models.HTTPMessage` subclass
|
||||||
:param with_headers: if `True`, headers will be included
|
:param with_headers: if `True`, headers will be included
|
||||||
@ -142,15 +48,15 @@ class BaseStream(object):
|
|||||||
self.with_body = with_body
|
self.with_body = with_body
|
||||||
self.on_body_chunk_downloaded = on_body_chunk_downloaded
|
self.on_body_chunk_downloaded = on_body_chunk_downloaded
|
||||||
|
|
||||||
def get_headers(self):
|
def get_headers(self) -> bytes:
|
||||||
"""Return the headers' bytes."""
|
"""Return the headers' bytes."""
|
||||||
return self.msg.headers.encode('utf8')
|
return self.msg.headers.encode()
|
||||||
|
|
||||||
def iter_body(self):
|
@abstractmethod
|
||||||
|
def iter_body(self) -> Iterable[bytes]:
|
||||||
"""Return an iterator over the message body."""
|
"""Return an iterator over the message body."""
|
||||||
raise NotImplementedError()
|
|
||||||
|
|
||||||
def __iter__(self):
|
def __iter__(self) -> Iterable[bytes]:
|
||||||
"""Return an iterator over `self.msg`."""
|
"""Return an iterator over `self.msg`."""
|
||||||
if self.with_headers:
|
if self.with_headers:
|
||||||
yield self.get_headers()
|
yield self.get_headers()
|
||||||
@ -162,7 +68,7 @@ class BaseStream(object):
|
|||||||
yield chunk
|
yield chunk
|
||||||
if self.on_body_chunk_downloaded:
|
if self.on_body_chunk_downloaded:
|
||||||
self.on_body_chunk_downloaded(chunk)
|
self.on_body_chunk_downloaded(chunk)
|
||||||
except BinarySuppressedError as e:
|
except DataSuppressedError as e:
|
||||||
if self.with_headers:
|
if self.with_headers:
|
||||||
yield b'\n'
|
yield b'\n'
|
||||||
yield e.message
|
yield e.message
|
||||||
@ -175,10 +81,10 @@ class RawStream(BaseStream):
|
|||||||
CHUNK_SIZE_BY_LINE = 1
|
CHUNK_SIZE_BY_LINE = 1
|
||||||
|
|
||||||
def __init__(self, chunk_size=CHUNK_SIZE, **kwargs):
|
def __init__(self, chunk_size=CHUNK_SIZE, **kwargs):
|
||||||
super(RawStream, self).__init__(**kwargs)
|
super().__init__(**kwargs)
|
||||||
self.chunk_size = chunk_size
|
self.chunk_size = chunk_size
|
||||||
|
|
||||||
def iter_body(self):
|
def iter_body(self) -> Iterable[bytes]:
|
||||||
return self.msg.iter_body(self.chunk_size)
|
return self.msg.iter_body(self.chunk_size)
|
||||||
|
|
||||||
|
|
||||||
@ -193,26 +99,20 @@ class EncodedStream(BaseStream):
|
|||||||
CHUNK_SIZE = 1
|
CHUNK_SIZE = 1
|
||||||
|
|
||||||
def __init__(self, env=Environment(), **kwargs):
|
def __init__(self, env=Environment(), **kwargs):
|
||||||
|
super().__init__(**kwargs)
|
||||||
super(EncodedStream, self).__init__(**kwargs)
|
|
||||||
|
|
||||||
if env.stdout_isatty:
|
if env.stdout_isatty:
|
||||||
# Use the encoding supported by the terminal.
|
# Use the encoding supported by the terminal.
|
||||||
output_encoding = env.stdout_encoding
|
output_encoding = env.stdout_encoding
|
||||||
else:
|
else:
|
||||||
# Preserve the message encoding.
|
# Preserve the message encoding.
|
||||||
output_encoding = self.msg.encoding
|
output_encoding = self.msg.encoding
|
||||||
|
# Default to UTF-8 when unsure.
|
||||||
|
self.output_encoding = output_encoding or UTF8
|
||||||
|
|
||||||
# Default to utf8 when unsure.
|
def iter_body(self) -> Iterable[bytes]:
|
||||||
self.output_encoding = output_encoding or 'utf8'
|
|
||||||
|
|
||||||
def iter_body(self):
|
|
||||||
|
|
||||||
for line, lf in self.msg.iter_lines(self.CHUNK_SIZE):
|
for line, lf in self.msg.iter_lines(self.CHUNK_SIZE):
|
||||||
|
|
||||||
if b'\0' in line:
|
if b'\0' in line:
|
||||||
raise BinarySuppressedError()
|
raise BinarySuppressedError()
|
||||||
|
|
||||||
yield line.decode(self.msg.encoding) \
|
yield line.decode(self.msg.encoding) \
|
||||||
.encode(self.output_encoding, 'replace') + lf
|
.encode(self.output_encoding, 'replace') + lf
|
||||||
|
|
||||||
@ -228,17 +128,21 @@ class PrettyStream(EncodedStream):
|
|||||||
|
|
||||||
CHUNK_SIZE = 1
|
CHUNK_SIZE = 1
|
||||||
|
|
||||||
def __init__(self, conversion, formatting, **kwargs):
|
def __init__(
|
||||||
super(PrettyStream, self).__init__(**kwargs)
|
self, conversion: Conversion,
|
||||||
|
formatting: Formatting,
|
||||||
|
**kwargs,
|
||||||
|
):
|
||||||
|
super().__init__(**kwargs)
|
||||||
self.formatting = formatting
|
self.formatting = formatting
|
||||||
self.conversion = conversion
|
self.conversion = conversion
|
||||||
self.mime = self.msg.content_type.split(';')[0]
|
self.mime = self.msg.content_type.split(';')[0]
|
||||||
|
|
||||||
def get_headers(self):
|
def get_headers(self) -> bytes:
|
||||||
return self.formatting.format_headers(
|
return self.formatting.format_headers(
|
||||||
self.msg.headers).encode(self.output_encoding)
|
self.msg.headers).encode(self.output_encoding)
|
||||||
|
|
||||||
def iter_body(self):
|
def iter_body(self) -> Iterable[bytes]:
|
||||||
first_chunk = True
|
first_chunk = True
|
||||||
iter_lines = self.msg.iter_lines(self.CHUNK_SIZE)
|
iter_lines = self.msg.iter_lines(self.CHUNK_SIZE)
|
||||||
for line, lf in iter_lines:
|
for line, lf in iter_lines:
|
||||||
@ -259,7 +163,7 @@ class PrettyStream(EncodedStream):
|
|||||||
yield self.process_body(line) + lf
|
yield self.process_body(line) + lf
|
||||||
first_chunk = False
|
first_chunk = False
|
||||||
|
|
||||||
def process_body(self, chunk):
|
def process_body(self, chunk: Union[str, bytes]) -> bytes:
|
||||||
if not isinstance(chunk, str):
|
if not isinstance(chunk, str):
|
||||||
# Text when a converter has been used,
|
# Text when a converter has been used,
|
||||||
# otherwise it will always be bytes.
|
# otherwise it will always be bytes.
|
||||||
@ -278,7 +182,7 @@ class BufferedPrettyStream(PrettyStream):
|
|||||||
|
|
||||||
CHUNK_SIZE = 1024 * 10
|
CHUNK_SIZE = 1024 * 10
|
||||||
|
|
||||||
def iter_body(self):
|
def iter_body(self) -> Iterable[bytes]:
|
||||||
# Read the whole body before prettifying it,
|
# Read the whole body before prettifying it,
|
||||||
# but bail out immediately if the body is binary.
|
# but bail out immediately if the body is binary.
|
||||||
converter = None
|
converter = None
|
||||||
|
156
httpie/output/writer.py
Normal file
156
httpie/output/writer.py
Normal file
@ -0,0 +1,156 @@
|
|||||||
|
import argparse
|
||||||
|
import errno
|
||||||
|
from typing import IO, TextIO, Tuple, Type, Union
|
||||||
|
|
||||||
|
import requests
|
||||||
|
|
||||||
|
from ..context import Environment
|
||||||
|
from ..models import HTTPRequest, HTTPResponse
|
||||||
|
from .processing import Conversion, Formatting
|
||||||
|
from .streams import (
|
||||||
|
BaseStream, BufferedPrettyStream, EncodedStream, PrettyStream, RawStream,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
MESSAGE_SEPARATOR = '\n\n'
|
||||||
|
MESSAGE_SEPARATOR_BYTES = MESSAGE_SEPARATOR.encode()
|
||||||
|
|
||||||
|
|
||||||
|
def write_message(
|
||||||
|
requests_message: Union[requests.PreparedRequest, requests.Response],
|
||||||
|
env: Environment,
|
||||||
|
args: argparse.Namespace,
|
||||||
|
with_headers=False,
|
||||||
|
with_body=False,
|
||||||
|
):
|
||||||
|
if not (with_body or with_headers):
|
||||||
|
return
|
||||||
|
write_stream_kwargs = {
|
||||||
|
'stream': build_output_stream_for_message(
|
||||||
|
args=args,
|
||||||
|
env=env,
|
||||||
|
requests_message=requests_message,
|
||||||
|
with_body=with_body,
|
||||||
|
with_headers=with_headers,
|
||||||
|
),
|
||||||
|
# NOTE: `env.stdout` will in fact be `stderr` with `--download`
|
||||||
|
'outfile': env.stdout,
|
||||||
|
'flush': env.stdout_isatty or args.stream
|
||||||
|
}
|
||||||
|
try:
|
||||||
|
if env.is_windows and 'colors' in args.prettify:
|
||||||
|
write_stream_with_colors_win(**write_stream_kwargs)
|
||||||
|
else:
|
||||||
|
write_stream(**write_stream_kwargs)
|
||||||
|
except OSError as e:
|
||||||
|
show_traceback = args.debug or args.traceback
|
||||||
|
if not show_traceback and e.errno == errno.EPIPE:
|
||||||
|
# Ignore broken pipes unless --traceback.
|
||||||
|
env.stderr.write('\n')
|
||||||
|
else:
|
||||||
|
raise
|
||||||
|
|
||||||
|
|
||||||
|
def write_stream(
|
||||||
|
stream: BaseStream,
|
||||||
|
outfile: Union[IO, TextIO],
|
||||||
|
flush: bool
|
||||||
|
):
|
||||||
|
"""Write the output stream."""
|
||||||
|
try:
|
||||||
|
# Writing bytes so we use the buffer interface.
|
||||||
|
buf = outfile.buffer
|
||||||
|
except AttributeError:
|
||||||
|
buf = outfile
|
||||||
|
|
||||||
|
for chunk in stream:
|
||||||
|
buf.write(chunk)
|
||||||
|
if flush:
|
||||||
|
outfile.flush()
|
||||||
|
|
||||||
|
|
||||||
|
def write_stream_with_colors_win(
|
||||||
|
stream: 'BaseStream',
|
||||||
|
outfile: TextIO,
|
||||||
|
flush: bool
|
||||||
|
):
|
||||||
|
"""Like `write`, but colorized chunks are written as text
|
||||||
|
directly to `outfile` to ensure it gets processed by colorama.
|
||||||
|
Applies only to Windows and colorized terminal output.
|
||||||
|
|
||||||
|
"""
|
||||||
|
color = b'\x1b['
|
||||||
|
encoding = outfile.encoding
|
||||||
|
for chunk in stream:
|
||||||
|
if color in chunk:
|
||||||
|
outfile.write(chunk.decode(encoding))
|
||||||
|
else:
|
||||||
|
outfile.buffer.write(chunk)
|
||||||
|
if flush:
|
||||||
|
outfile.flush()
|
||||||
|
|
||||||
|
|
||||||
|
def build_output_stream_for_message(
|
||||||
|
args: argparse.Namespace,
|
||||||
|
env: Environment,
|
||||||
|
requests_message: Union[requests.PreparedRequest, requests.Response],
|
||||||
|
with_headers: bool,
|
||||||
|
with_body: bool,
|
||||||
|
):
|
||||||
|
stream_class, stream_kwargs = get_stream_type_and_kwargs(
|
||||||
|
env=env,
|
||||||
|
args=args,
|
||||||
|
)
|
||||||
|
message_class = {
|
||||||
|
requests.PreparedRequest: HTTPRequest,
|
||||||
|
requests.Response: HTTPResponse,
|
||||||
|
}[type(requests_message)]
|
||||||
|
yield from stream_class(
|
||||||
|
msg=message_class(requests_message),
|
||||||
|
with_headers=with_headers,
|
||||||
|
with_body=with_body,
|
||||||
|
**stream_kwargs,
|
||||||
|
)
|
||||||
|
if (env.stdout_isatty and with_body
|
||||||
|
and not getattr(requests_message, 'is_body_upload_chunk', False)):
|
||||||
|
# Ensure a blank line after the response body.
|
||||||
|
# For terminal output only.
|
||||||
|
yield MESSAGE_SEPARATOR_BYTES
|
||||||
|
|
||||||
|
|
||||||
|
def get_stream_type_and_kwargs(
|
||||||
|
env: Environment,
|
||||||
|
args: argparse.Namespace
|
||||||
|
) -> Tuple[Type['BaseStream'], dict]:
|
||||||
|
"""Pick the right stream type and kwargs for it based on `env` and `args`.
|
||||||
|
|
||||||
|
"""
|
||||||
|
if not env.stdout_isatty and not args.prettify:
|
||||||
|
stream_class = RawStream
|
||||||
|
stream_kwargs = {
|
||||||
|
'chunk_size': (
|
||||||
|
RawStream.CHUNK_SIZE_BY_LINE
|
||||||
|
if args.stream
|
||||||
|
else RawStream.CHUNK_SIZE
|
||||||
|
)
|
||||||
|
}
|
||||||
|
elif args.prettify:
|
||||||
|
stream_class = PrettyStream if args.stream else BufferedPrettyStream
|
||||||
|
stream_kwargs = {
|
||||||
|
'env': env,
|
||||||
|
'conversion': Conversion(),
|
||||||
|
'formatting': Formatting(
|
||||||
|
env=env,
|
||||||
|
groups=args.prettify,
|
||||||
|
color_scheme=args.style,
|
||||||
|
explicit_json=args.json,
|
||||||
|
format_options=args.format_options,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
else:
|
||||||
|
stream_class = EncodedStream
|
||||||
|
stream_kwargs = {
|
||||||
|
'env': env
|
||||||
|
}
|
||||||
|
|
||||||
|
return stream_class, stream_kwargs
|
@@ -1,22 +1,11 @@
 """
 WARNING: The plugin API is still work in progress and will
-probably be completely reworked by v1.0.0.
+probably be completely reworked in the future.

 """
-from httpie.plugins.base import (
+from .base import (
     AuthPlugin, FormatterPlugin,
     ConverterPlugin, TransportPlugin
 )
-from httpie.plugins.manager import PluginManager
-from httpie.plugins.builtin import BasicAuthPlugin, DigestAuthPlugin
-from httpie.output.formatters.headers import HeadersFormatter
-from httpie.output.formatters.json import JSONFormatter
-from httpie.output.formatters.colors import ColorFormatter

-plugin_manager = PluginManager()
-plugin_manager.register(BasicAuthPlugin,
-                        DigestAuthPlugin)
-plugin_manager.register(HeadersFormatter,
-                        JSONFormatter,
-                        ColorFormatter)
+
+__all__ = ('AuthPlugin', 'ConverterPlugin', 'FormatterPlugin', 'TransportPlugin')
@ -1,9 +1,9 @@
|
|||||||
class BasePlugin(object):
|
class BasePlugin:
|
||||||
|
|
||||||
# The name of the plugin, eg. "My auth".
|
# The name of the plugin, eg. "My auth".
|
||||||
name = None
|
name = None
|
||||||
|
|
||||||
# Optional short description. Will be be shown in the help
|
# Optional short description. It will be shown in the help
|
||||||
# under --auth-type.
|
# under --auth-type.
|
||||||
description = None
|
description = None
|
||||||
|
|
||||||
@ -15,7 +15,9 @@ class AuthPlugin(BasePlugin):
|
|||||||
"""
|
"""
|
||||||
Base auth plugin class.
|
Base auth plugin class.
|
||||||
|
|
||||||
See <https://github.com/httpie/httpie-ntlm> for an example auth plugin.
|
See httpie-ntlm for an example auth plugin:
|
||||||
|
|
||||||
|
<https://github.com/httpie/httpie-ntlm>
|
||||||
|
|
||||||
See also `test_auth_plugins.py`
|
See also `test_auth_plugins.py`
|
||||||
|
|
||||||
@ -33,13 +35,22 @@ class AuthPlugin(BasePlugin):
|
|||||||
# Set this to `False` to disable the parsing and error handling.
|
# Set this to `False` to disable the parsing and error handling.
|
||||||
auth_parse = True
|
auth_parse = True
|
||||||
|
|
||||||
|
# Set to `True` to make it possible for this auth
|
||||||
|
# plugin to acquire credentials from the user’s netrc file(s).
|
||||||
|
# It is used as a fallback when the credentials are not provided explicitly
|
||||||
|
# through `--auth, -a`. Enabling this will allow skipping `--auth, -a`
|
||||||
|
# even when `auth_require` is set `True` (provided that netrc provides
|
||||||
|
# credential for a given host).
|
||||||
|
netrc_parse = False
|
||||||
|
|
||||||
# If both `auth_parse` and `prompt_password` are set to `True`,
|
# If both `auth_parse` and `prompt_password` are set to `True`,
|
||||||
# and the value of `-a` lacks the password part,
|
# and the value of `-a` lacks the password part,
|
||||||
# then the user will be prompted to type the password in.
|
# then the user will be prompted to type the password in.
|
||||||
prompt_password = True
|
prompt_password = True
|
||||||
|
|
||||||
# Will be set to the raw value of `-a` (if provided) before
|
# Will be set to the raw value of `-a` (if provided) before
|
||||||
# `get_auth()` gets called.
|
# `get_auth()` gets called. If the credentials came from a netrc file,
|
||||||
|
# then this is `None`.
|
||||||
raw_auth = None
|
raw_auth = None
|
||||||
|
|
||||||
def get_auth(self, username=None, password=None):
|
def get_auth(self, username=None, password=None):
|
||||||
@ -58,8 +69,13 @@ class AuthPlugin(BasePlugin):
|
|||||||
|
|
||||||
class TransportPlugin(BasePlugin):
|
class TransportPlugin(BasePlugin):
|
||||||
"""
|
"""
|
||||||
|
Requests transport adapter docs:
|
||||||
|
|
||||||
http://docs.python-requests.org/en/latest/user/advanced/#transport-adapters
|
<https://requests.readthedocs.io/en/latest/user/advanced/#transport-adapters>
|
||||||
|
|
||||||
|
See httpie-unixsocket for an example transport plugin:
|
||||||
|
|
||||||
|
<https://github.com/httpie/httpie-unixsocket>
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
@ -75,7 +91,15 @@ class TransportPlugin(BasePlugin):
|
|||||||
raise NotImplementedError()
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
|
||||||
class ConverterPlugin(object):
|
class ConverterPlugin(BasePlugin):
|
||||||
|
"""
|
||||||
|
Possibly converts response data for prettified terminal display.
|
||||||
|
|
||||||
|
See httpie-msgpack for an example converter plugin:
|
||||||
|
|
||||||
|
<https://github.com/rasky/httpie-msgpack>.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
def __init__(self, mime):
|
def __init__(self, mime):
|
||||||
self.mime = mime
|
self.mime = mime
|
||||||
@ -88,19 +112,25 @@ class ConverterPlugin(object):
|
|||||||
raise NotImplementedError
|
raise NotImplementedError
|
||||||
|
|
||||||
|
|
||||||
class FormatterPlugin(object):
|
class FormatterPlugin(BasePlugin):
|
||||||
|
"""
|
||||||
|
Possibly formats response body & headers for prettified terminal display.
|
||||||
|
|
||||||
|
"""
|
||||||
|
group_name = 'format'
|
||||||
|
|
||||||
def __init__(self, **kwargs):
|
def __init__(self, **kwargs):
|
||||||
"""
|
"""
|
||||||
:param env: an class:`Environment` instance
|
:param env: an class:`Environment` instance
|
||||||
:param kwargs: additional keyword argument that some
|
:param kwargs: additional keyword argument that some
|
||||||
processor might require.
|
formatters might require.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
self.enabled = True
|
self.enabled = True
|
||||||
self.kwargs = kwargs
|
self.kwargs = kwargs
|
||||||
|
self.format_options = kwargs['format_options']
|
||||||
|
|
||||||
def format_headers(self, headers):
|
def format_headers(self, headers: str) -> str:
|
||||||
"""Return processed `headers`
|
"""Return processed `headers`
|
||||||
|
|
||||||
:param headers: The headers as text.
|
:param headers: The headers as text.
|
||||||
@ -108,7 +138,7 @@ class FormatterPlugin(object):
|
|||||||
"""
|
"""
|
||||||
return headers
|
return headers
|
||||||
|
|
||||||
def format_body(self, content, mime):
|
def format_body(self, content: str, mime: str) -> str:
|
||||||
"""Return processed `content`.
|
"""Return processed `content`.
|
||||||
|
|
||||||
:param mime: E.g., 'application/atom+xml'.
|
:param mime: E.g., 'application/atom+xml'.
|
||||||
|
@@ -2,50 +2,57 @@ from base64 import b64encode
 
 import requests.auth
 
-from httpie.plugins.base import AuthPlugin
+from .base import AuthPlugin
 
 
 # noinspection PyAbstractClass
 class BuiltinAuthPlugin(AuthPlugin):
 
     package_name = '(builtin)'
 
 
 class HTTPBasicAuth(requests.auth.HTTPBasicAuth):
 
-    def __call__(self, r):
+    def __call__(
+        self,
+        request: requests.PreparedRequest
+    ) -> requests.PreparedRequest:
         """
         Override username/password serialization to allow unicode.
 
-        See https://github.com/jakubroztocil/httpie/issues/212
+        See https://github.com/httpie/httpie/issues/212
 
         """
-        r.headers['Authorization'] = type(self).make_header(
+        # noinspection PyTypeChecker
+        request.headers['Authorization'] = type(self).make_header(
             self.username, self.password).encode('latin1')
-        return r
+        return request
 
     @staticmethod
-    def make_header(username, password):
-        credentials = u'%s:%s' % (username, password)
-        token = b64encode(credentials.encode('utf8')).strip().decode('latin1')
-        return 'Basic %s' % token
+    def make_header(username: str, password: str) -> str:
+        credentials = f'{username}:{password}'
+        token = b64encode(credentials.encode()).strip().decode('latin1')
+        return f'Basic {token}'
 
 
 class BasicAuthPlugin(BuiltinAuthPlugin):
 
     name = 'Basic HTTP auth'
     auth_type = 'basic'
+    netrc_parse = True
 
     # noinspection PyMethodOverriding
-    def get_auth(self, username, password):
+    def get_auth(self, username: str, password: str) -> HTTPBasicAuth:
         return HTTPBasicAuth(username, password)
 
 
 class DigestAuthPlugin(BuiltinAuthPlugin):
 
     name = 'Digest HTTP auth'
     auth_type = 'digest'
+    netrc_parse = True
 
     # noinspection PyMethodOverriding
-    def get_auth(self, username, password):
+    def get_auth(
+        self,
+        username: str,
+        password: str
+    ) -> requests.auth.HTTPDigestAuth:
         return requests.auth.HTTPDigestAuth(username, password)
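A small usage sketch of the refactored built-in Basic auth classes; the credentials are made up for illustration.

from httpie.plugins.builtin import BasicAuthPlugin, HTTPBasicAuth

# make_header() base64-encodes "username:password" (UTF-8) into a Basic token.
assert HTTPBasicAuth.make_header('user', 'p@ss') == 'Basic dXNlcjpwQHNz'

# The plugin simply wraps the credentials in the unicode-safe HTTPBasicAuth.
auth = BasicAuthPlugin().get_auth('user', 'p@ss')
assert isinstance(auth, HTTPBasicAuth)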
@@ -1,7 +1,11 @@
 from itertools import groupby
+from operator import attrgetter
+from typing import Dict, List, Type
+
 from pkg_resources import iter_entry_points
-from httpie.plugins import AuthPlugin, FormatterPlugin, ConverterPlugin
-from httpie.plugins.base import TransportPlugin
+
+from . import AuthPlugin, ConverterPlugin, FormatterPlugin
+from .base import BasePlugin, TransportPlugin
 
 
 ENTRY_POINT_NAMES = [
@@ -12,20 +16,17 @@ ENTRY_POINT_NAMES = [
 ]
 
 
-class PluginManager(object):
+class PluginManager(list):
 
-    def __init__(self):
-        self._plugins = []
-
-    def __iter__(self):
-        return iter(self._plugins)
-
-    def register(self, *plugins):
+    def register(self, *plugins: Type[BasePlugin]):
         for plugin in plugins:
-            self._plugins.append(plugin)
+            self.append(plugin)
 
-    def unregister(self, plugin):
-        self._plugins.remove(plugin)
+    def unregister(self, plugin: Type[BasePlugin]):
+        self.remove(plugin)
+
+    def filter(self, by_type=Type[BasePlugin]):
+        return [plugin for plugin in self if issubclass(plugin, by_type)]
 
     def load_installed_plugins(self):
         for entry_point_name in ENTRY_POINT_NAMES:
@@ -35,33 +36,34 @@ class PluginManager(object):
             self.register(entry_point.load())
 
     # Auth
-    def get_auth_plugins(self):
-        return [plugin for plugin in self if issubclass(plugin, AuthPlugin)]
+    def get_auth_plugins(self) -> List[Type[AuthPlugin]]:
+        return self.filter(AuthPlugin)
 
-    def get_auth_plugin_mapping(self):
-        return {plugin.auth_type: plugin for plugin in self.get_auth_plugins()}
+    def get_auth_plugin_mapping(self) -> Dict[str, Type[AuthPlugin]]:
+        return {
+            plugin.auth_type: plugin for plugin in self.get_auth_plugins()
+        }
 
-    def get_auth_plugin(self, auth_type):
+    def get_auth_plugin(self, auth_type: str) -> Type[AuthPlugin]:
         return self.get_auth_plugin_mapping()[auth_type]
 
     # Output processing
-    def get_formatters(self):
-        return [plugin for plugin in self
-                if issubclass(plugin, FormatterPlugin)]
+    def get_formatters(self) -> List[Type[FormatterPlugin]]:
+        return self.filter(FormatterPlugin)
 
-    def get_formatters_grouped(self):
-        groups = {}
-        for group_name, group in groupby(
-                self.get_formatters(),
-                key=lambda p: getattr(p, 'group_name', 'format')):
-            groups[group_name] = list(group)
-        return groups
+    def get_formatters_grouped(self) -> Dict[str, List[Type[FormatterPlugin]]]:
+        return {
+            group_name: list(group)
+            for group_name, group
+            in groupby(self.get_formatters(), key=attrgetter('group_name'))
+        }
 
-    def get_converters(self):
-        return [plugin for plugin in self
-                if issubclass(plugin, ConverterPlugin)]
+    def get_converters(self) -> List[Type[ConverterPlugin]]:
+        return self.filter(ConverterPlugin)
 
     # Adapters
-    def get_transport_plugins(self):
-        return [plugin for plugin in self
-                if issubclass(plugin, TransportPlugin)]
+    def get_transport_plugins(self) -> List[Type[TransportPlugin]]:
+        return self.filter(TransportPlugin)
+
+    def __repr__(self):
+        return f'<PluginManager: {list(self)}>'
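Because PluginManager is now a plain list subclass, registration and lookup can be exercised directly. A minimal sketch, assuming the package layout shown in these diffs; DummyAuthPlugin is a hypothetical stand-in.

from httpie.plugins import AuthPlugin
from httpie.plugins.manager import PluginManager


class DummyAuthPlugin(AuthPlugin):
    name = 'Dummy auth'   # hypothetical plugin, for illustration only
    auth_type = 'dummy'

    def get_auth(self, username=None, password=None):
        return None


manager = PluginManager()
manager.register(DummyAuthPlugin)

assert DummyAuthPlugin in manager                           # it is just a list
assert manager.get_auth_plugin('dummy') is DummyAuthPlugin  # via the auth_type mapping
print(manager)                                              # <PluginManager: [...]>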
httpie/plugins/registry.py (new file, 20)
@@ -0,0 +1,20 @@
+from .manager import PluginManager
+from .builtin import BasicAuthPlugin, DigestAuthPlugin
+from ..output.formatters.headers import HeadersFormatter
+from ..output.formatters.json import JSONFormatter
+from ..output.formatters.xml import XMLFormatter
+from ..output.formatters.colors import ColorFormatter
+
+
+plugin_manager = PluginManager()
+
+
+# Register all built-in plugins.
+plugin_manager.register(
+    BasicAuthPlugin,
+    DigestAuthPlugin,
+    HeadersFormatter,
+    JSONFormatter,
+    XMLFormatter,
+    ColorFormatter,
+)
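The new registry module gives the rest of the code a single, importable plugin_manager instance. A short sketch of how it is typically consumed:

from httpie.plugins.registry import plugin_manager

# Built-in plugins are registered at import time (see above); third-party
# plugins are pulled in from the ENTRY_POINT_NAMES entry points.
plugin_manager.load_installed_plugins()
print(plugin_manager.get_auth_plugin_mapping())   # e.g. {'basic': ..., 'digest': ...}
print(plugin_manager.get_formatters_grouped())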
@@ -1,85 +1,62 @@
-"""Persistent, JSON-serialized sessions.
+"""
+Persistent, JSON-serialized sessions.
 
 """
-import re
 import os
+import re
 
+from http.cookies import SimpleCookie
+from pathlib import Path
+from typing import Iterable, Optional, Union
+from urllib.parse import urlsplit
 
+from requests.auth import AuthBase
 from requests.cookies import RequestsCookieJar, create_cookie
 
-from httpie.compat import urlsplit
-from httpie.config import BaseConfigDict, DEFAULT_CONFIG_DIR
-from httpie.plugins import plugin_manager
+from .cli.dicts import RequestHeadersDict
+from .config import BaseConfigDict, DEFAULT_CONFIG_DIR
+from .plugins.registry import plugin_manager
 
 
 SESSIONS_DIR_NAME = 'sessions'
-DEFAULT_SESSIONS_DIR = os.path.join(DEFAULT_CONFIG_DIR, SESSIONS_DIR_NAME)
+DEFAULT_SESSIONS_DIR = DEFAULT_CONFIG_DIR / SESSIONS_DIR_NAME
 VALID_SESSION_NAME_PATTERN = re.compile('^[a-zA-Z0-9_.-]+$')
 # Request headers starting with these prefixes won't be stored in sessions.
 # They are specific to each request.
-# http://en.wikipedia.org/wiki/List_of_HTTP_header_fields#Requests
+# <https://en.wikipedia.org/wiki/List_of_HTTP_header_fields#Requests>
 SESSION_IGNORED_HEADER_PREFIXES = ['Content-', 'If-']
 
 
-def get_response(requests_session, session_name,
-                 config_dir, args, read_only=False):
-    """Like `client.get_responses`, but applies permanent
-    aspects of the session to the request.
-
-    """
-    from .client import get_requests_kwargs, dump_request
+def get_httpie_session(
+    config_dir: Path,
+    session_name: str,
+    host: Optional[str],
+    url: str,
+) -> 'Session':
     if os.path.sep in session_name:
         path = os.path.expanduser(session_name)
     else:
-        hostname = (args.headers.get('Host', None)
-                    or urlsplit(args.url).netloc.split('@')[-1])
+        hostname = host or urlsplit(url).netloc.split('@')[-1]
         if not hostname:
             # HACK/FIXME: httpie-unixsocket's URLs have no hostname.
             hostname = 'localhost'
 
         # host:port => host_port
         hostname = hostname.replace(':', '_')
-        path = os.path.join(config_dir,
-                            SESSIONS_DIR_NAME,
-                            hostname,
-                            session_name + '.json')
-
+        path = (
+            config_dir / SESSIONS_DIR_NAME / hostname / f'{session_name}.json'
+        )
     session = Session(path)
     session.load()
-
-    kwargs = get_requests_kwargs(args, base_headers=session.headers)
-    if args.debug:
-        dump_request(kwargs)
-    session.update_headers(kwargs['headers'])
-
-    if args.auth_plugin:
-        session.auth = {
-            'type': args.auth_plugin.auth_type,
-            'raw_auth': args.auth_plugin.raw_auth,
-        }
-    elif session.auth:
-        kwargs['auth'] = session.auth
-
-    requests_session.cookies = session.cookies
-
-    try:
-        response = requests_session.request(**kwargs)
-    except Exception:
-        raise
-    else:
-        # Existing sessions with `read_only=True` don't get updated.
-        if session.is_new() or not read_only:
-            session.cookies = requests_session.cookies
-            session.save()
-        return response
+    return session
 
 
 class Session(BaseConfigDict):
     helpurl = 'https://httpie.org/doc#sessions'
     about = 'HTTPie session file'
 
-    def __init__(self, path, *args, **kwargs):
-        super(Session, self).__init__(*args, **kwargs)
-        self._path = path
+    def __init__(self, path: Union[str, Path]):
+        super().__init__(path=Path(path))
         self['headers'] = {}
         self['cookies'] = {}
         self['auth'] = {
@@ -88,38 +65,44 @@ class Session(BaseConfigDict):
             'password': None
         }
 
-    def _get_path(self):
-        return self._path
-
-    def update_headers(self, request_headers):
+    def update_headers(self, request_headers: RequestHeadersDict):
         """
         Update the session headers with the request ones while ignoring
         certain name prefixes.
 
-        :type request_headers: dict
-
         """
-        for name, value in request_headers.items():
+        headers = self.headers
+        for name, value in request_headers.copy().items():
 
             if value is None:
-                continue  # Ignore explicitely unset headers
+                continue  # Ignore explicitly unset headers
 
-            value = value.decode('utf8')
-            if name == 'User-Agent' and value.startswith('HTTPie/'):
+            if type(value) is not str:
+                value = value.decode()
+
+            if name.lower() == 'user-agent' and value.startswith('HTTPie/'):
+                continue
+
+            if name.lower() == 'cookie':
+                for cookie_name, morsel in SimpleCookie(value).items():
+                    self['cookies'][cookie_name] = {'value': morsel.value}
+                del request_headers[name]
                 continue
 
             for prefix in SESSION_IGNORED_HEADER_PREFIXES:
                 if name.lower().startswith(prefix.lower()):
                     break
            else:
-                self['headers'][name] = value
+                headers[name] = value
+
+        self['headers'] = dict(headers)
 
     @property
-    def headers(self):
-        return self['headers']
+    def headers(self) -> RequestHeadersDict:
+        return RequestHeadersDict(self['headers'])
 
     @property
-    def cookies(self):
+    def cookies(self) -> RequestsCookieJar:
         jar = RequestsCookieJar()
         for name, cookie_dict in self['cookies'].items():
             jar.set_cookie(create_cookie(
@@ -128,11 +111,8 @@ class Session(BaseConfigDict):
         return jar
 
     @cookies.setter
-    def cookies(self, jar):
-        """
-        :type jar: CookieJar
-        """
-        # http://docs.python.org/2/library/cookielib.html#cookie-objects
+    def cookies(self, jar: RequestsCookieJar):
+        # <https://docs.python.org/2/library/cookielib.html#cookie-objects>
         stored_attrs = ['value', 'path', 'secure', 'expires']
         self['cookies'] = {}
         for cookie in jar:
@@ -142,7 +122,7 @@ class Session(BaseConfigDict):
         }
 
     @property
-    def auth(self):
+    def auth(self) -> Optional[AuthBase]:
         auth = self.get('auth', None)
         if not auth or not auth['type']:
             return
@@ -161,7 +141,7 @@ class Session(BaseConfigDict):
                 }
             else:
                 if plugin.auth_parse:
-                    from httpie.input import parse_auth
+                    from .cli.argtypes import parse_auth
                     parsed = parse_auth(plugin.raw_auth)
                     credentials = {
                         'username': parsed.key,
@@ -171,6 +151,11 @@ class Session(BaseConfigDict):
         return plugin.get_auth(**credentials)
 
     @auth.setter
-    def auth(self, auth):
-        assert set(['type', 'raw_auth']) == set(auth.keys())
+    def auth(self, auth: dict):
+        assert {'type', 'raw_auth'} == auth.keys()
         self['auth'] = auth
+
+    def remove_cookies(self, names: Iterable[str]):
+        for name in names:
+            if name in self['cookies']:
+                del self['cookies'][name]
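A usage sketch of the new get_httpie_session() helper, which now only loads and returns the Session object instead of issuing the request itself; the config path and header value below are hypothetical.

from pathlib import Path

from httpie.sessions import get_httpie_session

session = get_httpie_session(
    config_dir=Path('~/.config/httpie').expanduser(),   # hypothetical location
    session_name='api',
    host=None,
    url='https://example.org/get',
)
session['headers']['Authorization'] = 'Bearer t0ken'     # made-up token
session.save()
print(session.headers)                                   # a RequestHeadersDict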
httpie/ssl.py (new file, 63)
@@ -0,0 +1,63 @@
+import ssl
+
+from requests.adapters import HTTPAdapter
+# noinspection PyPackageRequirements
+from urllib3.util.ssl_ import (
+    DEFAULT_CIPHERS, create_urllib3_context,
+    resolve_ssl_version,
+)
+
+
+DEFAULT_SSL_CIPHERS = DEFAULT_CIPHERS
+SSL_VERSION_ARG_MAPPING = {
+    'ssl2.3': 'PROTOCOL_SSLv23',
+    'ssl3': 'PROTOCOL_SSLv3',
+    'tls1': 'PROTOCOL_TLSv1',
+    'tls1.1': 'PROTOCOL_TLSv1_1',
+    'tls1.2': 'PROTOCOL_TLSv1_2',
+    'tls1.3': 'PROTOCOL_TLSv1_3',
+}
+AVAILABLE_SSL_VERSION_ARG_MAPPING = {
+    arg: getattr(ssl, constant_name)
+    for arg, constant_name in SSL_VERSION_ARG_MAPPING.items()
+    if hasattr(ssl, constant_name)
+}
+
+
+class HTTPieHTTPSAdapter(HTTPAdapter):
+    def __init__(
+        self,
+        verify: bool,
+        ssl_version: str = None,
+        ciphers: str = None,
+        **kwargs
+    ):
+        self._ssl_context = self._create_ssl_context(
+            verify=verify,
+            ssl_version=ssl_version,
+            ciphers=ciphers,
+        )
+        super().__init__(**kwargs)
+
+    def init_poolmanager(self, *args, **kwargs):
+        kwargs['ssl_context'] = self._ssl_context
+        return super().init_poolmanager(*args, **kwargs)
+
+    def proxy_manager_for(self, *args, **kwargs):
+        kwargs['ssl_context'] = self._ssl_context
+        return super().proxy_manager_for(*args, **kwargs)
+
+    @staticmethod
+    def _create_ssl_context(
+        verify: bool,
+        ssl_version: str = None,
+        ciphers: str = None,
+    ) -> 'ssl.SSLContext':
+        return create_urllib3_context(
+            ciphers=ciphers,
+            ssl_version=resolve_ssl_version(ssl_version),
+            # Since we are using a custom SSL context, we need to pass this
+            # here manually, even though it’s also passed to the connection
+            # in `super().cert_verify()`.
+            cert_reqs=ssl.CERT_REQUIRED if verify else ssl.CERT_NONE
+        )
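A sketch of mounting the new HTTPieHTTPSAdapter onto a plain requests session; the URL is an arbitrary example.

import requests

from httpie.ssl import AVAILABLE_SSL_VERSION_ARG_MAPPING, HTTPieHTTPSAdapter

session = requests.Session()
session.mount('https://', HTTPieHTTPSAdapter(
    verify=True,
    # Map the CLI-style argument ('tls1.2') to the ssl module constant, if available.
    ssl_version=AVAILABLE_SSL_VERSION_ARG_MAPPING.get('tls1.2'),
    ciphers=None,
))
response = session.get('https://example.org/')
print(response.status_code)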
httpie/status.py (new file, 40)
@@ -0,0 +1,40 @@
+from enum import IntEnum, unique
+
+
+@unique
+class ExitStatus(IntEnum):
+    """Program exit status code constants."""
+    SUCCESS = 0
+    ERROR = 1
+    ERROR_TIMEOUT = 2
+
+    # See --check-status
+    ERROR_HTTP_3XX = 3
+    ERROR_HTTP_4XX = 4
+    ERROR_HTTP_5XX = 5
+
+    ERROR_TOO_MANY_REDIRECTS = 6
+    PLUGIN_ERROR = 7
+    # 128+2 SIGINT
+    # <http://www.tldp.org/LDP/abs/html/exitcodes.html>
+    ERROR_CTRL_C = 130
+
+
+def http_status_to_exit_status(http_status: int, follow=False) -> ExitStatus:
+    """
+    Translate HTTP status code to exit status code.
+
+    (Relevant only when invoked with --check-status or --download.)
+
+    """
+    if 300 <= http_status <= 399 and not follow:
+        # Redirect
+        return ExitStatus.ERROR_HTTP_3XX
+    elif 400 <= http_status <= 499:
+        # Client Error
+        return ExitStatus.ERROR_HTTP_4XX
+    elif 500 <= http_status <= 599:
+        # Server Error
+        return ExitStatus.ERROR_HTTP_5XX
+    else:
+        return ExitStatus.SUCCESS
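The mapping is small enough to check directly:

from httpie.status import ExitStatus, http_status_to_exit_status

assert http_status_to_exit_status(200) is ExitStatus.SUCCESS
assert http_status_to_exit_status(301) is ExitStatus.ERROR_HTTP_3XX
assert http_status_to_exit_status(301, follow=True) is ExitStatus.SUCCESS
assert http_status_to_exit_status(404) is ExitStatus.ERROR_HTTP_4XX
assert int(http_status_to_exit_status(503)) == 5   # ERROR_HTTP_5XX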
httpie/uploads.py (new file, 138)
@@ -0,0 +1,138 @@
+import zlib
+from typing import Callable, IO, Iterable, Tuple, Union
+from urllib.parse import urlencode
+
+import requests
+from requests.utils import super_len
+from requests_toolbelt import MultipartEncoder
+
+from .cli.dicts import MultipartRequestDataDict, RequestDataDict
+
+
+class ChunkedUploadStream:
+    def __init__(self, stream: Iterable, callback: Callable):
+        self.callback = callback
+        self.stream = stream
+
+    def __iter__(self) -> Iterable[Union[str, bytes]]:
+        for chunk in self.stream:
+            self.callback(chunk)
+            yield chunk
+
+
+class ChunkedMultipartUploadStream:
+    chunk_size = 100 * 1024
+
+    def __init__(self, encoder: MultipartEncoder):
+        self.encoder = encoder
+
+    def __iter__(self) -> Iterable[Union[str, bytes]]:
+        while True:
+            chunk = self.encoder.read(self.chunk_size)
+            if not chunk:
+                break
+            yield chunk
+
+
+def prepare_request_body(
+    body: Union[str, bytes, IO, MultipartEncoder, RequestDataDict],
+    body_read_callback: Callable[[bytes], bytes],
+    content_length_header_value: int = None,
+    chunked=False,
+    offline=False,
+) -> Union[str, bytes, IO, MultipartEncoder, ChunkedUploadStream]:
+
+    is_file_like = hasattr(body, 'read')
+
+    if isinstance(body, RequestDataDict):
+        body = urlencode(body, doseq=True)
+
+    if offline:
+        if is_file_like:
+            return body.read()
+        return body
+
+    if not is_file_like:
+        if chunked:
+            body = ChunkedUploadStream(
+                # Pass the entire body as one chunk.
+                stream=(chunk.encode() for chunk in [body]),
+                callback=body_read_callback,
+            )
+    else:
+        # File-like object.
+
+        if not super_len(body):
+            # Zero-length -> assume stdin.
+            if content_length_header_value is None and not chunked:
+                #
+                # Read the whole stdin to determine `Content-Length`.
+                #
+                # TODO: Instead of opt-in --chunked, consider making
+                # `Transfer-Encoding: chunked` for STDIN opt-out via
+                # something like --no-chunked.
+                # This would be backwards-incompatible so wait until v3.0.0.
+                #
+                body = body.read()
+        else:
+            orig_read = body.read
+
+            def new_read(*args):
+                chunk = orig_read(*args)
+                body_read_callback(chunk)
+                return chunk
+
+            body.read = new_read
+
+        if chunked:
+            if isinstance(body, MultipartEncoder):
+                body = ChunkedMultipartUploadStream(
+                    encoder=body,
+                )
+            else:
+                body = ChunkedUploadStream(
+                    stream=body,
+                    callback=body_read_callback,
+                )
+
+    return body
+
+
+def get_multipart_data_and_content_type(
+    data: MultipartRequestDataDict,
+    boundary: str = None,
+    content_type: str = None,
+) -> Tuple[MultipartEncoder, str]:
+    encoder = MultipartEncoder(
+        fields=data.items(),
+        boundary=boundary,
+    )
+    if content_type:
+        content_type = content_type.strip()
+        if 'boundary=' not in content_type:
+            content_type = f'{content_type}; boundary={encoder.boundary_value}'
+    else:
+        content_type = encoder.content_type
+
+    data = encoder
+    return data, content_type
+
+
+def compress_request(
+    request: requests.PreparedRequest,
+    always: bool,
+):
+    deflater = zlib.compressobj()
+    if isinstance(request.body, str):
+        body_bytes = request.body.encode()
+    elif hasattr(request.body, 'read'):
+        body_bytes = request.body.read()
+    else:
+        body_bytes = request.body
+    deflated_data = deflater.compress(body_bytes)
+    deflated_data += deflater.flush()
+    is_economical = len(deflated_data) < len(body_bytes)
+    if is_economical or always:
+        request.body = deflated_data
+        request.headers['Content-Encoding'] = 'deflate'
+        request.headers['Content-Length'] = str(len(deflated_data))
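A sketch of the compression helper in isolation; the request below is constructed only to demonstrate the behaviour.

import requests

from httpie.uploads import compress_request

request = requests.Request(
    method='POST',
    url='https://example.org/post',   # arbitrary example URL
    data='hello ' * 1000,             # highly compressible body
).prepare()

compress_request(request, always=False)
# The body is replaced only when deflating actually saves bytes (or always=True).
print(request.headers.get('Content-Encoding'))   # expected: 'deflate'
print(request.headers['Content-Length'])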
httpie/utils.py (112)
@@ -1,36 +1,31 @@
-from __future__ import division
 import json
+import mimetypes
+import time
 from collections import OrderedDict
+from http.cookiejar import parse_ns_headers
+from pprint import pformat
+from typing import List, Optional, Tuple
+import re
+
+import requests.auth
+
+RE_COOKIE_SPLIT = re.compile(r', (?=[^ ;]+=)')
 
 
 def load_json_preserve_order(s):
     return json.loads(s, object_pairs_hook=OrderedDict)
 
 
-def repr_dict_nice(d):
-    def prepare_dict(d):
-        for k, v in d.items():
-            if isinstance(v, dict):
-                v = dict(prepare_dict(v))
-            elif isinstance(v, bytes):
-                v = v.decode('utf8')
-            elif not isinstance(v, (int, str)):
-                v = repr(v)
-            yield k, v
-    return json.dumps(
-        dict(prepare_dict(d)),
-        indent=4, sort_keys=True,
-    )
+def repr_dict(d: dict) -> str:
+    return pformat(d)
 
 
 def humanize_bytes(n, precision=2):
     # Author: Doug Latornell
     # Licence: MIT
-    # URL: http://code.activestate.com/recipes/577081/
+    # URL: https://code.activestate.com/recipes/577081/
     """Return a humanized string representation of a number of bytes.
 
-    Assumes `from __future__ import division`.
-
     >>> humanize_bytes(1)
     '1 B'
     >>> humanize_bytes(1024, precision=1)
@@ -66,4 +61,83 @@ def humanize_bytes(n, precision=2):
             break
 
     # noinspection PyUnboundLocalVariable
-    return '%.*f %s' % (precision, n / factor, suffix)
+    return f'{n / factor:.{precision}f} {suffix}'
+
+
+class ExplicitNullAuth(requests.auth.AuthBase):
+    """Forces requests to ignore the ``.netrc``.
+    <https://github.com/psf/requests/issues/2773#issuecomment-174312831>
+    """
+
+    def __call__(self, r):
+        return r
+
+
+def get_content_type(filename):
+    """
+    Return the content type for ``filename`` in format appropriate
+    for Content-Type headers, or ``None`` if the file type is unknown
+    to ``mimetypes``.
+
+    """
+    return mimetypes.guess_type(filename, strict=False)[0]
+
+
+def split_cookies(cookies):
+    """
+    When ``requests`` stores cookies in ``response.headers['Set-Cookie']``
+    it concatenates all of them through ``, ``.
+
+    This function splits cookies apart being careful to not to
+    split on ``, `` which may be part of cookie value.
+    """
+    if not cookies:
+        return []
+    return RE_COOKIE_SPLIT.split(cookies)
+
+
+def get_expired_cookies(
+    cookies: str,
+    now: float = None
+) -> List[dict]:
+
+    now = now or time.time()
+
+    def is_expired(expires: Optional[float]) -> bool:
+        return expires is not None and expires <= now
+
+    attr_sets: List[Tuple[str, str]] = parse_ns_headers(
+        split_cookies(cookies)
+    )
+
+    cookies = [
+        # The first attr name is the cookie name.
+        dict(attrs[1:], name=attrs[0][0])
+        for attrs in attr_sets
+    ]
+
+    _max_age_to_expires(cookies=cookies, now=now)
+
+    return [
+        {
+            'name': cookie['name'],
+            'path': cookie.get('path', '/')
+        }
+        for cookie in cookies
+        if is_expired(expires=cookie.get('expires'))
+    ]
+
+
+def _max_age_to_expires(cookies, now):
+    """
+    Translate `max-age` into `expires` for Requests to take it into account.
+
+    HACK/FIXME: <https://github.com/psf/requests/issues/5743>
+
+    """
+    for cookie in cookies:
+        if 'expires' in cookie:
+            continue
+        max_age = cookie.get('max-age')
+        if max_age and max_age.isdigit():
+            cookie['expires'] = now + float(max_age)
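A few quick checks of the new cookie helpers; the Set-Cookie string is made up for illustration.

from httpie.utils import get_expired_cookies, humanize_bytes, split_cookies

assert humanize_bytes(1) == '1 B'

set_cookie = 'a=1; Max-Age=0; Path=/, b=2; Path=/docs'
assert split_cookies(set_cookie) == ['a=1; Max-Age=0; Path=/', 'b=2; Path=/docs']
# `a` is expired via max-age=0, `b` carries no expiry at all.
assert get_expired_cookies(set_cookie) == [{'name': 'a', 'path': '/'}]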
@@ -1,9 +0,0 @@
-tox
-mock
-pytest
-pytest-cov
-pytest-httpbin>=0.0.6
-docutils
-wheel
-pycodestyle
-twine
setup.cfg (19)
@@ -1,19 +1,18 @@
+# Please keep all characters in this file in ASCII
+# distutils uses system's locale to interpret it and not everybody
+# uses UTF-8. See https://github.com/httpie/httpie/issues/1039
+# for an example
 [wheel]
-universal = 1
 
 
 [tool:pytest]
 # <https://docs.pytest.org/en/latest/customize.html>
-norecursedirs = tests/fixtures
+norecursedirs = tests/fixtures .*
+addopts = --tb=native --doctest-modules
 
 
-[pycodestyle]
-# <http://pycodestyle.pycqa.org/en/latest/intro.html#configuration>
-
-exclude = .git,.idea,__pycache__,build,dist,.tox,.pytest_cache,*.egg-info
-
-# <http://pycodestyle.pycqa.org/en/latest/intro.html#error-codes>
-# E241 - multiple spaces after ‘,’
+[flake8]
+# <https://flake8.pycqa.org/en/latest/user/error-codes.html>
 # E501 - line too long
 # W503 - line break before binary operator
-ignore = E241,E501,W503
+ignore = E501,W503
setup.py (93)
@@ -1,70 +1,61 @@
 # This is purely the result of trial and error.
 
 import sys
-import codecs
 
 from setuptools import setup, find_packages
-from setuptools.command.test import test as TestCommand
 
 import httpie
 
-
-class PyTest(TestCommand):
-    # `$ python setup.py test' simply installs minimal requirements
-    # and runs the tests with no fancy stuff like parallel execution.
-    def finalize_options(self):
-        TestCommand.finalize_options(self)
-        self.test_args = [
-            '--doctest-modules', '--verbose',
-            './httpie', './tests'
-        ]
-        self.test_suite = True
-
-    def run_tests(self):
-        import pytest
-        sys.exit(pytest.main(self.test_args))
-
-
+# Note: keep requirements here to ease distributions packaging
 tests_require = [
-    # Pytest needs to come last.
-    # https://bitbucket.org/pypa/setuptools/issue/196/
-    'pytest-httpbin',
     'pytest',
-    'mock',
+    'pytest-httpbin>=0.0.6',
+    'responses',
+]
+dev_require = [
+    *tests_require,
+    'flake8',
+    'flake8-comprehensions',
+    'flake8-deprecated',
+    'flake8-mutable',
+    'flake8-tuple',
+    'mdformat',
+    'pytest-cov',
+    'twine',
+    'wheel',
 ]
-
-
 install_requires = [
-    'requests>=2.21.0',
-    'Pygments>=2.3.1'
+    'defusedxml>=0.6.0',
+    'requests[socks]>=2.22.0',
+    'Pygments>=2.5.2',
+    'requests-toolbelt>=0.9.1',
+    'setuptools',
+]
+install_requires_win_only = [
+    'colorama>=0.2.4',
 ]
 
 
 # Conditional dependencies:
 
 # sdist
 if 'bdist_wheel' not in sys.argv:
-    try:
-        # noinspection PyUnresolvedReferences
-        import argparse
-    except ImportError:
-        install_requires.append('argparse>=1.2.1')
 
     if 'win32' in str(sys.platform).lower():
         # Terminal colors for Windows
-        install_requires.append('colorama>=0.2.4')
+        install_requires.extend(install_requires_win_only)
 
 
 # bdist_wheel
 extras_require = {
-    # http://wheel.readthedocs.io/en/latest/#defining-conditional-dependencies
-    'python_version == "3.0" or python_version == "3.1"': ['argparse>=1.2.1'],
-    ':sys_platform == "win32"': ['colorama>=0.2.4'],
+    'dev': dev_require,
+    'test': tests_require,
+    # https://wheel.readthedocs.io/en/latest/#defining-conditional-dependencies
+    ':sys_platform == "win32"': install_requires_win_only,
 }
 
 
 def long_description():
-    with codecs.open('README.rst', encoding='utf8') as f:
+    with open('README.md', encoding='utf-8') as f:
         return f.read()
@@ -73,32 +64,26 @@ setup(
     version=httpie.__version__,
     description=httpie.__doc__.strip(),
     long_description=long_description(),
-    url='http://httpie.org/',
-    download_url='https://github.com/jakubroztocil/httpie',
+    long_description_content_type='text/markdown',
+    url='https://httpie.org/',
+    download_url=f'https://github.com/httpie/httpie/archive/{httpie.__version__}.tar.gz',
     author=httpie.__author__,
     author_email='jakub@roztocil.co',
     license=httpie.__licence__,
-    packages=find_packages(),
+    packages=find_packages(include=['httpie', 'httpie.*']),
     entry_points={
         'console_scripts': [
             'http = httpie.__main__:main',
+            'https = httpie.__main__:main',
         ],
     },
+    python_requires='>=3.6',
     extras_require=extras_require,
     install_requires=install_requires,
-    tests_require=tests_require,
-    cmdclass={'test': PyTest},
     classifiers=[
         'Development Status :: 5 - Production/Stable',
         'Programming Language :: Python',
-        'Programming Language :: Python :: 2.7',
-        'Programming Language :: Python :: 3',
-        'Programming Language :: Python :: 3.1',
-        'Programming Language :: Python :: 3.2',
-        'Programming Language :: Python :: 3.3',
-        'Programming Language :: Python :: 3.4',
-        'Programming Language :: Python :: 3.5',
-        'Programming Language :: Python :: 3.6',
+        'Programming Language :: Python :: 3 :: Only',
         'Environment :: Console',
         'Intended Audience :: Developers',
         'Intended Audience :: System Administrators',
@@ -110,4 +95,10 @@ setup(
         'Topic :: Text Processing',
         'Topic :: Utilities'
     ],
+    project_urls={
+        'GitHub': 'https://github.com/httpie/httpie',
+        'Twitter': 'https://twitter.com/httpie',
+        'Documentation': 'https://httpie.org/docs',
+        'Online Demo': 'https://httpie.org/run',
+    },
 )
tests/README.md (new file, 3)
@@ -0,0 +1,3 @@
+# HTTPie Test Suite
+
+Please see [CONTRIBUTING](https://github.com/httpie/httpie/blob/master/CONTRIBUTING.md) for contribution and testing guidelines.
@@ -1,8 +0,0 @@
-HTTPie Test Suite
-=================
-
-
-Please see `CONTRIBUTING`_.
-
-
-.. _CONTRIBUTING: https://github.com/jakubroztocil/httpie/blob/master/CONTRIBUTING.rst
tests/__init__.py (new file, 0)
@@ -1,29 +1,31 @@
 -----BEGIN CERTIFICATE-----
-[… previous base64-encoded test certificate payload …]
+[… regenerated base64-encoded test certificate payload …]
 -----END CERTIFICATE-----
@@ -1,51 +1,51 @@
 -----BEGIN RSA PRIVATE KEY-----
-[… previous base64-encoded test RSA private key payload …]
+[… regenerated base64-encoded test RSA private key payload …]
 -----END RSA PRIVATE KEY-----
@ -1,87 +1,82 @@
|
|||||||
Bag Attributes
|
|
||||||
localKeyID: 93 0C 3E A7 82 62 36 37 5E 73 9B 05 C4 98 DF DC 04 5C B4 C9
|
|
||||||
subject=/C=AU/ST=Some-State/O=Internet Widgits Pty Ltd
|
|
||||||
issuer=/C=US/ST=CA/L=SF/O=HTTPie/CN=HTTPie
|
|
||||||
-----BEGIN CERTIFICATE-----
|
-----BEGIN CERTIFICATE-----
|
||||||
MIIFAjCCAuoCAQEwDQYJKoZIhvcNAQEFBQAwSTELMAkGA1UEBhMCVVMxCzAJBgNV
|
MIIFazCCA1OgAwIBAgIUNMIIO7cG2Lkx+qo0Z43k4+voT4swDQYJKoZIhvcNAQEN
|
||||||
BAgTAkNBMQswCQYDVQQHEwJTRjEPMA0GA1UEChMGSFRUUGllMQ8wDQYDVQQDEwZI
|
BQAwRTELMAkGA1UEBhMCQVUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoM
|
||||||
VFRQaWUwHhcNMTUwMTIzMjIyNTM2WhcNMTYwMTIzMjIyNTM2WjBFMQswCQYDVQQG
|
GEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZDAeFw0yMDA3MDQxMDE5NDBaFw0yMTA3
|
||||||
EwJBVTETMBEGA1UECBMKU29tZS1TdGF0ZTEhMB8GA1UEChMYSW50ZXJuZXQgV2lk
|
MDQxMDE5NDBaMEUxCzAJBgNVBAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEw
|
||||||
Z2l0cyBQdHkgTHRkMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAu6aP
|
HwYDVQQKDBhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGQwggIiMA0GCSqGSIb3DQEB
|
||||||
iR3TpPESWKTS969fxNRoSxl8P4osjhIaUuwblFNZc8/Rn5mCMKmD506JrFV8fktQ
|
AQUAA4ICDwAwggIKAoICAQCpnv/bnF8qkRoFu2M/+btxR5kRDAMqvbBivG2F4Uop
|
||||||
M6JRL7QuDC9vCw0ycr2HCV1sYX/ICgPCXYgmyigH535lb9V9hHjAgy60QgJBgSE7
|
37mxwW0YJFOiMtzCN76w8JgEZrTeH3zG0fNNdIKIKjjwf+8j3KSbQi60oDOelkL5
|
||||||
lmMYaPpX6OKbT7UlzSwYtfHomXEBFA18Rlc9GwMXH8Et0RQiWIi7S6vpDRpZFxRi
|
34Yt1o+lW9ricKQCsVl/XkYHh4RPnzNE8XRZmcZtL/6+1vVjTlxe6iW1Q0tzU2l3
|
||||||
gtXMceK1X8kut2ODv9B5ZwiuXh7+AMSCUkO58bXJTewQI6JadczU0JyVVjJVTny3
|
RHPhHbmsVclwFOd/eE+D6WB5tb6SVvhDyOfLdZwxWWpgARx6aboR/+/CKazE0wt4
|
||||||
ta0x4SyXn8/ibylOalIsmTd/CAXJRfhV0Umb34LwaWrZ2nc+OrJwYLvOp1cG/zYl
|
IJtTpe3M7IHt3i/8EkCZyFNdV+pQ8qz3PIOKBQws8aCpuQ+IHnvq4wSiyUV6eEaU
|
||||||
GHkFCViRfuwwSkL4iKjVeHx2o0DxJ4bc2Z7k1ig2fTJK3gEbMz3y+YTlVNPo108H
|
bfOguWHGKlyVuN9AIiNl8A4xlU6QHKwzisTuRywschlvT8LaK1WGk+BNGBcidogh
|
||||||
JI77DPbkBUqLPeF7PMaN/zDqmdH0yNCW+WiHZlf6h7kZdVH3feAhTfDZbpSxhpRo
|
yp73KrDpiUd+Udv3TPDg5Q7pE6LT+sZxFrCidvZEZ1YdBDfXUhOaCTmtlFFYJiMT
|
||||||
Ja84OAVCNqAuNjnZs8pMIW/iRixwP8p84At7VsS4yQQFTCjN22UhPP0PrqY3ngEj
|
2+FnPQCfFv53D79llGaovE7t6KBf+qYRpIkSDoYhSSZ5GhFGTVsgQERYG39MSnbz
|
||||||
1lbfhHC1FNZvCMxrkUAUQbeYRqLrIwB4KdDMkRJixv5Vr89NO08QtnLwQduusVkc
|
4b1CQtg7Q8e9DJq8d/ChKUCfymJ+HSQIXEMu1FXrlEbEoyGvRyvA5cnUHjvY7GPY
|
||||||
4Zg9HXtJTKjgQTHxHtn+OrTbpx0ogaUuYpVcQOsBT3b0EyV2z6pZiH6HK1r5Xwaq
|
2HGHHaTFhiB9qRQhD3TdK4G6iIHF9tuxi2V+7waYp7q9N8KHfZRhIZbGSWQlaM9f
|
||||||
0+nvFwpCHe58PlaI3Geihxejkv+85ZgDqXSGt7ECAwEAATANBgkqhkiG9w0BAQUF
|
njAUy8NAX6W4cL/ZpDf8PpVeMhLolvO8D8qCNZyWD+x5HtqDfqFkFPvr2vOSxZ+v
|
||||||
AAOCAgEAQgIicN/uWtaYYBVEVeMGMdxzpp2pv3AaCfQMoVGaQu9VLydK/GBlYOqj
|
6wIDAQABo1MwUTAdBgNVHQ4EFgQUkJwSpoGIxHUaArfJrX602HdHUWcwHwYDVR0j
|
||||||
AGPjdmQ7p4ISlduXqslu646+RxZ+H6TSSj0NTF4FyR8LPckRPiePNlsGp3u6ffix
|
BBgwFoAUkJwSpoGIxHUaArfJrX602HdHUWcwDwYDVR0TAQH/BAUwAwEB/zANBgkq
|
||||||
PX0554Ks+JYyFJ7qyMhsilqCYtw8prX9lj8fjzbWWXlgJFH/SRZw4xdcJ1yYA9sQ
|
hkiG9w0BAQ0FAAOCAgEAqDuULnNBNJsydUXDyGTzCrXjJuqhuOhi1eALyCLwuT+F
|
||||||
fBHxveCWFS1ibX5+QGy/+7jPb99MP38HEIt9vTMW5aiwXeIbipXohWqcJhxL9GXz
|
+/l7hOgdKWn4KJF4vcfNObcWh7sJ+iIcXEOYKaL3dPW9nqj+oCoPBKNAX+u3ZKXy
|
||||||
KPsrt9a++rLjqsquhZL4uCksGmI4Gv0FQQswgSyHSSQzagee5VRB68WYSAyYdvzi
|
I4O5wVAd3X0beh1ba69nOfdn9PMlVEB80TzTda0My9+tI5SD84OXUc7AWQXnh5Sb
|
||||||
YCfkNcbQtOOQWGx4rsEdENViPs1GEZkWJJ1h9pmWzZl0U9c3cnABffK7o9v6ap2F
|
tHkul7cKcBA7/phnlC83qa6WoMlmNfqo8s2u+quDkhshKdrLFGGBI17gUQH3GbHN
|
||||||
NrnU5H/7jLuBiUJFzqwkgAjANLRZ6hLj6h/grcnIIThJwg6KaXvpEh4UkHuqHYBF
|
WBymHi/BCCIKYJB9+vt+M5L5C8FtNCMrCwTGtIOgC9IMre4wF2gODbjuRtkO2w6k
|
||||||
Fq1BWZIWU25ASggEVIsCPXC2+I1oGhxK1DN/J+wIht9MBWWlQWVMZAQsBkszNZrh
|
sXOtKweCdgMd2H3SwE4txEU2hUHE1IYPYnG1fg0YwYfKfbTLZQtn7xgEK93+nkp8
|
||||||
nzdfMoQZTG5bT4Bf0bI5LmPaY0xBxXA1f4TLuqrEAziOjRX3vIQV4i33nZZJvPcC
|
ufnnHgUxd//+pFPkbEOTnShuepl7g45qOBGUX4fBh78EVeL7NIZ9F8dHGsawD/CT
|
||||||
mCoyhAUpTJm+OI90ePll+vBO1ENAx7EMHqNe6eCChZ/9DUsVxxtaorVq1l0xWons
|
/tATlH9gQ+JRvXCNCKO8jNgeu3v2gVw+haXP1d4F7NysVIr4A5LiFufJk5Zyizcm
|
||||||
ynOCgx46hGE12/oiRIKq/wGMpv6ClfJhW1N5nJahDqoIMEvnNaQ=
|
WyjgfI99CnEwvqzv4yMQCoHAOK3awhH7uR+QHhCpG9D91PlzdJu7yP7O7zQaKobg
|
||||||
|
YTqMoMkYr63WbMrH21Tokoc/6CBPAAp3g8rC/E024SquJE7OUG0If5JkvlfJU5EP
|
||||||
|
K+e7hFNoD4uc+0cgAccpEb9hCc0oPfC+3WM5poVBKSnukfs4KyqcVIt4ZaNoYic=
|
||||||
-----END CERTIFICATE-----
|
-----END CERTIFICATE-----
|
||||||
Bag Attributes
|
|
||||||
localKeyID: 93 0C 3E A7 82 62 36 37 5E 73 9B 05 C4 98 DF DC 04 5C B4 C9
|
|
||||||
Key Attributes: <No Attributes>
|
|
||||||
-----BEGIN RSA PRIVATE KEY-----
|
-----BEGIN RSA PRIVATE KEY-----
|
||||||
MIIJKAIBAAKCAgEAu6aPiR3TpPESWKTS969fxNRoSxl8P4osjhIaUuwblFNZc8/R
|
MIIJKQIBAAKCAgEAqZ7/25xfKpEaBbtjP/m7cUeZEQwDKr2wYrxtheFKKd+5scFt
|
||||||
-n5mCMKmD506JrFV8fktQM6JRL7QuDC9vCw0ycr2HCV1sYX/ICgPCXYgmyigH535l
-b9V9hHjAgy60QgJBgSE7lmMYaPpX6OKbT7UlzSwYtfHomXEBFA18Rlc9GwMXH8Et
-0RQiWIi7S6vpDRpZFxRigtXMceK1X8kut2ODv9B5ZwiuXh7+AMSCUkO58bXJTewQ
-I6JadczU0JyVVjJVTny3ta0x4SyXn8/ibylOalIsmTd/CAXJRfhV0Umb34LwaWrZ
-2nc+OrJwYLvOp1cG/zYlGHkFCViRfuwwSkL4iKjVeHx2o0DxJ4bc2Z7k1ig2fTJK
-3gEbMz3y+YTlVNPo108HJI77DPbkBUqLPeF7PMaN/zDqmdH0yNCW+WiHZlf6h7kZ
-dVH3feAhTfDZbpSxhpRoJa84OAVCNqAuNjnZs8pMIW/iRixwP8p84At7VsS4yQQF
-TCjN22UhPP0PrqY3ngEj1lbfhHC1FNZvCMxrkUAUQbeYRqLrIwB4KdDMkRJixv5V
-r89NO08QtnLwQduusVkc4Zg9HXtJTKjgQTHxHtn+OrTbpx0ogaUuYpVcQOsBT3b0
-EyV2z6pZiH6HK1r5Xwaq0+nvFwpCHe58PlaI3Geihxejkv+85ZgDqXSGt7ECAwEA
-AQKCAgBOY1DYlZYg8/eXAhuDDkayYYzDuny1ylG8c4F9nFYVCxB2GZ1Wz3icPWP1
-j1BhpkBgPbPeLfM+O0V1H6eCdVvapKOxXM52mDuHO3TJP6P8lOZgZOOY6RUK7qp0
-4mC4plqYx7oto23CBLoOdgMtM937rG0SLGDfIF6z8sI0XCMRkqPpRviNu5xxYYTk
-IoczSwtmYcSZJRjHhk4AGnmicDbMPRlJ2k2E0euHhI9wMAyQFUFnhLJlQGALj6pj
-DtYvcM1EAUN46EXK66bXQq8zgozYS0WIJ6+wOUKQMSIgUGCF6Rvm3ZTt9xwOxxW8
-wxebvfYVTJgIdh2Nfusgmye9Debl73f+k9/O4RsvYc5J5w2n4IxKqQrfCZrZqevZ
-s+KvARkuQbXrHPanvEd8MPrRZ6FOAdiZYAbB9OvzuKCbEkgag8GPjMMAvrjT49N2
-qp9gwGgnzczQYn+vLblJuRzofcblvLE+sxKKDE8qrfcOjN1murZP7714y5E3NmEZ
-NB2NTHveTflYI1HJ1tznI1C40GdBYH4GwT/0he53rBcjNaPhyP7j3cTR1doRfZap
-2oz8KE/Sij3Zb6b8r7hi+Lcwpa9txZftro7XNOJIX7ZT5B4KMiXowtCHbkMMnL6k
-48tRBpyX20MqDFezBRCK7lfGhU1Coms8UcDHoFXLuGY/sAYEcQKCAQEA9D9/PD1t
-e90haG6nLl4LKP5wH2wB2BK1RRBERqOVqwSmdSgn3/+GkpeYWKdhN2jyYn6qnpJQ
-hXXYGtHAAHuw0dGXnOmgcsyZSlAWPzpMYRYrSh3ds8JVJdV2d58yS0ty3Ae3W6aW
-p4SRuhf8yIMgOmE+TARCU1rJdke9jIIl2TQmnpJahlsZeGLEmEXE99EhB5VoshRJ
-hLXNn3xTtkQz3tNR0rMAtXI6SIMB00FFEG1+jClza6PYriT9dkORI5LSVqXDEpxR
-C41PvYMKTAloWd0hZ2gdfwAcJScoAv75L10uR7O1IeQI+Et5h2tj4a/OfzILa0d5
-BYMmVsTa3NZXLQKCAQEAxK3uJKmoN2uswJQSKpX4WziVBgbaZdpCNnAWhfJZYIcP
-zlIuv9qOc/DIPiy9Sa9XtITSkcONAwRowtI783AtXeAelyo3W7A2aLIfBBZAXDzJ
-8KMc9xMDPebvUhlPSzg4bNwvduukAnktlzCjrRWPXRoSfloSpFkFPP4GwTdVcf17
-1mkki6rK4rbHmIoCITlZkNbUBCcu20ivK6N3pvH1wN123bxnF7lwvB5qizdFO5P7
-xRVIoCdCXQ0+WK2ZokCa/r44rcp4ffgrLoO/WRlo4yERIa9NwaucIrXmotKX8kYc
-YYpFzrGs72DljS7TBZCOqek5fNQBNK79BA2hNcJ1FQKCAQBC+M44hFdq6T1p1z18
-F0lUGkBAPWtcBfUyVL2D6QL2+7Vw1mvonbYWp/6cAHlFqj8cBsNd65ysm51/7ReK
-il/3iFLcMatPDw7RM5iGCcQ7ssp37iyGR7j1QMzVDA/MWYnLD0qVlN4mXNFgh4dG
-q73AhD2CtoBBPtmS1yUATAd4wTX9sP+la4FWYy6o2iiiEvPNkog8nBd0ji0tl/eU
-OKtIZAVBkteU6RdWHqX3eSQo1v0mDY+aajjVt0rQjMJVUMLgA1+z0KzgUAUXX8EJ
-DGNSkLHCGuhLlIojHdN4ztUgyZoRCxOVkWNsQbW3Dhk7HuuuMNi0t8pVWpq+nAev
-Gg6ZAoIBAQC0mMk9nRO7oAGG6/Aqbn8YtEISwKQ2Nk3qUs47vKdZPWvEFi6bOILp
-70TP4qEFUh6EwhngguGuzZOsoQMvq+fcdXlhcQBYDtxHEpfsVspOZ/s+HWjxbuHh
-K3bBuj/XYA5f12c2GXYGV2MHm0AQJOX5pYEpyGepxZxLvy5QqRCqlQnrfaxzGycl
-OpTYepEuFM0rdDhGf/xEmt9OgNHT2AXDTRhizycS39Kmyn8myl+mL2JWPA7uEF6d
-txVytCWImS45kE3XNz2g3go4sf04QV7QgIKMnb4Wgg/ix4i6JgokC0DwR9mFzBxx
-ylW+aCqYx35YgrGo77sTt0LZP/KxvJdpAoIBAF7YfhR1wFbW2L8sJ4gAbjPUWOMu
-JUfE4FhdLcSdqCo+N8uN0qawJxXltBKfjeeoH0CDh9Yv0qqalVdSOKS9BPAa1zJc
-o2kBcT8AVwoPS5oxa9eDT+7iHPMF4BErB2IGv3yYwpjqSZBJ9TsTu1B6iTf5hOL5
-9pqcv/LjfcwtWu2XMCVoZj2Q8iYv55l3jJ1ByF/UDVezWajE69avvJkQZrMZmuBw
-UuHelP/7anRyyelh7RkndZpPCExGmuO7pd5aG25/mBs0i34R1PElAtt8AN36f5Tk
-1GxIltTNtLk4Mivwp9aZ1vf9s5FAhgPDvfGV5yFoKYmA/65ZlrKx0zlFNng=
+GCRTojLcwje+sPCYBGa03h98xtHzTXSCiCo48H/vI9ykm0IutKAznpZC+d+GLdaP
+pVva4nCkArFZf15GB4eET58zRPF0WZnGbS/+vtb1Y05cXuoltUNLc1Npd0Rz4R25
+rFXJcBTnf3hPg+lgebW+klb4Q8jny3WcMVlqYAEcemm6Ef/vwimsxNMLeCCbU6Xt
+zOyB7d4v/BJAmchTXVfqUPKs9zyDigUMLPGgqbkPiB576uMEoslFenhGlG3zoLlh
+xipclbjfQCIjZfAOMZVOkBysM4rE7kcsLHIZb0/C2itVhpPgTRgXInaIIcqe9yqw
+6YlHflHb90zw4OUO6ROi0/rGcRawonb2RGdWHQQ311ITmgk5rZRRWCYjE9vhZz0A
+nxb+dw+/ZZRmqLxO7eigX/qmEaSJEg6GIUkmeRoRRk1bIEBEWBt/TEp28+G9QkLY
+O0PHvQyavHfwoSlAn8pifh0kCFxDLtRV65RGxKMhr0crwOXJ1B472Oxj2Nhxhx2k
+xYYgfakUIQ903SuBuoiBxfbbsYtlfu8GmKe6vTfCh32UYSGWxklkJWjPX54wFMvD
+QF+luHC/2aQ3/D6VXjIS6JbzvA/KgjWclg/seR7ag36hZBT769rzksWfr+sCAwEA
+AQKCAgBmZ1W0si1KN5vsRftfjle5xi4E+qmWzjqFAZllsGPj7+veAxbn8laDoA1j
+O+BmVnqQfalISN498lbfNi3wIv2JRNONZRIDoesspWNEpRb+YBJT7it++3ukJbj+
+3y9XFAVXWlto7oY3Y0aJKauAE+/KK2CueYqOyvHFA0Gz+HG9zZfgGuATyR76CcTR
+UkM/MlBKao0JMHRmCA7Y6MJJkOAF4eXdiaMKZufK4vopQfi0p4re71gn1cmDYBa8
+KhDSRvz9Z6xQ/pGqGeCYHQACykXi8ZUM6sqJPlF4LedCTwbdaZwiNolu5/hJc/lk
+cLfKPSl0id2KZ6UW4PqPmGx00NXFP/XcCxzzht8ejrI1GY9LXR6fKpmoYZvUoXba
+SK58l+OcAaxJ7JoTCvH2adas5mhNGyHTTghceNlFPuT+LC7nNq6rJD0QLouDQMr5
+0my2lJtDiafa+Z3aGt759vkTT7k4wnfWNkjZJDIVf6UkAoMFtN5nOgR36OaDLegA
+7udascC3hKRUi2BIlc713hl2dlcPVMcCQArpvbwgwPFXiZO9PW+Qc7IWogHqWNWY
+Ms9JsDcAE5Q5PRlAA8QSveSyl3QNJpeHT9PVx159a28E8xEWCs9nfpI/jXfYxFnr
+dfS7gn8XW1WNUJvtHsKIhdSRD/4ks6VRPm6KMskR+j+zpTbmcQKCAQEA3CvDiT/E
+oD2VK9rE0KNDZBljED2p7IVE+zED5olGPUGC3F+WiEl9ldd6DKL6K0Xv/zAEv7Nt
+hHJ4m3B8siOQf2wzrX6JTvqDhBnrYjsD3VU7Zpys4ZjMOAp/aIM124ZRDECe2do3
+yzfV+oR0qw9KmyywjMwPa/8LL9d+kwYSQX6Y2hy+5TquDghKCmQzBw0iCDlmWfNP
+jqfztSc1oBPcij+X98h3EI3Ai7R+hlolWlowXy0qBY8qCWegbguRDFkDhTXDCPwW
+RMiQobI3xWfhZybSohx42/HUYMi5Uis++CV3XeE/aRdLw/O3gHTz5n9Z3v0i0Xnd
+KIWxpCKzLzLAVwKCAQEAxTlZHVlNaVz8fsSajAyq3n4LnOxGEwhYspzY7U2tHnbr
+U1QXTlvGN97u9hMdHgvvPu7OULfeJM0EPNBdQC2B2Y2vkAZBcdw2cgXdzVksv+gO
+//ryo37xBZXY46prGyPZCrfrrBXHNOHlxY1AklQUu8PnNKU+Z02hirMtY6pm/WyI
+2fbUJRqQu3nTMiuqFeee+5vaKbWXPRWKjpF/KZxoA4YSymGhG+fVIJVKxWjz1ns/
+0Kkx/a4D3xWZO+vY9LE24PZzygUfr3/ZsCe8N+UpvZ60h7eJT9DJB1ETgqPFL8zr
+EhGxoNDLRpm0b1JELAuclCHuHdqQ/uTJB2DjSFpAjQKCAQEAxmNU3R4toan7+Tk2
+cT07oz3Q6rh1nd70KlefSSLWvKmELeif7owx8kvn+Oz9+PIa8FmnXcli3J59GKsC
+YU30jSzFYAaN2TGYQfdNBwVgVRbQ4IQ6r0kMc07aQSVB6V4dN6oeuPSNo7rbP9IM
+gnrT4gEh0KyrFMgKn4BQ2E/3MTbOqnKOfGUkoxZLCRQCes8VpE18cX7xZ/zkd44u
+HuDmr1fgKnBjAPKJ1hi8jXk7ATAVOB2tKLc4zKKoh6A6geLPbj/kTvs/YZlL4beB
+04noLBdqYpK/QIimstMLUgQPyG+SIHCvv5UzOw0ng0Ne5opIQ8rajeB+LF5TlC+E
+P/o+HwKCAQAurZcI2jT3JfngqvmFAg6C4EQxXL5tDMGpbHPvHj5GApFJxJJLim8M
+lCfsd7Ohg+OY+n48HnhmL1u8ZPhdEygzbFRL+x8MKrl8HSVUz7FGrk62iRdaWNYE
+o2WU5KW6464f2k3eCb1/J6PxMLBCscHCeuhCzoVJf9cm86dfeloryr9NDx1Attvg
+c0HoEuuLialYFZf53S+xVmLXwVneaFU52EakPZ0a9LC9qHfs5x0m+z6sTQ824jOq
+XftJclWD/FlnvwzCmJnaOKE2DwF+HS/W4DQMFwVZramWoLrEZaxq1s4gFa37yM8D
+o6dP3aGi5xClAq7PxAYjPdTSeTzxx+KVAoIBAQDGwk1/sJW99Oif+7RXvV99l+BL
+1R0BI1Dgc+aXkXSX4OeWJdLdiGLztrJ/lEzesKEdVHmG+wamexaxWzYgUeKklcAA
+IPrEawh3qB9gmlWei4BrK+e0cGjPZwq5bQi7gkpsMdxlHYkCmO12DzZ7/4CaGqET
++Az0Xa7wjlRbSv62HvKbCm1yMizs8l9k3E8vMo9vU1soyEvR3r/aHzo7KyiXJaio
+ioppLcx/FVQCkaFQ1/H4dBZCSxviJxQmnOWlTkJT1mH44GLQnv21UsEWUrpz13VK
+8Dp0zWwNtSKoEQ6YJYl1Nwt04OhUrxG5fStSOpRiQ2r8bUAM0d4qDSjV92Yf
 -----END RSA PRIVATE KEY-----
@@ -1,6 +1,9 @@
+import socket
 import pytest
 from pytest_httpbin import certs
 
+from .utils import HTTPBIN_WITH_CHUNKED_SUPPORT_DOMAIN, HTTPBIN_WITH_CHUNKED_SUPPORT
+
 
 @pytest.fixture(scope='function', autouse=True)
 def httpbin_add_ca_bundle(monkeypatch):
@@ -22,3 +25,19 @@ def httpbin_secure_untrusted(monkeypatch, httpbin_secure):
     """
     monkeypatch.delenv('REQUESTS_CA_BUNDLE')
     return httpbin_secure
+
+
+@pytest.fixture(scope='session')
+def _httpbin_with_chunked_support_available():
+    try:
+        socket.gethostbyname(HTTPBIN_WITH_CHUNKED_SUPPORT_DOMAIN)
+        return True
+    except OSError:
+        return False
+
+
+@pytest.fixture(scope='function')
+def httpbin_with_chunked_support(_httpbin_with_chunked_support_available):
+    if _httpbin_with_chunked_support_available:
+        return HTTPBIN_WITH_CHUNKED_SUPPORT
+    pytest.skip(f'{HTTPBIN_WITH_CHUNKED_SUPPORT_DOMAIN} not resolvable')
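The two hunks above (apparently httpie's tests/conftest.py) add a session-scoped probe that resolves the chunked-capable httpbin host once per run, and a function-scoped fixture that either hands that host to the test or skips it when DNS resolution fails. A minimal sketch of a test consuming the new fixture; the test name, the assertion, and the assumption that the fixture value is a URL string are illustrative and not part of this diff:

# Hypothetical test module; it relies only on the fixture defined above
# being collected from conftest.py.
def test_runs_only_when_remote_httpbin_resolves(httpbin_with_chunked_support):
    # If the host did not resolve, pytest.skip() has already skipped this
    # test; otherwise the fixture returns the remote httpbin address
    # (assumed here to be an http:// URL).
    assert httpbin_with_chunked_support.startswith('http')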
6 tests/fixtures/.editorconfig vendored Normal file
@@ -0,0 +1,6 @@
+# https://editorconfig.org
+
+[{*.txt, *.json}]
+trim_trailing_whitespace = false
+insert_final_newline = false
+
38 tests/fixtures/__init__.py vendored
@@ -1,6 +1,7 @@
 """Test data"""
-from os import path
-import codecs
+from pathlib import Path
+
+from httpie.constants import UTF8
 
 
 def patharg(path):
@@ -9,32 +10,27 @@ def patharg(path):
     even in Windows paths.
 
     """
-    return path.replace('\\', '\\\\\\')
+    return str(path).replace('\\', '\\\\\\')
 
 
-FIXTURES_ROOT = path.join(path.abspath(path.dirname(__file__)))
-FILE_PATH = path.join(FIXTURES_ROOT, 'test.txt')
-JSON_FILE_PATH = path.join(FIXTURES_ROOT, 'test.json')
-BIN_FILE_PATH = path.join(FIXTURES_ROOT, 'test.bin')
+FIXTURES_ROOT = Path(__file__).parent
+FILE_PATH = FIXTURES_ROOT / 'test.txt'
+JSON_FILE_PATH = FIXTURES_ROOT / 'test.json'
+BIN_FILE_PATH = FIXTURES_ROOT / 'test.bin'
+XML_FILES_PATH = FIXTURES_ROOT / 'xmldata'
+XML_FILES_VALID = list((XML_FILES_PATH / 'valid').glob('*_raw.xml'))
+XML_FILES_INVALID = list((XML_FILES_PATH / 'invalid').glob('*.xml'))
 
 FILE_PATH_ARG = patharg(FILE_PATH)
 BIN_FILE_PATH_ARG = patharg(BIN_FILE_PATH)
 JSON_FILE_PATH_ARG = patharg(JSON_FILE_PATH)
 
-with codecs.open(FILE_PATH, encoding='utf8') as f:
-    # Strip because we don't want new lines in the data so that we can
-    # easily count occurrences also when embedded in JSON (where the new
-    # line would be escaped).
-    FILE_CONTENT = f.read().strip()
-
-
-with codecs.open(JSON_FILE_PATH, encoding='utf8') as f:
-    JSON_FILE_CONTENT = f.read()
-
-
-with open(BIN_FILE_PATH, 'rb') as f:
-    BIN_FILE_CONTENT = f.read()
+# Strip because we don't want new lines in the data so that we can
+# easily count occurrences also when embedded in JSON (where the new
+# line would be escaped).
+FILE_CONTENT = FILE_PATH.read_text(encoding=UTF8).strip()
+
+JSON_FILE_CONTENT = JSON_FILE_PATH.read_text(encoding=UTF8)
+BIN_FILE_CONTENT = BIN_FILE_PATH.read_bytes()
 
 UNICODE = FILE_CONTENT
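The rewritten fixtures module now collects the XML test files with pathlib globbing, so XML_FILES_VALID and XML_FILES_INVALID are lists of Path objects. A sketch of how such lists can drive a parametrized test; the import path and the property being checked (every *_raw.xml in this changeset ships with a *_formatted.xml sibling) are assumptions for illustration, not code from this diff:

import pytest

from fixtures import XML_FILES_VALID  # assumes the tests/ package layout used above


@pytest.mark.parametrize('raw_file', XML_FILES_VALID)
def test_each_raw_xml_fixture_has_a_formatted_counterpart(raw_file):
    # XML_FILES_VALID globs '*_raw.xml'; the expected pretty-printed output
    # is stored next to it under the '_formatted' suffix.
    formatted = raw_file.with_name(raw_file.name.replace('_raw', '_formatted'))
    assert formatted.exists()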
2 tests/fixtures/test.json vendored
@@ -1,4 +1,4 @@
 {
     "name": "Jakub Roztočil",
     "unicode": "χρυσαφὶ 太陽 เลิศ ♜♞♝♛♚♝♞♜ оживлённым तान्यहानि 有朋"
 }
2 tests/fixtures/test.txt vendored
@@ -1 +1 @@
 [one line of UTF8-encoded unicode text] χρυσαφὶ 太陽 เลิศ ♜♞♝♛♚♝♞♜ оживлённым तान्यहानि 有朋 ஸ்றீனிவாஸ ٱلرَّحْمـَبنِ
5 tests/fixtures/xmldata/invalid/cyclic.xml vendored Normal file
@@ -0,0 +1,5 @@
+<!DOCTYPE xmlbomb [
+<!ENTITY a "123 &b;" >
+<!ENTITY b "&a;">
+]>
+<bomb>&a;</bomb>
4 tests/fixtures/xmldata/invalid/external.xml vendored Normal file
@@ -0,0 +1,4 @@
+<!DOCTYPE external [
+<!ENTITY ee SYSTEM "http://www.w3schools.com/xml/note.xml">
+]>
+<root>&ee;</root>
5 tests/fixtures/xmldata/invalid/external_file.xml vendored Normal file
@@ -0,0 +1,5 @@
+<!DOCTYPE external [
+<!ENTITY ee SYSTEM "file:///PATH/TO/xmltestdata/simple.xml">
+]>
+<root>&ee;</root>
+
1 tests/fixtures/xmldata/invalid/not-xml.xml vendored Normal file
@@ -0,0 +1 @@
+some string
4 tests/fixtures/xmldata/invalid/quadratic.xml vendored Normal file
File diff suppressed because one or more lines are too long
20 tests/fixtures/xmldata/invalid/xalan_exec.xsl vendored Normal file
@@ -0,0 +1,20 @@
+<!-- Tested with xalan-j_2_7_1-bin.zip, Xerces-J-bin.2.11.0.tar.gz on
+OpenJDK 1.7.0_15
+
+$ LC_ALL=C java -cp xalan.jar:serializer.jar:xercesImpl.jar:xml-apis.jar \
+org.apache.xalan.xslt.Process -in simple.xml -xsl xalan_exec.xsl
+-->
+<xsl:stylesheet version="1.0"
+xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
+xmlns:rt="http://xml.apache.org/xalan/java/java.lang.Runtime"
+xmlns:ob="http://xml.apache.org/xalan/java/java.lang.Object"
+exclude-result-prefixes="rt ob">
+<xsl:template match="/">
+<xsl:variable name="runtimeObject" select="rt:getRuntime()"/>
+<xsl:variable name="command"
+select="rt:exec($runtimeObject, '/usr/bin/notify-send SomethingBadHappensHere')"/>
+<xsl:variable name="commandAsString" select="ob:toString($command)"/>
+<xsl:value-of select="$commandAsString"/>
+</xsl:template>
+</xsl:stylesheet>
+
18 tests/fixtures/xmldata/invalid/xalan_write.xsl vendored Normal file
@@ -0,0 +1,18 @@
+<!-- Tested with xalan-j_2_7_1-bin.zip, Xerces-J-bin.2.11.0.tar.gz on
+OpenJDK 1.7.0_15
+
+$ LC_ALL=C java -cp xalan.jar:serializer.jar:xercesImpl.jar:xml-apis.jar \
+org.apache.xalan.xslt.Process -in simple.xml -xsl xalan_write.xsl
+-->
+<xsl:stylesheet version="1.0"
+xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
+xmlns:redirect="http://xml.apache.org/xalan/redirect"
+extension-element-prefixes="redirect">
+<xsl:output omit-xml-declaration="yes" indent="yes"/>
+<xsl:template match="/">
+<redirect:write file="xalan_redirect.txt" method="text">
+<xsl:text>Something bad happens here! </xsl:text>
+</redirect:write>
+</xsl:template>
+</xsl:stylesheet>
+
7 tests/fixtures/xmldata/invalid/xmlbomb.xml vendored Normal file
@@ -0,0 +1,7 @@
+<!DOCTYPE xmlbomb [
+<!ENTITY a "1234567890" >
+<!ENTITY b "&a;&a;&a;&a;&a;&a;&a;&a;">
+<!ENTITY c "&b;&b;&b;&b;&b;&b;&b;&b;">
+<!ENTITY d "&c;&c;&c;&c;&c;&c;&c;&c;">
+]>
+<bomb>&c;</bomb>
4 tests/fixtures/xmldata/invalid/xmlbomb2.xml vendored Normal file
@@ -0,0 +1,4 @@
+<!DOCTYPE xmlbomb [
+<!ENTITY a "1234567890">
+]>
+<root>text<bomb>&a;</bomb><tag/></root>
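The invalid fixtures above are the classic XML hardening payloads: cyclic and exponential entity expansion (the billion-laughs family), external entities fetched over http:// and file://, and Xalan XSLT extensions that run commands or write files. A minimal sketch of the kind of defense such payloads exercise, assuming the third-party defusedxml package; the diff itself does not show which parser httpie's XML formatter ends up using:

import defusedxml.ElementTree as ElementTree
from defusedxml import EntitiesForbidden

# Same shape as xmlbomb2.xml above: a DTD that declares an entity.
payload = (
    '<!DOCTYPE xmlbomb [\n'
    '<!ENTITY a "1234567890">\n'
    ']>\n'
    '<root>text<bomb>&a;</bomb><tag/></root>'
)

try:
    ElementTree.fromstring(payload)
except EntitiesForbidden:
    # defusedxml rejects the entity declaration instead of expanding it.
    print('entity declaration rejected')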
8 tests/fixtures/xmldata/valid/dtd_formatted.xml vendored Normal file
@@ -0,0 +1,8 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!DOCTYPE html
+PUBLIC '-//W3C//DTD XHTML 1.0 Transitional//EN'
+'http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd'>
+<html>
+<head/>
+<body>text</body>
+</html>
2 tests/fixtures/xmldata/valid/dtd_raw.xml vendored Normal file
@@ -0,0 +1,2 @@
+<?xml version="1.0" encoding="utf-8"?><!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+<html><head/><body>text</body></html>
9 tests/fixtures/xmldata/valid/simple-ns_formatted.xml vendored Normal file
@@ -0,0 +1,9 @@
+<?xml version="1.0" encoding="utf-8"?>
+<?pi data?>
+<!-- comment -->
+<root xmlns="namespace">
+<element key="value">text</element>
+<element>text</element>
+tail
+<empty-element/>
+</root>
1 tests/fixtures/xmldata/valid/simple-ns_raw.xml vendored Normal file
@@ -0,0 +1 @@
+<?pi data?><!-- comment --><root xmlns='namespace'><element key='value'>text</element><element>text</element>tail<empty-element/></root>
3 tests/fixtures/xmldata/valid/simple-standalone-no_formatted.xml vendored Normal file
@@ -0,0 +1,3 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!DOCTYPE s1>
+<s1>........</s1>
2 tests/fixtures/xmldata/valid/simple-standalone-no_raw.xml vendored Normal file
@@ -0,0 +1,2 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?><!DOCTYPE s1>
+<s1>........</s1>
3 tests/fixtures/xmldata/valid/simple-standalone-yes_formatted.xml vendored Normal file
@@ -0,0 +1,3 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!DOCTYPE s1>
+<s1>........</s1>
2 tests/fixtures/xmldata/valid/simple-standalone-yes_raw.xml vendored Normal file
@@ -0,0 +1,2 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?><!DOCTYPE s1>
+<s1>........</s1>
8 tests/fixtures/xmldata/valid/simple_formatted.xml vendored Normal file
@@ -0,0 +1,8 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- comment -->
+<root>
+<element key="value">text</element>
+<element>text</element>
+tail
+<empty-element/>
+</root>
1 tests/fixtures/xmldata/valid/simple_raw.xml vendored Normal file
@@ -0,0 +1 @@
+<!-- comment --><root><element key='value'>text</element><element>text</element>tail<empty-element/></root>
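Each *_raw.xml fixture above is a single-line document, and the matching *_formatted.xml holds its pretty-printed form. A rough illustration of that raw-to-formatted relationship using only the standard library; this is not necessarily the formatter httpie applies, and the indent width here is an arbitrary choice:

from xml.dom import minidom

raw = ("<!-- comment --><root><element key='value'>text</element>"
       "<element>text</element>tail<empty-element/></root>")

# minidom re-serializes the one-liner with an XML declaration and one node
# per line, roughly the shape of the *_formatted.xml fixtures.
print(minidom.parseString(raw).toprettyxml(indent='    '))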
29 tests/fixtures/xmldata/xhtml/xhtml_formatted.xml vendored Normal file
@@ -0,0 +1,29 @@
+<!DOCTYPE html
+PUBLIC '-//W3C//DTD XHTML 1.0 Strict//EN'
+'http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd'>
+<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
+<head>
+<meta http-equiv="Content-Type" content="text/html; charset=utf-8"/>
+<title>XHTML 1.0 Strict Example</title>
+<script type="text/javascript">
+//
+<![CDATA[
+function loadpdf() {
+document.getElementById("pdf-object").src="http://www.w3.org/TR/xhtml1/xhtml1.pdf";
+}
+//]]>
+</script>
+</head>
+<body onload="loadpdf()">
+<p>
+This is an example of an
+<abbr title="Extensible HyperText Markup Language">XHTML</abbr>
+1.0 Strict document.
+<br/>
+<img id="validation-icon" src="http://www.w3.org/Icons/valid-xhtml10" alt="Valid XHTML 1.0 Strict"/>
+<br/>
+<object id="pdf-object" name="pdf-object" type="application/pdf" data="http://www.w3.org/TR/xhtml1/xhtml1.pdf" width="100%" height="500">
+</object>
+</p>
+</body>
+</html>
Some files were not shown because too many files have changed in this diff