Compare commits


1 Commit
3.0.2 ... 0.4.0

Author SHA1 Message Date
d4f2daca56 v0.4.0 2013-02-22 13:52:05 +01:00
207 changed files with 4700 additions and 21059 deletions

View File

@ -1,17 +0,0 @@
# https://editorconfig.org
root = true
[*]
indent_style = space
indent_size = 4
end_of_line = lf
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true
[*.yml]
indent_size = 2
[Makefile]
indent_style = tab
indent_size = 8

View File

@ -1,44 +0,0 @@
---
name: Bug report
about: Report a possible bug in HTTPie
title: ''
labels: "new, bug"
assignees: ''
---
## Checklist
- [ ] I've searched for similar issues.
- [ ] I'm using the latest version of HTTPie.
---
## Minimal reproduction code and steps
1.
2.
3.
## Current result
## Expected result
---
## Debug output
Please re-run the command with `--debug`, then copy the entire command & output and paste both below:
```bash
$ http --debug <COMPLETE ARGUMENT LIST THAT TRIGGERS THE ERROR>
<COMPLETE OUTPUT>
```
## Additional information, screenshots, or code examples

View File

@ -1,30 +0,0 @@
---
name: Feature request
about: Suggest an enhancement for HTTPie
title: ''
labels: "new, enhancement"
assignees: ''
---
## Checklist
- [ ] I've searched for similar feature requests.
---
## Enhancement request
---
## Problem it solves
E.g. “I'm always frustrated when […]”, “I'm trying to do […] so that […]”.
---
## Additional information, screenshots, or code examples

View File

@ -1,10 +0,0 @@
---
name: Other
about: Anything else that isn't a feature or a bug
title: ''
labels: "new"
assignees: ''
---
If you have a general question, please consider asking on Discord: https://httpie.io/chat

View File

@ -1,9 +0,0 @@
version: 2
updates:
# GitHub Actions
- package-ecosystem: github-actions
directory: /
schedule:
interval: daily
assignees:
- BoboTiG

View File

@ -1,52 +0,0 @@
name: Benchmark
on:
pull_request:
types: [ labeled ]
permissions:
issues: write
pull-requests: write
jobs:
test:
if: github.event.label.name == 'benchmark'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
python-version: "3.9"
- id: benchmarks
name: Run Benchmarks
run: |
python -m pip install pyperf>=2.3.0
python extras/profiling/run.py --fresh --complex --min-speed=6 --file output.txt
body=$(cat output.txt)
body="${body//'%'/'%25'}"
body="${body//$'\n'/'%0A'}"
body="${body//$'\r'/'%0D'}"
echo "::set-output name=body::$body"
- name: Find Comment
uses: peter-evans/find-comment@v1
id: fc
with:
issue-number: ${{ github.event.pull_request.number }}
comment-author: 'github-actions[bot]'
body-includes: '# Benchmarks'
- name: Create or update comment
uses: peter-evans/create-or-update-comment@v1
with:
comment-id: ${{ steps.fc.outputs.comment-id }}
issue-number: ${{ github.event.pull_request.number }}
body: |
# Benchmarks
${{ steps.benchmarks.outputs.body }}
edit-mode: replace
- uses: actions-ecosystem/action-remove-labels@v1
with:
labels: benchmark

View File

@ -1,19 +0,0 @@
on:
pull_request:
paths:
- .github/workflows/code-style.yml
- extras/*.py
- httpie/**/*.py
- setup.py
- tests/**/*.py
jobs:
code-style:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
python-version: 3.9
- run: make venv
- run: make codestyle

View File

@ -1,22 +0,0 @@
on:
pull_request:
paths:
- .github/workflows/coverage.yml
- httpie/**/*.py
- setup.*
- tests/**/*.py
jobs:
coverage:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
python-version: "3.10"
- run: make install
- run: make test-cover
- run: make codecov-upload
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_REPO_TOKEN }}
- run: make test-dist

View File

@ -1,19 +0,0 @@
on:
pull_request:
paths:
- "*.md"
- "**/*.md"
jobs:
doc:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Setup Ruby
uses: ruby/setup-ruby@v1
with:
ruby-version: 2.7
- name: Install the linter
run: sudo gem install mdl
- name: Check files
run: make doc-check

View File

@ -1,20 +0,0 @@
on:
push:
branches:
- master
paths:
- docs/README.md
- docs/config.json
release:
types:
- published
- unpublished
- deleted
jobs:
trigger-doc-build:
runs-on: ubuntu-latest
steps:
- name: Install HTTPie
run: sudo snap install --edge httpie
- name: Trigger new documentation build
run: http --ignore-stdin POST ${{ secrets.DOCS_UPDATE_VERCEL_HOOK }}

View File

@ -1,31 +0,0 @@
on:
push:
branches:
- master
paths:
- .github/workflows/docs-update-install.yml
- docs/installation/*
# Allow the workflow to be triggered manually
workflow_dispatch:
jobs:
doc:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
python-version: 3.9
- run: make install
- run: make doc-update-install
- uses: Automattic/action-commit-to-branch@master
with:
branch: master
commit_message: |
Auto-update install docs
Via .github/workflows/docs-update-install.yml
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@ -1,22 +0,0 @@
on:
workflow_dispatch:
inputs:
branch:
description: "The branch, tag or SHA to release from"
required: true
default: "master"
jobs:
snap:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
with:
ref: ${{ github.event.inputs.branch }}
- uses: snapcore/action-build@v1
id: build
- uses: snapcore/action-publish@v1
with:
store_login: ${{ secrets.SNAP_STORE_LOGIN }}
snap: ${{ steps.build.outputs.snap }}
release: edge

View File

@ -1,31 +0,0 @@
on:
# Add a "Trigger" button to manually start the workflow.
workflow_dispatch:
inputs:
branch:
description: "The branch, tag or SHA to release from"
required: true
default: "master"
# It could be fully automated by uncommenting the following lines.
# Let's see later if we are confident enough to try it :)
# release:
# types:
# - published
jobs:
new-release:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
with:
ref: ${{ github.event.inputs.branch }}
- name: PyPi configuration
run: |
echo "[distutils]\nindex-servers=\n httpie\n\n[httpie]\nrepository = https://upload.pypi.org/legacy/\n" > $HOME/.pypirc
- uses: actions/setup-python@v2
with:
python-version: 3.9
- run: make publish
env:
TWINE_USERNAME: __token__
TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}

View File

@ -1,26 +0,0 @@
name: Mark stale pull requests
on: workflow_dispatch
permissions:
pull-requests: write
jobs:
stale:
runs-on: ubuntu-latest
steps:
- uses: actions/stale@v4
with:
close-pr-message: 'Thanks for the pull request, but since it was stale for more than 30 days we are closing it. If you want to resume work on it, feel free to re-open it or create a new one.'
stale-pr-label: 'stale'
days-before-stale: -1
days-before-issue-stale: -1
days-before-pr-stale: 30
days-before-close: -1
days-before-issue-close: -1
days-before-pr-close: 0
operations-per-run: 300

View File

@ -1,25 +0,0 @@
on:
pull_request:
paths:
- .github/workflows/test-package-linux-snap.yml
- snapcraft.yaml
workflow_dispatch:
jobs:
snap:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Build
uses: snapcore/action-build@v1
id: snapcraft
- name: Install
run: sudo snap install --dangerous ${{ steps.snapcraft.outputs.snap }}
- name: Test
run: |
httpie.http --version
httpie.https --version
httpie --version
# Auto-aliases cannot be tested when installing a snap outside the store.
# http --version
# https --version

View File

@ -1,18 +0,0 @@
on:
pull_request:
paths:
- .github/workflows/test-package-mac-brew.yml
- docs/packaging/brew/httpie.rb
workflow_dispatch:
jobs:
brew:
runs-on: macos-latest
steps:
- uses: actions/checkout@v2
- name: Setup brew
run: |
brew developer on
brew update
- name: Build and test the formula
run: make brew-test

View File

@ -1,45 +0,0 @@
on:
push:
branches:
- master
paths:
- .github/workflows/tests.yml
- httpie/**/*.py
- setup.*
- tests/**/*.py
pull_request:
paths:
- .github/workflows/tests.yml
- httpie/**/*.py
- setup.*
- tests/**/*.py
jobs:
test:
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
python-version: [3.7, 3.8, 3.9, "3.10"]
pyopenssl: [0, 1]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Windows setup
if: matrix.os == 'windows-latest'
run: |
python -m pip install --upgrade pip wheel
python -m pip install --upgrade '.[dev]'
python -m pytest --verbose ./httpie ./tests
env:
HTTPIE_TEST_WITH_PYOPENSSL: ${{ matrix.pyopenssl }}
- name: Linux & Mac setup
if: matrix.os != 'windows-latest'
run: |
make install
make test
env:
HTTPIE_TEST_WITH_PYOPENSSL: ${{ matrix.pyopenssl }}

159
.gitignore vendored
View File

@ -1,153 +1,10 @@
.DS_Store
.idea/
*.egg-info
.cache/
dist
httpie.egg-info
build
*.pyc
htmlcov
##############################################################################
# The below is the GitHub .gitignore template for Python projects.
# <https://github.com/github/gitignore/blob/master/Python.gitignore>
##############################################################################
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.nox/
.tox
README.html
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
.python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
venv*/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# Packit
/httpie.spec
/httpie-*.rpm
/httpie-*.tar.gz
# VS Code
.vscode/
# Windows Chocolatey
*.nupkg
htmlcov
.idea
.DS_Store

View File

@ -1,20 +0,0 @@
# See the documentation for more information:
# https://packit.dev/docs/configuration/
specfile_path: httpie.spec
actions:
# get the current Fedora Rawhide specfile:
# post-upstream-clone: "wget https://src.fedoraproject.org/rpms/httpie/raw/rawhide/f/httpie.spec -O httpie.spec"
post-upstream-clone: "cp docs/packaging/linux-fedora/httpie.spec.txt httpie.spec"
jobs:
- job: copr_build
trigger: pull_request
metadata:
targets:
- fedora-all
additional_repos:
- "https://kojipkgs.fedoraproject.org/repos/f$releasever-build/latest/$basearch/"
- job: propose_downstream
trigger: release
metadata:
dist_git_branches:
- rawhide

9
.travis.yml Normal file
View File

@ -0,0 +1,9 @@
language: python
python:
- 2.6
- 2.7
- pypy
- 3.3
script: python setup.py test
install:
- pip install . --use-mirrors

View File

@ -1,41 +0,0 @@
# HTTPie authors
- [Jakub Roztocil](https://github.com/jakubroztocil)
## Patches, features, ideas
[Complete list of contributors on GitHub](https://github.com/httpie/httpie/graphs/contributors)
- [Cláudia T. Delgado](https://github.com/claudiatd)
- [Hank Gay](https://github.com/gthank)
- [Jake Basile](https://github.com/jakebasile)
- [Vladimir Berkutov](https://github.com/dair-targ)
- [Jakob Kramer](https://github.com/gandaro)
- [Chris Faulkner](https://github.com/faulkner)
- [Alen Mujezinovic](https://github.com/flashingpumpkin)
- [Praful Mathur](https://github.com/tictactix)
- [Marc Abramowitz](https://github.com/msabramo)
- [Ismail Badawi](https://github.com/isbadawi)
- [Laurent Bachelier](https://github.com/laurentb)
- [Isman Firmansyah](https://github.com/iromli)
- [Simon Olofsson](https://github.com/simono)
- [Churkin Oleg](https://github.com/Bahus)
- [Jökull Sólberg Auðunsson](https://github.com/jokull)
- [Matthew M. Boedicker](https://github.com/mmb)
- [marblar](https://github.com/marblar)
- [Tomek Wójcik](https://github.com/tomekwojcik)
- [Davey Shafik](https://github.com/dshafik)
- [cido](https://github.com/cido)
- [Justin Bonnar](https://github.com/jargonjustin)
- [Nathan LaFreniere](https://github.com/nlf)
- [Matthias Lehmann](https://github.com/matleh)
- [Dennis Brakhane](https://github.com/brakhane)
- [Matt Layman](https://github.com/mblayman)
- [Edward Yang](https://github.com/honorabrutroll)
- [Aleksandr Vinokurov](https://github.com/aleksandr-vin)
- [Jeff Byrnes](https://github.com/jeffbyrnes)
- [Denis Belavin](https://github.com/LuckyDenis)
- [Mickaël Schoentgen](https://github.com/BoboTiG)
- [Elena Lape](https://github.com/elenalape)
- [Rohit Sehgal](https://github.com/r0hi7)
- [Bartłomiej Jacak](https://github.com/bartekjacak)

30
AUTHORS.rst Normal file
View File

@ -0,0 +1,30 @@
==============
HTTPie authors
==============
* `Jakub Roztocil <https://github.com/jkbr>`_
Patches and ideas
-----------------
* `Cláudia T. Delgado <https://github.com/claudiatd>`_ (logo)
* `Hank Gay <https://github.com/gthank>`_
* `Jake Basile <https://github.com/jakebasile>`_
* `Vladimir Berkutov <https://github.com/dair-targ>`_
* `Jakob Kramer <https://github.com/gandaro>`_
* `Chris Faulkner <https://github.com/faulkner>`_
* `Alen Mujezinovic <https://github.com/flashingpumpkin>`_
* `Praful Mathur <https://github.com/tictactix>`_
* `Marc Abramowitz <https://github.com/msabramo>`_
* `Ismail Badawi <https://github.com/isbadawi>`_
* `Laurent Bachelier <https://github.com/laurentb>`_
* `Isman Firmansyah <https://github.com/iromli>`_
* `Simon Olofsson <https://github.com/simono>`_
* `Churkin Oleg <https://github.com/Bahus>`_
* `Jökull Sólberg Auðunsson <https://github.com/jokull>`_
* `Matthew M. Boedicker <https://github.com/mmb>`_
* `marblar <https://github.com/marblar>`_
* `Tomek Wójcik <https://github.com/tomekwojcik>`_
* `Davey Shafik <https://github.com/dshafik>`_
* `cido <https://github.com/cido>`_

View File

@ -1,418 +0,0 @@
# Change Log
This document records all notable changes to [HTTPie](https://httpie.io).
This project adheres to [Semantic Versioning](https://semver.org/).
## [3.0.2](https://github.com/httpie/httpie/compare/3.0.1...3.0.2) (2022-01-23)
[What's new in HTTPie for Terminal 3.0 →](https://httpie.io/blog/httpie-3.0.0)
- Fixed usage of `httpie` when a config with `default_options` is present ([#1280](https://github.com/httpie/httpie/pull/1280)).
## [3.0.1](https://github.com/httpie/httpie/compare/3.0.0...3.0.1) (2022-01-23)
[What's new in HTTPie for Terminal 3.0 →](https://httpie.io/blog/httpie-3.0.0)
- Changed the value shown as time elapsed from time-to-read-headers to total exchange time ([#1277](https://github.com/httpie/httpie/issues/1277))
## [3.0.0](https://github.com/httpie/httpie/compare/2.6.0...3.0.0) (2022-01-21)
[What's new in HTTPie for Terminal 3.0 →](https://httpie.io/blog/httpie-3.0.0)
- Dropped support for Python 3.6. ([#1177](https://github.com/httpie/httpie/issues/1177))
- Improved startup time by 40%. ([#1211](https://github.com/httpie/httpie/pull/1211))
- Added support for nested JSON syntax. ([#1169](https://github.com/httpie/httpie/issues/1169))
- Added `httpie plugins` interface for plugin management. ([#566](https://github.com/httpie/httpie/issues/566))
- Added support for Bearer authentication via `--auth-type=bearer` ([#1215](https://github.com/httpie/httpie/issues/1215)).
- Added support for quick conversions of pasted URLs into HTTPie calls by adding a space after the protocol name (`$ https ://pie.dev` → `https://pie.dev`). ([#1195](https://github.com/httpie/httpie/issues/1195))
- Added support for _sending_ multiple HTTP header lines with the same name. ([#130](https://github.com/httpie/httpie/issues/130))
- Added support for _receiving_ multiple HTTP header lines with the same name. ([#1207](https://github.com/httpie/httpie/issues/1207))
- Added support for basic JSON types on `--form`/`--multipart` when using JSON only operators (`:=`/`:=@`). ([#1212](https://github.com/httpie/httpie/issues/1212))
- Added support for automatically enabling `--stream` when `Content-Type` is `text/event-stream`. ([#376](https://github.com/httpie/httpie/issues/376))
- Added support for displaying the total elapsed time through `--meta`/`-vv` or `--print=m`. ([#243](https://github.com/httpie/httpie/issues/243))
- Added new `pie-dark`/`pie-light` (and `pie`) styles that match with [HTTPie for Web and Desktop](https://httpie.io/product). ([#1237](https://github.com/httpie/httpie/issues/1237))
- Added support for better error handling on DNS failures. ([#1248](https://github.com/httpie/httpie/issues/1248))
- Added support for storing prompted passwords in the local sessions. ([#1098](https://github.com/httpie/httpie/issues/1098))
- Added warnings about the `--ignore-stdin`, when there is no incoming data from stdin. ([#1255](https://github.com/httpie/httpie/issues/1255))
- Fixed crashing due to broken plugins. ([#1204](https://github.com/httpie/httpie/issues/1204))
- Fixed auto addition of XML declaration to every formatted XML response. ([#1156](https://github.com/httpie/httpie/issues/1156))
- Fixed highlighting when `Content-Type` specifies `charset`. ([#1242](https://github.com/httpie/httpie/issues/1242))
- Fixed an unexpected crash when `--raw` is used with `--chunked`. ([#1253](https://github.com/httpie/httpie/issues/1253))
- Changed the default Windows theme from `fruity` to `auto`. ([#1266](https://github.com/httpie/httpie/issues/1266))
## [2.6.0](https://github.com/httpie/httpie/compare/2.5.0...2.6.0) (2021-10-14)
[What's new in HTTPie for Terminal 2.6.0 →](https://httpie.io/blog/httpie-2.6.0)
- Added support for formatting & coloring of JSON bodies preceded by non-JSON data (e.g., an XSSI prefix). ([#1130](https://github.com/httpie/httpie/issues/1130))
- Added charset auto-detection when `Content-Type` doesn't include it. ([#1110](https://github.com/httpie/httpie/issues/1110), [#1168](https://github.com/httpie/httpie/issues/1168))
- Added `--response-charset` to allow overriding the response encoding for terminal display purposes. ([#1168](https://github.com/httpie/httpie/issues/1168))
- Added `--response-mime` to allow overriding the response mime type for coloring and formatting for the terminal. ([#1168](https://github.com/httpie/httpie/issues/1168))
- Added the ability to silence warnings through using `-q` or `--quiet` twice (e.g. `-qq`) ([#1175](https://github.com/httpie/httpie/issues/1175))
- Added installed plugin list to `--debug` output. ([#1165](https://github.com/httpie/httpie/issues/1165))
- Fixed duplicate keys preservation in JSON data. ([#1163](https://github.com/httpie/httpie/issues/1163))
## [2.5.0](https://github.com/httpie/httpie/compare/2.4.0...2.5.0) (2021-09-06)
[What's new in HTTPie for Terminal 2.5.0 →](https://httpie.io/blog/httpie-2.5.0)
- Added `--raw` to allow specifying the raw request body without extra processing as
an alternative to `stdin`. ([#534](https://github.com/httpie/httpie/issues/534))
- Added support for XML formatting. ([#1129](https://github.com/httpie/httpie/issues/1129))
- Added internal support for file-like object responses to improve adapter plugin support. ([#1094](https://github.com/httpie/httpie/issues/1094))
- Fixed `--continue --download` with a single byte to be downloaded left. ([#1032](https://github.com/httpie/httpie/issues/1032))
- Fixed `--verbose` HTTP 307 redirects with streamed request body. ([#1088](https://github.com/httpie/httpie/issues/1088))
- Fixed handling of session files with `Cookie:` followed by other headers. ([#1126](https://github.com/httpie/httpie/issues/1126))
## [2.4.0](https://github.com/httpie/httpie/compare/2.3.0...2.4.0) (2021-02-06)
- Added support for `--session` cookie expiration based on `Set-Cookie: max-age=<n>`. ([#1029](https://github.com/httpie/httpie/issues/1029))
- Show a `--check-status` warning with `--quiet` as well, not only when the output is redirected. ([#1026](https://github.com/httpie/httpie/issues/1026))
- Fixed upload with `--session` ([#1020](https://github.com/httpie/httpie/issues/1020)).
- Fixed a missing blank line between request and response ([#1006](https://github.com/httpie/httpie/issues/1006)).
## [2.3.0](https://github.com/httpie/httpie/compare/2.2.0...2.3.0) (2020-10-25)
- Added support for streamed uploads ([#201](https://github.com/httpie/httpie/issues/201)).
- Added support for multipart upload streaming ([#684](https://github.com/httpie/httpie/issues/684)).
- Added support for body-from-file upload streaming (`http pie.dev/post @file`).
- Added `--chunked` to enable chunked transfer encoding ([#753](https://github.com/httpie/httpie/issues/753)).
- Added `--multipart` to allow `multipart/form-data` encoding for non-file `--form` requests as well.
- Added support for preserving field order in multipart requests ([#903](https://github.com/httpie/httpie/issues/903)).
- Added `--boundary` to allow a custom boundary string for `multipart/form-data` requests.
- Added support for combining cookies specified on the CLI and in a session file ([#932](https://github.com/httpie/httpie/issues/932)).
- Added out of the box SOCKS support with no extra installation ([#904](https://github.com/httpie/httpie/issues/904)).
- Added `--quiet, -q` flag to enforce silent behaviour.
- Fixed the handling of invalid `expires` dates in `Set-Cookie` headers ([#963](https://github.com/httpie/httpie/issues/963)).
- Removed Tox testing entirely ([#943](https://github.com/httpie/httpie/issues/943)).
## [2.2.0](https://github.com/httpie/httpie/compare/2.1.0...2.2.0) (2020-06-18)
- Added support for custom content types for uploaded files ([#668](https://github.com/httpie/httpie/issues/668)).
- Added support for `$XDG_CONFIG_HOME` ([#920](https://github.com/httpie/httpie/issues/920)).
- Added support for `Set-Cookie`-triggered cookie expiration ([#853](https://github.com/httpie/httpie/issues/853)).
- Added `--format-options` to allow disabling sorting, etc. ([#128](https://github.com/httpie/httpie/issues/128))
- Added `--sorted` and `--unsorted` shortcuts for (un)setting all sorting-related `--format-options`. ([#128](https://github.com/httpie/httpie/issues/128))
- Added `--ciphers` to allow configuring OpenSSL ciphers ([#870](https://github.com/httpie/httpie/issues/870)).
- Added `netrc` support for auth plugins. Enabled for `--auth-type=basic`
and `digest`, 3rd parties may opt in ([#718](https://github.com/httpie/httpie/issues/718), [#719](https://github.com/httpie/httpie/issues/719), [#852](https://github.com/httpie/httpie/issues/852), [#934](https://github.com/httpie/httpie/issues/934)).
- Fixed built-in plugins-related circular imports ([#925](https://github.com/httpie/httpie/issues/925)).
## [2.1.0](https://github.com/httpie/httpie/compare/2.0.0...2.1.0) (2020-04-18)
- Added `--path-as-is` to bypass dot segment (`/../` or `/./`)
URL squashing ([#895](https://github.com/httpie/httpie/issues/895)).
- Changed the default `Accept` header value for JSON requests from
`application/json, */*` to `application/json, */*;q=0.5`
to clearly indicate preference ([#488](https://github.com/httpie/httpie/issues/488)).
- Fixed `--form` file upload mixed with redirected `stdin` error handling
([#840](https://github.com/httpie/httpie/issues/840)).
## [2.0.0](https://github.com/httpie/httpie/compare/1.0.3...2.0.0) (2020-01-12)
- Removed Python 2.7 support ([EOL Jan 2020](https://www.python.org/doc/sunset-python-2/)).
- Added `--offline` to allow building an HTTP request and printing it but not
actually sending it over the network.
- Replaced the old collect-all-then-process handling of HTTP communication
with one-by-one processing of each HTTP request or response as they become
available. This means that you can see headers immediately,
see what is being sent even if the request fails, etc.
- Removed automatic config file creation to avoid concurrency issues.
- Removed the default 30-second connection `--timeout` limit.
- Removed Python's default limit of 100 response headers.
- Added `--max-headers` to allow setting the max header limit.
- Added `--compress` to allow request body compression.
- Added `--ignore-netrc` to allow bypassing credentials from `.netrc`.
- Added `https` alias command with `https://` as the default scheme.
- Added `$ALL_PROXY` documentation.
- Added type annotations throughout the codebase.
- Added `tests/` to the PyPI package for the convenience of
downstream package maintainers.
- Fixed an error when `stdin` was a closed fd.
- Improved `--debug` output formatting.
## [1.0.3](https://github.com/httpie/httpie/compare/1.0.2...1.0.3) (2019-08-26)
- Fixed CVE-2019-10751 — the way the output filename is generated for
`--download` requests without `--output` resulting in a redirect has
been changed to only consider the initial URL as the base for the generated
filename, and not the final one. This fixes a potential security issue under
the following scenario:
1. A `--download` request with no explicit `--output` is made (e.g.,
`$ http -d example.org/file.txt`), instructing httpie to
[generate the output filename](https://httpie.org/doc#downloaded-filename)
from the `Content-Disposition` response header, or from the URL if the header
is not provided.
2. The server handling the request has been modified by an attacker and
instead of the expected response the URL returns a redirect to another
URL, e.g., `attacker.example.org/.bash_profile`, whose response does
not provide a `Content-Disposition` header (i.e., the base for the
generated filename becomes `.bash_profile` instead of `file.txt`).
3. Your current directory doesn't already contain `.bash_profile`
(i.e., no unique suffix is added to the generated filename).
4. You don't notice the potentially unexpected output filename
as reported by httpie in the console output
(e.g., `Downloading 100.00 B to ".bash_profile"`).
Reported by Raul Onitza and Giulio Comi.
## [1.0.2](https://github.com/httpie/httpie/compare/1.0.1...1.0.2) (2018-11-14)
- Fixed tests for installation with pyOpenSSL.
## [1.0.1](https://github.com/httpie/httpie/compare/1.0.0...1.0.1) (2018-11-14)
- Removed external URL calls from tests.
## [1.0.0](https://github.com/httpie/httpie/compare/0.9.9...1.0.0) (2018-11-02)
- Added `--style=auto` which follows the terminal ANSI color styles.
- Added support for selecting TLS 1.3 via `--ssl=tls1.3`
(available once implemented in upstream libraries).
- Added `true`/`false` as valid values for `--verify`
(in addition to `yes`/`no`) and the boolean value is case-insensitive.
- Changed the default `--style` from `solarized` to `auto` (on Windows it stays `fruity`).
- Fixed default headers being incorrectly case-sensitive.
- Removed Python 2.6 support.
## [0.9.9](https://github.com/httpie/httpie/compare/0.9.8...0.9.9) (2016-12-08)
- Fixed README.
## [0.9.8](https://github.com/httpie/httpie/compare/0.9.6...0.9.8) (2016-12-08)
- Extended auth plugin API.
- Added exit status code `7` for plugin errors.
- Added support for `curses`-less Python installations.
- Fixed `REQUEST_ITEM` arg incorrectly being reported as required.
- Improved `CTRL-C` interrupt handling.
- Added the standard exit status code `130` for keyboard interrupts.
## [0.9.6](https://github.com/httpie/httpie/compare/0.9.4...0.9.6) (2016-08-13)
- Added Python 3 as a dependency for Homebrew installations
to ensure some of the newer HTTP features work out of the box
for macOS users (starting with HTTPie 0.9.4.).
- Added the ability to unset a request header with `Header:`, and send an
empty value with `Header;`.
- Added `--default-scheme <URL_SCHEME>` to enable things like
`$ alias https='http --default-scheme=https'`.
- Added `-I` as a shortcut for `--ignore-stdin`.
- Added fish shell completion (located in `extras/httpie-completion.fish`
in the GitHub repo).
- Updated `requests` to 2.10.0 so that SOCKS support can be added via
`pip install requests[socks]`.
- Changed the default JSON `Accept` header from `application/json`
to `application/json, */*`.
- Changed the pre-processing of request HTTP headers so that any leading
and trailing whitespace is removed.
## [0.9.4](https://github.com/httpie/httpie/compare/0.9.3...0.9.4) (2016-07-01)
- Added `Content-Type` of files uploaded in `multipart/form-data` requests
- Added `--ssl=<PROTOCOL>` to specify the desired SSL/TLS protocol version
to use for HTTPS requests.
- Added JSON detection with `--json, -j` to work around incorrect
`Content-Type`
- Added `--all` to show intermediate responses such as redirects (with `--follow`)
- Added `--history-print, -P WHAT` to specify formatting of intermediate responses
- Added `--max-redirects=N` (default 30)
- Added `-A` as short name for `--auth-type`
- Added `-F` as short name for `--follow`
- Removed the `implicit_content_type` config option
(use `"default_options": ["--form"]` instead)
- Redirected `stdout` doesn't trigger an error anymore when `--output FILE`
is set
- Changed the default `--style` back to `solarized` for better support
of light and dark terminals
- Improved `--debug` output
- Fixed `--session` when used with `--download`
- Fixed `--download` to trim too long filenames before saving the file
- Fixed the handling of `Content-Type` with multiple `+subtype` parts
- Removed the XML formatter as the implementation suffered from multiple issues
## [0.9.3](https://github.com/httpie/httpie/compare/0.9.2...0.9.3) (2016-01-01)
- Changed the default color `--style` from `solarized` to `monokai`
- Added basic Bash autocomplete support (need to be installed manually)
- Added request details to connection error messages
- Fixed `'requests.packages.urllib3' has no attribute 'disable_warnings'`
errors that occurred in some installations
- Fixed colors and formatting on Windows
- Fixed `--auth` prompt on Windows
## [0.9.2](https://github.com/httpie/httpie/compare/0.9.1...0.9.2) (2015-02-24)
- Fixed compatibility with Requests 2.5.1
- Changed the default JSON `Content-Type` to `application/json` as UTF-8
is the default JSON encoding
## [0.9.1](https://github.com/httpie/httpie/compare/0.9.0...0.9.1) (2015-02-07)
- Added support for Requests transport adapter plugins
(see [httpie-unixsocket](https://github.com/httpie/httpie-unixsocket)
and [httpie-http2](https://github.com/httpie/httpie-http2))
## [0.9.0](https://github.com/httpie/httpie/compare/0.8.0...0.9.0) (2015-01-31)
- Added `--cert` and `--cert-key` parameters to specify a client side
certificate and private key for SSL
- Improved unicode support
- Improved terminal color depth detection via `curses`
- To make it easier to deal with Windows paths in request items, `\`
now only escapes special characters (the ones that are used as key-value
separators by HTTPie)
- Switched from `unittest` to `pytest`
- Added Python `wheel` support
- Various test suite improvements
- Added `CONTRIBUTING`
- Fixed `User-Agent` overwriting when used within a session
- Fixed handling of empty passwords in URL credentials
- Fixed multiple file uploads with the same form field name
- Fixed `--output=/dev/null` on Linux
- Miscellaneous bugfixes
## [0.8.0](https://github.com/httpie/httpie/compare/0.7.1...0.8.0) (2014-01-25)
- Added `field=@file.txt` and `field:=@file.json` for embedding
the contents of text and JSON files into request data
- Added curl-style shorthand for localhost
- Fixed request `Host` header value output so that it doesn't contain
credentials, if included in the URL
## [0.7.1](https://github.com/httpie/httpie/compare/0.6.0...0.7.1) (2013-09-24)
- Added `--ignore-stdin`
- Added support for auth plugins
- Improved `--help` output
- Improved `Content-Disposition` parsing for `--download` mode
- Update to Requests 2.0.0
## [0.6.0](https://github.com/httpie/httpie/compare/0.5.1...0.6.0) (2013-06-03)
- XML data is now formatted
- `--session` and `--session-read-only` now also accept paths to
session files (e.g. `http --session=/tmp/session.json example.org`)
## [0.5.1](https://github.com/httpie/httpie/compare/0.5.0...0.5.1) (2013-05-13)
- `Content-*` and `If-*` request headers are not stored in sessions
anymore as they are request-specific
## [0.5.0](https://github.com/httpie/httpie/compare/0.4.1...0.5.0) (2013-04-27)
- Added a download mode via `--download`
- Fixes miscellaneous bugs
## [0.4.1](https://github.com/httpie/httpie/compare/0.4.0...0.4.1) (2013-02-26)
- Fixed `setup.py`
## [0.4.0](https://github.com/httpie/httpie/compare/0.3.0...0.4.0) (2013-02-22)
- Added Python 3.3 compatibility
- Added Requests >= v1.0.4 compatibility
- Added support for credentials in URL
- Added `--no-option` for every `--option` to be config-friendly
- Mutually exclusive arguments can be specified multiple times. The
last value is used
## [0.3.0](https://github.com/httpie/httpie/compare/0.2.7...0.3.0) (2012-09-21)
- Allow output redirection on Windows
- Added configuration file
- Added persistent session support
- Renamed `--allow-redirects` to `--follow`
- Improved the usability of `http --help`
- Fixed installation on Windows with Python 3
- Fixed colorized output on Windows with Python 3
- CRLF HTTP header field separation in the output
- Added exit status code `2` for timed-out requests
- Added the option to separate colorizing and formatting
(`--pretty=all`, `--pretty=colors` and `--pretty=format`)
`--ugly` has been removed in favor of `--pretty=none`
## [0.2.7](https://github.com/httpie/httpie/compare/0.2.5...0.2.7) (2012-08-07)
- Added compatibility with Requests 0.13.6
- Added streamed terminal output. `--stream, -S` can be used to enable
streaming also with `--pretty` and to ensure a more frequent output
flushing
- Added support for efficient large file downloads
- Sort headers by name (unless `--pretty=none`)
- Response body is fetched only when needed (e.g., not with `--headers`)
- Improved content type matching
- Updated Solarized color scheme
- Windows: Added `--output FILE` to store output into a file
(piping results in corrupted data on Windows)
- Proper handling of binary requests and responses
- Fixed printing of `multipart/form-data` requests
- Renamed `--traceback` to `--debug`
## [0.2.6](https://github.com/httpie/httpie/compare/0.2.5...0.2.6) (2012-07-26)
- The short option for `--headers` is now `-h` (`-t` has been
removed, for usage use `--help`)
- Form data and URL parameters can have multiple fields with the same name
(e.g.,`http -f url a=1 a=2`)
- Added `--check-status` to exit with an error on HTTP 3xx, 4xx and
5xx (exit status 3, 4, and 5, respectively)
- If the output is piped to another program or redirected to a file,
the default behaviour is to only print the response body
(It can still be overwritten via the `--print` flag.)
- Improved highlighting of HTTP headers
- Added query string parameters (`param==value`)
- Added support for terminal colors under Windows
## [0.2.5](https://github.com/httpie/httpie/compare/0.2.2...0.2.5) (2012-07-17)
- Unicode characters in prettified JSON now don't get escaped for
improved readability
- `--auth` now prompts for a password if only a username is provided
- Added support for request payloads from a file path with automatic
`Content-Type` (`http URL @/path`)
- Fixed missing query string when displaying the request headers via
`--verbose`
- Fixed Content-Type for requests with no data
## [0.2.2](https://github.com/httpie/httpie/compare/0.2.1...0.2.2) (2012-06-24)
- The `METHOD` positional argument can now be omitted (defaults to
`GET`, or to `POST` with data)
- Fixed --verbose --form
- Added support for Tox
## [0.2.1](https://github.com/httpie/httpie/compare/0.2.0...0.2.1) (2012-06-13)
- Added compatibility with `requests-0.12.1`
- Dropped custom JSON and HTTP lexers in favor of the ones newly included
in `pygments-1.5`
## [0.2.0](https://github.com/httpie/httpie/compare/0.1.6...0.2.0) (2012-04-25)
- Added Python 3 support
- Added the ability to print the HTTP request as well as the response
(see `--print` and `--verbose`)
- Added support for Digest authentication
- Added file upload support
(`http -f POST file_field_name@/path/to/file`)
- Improved syntax highlighting for JSON
- Added support for field name escaping
- Many bug fixes
## [0.1.6](https://github.com/httpie/httpie/compare/0.1.5...0.1.6) (2012-03-04)
- Fixed `setup.py`
## [0.1.5](https://github.com/httpie/httpie/compare/0.1.4...0.1.5) (2012-03-04)
- Many improvements and bug fixes
## [0.1.4](https://github.com/httpie/httpie/compare/b966efa...0.1.4) (2012-02-28)
- Many improvements and bug fixes
## [0.1.0](https://github.com/httpie/httpie/commit/b966efa) (2012-02-25)
- Initial public release

View File

@ -1,74 +0,0 @@
# Contributor Covenant Code of Conduct
## Our Pledge
In the interest of fostering an open and welcoming environment, we as
contributors and maintainers pledge to making participation in our project and
our community a harassment-free experience for everyone, regardless of age, body
size, disability, ethnicity, sex characteristics, gender identity and expression,
level of experience, education, socio-economic status, nationality, personal
appearance, race, religion, or sexual identity and orientation.
## Our Standards
Examples of behavior that contributes to creating a positive environment
include:
- Using welcoming and inclusive language
- Being respectful of differing viewpoints and experiences
- Gracefully accepting constructive criticism
- Focusing on what is best for the community
- Showing empathy towards other community members
Examples of unacceptable behavior by participants include:
- The use of sexualized language or imagery and unwelcome sexual attention or
advances
- Trolling, insulting/derogatory comments, and personal or political attacks
- Public or private harassment
- Publishing others' private information, such as a physical or electronic
address, without explicit permission
- Other conduct which could reasonably be considered inappropriate in a
professional setting
## Our Responsibilities
Project maintainers are responsible for clarifying the standards of acceptable
behavior and are expected to take appropriate and fair corrective action in
response to any instances of unacceptable behavior.
Project maintainers have the right and responsibility to remove, edit, or
reject comments, commits, code, wiki edits, issues, and other contributions
that are not aligned to this Code of Conduct, or to ban temporarily or
permanently any contributor for other behaviors that they deem inappropriate,
threatening, offensive, or harmful.
## Scope
This Code of Conduct applies both within project spaces and in public spaces
when an individual is representing the project or its community. Examples of
representing a project or community include using an official project e-mail
address, posting via an official social media account, or acting as an appointed
representative at an online or offline event. Representation of a project may be
further defined and clarified by project maintainers.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported by contacting the project team at jakub@roztocil.co. All
complaints will be reviewed and investigated and will result in a response that
is deemed necessary and appropriate to the circumstances. The project team is
obligated to maintain confidentiality with regard to the reporter of an incident.
Further details of specific enforcement policies may be posted separately.
Project maintainers who do not follow or enforce the Code of Conduct in good
faith may face temporary or permanent repercussions as determined by other
members of the project's leadership.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant](https://www.contributor-covenant.org),
version 1.4, available at <https://www.contributor-covenant.org/version/1/4/code-of-conduct.html>
For answers to common questions about this code of conduct, see
<https://www.contributor-covenant.org/faq>

View File

@ -1,210 +0,0 @@
# Contributing to HTTPie
Bug reports and code and documentation patches are welcome. You can
also help this project by using the development version of HTTPie
and by reporting any bugs you might encounter.
## 1. Reporting bugs
**It's important that you provide the full command argument list
as well as the output of the failing command.**
Use the `--debug` flag and copy&paste both the command and its output
to your bug report, e.g.:
```bash
$ http --debug <COMPLETE ARGUMENT LIST THAT TRIGGERS THE ERROR>
<COMPLETE OUTPUT>
```
## 2. Contributing Code and Docs
Before working on a new feature or a bug, please browse [existing issues](https://github.com/httpie/httpie/issues)
to see whether it has previously been discussed.
If your change alters HTTPie's behaviour or interface, it's a good idea to
discuss it before you start working on it.
If you are fixing an issue, the first step should be to create a test case that
reproduces the incorrect behaviour. That will also help you to build an
understanding of the issue at hand.
**Pull requests introducing code changes without tests
will generally not get merged. The same goes for PRs changing HTTPie's
behaviour and not providing documentation.**
Conversely, PRs consisting of documentation improvements or tests
for existing-yet-previously-untested behavior will very likely be merged.
Therefore, docs and tests improvements are a great candidate for your first
contribution.
Consider also adding a [CHANGELOG](https://github.com/httpie/httpie/blob/master/CHANGELOG.md) entry for your changes.
### Development Environment
#### Getting the code
Go to <https://github.com/httpie/httpie> and fork the project repository.
```bash
# Clone your fork
$ git clone git@github.com:<YOU>/httpie.git
# Enter the project directory
$ cd httpie
# Create a branch for your changes
$ git checkout -b my_topical_branch
```
#### Setup
The [Makefile](https://github.com/httpie/httpie/blob/master/Makefile) contains a bunch of tasks to get you started. Just run
the following command, which:
- Creates an isolated Python virtual environment inside `./venv`
(via the standard library [venv](https://docs.python.org/3/library/venv.html) tool);
- installs all dependencies and also installs HTTPie
(in editable mode so that the `http` command will point to your
working copy).
- and runs tests (this is the same as running `make install test`).
```bash
$ make
```
#### Python virtual environment
Activate the Python virtual environment—created via the `make install`
task during [setup](#setup)—for your active shell session using the following command:
```bash
$ source venv/bin/activate
```
(If you use `virtualenvwrapper`, you can also use `workon httpie` to
activate the environment — we have created a symlink for you. It's a bit of
a hack but it works™.)
You should now see `(httpie)` next to your shell prompt, and
the `http` command should point to your development copy:
```bash
(httpie) ~/Code/httpie $ which http
/Users/<user>/Code/httpie/venv/bin/http
(httpie) ~/Code/httpie $ http --version
2.0.0-dev
```
(By the way, you don't need to activate the virtual environment if you just want
to run some of the `make` tasks. You can also invoke the development
version of HTTPie directly with `./venv/bin/http` without having to activate
the environment first. The same goes for `./venv/bin/pytest`, etc.).
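For example, here are a couple of invocations that work without activating the environment first (assuming the default `./venv` location created by `make`):
```bash
# Run the development copy of HTTPie directly
$ ./venv/bin/http --version

# Run a single test module with the virtual environment's pytest
$ ./venv/bin/pytest tests/test_uploads.py
```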
### Making Changes
Please make sure your changes conform to [Style Guide for Python Code](https://python.org/dev/peps/pep-0008/) (PEP8)
and that `make pycodestyle` passes.
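For reference, a minimal way to run the check locally (`codestyle` is the preferred target name and `pycodestyle` is kept as an alias; see the Makefile further below):
```bash
# Check PEP 8 compliance (runs flake8 over the project sources)
$ make codestyle

# Roughly equivalent direct invocation with the virtual environment activated
$ flake8 httpie/ tests/ extras/profiling/ docs/packaging/brew/ *.py
```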
### Testing & CI
Please add tests for any new features and bug fixes.
When you open a Pull Request, [GitHub Actions](https://github.com/httpie/httpie/actions) will automatically run HTTPie's [test suite](https://github.com/httpie/httpie/tree/master/tests) against your code, so please make sure all checks pass.
#### Running tests locally
HTTPie uses the [pytest](https://pytest.org/) runner.
```bash
# Run tests on the current Python interpreter with coverage.
$ make test
# Run tests with coverage
$ make test-cover
# Test PEP8 compliance
$ make codestyle
# Run extended tests — for code as well as .md files syntax, packaging, etc.
$ make test-all
```
#### Running specific tests
After you have activated your virtual environment (see [setup](#setup)), you
can run specific tests from the terminal:
```bash
# Run specific tests on the current Python
$ python -m pytest tests/test_uploads.py
$ python -m pytest tests/test_uploads.py::TestMultipartFormDataFileUpload
$ python -m pytest tests/test_uploads.py::TestMultipartFormDataFileUpload::test_upload_ok
```
See [Makefile](https://github.com/httpie/httpie/blob/master/Makefile) for additional development utilities.
#### Running benchmarks
If you are trying to work on speeding up HTTPie and want to verify your results, you
can run the benchmark suite. The suite will compare the last commit of your branch
with the master branch of your repository (or a fresh checkout of HTTPie master, through
`--fresh`) and report the results back.
```bash
$ python extras/profiling/run.py
```
The benchmarks can also be run on the CI. Since it is a long process, it requires manual
oversight. Ping one of the maintainers to get a `benchmark` label on your branch.
#### Windows
If you are on a Windows machine and not able to run `make`,
follow the next steps for a basic setup. As a prerequisite, you need to have
Python 3.7+ installed.
Create a virtual environment and activate it:
```powershell
C:\> python -m venv --prompt httpie venv
C:\> venv\Scripts\activate
```
Install HTTPie in editable mode with all the dependencies:
```powershell
C:\> python -m pip install --upgrade -e .[dev]
```
You should now see `(httpie)` next to your shell prompt, and
the `http` command should point to your development copy:
```powershell
# In PowerShell:
(httpie) PS C:\Users\<user>\httpie> Get-Command http
CommandType Name Version Source
----------- ---- ------- ------
Application http.exe 0.0.0.0 C:\Users\<user>\httpie\venv\Scripts\http.exe
```
```bash
# In CMD:
(httpie) C:\Users\<user>\httpie> where http
C:\Users\<user>\httpie\venv\Scripts\http.exe
C:\Users\<user>\AppData\Local\Programs\Python\Python38-32\Scripts\http.exe
(httpie) C:\Users\<user>\httpie> http --version
2.3.0-dev
```
Use `pytest` to run tests locally with an active virtual environment:
```bash
# Run all tests
$ python -m pytest
```
______________________________________________________________________
Finally, feel free to add yourself to [AUTHORS](https://github.com/httpie/httpie/blob/master/AUTHORS.md)!

10
LICENSE
View File

@ -1,4 +1,4 @@
Copyright © 2012-2021 Jakub Roztocil <jakub@roztocil.co>
Copyright © 2012 Jakub Roztocil <jakub@roztocil.name>
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
@ -10,14 +10,14 @@ modification, are permitted provided that the following conditions are met:
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its contributors
may be used to endorse or promote products derived from this software
3. Neither the name of The author nor the names of its contributors may
be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
DISCLAIMED. IN NO EVENT SHALL THE AUTHOR AND CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON

View File

@ -1,8 +1 @@
include LICENSE
include README.md
include CHANGELOG.md
include AUTHORS.md
include docs/README.md
# <https://github.com/httpie/httpie/issues/182>
recursive-include tests/ *
include README.rst LICENSE

213
Makefile
View File

@ -1,213 +0,0 @@
###############################################################################
# See ./CONTRIBUTING.md
###############################################################################
.PHONY: build
ROOT_DIR:=$(shell dirname $(realpath $(firstword $(MAKEFILE_LIST))))
VERSION=$(shell grep __version__ httpie/__init__.py)
H1="\n\n\033[0;32m\#\#\# "
H1END=" \#\#\# \033[0m\n"
# Only used to create our venv.
SYSTEM_PYTHON=python3
VENV_ROOT=venv
VENV_BIN=$(VENV_ROOT)/bin
VENV_PIP=$(VENV_BIN)/pip3
VENV_PYTHON=$(VENV_BIN)/python
export PATH := $(VENV_BIN):$(PATH)
all: uninstall-httpie install test
install: venv install-reqs
install-reqs:
@echo $(H1)Updating package tools$(H1END)
$(VENV_PIP) install --upgrade pip wheel
@echo $(H1)Installing dev requirements$(H1END)
$(VENV_PIP) install --upgrade --editable '.[dev]'
@echo $(H1)Installing HTTPie$(H1END)
$(VENV_PIP) install --upgrade --editable .
@echo
clean:
@echo $(H1)Cleaning up$(H1END)
rm -rf $(VENV_ROOT)
# Remove symlink for virtualenvwrapper, if we've created one.
[ -n "$(WORKON_HOME)" -a -L "$(WORKON_HOME)/httpie" -a -f "$(WORKON_HOME)/httpie" ] && rm $(WORKON_HOME)/httpie || true
rm -rf *.egg dist build .coverage .cache .pytest_cache httpie.egg-info
find . -name '__pycache__' -delete -o -name '*.pyc' -delete
@echo
venv:
@echo $(H1)Creating a Python environment $(VENV_ROOT) $(H1END)
$(SYSTEM_PYTHON) -m venv --prompt httpie $(VENV_ROOT)
@echo
@echo done.
@echo
@echo To activate it manually, run:
@echo
@echo " source $(VENV_BIN)/activate"
@echo
@echo '(learn more: https://docs.python.org/3/library/venv.html)'
@echo
@if [ -n "$(WORKON_HOME)" ]; then \
echo $(ROOT_DIR) > $(VENV_ROOT)/.project; \
if [ ! -d $(WORKON_HOME)/httpie -a ! -L $(WORKON_HOME)/httpie ]; then \
ln -s $(ROOT_DIR)/$(VENV_ROOT) $(WORKON_HOME)/httpie ; \
echo ''; \
echo 'Since you use virtualenvwrapper, we created a symlink'; \
echo 'so you can also use "workon httpie" to activate the venv.'; \
echo ''; \
fi; \
fi
###############################################################################
# Testing
###############################################################################
test:
@echo $(H1)Running tests$(HEADER_EXTRA)$(H1END)
$(VENV_BIN)/python -m pytest $(COV)
@echo
test-cover: COV=--cov=httpie --cov=tests
test-cover: HEADER_EXTRA=' (with coverage)'
test-cover: test
# test-all is meant to test everything — even this Makefile
test-all: clean install test test-dist codestyle
@echo
test-dist: test-sdist test-bdist-wheel
@echo
test-sdist: clean venv
@echo $(H1)Testing sdist build and installation$(H1END)
$(VENV_PYTHON) setup.py sdist
$(VENV_PIP) install --force-reinstall --upgrade dist/*.gz
$(VENV_BIN)/http --version
@echo
test-bdist-wheel: clean venv
@echo $(H1)Testing wheel build and installation$(H1END)
$(VENV_PIP) install wheel
$(VENV_PYTHON) setup.py bdist_wheel
$(VENV_PIP) install --force-reinstall --upgrade dist/*.whl
$(VENV_BIN)/http --version
@echo
twine-check:
twine check dist/*
# Kept for convenience, "make codestyle" is preferred though
pycodestyle: codestyle
codestyle:
@echo $(H1)Running flake8$(H1END)
@[ -f $(VENV_BIN)/flake8 ] || $(VENV_PIP) install --upgrade --editable '.[dev]'
$(VENV_BIN)/flake8 httpie/ tests/ extras/profiling/ docs/packaging/brew/ *.py
@echo
codecov-upload:
@echo $(H1)Running codecov$(H1END)
@[ -f $(VENV_BIN)/codecov ] || $(VENV_PIP) install codecov
# $(VENV_BIN)/codecov --required
$(VENV_BIN)/codecov
@echo
doc-check:
@echo $(H1)Running documentation checks$(H1END)
mdl --git-recurse --style docs/markdownlint.rb .
doc-update-install:
@echo $(H1)Updating installation instructions in the docs$(H1END)
$(VENV_PYTHON) docs/installation/generate.py
###############################################################################
# Publishing to PyPi
###############################################################################
build:
rm -rf build/
$(VENV_PYTHON) setup.py sdist bdist_wheel
publish: test-all publish-no-test
publish-no-test:
@echo $(H1)Testing wheel build and installation$(H1END)
@echo "$(VERSION)"
@echo "$(VERSION)" | grep -q "dev" && echo '!!!Not publishing dev version!!!' && exit 1 || echo ok
make build
make twine-check
$(VENV_BIN)/twine upload --repository=httpie dist/*
@echo
###############################################################################
# Uninstalling
###############################################################################
uninstall-httpie:
@echo $(H1)Uninstalling httpie$(H1END)
- $(VENV_PIP) uninstall --yes httpie &2>/dev/null
@echo "Verifying…"
cd .. && ! $(VENV_PYTHON) -m httpie --version &2>/dev/null
@echo "Done"
@echo
###############################################################################
# Homebrew
###############################################################################
brew-deps:
docs/packaging/brew/brew-deps.py
brew-test:
@echo $(H1)Uninstalling httpie$(H1END)
- brew uninstall httpie
@echo $(H1)Building from source…$(H1END)
- brew install --build-from-source ./docs/packaging/brew/httpie.rb
@echo $(H1)Verifying…$(H1END)
http --version
https --version
@echo $(H1)Auditing…$(H1END)
brew audit --strict httpie

View File

@ -1,84 +0,0 @@
<br/>
<a href="https://httpie.io" target="blank_">
<img height="100" alt="HTTPie" src="https://raw.githubusercontent.com/httpie/httpie/master/docs/httpie-logo.svg" />
</a>
<br/>
# HTTPie: human-friendly CLI HTTP client for the API era
HTTPie (pronounced _aitch-tee-tee-pie_) is a command-line HTTP client.
Its goal is to make CLI interaction with web services as human-friendly as possible.
HTTPie is designed for testing, debugging, and generally interacting with APIs & HTTP servers.
The `http` & `https` commands allow for creating and sending arbitrary HTTP requests.
They use simple and natural syntax and provide formatted and colorized output.
[![Docs](https://img.shields.io/badge/stable%20docs-httpie.io%2Fdocs-brightgreen?style=flat&color=%2373DC8C&label=Docs)](https://httpie.org/docs)
[![Latest version](https://img.shields.io/pypi/v/httpie.svg?style=flat&label=Latest&color=%234B78E6&logo=&logoColor=white)](https://pypi.python.org/pypi/httpie)
[![Build](https://img.shields.io/github/workflow/status/httpie/httpie/Build?color=%23FA9BFA&label=Build)](https://github.com/httpie/httpie/actions)
[![Coverage](https://img.shields.io/codecov/c/github/httpie/httpie?style=flat&label=Coverage&color=%2373DC8C)](https://codecov.io/gh/httpie/httpie)
[![Twitter](https://img.shields.io/twitter/follow/httpie?style=flat&color=%234B78E6&logoColor=%234B78E6)](https://twitter.com/httpie)
[![Chat](https://img.shields.io/badge/chat-Discord-brightgreen?style=flat&label=Chat%20on&color=%23FA9BFA)](https://httpie.io/discord)
<img src="https://raw.githubusercontent.com/httpie/httpie/master/docs/httpie-animation.gif" alt="HTTPie in action" width="100%"/>
## Getting started
- [Installation instructions →](https://httpie.io/docs#installation)
- [Full documentation →](https://httpie.io/docs)
## Features
- Expressive and intuitive syntax
- Formatted and colorized terminal output
- Built-in JSON support
- Forms and file uploads
- HTTPS, proxies, and authentication
- Arbitrary request data
- Custom headers
- Persistent sessions
- `wget`-like downloads
[See all features →](https://httpie.io/docs)
## Examples
Hello World:
```bash
$ https httpie.io/hello
```
Custom [HTTP method](https://httpie.io/docs#http-method), [HTTP headers](https://httpie.io/docs#http-headers) and [JSON](https://httpie.io/docs#json) data:
```bash
$ http PUT pie.dev/put X-API-Token:123 name=John
```
Build and print a request without sending it using [offline mode](https://httpie.io/docs#offline-mode):
```bash
$ http --offline pie.dev/post hello=offline
```
Use [GitHub API](https://developer.github.com/v3/issues/comments/#create-a-comment) to post a comment on an [Issue](https://github.com/httpie/httpie/issues/83) with [authentication](https://httpie.io/docs#authentication):
```bash
$ http -a USERNAME POST https://api.github.com/repos/httpie/httpie/issues/83/comments body='HTTPie is awesome! :heart:'
```
[See more examples →](https://httpie.io/docs#examples)
## Community & support
- Visit the [HTTPie website](https://httpie.io) for full documentation and useful links.
- Join our [Discord server](https://httpie.io/discord) to ask questions, discuss features, and chat about APIs in general.
- Tweet at [@httpie](https://twitter.com/httpie) on Twitter.
- Use [StackOverflow](https://stackoverflow.com/questions/tagged/httpie) to ask questions and include the `httpie` tag.
- Create [GitHub Issues](https://github.com/httpie/httpie/issues) for bug reports and feature requests.
- Subscribe to the [HTTPie newsletter](https://httpie.io) for occasional updates.
## Contributing
Have a look through existing [Issues](https://github.com/httpie/httpie/issues) and [Pull Requests](https://github.com/httpie/httpie/pulls) that you could help with. If you'd like to request a feature or report a bug, please [create a GitHub Issue](https://github.com/httpie/httpie/issues) using one of the templates provided.
[See contribution guide →](https://github.com/httpie/httpie/blob/master/CONTRIBUTING.md)

README.rst (1198 lines)

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@ -1,5 +0,0 @@
{
"website": {
"master_and_released_docs_differ_after": null
}
}

View File

@ -1,3 +0,0 @@
Here we maintain a database of contributors, from which we generate credits for release blog posts and social media.
For the HTTPie blog see: <https://httpie.io/blog>.

View File

@ -1,280 +0,0 @@
"""
Generate the contributors database.
FIXME: replace `requests` calls with the HTTPie API, when available.
"""
import json
import os
import re
import sys
from copy import deepcopy
from datetime import datetime
from pathlib import Path
from subprocess import check_output
from time import sleep
from typing import Any, Dict, Optional, Set
import requests
FullNames = Set[str]
GitHubLogins = Set[str]
Person = Dict[str, str]
People = Dict[str, Person]
UserInfo = Dict[str, Any]
CO_AUTHORS = re.compile(r'Co-authored-by: ([^<]+) <').finditer
API_URL = 'https://api.github.com'
REPO = OWNER = 'httpie'
REPO_URL = f'{API_URL}/repos/{REPO}/{OWNER}'
HERE = Path(__file__).parent
DB_FILE = HERE / 'people.json'
DEFAULT_PERSON: Person = {'committed': [], 'reported': [], 'github': '', 'twitter': ''}
SKIPPED_LABELS = {'invalid'}
GITHUB_TOKEN = os.getenv('GITHUB_TOKEN')
assert GITHUB_TOKEN, 'GITHUB_TOKEN envar is missing'
class FinishedForNow(Exception):
"""Raised when remaining GitHub rate limit is zero."""
def main(previous_release: str, current_release: str) -> int:
since = release_date(previous_release)
until = release_date(current_release)
contributors = load_awesome_people()
try:
committers = find_committers(since, until)
reporters = find_reporters(since, until)
except Exception as exc:
# We want to save what we fetched so far. So pass.
print(' !! ', exc)
try:
merge_all_the_people(current_release, contributors, committers, reporters)
fetch_missing_users_details(contributors)
except FinishedForNow:
# We want to save what we fetched so far. So pass.
print(' !! Committers:', committers)
print(' !! Reporters:', reporters)
exit_status = 1
else:
exit_status = 0
save_awesome_people(contributors)
return exit_status
def find_committers(since: str, until: str) -> FullNames:
url = f'{REPO_URL}/commits'
page = 1
per_page = 100
params = {
'since': since,
'until': until,
'per_page': per_page,
}
committers: FullNames = set()
while 'there are commits':
params['page'] = page
data = fetch(url, params=params)
for item in data:
commit = item['commit']
committers.add(commit['author']['name'])
debug(' >>> Commit', item['html_url'])
for co_author in CO_AUTHORS(commit['message']):
name = co_author.group(1)
committers.add(name)
if len(data) < per_page:
break
page += 1
return committers
def find_reporters(since: str, until: str) -> GitHubLogins:
url = f'{API_URL}/search/issues'
page = 1
per_page = 100
params = {
'q': f'repo:{REPO}/{OWNER} is:issue closed:{since}..{until}',
'per_page': per_page,
}
reporters: GitHubLogins = set()
while 'there are issues':
params['page'] = page
data = fetch(url, params=params)
for item in data['items']:
# Filter out unwanted labels.
if any(label['name'] in SKIPPED_LABELS for label in item['labels']):
continue
debug(' >>> Issue', item['html_url'])
reporters.add(item['user']['login'])
if len(data['items']) < per_page:
break
page += 1
return reporters
def merge_all_the_people(release: str, contributors: People, committers: FullNames, reporters: GitHubLogins) -> None:
"""
>>> contributors = {'Alice': new_person(github='alice', twitter='alice')}
>>> merge_all_the_people('2.6.0', contributors, {}, {})
>>> contributors
{'Alice': {'committed': [], 'reported': [], 'github': 'alice', 'twitter': 'alice'}}
>>> contributors = {'Bob': new_person(github='bob', twitter='bob')}
>>> merge_all_the_people('2.6.0', contributors, {'Bob'}, {'bob'})
>>> contributors
{'Bob': {'committed': ['2.6.0'], 'reported': ['2.6.0'], 'github': 'bob', 'twitter': 'bob'}}
>>> contributors = {'Charlotte': new_person(github='charlotte', twitter='charlotte', committed=['2.5.0'], reported=['2.5.0'])}
>>> merge_all_the_people('2.6.0', contributors, {'Charlotte'}, {'charlotte'})
>>> contributors
{'Charlotte': {'committed': ['2.5.0', '2.6.0'], 'reported': ['2.5.0', '2.6.0'], 'github': 'charlotte', 'twitter': 'charlotte'}}
"""
# Update known contributors.
for name, details in contributors.items():
if name in committers:
if release not in details['committed']:
details['committed'].append(release)
committers.remove(name)
if details['github'] in reporters:
if release not in details['reported']:
details['reported'].append(release)
reporters.remove(details['github'])
# Add new committers.
for name in committers:
user_info = user(fullname=name)
contributors[name] = new_person(
github=user_info['login'],
twitter=user_info['twitter_username'],
committed=[release],
)
if user_info['login'] in reporters:
contributors[name]['reported'].append(release)
reporters.remove(user_info['login'])
# Add new reporters.
for github_username in reporters:
user_info = user(github_username=github_username)
contributors[user_info['name'] or user_info['login']] = new_person(
github=github_username,
twitter=user_info['twitter_username'],
reported=[release],
)
def release_date(release: str) -> str:
date = check_output(['git', 'log', '-1', '--format=%ai', release], text=True).strip()
return datetime.strptime(date, '%Y-%m-%d %H:%M:%S %z').isoformat()
def load_awesome_people() -> People:
try:
with DB_FILE.open(encoding='utf-8') as fh:
return json.load(fh)
except (FileNotFoundError, ValueError):
return {}
def fetch(url: str, params: Optional[Dict[str, str]] = None) -> UserInfo:
headers = {
'Accept': 'application/vnd.github.v3+json',
'Authorization': f'token {GITHUB_TOKEN}'
}
for retry in range(1, 6):
debug(f'[{retry}/5]', f'{url = }', f'{params = }')
with requests.get(url, params=params, headers=headers) as req:
try:
req.raise_for_status()
except requests.exceptions.HTTPError as exc:
if exc.response.status_code == 403:
# 403 Client Error: rate limit exceeded for url: ...
now = int(datetime.utcnow().timestamp())
xrate_limit_reset = int(exc.response.headers['X-RateLimit-Reset'])
wait = xrate_limit_reset - now
if wait > 20:
raise FinishedForNow()
debug(' !', 'Waiting', wait, 'seconds before another try ...')
sleep(wait)
continue
return req.json()
raise ValueError('Rate limit exceeded')
def new_person(**kwargs: str) -> Person:
data = deepcopy(DEFAULT_PERSON)
data.update(**kwargs)
return data
def user(fullname: Optional[str] = '', github_username: Optional[str] = '') -> UserInfo:
if github_username:
url = f'{API_URL}/users/{github_username}'
return fetch(url)
url = f'{API_URL}/search/users'
for query in (f'fullname:{fullname}', f'user:{fullname}'):
params = {
'q': f'repo:{REPO}/{OWNER} {query}',
'per_page': 1,
}
user_info = fetch(url, params=params)
if user_info['items']:
user_url = user_info['items'][0]['url']
return fetch(user_url)
def fetch_missing_users_details(people: People) -> None:
for name, details in people.items():
if details['github'] and details['twitter']:
continue
user_info = user(github_username=details['github'], fullname=name)
if not details['github']:
details['github'] = user_info['login']
if not details['twitter']:
details['twitter'] = user_info['twitter_username']
def save_awesome_people(people: People) -> None:
with DB_FILE.open(mode='w', encoding='utf-8') as fh:
json.dump(people, fh, indent=4, sort_keys=True)
def debug(*args: Any) -> None:
if os.getenv('DEBUG') == '1':
print(*args)
if __name__ == '__main__':
ret = 1
try:
ret = main(*sys.argv[1:])
except TypeError:
ret = 2
print(f'''
Fetch contributors to a release.
Usage:
python {sys.argv[0]} <RELEASE N-1> <RELEASE N>
Example:
python {sys.argv[0]} 2.4.0 2.5.0
Define the DEBUG=1 environment variable to enable verbose output.
''')
except KeyboardInterrupt:
ret = 255
sys.exit(ret)
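For illustration, a typical run might look like the following; the script path is an assumption (adjust it to wherever `fetch.py` lives in your checkout), and `GITHUB_TOKEN` must be set, as asserted at the top of the script:

```bash
# Hypothetical path; DEBUG=1 enables the verbose output mentioned in the usage text
$ export GITHUB_TOKEN=<your personal access token>
$ DEBUG=1 python docs/contributors/fetch.py 2.5.0 2.6.0
```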

View File

@ -1,45 +0,0 @@
"""
Generate snippets to copy-paste.
"""
import sys
from jinja2 import Template
from fetch import HERE, load_awesome_people
TPL_FILE = HERE / 'snippet.jinja2'
HTTPIE_TEAM = {
'claudiatd',
'jakubroztocil',
'jkbr',
}
def generate_snippets(release: str) -> int:
people = load_awesome_people()
contributors = {
name: details
for name, details in people.items()
if details['github'] not in HTTPIE_TEAM
and (release in details['committed'] or release in details['reported'])
}
template = Template(source=TPL_FILE.read_text(encoding='utf-8'))
output = template.render(contributors=contributors, release=release)
print(output)
return 0
if __name__ == '__main__':
ret = 1
try:
ret = generate_snippets(sys.argv[1])
except (IndexError, TypeError):
ret = 2
print(f'''
Generate snippets for contributors to a release.
Usage:
python {sys.argv[0]} <RELEASE>
''')
sys.exit(ret)
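For illustration, and assuming the same directory layout as for `fetch.py` above, rendering the snippets for a release might look like:

```bash
# Hypothetical path; prints the blog and Twitter snippets to stdout
$ python docs/contributors/generate.py 2.6.0
```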

View File

@ -1,630 +0,0 @@
{
"Aaron Miller": {
"committed": [],
"github": "aaronhmiller",
"reported": [
"3.0.0"
],
"twitter": "aaronmiller8"
},
"Alexander Bogdanov": {
"committed": [],
"github": "ab-kily",
"reported": [
"3.0.0"
],
"twitter": null
},
"Almad": {
"committed": [
"2.5.0"
],
"github": "Almad",
"reported": [
"2.6.0",
"3.0.0"
],
"twitter": "almadcz"
},
"Andr\u00e1s Czig\u00e1ny": {
"committed": [],
"github": "andrascz",
"reported": [
"3.0.0"
],
"twitter": null
},
"Annette Wilson": {
"committed": [],
"github": "annettejanewilson",
"reported": [
"2.6.0",
"3.0.0"
],
"twitter": null
},
"Anton Emelyanov": {
"committed": [
"2.5.0"
],
"github": "king-menin",
"reported": [],
"twitter": null
},
"Batuhan Taskaya": {
"committed": [
"3.0.0"
],
"github": "isidentical",
"reported": [
"3.0.0"
],
"twitter": "isidentical"
},
"Brad Crittenden": {
"committed": [],
"github": "bac",
"reported": [
"3.0.0"
],
"twitter": null
},
"Chad": {
"committed": [],
"github": "cythrawll",
"reported": [
"3.0.0"
],
"twitter": null
},
"D8ger": {
"committed": [],
"github": "caofanCPU",
"reported": [
"2.5.0"
],
"twitter": null
},
"Dave": {
"committed": [
"2.6.0",
"3.0.0"
],
"github": "davecheney",
"reported": [],
"twitter": "davecheney"
},
"Dawid Ferenczy Rogo\u017ean": {
"committed": [],
"github": "ferenczy",
"reported": [
"2.5.0"
],
"twitter": "DawidFerenczy"
},
"Ed Rooth": {
"committed": [],
"github": "sym3tri",
"reported": [
"3.0.0"
],
"twitter": null
},
"Elena Lape": {
"committed": [
"2.5.0"
],
"github": "elenalape",
"reported": [],
"twitter": "elena_lape"
},
"Fabio Peruzzo": {
"committed": [],
"github": "peruzzof",
"reported": [
"2.6.0",
"3.0.0"
],
"twitter": null
},
"F\u00fash\u0113ng": {
"committed": [],
"github": "lienide",
"reported": [
"2.5.0"
],
"twitter": null
},
"Gabriel Cruz": {
"committed": [],
"github": "gmelodie",
"reported": [
"3.0.0"
],
"twitter": "gmelodiecruz"
},
"Gaurav": {
"committed": [
"3.0.0"
],
"github": "gkcs",
"reported": [],
"twitter": null
},
"Giampaolo Rodola": {
"committed": [],
"github": "giampaolo",
"reported": [
"2.5.0"
],
"twitter": null
},
"Greg Myers": {
"committed": [
"3.0.0"
],
"github": "myersg86",
"reported": [],
"twitter": null
},
"Hugh Williams": {
"committed": [],
"github": "hughpv",
"reported": [
"2.5.0"
],
"twitter": null
},
"Ilya Sukhanov": {
"committed": [
"2.5.0"
],
"github": "IlyaSukhanov",
"reported": [
"2.5.0"
],
"twitter": null
},
"Jakub Roztocil": {
"committed": [
"2.5.0",
"2.6.0",
"3.0.0"
],
"github": "jakubroztocil",
"reported": [
"2.5.0",
"2.6.0",
"3.0.0"
],
"twitter": "jakubroztocil"
},
"Jan Bra\u0161na": {
"committed": [
"3.0.0"
],
"github": "janbrasna",
"reported": [],
"twitter": "janbrasna"
},
"Jan Verbeek": {
"committed": [
"2.5.0"
],
"github": "blyxxyz",
"reported": [
"3.0.0"
],
"twitter": null
},
"Jannik Vieten": {
"committed": [
"2.5.0"
],
"github": "exploide",
"reported": [],
"twitter": null
},
"Jesper Holmberg": {
"committed": [],
"github": "strindberg",
"reported": [
"3.0.0"
],
"twitter": null
},
"Kirill Krasnov": {
"committed": [],
"github": "Kirill",
"reported": [
"3.0.0"
],
"twitter": null
},
"Marcel St\u00f6r": {
"committed": [
"2.5.0"
],
"github": "marcelstoer",
"reported": [],
"twitter": "frightanic"
},
"Marco Seguri": {
"committed": [],
"github": "seguri",
"reported": [
"3.0.0"
],
"twitter": null
},
"Mariano Ruiz": {
"committed": [],
"github": "mrsarm",
"reported": [
"2.5.0"
],
"twitter": "mrsarm82"
},
"Mark Rosenbaum": {
"committed": [],
"github": "markrosenbaum",
"reported": [
"3.0.0"
],
"twitter": null
},
"Micka\u00ebl Schoentgen": {
"committed": [
"2.5.0",
"2.6.0",
"3.0.0"
],
"github": "BoboTiG",
"reported": [
"2.5.0",
"2.6.0",
"3.0.0"
],
"twitter": "__tiger222__"
},
"Mike DePalatis": {
"committed": [],
"github": "mivade",
"reported": [
"3.0.0"
],
"twitter": null
},
"Miro Hron\u010dok": {
"committed": [
"2.5.0",
"2.6.0",
"3.0.0"
],
"github": "hroncok",
"reported": [],
"twitter": "hroncok"
},
"Mohamed Daahir": {
"committed": [],
"github": "ducaale",
"reported": [
"2.5.0"
],
"twitter": null
},
"Nanashi.": {
"committed": [],
"github": "sevenc-nanashi",
"reported": [
"3.0.0"
],
"twitter": "sevenc_nanashi"
},
"Omer Akram": {
"committed": [
"2.6.0",
"3.0.0"
],
"github": "om26er",
"reported": [
"2.6.0",
"3.0.0"
],
"twitter": "om26er"
},
"Patrick Taylor": {
"committed": [],
"github": "pmeister",
"reported": [
"3.0.0"
],
"twitter": null
},
"Paul Laffitte": {
"committed": [],
"github": "paullaffitte",
"reported": [
"3.0.0"
],
"twitter": "plaffitt"
},
"Pavel Alexeev aka Pahan-Hubbitus": {
"committed": [],
"github": "Hubbitus",
"reported": [
"2.5.0"
],
"twitter": null
},
"Russell Shurts": {
"committed": [],
"github": "rshurts",
"reported": [
"3.0.0"
],
"twitter": null
},
"Samuel Marks": {
"committed": [],
"github": "SamuelMarks",
"reported": [
"2.5.0"
],
"twitter": null
},
"Sebastian Czech": {
"committed": [
"3.0.0"
],
"github": "sebastianczech",
"reported": [],
"twitter": "sebaczech"
},
"Sullivan SENECHAL": {
"committed": [],
"github": "soullivaneuh",
"reported": [
"2.5.0"
],
"twitter": null
},
"Thomas Klinger": {
"committed": [],
"github": "mosesontheweb",
"reported": [
"2.5.0"
],
"twitter": null
},
"Vincent van \u2019t Zand": {
"committed": [],
"github": "vovtz",
"reported": [
"2.6.0",
"3.0.0"
],
"twitter": null
},
"Vivaan Verma": {
"committed": [
"3.0.0"
],
"github": "doublevcodes",
"reported": [],
"twitter": "doublevcodes"
},
"Vladimir Berkutov": {
"committed": [
"3.0.0"
],
"github": "dair-targ",
"reported": [],
"twitter": null
},
"Will Rogers": {
"committed": [],
"github": "wjrogers",
"reported": [
"3.0.0"
],
"twitter": null
},
"Yannic Schneider": {
"committed": [],
"github": "cynay",
"reported": [
"2.5.0"
],
"twitter": null
},
"a1346054": {
"committed": [
"2.5.0"
],
"github": "a1346054",
"reported": [],
"twitter": null
},
"arloan": {
"committed": [],
"github": "arloan",
"reported": [
"3.0.0"
],
"twitter": null
},
"bl-ue": {
"committed": [
"2.5.0"
],
"github": "FiReBlUe45",
"reported": [],
"twitter": null
},
"blueray453": {
"committed": [],
"github": "blueray453",
"reported": [
"3.0.0"
],
"twitter": null
},
"claudiatd": {
"committed": [
"2.6.0",
"3.0.0"
],
"github": "claudiatd",
"reported": [],
"twitter": null
},
"coldcoff": {
"committed": [],
"github": "coldcoff",
"reported": [
"3.0.0"
],
"twitter": null
},
"dkreeft": {
"committed": [
"2.6.0",
"3.0.0"
],
"github": "dkreeft",
"reported": [],
"twitter": null
},
"greg": {
"committed": [
"3.0.0"
],
"github": "gregkh",
"reported": [],
"twitter": null
},
"henryhu712": {
"committed": [
"2.5.0"
],
"github": "henryhu712",
"reported": [],
"twitter": null
},
"hosseingt": {
"committed": [
"3.0.0"
],
"github": "hosseingt",
"reported": [],
"twitter": null
},
"jakubroztocil": {
"committed": [
"2.6.0",
"3.0.0"
],
"github": "jkbr",
"reported": [],
"twitter": null
},
"josephworks": {
"committed": [],
"github": "josephworks",
"reported": [
"3.0.0"
],
"twitter": null
},
"jungle-boogie": {
"committed": [],
"github": "jungle-boogie",
"reported": [
"2.5.0"
],
"twitter": null
},
"luisuimi": {
"committed": [],
"github": "luisuimi",
"reported": [
"3.0.0"
],
"twitter": null
},
"nixbytes": {
"committed": [
"2.5.0"
],
"github": "nixbytes",
"reported": [],
"twitter": "linuxbyte3"
},
"peterpt": {
"committed": [],
"github": "peterpt",
"reported": [
"3.0.0"
],
"twitter": null
},
"qiulang": {
"committed": [],
"github": "qiulang",
"reported": [
"2.5.0"
],
"twitter": null
},
"stonebig": {
"committed": [],
"github": "stonebig",
"reported": [
"3.0.0"
],
"twitter": null
},
"whodidthis": {
"committed": [],
"github": "whodidthis",
"reported": [
"3.0.0"
],
"twitter": null
},
"zoulja": {
"committed": [],
"github": "zoulja",
"reported": [
"3.0.0"
],
"twitter": null
},
"zwx00": {
"committed": [],
"github": "zwx00",
"reported": [
"2.5.0"
],
"twitter": null
},
"\u5d14\u5c0f\u4e8c": {
"committed": [],
"github": "rogerdehe",
"reported": [
"2.6.0",
"3.0.0"
],
"twitter": null
},
"\u9ec4\u6d77": {
"committed": [],
"github": "hh-in-zhuzhou",
"reported": [
"2.6.0",
"3.0.0"
],
"twitter": null
}
}

View File

@ -1,13 +0,0 @@
<!-- Blog post -->
## Community contributions
We'd like to thank these amazing people for their contributions to this release: {% for name, details in contributors.items() -%}
[{{ name }}](https://github.com/{{ details.github }}){{ '' if loop.last else ', ' }}
{%- endfor %}.
<!-- Twitter -->
We'd like to thank these amazing people for their contributions to HTTPie {{ release }}: {% for name, details in contributors.items() if details.twitter -%}
@{{ details.twitter }}{{ '' if loop.last else ', ' }}
{%- endfor %} 🥧

Binary file not shown.

Size: 1019 KiB

View File

@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 1635.31 470"><defs><style>.cls-1{fill:#4b78e6;}</style></defs><g id="Layer_2" data-name="Layer 2"><g id="Layer_1-2" data-name="Layer 1"><path class="cls-1" d="M1322.19,73.91h0a36.56,36.56,0,0,1,36.56-36.29h3.41a36.56,36.56,0,0,1,36.56,36.83h0a36.56,36.56,0,0,1-36.56,36.29h-3.41A36.56,36.56,0,0,1,1322.19,73.91Zm6.16,276.93V142.35a7.94,7.94,0,0,1,8-7.94h48.32a7.93,7.93,0,0,1,7.94,7.94V350.84a7.94,7.94,0,0,1-7.94,7.94H1336.3A8,8,0,0,1,1328.35,350.84Z"/><path class="cls-1" d="M1635.31,233.34c0-61.06-33.28-105.09-101.71-105.09-72.17,0-114.82,45.45-114.82,123.08,0,74.79,46.86,113.6,113.89,113.6,56.83,0,85.93-27.17,98.33-63.86a8,8,0,0,0-5.34-10.28l-40.32-11.39a8,8,0,0,0-9.54,4.73c-5.77,14.37-16.57,25.42-42.2,25.42-29.32,0-46.06-13.62-50.74-44.29a7.17,7.17,0,0,0,.81.08h143.7a8,8,0,0,0,7.94-7.94V242.23c0-.09,0-.18,0-.28C1635.31,239.17,1635.31,236.36,1635.31,233.34Zm-103.58-51.6c28.59,0,43.12,15.15,45,45H1483C1487.21,195,1503.61,181.74,1531.73,181.74Z"/><path class="cls-1" d="M581.91,358.75H533.56a7.93,7.93,0,0,1-7.94-7.94V76.39a7.93,7.93,0,0,1,7.94-7.94h48.35a7.93,7.93,0,0,1,7.94,7.94v84.66a6,6,0,0,0,11.22,2.77c13.45-25.56,34.68-35.33,60.42-35.69,38.66-.55,70,31.45,70,70.12V350.81a7.94,7.94,0,0,1-7.94,7.94H675.63a7.94,7.94,0,0,1-7.94-7.94V227.1c0-23.21-10.32-40.73-37-40.73-25.79,0-40.8,15.15-40.8,40.73V350.81A7.93,7.93,0,0,1,581.91,358.75Z"/><path class="cls-1" d="M1052.84,306.12a7.94,7.94,0,0,0-9.77-6.78c-6.47,1.55-13.73,3.05-20.35,3.05-19.23,0-25.79-8.52-25.79-26.52V188.26h50.67a7.94,7.94,0,0,0,7.94-7.94v-38.1a8,8,0,0,0-7.94-7.95H996.93V85.86A7.94,7.94,0,0,0,989,77.92H941.1a7.93,7.93,0,0,0-7.94,7.94v48.41H842.67V85.86a7.94,7.94,0,0,0-7.94-7.94H786.84a7.93,7.93,0,0,0-7.94,7.94v48.41H761.05a7.94,7.94,0,0,0-7.94,7.95v38.1a7.93,7.93,0,0,0,7.94,7.94H778.9v99.93c0,42.62,21.57,77.19,73.15,77.19,21.16,0,32.43-2.5,46.08-6.56a8,8,0,0,0,5.65-8.56l-5.2-44.14a7.94,7.94,0,0,0-9.77-6.78c-6.47,1.55-13.73,3.05-20.35,3.05-19.23,0-25.79-8.52-25.79-26.52V188.26h90.49v99.93c0,42.62,21.57,77.19,73.14,77.19,21.17,0,32.44-2.5,46.09-6.56a8,8,0,0,0,5.65-8.56Z"/><path class="cls-1" d="M1219.14,365.27c-28.49,0-49.51-10.92-62.87-35.86a6,6,0,0,0-11.19,2.84v82.83a7.93,7.93,0,0,1-7.94,7.94h-48.32a7.94,7.94,0,0,1-8-7.94V142.21a8,8,0,0,1,8-7.94h48.32a7.94,7.94,0,0,1,7.94,7.94v18.95c0,6.13,8.21,8.3,11.15,2.92,13.74-25.16,35.63-36,64.31-36,53.43,0,81.08,44,81.08,116.92C1301.62,320.78,1273,365.27,1219.14,365.27Zm19.21-119.76c0-37.39-14.06-59.17-46.4-59.17-29.53,0-46.87,20.35-46.87,57.28v4.26c0,36.45,17.34,59.17,46.87,59.17C1223.82,307.05,1238.35,284.33,1238.35,245.51Z"/><path class="cls-1" d="M394.41,102.12C394,45.31,346.61,0,289.8,0H104.69C48.31,0,1.09,44.6,0,101A103.07,103.07,0,0,0,103,205.92h82.7a6,6,0,0,1,2.39,11.42L61.31,272.91A103.09,103.09,0,0,0,0,367.83C.43,424.65,47.79,470,104.62,470H148c57.23,0,104.88-45.9,104.79-103.13a103.1,103.1,0,0,0-64.49-95.31,5.94,5.94,0,0,1-.1-10.94l145-63.58A103.08,103.08,0,0,0,394.41,102.12Z"/></g></g></svg>

Size: 2.9 KiB

View File

@ -1,5 +0,0 @@
Here we maintain a database of installation methods, from which we generate
the installation section in the docs. If you'd like to add or update an installation method,
edit [methods.yml](./methods.yml); do not edit the main docs directly.
For HTTPie installation instructions see: <https://httpie.io/docs#installation>.
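As a hedged sketch, the regeneration step might look like this; the generator script name and location are assumptions based on the repository layout:

```bash
# Re-render the installation section into docs/README.md after editing methods.yml
$ python docs/installation/generate.py
# Review the regenerated block before committing
$ git diff docs/README.md
```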

View File

@ -1,85 +0,0 @@
import re
import sys
from pathlib import Path
from typing import Dict
import yaml
from jinja2 import Template
Database = Dict[str, dict]
# Files
HERE = Path(__file__).parent
DB_FILE = HERE / 'methods.yml'
DOC_FILE = HERE.parent / 'README.md'
TPL_FILE = HERE / 'installation.jinja2'
# Database keys
KEY_DOC_STRUCTURE = 'docs-structure'
KEY_TOOLS = 'tools'
# Markers in-between content will be put.
MARKER_START = '<div data-installation-instructions>'
MARKER_END = '</div>'
def generate_documentation() -> str:
database = load_database()
structure = build_docs_structure(database)
template = Template(source=TPL_FILE.read_text(encoding='utf-8'))
output = template.render(structure=structure)
output = clean_template_output(output)
return output
def save_doc_file(content: str) -> None:
current_doc = load_doc_file()
marker_start = current_doc.find(MARKER_START) + len(MARKER_START)
assert marker_start > 0, 'cannot find the start marker'
marker_end = current_doc.find(MARKER_END, marker_start)
assert marker_start < marker_end, f'{marker_end=} < {marker_start=}'
updated_doc = (
current_doc[:marker_start]
+ '\n\n'
+ content
+ '\n\n'
+ current_doc[marker_end:]
)
if current_doc != updated_doc:
DOC_FILE.write_text(updated_doc, encoding='utf-8')
def build_docs_structure(database: Database):
tools = database[KEY_TOOLS]
assert len(tools) == len({tool['title'] for tool in tools.values()}), 'tool titles need to be unique'
tree = database[KEY_DOC_STRUCTURE]
structure = []
for platform, tools_ids in tree.items():
assert platform.isalnum(), f'{platform=} must be alpha-numeric for generated links to work'
platform_tools = [tools[tool_id] for tool_id in tools_ids]
structure.append((platform, platform_tools))
return structure
def clean_template_output(output):
output = '\n'.join(line.strip() for line in output.strip().splitlines())
output = re.sub('\n{3,}', '\n\n', output)
return output
def load_database() -> Database:
return yaml.safe_load(DB_FILE.read_text(encoding='utf-8'))
def load_doc_file() -> str:
return DOC_FILE.read_text(encoding='utf-8')
def main() -> int:
content = generate_documentation()
save_doc_file(content)
return 0
if __name__ == '__main__':
sys.exit(main())

View File

@ -1,37 +0,0 @@
<!--
THE INSTALLATION SECTION IS GENERATED
Do not edit here, but in docs/installation/.
-->
{% for platform, tools in structure %}
- [{{ platform }}](#{{ platform.lower() }}){% endfor %} {# <= keep `endfor` here to prevent unwanted `\n` #}
{% for platform, tools in structure %}
### {{ platform }}
{% for tool in tools %}
#### {{ tool.title }}
{% if tool.note %}
{{ tool.note }}
{% endif %}
{% if tool.links.setup %}
To install [{{ tool.name }}]({{ tool.links.homepage }}), see [its installation]({{ tool.links.setup }}).
{% endif %}
```bash
# Install httpie
$ {{ tool.commands.install|join('\n$ ') }}
```
```bash
# Upgrade httpie
$ {{ tool.commands.upgrade|join('\n$ ') }}
```
{% endfor %}
{% endfor %}
<!-- /GENERATED SECTION -->

View File

@ -1,181 +0,0 @@
# Database of HTTPie installation methods. Used to build the docs.
#
# We currently only include here methods for popular systems where we take care of the package,
# or have a good relationship with the maintainers.
#
# Each tool name should be unique (it becomes a linkable header).
# If a tool has `links.setup`, it also needs `links.homepage`.
# Some tools are available on multiple platforms; take that into account when editing.
#
docs-structure:
Universal:
- pypi
macOS:
- brew-mac
- port
Windows:
- chocolatey
Linux:
- snap-linux
- brew-linux
- apt
- dnf
- yum
- pacman
FreeBSD:
- pkg
tools:
apt:
title: Debian and Ubuntu
note: Also works for other Debian-derived distributions like MX Linux, Linux Mint, deepin, Pop!_OS, KDE neon, Zorin OS, elementary OS, Kubuntu, Devuan, Linux Lite, Peppermint OS, Lubuntu, antiX, Xubuntu, etc.
name: APT
links:
homepage: https://en.wikipedia.org/wiki/APT_(software)
package: https://packages.debian.org/sid/web/httpie
commands:
install:
- apt update
- apt install httpie
upgrade:
- apt update
- apt upgrade httpie
brew-mac:
title: Homebrew
name: Homebrew
links:
homepage: https://brew.sh/
setup: https://docs.brew.sh/Installation
package: https://formulae.brew.sh/formula/httpie
commands:
install:
- brew update
- brew install httpie
upgrade:
- brew update
- brew upgrade httpie
brew-linux:
title: Linuxbrew
name: Linuxbrew
links:
homepage: https://docs.brew.sh/Homebrew-on-Linux
setup: https://docs.brew.sh/Homebrew-on-Linux#install
package: https://formulae.brew.sh/formula/httpie
commands:
install:
- brew update
- brew install httpie
upgrade:
- brew update
- brew upgrade httpie
chocolatey:
title: Chocolatey
name: Chocolatey
links:
homepage: https://chocolatey.org/
setup: https://chocolatey.org/install
package: https://community.chocolatey.org/packages/httpie/
commands:
install:
- choco install httpie
upgrade:
- choco upgrade httpie
dnf:
title: Fedora
name: DNF
links:
homepage: https://fedoraproject.org/wiki/DNF
package: https://src.fedoraproject.org/rpms/httpie
commands:
install:
- dnf install httpie
upgrade:
- dnf upgrade httpie
pacman:
title: Arch Linux
name: pacman
note: Also works for other Arch-derived distributions like ArcoLinux, EndeavourOS, Artix Linux, etc.
links:
homepage: https://archlinux.org/pacman/
package: https://archlinux.org/packages/community/any/httpie/
commands:
install:
- pacman -Sy httpie
upgrade:
- pacman -Syu httpie
pkg:
title: FreshPorts
name: FreshPorts
links:
homepage: https://www.freebsd.org/cgi/man.cgi?query=pkg&sektion=8&n=1
package: https://www.freshports.org/www/py-httpie/
commands:
install:
- pkg install www/py-httpie
upgrade:
- pkg upgrade www/py-httpie
port:
title: MacPorts
name: MacPorts
links:
homepage: https://www.macports.org/
setup: https://www.macports.org/install.php
package: https://ports.macports.org/port/httpie/
commands:
install:
- port selfupdate
- port install httpie
upgrade:
- port selfupdate
- port upgrade httpie
pypi:
title: PyPI
name: pip
note: Please make sure you have Python 3.7 or newer (`python --version`).
links:
homepage: https://pypi.org/
# setup: https://pip.pypa.io/en/stable/installation/
package: https://pypi.org/project/httpie/
commands:
install:
- python -m pip install --upgrade pip wheel
- python -m pip install httpie
upgrade:
- python -m pip install --upgrade pip wheel
- python -m pip install --upgrade httpie
snap-linux:
title: Snapcraft (Linux)
name: Snapcraft
links:
homepage: https://snapcraft.io/
setup: https://snapcraft.io/docs/installing-snapd
package: https://snapcraft.io/httpie
commands:
install:
- snap install httpie
upgrade:
- snap refresh httpie
yum:
title: CentOS and RHEL
name: Yum
note: Also works for other RHEL-derived distributions like ClearOS, Oracle Linux, etc.
links:
homepage: http://yum.baseurl.org/
package: https://src.fedoraproject.org/rpms/httpie
commands:
install:
- yum install epel-release
- yum install httpie
upgrade:
- yum upgrade httpie

View File

@ -1,47 +0,0 @@
# Rules for <https://github.com/markdownlint/markdownlint>
# Load all rules by default
all
#
# Tweak rules
#
# MD002 First header should be a top level header
# Because we use HTML to hide them on the website.
exclude_rule 'MD002'
# MD013 Line length
exclude_rule 'MD013'
# MD014 Dollar signs used before commands without showing output
exclude_rule 'MD014'
# MD028 Blank line inside blockquote
exclude_rule 'MD028'
# MD012 Multiple consecutive blank lines
exclude_rule 'MD012'
# Tell the linter to use ordered lists:
# 1. Foo
# 2. Bar
# 3. Baz
#
# Instead of:
# 1. Foo
# 1. Bar
# 1. Baz
rule 'MD029', :style => :ordered
# MD033 Inline HTML
# TODO: Tweak elements when https://github.com/markdownlint/markdownlint/issues/118 will be done?
exclude_rule 'MD033'
# MD034 Bare URL used
# TODO: Remove when https://github.com/markdownlint/markdownlint/issues/328 will be fixed.
exclude_rule 'MD034'
# MD041 First line in file should be a top level header
# Because we use HTML to hide them on the website.
exclude_rule 'MD041'
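As a rough sketch, these rules can be exercised locally with the Ruby `mdl` tool; the file path and the `gem install` step are assumptions about your setup:

```bash
# Hypothetical invocation, assuming this rules file is saved as docs/markdownlint.rb
$ gem install mdl
$ mdl --style docs/markdownlint.rb README.md docs/
```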

View File

@ -1,46 +0,0 @@
# HTTPie release process
Welcome to the documentation for the **HTTPie release process**.
- If you do not know HTTPie, have a look [here](https://httpie.io/cli).
- If you are looking for HTTPie installation or upgrade instructions, then you can find all you need for your OS on [that page](https://httpie.io/docs#installation). If you do not find your OS there, [let us know](https://github.com/httpie/httpie/issues/).
- If you are looking for technical information about the HTTPie packaging, then you are in the right place.
## About
You are looking at the HTTPie packaging documentation, where you will find valuable information about how we release HTTPie across many OSes, including technical details that may be worth reading if you are a package maintainer.
The overall release process starts simple:
1. Do the [PyPI](https://pypi.org/project/httpie/) publication.
2. Then, handle company-related tasks.
3. Finally, follow OS-specific steps, described in documents below, to send patches downstream.
## First, PyPI
Let's do the release on [PyPI](https://pypi.org/project/httpie/).
That is done quite easily by manually triggering the [release workflow](https://github.com/httpie/httpie/actions/workflows/release.yml).
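For instance, the workflow can be dispatched from the command line with the GitHub CLI; this is only a sketch and assumes `gh` is installed and authenticated, and that the workflow accepts manual dispatch:

```bash
# Trigger the release workflow on the main repository
$ gh workflow run release.yml --repo httpie/httpie
```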
## Then, company-specific tasks
- Blank the `master_and_released_docs_differ_after` value in [config.json](https://github.com/httpie/httpie/blob/master/docs/config.json).
- Update the [contributors list](../contributors).
- Update the HTTPie version bundled into [Termible](https://termible.io/) ([example](https://github.com/httpie/termible/pull/1)).
## Finally, spread downstream
Find out how we release new versions for each supported OS in the following table.
A more complete state of deployment can be found on [repology](https://repology.org/project/httpie/versions), including unofficial packages.
| OS | Maintainer |
| -------------------------------------------: | -------------- |
| [Arch Linux, and derived](linux-arch/) | trusted person |
| [CentOS, RHEL, and derived](linux-centos/) | trusted person |
| [Debian, Ubuntu, and derived](linux-debian/) | trusted person |
| [Fedora](linux-fedora/) | trusted person |
| :construction: [Homebrew, Linuxbrew](brew/) | **HTTPie** |
| :construction: [MacPorts](mac-ports/) | **HTTPie** |
| [Snapcraft](snapcraft/) | **HTTPie** |
| [Windows — Chocolatey](windows-chocolatey/) | **HTTPie** |
:new: Can't find your system, or would you like to see HTTPie supported on another OS? Then [let us know](https://github.com/httpie/httpie/issues/).

View File

@ -1,33 +0,0 @@
# HTTPie on Homebrew, and Linuxbrew
Welcome to the documentation about **packaging HTTPie for Homebrew**.
- If you do not know HTTPie, have a look [here](https://httpie.io/cli).
- If you are looking for HTTPie installation or upgrade instructions on Homebrew, then you can find them on [that page](https://httpie.io/docs#homebrew) ([that one](https://httpie.io/docs#linuxbrew) for Linuxbrew).
- If you are looking for technical information about the HTTPie packaging on Homebrew, then you are in the right place.
## About
This document contains technical details, where we describe how to create a patch for the latest HTTPie version for Homebrew. They apply to Linuxbrew as well.
We will discuss setting up the environment, installing development tools, installing and testing changes before submitting a patch downstream.
## Overall process
:construction: Work in progress.
First, update the current Formula:
```bash
make brew-deps
# Copy-paste content into downstream/mac/brew/httpie.rb
git add downstream/mac/brew/httpie.rb
git commit -s -m 'Update brew formula to XXX'
```
That [GitHub workflow](https://github.com/httpie/httpie/actions/workflows/test-package-mac-brew.yml) will test the formula when `downstream/mac/brew/httpie.rb` is changed in a pull request.
Then, open a pull request with those changes to the [downstream file](https://github.com/Homebrew/homebrew-core/blob/master/Formula/httpie.rb).
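One hedged way to prepare that downstream pull request is Homebrew's own helper; the URL and checksum below are placeholders to be replaced with the values printed by `make brew-deps`:

```bash
# Illustrative only; substitute the real sdist URL and sha256
$ brew bump-formula-pr --url '<sdist URL>' --sha256 '<sha256>' httpie
```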
## Hacking
:construction: Work in progress.

View File

@ -1,81 +0,0 @@
#!/usr/bin/env python3
"""
Generate Ruby code with URLs and file hashes for packages from PyPI
(i.e., httpie itself as well as its dependencies) to be included
in the Homebrew formula after a new release of HTTPie has been published
on PyPI.
<https://github.com/Homebrew/homebrew-core/blob/master/Formula/httpie.rb>
"""
import hashlib
import requests
VERSIONS = {
# By default, we use the latest packages. But sometimes Requests has a maximum supported version.
# Take a look here before making a release: <https://github.com/psf/requests/blob/master/setup.py>
'idna': '3.2',
}
# Note: Keep that list sorted.
PACKAGES = [
'certifi',
'charset-normalizer',
'defusedxml',
'httpie',
'idna',
'Pygments',
'PySocks',
'requests',
'requests-toolbelt',
'urllib3',
'multidict',
]
def get_package_meta(package_name):
api_url = f'https://pypi.org/pypi/{package_name}/json'
resp = requests.get(api_url).json()
hasher = hashlib.sha256()
version = VERSIONS.get(package_name)
if package_name not in VERSIONS:
# Latest version
release_bundle = resp['urls']
else:
release_bundle = resp['releases'][version]
for release in release_bundle:
download_url = release['url']
if download_url.endswith('.tar.gz'):
hasher.update(requests.get(download_url).content)
return {
'name': package_name,
'url': download_url,
'sha256': hasher.hexdigest(),
}
else:
raise RuntimeError(f'{package_name}: download not found: {resp}')
def main():
package_meta_map = {
package_name: get_package_meta(package_name)
for package_name in PACKAGES
}
httpie_meta = package_meta_map.pop('httpie')
print()
print(' url "{url}"'.format(url=httpie_meta['url']))
print(' sha256 "{sha256}"'.format(sha256=httpie_meta['sha256']))
print()
for dep_meta in package_meta_map.values():
print(' resource "{name}" do'.format(name=dep_meta['name']))
print(' url "{url}"'.format(url=dep_meta['url']))
print(' sha256 "{sha256}"'.format(sha256=dep_meta['sha256']))
print(' end')
print('')
if __name__ == '__main__':
main()
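A short sketch of how this script is meant to be used; the path matches the `brew-deps` Makefile target shown earlier:

```bash
# Prints the url/sha256 lines and one `resource ... do` block per dependency,
# ready to paste into the Homebrew formula
$ python docs/packaging/brew/brew-deps.py
```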

View File

@ -1,84 +0,0 @@
class Httpie < Formula
include Language::Python::Virtualenv
desc "User-friendly cURL replacement (command-line HTTP client)"
homepage "https://httpie.io/"
url "https://files.pythonhosted.org/packages/7b/f9/13070f19226b7db3641fb787df36bb715063abe1b8ca03fbaeca0f465d27/httpie-3.0.1.tar.gz"
sha256 "0e9bc93ebdcdd2d32ec24b8fa46cf7e4fde9eec7a6bd0c5d0ef224f25d7466b2"
license "BSD-3-Clause"
head "https://github.com/httpie/httpie.git", branch: "master"
bottle do
sha256 cellar: :any_skip_relocation, arm64_monterey: "9d285fcfb55ce8ed787d1b01966d51e6e07f7e77c44a204695a2d6eee9c8698d"
sha256 cellar: :any_skip_relocation, arm64_big_sur: "743a282b475e87a4eaf11e545f761aef1b8e4bfe49eaee47251d7629a35a8ced"
sha256 cellar: :any_skip_relocation, monterey: "5d63ea4f47b2028b2ba68abe12a4176934193e058edd869270221b41cc946c76"
sha256 cellar: :any_skip_relocation, big_sur: "5a53221a680a35d1aa00cbadde279dbe4f562d22ed207c15bd4221cb8c3180f1"
sha256 cellar: :any_skip_relocation, catalina: "5feadb6d76f55d6f9681682e221008c282dccf0e46ae22a959b4bad2efde204a"
sha256 cellar: :any_skip_relocation, x86_64_linux: "d530ddbec49588b0d481f156d35f7e5bb7d3b6427d203f04750e55cd3eecc303"
end
depends_on "python@3.10"
resource "certifi" do
url "https://files.pythonhosted.org/packages/6c/ae/d26450834f0acc9e3d1f74508da6df1551ceab6c2ce0766a593362d6d57f/certifi-2021.10.8.tar.gz"
sha256 "78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"
end
resource "charset-normalizer" do
url "https://files.pythonhosted.org/packages/48/44/76b179e0d1afe6e6a91fd5661c284f60238987f3b42b676d141d01cd5b97/charset-normalizer-2.0.10.tar.gz"
sha256 "876d180e9d7432c5d1dfd4c5d26b72f099d503e8fcc0feb7532c9289be60fcbd"
end
resource "defusedxml" do
url "https://files.pythonhosted.org/packages/0f/d5/c66da9b79e5bdb124974bfe172b4daf3c984ebd9c2a06e2b8a4dc7331c72/defusedxml-0.7.1.tar.gz"
sha256 "1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"
end
resource "idna" do
url "https://files.pythonhosted.org/packages/62/08/e3fc7c8161090f742f504f40b1bccbfc544d4a4e09eb774bf40aafce5436/idna-3.3.tar.gz"
sha256 "9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"
end
resource "multidict" do
url "https://files.pythonhosted.org/packages/8e/7c/e12a69795b7b7d5071614af2c691c97fbf16a2a513c66ec52dd7d0a115bb/multidict-5.2.0.tar.gz"
sha256 "0dd1c93edb444b33ba2274b66f63def8a327d607c6c790772f448a53b6ea59ce"
end
resource "Pygments" do
url "https://files.pythonhosted.org/packages/94/9c/cb656d06950268155f46d4f6ce25d7ffc51a0da47eadf1b164bbf23b718b/Pygments-2.11.2.tar.gz"
sha256 "4e426f72023d88d03b2fa258de560726ce890ff3b630f88c21cbb8b2503b8c6a"
end
resource "PySocks" do
url "https://files.pythonhosted.org/packages/bd/11/293dd436aea955d45fc4e8a35b6ae7270f5b8e00b53cf6c024c83b657a11/PySocks-1.7.1.tar.gz"
sha256 "3f8804571ebe159c380ac6de37643bb4685970655d3bba243530d6558b799aa0"
end
resource "requests" do
url "https://files.pythonhosted.org/packages/60/f3/26ff3767f099b73e0efa138a9998da67890793bfa475d8278f84a30fec77/requests-2.27.1.tar.gz"
sha256 "68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"
end
resource "requests-toolbelt" do
url "https://files.pythonhosted.org/packages/28/30/7bf7e5071081f761766d46820e52f4b16c8a08fef02d2eb4682ca7534310/requests-toolbelt-0.9.1.tar.gz"
sha256 "968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0"
end
resource "urllib3" do
url "https://files.pythonhosted.org/packages/b0/b1/7bbf5181f8e3258efae31702f5eab87d8a74a72a0aa78bc8c08c1466e243/urllib3-1.26.8.tar.gz"
sha256 "0e7c33d9a63e7ddfcb86780aac87befc2fbddf46c58dbb487e0855f7ceec283c"
end
def install
virtualenv_install_with_resources
end
test do
assert_match version.to_s, shell_output("#{bin}/httpie --version")
assert_match version.to_s, shell_output("#{bin}/https --version")
assert_match version.to_s, shell_output("#{bin}/http --version")
raw_url = "https://raw.githubusercontent.com/Homebrew/homebrew-core/HEAD/Formula/httpie.rb"
assert_match "PYTHONPATH", shell_output("#{bin}/http --ignore-stdin #{raw_url}")
end
end

View File

@ -1,47 +0,0 @@
# Maintainer: Jelle van der Waa <jelle@archlinux.org>
# Maintainer: daurnimator <daurnimator@archlinux.org>
# Contributor: Daniel Micay <danielmicay@gmail.com>
# Contributor: Thomas Weißschuh <thomas_weissschuh lavabit com>
pkgname=httpie
pkgver=2.6.0
pkgrel=1
pkgdesc="human-friendly CLI HTTP client for the API era"
url="https://github.com/httpie/httpie"
depends=('python-defusedxml'
'python-pygments'
'python-pysocks'
'python-requests'
'python-requests-toolbelt'
'python-charset-normalizer')
makedepends=('python-setuptools')
checkdepends=('python-pytest'
'python-pytest-httpbin'
'python-responses')
conflicts=(python-httpie)
replaces=(python-httpie python2-httpie)
license=('BSD')
arch=('any')
source=($pkgname-$pkgver.tar.gz::"https://github.com/httpie/httpie/archive/$pkgver.tar.gz")
sha256sums=('3bcd9a8cb2b11299da12d3af36c095c6d4b665e41c395898a07f1ae4d99fc14a')
build() {
cd $pkgname-$pkgver
python3 setup.py build
}
package() {
cd $pkgname-$pkgver
install -Dm644 LICENSE "$pkgdir/usr/share/licenses/httpie/LICENSE"
python3 setup.py install --root="$pkgdir" --optimize=1
# Fix upstream, include them in MANIFEST.in and use data_files in setup.py to install them automatically
# TODO: add zsh support
install -Dm644 extras/httpie-completion.bash "$pkgdir"/usr/share/bash-completion/completions/http
install -Dm644 extras/httpie-completion.fish "$pkgdir"/usr/share/fish/vendor_completions.d/http.fish
}
check() {
cd $pkgname-$pkgver
PYTHONDONTWRITEBYTECODE=1 pytest tests
}
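As a hedged illustration, a maintainer could build and test this PKGBUILD locally along these lines; the resulting package file name depends on the build environment:

```bash
# Run from the directory containing the PKGBUILD; --syncdeps pulls in makedepends/checkdepends
$ makepkg --syncdeps
# Install the built package (file name shown is an assumption)
$ sudo pacman -U httpie-2.6.0-1-any.pkg.tar.zst
```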

View File

@ -1,22 +0,0 @@
# HTTPie on Arch Linux, and derived
Welcome to the documentation about **packaging HTTPie for Arch Linux**.
- If you do not know HTTPie, have a look [here](https://httpie.io/cli).
- If you are looking for HTTPie installation or upgrade instructions on Arch Linux, then you can find them on [that page](https://httpie.io/docs#arch-linux).
- If you are looking for technical information about the HTTPie packaging on Arch Linux, then you are in the right place.
## About
This document contains technical details, where we describe how to create a patch for the latest HTTPie version for Arch Linux. They apply to Arch-derived distributions as well, like ArcoLinux, EndeavourOS, Artix Linux, etc.
We will discuss setting up the environment, installing development tools, installing and testing changes before submitting a patch downstream.
## Overall process
Note: Sending patches downstream does not seem easy. We could not find where the package file is located on <https://gitlab.archlinux.org>, so we are relying on the latest maintainer, daurnimator, and that has worked well so far.
Check <https://archlinux.org/packages/community/any/httpie/> and if the version is outdated, simply [report it](https://archlinux.org/packages/community/any/httpie/flag/).
## Hacking
Left blank on purpose; we will fill in this section once we have access to the downstream repository.

View File

@ -1,26 +0,0 @@
# HTTPie on CentOS, RHEL, and derived
Welcome to the documentation about **packaging HTTPie for CentOS and RHEL**.
- If you do not know HTTPie, have a look [here](https://httpie.io/cli).
- If you are looking for HTTPie installation or upgrade instructions on CentOS, then you can find them on [that page](https://httpie.io/docs#centos-and-rhel).
- If you are looking for technical information about the HTTPie packaging on CentOS, then you are in the right place.
## About
This document contains technical details, where we describe how to create a patch for the latest HTTPie version for CentOS. They apply to RHEL as well, and any RHEL-derived distributions like ClearOS, Oracle Linux, etc.
We will discuss setting up the environment, installing development tools, installing and testing changes before submitting a patch downstream.
The current maintainer is [Mikel Olasagasti](https://github.com/kaxero).
## Overall process
Same as [Fedora](../linux-fedora/README.md#overall-process).
## Q/A with Mikel
Q: What should we do to help seeing a new version on CentOS?
A: When a new release is published, Miro and I get notified by [release-monitoring](https://release-monitoring.org/project/1337/), which files a Bugzilla ticket reporting that a new version is available.
The system also tries to create a simple patch to update the spec file, but in the case of CentOS it needs some manual revision. For example, for 2.5.0 the `defusedxml` dep is required. Maybe with CentOS 9 and some new macros now available in Fedora it can be automated the same way. But even if the bump can be automated, maintainers should check for license changes, new binaries/docs, and so on.

View File

@ -1,29 +0,0 @@
# HTTPie on Debian, Ubuntu, and derived
Welcome to the documentation about **packaging HTTPie for Debian GNU/Linux**.
- If you do not know HTTPie, have a look [here](https://httpie.io/cli).
- If you are looking for HTTPie installation or upgrade instructions on Debian GNU/Linux, then you can find them on [that page](https://httpie.io/docs#debian-and-ubuntu).
- If you are looking for technical information about the HTTPie packaging on Debian GNU/Linux, then you are in the right place.
## About
This document contains technical details, where we describe how to create a patch for the latest HTTPie version for Debian GNU/Linux. They apply to Ubuntu as well, and any Debian-derived distributions like MX Linux, Linux Mint, deepin, Pop!_OS, KDE neon, Zorin OS, elementary OS, Kubuntu, Devuan, Linux Lite, Peppermint OS, Lubuntu, antiX, Xubuntu, etc.
We will discuss setting up the environment, installing development tools, installing and testing changes before submitting a patch downstream.
The current maintainer is Bartosz Fenski.
## Overall process
Open a new bug on the Debian Bug Tracking System by sending an email:
- To: `Debian Bug Tracking System <submit@bugs.debian.org>`
- Subject: `httpie: Version XXX available`
- Message template (examples [1](https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=993937), and [2](https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=996479)):
```email
Package: httpie
Severity: normal
<MESSAGE>
```
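Alternatively, the same report can be filed with Debian's interactive `reportbug` tool; this is only a sketch and assumes the `reportbug` package is installed:

```bash
# Walks you through composing and sending the bug report interactively
$ reportbug httpie
```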

View File

@ -1,48 +0,0 @@
# HTTPie on Fedora
Welcome to the documentation about **packaging HTTPie for Fedora**.
- If you do not know HTTPie, have a look [here](https://httpie.io/cli).
- If you are looking for HTTPie installation or upgrade instructions on Fedora, then you can find them on [that page](https://httpie.io/docs#fedora).
- If you are looking for technical information about the HTTPie packaging on Fedora, then you are in the right place.
## About
This document contains technical details, where we describe how to create a patch for the latest HTTPie version for Fedora.
We will discuss setting up the environment, installing development tools, installing and testing changes before submitting a patch downstream.
The current maintainer is [Miro Hrončok](https://github.com/hroncok).
## Overall process
We added the [.packit.yaml](https://github.com/httpie/httpie/blob/master/.packit.yaml) local file.
It unlocks real-time Fedora checks on pull requests and new releases.
So there is nothing to do on our side: `Packit` will see the new release and open a pull request [there](https://src.fedoraproject.org/rpms/httpie). Then, the Fedora maintainer will review and merge.
It is also possible to follow [user feedback](https://bodhi.fedoraproject.org/updates/?packages=httpie) for all builds.
## Q/A with Miro
Q: What would the command to install the latest stable version look like?
A: Assuming the latest stable version is already propagated to Fedora:
```bash
# Note that yum is an alias to dnf.
$ sudo dnf install httpie
```
Q: Will dnf/yum upgrade then update to the latest?
A: Yes, assuming the same as above.
Q: Are new versions backported automatically?
A: No. The process is:
1. A new HTTPie release is created on GitHub.
2. A pull request for Fedora `rawhide` (the development version of Fedora, currently Fedora 36) is created.
3. A Fedora packager (usually Miro) sanity-checks the pull request, then merges and builds. HTTPie is updated in `rawhide` within 24 hours (sometimes more, for unrelated issues).
4. A Fedora packager decides whether the upgrade is suitable for stable Fedora releases (currently 35, 34, 33) and, if so, merges the changes there.
5. (If the above is yes.) The new version of HTTPie lands in the `updates-testing` repo, where it waits for user feedback and reaches broad availability within ~1 week.

View File

@ -1,251 +0,0 @@
Name: httpie
Version: 2.6.0
Release: 1%{?dist}
Summary: A Curl-like tool for humans
License: BSD
URL: https://httpie.org/
Source0: https://github.com/httpie/httpie/archive/%{version}/%{name}-%{version}.tar.gz
BuildArch: noarch
BuildRequires: python3-devel
BuildRequires: pyproject-rpm-macros
BuildRequires: help2man
%description
HTTPie is a CLI HTTP utility built out of frustration with existing tools. The
goal is to make CLI interaction with HTTP-based services as human-friendly as
possible.
HTTPie does so by providing an http command that allows for issuing arbitrary
HTTP requests using a simple and natural syntax and displaying colorized
responses.
%prep
%autosetup -p1
%generate_buildrequires
%pyproject_buildrequires -rx test
%build
%pyproject_wheel
%install
%pyproject_install
%pyproject_save_files httpie
# Bash completion
mkdir -p %{buildroot}%{_datadir}/bash-completion/completions
cp -a extras/httpie-completion.bash %{buildroot}%{_datadir}/bash-completion/completions/http
ln -s ./http %{buildroot}%{_datadir}/bash-completion/completions/https
# Fish completion
mkdir -p %{buildroot}%{_datadir}/fish/vendor_completions.d/
cp -a extras/httpie-completion.fish %{buildroot}%{_datadir}/fish/vendor_completions.d/http.fish
ln -s ./http.fish %{buildroot}%{_datadir}/fish/vendor_completions.d/https.fish
# Generate man pages for everything
export PYTHONPATH=%{buildroot}%{python3_sitelib}
mkdir -p %{buildroot}%{_mandir}/man1
help2man %{buildroot}%{_bindir}/http > %{buildroot}%{_mandir}/man1/http.1
help2man %{buildroot}%{_bindir}/https > %{buildroot}%{_mandir}/man1/https.1
help2man %{buildroot}%{_bindir}/httpie > %{buildroot}%{_mandir}/man1/httpie.1
%check
%pytest -v
%files -f %{pyproject_files}
%doc README.md
%license LICENSE
%{_bindir}/http
%{_bindir}/https
%{_bindir}/httpie
%{_mandir}/man1/http.1*
%{_mandir}/man1/https.1*
%{_mandir}/man1/httpie.1*
# we co-own the entire directory structures for bash/fish completion to avoid a dependency
%{_datadir}/bash-completion/
%{_datadir}/fish/
%changelog
* Fri Oct 15 2021 Miro Hrončok <mhroncok@redhat.com> - 2.6.0-1
- Update to 2.6.0
- Fixes: rhbz#2014022
* Tue Sep 07 2021 Miro Hrončok <mhroncok@redhat.com> - 2.5.0-1
- Update to 2.5.0
- Fixes: rhbz#2001693
* Thu Jul 22 2021 Fedora Release Engineering <releng@fedoraproject.org> - 2.4.0-4
- Rebuilt for https://fedoraproject.org/wiki/Fedora_35_Mass_Rebuild
* Fri Jun 04 2021 Python Maint <python-maint@redhat.com> - 2.4.0-3
- Rebuilt for Python 3.10
* Thu May 27 2021 Miro Hrončok <mhroncok@redhat.com> - 2.4.0-2
- Add Bash and Fish completion
- Fixes rhbz#1834441
- Run tests on build time
* Wed Mar 24 2021 Mikel Olasagasti Uranga <mikel@olasagasti.info> - 2.4.0-1
- Update to 2.4.0
- Use pypi_source macro
* Tue Jan 26 2021 Fedora Release Engineering <releng@fedoraproject.org> - 2.3.0-3
- Rebuilt for https://fedoraproject.org/wiki/Fedora_34_Mass_Rebuild
* Thu Jan 21 2021 Nils Philippsen <nils@tiptoe.de> - 2.3.0-2
- use macros for Python dependencies
- add missing Python dependencies needed for running help2man
- remove manual Python dependencies
- discard stderr when running help2man
* Thu Dec 24 2020 Nils Philippsen <nils@tiptoe.de> - 2.3.0-1
- version 2.3.0
- Python 2 is no more
- use %%autosetup and Python build macros
- remove EL7-isms
- explicitly require sed for building
* Tue Jul 28 2020 Fedora Release Engineering <releng@fedoraproject.org> - 1.0.3-4
- Rebuilt for https://fedoraproject.org/wiki/Fedora_33_Mass_Rebuild
* Tue May 26 2020 Miro Hrončok <mhroncok@redhat.com> - 1.0.3-3
- Rebuilt for Python 3.9
* Wed Jan 29 2020 Fedora Release Engineering <releng@fedoraproject.org> - 1.0.3-2
- Rebuilt for https://fedoraproject.org/wiki/Fedora_32_Mass_Rebuild
* Mon Sep 30 2019 Rick Elrod <relrod@redhat.com> - 1.0.3-1
- Latest upstream
* Mon Aug 19 2019 Miro Hrončok <mhroncok@redhat.com> - 0.9.4-15
- Rebuilt for Python 3.8
* Thu Jul 25 2019 Fedora Release Engineering <releng@fedoraproject.org> - 0.9.4-14
- Rebuilt for https://fedoraproject.org/wiki/Fedora_31_Mass_Rebuild
* Fri Feb 01 2019 Fedora Release Engineering <releng@fedoraproject.org> - 0.9.4-13
- Rebuilt for https://fedoraproject.org/wiki/Fedora_30_Mass_Rebuild
* Fri Jul 13 2018 Fedora Release Engineering <releng@fedoraproject.org> - 0.9.4-12
- Rebuilt for https://fedoraproject.org/wiki/Fedora_29_Mass_Rebuild
* Tue Jun 19 2018 Miro Hrončok <mhroncok@redhat.com> - 0.9.4-11
- Rebuilt for Python 3.7
* Wed Feb 07 2018 Fedora Release Engineering <releng@fedoraproject.org> - 0.9.4-10
- Rebuilt for https://fedoraproject.org/wiki/Fedora_28_Mass_Rebuild
* Wed Jul 26 2017 Fedora Release Engineering <releng@fedoraproject.org> - 0.9.4-9
- Rebuilt for https://fedoraproject.org/wiki/Fedora_27_Mass_Rebuild
* Fri Mar 10 2017 Ralph Bean <rbean@redhat.com> - 0.9.4-8
- Fix help2man usage with python3.
https://bugzilla.redhat.com/show_bug.cgi?id=1430733
* Mon Feb 27 2017 Ralph Bean <rbean@redhat.com> - 0.9.4-7
- Fix missing Requires. https://bugzilla.redhat.com/show_bug.cgi?id=1417730
* Fri Feb 10 2017 Fedora Release Engineering <releng@fedoraproject.org> - 0.9.4-6
- Rebuilt for https://fedoraproject.org/wiki/Fedora_26_Mass_Rebuild
* Mon Jan 2 2017 Ricky Elrod <relrod@redhat.com> - 0.9.4-5
- Add missing Obsoletes.
* Mon Jan 2 2017 Ricky Elrod <relrod@redhat.com> - 0.9.4-4
- Nuke python-version-specific subpackages. Just use py3 if we can.
* Mon Dec 19 2016 Miro Hrončok <mhroncok@redhat.com> - 0.9.4-3
- Rebuild for Python 3.6
* Tue Jul 19 2016 Fedora Release Engineering <rel-eng@lists.fedoraproject.org> - 0.9.4-2
- https://fedoraproject.org/wiki/Changes/Automatic_Provides_for_Python_RPM_Packages
* Tue Jul 05 2016 Ricky Elrod <relrod@redhat.com> - 0.9.4-1
- Update to latest upstream.
* Fri Jun 03 2016 Ricky Elrod <relrod@redhat.com> - 0.9.3-4
- Add proper Obsoletes for rhbz#1329226.
* Wed Feb 03 2016 Fedora Release Engineering <releng@fedoraproject.org> - 0.9.3-3
- Rebuilt for https://fedoraproject.org/wiki/Fedora_24_Mass_Rebuild
* Mon Jan 04 2016 Ralph Bean <rbean@redhat.com> - 0.9.3-2
- Modernize python macros and subpackaging.
- Move LICENSE to %%license macro.
- Make python3 the default on modern Fedora.
* Mon Jan 04 2016 Ralph Bean <rbean@redhat.com> - 0.9.3-1
- new version
* Tue Nov 10 2015 Fedora Release Engineering <rel-eng@lists.fedoraproject.org> - 0.9.2-3
- Rebuilt for https://fedoraproject.org/wiki/Changes/python3.5
* Wed Jun 17 2015 Fedora Release Engineering <rel-eng@lists.fedoraproject.org> - 0.9.2-2
- Rebuilt for https://fedoraproject.org/wiki/Fedora_23_Mass_Rebuild
* Thu Mar 26 2015 Ricky Elrod <relrod@redhat.com> - 0.9.2-1
- Latest upstream release.
* Sat Jun 07 2014 Fedora Release Engineering <rel-eng@lists.fedoraproject.org> - 0.8.0-3
- Rebuilt for https://fedoraproject.org/wiki/Fedora_21_Mass_Rebuild
* Wed May 28 2014 Kalev Lember <kalevlember@gmail.com> - 0.8.0-2
- Rebuilt for https://fedoraproject.org/wiki/Changes/Python_3.4
* Fri Jan 31 2014 Ricky Elrod <codeblock@fedoraproject.org> - 0.8.0-1
- Latest upstream release.
* Fri Oct 4 2013 Ricky Elrod <codeblock@fedoraproject.org> - 0.7.2-2
- Add in patch to work without having python-requests 2.0.0.
* Sat Sep 28 2013 Ricky Elrod <codeblock@fedoraproject.org> - 0.7.2-1
- Latest upstream release.
* Thu Sep 5 2013 Ricky Elrod <codeblock@fedoraproject.org> - 0.6.0-7
- Only try building the manpage on Fedora, since RHEL's help2man doesn't
have the --no-discard-stderr flag.
* Thu Sep 5 2013 Ricky Elrod <codeblock@fedoraproject.org> - 0.6.0-6
- Loosen the requirement on python-pygments.
* Sat Aug 03 2013 Fedora Release Engineering <rel-eng@lists.fedoraproject.org> - 0.6.0-5
- Rebuilt for https://fedoraproject.org/wiki/Fedora_20_Mass_Rebuild
* Tue Jul 2 2013 Ricky Elrod <codeblock@fedoraproject.org> - 0.6.0-4
- python-requests 1.2.3 exists in rawhide now.
* Sun Jun 30 2013 Ricky Elrod <codeblock@fedoraproject.org> - 0.6.0-3
- Patch to use python-requests 1.1.0 for now.
* Sat Jun 29 2013 Ricky Elrod <codeblock@fedoraproject.org> - 0.6.0-2
- Update to latest upstream release.
* Mon Apr 29 2013 Ricky Elrod <codeblock@fedoraproject.org> - 0.5.0-2
- Fix changelog messup.
* Mon Apr 29 2013 Ricky Elrod <codeblock@fedoraproject.org> - 0.5.0-1
- Update to latest upstream release.
* Mon Apr 8 2013 Ricky Elrod <codeblock@fedoraproject.org> - 0.4.1-3
- Fix manpage generation by exporting PYTHONPATH.
* Tue Mar 26 2013 Ricky Elrod <codeblock@fedoraproject.org> - 0.4.1-2
- Include Python3 support, and fix other review blockers.
* Mon Mar 11 2013 Ricky Elrod <codeblock@fedoraproject.org> - 0.4.1-1
- Update to latest upstream release
* Thu Jul 19 2012 Ricky Elrod <codeblock@fedoraproject.org> - 0.2.5-1
- Initial build.

View File

@ -1,50 +0,0 @@
# -*- coding: utf-8; mode: tcl; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- vim:fenc=utf-8::et:sw=4:ts=4:sts=4
PortSystem 1.0
PortGroup github 1.0
PortGroup python 1.0
github.setup httpie httpie 2.6.0
maintainers {g5pw @g5pw} openmaintainer
categories net
description Modern, user-friendly command-line HTTP client for the API era
long_description HTTPie (pronounced aych-tee-tee-pie) is a command line HTTP \
client. Its goal is to make CLI interaction with web \
services as human-friendly as possible. It provides a simple \
http command that allows for sending arbitrary HTTP requests \
using a simple and natural syntax, and displays colorized \
responses. HTTPie can be used for testing, debugging, and \
generally interacting with HTTP servers.
platforms darwin
license BSD
homepage https://httpie.io/
variant python37 conflicts python36 python38 python39 python310 description "Use Python 3.7" {}
variant python38 conflicts python36 python37 python39 python310 description "Use Python 3.8" {}
variant python39 conflicts python36 python37 python38 python310 description "Use Python 3.9" {}
variant python310 conflicts python36 python37 python38 python39 description "Use Python 3.10" {}
if {[variant_isset python37]} {
python.default_version 37
} elseif {[variant_isset python39]} {
python.default_version 39
} elseif {[variant_isset python310]} {
python.default_version 310
} else {
default_variants +python38
python.default_version 38
}
depends_lib-append port:py${python.version}-requests \
port:py${python.version}-requests-toolbelt \
port:py${python.version}-pygments \
port:py${python.version}-socks \
port:py${python.version}-charset-normalizer \
port:py${python.version}-defusedxml
checksums rmd160 07b1d1592da1c505ed3ee4ef3b6056215e16e9ff \
sha256 63cf104bf3552305c68a74f16494a90172b15296610a875e17918e5e36373c0b \
size 1133491
python.link_binaries_suffix

View File

@ -1,40 +0,0 @@
# HTTPie on MacPorts
Welcome to the documentation about **packaging HTTPie for MacPorts**.
- If you do not know HTTPie, have a look [here](https://httpie.io/cli).
- If you are looking for HTTPie installation or upgrade instructions on MacPorts, then you can find them on [that page](https://httpie.io/docs#macports).
- If you are looking for technical information about the HTTPie packaging on MacPorts, then you are in the right place.
## About
This document contains technical details describing how to create a patch for the latest HTTPie version for MacPorts.
We will cover setting up the environment, installing development tools, and installing and testing changes before submitting a patch downstream.
## Overall process
Open a pull request to update the [downstream file](https://github.com/macports/macports-ports/blob/master/net/httpie/Portfile) ([example](https://github.com/macports/macports-ports/pull/12583)).
- Here is how to calculate the size and checksums (replace `2.5.0` with the correct version):
```bash
# Download the archive
$ wget https://api.github.com/repos/httpie/httpie/tarball/2.5.0
# Size
$ stat --printf="%s\n" 2.5.0
1105185
# Checksums
$ openssl dgst -rmd160 2.5.0
RIPEMD160(2.5.0)= 88d227d52199c232c0ddf704a219d1781b1e77ee
$ openssl dgst -sha256 2.5.0
SHA256(2.5.0)= 00c4b7bbe7f65abe1473f37b39d9d9f8f53f44069a430ad143a404c01c2179fc
```
- The commit message must be `httpie: update to XXX`.
- The commit must be signed-off (`git commit -s`).
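For illustration, here is a rough command-line sketch of the steps above; the fork URL, editor, and version number are placeholders, not part of the official process:
```bash
# Clone your fork of the MacPorts ports tree (URL is a placeholder)
git clone https://github.com/<your-fork>/macports-ports.git
cd macports-ports/net/httpie

# Bump the version in `github.setup` and refresh the rmd160/sha256/size
# values in `checksums` with the numbers computed above
$EDITOR Portfile

# Commit message format and sign-off as required above
git commit -s -am "httpie: update to 2.5.0"
```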
## Hacking
:construction: Work in progress.

View File

@ -1,51 +0,0 @@
# HTTPie on Snapcraft
Welcome to the documentation about **packaging HTTPie for Snapcraft**.
- If you do not know HTTPie, have a look [here](https://httpie.io/cli).
- If you are looking for HTTPie installation or upgrade instructions on Snapcraft, then you can find them on [that page](https://httpie.io/docs#snapcraft-linux) ([that one](https://httpie.io/docs#snapcraft-macos) for macOS).
- If you are looking for technical information about the HTTPie packaging on Snapcraft, then you are in the right place.
## About
This document contains technical details describing how to create a patch for the latest HTTPie version for Snapcraft. They apply to Snapcraft on Linux, macOS, and Windows.
We will cover setting up the environment, installing development tools, and installing and testing changes before submitting a patch downstream.
## Overall process
Trigger a new [build](https://snapcraft.io/httpie/builds), then [promote it](https://snapcraft.io/httpie/releases). If further management is needed, see [revisions supervision](https://dashboard.snapcraft.io/snaps/httpie/revisions/).
## Hacking
Launch the docker image:
```bash
docker pull ubuntu:latest
docker run -it --rm ubuntu:latest
```
From inside the container:
```bash
# Clone
git clone --depth=1 https://github.com/httpie/httpie.git
cd httpie
# Build
export SNAPCRAFT_BUILD_ENVIRONMENT_CPU=8
export SNAPCRAFT_BUILD_ENVIRONMENT_MEMORY=16G
snapcraft --debug
# Install
sudo snap install --dangerous httpie_XXX_amd64.snap
# Test
httpie.http --version
httpie.https --version
# Auto-aliases cannot be tested when installing a snap outside the store.
# http --version
# https --version
# Remove
sudo snap remove httpie
```

View File

@ -1,45 +0,0 @@
# HTTPie on Chocolatey
Welcome to the documentation about **packaging HTTPie for Chocolatey**.
- If you do not know HTTPie, have a look [here](https://httpie.io/cli).
- If you are looking for HTTPie installation or upgrade instructions on Chocolatey, then you can find them on [that page](https://httpie.io/docs#chocolatey).
- If you are looking for technical information about the HTTPie packaging on Chocolatey, then you are in the right place.
## About
This document contains technical details describing how to create a patch for the latest HTTPie version for Chocolatey.
We will cover setting up the environment, installing development tools, and installing and testing changes before submitting a patch downstream.
## Overall process
After having successfully [built and tested](#hacking) the package, push it:
```bash
# Replace 2.5.0 with the correct version
choco push httpie.2.5.0.nupkg -s https://push.chocolatey.org/ --api-key=API_KEY
```
## Hacking
```bash
# Clone
git clone --depth=1 https://github.com/httpie/httpie.git
cd httpie/docs/packaging/windows-chocolatey
# Build
choco pack
# Check metadata
choco info httpie -s .
# Install
choco install httpie -y -dv -s "'.;https://community.chocolatey.org/api/v2/'"
# Test
http --version
https --version
# Remove
choco uninstall -y httpie
```

View File

@ -1,50 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<package xmlns="http://schemas.microsoft.com/packaging/2015/06/nuspec.xsd">
<metadata>
<id>httpie</id>
<version>2.6.0</version>
<summary>Modern, user-friendly command-line HTTP client for the API era</summary>
<description>
HTTPie *aitch-tee-tee-pie* is a user-friendly command-line HTTP client for the API era.
It comes with JSON support, syntax highlighting, persistent sessions, wget-like downloads, plugins, and more.
The project's goal is to make CLI interaction with web services as human-friendly as possible. HTTPie is designed for testing, debugging, and generally interacting with APIs and HTTP servers.
The `http` and `https` commands allow for creating and sending arbitrary HTTP requests. They use simple and natural syntax and provide formatted and colorized output.
Main features:
- Built-in JSON support
- Colorized and formatted terminal output
- Sensible defaults for the API era
- Persistent sessions
- Forms and file uploads
- HTTPS, proxies, and authentication support
- Support for arbitrary request data and headers
- Wget-like downloads
- Extensions API
- Expressive and intuitive syntax
- Linux, macOS, Windows, and FreeBSD support
- All that and more in 2 simple commands: `http` + `https`
</description>
<title>HTTPie</title>
<authors>HTTPie</authors>
<owners>jakubroztocil</owners>
<copyright>2012-2021 Jakub Roztocil</copyright>
<licenseUrl>https://raw.githubusercontent.com/httpie/httpie/master/LICENSE</licenseUrl>
<iconUrl>https://pie-assets.s3.eu-central-1.amazonaws.com/LogoIcons/GB.png</iconUrl>
<requireLicenseAcceptance>false</requireLicenseAcceptance>
<releaseNotes>See the [changelog](https://github.com/httpie/httpie/blob/2.6.0/CHANGELOG.md).</releaseNotes>
<tags>httpie http https rest api client curl python ssl cli foss oss url</tags>
<projectUrl>https://httpie.io</projectUrl>
<packageSourceUrl>https://github.com/httpie/httpie/tree/master/docs/packaging/windows-chocolatey</packageSourceUrl>
<projectSourceUrl>https://github.com/httpie/httpie</projectSourceUrl>
<docsUrl>https://httpie.io/docs</docsUrl>
<bugTrackerUrl>https://github.com/httpie/httpie/issues</bugTrackerUrl>
<dependencies>
<dependency id="python3" version="3.7" />
</dependencies>
</metadata>
<files>
<file src="tools\**" target="tools" />
</files>
</package>

View File

@ -1,2 +0,0 @@
$ErrorActionPreference = 'Stop';
py -m pip install $env:ChocolateyPackageName==$env:ChocolateyPackageVersion --disable-pip-version-check

View File

@ -1,2 +0,0 @@
$ErrorActionPreference = 'Stop';
py -m pip uninstall -y $env:ChocolateyPackageName --disable-pip-version-check

View File

@ -1,20 +0,0 @@
_http_complete() {
local cur_word=${COMP_WORDS[COMP_CWORD]}
local prev_word=${COMP_WORDS[COMP_CWORD - 1]}
if [[ "$cur_word" == -* ]]; then
_http_complete_options "$cur_word"
fi
}
complete -o default -F _http_complete http httpie.http httpie.https https
_http_complete_options() {
local cur_word=$1
local options="-j --json -f --form --pretty -s --style -p --print
-v --verbose -h --headers -b --body -S --stream -o --output -d --download
-c --continue --session --session-read-only -a --auth --auth-type --proxy
--follow --verify --cert --cert-key --timeout --check-status --ignore-stdin
--help --version --traceback --debug --raw"
COMPREPLY=( $( compgen -W "$options" -- "$cur_word" ) )
}
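A minimal sketch of enabling the completion script above in an interactive shell; the file name and location are assumptions, not part of the original script:
```bash
# Assuming the script above was saved as ~/.config/httpie/http-completion.bash
source ~/.config/httpie/http-completion.bash

# To load it in every new shell, append the same line to ~/.bashrc
echo 'source ~/.config/httpie/http-completion.bash' >> ~/.bashrc
```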

View File

@ -1,137 +0,0 @@
function __fish_httpie_styles
echo "
abap
algol
algol_nu
arduino
auto
autumn
borland
bw
colorful
default
emacs
friendly
fruity
gruvbox-dark
gruvbox-light
igor
inkpot
lovelace
manni
material
monokai
murphy
native
paraiso-dark
paraiso-light
pastie
perldoc
rainbow_dash
rrt
sas
solarized
solarized-dark
solarized-light
stata
stata-dark
stata-light
tango
trac
vim
vs
xcode
zenburn"
end
function __fish_httpie_auth_types
echo -e "basic\tBasic HTTP auth"
echo -e "digest\tDigest HTTP auth"
end
function __fish_http_verify_options
echo -e "yes\tEnable cert verification"
echo -e "no\tDisable cert verification"
end
# Predefined Content Types
complete -c http -s j -l json -d 'Data items are serialized as a JSON object'
complete -c http -s f -l form -d 'Data items are serialized as form fields'
complete -c http -l multipart -d 'Always sends a multipart/form-data request'
complete -c http -l boundary -x -d 'Custom boundary string for multipart/form-data requests'
complete -c http -l raw -x -d 'Pass raw request data without extra processing'
# Content Processing Options
complete -c http -s x -l compress -d 'Content compressed with Deflate algorithm'
# Output Processing
complete -c http -l pretty -xa "all colors format none" -d 'Controls output processing'
complete -c http -s s -l style -xa "(__fish_httpie_styles)" -d 'Output coloring style'
complete -c http -l unsorted -d 'Disables all sorting while formatting output'
complete -c http -l sorted -d 'Re-enables all sorting options while formatting output'
complete -c http -l format-options -x -d 'Controls output formatting'
# Output Options
complete -c http -s p -l print -x -d 'String specifying what the output should contain'
complete -c http -s h -l headers -d 'Print only the response headers'
complete -c http -s b -l body -d 'Print only the response body'
complete -c http -s v -l verbose -d 'Print the whole request as well as the response'
complete -c http -l all -d 'Show any intermediary requests/responses'
complete -c http -s P -l history-print -x -d 'The same as --print but applies only to intermediary requests/responses'
complete -c http -s S -l stream -d 'Always stream the response body by line'
complete -c http -s o -l output -F -d 'Save output to FILE'
complete -c http -s d -l download -d 'Download a file'
complete -c http -s c -l continue -d 'Resume an interrupted download'
complete -c http -s q -l quiet -d 'Do not print to stdout or stderr'
# Sessions
complete -c http -l session -F -d 'Create, or reuse and update a session'
complete -c http -l session-read-only -F -d 'Create or read a session without updating it'
# Authentication
complete -c http -s a -l auth -x -d 'Username and password for authentication'
complete -c http -s A -l auth-type -xa "(__fish_httpie_auth_types)" -d 'The authentication mechanism to be used'
complete -c http -l ignore-netrc -d 'Ignore credentials from .netrc'
# Network
complete -c http -l offline -d 'Build the request and print it but don\'t actually send it'
complete -c http -l proxy -x -d 'String mapping protocol to the URL of the proxy'
complete -c http -s F -l follow -d 'Follow 30x Location redirects'
complete -c http -l max-redirects -x -d 'Set maximum number of redirects'
complete -c http -l max-headers -x -d 'Maximum number of response headers to be read before giving up'
complete -c http -l timeout -x -d 'Connection timeout in seconds'
complete -c http -l check-status -d 'Error with non-200 HTTP status code'
complete -c http -l path-as-is -d 'Bypass dot segment URL squashing'
complete -c http -l chunked -d 'Enable streaming via chunked transfer encoding'
# SSL
complete -c http -l verify -xa "(__fish_http_verify_options)" -d 'Enable/disable cert verification'
complete -c http -l ssl -x -d 'Desired protocol version to use'
complete -c http -l ciphers -x -d 'String in the OpenSSL cipher list format'
complete -c http -l cert -F -d 'Client side SSL certificate'
complete -c http -l cert-key -F -d 'Private key to use with SSL'
# Troubleshooting
complete -c http -s I -l ignore-stdin -d 'Do not attempt to read stdin'
complete -c http -l help -d 'Show help'
complete -c http -l version -d 'Show version'
complete -c http -l traceback -d 'Prints exception traceback should one occur'
complete -c http -l default-scheme -x -d 'The default scheme to use'
complete -c http -l debug -d 'Show debugging output'
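Similarly, a minimal sketch of installing the fish completions above; the file name is an assumption, while the target directory follows fish's standard per-user completions convention:
```bash
# Copy the script above into fish's per-user completions directory as http.fish
mkdir -p ~/.config/fish/completions
cp http.fish ~/.config/fish/completions/http.fish
```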

View File

@ -1,39 +0,0 @@
# HTTPie Benchmarking Infrastructure
This directory includes the benchmarks we use for testing HTTPie's speed and the
infrastructure to automate this testing across versions.
## Usage
Ensure the following requirements are satisfied:
- Python 3.7+
- `pyperf`
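If `pyperf` is not installed yet, it can be added with pip first (a minimal sketch; the version pin mirrors the one used by the runner script further below):
```bash
python -m pip install 'pyperf==2.3.0'
```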
Then, run `extras/profiling/run.py`:
```console
$ python extras/profiling/run.py
```
Without any options, this command will initially create an isolated environment
and install `httpie` from the latest commit. Then it will create a second
environment with the `master` of the current repository and run the benchmarks
on both of them. It will compare the results and print them as a markdown table:
| Benchmark | master | this_branch |
| -------------------------------------- | :----: | :------------------: |
| `http --version` (startup) | 201 ms | 174 ms: 1.16x faster |
| `http --offline pie.dev/get` (startup) | 200 ms | 174 ms: 1.15x faster |
| Geometric mean | (ref) | 1.10x faster |
If your `master` branch is not up-to-date, you can get a fresh clone by passing
the `--fresh` option. This way, the benchmark runner will clone the `httpie/httpie`
repo from GitHub and use it as the baseline.
You can customize the branches by passing `--local-branch`/`--target-branch`,
and the repos by passing `--local-repo`/`--target-repo` (each can take either
a URL or a path).
If you want to run additional environments with extra dependencies (such as
`pyOpenSSL`), you can pass `--complex`.
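For example (a hedged sketch; the branch name and repository path are placeholders):
```bash
# Compare the current checkout against a fresh upstream clone
python extras/profiling/run.py --fresh

# Compare a specific local branch against a custom target repo/branch
python extras/profiling/run.py --local-branch my-feature \
    --target-repo /path/to/another/httpie --target-branch master

# Add the extra environments that include pyOpenSSL
python extras/profiling/run.py --complex
```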

View File

@ -1,202 +0,0 @@
"""
This file is the declaration of benchmarks for HTTPie. It
is also used to run them with the current environment.
Each instance of BaseRunner class will be an individual
benchmark. And if run without any arguments, this file
will execute every benchmark instance and report the
timings.
The benchmarks are run through 'pyperf', which allows us to
get very precise results. For micro-benchmarks like startup,
please run `pyperf system tune` to get even more accurate results.
Examples:
# Run everything as usual, the default is that we do 3 warmup runs
# and 5 actual runs.
$ python extras/profiling/benchmarks.py
# For retrieving results faster, pass --fast
$ python extras/profiling/benchmarks.py --fast
# To verify everything works as expected, pass --debug-single-value.
# It will only run everything once, so the results are not reliable. But
# it is very useful when iterating on a benchmark
$ python extras/profiling/benchmarks.py --debug-single-value
# If you want to run with a custom HTTPie command (for example with
# an HTTPie instance installed in another virtual environment),
# pass the HTTPIE_COMMAND variable.
$ HTTPIE_COMMAND="/my/python /my/httpie" python extras/profiling/benchmarks.py
"""
from __future__ import annotations
import os
import shlex
import subprocess
import sys
import threading
from contextlib import ExitStack, contextmanager
from dataclasses import dataclass, field
from functools import cached_property, partial
from http.server import HTTPServer, SimpleHTTPRequestHandler
from tempfile import TemporaryDirectory
from typing import ClassVar, Final, List
import pyperf
# For download benchmarks, define a set of files.
# file: (block_size, count) => total_size = block_size * count
PREDEFINED_FILES: Final = {'3G': (3 * 1024 ** 2, 1024)}
class QuietSimpleHTTPServer(SimpleHTTPRequestHandler):
def log_message(self, *args, **kwargs):
pass
@contextmanager
def start_server():
"""Create a server to serve local files. It will create the
PREDEFINED_FILES through dd."""
with TemporaryDirectory() as directory:
for file_name, (block_size, count) in PREDEFINED_FILES.items():
subprocess.check_call(
[
'dd',
'if=/dev/zero',
f'of={file_name}',
f'bs={block_size}',
f'count={count}',
],
cwd=directory,
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL,
)
handler = partial(QuietSimpleHTTPServer, directory=directory)
server = HTTPServer(('localhost', 0), handler)
thread = threading.Thread(target=server.serve_forever)
thread.start()
yield '{}:{}'.format(*server.socket.getsockname())
server.shutdown()
thread.join(timeout=0.5)
@dataclass
class Context:
benchmarks: ClassVar[List[BaseRunner]] = []
stack: ExitStack = field(default_factory=ExitStack)
runner: pyperf.Runner = field(default_factory=pyperf.Runner)
def run(self) -> pyperf.BenchmarkSuite:
results = [benchmark.run(self) for benchmark in self.benchmarks]
return pyperf.BenchmarkSuite(results)
@property
def cmd(self) -> List[str]:
if cmd := os.getenv('HTTPIE_COMMAND'):
return shlex.split(cmd)
http = os.path.join(os.path.dirname(sys.executable), 'http')
assert os.path.exists(http)
return [sys.executable, http]
@cached_property
def server(self) -> str:
return self.stack.enter_context(start_server())
def __enter__(self):
return self
def __exit__(self, *exc_info):
self.stack.close()
@dataclass
class BaseRunner:
"""
An individual benchmark case. By default it has the category
(e.g. startup or download) and a name.
"""
category: str
title: str
def __post_init__(self):
Context.benchmarks.append(self)
def run(self, context: Context) -> pyperf.Benchmark:
raise NotImplementedError
@property
def name(self) -> str:
return f'{self.title} ({self.category})'
@dataclass
class CommandRunner(BaseRunner):
"""
Run a single command, and benchmark it.
"""
args: List[str]
def run(self, context: Context) -> pyperf.Benchmark:
return context.runner.bench_command(self.name, [*context.cmd, *self.args])
@dataclass
class DownloadRunner(BaseRunner):
"""
Benchmark downloading a single file from the
remote server.
"""
file_name: str
def run(self, context: Context) -> pyperf.Benchmark:
return context.runner.bench_command(
self.name,
[
*context.cmd,
'--download',
'GET',
f'{context.server}/{self.file_name}',
],
)
CommandRunner('startup', '`http --version`', ['--version'])
CommandRunner('startup', '`http --offline pie.dev/get`', ['--offline', 'pie.dev/get'])
for pretty in ['all', 'none']:
CommandRunner(
'startup',
f'`http --pretty={pretty} pie.dev/stream/1000`',
[
'--print=HBhb',
f'--pretty={pretty}',
'httpbin.org/stream/1000'
]
)
DownloadRunner('download', '`http --download :/big_file.txt` (3GB)', '3G')
def main() -> None:
# PyPerf will bring its own argument parser, so configure the script here.
# This configuration is reasonably fast yet precise enough: we run
# 3 warmup rounds (especially important for the download benchmarks)
# and then 5 actual runs where we record values.
sys.argv.extend(
['--worker', '--loops=1', '--warmup=3', '--values=5', '--processes=2']
)
with Context() as context:
context.run()
if __name__ == '__main__':
main()

View File

@ -1,287 +0,0 @@
"""
Run the HTTPie benchmark suite with multiple environments.
This script will create two (or more) isolated environments
and compare the *last commit* of this repository with its master.
> If you haven't committed yet, your latest changes won't show up in the results.
You can also pass --fresh, which tests the *last commit* of this
repository against a fresh copy of HTTPie itself. This way, even
if you don't have an up-to-date master branch, you can still
compare against the upstream master.
You can also pass --complex to add 2 additional environments,
which would include additional dependencies like pyOpenSSL.
Examples:
# Run everything as usual, and compare the last commit with master
$ python extras/profiling/run.py
# Include complex environments
$ python extras/profiling/run.py --complex
# Compare against a fresh copy
$ python extras/profiling/run.py --fresh
# Compare against a custom branch of a custom repo
$ python extras/profiling/run.py --target-repo my_repo --target-branch my_branch
# Debug changes made to this script (only run benchmarks once)
$ python extras/profiling/run.py --debug
"""
import dataclasses
import shlex
import subprocess
import sys
import tempfile
import venv
from argparse import ArgumentParser, FileType
from contextlib import contextmanager
from dataclasses import dataclass
from pathlib import Path
from typing import (IO, Dict, Generator, Iterable, List, Optional,
Tuple)
BENCHMARK_SCRIPT = Path(__file__).parent / 'benchmarks.py'
CURRENT_REPO = Path(__file__).parent.parent.parent
GITHUB_URL = 'https://github.com/httpie/httpie.git'
TARGET_BRANCH = 'master'
# Additional dependencies for --complex
ADDITIONAL_DEPS = ('pyOpenSSL',)
def call(*args, **kwargs):
kwargs.setdefault('stdout', subprocess.DEVNULL)
return subprocess.check_call(*args, **kwargs)
class Environment:
"""
Each environment defines how to create an isolated instance
where we could install HTTPie and run benchmarks without any
environmental factors.
"""
@contextmanager
def on_repo(self) -> Generator[Tuple[Path, Dict[str, str]], None, None]:
"""
Return the path to the python interpreter and the
environment variables (e.g HTTPIE_COMMAND) to be
used on the benchmarks.
"""
raise NotImplementedError
@dataclass
class HTTPieEnvironment(Environment):
repo_url: str
branch: Optional[str] = None
dependencies: Iterable[str] = ()
@contextmanager
def on_repo(self) -> Generator[Path, None, None]:
with tempfile.TemporaryDirectory() as directory_path:
directory = Path(directory_path)
# Clone the repo
repo_path = directory / 'httpie'
call(
['git', 'clone', self.repo_url, repo_path],
stderr=subprocess.DEVNULL,
)
if self.branch is not None:
call(
['git', 'checkout', self.branch],
cwd=repo_path,
stderr=subprocess.DEVNULL,
)
# Prepare the environment
venv_path = directory / '.venv'
venv.create(venv_path, with_pip=True)
# Install basic dependencies
python = venv_path / 'bin' / 'python'
call(
[
python,
'-m',
'pip',
'install',
'wheel',
'pyperf==2.3.0',
*self.dependencies,
]
)
# Create a wheel distribution of HTTPie
call([python, 'setup.py', 'bdist_wheel'], cwd=repo_path)
# Install httpie
distribution_path = next((repo_path / 'dist').iterdir())
call(
[python, '-m', 'pip', 'install', distribution_path],
cwd=repo_path,
)
http = venv_path / 'bin' / 'http'
yield python, {'HTTPIE_COMMAND': shlex.join([str(python), str(http)])}
@dataclass
class LocalCommandEnvironment(Environment):
local_command: str
@contextmanager
def on_repo(self) -> Generator[Path, None, None]:
yield sys.executable, {'HTTPIE_COMMAND': self.local_command}
def dump_results(
results: List[str],
file: IO[str],
min_speed: Optional[str] = None
) -> None:
for result in results:
lines = result.strip().splitlines()
if min_speed is not None and "hidden" in lines[-1]:
lines[-1] = (
'Some benchmarks were hidden from this list '
'because their timings did not change in a '
'significant way (change was within the error '
'margin ±{margin}%).'
).format(margin=min_speed)
result = '\n'.join(lines)
print(result, file=file)
print("\n---\n", file=file)
def compare(*args, directory: Path, min_speed: Optional[str] = None):
compare_args = ['pyperf', 'compare_to', '--table', '--table-format=md', *args]
if min_speed:
compare_args.extend(['--min-speed', min_speed])
return subprocess.check_output(
compare_args,
cwd=directory,
text=True,
)
def run(
configs: List[Dict[str, Environment]],
file: IO[str],
debug: bool = False,
min_speed: Optional[str] = None,
) -> None:
result_directory = Path(tempfile.mkdtemp())
results = []
current = 1
total = sum(1 for config in configs for _ in config.items())
def iterate(env_name, status):
print(
f'Iteration: {env_name} ({current}/{total}) ({status})' + ' ' * 10,
end='\r',
flush=True,
)
for config in configs:
for env_name, env in config.items():
iterate(env_name, 'setting up')
with env.on_repo() as (python, env_vars):
iterate(env_name, 'running benchmarks')
args = [python, BENCHMARK_SCRIPT, '-o', env_name]
if debug:
args.append('--debug-single-value')
call(
args,
cwd=result_directory,
env=env_vars,
)
current += 1
results.append(compare(
*config.keys(),
directory=result_directory,
min_speed=min_speed
))
dump_results(results, file=file, min_speed=min_speed)
print('Results are available at:', result_directory)
def main() -> None:
parser = ArgumentParser()
parser.add_argument('--local-repo', default=CURRENT_REPO)
parser.add_argument('--local-branch', default=None)
parser.add_argument('--target-repo', default=CURRENT_REPO)
parser.add_argument('--target-branch', default=TARGET_BRANCH)
parser.add_argument(
'--fresh',
action='store_const',
const=GITHUB_URL,
dest='target_repo',
help='Clone the target repo from upstream GitHub URL',
)
parser.add_argument(
'--complex',
action='store_true',
help='Add a second run, with a complex python environment.',
)
parser.add_argument(
'--local-bin',
help='Run the suite with the given local binary in addition to'
' existing runners. (E.g --local-bin $(command -v xh))',
)
parser.add_argument(
'--file',
type=FileType('w'),
default=sys.stdout,
help='File to print the actual results',
)
parser.add_argument(
'--min-speed',
help='Minimum speed difference, in percent, to consider that a '
'benchmark is significant'
)
parser.add_argument(
'--debug',
action='store_true',
)
options = parser.parse_args()
configs = []
base_config = {
options.target_branch: HTTPieEnvironment(options.target_repo, options.target_branch),
'this_branch': HTTPieEnvironment(options.local_repo, options.local_branch),
}
configs.append(base_config)
if options.complex:
complex_config = {
env_name
+ '-complex': dataclasses.replace(env, dependencies=ADDITIONAL_DEPS)
for env_name, env in base_config.items()
}
configs.append(complex_config)
if options.local_bin:
base_config['binary'] = LocalCommandEnvironment(options.local_bin)
run(configs, file=options.file, debug=options.debug, min_speed=options.min_speed)
if __name__ == '__main__':
main()

BIN
httpie.png Normal file

Binary file not shown.


View File

@ -1,8 +1,19 @@
"""
HTTPie: modern, user-friendly command-line HTTP client for the API era.
HTTPie - a CLI, cURL-like tool for humans.
"""
__version__ = '3.0.2'
__author__ = 'Jakub Roztocil'
__version__ = '0.4.0'
__licence__ = 'BSD'
class ExitStatus:
"""Exit status code constants."""
OK = 0
ERROR = 1
ERROR_TIMEOUT = 2
# Used only when requested with --check-status:
ERROR_HTTP_3XX = 3
ERROR_HTTP_4XX = 4
ERROR_HTTP_5XX = 5

View File

@ -1,19 +1,10 @@
#!/usr/bin/env python
"""The main entry point. Invoke as `http' or `python -m httpie'.
"""
import sys
from .core import main
def main():
try:
from httpie.core import main
exit_status = main()
except KeyboardInterrupt:
from httpie.status import ExitStatus
exit_status = ExitStatus.ERROR_CTRL_C
return exit_status.value
if __name__ == '__main__': # pragma: nocover
import sys
if __name__ == '__main__':
sys.exit(main())

View File

@ -1,13 +0,0 @@
from httpie.cli.dicts import HTTPHeadersDict
from requests.adapters import HTTPAdapter
class HTTPieHTTPAdapter(HTTPAdapter):
def build_response(self, req, resp):
"""Wrap the original headers with the `HTTPHeadersDict`
to preserve multiple headers that have the same name"""
response = super().build_response(req, resp)
response.headers = HTTPHeadersDict(getattr(resp, 'headers', {}))
return response

352
httpie/cli.py Normal file
View File

@ -0,0 +1,352 @@
"""CLI arguments definition.
NOTE: the CLI interface may change before reaching v1.0.
TODO: make the options config friendly, i.e., no mutually exclusive groups to
allow options overwriting.
"""
from argparse import FileType, OPTIONAL, ZERO_OR_MORE, SUPPRESS
from . import __doc__
from . import __version__
from .compat import is_windows
from .sessions import DEFAULT_SESSIONS_DIR, Session
from .output import AVAILABLE_STYLES, DEFAULT_STYLE
from .input import (Parser, AuthCredentialsArgType, KeyValueArgType,
SEP_PROXY, SEP_CREDENTIALS, SEP_GROUP_ITEMS,
OUT_REQ_HEAD, OUT_REQ_BODY, OUT_RESP_HEAD,
OUT_RESP_BODY, OUTPUT_OPTIONS,
PRETTY_MAP, PRETTY_STDOUT_TTY_ONLY, RegexValidator)
def _(text):
"""Normalize whitespace."""
return ' '.join(text.strip().split())
parser = Parser(
description='%s <http://httpie.org>' % __doc__.strip(),
epilog='For every --option there is a --no-option'
' that reverts the option to its default value.\n\n'
'Suggestions and bug reports are greatly appreciated:\n'
'https://github.com/jkbr/httpie/issues'
)
###############################################################################
# Positional arguments.
###############################################################################
positional = parser.add_argument_group(
title='Positional arguments',
description=_('''
These arguments come after any flags and in the
order they are listed here. Only URL is required.
''')
)
positional.add_argument(
'method', metavar='METHOD',
nargs=OPTIONAL,
default=None,
help=_('''
The HTTP method to be used for the request
(GET, POST, PUT, DELETE, PATCH, ...).
If this argument is omitted, then HTTPie
will guess the HTTP method. If there is some
data to be sent, then it will be POST, otherwise GET.
''')
)
positional.add_argument(
'url', metavar='URL',
help=_('''
The protocol defaults to http:// if the
URL does not include one.
''')
)
positional.add_argument(
'items', metavar='REQUEST ITEM',
nargs=ZERO_OR_MORE,
type=KeyValueArgType(*SEP_GROUP_ITEMS),
help=_('''
A key-value pair whose type is defined by the
separator used. It can be an HTTP header (header:value),
a data field to be used in the request body (field_name=value),
a raw JSON data field (field_name:=value),
a query parameter (name==value),
or a file field (field_name@/path/to/file).
You can use a backslash to escape a colliding
separator in the field name.
''')
)
###############################################################################
# Content type.
###############################################################################
content_type = parser.add_argument_group(
title='Predefined content types',
description=None
)
content_type.add_argument(
'--json', '-j', action='store_true',
help=_('''
(default) Data items from the command
line are serialized as a JSON object.
The Content-Type and Accept headers
are set to application/json (if not specified).
''')
)
content_type.add_argument(
'--form', '-f', action='store_true',
help=_('''
Data items from the command line are serialized as form fields.
The Content-Type is set to application/x-www-form-urlencoded
(if not specified).
The presence of any file fields results
in a multipart/form-data request.
''')
)
###############################################################################
# Output processing
###############################################################################
output_processing = parser.add_argument_group(title='Output processing')
output_processing.add_argument(
'--output', '-o', type=FileType('w+b'),
metavar='FILE',
help=SUPPRESS if not is_windows else _(
'''
Save output to FILE.
This option is a replacement for piping output to FILE,
which would on Windows result in corrupted data
being saved.
'''
)
)
output_processing.add_argument(
'--pretty', dest='prettify', default=PRETTY_STDOUT_TTY_ONLY,
choices=sorted(PRETTY_MAP.keys()),
help=_('''
Controls output processing. The value can be "none" to not prettify
the output (default for redirected output), "all" to apply both colors
and formatting
(default for terminal output), "colors", or "format".
''')
)
output_processing.add_argument(
'--style', '-s', dest='style', default=DEFAULT_STYLE, metavar='STYLE',
choices=AVAILABLE_STYLES,
help=_('''
Output coloring style. One of %s. Defaults to "%s".
For this option to work properly, please make sure that the
$TERM environment variable is set to "xterm-256color" or similar
(e.g., via `export TERM=xterm-256color' in your ~/.bashrc).
''') % (', '.join(sorted(AVAILABLE_STYLES)), DEFAULT_STYLE)
)
###############################################################################
# Output options
###############################################################################
output_options = parser.add_argument_group(title='Output options')
output_options.add_argument(
'--print', '-p', dest='output_options', metavar='WHAT',
help=_('''
String specifying what the output should contain:
"{request_headers}" stands for the request headers, and
"{request_body}" for the request body.
"{response_headers}" stands for the response headers and
"{response_body}" for response the body.
The default behaviour is "hb" (i.e., the response
headers and body is printed), if standard output is not redirected.
If the output is piped to another program or to a file,
then only the body is printed by default.
'''.format(request_headers=OUT_REQ_HEAD,
request_body=OUT_REQ_BODY,
response_headers=OUT_RESP_HEAD,
response_body=OUT_RESP_BODY,))
)
output_options.add_argument(
'--verbose', '-v', dest='output_options',
action='store_const', const=''.join(OUTPUT_OPTIONS),
help=_('''
Print the whole request as well as the response.
Shortcut for --print={0}.
'''.format(''.join(OUTPUT_OPTIONS)))
)
output_options.add_argument(
'--headers', '-h', dest='output_options',
action='store_const', const=OUT_RESP_HEAD,
help=_('''
Print only the response headers.
Shortcut for --print={0}.
'''.format(OUT_RESP_HEAD))
)
output_options.add_argument(
'--body', '-b', dest='output_options',
action='store_const', const=OUT_RESP_BODY,
help=_('''
Print only the response body.
Shortcut for --print={0}.
'''.format(OUT_RESP_BODY))
)
output_options.add_argument(
'--stream', '-S', action='store_true', default=False,
help=_('''
Always stream the output by line, i.e., behave like `tail -f'.
Without --stream and with --pretty (either set or implied),
HTTPie fetches the whole response before it outputs the processed data.
Set this option when you want to continuously display a prettified
long-lived response, such as one from the Twitter streaming API.
It is useful also without --pretty: It ensures that the output is flushed
more often and in smaller chunks.
''')
)
###############################################################################
# Sessions
###############################################################################
sessions = parser.add_argument_group(title='Sessions')\
.add_mutually_exclusive_group(required=False)
sessions.add_argument(
'--session', metavar='SESSION_NAME', type=RegexValidator(
Session.VALID_NAME_PATTERN,
'Session name contains invalid characters.'
),
help=_('''
Create, or reuse and update a session.
Within a session, custom headers, auth credentials, as well as any
cookies sent by the server persist between requests.
Session files are stored in %s/<HOST>/<SESSION_NAME>.json.
''' % DEFAULT_SESSIONS_DIR)
)
sessions.add_argument(
'--session-read-only', metavar='SESSION_NAME',
help=_('''
Create or read a session without updating it from the
request/response exchange.
''')
)
###############################################################################
# Authentication
###############################################################################
# ``requests.request`` keyword arguments.
auth = parser.add_argument_group(title='Authentication')
auth.add_argument(
'--auth', '-a', metavar='USER[:PASS]',
type=AuthCredentialsArgType(SEP_CREDENTIALS),
help=_('''
If only the username is provided (-a username),
HTTPie will prompt for the password.
'''),
)
auth.add_argument(
'--auth-type', choices=['basic', 'digest'], default='basic',
help=_('''
The authentication mechanism to be used.
Defaults to "basic".
''')
)
# Network
#############################################
network = parser.add_argument_group(title='Network')
network.add_argument(
'--proxy', default=[], action='append', metavar='PROTOCOL:HOST',
type=KeyValueArgType(SEP_PROXY),
help=_('''
String mapping protocol to the URL of the proxy
(e.g. http:foo.bar:3128). You can specify multiple
proxies with different protocols.
''')
)
network.add_argument(
'--follow', default=False, action='store_true',
help=_('''
Set this flag if full redirects are allowed
(e.g. re-POST-ing of data at new ``Location``)
''')
)
network.add_argument(
'--verify', default='yes',
help=_('''
Set to "no" to skip checking the host\'s SSL certificate.
You can also pass the path to a CA_BUNDLE
file for private certs. You can also set
the REQUESTS_CA_BUNDLE environment variable.
Defaults to "yes".
''')
)
network.add_argument(
'--timeout', type=float, default=30, metavar='SECONDS',
help=_('''
The connection timeout of the request in seconds.
The default value is 30 seconds.
''')
)
network.add_argument(
'--check-status', default=False, action='store_true',
help=_('''
By default, HTTPie exits with 0 when no network or other fatal
errors occur.
This flag instructs HTTPie to also check the HTTP status code and
exit with an error if the status indicates one.
When the server replies with a 4xx (Client Error) or 5xx
(Server Error) status code, HTTPie exits with 4 or 5 respectively.
If the response is a 3xx (Redirect) and --follow
hasn't been set, then the exit status is 3.
Also an error message is written to stderr if stdout is redirected.
''')
)
###############################################################################
# Troubleshooting
###############################################################################
troubleshooting = parser.add_argument_group(title='Troubleshooting')
troubleshooting.add_argument(
'--help',
action='help', default=SUPPRESS,
help='Show this help message and exit'
)
troubleshooting.add_argument(
'--version', action='version', version=__version__)
troubleshooting.add_argument(
'--traceback', action='store_true', default=False,
help='Prints exception traceback should one occur.'
)
troubleshooting.add_argument(
'--debug', action='store_true', default=False,
help=_('''
Prints exception traceback should one occur, and also other
information that is useful for debugging HTTPie itself and
for bug reports.
''')
)
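To illustrate how the exit codes described under `--check-status` (and the `ExitStatus` constants shown earlier) can be consumed, here is a hedged shell sketch; the URL is a placeholder and not taken from the original text:
```bash
if http --check-status --timeout=2.5 HEAD example.org &> /dev/null; then
    echo 'OK!'
else
    case $? in
        2) echo 'Request timed out!' ;;
        3) echo 'Unexpected 3xx redirect (without --follow)!' ;;
        4) echo 'Got a 4xx client error!' ;;
        5) echo 'Got a 5xx server error!' ;;
        *) echo 'Other error!' ;;
    esac
fi
```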

View File

View File

@ -1,514 +0,0 @@
import argparse
import errno
import os
import re
import sys
from argparse import RawDescriptionHelpFormatter
from textwrap import dedent
from urllib.parse import urlsplit
from requests.utils import get_netrc_auth
from .argtypes import (
AuthCredentials, KeyValueArgType, PARSED_DEFAULT_FORMAT_OPTIONS,
parse_auth,
parse_format_options,
)
from .constants import (
HTTP_GET, HTTP_POST, BASE_OUTPUT_OPTIONS, OUTPUT_OPTIONS, OUTPUT_OPTIONS_DEFAULT,
OUTPUT_OPTIONS_DEFAULT_OFFLINE, OUTPUT_OPTIONS_DEFAULT_STDOUT_REDIRECTED,
OUT_RESP_BODY, PRETTY_MAP, PRETTY_STDOUT_TTY_ONLY, RequestType,
SEPARATOR_CREDENTIALS,
SEPARATOR_GROUP_ALL_ITEMS, SEPARATOR_GROUP_DATA_ITEMS, URL_SCHEME_RE,
)
from .exceptions import ParseError
from .requestitems import RequestItems
from ..context import Environment
from ..plugins.registry import plugin_manager
from ..utils import ExplicitNullAuth, get_content_type
class HTTPieHelpFormatter(RawDescriptionHelpFormatter):
"""A nicer help formatter.
Help for arguments can be indented and contain new lines.
It will be de-dented and arguments in the help
will be separated by a blank line for better readability.
"""
def __init__(self, max_help_position=6, *args, **kwargs):
# A smaller indent for args help.
kwargs['max_help_position'] = max_help_position
super().__init__(*args, **kwargs)
def _split_lines(self, text, width):
text = dedent(text).strip() + '\n\n'
return text.splitlines()
# TODO: refactor and design type-annotated data structures
# for raw args + parsed args and keep things immutable.
class BaseHTTPieArgumentParser(argparse.ArgumentParser):
def __init__(self, *args, formatter_class=HTTPieHelpFormatter, **kwargs):
super().__init__(*args, formatter_class=formatter_class, **kwargs)
self.env = None
self.args = None
self.has_stdin_data = False
self.has_input_data = False
# noinspection PyMethodOverriding
def parse_args(
self,
env: Environment,
args=None,
namespace=None
) -> argparse.Namespace:
self.env = env
self.args, no_options = self.parse_known_args(args, namespace)
if self.args.debug:
self.args.traceback = True
self.has_stdin_data = (
self.env.stdin
and not getattr(self.args, 'ignore_stdin', False)
and not self.env.stdin_isatty
)
self.has_input_data = self.has_stdin_data or getattr(self.args, 'raw', None) is not None
return self.args
# noinspection PyShadowingBuiltins
def _print_message(self, message, file=None):
# Sneak in our stderr/stdout.
if hasattr(self, 'root'):
env = self.root.env
else:
env = self.env
if env is not None:
file = {
sys.stdout: env.stdout,
sys.stderr: env.stderr,
None: env.stderr
}.get(file, file)
if not hasattr(file, 'buffer') and isinstance(message, str):
message = message.encode(env.stdout_encoding)
super()._print_message(message, file)
class HTTPieManagerArgumentParser(BaseHTTPieArgumentParser):
def parse_known_args(self, args=None, namespace=None):
try:
return super().parse_known_args(args, namespace)
except SystemExit as exc:
if not hasattr(self, 'root') and exc.code == 2: # Argument Parser Error
raise argparse.ArgumentError(None, None)
raise
class HTTPieArgumentParser(BaseHTTPieArgumentParser):
"""Adds additional logic to `argparse.ArgumentParser`.
Handles all input (CLI args, file args, stdin), applies defaults,
and performs extra validation.
"""
def __init__(self, *args, **kwargs):
kwargs.setdefault('add_help', False)
super().__init__(*args, **kwargs)
# noinspection PyMethodOverriding
def parse_args(
self,
env: Environment,
args=None,
namespace=None
) -> argparse.Namespace:
self.env = env
self.args, no_options = super().parse_known_args(args, namespace)
if self.args.debug:
self.args.traceback = True
self.has_stdin_data = (
self.env.stdin
and not self.args.ignore_stdin
and not self.env.stdin_isatty
)
self.has_input_data = self.has_stdin_data or self.args.raw is not None
# Arguments processing and environment setup.
self._apply_no_options(no_options)
self._process_request_type()
self._process_download_options()
self._setup_standard_streams()
self._process_output_options()
self._process_pretty_options()
self._process_format_options()
self._guess_method()
self._parse_items()
self._process_url()
self._process_auth()
if self.args.raw is not None:
self._body_from_input(self.args.raw)
elif self.has_stdin_data:
self._body_from_file(self.env.stdin)
if self.args.compress:
# TODO: allow --compress with --chunked / --multipart
if self.args.chunked:
self.error('cannot combine --compress and --chunked')
if self.args.multipart:
self.error('cannot combine --compress and --multipart')
return self.args
def _process_request_type(self):
request_type = self.args.request_type
self.args.json = request_type is RequestType.JSON
self.args.multipart = request_type is RequestType.MULTIPART
self.args.form = request_type in {
RequestType.FORM,
RequestType.MULTIPART,
}
def _process_url(self):
if self.args.url.startswith('://'):
# Paste URL & add space shortcut: `http ://pie.dev` → `http://pie.dev`
self.args.url = self.args.url[3:]
if not URL_SCHEME_RE.match(self.args.url):
if os.path.basename(self.env.program_name) == 'https':
scheme = 'https://'
else:
scheme = self.args.default_scheme + '://'
# See if we're using curl style shorthand for localhost (:3000/foo)
shorthand = re.match(r'^:(?!:)(\d*)(/?.*)$', self.args.url)
if shorthand:
port = shorthand.group(1)
rest = shorthand.group(2)
self.args.url = scheme + 'localhost'
if port:
self.args.url += ':' + port
self.args.url += rest
else:
self.args.url = scheme + self.args.url
def _setup_standard_streams(self):
"""
Modify `env.stdout` and `env.stdout_isatty` based on args, if needed.
"""
self.args.output_file_specified = bool(self.args.output_file)
if self.args.download:
# FIXME: Come up with a cleaner solution.
if not self.args.output_file and not self.env.stdout_isatty:
# Use stdout as the download output file.
self.args.output_file = self.env.stdout
# With `--download`, we write everything that would normally go to
# `stdout` to `stderr` instead. Let's replace the stream so that
# we don't have to use many `if`s throughout the codebase.
# The response body will be treated separately.
self.env.stdout = self.env.stderr
self.env.stdout_isatty = self.env.stderr_isatty
elif self.args.output_file:
# When not `--download`ing, then `--output` simply replaces
# `stdout`. The file is opened for appending, which isn't what
# we want in this case.
self.args.output_file.seek(0)
try:
self.args.output_file.truncate()
except OSError as e:
if e.errno == errno.EINVAL:
# E.g. /dev/null on Linux.
pass
else:
raise
self.env.stdout = self.args.output_file
self.env.stdout_isatty = False
if self.args.quiet:
self.env.stderr = self.env.devnull
if not (self.args.output_file_specified and not self.args.download):
self.env.stdout = self.env.devnull
def _process_auth(self):
# TODO: refactor & simplify this method.
self.args.auth_plugin = None
default_auth_plugin = plugin_manager.get_auth_plugins()[0]
auth_type_set = self.args.auth_type is not None
url = urlsplit(self.args.url)
if self.args.auth is None and not auth_type_set:
if url.username is not None:
# Handle http://username:password@hostname/
username = url.username
password = url.password or ''
self.args.auth = AuthCredentials(
key=username,
value=password,
sep=SEPARATOR_CREDENTIALS,
orig=SEPARATOR_CREDENTIALS.join([username, password])
)
if self.args.auth is not None or auth_type_set:
if not self.args.auth_type:
self.args.auth_type = default_auth_plugin.auth_type
plugin = plugin_manager.get_auth_plugin(self.args.auth_type)()
if (not self.args.ignore_netrc
and self.args.auth is None
and plugin.netrc_parse):
# Only the host is needed, so it's OK that the URL isn't finalized yet.
netrc_credentials = get_netrc_auth(self.args.url)
if netrc_credentials:
self.args.auth = AuthCredentials(
key=netrc_credentials[0],
value=netrc_credentials[1],
sep=SEPARATOR_CREDENTIALS,
orig=SEPARATOR_CREDENTIALS.join(netrc_credentials)
)
if plugin.auth_require and self.args.auth is None:
self.error('--auth required')
plugin.raw_auth = self.args.auth
self.args.auth_plugin = plugin
already_parsed = isinstance(self.args.auth, AuthCredentials)
if self.args.auth is None or not plugin.auth_parse:
self.args.auth = plugin.get_auth()
else:
if already_parsed:
# from the URL
credentials = self.args.auth
else:
credentials = parse_auth(self.args.auth)
if (not credentials.has_password()
and plugin.prompt_password):
if self.args.ignore_stdin:
# Non-tty stdin read by now
self.error(
'Unable to prompt for passwords because'
' --ignore-stdin is set.'
)
credentials.prompt_password(url.netloc)
if (credentials.key and credentials.value):
plugin.raw_auth = credentials.key + ":" + credentials.value
self.args.auth = plugin.get_auth(
username=credentials.key,
password=credentials.value,
)
if not self.args.auth and self.args.ignore_netrc:
# Set a no-op auth to force requests to ignore .netrc
# <https://github.com/psf/requests/issues/2773#issuecomment-174312831>
self.args.auth = ExplicitNullAuth()
def _apply_no_options(self, no_options):
"""For every `--no-OPTION` in `no_options`, set `args.OPTION` to
its default value. This allows for un-setting of options, e.g.,
specified in config.
"""
invalid = []
for option in no_options:
if not option.startswith('--no-'):
invalid.append(option)
continue
# --no-option => --option
inverted = '--' + option[5:]
for action in self._actions:
if inverted in action.option_strings:
setattr(self.args, action.dest, action.default)
break
else:
invalid.append(option)
if invalid:
self.error(f'unrecognized arguments: {" ".join(invalid)}')
def _body_from_file(self, fd):
"""Read the data from a file-like object.
Bytes are always read.
"""
self._ensure_one_data_source(self.args.data, self.args.files)
self.args.data = getattr(fd, 'buffer', fd)
def _body_from_input(self, data):
"""Read the data from the CLI.
"""
self._ensure_one_data_source(self.has_stdin_data, self.args.data,
self.args.files)
self.args.data = data.encode()
def _ensure_one_data_source(self, *other_sources):
"""There can only be one source of input request data.
"""
if any(other_sources):
self.error('Request body (from stdin, --raw or a file) and request '
'data (key=value) cannot be mixed. Pass '
'--ignore-stdin to let key/value take priority. '
'See https://httpie.io/docs#scripting for details.')
def _guess_method(self):
"""Set `args.method` if not specified to either POST or GET
based on whether the request has data or not.
"""
if self.args.method is None:
# Invoked as `http URL'.
assert not self.args.request_items
if self.has_input_data:
self.args.method = HTTP_POST
else:
self.args.method = HTTP_GET
# FIXME: False positive, e.g., "localhost" matches but is a valid URL.
elif not re.match('^[a-zA-Z]+$', self.args.method):
# Invoked as `http URL item+'. The URL is now in `args.method`
# and the first ITEM is now incorrectly in `args.url`.
try:
# Parse the URL as an ITEM and store it as the first ITEM arg.
self.args.request_items.insert(0, KeyValueArgType(
*SEPARATOR_GROUP_ALL_ITEMS).__call__(self.args.url))
except argparse.ArgumentTypeError as e:
if self.args.traceback:
raise
self.error(e.args[0])
else:
# Set the URL correctly
self.args.url = self.args.method
# Infer the method
has_data = (
self.has_input_data
or any(
item.sep in SEPARATOR_GROUP_DATA_ITEMS
for item in self.args.request_items)
)
self.args.method = HTTP_POST if has_data else HTTP_GET
def _parse_items(self):
"""
Parse `args.request_items` into `args.headers`, `args.data`,
`args.params`, and `args.files`.
"""
try:
request_items = RequestItems.from_args(
request_item_args=self.args.request_items,
request_type=self.args.request_type,
)
except ParseError as e:
if self.args.traceback:
raise
self.error(e.args[0])
else:
self.args.headers = request_items.headers
self.args.data = request_items.data
self.args.files = request_items.files
self.args.params = request_items.params
self.args.multipart_data = request_items.multipart_data
if self.args.files and not self.args.form:
# `http url @/path/to/file`
request_file = None
for key, file in self.args.files.items():
if key != '':
self.error(
'Invalid file fields (perhaps you meant --form?):'
f' {",".join(self.args.files.keys())}')
if request_file is not None:
self.error("Can't read request from multiple files")
request_file = file
fn, fd, ct = request_file
self.args.files = {}
self._body_from_file(fd)
if 'Content-Type' not in self.args.headers:
content_type = get_content_type(fn)
if content_type:
self.args.headers['Content-Type'] = content_type
def _process_output_options(self):
"""Apply defaults to output options, or validate the provided ones.
The default output options are stdout-type-sensitive.
"""
def check_options(value, option):
unknown = set(value) - OUTPUT_OPTIONS
if unknown:
self.error(f'Unknown output options: {option}={",".join(unknown)}')
if self.args.verbose:
self.args.all = True
if self.args.output_options is None:
if self.args.verbose >= 2:
self.args.output_options = ''.join(OUTPUT_OPTIONS)
elif self.args.verbose == 1:
self.args.output_options = ''.join(BASE_OUTPUT_OPTIONS)
elif self.args.offline:
self.args.output_options = OUTPUT_OPTIONS_DEFAULT_OFFLINE
elif not self.env.stdout_isatty:
self.args.output_options = OUTPUT_OPTIONS_DEFAULT_STDOUT_REDIRECTED
else:
self.args.output_options = OUTPUT_OPTIONS_DEFAULT
if self.args.output_options_history is None:
self.args.output_options_history = self.args.output_options
check_options(self.args.output_options, '--print')
check_options(self.args.output_options_history, '--history-print')
if self.args.download and OUT_RESP_BODY in self.args.output_options:
# Response body is always downloaded with --download and it goes
# through a different routine, so we remove it.
self.args.output_options = str(
set(self.args.output_options) - set(OUT_RESP_BODY))
def _process_pretty_options(self):
if self.args.prettify == PRETTY_STDOUT_TTY_ONLY:
self.args.prettify = PRETTY_MAP[
'all' if self.env.stdout_isatty else 'none']
elif (self.args.prettify and self.env.is_windows
and self.args.output_file):
self.error('Only terminal output can be colorized on Windows.')
else:
# noinspection PyTypeChecker
self.args.prettify = PRETTY_MAP[self.args.prettify]
def _process_download_options(self):
if self.args.offline:
self.args.download = False
self.args.download_resume = False
return
if not self.args.download:
if self.args.download_resume:
self.error('--continue only works with --download')
if self.args.download_resume and not (
self.args.download and self.args.output_file):
self.error('--continue requires --output to be specified')
def _process_format_options(self):
format_options = self.args.format_options or []
parsed_options = PARSED_DEFAULT_FORMAT_OPTIONS
for options_group in format_options:
parsed_options = parse_format_options(options_group, defaults=parsed_options)
self.args.format_options = parsed_options

View File

@ -1,260 +0,0 @@
import argparse
import getpass
import os
import sys
from copy import deepcopy
from typing import List, Optional, Union
from .constants import DEFAULT_FORMAT_OPTIONS, SEPARATOR_CREDENTIALS
from ..sessions import VALID_SESSION_NAME_PATTERN
class KeyValueArg:
"""Base key-value pair parsed from CLI."""
def __init__(self, key: str, value: Optional[str], sep: str, orig: str):
self.key = key
self.value = value
self.sep = sep
self.orig = orig
def __eq__(self, other: 'KeyValueArg'):
return self.__dict__ == other.__dict__
def __repr__(self):
return repr(self.__dict__)
class SessionNameValidator:
def __init__(self, error_message: str):
self.error_message = error_message
def __call__(self, value: str) -> str:
# Session name can be a path or just a name.
if (os.path.sep not in value
and not VALID_SESSION_NAME_PATTERN.search(value)):
raise argparse.ArgumentError(None, self.error_message)
return value
class Escaped(str):
"""Represents an escaped character."""
def __repr__(self):
return f"Escaped({repr(str(self))})"
class KeyValueArgType:
"""A key-value pair argument type used with `argparse`.
Parses a key-value arg and constructs a `KeyValueArg` instance.
Used for headers, form data, and other key-value pair types.
"""
key_value_class = KeyValueArg
def __init__(self, *separators: str):
self.separators = separators
self.special_characters = set()
for separator in separators:
self.special_characters.update(separator)
def __call__(self, s: str) -> KeyValueArg:
"""Parse raw string arg and return `self.key_value_class` instance.
The best of `self.separators` is determined (first found, longest).
Backslash-escaped characters aren't considered separators
(or parts thereof). Literal backslash characters have to be escaped
as well (r'\\').
"""
tokens = self.tokenize(s)
# Sorting by length ensures that the longest one will be
# chosen as it will overwrite any shorter ones starting
# at the same position in the `found` dictionary.
separators = sorted(self.separators, key=len)
for i, token in enumerate(tokens):
if isinstance(token, Escaped):
continue
found = {}
for sep in separators:
pos = token.find(sep)
if pos != -1:
found[pos] = sep
if found:
# Starting first, longest separator found.
sep = found[min(found.keys())]
key, value = token.split(sep, 1)
# Any preceding tokens are part of the key.
key = ''.join(tokens[:i]) + key
# Any following tokens are part of the value.
value += ''.join(tokens[i + 1:])
break
else:
raise argparse.ArgumentTypeError(f'{s!r} is not a valid value')
return self.key_value_class(key=key, value=value, sep=sep, orig=s)
def tokenize(self, s: str) -> List[Union[str, Escaped]]:
r"""Tokenize the raw arg string
There are only two token types - strings and escaped characters:
>>> KeyValueArgType('=').tokenize(r'foo\=bar\\baz')
['foo', Escaped('='), 'bar\\\\baz']
"""
tokens = ['']
characters = iter(s)
for char in characters:
if char == '\\':
char = next(characters, '')
if char not in self.special_characters:
tokens[-1] += '\\' + char
else:
tokens.extend([Escaped(char), ''])
else:
tokens[-1] += char
return tokens
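# Illustrative sketch, not part of the original file: how `KeyValueArgType`
# resolves separators. With ':' and ':=' registered, the longest separator
# found at the earliest position wins, and backslash-escaped separators are
# skipped (hypothetical values, shown only for clarity):
#
#   arg = KeyValueArgType(':', ':=')('count:=5')
#   # arg.key == 'count', arg.value == '5', arg.sep == ':='
#
#   arg = KeyValueArgType(':', ':=')(r'weird\:name:value')
#   # arg.key == 'weird:name', arg.value == 'value', arg.sep == ':'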
class AuthCredentials(KeyValueArg):
"""Represents parsed credentials."""
def has_password(self) -> bool:
return self.value is not None
def prompt_password(self, host: str):
prompt_text = f'http: password for {self.key}@{host}: '
try:
self.value = self._getpass(prompt_text)
except (EOFError, KeyboardInterrupt):
sys.stderr.write('\n')
sys.exit(0)
@staticmethod
def _getpass(prompt):
# To allow easy mocking.
return getpass.getpass(str(prompt))
class AuthCredentialsArgType(KeyValueArgType):
"""A key-value arg type that parses credentials."""
key_value_class = AuthCredentials
def __call__(self, s):
"""Parse credentials from `s`.
("username" or "username:password").
"""
try:
return super().__call__(s)
except argparse.ArgumentTypeError:
# No password provided, will prompt for it later.
return self.key_value_class(
key=s,
value=None,
sep=SEPARATOR_CREDENTIALS,
orig=s
)
parse_auth = AuthCredentialsArgType(SEPARATOR_CREDENTIALS)
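# Illustrative sketch, not from the original file: `parse_auth` accepts either
# 'username:password' or a bare 'username'; in the latter case the password is
# left as None and prompted for later via AuthCredentials.prompt_password():
#
#   creds = parse_auth('alice:secret')
#   # creds.key == 'alice', creds.value == 'secret', creds.has_password() is True
#
#   creds = parse_auth('alice')
#   # creds.value is None -> the password will be prompted for interactively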
def readable_file_arg(filename):
try:
with open(filename, 'rb'):
return filename
except OSError as ex:
raise argparse.ArgumentTypeError(f'{ex.filename}: {ex.strerror}')
def parse_format_options(s: str, defaults: Optional[dict]) -> dict:
"""
Parse `s` and update `defaults` with the parsed values.
>>> parse_format_options(
... defaults={'json': {'indent': 4, 'sort_keys': True}},
... s='json.indent:2,json.sort_keys:False',
... )
{'json': {'indent': 2, 'sort_keys': False}}
"""
value_map = {
'true': True,
'false': False,
}
options = deepcopy(defaults or {})
for option in s.split(','):
try:
path, value = option.lower().split(':')
section, key = path.split('.')
except ValueError:
raise argparse.ArgumentTypeError(f'invalid option {option!r}')
if value in value_map:
parsed_value = value_map[value]
else:
if value.isnumeric():
parsed_value = int(value)
else:
parsed_value = value
if defaults is None:
options.setdefault(section, {})
else:
try:
default_value = defaults[section][key]
except KeyError:
raise argparse.ArgumentTypeError(
f'invalid key {path!r}')
default_type, parsed_type = type(default_value), type(parsed_value)
if parsed_type is not default_type:
raise argparse.ArgumentTypeError(
'invalid value'
f' {value!r} in {option!r}'
f' (expected {default_type.__name__}'
f' got {parsed_type.__name__})'
)
options[section][key] = parsed_value
return options
PARSED_DEFAULT_FORMAT_OPTIONS = parse_format_options(
s=','.join(DEFAULT_FORMAT_OPTIONS),
defaults=None,
)
def response_charset_type(encoding: str) -> str:
try:
''.encode(encoding)
except LookupError:
raise argparse.ArgumentTypeError(
f'{encoding!r} is not a supported encoding')
return encoding
def response_mime_type(mime_type: str) -> str:
if mime_type.count('/') != 1:
raise argparse.ArgumentTypeError(
f'{mime_type!r} doesn\'t look like a mime type; use type/subtype')
return mime_type

View File

@ -1,133 +0,0 @@
"""Parsing and processing of CLI input (args, auth credentials, files, stdin).
"""
import enum
import re
URL_SCHEME_RE = re.compile(r'^[a-z][a-z0-9.+-]*://', re.IGNORECASE)
HTTP_POST = 'POST'
HTTP_GET = 'GET'
# Various separators used in args
SEPARATOR_HEADER = ':'
SEPARATOR_HEADER_EMPTY = ';'
SEPARATOR_CREDENTIALS = ':'
SEPARATOR_PROXY = ':'
SEPARATOR_HEADER_EMBED = ':@'
SEPARATOR_DATA_STRING = '='
SEPARATOR_DATA_RAW_JSON = ':='
SEPARATOR_FILE_UPLOAD = '@'
SEPARATOR_FILE_UPLOAD_TYPE = ';type=' # in already parsed file upload path only
SEPARATOR_DATA_EMBED_FILE_CONTENTS = '=@'
SEPARATOR_DATA_EMBED_RAW_JSON_FILE = ':=@'
SEPARATOR_QUERY_PARAM = '=='
SEPARATOR_QUERY_EMBED_FILE = '==@'
# Separators that become request data
SEPARATOR_GROUP_DATA_ITEMS = frozenset({
SEPARATOR_DATA_STRING,
SEPARATOR_DATA_RAW_JSON,
SEPARATOR_FILE_UPLOAD,
SEPARATOR_DATA_EMBED_FILE_CONTENTS,
SEPARATOR_DATA_EMBED_RAW_JSON_FILE
})
SEPARATORS_GROUP_MULTIPART = frozenset({
SEPARATOR_DATA_STRING,
SEPARATOR_DATA_EMBED_FILE_CONTENTS,
SEPARATOR_FILE_UPLOAD,
})
# Separators for items whose value is a filename to be embedded
SEPARATOR_GROUP_DATA_EMBED_ITEMS = frozenset({
SEPARATOR_HEADER_EMBED,
SEPARATOR_QUERY_EMBED_FILE,
SEPARATOR_DATA_EMBED_FILE_CONTENTS,
SEPARATOR_DATA_EMBED_RAW_JSON_FILE,
})
# Separators for nested JSON items
SEPARATOR_GROUP_NESTED_JSON_ITEMS = frozenset([
SEPARATOR_DATA_STRING,
SEPARATOR_DATA_RAW_JSON,
SEPARATOR_DATA_EMBED_FILE_CONTENTS,
SEPARATOR_DATA_EMBED_RAW_JSON_FILE,
])
# Separators allowed in ITEM arguments
SEPARATOR_GROUP_ALL_ITEMS = frozenset({
SEPARATOR_HEADER,
SEPARATOR_HEADER_EMPTY,
SEPARATOR_HEADER_EMBED,
SEPARATOR_QUERY_PARAM,
SEPARATOR_QUERY_EMBED_FILE,
SEPARATOR_DATA_STRING,
SEPARATOR_DATA_RAW_JSON,
SEPARATOR_FILE_UPLOAD,
SEPARATOR_DATA_EMBED_FILE_CONTENTS,
SEPARATOR_DATA_EMBED_RAW_JSON_FILE,
})
# Output options
OUT_REQ_HEAD = 'H'
OUT_REQ_BODY = 'B'
OUT_RESP_HEAD = 'h'
OUT_RESP_BODY = 'b'
OUT_RESP_META = 'm'
BASE_OUTPUT_OPTIONS = frozenset({
OUT_REQ_HEAD,
OUT_REQ_BODY,
OUT_RESP_HEAD,
OUT_RESP_BODY,
})
OUTPUT_OPTIONS = frozenset({
*BASE_OUTPUT_OPTIONS,
OUT_RESP_META,
})
# Pretty
PRETTY_MAP = {
'all': ['format', 'colors'],
'colors': ['colors'],
'format': ['format'],
'none': []
}
PRETTY_STDOUT_TTY_ONLY = object()
DEFAULT_FORMAT_OPTIONS = [
'headers.sort:true',
'json.format:true',
'json.indent:4',
'json.sort_keys:true',
'xml.format:true',
'xml.indent:2',
]
SORTED_FORMAT_OPTIONS = [
'headers.sort:true',
'json.sort_keys:true',
]
SORTED_FORMAT_OPTIONS_STRING = ','.join(SORTED_FORMAT_OPTIONS)
UNSORTED_FORMAT_OPTIONS_STRING = ','.join(
option.replace('true', 'false') for option in SORTED_FORMAT_OPTIONS)
# Defaults
OUTPUT_OPTIONS_DEFAULT = OUT_RESP_HEAD + OUT_RESP_BODY
OUTPUT_OPTIONS_DEFAULT_STDOUT_REDIRECTED = OUT_RESP_BODY
OUTPUT_OPTIONS_DEFAULT_OFFLINE = OUT_REQ_HEAD + OUT_REQ_BODY
class RequestType(enum.Enum):
FORM = enum.auto()
MULTIPART = enum.auto()
JSON = enum.auto()
OPEN_BRACKET = '['
CLOSE_BRACKET = ']'
BACKSLASH = '\\'
HIGHLIGHTER = '^'
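# Illustrative sketch, not part of the original module: these separators decide
# how each REQUEST_ITEM is interpreted by the request-items parser, e.g.:
#
#   'X-API-Key:abc'    -> header             (SEPARATOR_HEADER)
#   'search==httpie'   -> URL query param    (SEPARATOR_QUERY_PARAM)
#   'name=HTTPie'      -> data field         (SEPARATOR_DATA_STRING)
#   'meta:={"a": 1}'   -> raw JSON field     (SEPARATOR_DATA_RAW_JSON)
#   'cv@~/cv.pdf'      -> file upload field  (SEPARATOR_FILE_UPLOAD)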

View File

@ -1,865 +0,0 @@
"""
CLI arguments definition.
"""
from argparse import (FileType, OPTIONAL, SUPPRESS, ZERO_OR_MORE)
from textwrap import dedent, wrap
from .. import __doc__, __version__
from .argparser import HTTPieArgumentParser
from .argtypes import (
KeyValueArgType, SessionNameValidator,
readable_file_arg, response_charset_type, response_mime_type,
)
from .constants import (
DEFAULT_FORMAT_OPTIONS, BASE_OUTPUT_OPTIONS, OUTPUT_OPTIONS,
OUTPUT_OPTIONS_DEFAULT, OUT_REQ_BODY, OUT_REQ_HEAD,
OUT_RESP_BODY, OUT_RESP_HEAD, OUT_RESP_META, PRETTY_MAP, PRETTY_STDOUT_TTY_ONLY,
RequestType, SEPARATOR_GROUP_ALL_ITEMS, SEPARATOR_PROXY,
SORTED_FORMAT_OPTIONS_STRING,
UNSORTED_FORMAT_OPTIONS_STRING,
)
from .utils import LazyChoices
from ..output.formatters.colors import (
AUTO_STYLE, DEFAULT_STYLE, get_available_styles
)
from ..plugins.builtin import BuiltinAuthPlugin
from ..plugins.registry import plugin_manager
from ..sessions import DEFAULT_SESSIONS_DIR
from ..ssl_ import AVAILABLE_SSL_VERSION_ARG_MAPPING, DEFAULT_SSL_CIPHERS
parser = HTTPieArgumentParser(
prog='http',
description=f'{__doc__.strip()} <https://httpie.io>',
epilog=dedent('''
For every --OPTION there is also a --no-OPTION that reverts OPTION
to its default value.
Suggestions and bug reports are greatly appreciated:
https://github.com/httpie/httpie/issues
'''),
)
parser.register('action', 'lazy_choices', LazyChoices)
#######################################################################
# Positional arguments.
#######################################################################
positional = parser.add_argument_group(
title='Positional Arguments',
description=dedent('''
These arguments come after any flags and in the order they are listed here.
Only URL is required.
''')
)
positional.add_argument(
dest='method',
metavar='METHOD',
nargs=OPTIONAL,
default=None,
help='''
The HTTP method to be used for the request (GET, POST, PUT, DELETE, ...).
This argument can be omitted, in which case HTTPie will use POST if there
is some data to be sent, otherwise GET:
$ http example.org # => GET
$ http example.org hello=world # => POST
'''
)
positional.add_argument(
dest='url',
metavar='URL',
help='''
The scheme defaults to 'http://' if the URL does not include one.
(You can override this with: --default-scheme=https)
You can also use a shorthand for localhost
$ http :3000 # => http://localhost:3000
$ http :/foo # => http://localhost/foo
'''
)
positional.add_argument(
dest='request_items',
metavar='REQUEST_ITEM',
nargs=ZERO_OR_MORE,
default=None,
type=KeyValueArgType(*SEPARATOR_GROUP_ALL_ITEMS),
help=r'''
Optional key-value pairs to be included in the request. The separator used
determines the type:
':' HTTP headers:
Referer:https://httpie.io Cookie:foo=bar User-Agent:bacon/1.0
'==' URL parameters to be appended to the request URI:
search==httpie
'=' Data fields to be serialized into a JSON object (with --json, -j)
or form data (with --form, -f):
name=HTTPie language=Python description='CLI HTTP client'
':=' Non-string JSON data fields (only with --json, -j):
awesome:=true amount:=42 colors:='["red", "green", "blue"]'
'@' Form file fields (only with --form or --multipart):
cv@~/Documents/CV.pdf
cv@'~/Documents/CV.pdf;type=application/pdf'
'=@' A data field like '=', but takes a file path and embeds its content:
essay=@Documents/essay.txt
':=@' A raw JSON field like ':=', but takes a file path and embeds its content:
package:=@./package.json
You can use a backslash to escape a colliding separator in the field name:
field-name-with\:colon=value
'''
)
#######################################################################
# Content type.
#######################################################################
content_type = parser.add_argument_group(
title='Predefined Content Types',
description=None
)
content_type.add_argument(
'--json', '-j',
action='store_const',
const=RequestType.JSON,
dest='request_type',
help='''
(default) Data items from the command line are serialized as a JSON object.
The Content-Type and Accept headers are set to application/json
(if not specified).
'''
)
content_type.add_argument(
'--form', '-f',
action='store_const',
const=RequestType.FORM,
dest='request_type',
help='''
Data items from the command line are serialized as form fields.
The Content-Type is set to application/x-www-form-urlencoded (if not
specified). The presence of any file fields results in a
multipart/form-data request.
'''
)
content_type.add_argument(
'--multipart',
action='store_const',
const=RequestType.MULTIPART,
dest='request_type',
help='''
Similar to --form, but always sends a multipart/form-data
request (i.e., even without files).
'''
)
content_type.add_argument(
'--boundary',
help='''
Specify a custom boundary string for multipart/form-data requests.
Only has effect together with --form.
'''
)
content_type.add_argument(
'--raw',
help='''
This option allows you to pass raw request data without extra processing
(as opposed to the structured request items syntax):
$ http --raw='data' pie.dev/post
You can achieve the same by piping the data via stdin:
$ echo data | http pie.dev/post
Or have HTTPie load the raw data from a file:
$ http pie.dev/post @data.txt
'''
)
#######################################################################
# Content processing.
#######################################################################
content_processing = parser.add_argument_group(
title='Content Processing Options',
description=None
)
content_processing.add_argument(
'--compress', '-x',
action='count',
default=0,
help='''
Content compressed (encoded) with Deflate algorithm.
The Content-Encoding header is set to deflate.
Compression is skipped if it appears that the compression ratio is
negative. Compression can be forced by repeating the argument.
'''
)
#######################################################################
# Output processing
#######################################################################
output_processing = parser.add_argument_group(title='Output Processing')
output_processing.add_argument(
'--pretty',
dest='prettify',
default=PRETTY_STDOUT_TTY_ONLY,
choices=sorted(PRETTY_MAP.keys()),
help='''
Controls output processing. The value can be "none" to not prettify
the output (default for redirected output), "all" to apply both colors
and formatting (default for terminal output), "colors", or "format".
'''
)
def format_style_help(available_styles):
return '''
Output coloring style (default is "{default}"). It can be one of:
{available_styles}
The "{auto_style}" style follows your terminal's ANSI color styles.
For non-{auto_style} styles to work properly, please make sure that the
$TERM environment variable is set to "xterm-256color" or similar
(e.g., via `export TERM=xterm-256color' in your ~/.bashrc).
'''.format(
default=DEFAULT_STYLE,
available_styles='\n'.join(
f' {line.strip()}'
for line in wrap(', '.join(available_styles), 60)
).strip(),
auto_style=AUTO_STYLE,
)
output_processing.add_argument(
'--style', '-s',
dest='style',
metavar='STYLE',
default=DEFAULT_STYLE,
action='lazy_choices',
getter=get_available_styles,
help_formatter=format_style_help
)
_sorted_kwargs = {
'action': 'append_const',
'const': SORTED_FORMAT_OPTIONS_STRING,
'dest': 'format_options'
}
_unsorted_kwargs = {
'action': 'append_const',
'const': UNSORTED_FORMAT_OPTIONS_STRING,
'dest': 'format_options'
}
# The closest approx. of the documented resetting to default via --no-<option>.
# We hide them from the doc because they act only as low-level aliases here.
output_processing.add_argument('--no-unsorted', **_sorted_kwargs, help=SUPPRESS)
output_processing.add_argument('--no-sorted', **_unsorted_kwargs, help=SUPPRESS)
output_processing.add_argument(
'--unsorted',
**_unsorted_kwargs,
help=f'''
Disables all sorting while formatting output. It is a shortcut for:
--format-options={UNSORTED_FORMAT_OPTIONS_STRING}
'''
)
output_processing.add_argument(
'--sorted',
**_sorted_kwargs,
help=f'''
Re-enables all sorting options while formatting output. It is a shortcut for:
--format-options={SORTED_FORMAT_OPTIONS_STRING}
'''
)
output_processing.add_argument(
'--response-charset',
metavar='ENCODING',
type=response_charset_type,
help='''
Override the response encoding for terminal display purposes, e.g.:
--response-charset=utf8
--response-charset=big5
'''
)
output_processing.add_argument(
'--response-mime',
metavar='MIME_TYPE',
type=response_mime_type,
help='''
Override the response mime type for coloring and formatting for the terminal, e.g.:
--response-mime=application/json
--response-mime=text/xml
'''
)
output_processing.add_argument(
'--format-options',
action='append',
help='''
Controls output formatting. Only relevant when formatting is enabled
through (explicit or implied) --pretty=all or --pretty=format.
The following are the default options:
{option_list}
You may use this option multiple times, as well as specify multiple
comma-separated options at the same time. For example, this modifies the
settings to disable the sorting of JSON keys, and sets the indent size to 2:
--format-options json.sort_keys:false,json.indent:2
This is something you will typically put into your config file.
'''.format(
option_list='\n'.join(
f' {option}' for option in DEFAULT_FORMAT_OPTIONS).strip()
)
)
#######################################################################
# Output options
#######################################################################
output_options = parser.add_argument_group(title='Output Options')
output_options.add_argument(
'--print', '-p',
dest='output_options',
metavar='WHAT',
help=f'''
String specifying what the output should contain:
'{OUT_REQ_HEAD}' request headers
'{OUT_REQ_BODY}' request body
'{OUT_RESP_HEAD}' response headers
'{OUT_RESP_BODY}' response body
'{OUT_RESP_META}' response metadata
The default behaviour is '{OUTPUT_OPTIONS_DEFAULT}' (i.e., the response
headers and body are printed), if standard output is not redirected.
If the output is piped to another program or to a file, then only the
response body is printed by default.
'''
)
output_options.add_argument(
'--headers', '-h',
dest='output_options',
action='store_const',
const=OUT_RESP_HEAD,
help=f'''
Print only the response headers. Shortcut for --print={OUT_RESP_HEAD}.
'''
)
output_options.add_argument(
'--meta', '-m',
dest='output_options',
action='store_const',
const=OUT_RESP_META,
help=f'''
Print only the response metadata. Shortcut for --print={OUT_RESP_META}.
'''
)
output_options.add_argument(
'--body', '-b',
dest='output_options',
action='store_const',
const=OUT_RESP_BODY,
help=f'''
Print only the response body. Shortcut for --print={OUT_RESP_BODY}.
'''
)
output_options.add_argument(
'--verbose', '-v',
dest='verbose',
action='count',
default=0,
help=f'''
Verbose output. At level one (a single `-v`/`--verbose`), print
the whole request as well as the response, plus any intermediary
requests/responses (such as redirects). At level two and higher,
also print the response metadata.
Level one is a shortcut for: --all --print={''.join(BASE_OUTPUT_OPTIONS)}
Level two is a shortcut for: --all --print={''.join(OUTPUT_OPTIONS)}
'''
)
output_options.add_argument(
'--all',
default=False,
action='store_true',
help='''
By default, only the final request/response is shown. Use this flag to show
any intermediary requests/responses as well. Intermediary requests include
followed redirects (with --follow), the first unauthorized request when
Digest auth is used (--auth=digest), etc.
'''
)
output_options.add_argument(
'--history-print', '-P',
dest='output_options_history',
metavar='WHAT',
help='''
The same as --print, -p but applies only to intermediary requests/responses
(such as redirects) when their inclusion is enabled with --all. If this
option is not specified, then they are formatted the same way as the final
response.
'''
)
output_options.add_argument(
'--stream', '-S',
action='store_true',
default=False,
help='''
Always stream the response body by line, i.e., behave like `tail -f'.
Without --stream and with --pretty (either set or implied),
HTTPie fetches the whole response before it outputs the processed data.
Set this option when you want to continuously display a prettified
long-lived response, such as one from the Twitter streaming API.
It is useful also without --pretty: It ensures that the output is flushed
more often and in smaller chunks.
'''
)
output_options.add_argument(
'--output', '-o',
type=FileType('a+b'),
dest='output_file',
metavar='FILE',
help='''
Save output to FILE instead of stdout. If --download is also set, then only
the response body is saved to FILE. Other parts of the HTTP exchange are
printed to stderr.
'''
)
output_options.add_argument(
'--download', '-d',
action='store_true',
default=False,
help='''
Do not print the response body to stdout. Rather, download it and store it
in a file. The filename is guessed unless specified with --output
[filename]. This action is similar to the default behaviour of wget.
'''
)
output_options.add_argument(
'--continue', '-c',
dest='download_resume',
action='store_true',
default=False,
help='''
Resume an interrupted download. Note that the --output option needs to be
specified as well.
'''
)
output_options.add_argument(
'--quiet', '-q',
action='count',
default=0,
help='''
Do not print to stdout or stderr, except for errors and warnings when provided once.
Provide twice to suppress warnings as well.
stdout is still redirected if --output is specified.
This flag doesn't affect downloads beyond suppressing terminal output.
'''
)
#######################################################################
# Sessions
#######################################################################
sessions = parser.add_argument_group(title='Sessions') \
.add_mutually_exclusive_group(required=False)
session_name_validator = SessionNameValidator(
'Session name contains invalid characters.'
)
sessions.add_argument(
'--session',
metavar='SESSION_NAME_OR_PATH',
type=session_name_validator,
help=f'''
Create, or reuse and update a session. Within a session, custom headers,
auth credentials, as well as any cookies sent by the server, persist between
requests.
Session files are stored in:
{DEFAULT_SESSIONS_DIR}/<HOST>/<SESSION_NAME>.json.
'''
)
sessions.add_argument(
'--session-read-only',
metavar='SESSION_NAME_OR_PATH',
type=session_name_validator,
help='''
Create or read a session without updating it from the request/response
exchange.
'''
)
#######################################################################
# Authentication
#######################################################################
# ``requests.request`` keyword arguments.
auth = parser.add_argument_group(title='Authentication')
auth.add_argument(
'--auth', '-a',
default=None,
metavar='USER[:PASS] | TOKEN',
help='''
For username/password based authentication mechanisms (e.g.
basic auth or digest auth), if only the username is provided
(-a username), HTTPie will prompt for the password.
''',
)
def format_auth_help(auth_plugins_mapping):
auth_plugins = list(auth_plugins_mapping.values())
return '''
The authentication mechanism to be used. Defaults to "{default}".
{types}
'''.format(default=auth_plugins[0].auth_type, types='\n '.join(
'"{type}": {name}{package}{description}'.format(
type=plugin.auth_type,
name=plugin.name,
package=(
'' if issubclass(plugin, BuiltinAuthPlugin)
else f' (provided by {plugin.package_name})'
),
description=(
'' if not plugin.description else
'\n ' + ('\n '.join(wrap(plugin.description)))
)
)
for plugin in auth_plugins
))
auth.add_argument(
'--auth-type', '-A',
action='lazy_choices',
default=None,
getter=plugin_manager.get_auth_plugin_mapping,
sort=True,
cache=False,
help_formatter=format_auth_help,
)
auth.add_argument(
'--ignore-netrc',
default=False,
action='store_true',
help='''
Ignore credentials from .netrc.
''',
)
#######################################################################
# Network
#######################################################################
network = parser.add_argument_group(title='Network')
network.add_argument(
'--offline',
default=False,
action='store_true',
help='''
Build the request and print it but don't actually send it.
'''
)
network.add_argument(
'--proxy',
default=[],
action='append',
metavar='PROTOCOL:PROXY_URL',
type=KeyValueArgType(SEPARATOR_PROXY),
help='''
String mapping protocol to the URL of the proxy
(e.g. http:http://foo.bar:3128). You can specify multiple proxies with
different protocols. The environment variables $ALL_PROXY, $HTTP_PROXY,
and $HTTPS_proxy are supported as well.
'''
)
network.add_argument(
'--follow', '-F',
default=False,
action='store_true',
help='''
Follow 30x Location redirects.
'''
)
network.add_argument(
'--max-redirects',
type=int,
default=30,
help='''
By default, requests have a limit of 30 redirects (works with --follow).
'''
)
network.add_argument(
'--max-headers',
type=int,
default=0,
help='''
The maximum number of response headers to be read before giving up
(default 0, i.e., no limit).
'''
)
network.add_argument(
'--timeout',
type=float,
default=0,
metavar='SECONDS',
help='''
The connection timeout of the request in seconds.
The default value is 0, i.e., there is no timeout limit.
This is not a time limit on the entire response download;
rather, an error is reported if the server has not issued a response for
timeout seconds (more precisely, if no bytes have been received on
the underlying socket for timeout seconds).
'''
)
network.add_argument(
'--check-status',
default=False,
action='store_true',
help='''
By default, HTTPie exits with 0 when no network or other fatal errors
occur. This flag instructs HTTPie to also check the HTTP status code and
exit with an error if the status indicates one.
When the server replies with a 4xx (Client Error) or 5xx (Server Error)
status code, HTTPie exits with 4 or 5 respectively. If the response is a
3xx (Redirect) and --follow hasn't been set, then the exit status is 3.
Also an error message is written to stderr if stdout is redirected.
'''
)
network.add_argument(
'--path-as-is',
default=False,
action='store_true',
help='''
Bypass dot segment (/../ or /./) URL squashing.
'''
)
network.add_argument(
'--chunked',
default=False,
action='store_true',
help='''
Enable streaming via chunked transfer encoding.
The Transfer-Encoding header is set to chunked.
'''
)
#######################################################################
# SSL
#######################################################################
ssl = parser.add_argument_group(title='SSL')
ssl.add_argument(
'--verify',
default='yes',
help='''
Set to "no" (or "false") to skip checking the host's SSL certificate.
Defaults to "yes" ("true"). You can also pass the path to a CA_BUNDLE file
for private certs. (Or you can set the REQUESTS_CA_BUNDLE environment
variable instead.)
'''
)
ssl.add_argument(
'--ssl',
dest='ssl_version',
choices=sorted(AVAILABLE_SSL_VERSION_ARG_MAPPING.keys()),
help='''
The desired protocol version to use. This will default to
SSL v2.3 which will negotiate the highest protocol that both
the server and your installation of OpenSSL support. Available protocols
may vary depending on OpenSSL installation (only the supported ones
are shown here).
'''
)
ssl.add_argument(
'--ciphers',
help=f'''
A string in the OpenSSL cipher list format. By default, the following
is used:
{DEFAULT_SSL_CIPHERS}
'''
)
ssl.add_argument(
'--cert',
default=None,
type=readable_file_arg,
help='''
You can specify a local cert to use as client side SSL certificate.
This file may either contain both private key and certificate or you may
specify --cert-key separately.
'''
)
ssl.add_argument(
'--cert-key',
default=None,
type=readable_file_arg,
help='''
The private key to use with SSL. Only needed if --cert is given and the
certificate file does not contain the private key.
'''
)
#######################################################################
# Troubleshooting
#######################################################################
troubleshooting = parser.add_argument_group(title='Troubleshooting')
troubleshooting.add_argument(
'--ignore-stdin', '-I',
action='store_true',
default=False,
help='''
Do not attempt to read stdin.
'''
)
troubleshooting.add_argument(
'--help',
action='help',
default=SUPPRESS,
help='''
Show this help message and exit.
'''
)
troubleshooting.add_argument(
'--version',
action='version',
version=__version__,
help='''
Show version and exit.
'''
)
troubleshooting.add_argument(
'--traceback',
action='store_true',
default=False,
help='''
Prints the exception traceback should one occur.
'''
)
troubleshooting.add_argument(
'--default-scheme',
default="http",
help='''
The default scheme to use if not specified in the URL.
'''
)
troubleshooting.add_argument(
'--debug',
action='store_true',
default=False,
help='''
Prints the exception traceback should one occur, as well as other
information useful for debugging HTTPie itself and for reporting bugs.
'''
)

View File

@ -1,84 +0,0 @@
from collections import OrderedDict
from multidict import MultiDict, CIMultiDict
class BaseMultiDict(MultiDict):
"""
Base class for all MultiDicts.
"""
class HTTPHeadersDict(CIMultiDict, BaseMultiDict):
"""
Headers are case-insensitive and multiple values are supported
through the `add()` API.
"""
def add(self, key, value):
"""
Add or update a new header.
If the given `value` is `None`, then all the previous
values will be overwritten and the value will be set
to `None`.
"""
if value is None:
self[key] = value
return None
# If the previous value for the given header is `None`
# then discard it since we are explicitly giving a new
# value for it.
if key in self and self.getone(key) is None:
self.popone(key)
super().add(key, value)
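# Illustrative sketch, not part of the original file: repeated add() calls keep
# multiple values for the same header, which is what repeated `Header:value`
# items on the command line rely on:
#
#   headers = HTTPHeadersDict()
#   headers.add('Cookie', 'a=1')
#   headers.add('Cookie', 'b=2')
#   # headers.getall('Cookie') == ['a=1', 'b=2']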
class RequestJSONDataDict(OrderedDict):
pass
class MultiValueOrderedDict(OrderedDict):
"""Multi-value dict for URL parameters and form data."""
def __setitem__(self, key, value):
"""
If `key` is assigned more than once, `self[key]` holds a
`list` of all the values.
This allows having multiple fields with the same name in form
data and URL params.
"""
assert not isinstance(value, list)
if key not in self:
super().__setitem__(key, value)
else:
if not isinstance(self[key], list):
super().__setitem__(key, [self[key]])
self[key].append(value)
def items(self):
for key, values in super().items():
if not isinstance(values, list):
values = [values]
for value in values:
yield key, value
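# Illustrative sketch, not from the original file: assigning the same key twice
# collects the values into a list, and items() flattens them back into repeated
# (key, value) pairs, which is what repeated URL params and form fields need:
#
#   d = MultiValueOrderedDict()
#   d['tag'] = 'python'
#   d['tag'] = 'cli'
#   # d['tag'] == ['python', 'cli']
#   # list(d.items()) == [('tag', 'python'), ('tag', 'cli')]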
class RequestQueryParamsDict(MultiValueOrderedDict):
pass
class RequestDataDict(MultiValueOrderedDict):
pass
class MultipartRequestDataDict(MultiValueOrderedDict):
pass
class RequestFilesDict(RequestDataDict):
pass

View File

@ -1,2 +0,0 @@
class ParseError(Exception):
pass

View File

@ -1,344 +0,0 @@
from enum import Enum, auto
from typing import (
Any,
Iterator,
NamedTuple,
Optional,
List,
NoReturn,
Type,
Union,
)
from httpie.cli.constants import OPEN_BRACKET, CLOSE_BRACKET, BACKSLASH, HIGHLIGHTER
class HTTPieSyntaxError(ValueError):
def __init__(
self,
source: str,
token: Optional['Token'],
message: str,
message_kind: str = 'Syntax',
) -> None:
self.source = source
self.token = token
self.message = message
self.message_kind = message_kind
def __str__(self):
lines = [f'HTTPie {self.message_kind} Error: {self.message}']
if self.token is not None:
lines.append(self.source)
lines.append(
' ' * (self.token.start)
+ HIGHLIGHTER * (self.token.end - self.token.start)
)
return '\n'.join(lines)
class TokenKind(Enum):
TEXT = auto()
NUMBER = auto()
LEFT_BRACKET = auto()
RIGHT_BRACKET = auto()
def to_name(self) -> str:
for key, value in OPERATORS.items():
if value is self:
return repr(key)
else:
return 'a ' + self.name.lower()
OPERATORS = {OPEN_BRACKET: TokenKind.LEFT_BRACKET, CLOSE_BRACKET: TokenKind.RIGHT_BRACKET}
SPECIAL_CHARS = OPERATORS.keys() | {BACKSLASH}
class Token(NamedTuple):
kind: TokenKind
value: Union[str, int]
start: int
end: int
def assert_cant_happen() -> NoReturn:
raise ValueError('Unexpected value')
def check_escaped_int(value: str) -> str:
if not value.startswith(BACKSLASH):
raise ValueError('Not an escaped int')
try:
int(value[1:])
except ValueError as exc:
raise ValueError('Not an escaped int') from exc
else:
return value[1:]
def tokenize(source: str) -> Iterator[Token]:
cursor = 0
backslashes = 0
buffer = []
def send_buffer() -> Iterator[Token]:
nonlocal backslashes
if not buffer:
return None
value = ''.join(buffer)
for variation, kind in [
(int, TokenKind.NUMBER),
(check_escaped_int, TokenKind.TEXT),
]:
try:
value = variation(value)
except ValueError:
continue
else:
break
else:
kind = TokenKind.TEXT
yield Token(
kind, value, start=cursor - (len(buffer) + backslashes), end=cursor
)
buffer.clear()
backslashes = 0
def can_advance() -> bool:
return cursor < len(source)
while can_advance():
index = source[cursor]
if index in OPERATORS:
yield from send_buffer()
yield Token(OPERATORS[index], index, cursor, cursor + 1)
elif index == BACKSLASH and can_advance():
if source[cursor + 1] in SPECIAL_CHARS:
backslashes += 1
else:
buffer.append(index)
buffer.append(source[cursor + 1])
cursor += 1
else:
buffer.append(index)
cursor += 1
yield from send_buffer()
class PathAction(Enum):
KEY = auto()
INDEX = auto()
APPEND = auto()
# Pseudo action, used by the interpreter
SET = auto()
def to_string(self) -> str:
return self.name.lower()
class Path:
def __init__(
self,
kind: PathAction,
accessor: Optional[Union[str, int]] = None,
tokens: Optional[List[Token]] = None,
is_root: bool = False,
):
self.kind = kind
self.accessor = accessor
self.tokens = tokens or []
self.is_root = is_root
def reconstruct(self) -> str:
if self.kind is PathAction.KEY:
if self.is_root:
return str(self.accessor)
return OPEN_BRACKET + self.accessor + CLOSE_BRACKET
elif self.kind is PathAction.INDEX:
return OPEN_BRACKET + str(self.accessor) + CLOSE_BRACKET
elif self.kind is PathAction.APPEND:
return OPEN_BRACKET + CLOSE_BRACKET
else:
assert_cant_happen()
def parse(source: str) -> Iterator[Path]:
"""
start: literal? path*
literal: TEXT | NUMBER
path:
key_path
| index_path
| append_path
key_path: LEFT_BRACKET TEXT RIGHT_BRACKET
index_path: LEFT_BRACKET NUMBER RIGHT_BRACKET
append_path: LEFT_BRACKET RIGHT_BRACKET
"""
tokens = list(tokenize(source))
cursor = 0
def can_advance():
return cursor < len(tokens)
def expect(*kinds):
nonlocal cursor
assert len(kinds) > 0
if can_advance():
token = tokens[cursor]
cursor += 1
if token.kind in kinds:
return token
elif tokens:
token = tokens[-1]._replace(
start=tokens[-1].end + 0, end=tokens[-1].end + 1
)
else:
token = None
if len(kinds) == 1:
suffix = kinds[0].to_name()
else:
suffix = ', '.join(kind.to_name() for kind in kinds[:-1])
suffix += ' or ' + kinds[-1].to_name()
message = f'Expecting {suffix}'
raise HTTPieSyntaxError(source, token, message)
root = Path(PathAction.KEY, '', is_root=True)
if can_advance():
token = tokens[cursor]
if token.kind in {TokenKind.TEXT, TokenKind.NUMBER}:
token = expect(TokenKind.TEXT, TokenKind.NUMBER)
root.accessor = str(token.value)
root.tokens.append(token)
yield root
while can_advance():
path_tokens = []
path_tokens.append(expect(TokenKind.LEFT_BRACKET))
token = expect(
TokenKind.TEXT, TokenKind.NUMBER, TokenKind.RIGHT_BRACKET
)
path_tokens.append(token)
if token.kind is TokenKind.RIGHT_BRACKET:
path = Path(PathAction.APPEND, tokens=path_tokens)
elif token.kind is TokenKind.TEXT:
path = Path(PathAction.KEY, token.value, tokens=path_tokens)
path_tokens.append(expect(TokenKind.RIGHT_BRACKET))
elif token.kind is TokenKind.NUMBER:
path = Path(PathAction.INDEX, token.value, tokens=path_tokens)
path_tokens.append(expect(TokenKind.RIGHT_BRACKET))
else:
assert_cant_happen()
yield path
JSON_TYPE_MAPPING = {
dict: 'object',
list: 'array',
int: 'number',
float: 'number',
str: 'string',
}
def interpret(context: Any, key: str, value: Any) -> Any:
cursor = context
paths = list(parse(key))
paths.append(Path(PathAction.SET, value))
def type_check(index: int, path: Path, expected_type: Type[Any]) -> None:
if not isinstance(cursor, expected_type):
if path.tokens:
pseudo_token = Token(
None, None, path.tokens[0].start, path.tokens[-1].end
)
else:
pseudo_token = None
cursor_type = JSON_TYPE_MAPPING.get(
type(cursor), type(cursor).__name__
)
required_type = JSON_TYPE_MAPPING[expected_type]
message = f"Can't perform {path.kind.to_string()!r} based access on "
message += repr(
''.join(path.reconstruct() for path in paths[:index])
)
message += (
f' which has a type of {cursor_type!r} but this operation'
)
message += f' requires a type of {required_type!r}.'
raise HTTPieSyntaxError(
key, pseudo_token, message, message_kind='Type'
)
def object_for(kind: str) -> Any:
if kind is PathAction.KEY:
return {}
elif kind in {PathAction.INDEX, PathAction.APPEND}:
return []
else:
assert_cant_happen()
for index, (path, next_path) in enumerate(zip(paths, paths[1:])):
if path.kind is PathAction.KEY:
type_check(index, path, dict)
if next_path.kind is PathAction.SET:
cursor[path.accessor] = next_path.accessor
break
cursor = cursor.setdefault(
path.accessor, object_for(next_path.kind)
)
elif path.kind is PathAction.INDEX:
type_check(index, path, list)
if path.accessor < 0:
raise HTTPieSyntaxError(
key,
path.tokens[1],
'Negative indexes are not supported.',
message_kind='Value',
)
cursor.extend([None] * (path.accessor - len(cursor) + 1))
if next_path.kind is PathAction.SET:
cursor[path.accessor] = next_path.accessor
break
if cursor[path.accessor] is None:
cursor[path.accessor] = object_for(next_path.kind)
cursor = cursor[path.accessor]
elif path.kind is PathAction.APPEND:
type_check(index, path, list)
if next_path.kind is PathAction.SET:
cursor.append(next_path.accessor)
break
cursor.append(object_for(next_path.kind))
cursor = cursor[-1]
else:
assert_cant_happen()
return context
def interpret_nested_json(pairs):
context = {}
for key, value in pairs:
interpret(context, key, value)
return context
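# Illustrative sketch, not part of the original module: the bracket syntax
# parsed above builds nested objects and arrays from flat key paths
# (hypothetical inputs, shown only for clarity):
#
#   interpret_nested_json([('user[name]', 'Ann'), ('user[tags][0]', 'admin')])
#   # -> {'user': {'name': 'Ann', 'tags': ['admin']}}
#
#   interpret_nested_json([('items[]', 'first'), ('items[]', 'second')])
#   # -> {'items': ['first', 'second']}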

View File

@ -1,226 +0,0 @@
import os
import functools
from typing import Callable, Dict, IO, List, Optional, Tuple, Union
from .argtypes import KeyValueArg
from .constants import (
SEPARATORS_GROUP_MULTIPART, SEPARATOR_DATA_EMBED_FILE_CONTENTS,
SEPARATOR_DATA_EMBED_RAW_JSON_FILE, SEPARATOR_GROUP_NESTED_JSON_ITEMS,
SEPARATOR_DATA_RAW_JSON, SEPARATOR_DATA_STRING, SEPARATOR_FILE_UPLOAD,
SEPARATOR_FILE_UPLOAD_TYPE, SEPARATOR_HEADER, SEPARATOR_HEADER_EMPTY,
SEPARATOR_HEADER_EMBED, SEPARATOR_QUERY_PARAM,
SEPARATOR_QUERY_EMBED_FILE, RequestType
)
from .dicts import (
BaseMultiDict, MultipartRequestDataDict, RequestDataDict,
RequestFilesDict, HTTPHeadersDict, RequestJSONDataDict,
RequestQueryParamsDict,
)
from .exceptions import ParseError
from .nested_json import interpret_nested_json
from ..utils import get_content_type, load_json_preserve_order_and_dupe_keys, split
class RequestItems:
def __init__(self, request_type: Optional[RequestType] = None):
self.headers = HTTPHeadersDict()
self.request_type = request_type
self.is_json = request_type is None or request_type is RequestType.JSON
self.data = RequestJSONDataDict() if self.is_json else RequestDataDict()
self.files = RequestFilesDict()
self.params = RequestQueryParamsDict()
# To preserve the order of fields in file upload multipart requests.
self.multipart_data = MultipartRequestDataDict()
@classmethod
def from_args(
cls,
request_item_args: List[KeyValueArg],
request_type: Optional[RequestType] = None,
) -> 'RequestItems':
instance = cls(request_type=request_type)
rules: Dict[str, Tuple[Callable, dict]] = {
SEPARATOR_HEADER: (
process_header_arg,
instance.headers,
),
SEPARATOR_HEADER_EMPTY: (
process_empty_header_arg,
instance.headers,
),
SEPARATOR_HEADER_EMBED: (
process_embed_header_arg,
instance.headers,
),
SEPARATOR_QUERY_PARAM: (
process_query_param_arg,
instance.params,
),
SEPARATOR_QUERY_EMBED_FILE: (
process_embed_query_param_arg,
instance.params,
),
SEPARATOR_FILE_UPLOAD: (
process_file_upload_arg,
instance.files,
),
SEPARATOR_DATA_STRING: (
process_data_item_arg,
instance.data,
),
SEPARATOR_DATA_EMBED_FILE_CONTENTS: (
process_data_embed_file_contents_arg,
instance.data,
),
SEPARATOR_GROUP_NESTED_JSON_ITEMS: (
process_data_nested_json_embed_args,
instance.data,
),
SEPARATOR_DATA_RAW_JSON: (
json_only(instance, process_data_raw_json_embed_arg),
instance.data,
),
SEPARATOR_DATA_EMBED_RAW_JSON_FILE: (
json_only(instance, process_data_embed_raw_json_file_arg),
instance.data,
),
}
if instance.is_json:
json_item_args, request_item_args = split(
request_item_args,
lambda arg: arg.sep in SEPARATOR_GROUP_NESTED_JSON_ITEMS
)
if json_item_args:
pairs = [
(arg.key, rules[arg.sep][0](arg))
for arg in json_item_args
]
processor_func, target_dict = rules[SEPARATOR_GROUP_NESTED_JSON_ITEMS]
value = processor_func(pairs)
target_dict.update(value)
# Then handle all other items.
for arg in request_item_args:
processor_func, target_dict = rules[arg.sep]
value = processor_func(arg)
if arg.sep in SEPARATORS_GROUP_MULTIPART:
instance.multipart_data[arg.key] = value
if isinstance(target_dict, BaseMultiDict):
target_dict.add(arg.key, value)
else:
target_dict[arg.key] = value
return instance
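# Illustrative sketch, not part of the original file: building RequestItems
# directly from already-parsed KeyValueArg instances (hypothetical values):
#
#   items = RequestItems.from_args([
#       KeyValueArg('Accept', 'application/json', ':', 'Accept:application/json'),
#       KeyValueArg('name', 'HTTPie', '=', 'name=HTTPie'),
#   ])
#   # items.headers['Accept'] == 'application/json'
#   # items.data['name'] == 'HTTPie'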
JSONType = Union[str, bool, int, list, dict]
def process_header_arg(arg: KeyValueArg) -> Optional[str]:
return arg.value or None
def process_embed_header_arg(arg: KeyValueArg) -> str:
return load_text_file(arg).rstrip('\n')
def process_empty_header_arg(arg: KeyValueArg) -> str:
if not arg.value:
return arg.value
raise ParseError(
f'Invalid item {arg.orig!r} (to specify an empty header use `Header;`)'
)
def process_query_param_arg(arg: KeyValueArg) -> str:
return arg.value
def process_embed_query_param_arg(arg: KeyValueArg) -> str:
return load_text_file(arg).rstrip('\n')
def process_file_upload_arg(arg: KeyValueArg) -> Tuple[str, IO, str]:
parts = arg.value.split(SEPARATOR_FILE_UPLOAD_TYPE)
filename = parts[0]
mime_type = parts[1] if len(parts) > 1 else None
try:
f = open(os.path.expanduser(filename), 'rb')
except OSError as e:
raise ParseError(f'{arg.orig!r}: {e}')
return (
os.path.basename(filename),
f,
mime_type or get_content_type(filename),
)
def process_data_item_arg(arg: KeyValueArg) -> str:
return arg.value
def process_data_embed_file_contents_arg(arg: KeyValueArg) -> str:
return load_text_file(arg)
def json_only(items: RequestItems, func: Callable[[KeyValueArg], JSONType]) -> str:
if items.is_json:
return func
@functools.wraps(func)
def wrapper(*args, **kwargs) -> str:
try:
ret = func(*args, **kwargs)
except ParseError:
ret = None
# If it is a basic type, then allow it
if isinstance(ret, (str, int, float)):
return str(ret)
else:
raise ParseError(
'Can\'t use complex JSON value types with '
'--form/--multipart.'
)
return wrapper
def process_data_embed_raw_json_file_arg(arg: KeyValueArg) -> JSONType:
contents = load_text_file(arg)
value = load_json(arg, contents)
return value
def process_data_raw_json_embed_arg(arg: KeyValueArg) -> JSONType:
value = load_json(arg, arg.value)
return value
def process_data_nested_json_embed_args(pairs) -> Dict[str, JSONType]:
return interpret_nested_json(pairs)
def load_text_file(item: KeyValueArg) -> str:
path = item.value
try:
with open(os.path.expanduser(path), 'rb') as f:
return f.read().decode()
except OSError as e:
raise ParseError(f'{item.orig!r}: {e}')
except UnicodeDecodeError:
raise ParseError(
f'{item.orig!r}: cannot embed the content of {item.value!r},'
' not a UTF-8 or ASCII-encoded text file'
)
def load_json(arg: KeyValueArg, contents: str) -> JSONType:
try:
return load_json_preserve_order_and_dupe_keys(contents)
except ValueError as e:
raise ParseError(f'{arg.orig!r}: {e}')

View File

@ -1,53 +0,0 @@
import argparse
from typing import Any, Callable, Generic, Iterator, Iterable, Optional, TypeVar
T = TypeVar('T')
class LazyChoices(argparse.Action, Generic[T]):
def __init__(
self,
*args,
getter: Callable[[], Iterable[T]],
help_formatter: Optional[Callable[[T], str]] = None,
sort: bool = False,
cache: bool = True,
**kwargs
) -> None:
self.getter = getter
self.help_formatter = help_formatter
self.sort = sort
self.cache = cache
self._help: Optional[str] = None
self._obj: Optional[Iterable[T]] = None
super().__init__(*args, **kwargs)
self.choices = self
def load(self) -> T:
if self._obj is None or not self.cache:
self._obj = self.getter()
assert self._obj is not None
return self._obj
@property
def help(self) -> str:
if self._help is None and self.help_formatter is not None:
self._help = self.help_formatter(self.load())
return self._help
@help.setter
def help(self, value: Any) -> None:
self._help = value
def __contains__(self, item: Any) -> bool:
return item in self.load()
def __iter__(self) -> Iterator[T]:
if self.sort:
return iter(sorted(self.load()))
else:
return iter(self.load())
def __call__(self, parser, namespace, values, option_string=None):
setattr(namespace, self.dest, values)

View File

@ -1,371 +1,94 @@
import argparse
import http.client
import json
import sys
from contextlib import contextmanager
from time import monotonic
from typing import Any, Dict, Callable, Iterable
from urllib.parse import urlparse, urlunparse
from pprint import pformat
import requests
# noinspection PyPackageRequirements
import urllib3
import requests.auth
from . import sessions
from . import __version__
from .adapters import HTTPieHTTPAdapter
from .context import Environment
from .cli.dicts import HTTPHeadersDict
from .encoding import UTF8
from .models import RequestsMessage
from .plugins.registry import plugin_manager
from .sessions import get_httpie_session
from .ssl_ import AVAILABLE_SSL_VERSION_ARG_MAPPING, HTTPieHTTPSAdapter
from .uploads import (
compress_request, prepare_request_body,
get_multipart_data_and_content_type,
)
from .utils import get_expired_cookies, repr_dict
urllib3.disable_warnings()
FORM_CONTENT_TYPE = f'application/x-www-form-urlencoded; charset={UTF8}'
JSON_CONTENT_TYPE = 'application/json'
JSON_ACCEPT = f'{JSON_CONTENT_TYPE}, */*;q=0.5'
DEFAULT_UA = f'HTTPie/{__version__}'
FORM = 'application/x-www-form-urlencoded; charset=utf-8'
JSON = 'application/json; charset=utf-8'
DEFAULT_UA = 'HTTPie/%s' % __version__
def collect_messages(
env: Environment,
args: argparse.Namespace,
request_body_read_callback: Callable[[bytes], None] = None,
) -> Iterable[RequestsMessage]:
httpie_session = None
httpie_session_headers = None
if args.session or args.session_read_only:
httpie_session = get_httpie_session(
config_dir=env.config.directory,
session_name=args.session or args.session_read_only,
host=args.headers.get('Host'),
url=args.url,
)
httpie_session_headers = httpie_session.headers
def get_response(args, config_dir):
"""Send the request and return a `request.Response`."""
request_kwargs = make_request_kwargs(
env,
args=args,
base_headers=httpie_session_headers,
request_body_read_callback=request_body_read_callback
)
send_kwargs = make_send_kwargs(args)
send_kwargs_mergeable_from_env = make_send_kwargs_mergeable_from_env(args)
requests_session = build_requests_session(
ssl_version=args.ssl_version,
ciphers=args.ciphers,
verify=bool(send_kwargs_mergeable_from_env['verify'])
)
if httpie_session:
httpie_session.update_headers(request_kwargs['headers'])
requests_session.cookies = httpie_session.cookies
if args.auth_plugin:
# Save auth from CLI to HTTPie session.
httpie_session.auth = {
'type': args.auth_plugin.auth_type,
'raw_auth': args.auth_plugin.raw_auth,
}
elif httpie_session.auth:
# Apply auth from HTTPie session
request_kwargs['auth'] = httpie_session.auth
requests_kwargs = get_requests_kwargs(args)
if args.debug:
# TODO: reflect the split between request and send kwargs.
dump_request(request_kwargs)
sys.stderr.write('\n>>> requests.request(%s)\n\n'
% pformat(requests_kwargs))
request = requests.Request(**request_kwargs)
prepared_request = requests_session.prepare_request(request)
apply_missing_repeated_headers(prepared_request, request.headers)
if args.path_as_is:
prepared_request.url = ensure_path_as_is(
orig_url=args.url,
prepped_url=prepared_request.url,
)
if args.compress and prepared_request.body:
compress_request(
request=prepared_request,
always=args.compress > 1,
)
response_count = 0
expired_cookies = []
while prepared_request:
yield prepared_request
if not args.offline:
send_kwargs_merged = requests_session.merge_environment_settings(
url=prepared_request.url,
**send_kwargs_mergeable_from_env,
)
with max_headers(args.max_headers):
response = requests_session.send(
request=prepared_request,
**send_kwargs_merged,
**send_kwargs,
)
response._httpie_headers_parsed_at = monotonic()
expired_cookies += get_expired_cookies(
response.headers.get('Set-Cookie', '')
)
response_count += 1
if response.next:
if args.max_redirects and response_count == args.max_redirects:
raise requests.TooManyRedirects
if args.follow:
prepared_request = response.next
if args.all:
yield response
continue
yield response
break
if httpie_session:
if httpie_session.is_new() or not args.session_read_only:
httpie_session.cookies = requests_session.cookies
httpie_session.remove_cookies(
# TODO: take path & domain into account?
cookie['name'] for cookie in expired_cookies
)
httpie_session.save()
# noinspection PyProtectedMember
@contextmanager
def max_headers(limit):
# <https://github.com/httpie/httpie/issues/802>
# noinspection PyUnresolvedReferences
orig = http.client._MAXHEADERS
http.client._MAXHEADERS = limit or float('Inf')
try:
yield
finally:
http.client._MAXHEADERS = orig
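# Illustrative sketch, not part of the original file: max_headers() temporarily
# overrides http.client's hard-coded header limit around a single send and
# restores it afterwards, even if the request raises:
#
#   with max_headers(args.max_headers):   # 0 / falsy means "no limit"
#       response = requests_session.send(request=prepared_request, **send_kwargs)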
def build_requests_session(
verify: bool,
ssl_version: str = None,
ciphers: str = None,
) -> requests.Session:
requests_session = requests.Session()
# Install our adapter.
http_adapter = HTTPieHTTPAdapter()
https_adapter = HTTPieHTTPSAdapter(
ciphers=ciphers,
verify=verify,
ssl_version=(
AVAILABLE_SSL_VERSION_ARG_MAPPING[ssl_version]
if ssl_version else None
),
)
requests_session.mount('http://', http_adapter)
requests_session.mount('https://', https_adapter)
# Install adapters from plugins.
for plugin_cls in plugin_manager.get_transport_plugins():
transport_plugin = plugin_cls()
requests_session.mount(
prefix=transport_plugin.prefix,
adapter=transport_plugin.get_adapter(),
if not args.session and not args.session_read_only:
response = requests.request(**requests_kwargs)
else:
response = sessions.get_response(
config_dir=config_dir,
name=args.session or args.session_read_only,
request_kwargs=requests_kwargs,
read_only=bool(args.session_read_only),
)
return requests_session
return response
def dump_request(kwargs: dict):
sys.stderr.write(
f'\n>>> requests.request(**{repr_dict(kwargs)})\n\n')
def get_requests_kwargs(args):
"""Translate our `args` into `requests.request` keyword arguments."""
def finalize_headers(headers: HTTPHeadersDict) -> HTTPHeadersDict:
final_headers = HTTPHeadersDict()
for name, value in headers.items():
if value is not None:
# “leading or trailing LWS MAY be removed without
# changing the semantics of the field value”
# <https://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html>
# Also, requests raises `InvalidHeader` for leading spaces.
value = value.strip()
if isinstance(value, str):
# See <https://github.com/httpie/httpie/issues/212>
value = value.encode()
final_headers.add(name, value)
return final_headers
def apply_missing_repeated_headers(
prepared_request: requests.PreparedRequest,
original_headers: HTTPHeadersDict
) -> None:
"""Update the given `prepared_request`'s headers with the original
ones. This allows the requests to be prepared as usual, and then later
merged with headers that are specified multiple times."""
new_headers = HTTPHeadersDict(prepared_request.headers)
for prepared_name, prepared_value in prepared_request.headers.items():
if prepared_name not in original_headers:
continue
original_keys, original_values = zip(*filter(
lambda item: item[0].casefold() == prepared_name.casefold(),
original_headers.items()
))
if prepared_value not in original_values:
# If the current value is not among the initial values
# set for this field, then it means that this field got
# overridden on the way, and we should preserve it.
continue
new_headers.popone(prepared_name)
new_headers.update(zip(original_keys, original_values))
prepared_request.headers = new_headers
def make_default_headers(args: argparse.Namespace) -> HTTPHeadersDict:
default_headers = HTTPHeadersDict({
implicit_headers = {
'User-Agent': DEFAULT_UA
})
}
auto_json = args.data and not args.form
if args.json or auto_json:
default_headers['Accept'] = JSON_ACCEPT
if args.json or (auto_json and args.data):
default_headers['Content-Type'] = JSON_CONTENT_TYPE
implicit_headers['Accept'] = 'application/json'
if args.data:
implicit_headers['Content-Type'] = JSON
if isinstance(args.data, dict):
if args.data:
args.data = json.dumps(args.data)
else:
# We need to set data to an empty string to prevent requests
# from assigning an empty list to `response.request.data`.
args.data = ''
elif args.form and not args.files:
# If sending files, `requests` will set
# the `Content-Type` for us.
default_headers['Content-Type'] = FORM_CONTENT_TYPE
return default_headers
implicit_headers['Content-Type'] = FORM
for name, value in implicit_headers.items():
if name not in args.headers:
args.headers[name] = value
def make_send_kwargs(args: argparse.Namespace) -> dict:
return {
'timeout': args.timeout or None,
'allow_redirects': False,
}
credentials = None
if args.auth:
credentials = {
'basic': requests.auth.HTTPBasicAuth,
'digest': requests.auth.HTTPDigestAuth,
}[args.auth_type](args.auth.key, args.auth.value)
def make_send_kwargs_mergeable_from_env(args: argparse.Namespace) -> dict:
cert = None
if args.cert:
cert = args.cert
if args.cert_key:
cert = cert, args.cert_key
return {
'proxies': {p.key: p.value for p in args.proxy},
kwargs = {
'stream': True,
'verify': {
'yes': True,
'true': True,
'no': False,
'false': False,
}.get(args.verify.lower(), args.verify),
'cert': cert,
}
def json_dict_to_request_body(data: Dict[str, Any]) -> str:
# Propagate the top-level list if there is only one
# item in the object, with an empty key.
if len(data) == 1:
[(key, value)] = data.items()
if key == '' and isinstance(value, list):
data = value
if data:
data = json.dumps(data)
else:
# We need to set data to an empty string to prevent requests
# from assigning an empty list to `response.request.data`.
data = ''
return data
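# Illustrative sketch, not part of the original file: the special-cased empty
# key lets a top-level JSON array be sent instead of an object:
#
#   json_dict_to_request_body({'': [1, 2, 3]})   # -> '[1, 2, 3]'
#   json_dict_to_request_body({'a': 1})          # -> '{"a": 1}'
#   json_dict_to_request_body({})                # -> ''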
def make_request_kwargs(
env: Environment,
args: argparse.Namespace,
base_headers: HTTPHeadersDict = None,
request_body_read_callback=lambda chunk: chunk
) -> dict:
"""
Translate our `args` into `requests.Request` keyword arguments.
"""
files = args.files
# Serialize JSON data, if needed.
data = args.data
auto_json = data and not args.form
if (args.json or auto_json) and isinstance(data, dict):
data = json_dict_to_request_body(data)
# Finalize headers.
headers = make_default_headers(args)
if base_headers:
headers.update(base_headers)
headers.update(args.headers)
if args.offline and args.chunked and 'Transfer-Encoding' not in headers:
# When online, we let requests set the header instead, so that we can more
# easily verify that chunking is taking place.
headers['Transfer-Encoding'] = 'chunked'
headers = finalize_headers(headers)
if (args.form and files) or args.multipart:
data, headers['Content-Type'] = get_multipart_data_and_content_type(
data=args.multipart_data,
boundary=args.boundary,
content_type=args.headers.get('Content-Type'),
)
return {
'method': args.method.lower(),
'url': args.url,
'headers': headers,
'data': prepare_request_body(
env,
data,
body_read_callback=request_body_read_callback,
chunked=args.chunked,
offline=args.offline,
content_length_header_value=headers.get('Content-Length'),
),
'auth': args.auth,
'params': args.params.items(),
'headers': args.headers,
'data': args.data,
'verify': {
'yes': True,
'no': False
}.get(args.verify, args.verify),
'timeout': args.timeout,
'auth': credentials,
'proxies': dict((p.key, p.value) for p in args.proxy),
'files': args.files,
'allow_redirects': args.follow,
'params': args.params,
}
def ensure_path_as_is(orig_url: str, prepped_url: str) -> str:
"""
Handle `--path-as-is` by replacing the path component of the prepared
URL with the path component from the original URL. Other parts stay
untouched because other (welcome) processing on the URL might have
taken place.
<https://github.com/httpie/httpie/issues/895>
<https://ec.haxx.se/http/http-basics#path-as-is>
<https://curl.haxx.se/libcurl/c/CURLOPT_PATH_AS_IS.html>
>>> ensure_path_as_is('http://foo/../', 'http://foo/?foo=bar')
'http://foo/../?foo=bar'
"""
parsed_orig, parsed_prepped = urlparse(orig_url), urlparse(prepped_url)
final_dict = {
# noinspection PyProtectedMember
**parsed_prepped._asdict(),
'path': parsed_orig.path,
}
return urlunparse(tuple(final_dict.values()))
return kwargs

View File

@ -1,90 +1,18 @@
import sys
from typing import Any, Optional, Iterable
is_windows = 'win32' in str(sys.platform).lower()
"""
Python 2/3 compatibility.
"""
#noinspection PyUnresolvedReferences
from requests.compat import (
is_windows,
bytes,
str,
is_py3,
is_py26,
)
try:
from functools import cached_property
from urllib.parse import urlsplit
except ImportError:
# Can be removed once we drop Python <3.8 support.
# Taken from `django.utils.functional.cached_property`.
class cached_property:
"""
Decorator that converts a method with a single self argument into a
property cached on the instance.
from urlparse import urlsplit
A cached property can be made out of an existing method:
(e.g. ``url = cached_property(get_absolute_url)``).
The optional ``name`` argument is obsolete as of Python 3.6 and will be
deprecated in Django 4.0 (#30127).
"""
name = None
@staticmethod
def func(instance):
raise TypeError(
'Cannot use cached_property instance without calling '
'__set_name__() on it.'
)
def __init__(self, func, name=None):
self.real_func = func
self.__doc__ = getattr(func, '__doc__')
def __set_name__(self, owner, name):
if self.name is None:
self.name = name
self.func = self.real_func
elif name != self.name:
raise TypeError(
"Cannot assign the same cached_property to two different names "
"(%r and %r)." % (self.name, name)
)
def __get__(self, instance, cls=None):
"""
Call the function and put the return value in instance.__dict__ so that
subsequent attribute access on the instance returns the cached value
instead of calling cached_property.__get__().
"""
if instance is None:
return self
res = instance.__dict__[self.name] = self.func(instance)
return res
# importlib_metadata was a provisional module, so the APIs changed quite a few times
# between 3.8-3.10. It was also not included in the standard library until 3.8, so
# we install the backport for <3.8.
if sys.version_info >= (3, 8):
import importlib.metadata as importlib_metadata
else:
import importlib_metadata
def find_entry_points(entry_points: Any, group: str) -> Iterable[importlib_metadata.EntryPoint]:
if hasattr(entry_points, "select"): # Python 3.10+ / importlib_metadata >= 3.9.0
return entry_points.select(group=group)
else:
return set(entry_points.get(group, ()))
def get_dist_name(entry_point: importlib_metadata.EntryPoint) -> Optional[str]:
dist = getattr(entry_point, "dist", None)
if dist is not None: # Python 3.10+
return dist.name
match = entry_point.pattern.match(entry_point.value)
if not (match and match.group('module')):
return None
package = match.group('module').split('.')[0]
try:
metadata = importlib_metadata.metadata(package)
except importlib_metadata.PackageNotFoundError:
return None
else:
return metadata.get('name')
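A hypothetical usage sketch of the two helpers above (the entry-point group name is an assumption, not taken from the diff): list which distribution provides each plugin entry point, regardless of Python version.
eps = importlib_metadata.entry_points()
for entry_point in find_entry_points(eps, group='httpie.plugins.auth.v1'):  # group name assumed
    print(get_dist_name(entry_point), '->', entry_point.name)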

View File

@ -1,134 +1,95 @@
import json
import os
from pathlib import Path
from typing import Union
import json
import errno
from . import __version__
from .compat import is_windows
from .encoding import UTF8
ENV_XDG_CONFIG_HOME = 'XDG_CONFIG_HOME'
ENV_HTTPIE_CONFIG_DIR = 'HTTPIE_CONFIG_DIR'
DEFAULT_CONFIG_DIRNAME = 'httpie'
DEFAULT_RELATIVE_XDG_CONFIG_HOME = Path('.config')
DEFAULT_RELATIVE_LEGACY_CONFIG_DIR = Path('.httpie')
DEFAULT_WINDOWS_CONFIG_DIR = Path(
os.path.expandvars('%APPDATA%')) / DEFAULT_CONFIG_DIRNAME
def get_default_config_dir() -> Path:
"""
Return the path to the httpie configuration directory.
This directory isn't guaranteed to exist, nor are any of its
ancestors (only the legacy ~/.httpie, if returned, is guaranteed to exist).
XDG Base Directory Specification support:
<https://wiki.archlinux.org/index.php/XDG_Base_Directory>
$XDG_CONFIG_HOME is supported; $XDG_CONFIG_DIRS is not
"""
# 1. explicitly set through env
env_config_dir = os.environ.get(ENV_HTTPIE_CONFIG_DIR)
if env_config_dir:
return Path(env_config_dir)
# 2. Windows
if is_windows:
return DEFAULT_WINDOWS_CONFIG_DIR
home_dir = Path.home()
# 3. legacy ~/.httpie
legacy_config_dir = home_dir / DEFAULT_RELATIVE_LEGACY_CONFIG_DIR
if legacy_config_dir.exists():
return legacy_config_dir
# 4. XDG
xdg_config_home_dir = os.environ.get(
ENV_XDG_CONFIG_HOME, # 4.1. explicit
home_dir / DEFAULT_RELATIVE_XDG_CONFIG_HOME # 4.2. default
)
return Path(xdg_config_home_dir) / DEFAULT_CONFIG_DIRNAME
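An illustrative check of the resolution order above (the directory value is hypothetical, not from the diff): an explicitly set HTTPIE_CONFIG_DIR wins over both the legacy location and XDG.
os.environ[ENV_HTTPIE_CONFIG_DIR] = '/tmp/httpie-demo-config'
assert get_default_config_dir() == Path('/tmp/httpie-demo-config')
del os.environ[ENV_HTTPIE_CONFIG_DIR]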
DEFAULT_CONFIG_DIR = get_default_config_dir()
class ConfigFileError(Exception):
pass
DEFAULT_CONFIG_DIR = os.environ.get(
'HTTPIE_CONFIG_DIR',
os.path.expanduser('~/.httpie') if not is_windows else
os.path.expandvars(r'%APPDATA%\\httpie')
)
class BaseConfigDict(dict):
name = None
helpurl = None
help = None
about = None
def __init__(self, path: Path):
super().__init__()
self.path = path
directory = DEFAULT_CONFIG_DIR
def ensure_directory(self):
self.path.parent.mkdir(mode=0o700, parents=True, exist_ok=True)
def __init__(self, directory=None, *args, **kwargs):
super(BaseConfigDict, self).__init__(*args, **kwargs)
if directory:
self.directory = directory
def is_new(self) -> bool:
return not self.path.exists()
def __getattr__(self, item):
return self[item]
@property
def path(self):
try:
os.makedirs(self.directory, mode=0o700)
except OSError as e:
if e.errno != errno.EEXIST:
raise
return os.path.join(self.directory, self.name + '.json')
@property
def is_new(self):
return not os.path.exists(self.path)
def load(self):
config_type = type(self).__name__.lower()
try:
with self.path.open(encoding=UTF8) as f:
with open(self.path, 'rt') as f:
try:
data = json.load(f)
except ValueError as e:
raise ConfigFileError(
f'invalid {config_type} file: {e} [{self.path}]'
raise ValueError(
'Invalid %s JSON: %s [%s]' %
(type(self).__name__, e.message, self.path)
)
self.update(data)
except FileNotFoundError:
pass
except OSError as e:
raise ConfigFileError(f'cannot read {config_type} file: {e}')
except IOError as e:
if e.errno != errno.ENOENT:
raise
def save(self):
self['__meta__'] = {
'httpie': __version__
}
if self.helpurl:
self['__meta__']['help'] = self.helpurl
if self.help:
self['__meta__']['help'] = self.help
if self.about:
self['__meta__']['about'] = self.about
self.ensure_directory()
with open(self.path, 'w') as f:
json.dump(self, f, indent=4, sort_keys=True, ensure_ascii=True)
f.write('\n')
json_string = json.dumps(
obj=self,
indent=4,
sort_keys=True,
ensure_ascii=True,
)
self.path.write_text(json_string + '\n', encoding=UTF8)
def delete(self):
try:
os.unlink(self.path)
except OSError as e:
if e.errno != errno.ENOENT:
raise
class Config(BaseConfigDict):
FILENAME = 'config.json'
name = 'config'
help = 'https://github.com/jkbr/httpie#config'
about = 'HTTPie configuration file'
DEFAULTS = {
'implicit_content_type': 'json',
'default_options': []
}
def __init__(self, directory: Union[str, Path] = DEFAULT_CONFIG_DIR):
self.directory = Path(directory)
super().__init__(path=self.directory / self.FILENAME)
def __init__(self, *args, **kwargs):
super(Config, self).__init__(*args, **kwargs)
self.update(self.DEFAULTS)
@property
def default_options(self) -> list:
return self['default_options']
@property
def plugins_dir(self) -> Path:
return Path(self.get('plugins_dir', self.directory / 'plugins')).resolve()
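A minimal usage sketch of the new Config API shown above (nothing here is taken verbatim from the diff; paths are whatever DEFAULT_CONFIG_DIR resolves to):
config = Config()                # defaults to DEFAULT_CONFIG_DIR
if not config.is_new():
    config.load()                # merge the JSON file on top of DEFAULTS
print(config.default_options)    # e.g. [] when nothing is configured
print(config.plugins_dir)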

View File

@ -1,139 +0,0 @@
import sys
import os
from contextlib import contextmanager
from pathlib import Path
from typing import Iterator, IO, Optional
try:
import curses
except ImportError:
curses = None # Compiled w/o curses
from .compat import is_windows
from .config import DEFAULT_CONFIG_DIR, Config, ConfigFileError
from .encoding import UTF8
from .utils import repr_dict
class Environment:
"""
Information about the execution context
(standard streams, config directory, etc).
By default, it represents the actual environment.
All of the attributes can be overwritten though, which
is used by the test suite to simulate various scenarios.
"""
is_windows: bool = is_windows
config_dir: Path = DEFAULT_CONFIG_DIR
stdin: Optional[IO] = sys.stdin # `None` when closed fd (#791)
stdin_isatty: bool = stdin.isatty() if stdin else False
stdin_encoding: str = None
stdout: IO = sys.stdout
stdout_isatty: bool = stdout.isatty()
stdout_encoding: str = None
stderr: IO = sys.stderr
stderr_isatty: bool = stderr.isatty()
colors = 256
program_name: str = 'http'
if not is_windows:
if curses:
try:
curses.setupterm()
colors = curses.tigetnum('colors')
except curses.error:
pass
else:
# noinspection PyUnresolvedReferences
import colorama.initialise
stdout = colorama.initialise.wrap_stream(
stdout, convert=None, strip=None,
autoreset=True, wrap=True
)
stderr = colorama.initialise.wrap_stream(
stderr, convert=None, strip=None,
autoreset=True, wrap=True
)
del colorama
def __init__(self, devnull=None, **kwargs):
"""
Use keyword arguments to overwrite
any of the class attributes for this instance.
"""
assert all(hasattr(type(self), attr) for attr in kwargs.keys())
self.__dict__.update(**kwargs)
# The original STDERR unaffected by --quieting.
self._orig_stderr = self.stderr
self._devnull = devnull
# Keyword arguments > stream.encoding > default UTF-8
if self.stdin and self.stdin_encoding is None:
self.stdin_encoding = getattr(
self.stdin, 'encoding', None) or UTF8
if self.stdout_encoding is None:
actual_stdout = self.stdout
if is_windows:
# noinspection PyUnresolvedReferences
from colorama import AnsiToWin32
if isinstance(self.stdout, AnsiToWin32):
# noinspection PyUnresolvedReferences
actual_stdout = self.stdout.wrapped
self.stdout_encoding = getattr(
actual_stdout, 'encoding', None) or UTF8
def __str__(self):
defaults = dict(type(self).__dict__)
actual = dict(defaults)
actual.update(self.__dict__)
actual['config'] = self.config
return repr_dict({
key: value
for key, value in actual.items()
if not key.startswith('_')
})
def __repr__(self):
return f'<{type(self).__name__} {self}>'
_config: Config = None
@property
def config(self) -> Config:
config = self._config
if not config:
self._config = config = Config(directory=self.config_dir)
if not config.is_new():
try:
config.load()
except ConfigFileError as e:
self.log_error(e, level='warning')
return config
@property
def devnull(self) -> IO:
if self._devnull is None:
self._devnull = open(os.devnull, 'w+')
return self._devnull
@contextmanager
def as_silent(self) -> Iterator[None]:
original_stdout = self.stdout
original_stderr = self.stderr
try:
self.stdout = self.devnull
self.stderr = self.devnull
yield
finally:
self.stdout = original_stdout
self.stderr = original_stderr
def log_error(self, msg, level='error'):
assert level in ['error', 'warning']
self._orig_stderr.write(f'\n{self.program_name}: {level}: {msg}\n\n')
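A small, hypothetical sketch of how the Environment helpers above compose: silence all output for a block, then warn on the original stderr (which quieting does not redirect).
env = Environment()
with env.as_silent():
    env.stdout.write('discarded: goes to os.devnull\n')
env.log_error('response looked suspicious', level='warning')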

View File

@ -1,284 +1,129 @@
import argparse
import os
import platform
"""This module provides the main functionality of HTTPie.
Invocation flow:
1. Read, validate and process the input (args, `stdin`).
2. Create and send a request.
3. Stream, and possibly process and format, the requested parts
of the request-response exchange.
4. Simultaneously write to `stdout`.
5. Exit.
"""
import sys
import socket
from typing import List, Optional, Union, Callable
import errno
import requests
from pygments import __version__ as pygments_version
from httpie import __version__ as httpie_version
from requests import __version__ as requests_version
from pygments import __version__ as pygments_version
from . import __version__ as httpie_version
from .cli.constants import OUT_REQ_BODY
from .cli.nested_json import HTTPieSyntaxError
from .client import collect_messages
from .context import Environment
from .downloads import Downloader
from .models import (
RequestsMessageKind,
OutputOptions,
)
from .output.writer import write_message, write_stream, MESSAGE_SEPARATOR_BYTES
from .plugins.registry import plugin_manager
from .status import ExitStatus, http_status_to_exit_status
from .utils import unwrap_context
from .cli import parser
from .compat import str, is_py3
from .client import get_response
from .models import Environment
from .output import build_output_stream, write, write_with_colors_win_p3k
from . import ExitStatus
# noinspection PyDefaultArgument
def raw_main(
parser: argparse.ArgumentParser,
main_program: Callable[[argparse.Namespace, Environment], ExitStatus],
args: List[Union[str, bytes]] = sys.argv,
env: Environment = Environment(),
use_default_options: bool = True,
) -> ExitStatus:
program_name, *args = args
env.program_name = os.path.basename(program_name)
args = decode_raw_args(args, env.stdin_encoding)
plugin_manager.load_installed_plugins(env.config.plugins_dir)
if use_default_options and env.config.default_options:
args = env.config.default_options + args
include_debug_info = '--debug' in args
include_traceback = include_debug_info or '--traceback' in args
def handle_generic_error(e, annotation=None):
msg = str(e)
if hasattr(e, 'request'):
request = e.request
if hasattr(request, 'url'):
msg = (
f'{msg} while doing a {request.method}'
f' request to URL: {request.url}'
)
if annotation:
msg += annotation
env.log_error(f'{type(e).__name__}: {msg}')
if include_traceback:
raise
if include_debug_info:
print_debug_info(env)
if args == ['--debug']:
return ExitStatus.SUCCESS
exit_status = ExitStatus.SUCCESS
try:
parsed_args = parser.parse_args(
args=args,
env=env,
)
except HTTPieSyntaxError as exc:
env.stderr.write(str(exc) + "\n")
if include_traceback:
raise
exit_status = ExitStatus.ERROR
except KeyboardInterrupt:
env.stderr.write('\n')
if include_traceback:
raise
exit_status = ExitStatus.ERROR_CTRL_C
except SystemExit as e:
if e.code != ExitStatus.SUCCESS:
env.stderr.write('\n')
if include_traceback:
raise
exit_status = ExitStatus.ERROR
def get_exit_status(http_status, follow=False):
"""Translate HTTP status code to exit status code."""
if 300 <= http_status <= 399 and not follow:
# Redirect
return ExitStatus.ERROR_HTTP_3XX
elif 400 <= http_status <= 499:
# Client Error
return ExitStatus.ERROR_HTTP_4XX
elif 500 <= http_status <= 599:
# Server Error
return ExitStatus.ERROR_HTTP_5XX
else:
try:
exit_status = main_program(
args=parsed_args,
env=env,
)
except KeyboardInterrupt:
env.stderr.write('\n')
if include_traceback:
raise
exit_status = ExitStatus.ERROR_CTRL_C
except SystemExit as e:
if e.code != ExitStatus.SUCCESS:
env.stderr.write('\n')
if include_traceback:
raise
exit_status = ExitStatus.ERROR
except requests.Timeout:
exit_status = ExitStatus.ERROR_TIMEOUT
env.log_error(f'Request timed out ({parsed_args.timeout}s).')
except requests.TooManyRedirects:
exit_status = ExitStatus.ERROR_TOO_MANY_REDIRECTS
env.log_error(
f'Too many redirects'
f' (--max-redirects={parsed_args.max_redirects}).'
)
except requests.exceptions.ConnectionError as exc:
annotation = None
original_exc = unwrap_context(exc)
if isinstance(original_exc, socket.gaierror):
if original_exc.errno == socket.EAI_AGAIN:
annotation = '\nCouldn’t connect to a DNS server. Please check your connection and try again.'
elif original_exc.errno == socket.EAI_NONAME:
annotation = '\nCouldn’t resolve the given hostname. Please check the URL and try again.'
propagated_exc = original_exc
else:
propagated_exc = exc
handle_generic_error(propagated_exc, annotation=annotation)
exit_status = ExitStatus.ERROR
except Exception as e:
# TODO: Further distinction between expected and unexpected errors.
handle_generic_error(e)
exit_status = ExitStatus.ERROR
return exit_status
return ExitStatus.OK
def main(
args: List[Union[str, bytes]] = sys.argv,
env: Environment = Environment()
) -> ExitStatus:
"""
The main function.
def print_debug_info(env):
sys.stderr.writelines([
'HTTPie %s\n' % httpie_version,
'HTTPie data: %s\n' % env.config.directory,
'Requests %s\n' % requests_version,
'Pygments %s\n' % pygments_version,
'Python %s %s\n' % (sys.version, sys.platform)
])
Pre-process args, handle some special types of invocations,
and run the main program with error handling.
def main(args=sys.argv[1:], env=Environment()):
"""Run the main program and write the output to ``env.stdout``.
Return exit status code.
"""
if env.config.default_options:
args = env.config.default_options + args
from .cli.definition import parser
def error(msg, *args, **kwargs):
msg = msg % args
level = kwargs.get('level', 'error')
env.stderr.write('http: %s: %s\n' % (level, msg))
return raw_main(
parser=parser,
main_program=program,
args=args,
env=env
)
debug = '--debug' in args
traceback = debug or '--traceback' in args
exit_status = ExitStatus.OK
def program(args: argparse.Namespace, env: Environment) -> ExitStatus:
"""
The main program without error handling.
"""
# TODO: Refactor and drastically simplify, especially so that the separator logic is elsewhere.
exit_status = ExitStatus.SUCCESS
downloader = None
initial_request: Optional[requests.PreparedRequest] = None
final_response: Optional[requests.Response] = None
def separate():
getattr(env.stdout, 'buffer', env.stdout).write(MESSAGE_SEPARATOR_BYTES)
def request_body_read_callback(chunk: bytes):
should_pipe_to_stdout = bool(
# Request body output desired
OUT_REQ_BODY in args.output_options
# & not `.read()` already pre-request (e.g., for compression)
and initial_request
# & non-EOF chunk
and chunk
)
if should_pipe_to_stdout:
msg = requests.PreparedRequest()
msg.is_body_upload_chunk = True
msg.body = chunk
msg.headers = initial_request.headers
msg_output_options = OutputOptions.from_message(msg, body=True, headers=False)
write_message(requests_message=msg, env=env, args=args, output_options=msg_output_options)
if debug:
print_debug_info(env)
if args == ['--debug']:
return exit_status
try:
if args.download:
args.follow = True # --download implies --follow.
downloader = Downloader(output_file=args.output_file, progress_file=env.stderr, resume=args.download_resume)
downloader.pre_request(args.headers)
messages = collect_messages(env, args=args,
request_body_read_callback=request_body_read_callback)
force_separator = False
prev_with_body = False
args = parser.parse_args(args=args, env=env)
# Process messages as they're generated
for message in messages:
output_options = OutputOptions.from_message(message, args.output_options)
response = get_response(args, config_dir=env.config.directory)
do_write_body = output_options.body
if prev_with_body and output_options.any() and (force_separator or not env.stdout_isatty):
# Separate after a previous message with body, if needed. See test_tokens.py.
separate()
force_separator = False
if output_options.kind is RequestsMessageKind.REQUEST:
if not initial_request:
initial_request = message
if output_options.body:
is_streamed_upload = not isinstance(message.body, (str, bytes))
do_write_body = not is_streamed_upload
force_separator = is_streamed_upload and env.stdout_isatty
if args.check_status:
exit_status = get_exit_status(response.status_code, args.follow)
if not env.stdout_isatty and exit_status != ExitStatus.OK:
error('HTTP %s %s',
response.raw.status,
response.raw.reason,
level='warning')
write_kwargs = {
'stream': build_output_stream(args, env,
response.request,
response),
'outfile': env.stdout,
'flush': env.stdout_isatty or args.stream
}
try:
if env.is_windows and is_py3 and 'colors' in args.prettify:
write_with_colors_win_p3k(**write_kwargs)
else:
final_response = message
if args.check_status or downloader:
exit_status = http_status_to_exit_status(http_status=message.status_code, follow=args.follow)
if exit_status != ExitStatus.SUCCESS and (not env.stdout_isatty or args.quiet == 1):
env.log_error(f'HTTP {message.raw.status} {message.raw.reason}', level='warning')
write_message(requests_message=message, env=env, args=args, output_options=output_options._replace(
body=do_write_body
))
prev_with_body = output_options.body
write(**write_kwargs)
# Cleanup
if force_separator:
separate()
if downloader and exit_status == ExitStatus.SUCCESS:
# Last response body download.
download_stream, download_to = downloader.start(
initial_url=initial_request.url,
final_response=final_response,
)
write_stream(stream=download_stream, outfile=download_to, flush=False)
downloader.finish()
if downloader.interrupted:
exit_status = ExitStatus.ERROR
env.log_error(
f'Incomplete download: size={downloader.status.total_size};'
f' downloaded={downloader.status.downloaded}'
)
return exit_status
except IOError as e:
if not traceback and e.errno == errno.EPIPE:
# Ignore broken pipes unless --traceback.
env.stderr.write('\n')
else:
raise
finally:
if downloader and not downloader.finished:
downloader.failed()
if args.output_file and args.output_file_specified:
args.output_file.close()
except (KeyboardInterrupt, SystemExit):
if traceback:
raise
env.stderr.write('\n')
exit_status = ExitStatus.ERROR
except requests.Timeout:
exit_status = ExitStatus.ERROR_TIMEOUT
error('Request timed out (%ss).', args.timeout)
def print_debug_info(env: Environment):
env.stderr.writelines([
f'HTTPie {httpie_version}\n',
f'Requests {requests_version}\n',
f'Pygments {pygments_version}\n',
f'Python {sys.version}\n{sys.executable}\n',
f'{platform.system()} {platform.release()}',
])
env.stderr.write('\n\n')
env.stderr.write(repr(env))
env.stderr.write('\n\n')
env.stderr.write(repr(plugin_manager))
env.stderr.write('\n')
except Exception as e:
# TODO: Better distinction between expected and unexpected errors.
# Network errors vs. bugs, etc.
if traceback:
raise
error('%s: %s', type(e).__name__, str(e))
exit_status = ExitStatus.ERROR
def decode_raw_args(
args: List[Union[str, bytes]],
stdin_encoding: str
) -> List[str]:
"""
Convert all bytes args to str
by decoding them using stdin encoding.
"""
return [
arg.decode(stdin_encoding)
if type(arg) is bytes else arg
for arg in args
]
return exit_status
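As a hedged illustration (not from the diff), the refactored entry point can be driven directly from Python, e.g. in tests; the argument list mirrors sys.argv, including the program name, and the target URL is only an example.
status = main(args=['http', '--offline', 'pie.dev/get'], env=Environment())
print(status)  # ExitStatus.SUCCESS when the invocation succeeds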

View File

@ -1,455 +0,0 @@
"""
Download mode implementation.
"""
import mimetypes
import os
import re
import sys
import threading
from mailbox import Message
from time import sleep, monotonic
from typing import IO, Optional, Tuple
from urllib.parse import urlsplit
import requests
from .models import HTTPResponse, OutputOptions
from .output.streams import RawStream
from .utils import humanize_bytes
PARTIAL_CONTENT = 206
CLEAR_LINE = '\r\033[K'
PROGRESS = (
'{percentage: 6.2f} %'
' {downloaded: >10}'
' {speed: >10}/s'
' {eta: >8} ETA'
)
PROGRESS_NO_CONTENT_LENGTH = '{downloaded: >10} {speed: >10}/s'
SUMMARY = 'Done. {downloaded} in {time:0.5f}s ({speed}/s)\n'
SPINNER = '|/-\\'
class ContentRangeError(ValueError):
pass
def parse_content_range(content_range: str, resumed_from: int) -> int:
"""
Parse and validate Content-Range header.
<https://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html>
:param content_range: the value of a Content-Range response header
eg. "bytes 21010-47021/47022"
:param resumed_from: first byte pos. from the Range request header
:return: total size of the response body when fully downloaded.
"""
if content_range is None:
raise ContentRangeError('Missing Content-Range')
pattern = (
r'^bytes (?P<first_byte_pos>\d+)-(?P<last_byte_pos>\d+)'
r'/(\*|(?P<instance_length>\d+))$'
)
match = re.match(pattern, content_range)
if not match:
raise ContentRangeError(
f'Invalid Content-Range format {content_range!r}')
content_range_dict = match.groupdict()
first_byte_pos = int(content_range_dict['first_byte_pos'])
last_byte_pos = int(content_range_dict['last_byte_pos'])
instance_length = (
int(content_range_dict['instance_length'])
if content_range_dict['instance_length']
else None
)
# "A byte-content-range-spec with a byte-range-resp-spec whose
# last-byte-pos value is less than its first-byte-pos value,
# or whose instance-length value is less than or equal to its
# last-byte-pos value, is invalid. The recipient of an invalid
# byte-content-range-spec MUST ignore it and any content
# transferred along with it."
if (first_byte_pos > last_byte_pos
or (instance_length is not None
and instance_length <= last_byte_pos)):
raise ContentRangeError(
f'Invalid Content-Range returned: {content_range!r}')
if (first_byte_pos != resumed_from
or (instance_length is not None
and last_byte_pos + 1 != instance_length)):
# Not what we asked for.
raise ContentRangeError(
f'Unexpected Content-Range returned ({content_range!r})'
f' for the requested Range ("bytes={resumed_from}-")'
)
return last_byte_pos + 1
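An illustrative check using the header quoted in the docstring above: resuming from byte 21010, the parser validates the range and returns the total body size.
assert parse_content_range('bytes 21010-47021/47022', resumed_from=21010) == 47022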
def filename_from_content_disposition(
content_disposition: str
) -> Optional[str]:
"""
Extract and validate filename from a Content-Disposition header.
:param content_disposition: Content-Disposition value
:return: the filename if present and valid, otherwise `None`
"""
# attachment; filename=jakubroztocil-httpie-0.4.1-20-g40bd8f6.tar.gz
msg = Message(f'Content-Disposition: {content_disposition}')
filename = msg.get_filename()
if filename:
# Basic sanitation.
filename = os.path.basename(filename).lstrip('.').strip()
if filename:
return filename
def filename_from_url(url: str, content_type: Optional[str]) -> str:
fn = urlsplit(url).path.rstrip('/')
fn = os.path.basename(fn) if fn else 'index'
if '.' not in fn and content_type:
content_type = content_type.split(';')[0]
if content_type == 'text/plain':
# mimetypes returns '.ksh'
ext = '.txt'
else:
ext = mimetypes.guess_extension(content_type)
if ext == '.htm':
ext = '.html'
if ext:
fn += ext
return fn
def trim_filename(filename: str, max_len: int) -> str:
if len(filename) > max_len:
trim_by = len(filename) - max_len
name, ext = os.path.splitext(filename)
if trim_by >= len(name):
filename = filename[:-trim_by]
else:
filename = name[:-trim_by] + ext
return filename
def get_filename_max_length(directory: str) -> int:
max_len = 255
if hasattr(os, 'pathconf') and 'PC_NAME_MAX' in os.pathconf_names:
max_len = os.pathconf(directory, 'PC_NAME_MAX')
return max_len
def trim_filename_if_needed(filename: str, directory='.', extra=0) -> str:
max_len = get_filename_max_length(directory) - extra
if len(filename) > max_len:
filename = trim_filename(filename, max_len)
return filename
def get_unique_filename(filename: str, exists=os.path.exists) -> str:
attempt = 0
while True:
suffix = f'-{attempt}' if attempt > 0 else ''
try_filename = trim_filename_if_needed(filename, extra=len(suffix))
try_filename += suffix
if not exists(try_filename):
return try_filename
attempt += 1
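A small illustrative sketch (the filename is hypothetical): when the preferred name is already taken, the first free numeric suffix is used.
taken = {'report.json'}
print(get_unique_filename('report.json', exists=taken.__contains__))  # -> report.json-1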
class Downloader:
def __init__(
self,
output_file: IO = None,
resume: bool = False,
progress_file: IO = sys.stderr
):
"""
:param resume: Whether to resume the download if a partial download
already exists.
:param output_file: The file to store response body in. If not
provided, it will be guessed from the response.
:param progress_file: Where to report download progress.
"""
self.finished = False
self.status = DownloadStatus()
self._output_file = output_file
self._resume = resume
self._resumed_from = 0
self._progress_reporter = ProgressReporterThread(
status=self.status,
output=progress_file
)
def pre_request(self, request_headers: dict):
"""Called just before the HTTP request is sent.
Might alter `request_headers`.
"""
# Ask the server not to encode the content so that we can resume, etc.
request_headers['Accept-Encoding'] = 'identity'
if self._resume:
bytes_have = os.path.getsize(self._output_file.name)
if bytes_have:
# Set ``Range`` header to resume the download
# TODO: Use "If-Range: mtime" to make sure it's fresh?
request_headers['Range'] = f'bytes={bytes_have}-'
self._resumed_from = bytes_have
def start(
self,
initial_url: str,
final_response: requests.Response
) -> Tuple[RawStream, IO]:
"""
Initiate and return a stream for `response` body with progress
callback attached. Can be called only once.
:param initial_url: The original requested URL
:param final_response: Initiated response object with headers already fetched
:return: RawStream, output_file
"""
assert not self.status.time_started
# FIXME: some servers might still send Content-Encoding: gzip
# <https://github.com/httpie/httpie/issues/423>
try:
total_size = int(final_response.headers['Content-Length'])
except (KeyError, ValueError, TypeError):
total_size = None
if not self._output_file:
self._output_file = self._get_output_file_from_response(
initial_url=initial_url,
final_response=final_response,
)
else:
# `--output, -o` provided
if self._resume and final_response.status_code == PARTIAL_CONTENT:
total_size = parse_content_range(
final_response.headers.get('Content-Range'),
self._resumed_from
)
else:
self._resumed_from = 0
try:
self._output_file.seek(0)
self._output_file.truncate()
except OSError:
pass # stdout
self.status.started(
resumed_from=self._resumed_from,
total_size=total_size
)
output_options = OutputOptions.from_message(final_response, headers=False, body=True)
stream = RawStream(
msg=HTTPResponse(final_response),
output_options=output_options,
on_body_chunk_downloaded=self.chunk_downloaded,
)
self._progress_reporter.output.write(
f'Downloading {humanize_bytes(total_size) + " " if total_size is not None else ""}'
f'to "{self._output_file.name}"\n'
)
self._progress_reporter.start()
return stream, self._output_file
def finish(self):
assert not self.finished
self.finished = True
self.status.finished()
def failed(self):
self._progress_reporter.stop()
@property
def interrupted(self) -> bool:
return (
self.finished
and self.status.total_size
and self.status.total_size != self.status.downloaded
)
def chunk_downloaded(self, chunk: bytes):
"""
A download progress callback.
:param chunk: A chunk of response body data that has just
been downloaded and written to the output.
"""
self.status.chunk_downloaded(len(chunk))
@staticmethod
def _get_output_file_from_response(
initial_url: str,
final_response: requests.Response,
) -> IO:
# Output file not specified. Pick a name that doesn't exist yet.
filename = None
if 'Content-Disposition' in final_response.headers:
filename = filename_from_content_disposition(
final_response.headers['Content-Disposition'])
if not filename:
filename = filename_from_url(
url=initial_url,
content_type=final_response.headers.get('Content-Type'),
)
unique_filename = get_unique_filename(filename)
return open(unique_filename, buffering=0, mode='a+b')
class DownloadStatus:
"""Holds details about the download status."""
def __init__(self):
self.downloaded = 0
self.total_size = None
self.resumed_from = 0
self.time_started = None
self.time_finished = None
def started(self, resumed_from=0, total_size=None):
assert self.time_started is None
self.total_size = total_size
self.downloaded = self.resumed_from = resumed_from
self.time_started = monotonic()
def chunk_downloaded(self, size):
assert self.time_finished is None
self.downloaded += size
@property
def has_finished(self):
return self.time_finished is not None
def finished(self):
assert self.time_started is not None
assert self.time_finished is None
self.time_finished = monotonic()
class ProgressReporterThread(threading.Thread):
"""
Reports download progress based on its status.
Uses threading to periodically update the status (speed, ETA, etc.).
"""
def __init__(
self,
status: DownloadStatus,
output: IO,
tick=.1,
update_interval=1
):
super().__init__()
self.status = status
self.output = output
self._tick = tick
self._update_interval = update_interval
self._spinner_pos = 0
self._status_line = ''
self._prev_bytes = 0
self._prev_time = monotonic()
self._should_stop = threading.Event()
def stop(self):
"""Stop reporting on next tick."""
self._should_stop.set()
def run(self):
while not self._should_stop.is_set():
if self.status.has_finished:
self.sum_up()
break
self.report_speed()
sleep(self._tick)
def report_speed(self):
now = monotonic()
if now - self._prev_time >= self._update_interval:
downloaded = self.status.downloaded
speed = ((downloaded - self._prev_bytes)
/ (now - self._prev_time))
if not self.status.total_size:
self._status_line = PROGRESS_NO_CONTENT_LENGTH.format(
downloaded=humanize_bytes(downloaded),
speed=humanize_bytes(speed),
)
else:
percentage = (downloaded / self.status.total_size * 100
if self.status.total_size
else 0)
if not speed:
eta = '-:--:--'
else:
s = int((self.status.total_size - downloaded) / speed)
h, s = divmod(s, 60 * 60)
m, s = divmod(s, 60)
eta = f'{h}:{m:0>2}:{s:0>2}'
self._status_line = PROGRESS.format(
percentage=percentage,
downloaded=humanize_bytes(downloaded),
speed=humanize_bytes(speed),
eta=eta,
)
self._prev_time = now
self._prev_bytes = downloaded
self.output.write(
f'{CLEAR_LINE} {SPINNER[self._spinner_pos]} {self._status_line}'
)
self.output.flush()
self._spinner_pos = (self._spinner_pos + 1) % len(SPINNER)
def sum_up(self):
actually_downloaded = (
self.status.downloaded - self.status.resumed_from)
time_taken = self.status.time_finished - self.status.time_started
speed = actually_downloaded / time_taken if time_taken else actually_downloaded
self.output.write(CLEAR_LINE)
self.output.write(SUMMARY.format(
downloaded=humanize_bytes(actually_downloaded),
total=(self.status.total_size
and humanize_bytes(self.status.total_size)),
speed=humanize_bytes(speed),
time=time_taken,
))
self.output.flush()

View File

@ -1,50 +0,0 @@
from typing import Union, Tuple
from charset_normalizer import from_bytes
from charset_normalizer.constant import TOO_SMALL_SEQUENCE
UTF8 = 'utf-8'
ContentBytes = Union[bytearray, bytes]
def detect_encoding(content: ContentBytes) -> str:
"""
We default to UTF-8 if the text is too short, because the detection
can return a random encoding, leading to confusing results
depending on the `charset_normalizer` version (< 2.0.5).
>>> too_short = ']"foo"'
>>> detected = from_bytes(too_short.encode()).best().encoding
>>> detected
'ascii'
>>> too_short.encode().decode(detected)
']"foo"'
"""
encoding = UTF8
if len(content) > TOO_SMALL_SEQUENCE:
match = from_bytes(bytes(content)).best()
if match:
encoding = match.encoding
return encoding
def smart_decode(content: ContentBytes, encoding: str) -> Tuple[str, str]:
"""Decode `content` using the given `encoding`.
If no `encoding` is provided, the best effort is to guess it from `content`.
Unicode errors are replaced.
"""
if not encoding:
encoding = detect_encoding(content)
return content.decode(encoding, 'replace'), encoding
def smart_encode(content: str, encoding: str) -> bytes:
"""Encode `content` using the given `encoding`.
Unicode errors are replaced.
"""
return content.encode(encoding, 'replace')
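An illustrative round trip through the helpers above (values are hypothetical): an explicit encoding is honoured, and unencodable characters are replaced rather than raising.
raw = 'víno'.encode('latin-1')
text, used = smart_decode(raw, 'latin-1')
assert (text, used) == ('víno', 'latin-1')
assert smart_encode(text, 'ascii') == b'v?no'  # replacement, not UnicodeEncodeError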

540
httpie/input.py Normal file
View File

@ -0,0 +1,540 @@
"""Parsing and processing of CLI input (args, auth credentials, files, stdin).
"""
import os
import sys
import re
import json
import mimetypes
import getpass
from io import BytesIO
from argparse import ArgumentParser, ArgumentTypeError, ArgumentError
try:
from collections import OrderedDict
except ImportError:
OrderedDict = dict
# TODO: Use MultiDict for headers once added to `requests`.
# https://github.com/jkbr/httpie/issues/130
from requests.structures import CaseInsensitiveDict
from .compat import urlsplit, str
HTTP_POST = 'POST'
HTTP_GET = 'GET'
HTTP = 'http://'
HTTPS = 'https://'
# Various separators used in args
SEP_HEADERS = ':'
SEP_CREDENTIALS = ':'
SEP_PROXY = ':'
SEP_DATA = '='
SEP_DATA_RAW_JSON = ':='
SEP_FILES = '@'
SEP_QUERY = '=='
# Separators that become request data
SEP_GROUP_DATA_ITEMS = frozenset([
SEP_DATA,
SEP_DATA_RAW_JSON,
SEP_FILES
])
# Separators allowed in ITEM arguments
SEP_GROUP_ITEMS = frozenset([
SEP_HEADERS,
SEP_QUERY,
SEP_DATA,
SEP_DATA_RAW_JSON,
SEP_FILES
])
# Output options
OUT_REQ_HEAD = 'H'
OUT_REQ_BODY = 'B'
OUT_RESP_HEAD = 'h'
OUT_RESP_BODY = 'b'
OUTPUT_OPTIONS = frozenset([
OUT_REQ_HEAD,
OUT_REQ_BODY,
OUT_RESP_HEAD,
OUT_RESP_BODY
])
# Pretty
PRETTY_MAP = {
'all': ['format', 'colors'],
'colors': ['colors'],
'format': ['format'],
'none': []
}
PRETTY_STDOUT_TTY_ONLY = object()
# Defaults
OUTPUT_OPTIONS_DEFAULT = OUT_RESP_HEAD + OUT_RESP_BODY
OUTPUT_OPTIONS_DEFAULT_STDOUT_REDIRECTED = OUT_RESP_BODY
class Parser(ArgumentParser):
"""Adds additional logic to `argparse.ArgumentParser`.
Handles all input (CLI args, file args, stdin), applies defaults,
and performs extra validation.
"""
def __init__(self, *args, **kwargs):
kwargs['add_help'] = False
super(Parser, self).__init__(*args, **kwargs)
# noinspection PyMethodOverriding
def parse_args(self, env, args=None, namespace=None):
self.env = env
args, no_options = super(Parser, self).parse_known_args(args,
namespace)
self._apply_no_options(args, no_options)
if not args.json and env.config.implicit_content_type == 'form':
args.form = True
if args.debug:
args.traceback = True
if args.output:
env.stdout = args.output
env.stdout_isatty = False
self._process_output_options(args, env)
self._process_pretty_options(args, env)
self._guess_method(args, env)
self._parse_items(args)
if not env.stdin_isatty:
self._body_from_file(args, env.stdin)
if not (args.url.startswith(HTTP) or args.url.startswith(HTTPS)):
scheme = HTTPS if env.progname == 'https' else HTTP
args.url = scheme + args.url
self._process_auth(args)
return args
def _process_auth(self, args):
url = urlsplit(args.url)
if args.auth:
if not args.auth.has_password():
# Stdin already read (if not a tty), so it's safe to prompt.
args.auth.prompt_password(url.netloc)
elif url.username is not None:
# Handle http://username:password@hostname/
username, password = url.username, url.password
args.auth = AuthCredentials(
key=username,
value=password,
sep=SEP_CREDENTIALS,
orig=SEP_CREDENTIALS.join([username, password])
)
def _apply_no_options(self, args, no_options):
"""For every `--no-OPTION` in `no_options`, set `args.OPTION` to
its default value. This allows un-setting options, e.g., those
specified in the config file.
"""
invalid = []
for option in no_options:
if not option.startswith('--no-'):
invalid.append(option)
continue
# --no-option => --option
inverted = '--' + option[5:]
for action in self._actions:
if inverted in action.option_strings:
setattr(args, action.dest, action.default)
break
else:
invalid.append(option)
if invalid:
msg = 'unrecognized arguments: %s'
self.error(msg % ' '.join(invalid))
def _print_message(self, message, file=None):
# Sneak in our stderr/stdout.
file = {
sys.stdout: self.env.stdout,
sys.stderr: self.env.stderr,
None: self.env.stderr
}.get(file, file)
super(Parser, self)._print_message(message, file)
def _body_from_file(self, args, fd):
"""There can only be one source of request data.
Bytes are always read.
"""
if args.data:
self.error('Request body (from stdin or a file) and request '
'data (key=value) cannot be mixed.')
args.data = getattr(fd, 'buffer', fd).read()
def _guess_method(self, args, env):
"""Set `args.method` if not specified to either POST or GET
based on whether the request has data or not.
"""
if args.method is None:
# Invoked as `http URL'.
assert not args.items
if not env.stdin_isatty:
args.method = HTTP_POST
else:
args.method = HTTP_GET
# FIXME: False positive, e.g., "localhost" matches but is a valid URL.
elif not re.match('^[a-zA-Z]+$', args.method):
# Invoked as `http URL item+'. The URL is now in `args.method`
# and the first ITEM is now incorrectly in `args.url`.
try:
# Parse the URL as an ITEM and store it as the first ITEM arg.
args.items.insert(
0, KeyValueArgType(*SEP_GROUP_ITEMS).__call__(args.url))
except ArgumentTypeError as e:
if args.traceback:
raise
self.error(e.message)
else:
# Set the URL correctly
args.url = args.method
# Infer the method
has_data = not env.stdin_isatty or any(
item.sep in SEP_GROUP_DATA_ITEMS for item in args.items)
args.method = HTTP_POST if has_data else HTTP_GET
def _parse_items(self, args):
"""Parse `args.items` into `args.headers`, `args.data`,
`args.params`, and `args.files`.
"""
args.headers = CaseInsensitiveDict()
args.data = ParamDict() if args.form else OrderedDict()
args.files = OrderedDict()
args.params = ParamDict()
try:
parse_items(items=args.items,
headers=args.headers,
data=args.data,
files=args.files,
params=args.params)
except ParseError as e:
if args.traceback:
raise
self.error(e.message)
if args.files and not args.form:
# `http url @/path/to/file`
file_fields = list(args.files.keys())
if file_fields != ['']:
self.error(
'Invalid file fields (perhaps you meant --form?): %s'
% ','.join(file_fields))
fn, fd = args.files['']
args.files = {}
self._body_from_file(args, fd)
if 'Content-Type' not in args.headers:
mime, encoding = mimetypes.guess_type(fn, strict=False)
if mime:
content_type = mime
if encoding:
content_type = '%s; charset=%s' % (mime, encoding)
args.headers['Content-Type'] = content_type
def _process_output_options(self, args, env):
"""Apply defaults to output options or validate the provided ones.
The default output options are stdout-type-sensitive.
"""
if not args.output_options:
args.output_options = (OUTPUT_OPTIONS_DEFAULT if env.stdout_isatty
else OUTPUT_OPTIONS_DEFAULT_STDOUT_REDIRECTED)
unknown = set(args.output_options) - OUTPUT_OPTIONS
if unknown:
self.error('Unknown output options: %s' % ','.join(unknown))
def _process_pretty_options(self, args, env):
if args.prettify == PRETTY_STDOUT_TTY_ONLY:
args.prettify = PRETTY_MAP['all' if env.stdout_isatty else 'none']
elif args.prettify and env.is_windows:
self.error('Only terminal output can be colorized on Windows.')
else:
args.prettify = PRETTY_MAP[args.prettify]
class ParseError(Exception):
pass
class KeyValue(object):
"""Base key-value pair parsed from CLI."""
def __init__(self, key, value, sep, orig):
self.key = key
self.value = value
self.sep = sep
self.orig = orig
def __eq__(self, other):
return self.__dict__ == other.__dict__
def session_name_arg_type(name):
from .sessions import Session
if not Session.is_valid_name(name):
raise ArgumentTypeError(
'special characters and spaces are not'
' allowed in session names: "%s"'
% name)
return name
def host_name_arg_type(name):
from .sessions import Host
if not Host.is_valid_name(name):
raise ArgumentTypeError(
'special characters and spaces are not'
' allowed in host names: "%s"'
% name)
return name
class RegexValidator(object):
def __init__(self, pattern, error_message):
self.pattern = re.compile(pattern)
self.error_message = error_message
def __call__(self, value):
if not self.pattern.search(value):
raise ArgumentError(None, self.error_message)
return value
class KeyValueArgType(object):
"""A key-value pair argument type used with `argparse`.
Parses a key-value arg and constructs a `KeyValue` instance.
Used for headers, form data, and other key-value pair types.
"""
key_value_class = KeyValue
def __init__(self, *separators):
self.separators = separators
def __call__(self, string):
"""Parse `string` and return `self.key_value_class()` instance.
The best of `self.separators` is determined (first found, longest).
Backslash-escaped characters aren't considered separators
(or parts thereof). Literal backslash characters have to be escaped
as well (r'\\').
"""
class Escaped(str):
"""Represents an escaped character."""
def tokenize(s):
"""Tokenize `s`. There are only two token types - strings
and escaped characters:
>>> tokenize(r'foo\=bar\\baz')
['foo', Escaped('='), 'bar', Escaped('\\'), 'baz']
"""
tokens = ['']
esc = False
for c in s:
if esc:
tokens.extend([Escaped(c), ''])
esc = False
else:
if c == '\\':
esc = True
else:
tokens[-1] += c
return tokens
tokens = tokenize(string)
# Sorting by length ensures that the longest one will be
# chosen as it will overwrite any shorter ones starting
# at the same position in the `found` dictionary.
separators = sorted(self.separators, key=len)
for i, token in enumerate(tokens):
if isinstance(token, Escaped):
continue
found = {}
for sep in separators:
pos = token.find(sep)
if pos != -1:
found[pos] = sep
if found:
# Starting first, longest separator found.
sep = found[min(found.keys())]
key, value = token.split(sep, 1)
# Any preceding tokens are part of the key.
key = ''.join(tokens[:i]) + key
# Any following tokens are part of the value.
value += ''.join(tokens[i + 1:])
break
else:
raise ArgumentTypeError(
'"%s" is not a valid value' % string)
return self.key_value_class(
key=key, value=value, sep=sep, orig=string)
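An illustrative parse (the item string is hypothetical): the longest separator found at the earliest position wins, and a backslash-escaped '=' inside the value stays literal.
parse = KeyValueArgType(*SEP_GROUP_ITEMS)
kv = parse(r'query==a\=b')
assert (kv.key, kv.value, kv.sep) == ('query', 'a=b', '==')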
class AuthCredentials(KeyValue):
"""Represents parsed credentials."""
def _getpass(self, prompt):
# To allow mocking.
return getpass.getpass(prompt)
def has_password(self):
return self.value is not None
def prompt_password(self, host):
try:
self.value = self._getpass(
'http: password for %s@%s: ' % (self.key, host))
except (EOFError, KeyboardInterrupt):
sys.stderr.write('\n')
sys.exit(0)
class AuthCredentialsArgType(KeyValueArgType):
"""A key-value arg type that parses credentials."""
key_value_class = AuthCredentials
def __call__(self, string):
"""Parse credentials from `string`.
("username" or "username:password").
"""
try:
return super(AuthCredentialsArgType, self).__call__(string)
except ArgumentTypeError:
# No password provided, will prompt for it later.
return self.key_value_class(
key=string,
value=None,
sep=SEP_CREDENTIALS,
orig=string
)
class ParamDict(OrderedDict):
"""Multi-value dict for URL parameters and form data."""
# noinspection PyMethodOverriding
def __setitem__(self, key, value):
""" If `key` is assigned more than once, `self[key]` holds a
`list` of all the values.
This allows having multiple fields with the same name in form
data and URL params.
"""
if key not in self:
super(ParamDict, self).__setitem__(key, value)
else:
if not isinstance(self[key], list):
super(ParamDict, self).__setitem__(key, [self[key]])
self[key].append(value)
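An illustrative sketch of the multi-value behaviour described above: assigning the same key twice collects the values into a list.
d = ParamDict()
d['tag'] = 'python'
d['tag'] = 'http'
assert d['tag'] == ['python', 'http']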
def parse_items(items, data=None, headers=None, files=None, params=None):
"""Parse `KeyValue` `items` into `data`, `headers`, `files`,
and `params`.
"""
if headers is None:
headers = CaseInsensitiveDict()
if data is None:
data = OrderedDict()
if files is None:
files = OrderedDict()
if params is None:
params = ParamDict()
for item in items:
value = item.value
key = item.key
if item.sep == SEP_HEADERS:
target = headers
elif item.sep == SEP_QUERY:
target = params
elif item.sep == SEP_FILES:
try:
with open(os.path.expanduser(value), 'rb') as f:
value = (os.path.basename(value),
BytesIO(f.read()))
except IOError as e:
raise ParseError(
'Invalid argument "%s": %s' % (item.orig, e))
target = files
elif item.sep in [SEP_DATA, SEP_DATA_RAW_JSON]:
if item.sep == SEP_DATA_RAW_JSON:
try:
value = json.loads(item.value)
except ValueError:
raise ParseError('"%s" is not valid JSON' % item.orig)
target = data
else:
raise TypeError(item)
target[key] = value
return headers, data, files, params
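A minimal illustrative call (keys and values are hypothetical) showing how items with different separators are routed to their respective targets:
items = [
    KeyValue(key='X-API-Token', value='secret', sep=SEP_HEADERS, orig='X-API-Token:secret'),
    KeyValue(key='name', value='jan', sep=SEP_DATA, orig='name=jan'),
    KeyValue(key='page', value='2', sep=SEP_QUERY, orig='page==2'),
]
headers, data, files, params = parse_items(items)
assert headers['X-API-Token'] == 'secret'
assert data['name'] == 'jan'
assert params['page'] == '2'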

View File

@ -1,62 +0,0 @@
import argparse
import sys
from typing import List, Union
from httpie.context import Environment
from httpie.status import ExitStatus
from httpie.manager.cli import parser
from httpie.manager.core import MSG_COMMAND_CONFUSION, program as main_program
def is_http_command(args: List[Union[str, bytes]], env: Environment) -> bool:
"""Check whether http/https parser can parse the arguments."""
from httpie.cli.definition import parser as http_parser
from httpie.manager.cli import COMMANDS
# If the user already selected a top-level sub-command, never
# show the http/https version. E.g httpie plugins pie.dev/post
if len(args) >= 1 and args[0] in COMMANDS:
return False
with env.as_silent():
try:
http_parser.parse_args(env=env, args=args)
except (Exception, SystemExit):
return False
else:
return True
def main(args: List[Union[str, bytes]] = sys.argv, env: Environment = Environment()) -> ExitStatus:
from httpie.core import raw_main
try:
return raw_main(
parser=parser,
main_program=main_program,
args=args,
env=env,
use_default_options=False,
)
except argparse.ArgumentError:
program_args = args[1:]
if is_http_command(program_args, env):
env.stderr.write(MSG_COMMAND_CONFUSION.format(args=' '.join(program_args)) + "\n")
return ExitStatus.ERROR
def program():
try:
exit_status = main()
except KeyboardInterrupt:
from httpie.status import ExitStatus
exit_status = ExitStatus.ERROR_CTRL_C
return exit_status
if __name__ == '__main__': # pragma: nocover
sys.exit(program())

View File

@ -1,112 +0,0 @@
from textwrap import dedent
from httpie.cli.argparser import HTTPieManagerArgumentParser
from httpie import __version__
COMMANDS = {
'plugins': {
'help': 'Manage HTTPie plugins.',
'install': [
'Install the given targets from PyPI '
'or from a local paths.',
{
'dest': 'targets',
'nargs': '+',
'help': 'targets to install'
}
],
'upgrade': [
'Upgrade the given plugins',
{
'dest': 'targets',
'nargs': '+',
'help': 'targets to upgrade'
}
],
'uninstall': [
'Uninstall the given HTTPie plugins.',
{
'dest': 'targets',
'nargs': '+',
'help': 'targets to install'
}
],
'list': [
'List all installed HTTPie plugins.'
],
},
}
def missing_subcommand(*args) -> str:
base = COMMANDS
for arg in args:
base = base[arg]
assert isinstance(base, dict)
subcommands = ', '.join(map(repr, base.keys()))
return f'Please specify one of these: {subcommands}'
def generate_subparsers(root, parent_parser, definitions):
action_dest = '_'.join(parent_parser.prog.split()[1:] + ['action'])
actions = parent_parser.add_subparsers(
dest=action_dest
)
for command, properties in definitions.items():
is_subparser = isinstance(properties, dict)
descr = properties.pop('help', None) if is_subparser else properties.pop(0)
command_parser = actions.add_parser(command, description=descr)
command_parser.root = root
if is_subparser:
generate_subparsers(root, command_parser, properties)
continue
for argument in properties:
command_parser.add_argument(**argument)
parser = HTTPieManagerArgumentParser(
prog='httpie',
description=dedent(
'''
Managing interface for HTTPie itself. <https://httpie.io/docs#manager>
Be aware that you might be looking for the http/https commands for sending
HTTP requests. This command is only for managing HTTPie
plugins and the configuration around them.
'''
),
)
parser.add_argument(
'--debug',
action='store_true',
default=False,
help='''
Prints the exception traceback should one occur, as well as other
information useful for debugging HTTPie itself and for reporting bugs.
'''
)
parser.add_argument(
'--traceback',
action='store_true',
default=False,
help='''
Prints the exception traceback should one occur.
'''
)
parser.add_argument(
'--version',
action='version',
version=__version__,
help='''
Show version and exit.
'''
)
generate_subparsers(parser, parser, COMMANDS)

View File

@ -1,33 +0,0 @@
import argparse
from httpie.context import Environment
from httpie.manager.plugins import PluginInstaller
from httpie.status import ExitStatus
from httpie.manager.cli import missing_subcommand, parser
MSG_COMMAND_CONFUSION = '''\
This command is only for managing HTTPie plugins.
To send a request, please use the http/https commands:
$ http {args}
$ https {args}
'''
# noinspection PyStringFormat
MSG_NAKED_INVOCATION = f'''\
{missing_subcommand()}
{MSG_COMMAND_CONFUSION}
'''.rstrip("\n").format(args='POST pie.dev/post hello=world')
def program(args: argparse.Namespace, env: Environment) -> ExitStatus:
if args.action is None:
parser.error(MSG_NAKED_INVOCATION)
if args.action == 'plugins':
plugins = PluginInstaller(env, debug=args.debug)
return plugins.run(args.plugins_action, args)
return ExitStatus.SUCCESS

View File

@ -1,250 +0,0 @@
import argparse
import os
import subprocess
import sys
import textwrap
import re
import shutil
from collections import defaultdict
from contextlib import suppress
from pathlib import Path
from typing import Tuple, Optional, List
from httpie.manager.cli import parser, missing_subcommand
from httpie.compat import importlib_metadata, get_dist_name
from httpie.context import Environment
from httpie.status import ExitStatus
from httpie.utils import as_site
PEP_503 = re.compile(r"[-_.]+")
class PluginInstaller:
def __init__(self, env: Environment, debug: bool = False) -> None:
self.env = env
self.dir = env.config.plugins_dir
self.debug = debug
self.setup_plugins_dir()
def setup_plugins_dir(self) -> None:
try:
self.dir.mkdir(
exist_ok=True,
parents=True
)
except OSError:
self.env.stderr.write(
f'Couldn\'t create "{self.dir!s}"'
' directory for plugin installation.'
' Please re-check the permissions for that directory,'
' and if needed, allow write-access.'
)
raise
def fail(
self,
command: str,
target: Optional[str] = None,
reason: Optional[str] = None
) -> ExitStatus:
message = f'Can\'t {command}'
if target:
message += f' {target!r}'
if reason:
message += f': {reason}'
self.env.stderr.write(message + '\n')
return ExitStatus.ERROR
def pip(self, *args, **kwargs) -> subprocess.CompletedProcess:
options = {
'check': True,
'shell': False,
'stdout': self.env.stdout,
'stderr': subprocess.PIPE,
}
options.update(kwargs)
cmd = [sys.executable, '-m', 'pip', *args]
return subprocess.run(
cmd,
**options
)
def _install(self, targets: List[str], mode='install', **process_options) -> Tuple[
Optional[bytes], ExitStatus
]:
pip_args = [
'install',
f'--prefix={self.dir}',
'--no-warn-script-location',
]
if mode == 'upgrade':
pip_args.append('--upgrade')
try:
process = self.pip(
*pip_args,
*targets,
**process_options,
)
except subprocess.CalledProcessError as error:
reason = None
if error.stderr:
stderr = error.stderr.decode()
if self.debug:
self.env.stderr.write('Command failed: ')
self.env.stderr.write(' '.join(error.cmd) + '\n')
self.env.stderr.write(textwrap.indent(stderr, '  '))
last_line = stderr.strip().splitlines()[-1]
severity, _, message = last_line.partition(': ')
if severity == 'ERROR':
reason = message
stdout = error.stdout
exit_status = self.fail(mode, ', '.join(targets), reason)
else:
stdout = process.stdout
exit_status = ExitStatus.SUCCESS
return stdout, exit_status
def install(self, targets: List[str]) -> ExitStatus:
self.env.stdout.write(f"Installing {', '.join(targets)}...\n")
self.env.stdout.flush()
_, exit_status = self._install(targets)
return exit_status
def _clear_metadata(self, targets: List[str]) -> None:
# Due to an outstanding pip problem[0], we have to get rid of
# existing metadata for old versions manually.
# [0]: https://github.com/pypa/pip/issues/10727
result_deps = defaultdict(list)
for child in as_site(self.dir).iterdir():
if child.suffix in {'.dist-info', '.egg-info'}:
name, _, version = child.stem.rpartition('-')
result_deps[name].append((version, child))
for target in targets:
name, _, version = target.rpartition('-')
name = PEP_503.sub("-", name).lower().replace('-', '_')
if name not in result_deps:
continue
for result_version, meta_path in result_deps[name]:
if version != result_version:
shutil.rmtree(meta_path)
def upgrade(self, targets: List[str]) -> ExitStatus:
self.env.stdout.write(f"Upgrading {', '.join(targets)}...\n")
self.env.stdout.flush()
raw_stdout, exit_status = self._install(
targets,
mode='upgrade',
stdout=subprocess.PIPE
)
if not raw_stdout:
return exit_status
stdout = raw_stdout.decode()
self.env.stdout.write(stdout)
installation_line = stdout.splitlines()[-1]
if installation_line.startswith('Successfully installed'):
self._clear_metadata(installation_line.split()[2:])
def _uninstall(self, target: str) -> Optional[ExitStatus]:
try:
distribution = importlib_metadata.distribution(target)
except importlib_metadata.PackageNotFoundError:
return self.fail('uninstall', target, 'package is not installed')
base_dir = Path(distribution.locate_file('.')).resolve()
if self.dir not in base_dir.parents:
# If the package is installed somewhere else (e.g. in the site-packages
# of the real Python interpreter), then that means this package is not
# installed through us.
return self.fail('uninstall', target,
'package is not installed through httpie plugins'
' interface')
files = distribution.files
if files is None:
return self.fail('uninstall', target, 'couldn\'t locate the package')
# TODO: Consider handling failures here (e.g. if it fails,
# just revert the operation and leave the site-packages
# in a proper shape).
for file in files:
with suppress(FileNotFoundError):
os.unlink(distribution.locate_file(file))
metadata_path = getattr(distribution, '_path', None)
if (
metadata_path
and metadata_path.exists()
and not any(metadata_path.iterdir())
):
metadata_path.rmdir()
self.env.stdout.write(f'Successfully uninstalled {target}\n')
def uninstall(self, targets: List[str]) -> ExitStatus:
# Unfortunately uninstall doesn't work with custom pip schemes. See:
# - https://github.com/pypa/pip/issues/5595
# - https://github.com/pypa/pip/issues/4575
# so we have to implement our own uninstalling logic, which works
# on top of importlib_metadata.
exit_code = ExitStatus.SUCCESS
for target in targets:
exit_code |= self._uninstall(target) or ExitStatus.SUCCESS
return ExitStatus(exit_code)
def list(self) -> None:
from httpie.plugins.registry import plugin_manager
known_plugins = defaultdict(list)
for entry_point in plugin_manager.iter_entry_points(self.dir):
ep_info = (entry_point.group, entry_point.name)
ep_name = get_dist_name(entry_point) or entry_point.module
known_plugins[ep_name].append(ep_info)
for plugin, entry_points in known_plugins.items():
self.env.stdout.write(plugin)
version = importlib_metadata.version(plugin)
if version is not None:
self.env.stdout.write(f' ({version})')
self.env.stdout.write('\n')
for group, entry_point in sorted(entry_points):
self.env.stdout.write(f' {entry_point} ({group})\n')
def run(
self,
action: Optional[str],
args: argparse.Namespace,
) -> ExitStatus:
from httpie.plugins.manager import enable_plugins
if action is None:
parser.error(missing_subcommand('plugins'))
with enable_plugins(self.dir):
if action == 'install':
status = self.install(args.targets)
elif action == 'upgrade':
status = self.upgrade(args.targets)
elif action == 'uninstall':
status = self.uninstall(args.targets)
elif action == 'list':
status = self.list()
return status or ExitStatus.SUCCESS

View File

@ -1,59 +1,91 @@
from time import monotonic
import os
import sys
import requests
from enum import Enum, auto
from typing import Iterable, Union, NamedTuple
from urllib.parse import urlsplit
from .cli.constants import (
OUT_REQ_BODY,
OUT_REQ_HEAD,
OUT_RESP_BODY,
OUT_RESP_HEAD,
OUT_RESP_META
)
from .compat import cached_property
from .utils import split_cookies, parse_content_type_header
from .config import DEFAULT_CONFIG_DIR, Config
from .compat import urlsplit, is_windows, bytes, str
ELAPSED_TIME_LABEL = 'Elapsed time'
class Environment(object):
"""Holds information about the execution context.
Groups various aspects of the environment in a changeable object
and allows for mocking.
"""
is_windows = is_windows
progname = os.path.basename(sys.argv[0])
if progname not in ['http', 'https']:
progname = 'http'
stdin_isatty = sys.stdin.isatty()
stdin = sys.stdin
stdout_isatty = sys.stdout.isatty()
config_dir = DEFAULT_CONFIG_DIR
if stdout_isatty and is_windows:
from colorama.initialise import wrap_stream
stdout = wrap_stream(sys.stdout, convert=None,
strip=None, autoreset=True, wrap=True)
else:
stdout = sys.stdout
stderr = sys.stderr
# Can be set to 0 to disable colors completely.
colors = 256 if '256color' in os.environ.get('TERM', '') else 88
def __init__(self, **kwargs):
assert all(hasattr(type(self), attr)
for attr in kwargs.keys())
self.__dict__.update(**kwargs)
@property
def config(self):
if not hasattr(self, '_config'):
self._config = Config(directory=self.config_dir)
if self._config.is_new:
self._config.save()
else:
self._config.load()
return self._config
class HTTPMessage:
class HTTPMessage(object):
"""Abstract class for HTTP messages."""
def __init__(self, orig):
self._orig = orig
def iter_body(self, chunk_size: int) -> Iterable[bytes]:
def iter_body(self, chunk_size):
"""Return an iterator over the body."""
raise NotImplementedError
raise NotImplementedError()
def iter_lines(self, chunk_size: int) -> Iterable[bytes]:
def iter_lines(self, chunk_size):
"""Return an iterator over the body yielding (`line`, `line_feed`)."""
raise NotImplementedError
raise NotImplementedError()
@property
def headers(self) -> str:
def headers(self):
"""Return a `str` with the message's headers."""
raise NotImplementedError
raise NotImplementedError()
@property
def metadata(self) -> str:
"""Return metadata about the current message."""
raise NotImplementedError
@cached_property
def encoding(self) -> str:
ct, params = parse_content_type_header(self.content_type)
return params.get('charset', '')
def encoding(self):
"""Return a `str` with the message's encoding, if known."""
raise NotImplementedError()
@property
def content_type(self) -> str:
def body(self):
"""Return a `bytes` with the message's body."""
raise NotImplementedError()
@property
def content_type(self):
"""Return the message content type."""
ct = self._orig.headers.get('Content-Type', '')
if not isinstance(ct, str):
if isinstance(ct, bytes):
ct = ct.decode()
return ct
@ -67,51 +99,36 @@ class HTTPResponse(HTTPMessage):
def iter_lines(self, chunk_size):
return ((line, b'\n') for line in self._orig.iter_lines(chunk_size))
# noinspection PyProtectedMember
@property
def headers(self):
try:
raw_version = self._orig.raw._original_response.version
except AttributeError:
# Assume HTTP/1.1
raw_version = 11
version = {
9: '0.9',
10: '1.0',
11: '1.1',
20: '2',
}[raw_version]
original = self._orig
status_line = f'HTTP/{version} {original.status_code} {original.reason}'
original = self._orig.raw._original_response
status_line = 'HTTP/{version} {status} {reason}'.format(
version='.'.join(str(original.version)),
status=original.status,
reason=original.reason
)
headers = [status_line]
headers.extend(
': '.join(header)
for header in original.headers.items()
if header[0] != 'Set-Cookie'
)
headers.extend(
f'Set-Cookie: {cookie}'
for header, value in original.headers.items()
for cookie in split_cookies(value)
if header == 'Set-Cookie'
)
try:
# `original.msg` is a `http.client.HTTPMessage` on Python 3
# `_headers` is a 2-tuple
headers.extend(
'%s: %s' % header for header in original.msg._headers)
except AttributeError:
# and a `httplib.HTTPMessage` on Python 2.x
# `headers` is a list of `name: val<CRLF>`.
headers.extend(h.strip() for h in original.msg.headers)
return '\r\n'.join(headers)
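# Hedged illustration of the Set-Cookie handling above: urllib3 folds
# repeated Set-Cookie headers into a single comma-joined value, and
# split_cookies() (imported from httpie.utils above) is expected to split
# it back so each cookie gets its own header line; sample values only:
for cookie in split_cookies('sessionid=abc; Path=/, csrftoken=xyz; HttpOnly'):
    print(f'Set-Cookie: {cookie}')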
@property
def metadata(self) -> str:
data = {}
time_to_parse_headers = self._orig.elapsed.total_seconds()
# noinspection PyProtectedMember
time_since_headers_parsed = monotonic() - self._orig._httpie_headers_parsed_at
time_elapsed = time_to_parse_headers + time_since_headers_parsed
# data['Headers time'] = str(round(time_to_parse_headers, 5)) + 's'
# data['Body time'] = str(round(time_since_headers_parsed, 5)) + 's'
data[ELAPSED_TIME_LABEL] = str(round(time_elapsed, 10)) + 's'
return '\n'.join(
f'{key}: {value}'
for key, value in data.items()
)
def encoding(self):
return self._orig.encoding or 'utf8'
@property
def body(self):
# Only now the response body is fetched.
# Shouldn't be touched unless the body is actually needed.
return self._orig.content
class HTTPRequest(HTTPMessage):
@ -130,90 +147,29 @@ class HTTPRequest(HTTPMessage):
request_line = '{method} {path}{query} HTTP/1.1'.format(
method=self._orig.method,
path=url.path or '/',
query=f'?{url.query}' if url.query else ''
query='?' + url.query if url.query else ''
)
headers = self._orig.headers.copy()
if 'Host' not in self._orig.headers:
headers['Host'] = url.netloc.split('@')[-1]
headers = dict(self._orig.headers)
headers = [
f'{name}: {value if isinstance(value, str) else value.decode()}'
for name, value in headers.items()
]
if 'Host' not in headers:
headers['Host'] = url.netloc
headers = ['%s: %s' % (name, value)
for name, value in headers.items()]
headers.insert(0, request_line)
headers = '\r\n'.join(headers).strip()
return headers
return '\r\n'.join(headers).strip()
@property
def encoding(self):
return 'utf8'
@property
def body(self):
body = self._orig.body
if isinstance(body, str):
# Happens with JSON/form request data parsed from the command line.
body = body.encode()
body = body.encode('utf8')
return body or b''
RequestsMessage = Union[requests.PreparedRequest, requests.Response]
class RequestsMessageKind(Enum):
REQUEST = auto()
RESPONSE = auto()
def infer_requests_message_kind(message: RequestsMessage) -> RequestsMessageKind:
if isinstance(message, requests.PreparedRequest):
return RequestsMessageKind.REQUEST
elif isinstance(message, requests.Response):
return RequestsMessageKind.RESPONSE
else:
raise TypeError(f"Unexpected message type: {type(message).__name__}")
OPTION_TO_PARAM = {
RequestsMessageKind.REQUEST: {
'headers': OUT_REQ_HEAD,
'body': OUT_REQ_BODY,
},
RequestsMessageKind.RESPONSE: {
'headers': OUT_RESP_HEAD,
'body': OUT_RESP_BODY,
'meta': OUT_RESP_META
}
}
class OutputOptions(NamedTuple):
kind: RequestsMessageKind
headers: bool
body: bool
meta: bool = False
def any(self):
return (
self.headers
or self.body
or self.meta
)
@classmethod
def from_message(
cls,
message: RequestsMessage,
raw_args: str = '',
**kwargs
):
kind = infer_requests_message_kind(message)
options = {
option: param in raw_args
for option, param in OPTION_TO_PARAM[kind].items()
}
options.update(kwargs)
return cls(
kind=kind,
**options
)
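# A hedged usage sketch of the factory above: `raw_args` is the raw
# --print string matched against the OUT_* constants imported at the top
# of this module (single characters in HTTPie's CLI); the request object
# is purely illustrative (requests is already imported above):
req = requests.Request('GET', 'https://example.org').prepare()
opts = OutputOptions.from_message(req, raw_args=OUT_REQ_HEAD + OUT_REQ_BODY)
print(opts.kind, opts.headers, opts.body, opts.meta)
# Expected: RequestsMessageKind.REQUEST True True False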

494 httpie/output.py Normal file
View File

@ -0,0 +1,494 @@
"""Output streaming, processing and formatting.
"""
import json
from functools import partial
from itertools import chain
import pygments
from pygments import token, lexer
from pygments.styles import get_style_by_name, STYLE_MAP
from pygments.lexers import get_lexer_for_mimetype, get_lexer_by_name
from pygments.formatters.terminal import TerminalFormatter
from pygments.formatters.terminal256 import Terminal256Formatter
from pygments.util import ClassNotFound
from .compat import is_windows
from .solarized import Solarized256Style
from .models import HTTPRequest, HTTPResponse, Environment
from .input import (OUT_REQ_BODY, OUT_REQ_HEAD,
OUT_RESP_HEAD, OUT_RESP_BODY)
# Colors on Windows via colorama don't look that
# great and fruity seems to give the best result there.
AVAILABLE_STYLES = set(STYLE_MAP.keys())
AVAILABLE_STYLES.add('solarized')
DEFAULT_STYLE = 'solarized' if not is_windows else 'fruity'
BINARY_SUPPRESSED_NOTICE = (
b'\n'
b'+-----------------------------------------+\n'
b'| NOTE: binary data not shown in terminal |\n'
b'+-----------------------------------------+'
)
class BinarySuppressedError(Exception):
"""An error indicating that the body is binary and won't be written,
e.g., for terminal output."""
message = BINARY_SUPPRESSED_NOTICE
###############################################################################
# Output Streams
###############################################################################
def write(stream, outfile, flush):
"""Write the output stream."""
try:
# Writing bytes so we use the buffer interface (Python 3).
buf = outfile.buffer
except AttributeError:
buf = outfile
for chunk in stream:
buf.write(chunk)
if flush:
outfile.flush()
def write_with_colors_win_p3k(stream, outfile, flush):
"""Like `write`, but colorized chunks are written as text
directly to `outfile` to ensure it gets processed by colorama.
Applies only to Windows with Python 3 and colorized terminal output.
"""
color = b'\x1b['
encoding = outfile.encoding
for chunk in stream:
if color in chunk:
outfile.write(chunk.decode(encoding))
else:
outfile.buffer.write(chunk)
if flush:
outfile.flush()
def build_output_stream(args, env, request, response):
"""Build and return a chain of iterators over the `request`-`response`
exchange, each of which yields `bytes` chunks.
"""
req_h = OUT_REQ_HEAD in args.output_options
req_b = OUT_REQ_BODY in args.output_options
resp_h = OUT_RESP_HEAD in args.output_options
resp_b = OUT_RESP_BODY in args.output_options
req = req_h or req_b
resp = resp_h or resp_b
output = []
Stream = get_stream_type(env, args)
if req:
output.append(Stream(
msg=HTTPRequest(request),
with_headers=req_h,
with_body=req_b))
if req_b and resp:
# Request/Response separator.
output.append([b'\n\n'])
if resp:
output.append(Stream(
msg=HTTPResponse(response),
with_headers=resp_h,
with_body=resp_b))
if env.stdout_isatty and resp_b:
# Ensure a blank line after the response body.
# For terminal output only.
output.append([b'\n\n'])
return chain(*output)
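# Hedged illustration of the chaining above: the final output stream is a
# flat iterator of bytes chunks (headers, separator, body, ...) that the
# `write()` helper defined earlier consumes; the chunks here are made up:
from itertools import chain
demo = chain([b'GET / HTTP/1.1\r\n\r\n'], [b'\n\n'], [b'HTTP/1.1 200 OK\r\n\r\n'])
print(b''.join(demo))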
def get_stream_type(env, args):
"""Pick the right stream type based on `env` and `args`.
Wrap it in a partial with the type-specific args so that
we don't have to think about which stream we are dealing with.
"""
if not env.stdout_isatty and not args.prettify:
Stream = partial(
RawStream,
chunk_size=RawStream.CHUNK_SIZE_BY_LINE
if args.stream
else RawStream.CHUNK_SIZE
)
elif args.prettify:
Stream = partial(
PrettyStream if args.stream else BufferedPrettyStream,
env=env,
processor=OutputProcessor(
env=env, groups=args.prettify, pygments_style=args.style),
)
else:
Stream = partial(EncodedStream, env=env)
return Stream
class BaseStream(object):
"""Base HTTP message output stream class."""
def __init__(self, msg, with_headers=True, with_body=True):
"""
:param msg: a :class:`models.HTTPMessage` subclass
:param with_headers: if `True`, headers will be included
:param with_body: if `True`, body will be included
"""
assert with_headers or with_body
self.msg = msg
self.with_headers = with_headers
self.with_body = with_body
def _get_headers(self):
"""Return the headers' bytes."""
return self.msg.headers.encode('ascii')
def _iter_body(self):
"""Return an iterator over the message body."""
raise NotImplementedError()
def __iter__(self):
"""Return an iterator over `self.msg`."""
if self.with_headers:
yield self._get_headers()
yield b'\r\n\r\n'
if self.with_body:
try:
for chunk in self._iter_body():
yield chunk
except BinarySuppressedError as e:
if self.with_headers:
yield b'\n'
yield e.message
class RawStream(BaseStream):
"""The message is streamed in chunks with no processing."""
CHUNK_SIZE = 1024 * 100
CHUNK_SIZE_BY_LINE = 1024 * 5
def __init__(self, chunk_size=CHUNK_SIZE, **kwargs):
super(RawStream, self).__init__(**kwargs)
self.chunk_size = chunk_size
def _iter_body(self):
return self.msg.iter_body(self.chunk_size)
class EncodedStream(BaseStream):
"""Encoded HTTP message stream.
The message bytes are converted to an encoding suitable for
`self.env.stdout`. Unicode errors are replaced and binary data
is suppressed. The body is always streamed by line.
"""
CHUNK_SIZE = 1024 * 5
def __init__(self, env=Environment(), **kwargs):
super(EncodedStream, self).__init__(**kwargs)
if env.stdout_isatty:
# Use the encoding supported by the terminal.
output_encoding = getattr(env.stdout, 'encoding', None)
else:
# Preserve the message encoding.
output_encoding = self.msg.encoding
# Default to utf8 when unsure.
self.output_encoding = output_encoding or 'utf8'
def _iter_body(self):
for line, lf in self.msg.iter_lines(self.CHUNK_SIZE):
if b'\0' in line:
raise BinarySuppressedError()
yield line.decode(self.msg.encoding)\
.encode(self.output_encoding, 'replace') + lf
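# Hedged illustration of the transcoding step above: decode with the
# message's encoding, then re-encode for the terminal with 'replace' so
# unsupported characters degrade gracefully instead of raising:
line = 'héllo wörld'.encode('latin-1')
print(line.decode('latin-1').encode('ascii', 'replace'))  # b'h?llo w?rld'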
class PrettyStream(EncodedStream):
"""In addition to :class:`EncodedStream` behaviour, this stream applies
content processing.
Useful for long-lived HTTP responses that stream by lines
such as the Twitter streaming API.
"""
CHUNK_SIZE = 1024 * 5
def __init__(self, processor, **kwargs):
super(PrettyStream, self).__init__(**kwargs)
self.processor = processor
def _get_headers(self):
return self.processor.process_headers(
self.msg.headers).encode(self.output_encoding)
def _iter_body(self):
for line, lf in self.msg.iter_lines(self.CHUNK_SIZE):
if b'\0' in line:
raise BinarySuppressedError()
yield self._process_body(line) + lf
def _process_body(self, chunk):
return (self.processor
.process_body(
chunk.decode(self.msg.encoding, 'replace'),
self.msg.content_type)
.encode(self.output_encoding, 'replace'))
class BufferedPrettyStream(PrettyStream):
"""The same as :class:`PrettyStream` except that the body is fully
fetched before it's processed.
Suitable for regular HTTP responses.
"""
CHUNK_SIZE = 1024 * 10
def _iter_body(self):
# Read the whole body before prettifying it,
# but bail out immediately if the body is binary.
body = bytearray()
for chunk in self.msg.iter_body(self.CHUNK_SIZE):
if b'\0' in chunk:
raise BinarySuppressedError()
body.extend(chunk)
yield self._process_body(body)
###############################################################################
# Processing
###############################################################################
class HTTPLexer(lexer.RegexLexer):
"""Simplified HTTP lexer for Pygments.
It only operates on headers and provides a stronger contrast between
their names and values than the original one bundled with Pygments
(:class:`pygments.lexers.text.HttpLexer`), especially when the
Solarized color scheme is used.
"""
name = 'HTTP'
aliases = ['http']
filenames = ['*.http']
tokens = {
'root': [
# Request-Line
(r'([A-Z]+)( +)([^ ]+)( +)(HTTP)(/)(\d+\.\d+)',
lexer.bygroups(
token.Name.Function,
token.Text,
token.Name.Namespace,
token.Text,
token.Keyword.Reserved,
token.Operator,
token.Number
)),
# Response Status-Line
(r'(HTTP)(/)(\d+\.\d+)( +)(\d{3})( +)(.+)',
lexer.bygroups(
token.Keyword.Reserved, # 'HTTP'
token.Operator, # '/'
token.Number, # Version
token.Text,
token.Number, # Status code
token.Text,
token.Name.Exception, # Reason
)),
# Header
(r'(.*?)( *)(:)( *)(.+)', lexer.bygroups(
token.Name.Attribute, # Name
token.Text,
token.Operator, # Colon
token.Text,
token.String # Value
))
]
}
class BaseProcessor(object):
"""Base, noop output processor class."""
enabled = True
def __init__(self, env=Environment(), **kwargs):
"""
:param env: an :class:`Environment` instance
:param kwargs: additional keyword arguments that some
processors might require.
"""
self.env = env
self.kwargs = kwargs
def process_headers(self, headers):
"""Return processed `headers`
:param headers: The headers as text.
"""
return headers
def process_body(self, content, content_type, subtype):
"""Return processed `content`.
:param content: The body content as text
:param content_type: Full content type, e.g., 'application/atom+xml'.
:param subtype: E.g. 'xml'.
"""
return content
class JSONProcessor(BaseProcessor):
"""JSON body processor."""
def process_body(self, content, content_type, subtype):
if subtype == 'json':
try:
# Indent the JSON data, sort keys by name, and
# avoid unicode escapes to improve readability.
content = json.dumps(json.loads(content),
sort_keys=True,
ensure_ascii=False,
indent=4)
except ValueError:
# Invalid JSON but we don't care.
pass
return content
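# Hedged illustration of the reformatting above, with a made-up payload;
# ensure_ascii=False keeps non-ASCII characters readable:
import json
print(json.dumps(json.loads('{"b": 1, "a": "č"}'),
                 sort_keys=True, ensure_ascii=False, indent=4))
# {
#     "a": "č",
#     "b": 1
# }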
class PygmentsProcessor(BaseProcessor):
"""A processor that applies syntax-highlighting using Pygments
to the headers, and to the body as well if its content type is recognized.
"""
def __init__(self, *args, **kwargs):
super(PygmentsProcessor, self).__init__(*args, **kwargs)
# Cache that speeds things up when we process a streamed body by line.
self.lexers_by_type = {}
if not self.env.colors:
self.enabled = False
return
try:
style = get_style_by_name(
self.kwargs.get('pygments_style', DEFAULT_STYLE))
except ClassNotFound:
style = Solarized256Style
if self.env.is_windows or self.env.colors == 256:
fmt_class = Terminal256Formatter
else:
fmt_class = TerminalFormatter
self.formatter = fmt_class(style=style)
def process_headers(self, headers):
return pygments.highlight(
headers, HTTPLexer(), self.formatter).strip()
def process_body(self, content, content_type, subtype):
try:
lexer = self.lexers_by_type.get(content_type)
if not lexer:
try:
lexer = get_lexer_for_mimetype(content_type)
except ClassNotFound:
lexer = get_lexer_by_name(subtype)
self.lexers_by_type[content_type] = lexer
except ClassNotFound:
pass
else:
content = pygments.highlight(content, lexer, self.formatter)
return content.strip()
class HeadersProcessor(BaseProcessor):
"""Sorts headers by name retaining relative order of multiple headers
with the same name.
"""
def process_headers(self, headers):
lines = headers.splitlines()
headers = sorted(lines[1:], key=lambda h: h.split(':')[0])
return '\r\n'.join(lines[:1] + headers)
class OutputProcessor(object):
"""A delegate class that invokes the actual processors."""
installed_processors = {
'format': [
HeadersProcessor,
JSONProcessor
],
'colors': [
PygmentsProcessor
]
}
def __init__(self, groups, env=Environment(), **kwargs):
"""
:param env: a :class:`models.Environment` instance
:param groups: the groups of processors to be applied
:param kwargs: additional keyword arguments for processors
"""
self.processors = []
for group in groups:
for cls in self.installed_processors[group]:
processor = cls(env, **kwargs)
if processor.enabled:
self.processors.append(processor)
def process_headers(self, headers):
for processor in self.processors:
headers = processor.process_headers(headers)
return headers
def process_body(self, content, content_type):
# e.g., 'application/atom+xml'
content_type = content_type.split(';')[0]
# e.g., 'xml'
subtype = content_type.split('/')[-1].split('+')[-1]
for processor in self.processors:
content = processor.process_body(content, content_type, subtype)
return content
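# Hedged illustration of the mime-type splitting above (sample value only):
content_type = 'application/atom+xml; charset=utf-8'.split(';')[0]
subtype = content_type.split('/')[-1].split('+')[-1]
print(content_type, subtype)  # application/atom+xml xml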

View File

@ -1,387 +0,0 @@
import json
from typing import Optional, Type, Tuple
import pygments.formatters
import pygments.lexer
import pygments.lexers
import pygments.style
import pygments.styles
import pygments.token
from pygments.formatters.terminal import TerminalFormatter
from pygments.formatters.terminal256 import Terminal256Formatter
from pygments.lexer import Lexer
from pygments.lexers.data import JsonLexer
from pygments.lexers.special import TextLexer
from pygments.lexers.text import HttpLexer as PygmentsHttpLexer
from pygments.util import ClassNotFound
from ..lexers.json import EnhancedJsonLexer
from ..lexers.metadata import MetadataLexer
from ..ui.palette import SHADE_NAMES, get_color
from ...context import Environment
from ...plugins import FormatterPlugin
AUTO_STYLE = 'auto' # Follows terminal ANSI color styles
DEFAULT_STYLE = AUTO_STYLE
SOLARIZED_STYLE = 'solarized' # Bundled here
BUNDLED_STYLES = {
SOLARIZED_STYLE,
AUTO_STYLE
}
def get_available_styles():
return BUNDLED_STYLES | set(pygments.styles.get_all_styles())
class ColorFormatter(FormatterPlugin):
"""
Colorize using Pygments
This processor applies syntax highlighting to the headers,
and also to the body if its content type is recognized.
"""
group_name = 'colors'
metadata_lexer = MetadataLexer()
def __init__(
self,
env: Environment,
explicit_json=False,
color_scheme=DEFAULT_STYLE,
**kwargs
):
super().__init__(**kwargs)
if not env.colors:
self.enabled = False
return
use_auto_style = color_scheme == AUTO_STYLE
has_256_colors = env.colors == 256
if use_auto_style or not has_256_colors:
http_lexer = PygmentsHttpLexer()
body_formatter = header_formatter = TerminalFormatter()
precise = False
else:
from ..lexers.http import SimplifiedHTTPLexer
header_formatter, body_formatter, precise = self.get_formatters(color_scheme)
http_lexer = SimplifiedHTTPLexer(precise=precise)
self.explicit_json = explicit_json # --json
self.header_formatter = header_formatter
self.body_formatter = body_formatter
self.http_lexer = http_lexer
self.metadata_lexer = MetadataLexer(precise=precise)
def format_headers(self, headers: str) -> str:
return pygments.highlight(
code=headers,
lexer=self.http_lexer,
formatter=self.header_formatter,
).strip()
def format_body(self, body: str, mime: str) -> str:
lexer = self.get_lexer_for_body(mime, body)
if lexer:
body = pygments.highlight(
code=body,
lexer=lexer,
formatter=self.body_formatter,
)
return body
def format_metadata(self, metadata: str) -> str:
return pygments.highlight(
code=metadata,
lexer=self.metadata_lexer,
formatter=self.header_formatter,
).strip()
def get_lexer_for_body(
self, mime: str,
body: str
) -> Optional[Type[Lexer]]:
return get_lexer(
mime=mime,
explicit_json=self.explicit_json,
body=body,
)
def get_formatters(self, color_scheme: str) -> Tuple[
pygments.formatter.Formatter,
pygments.formatter.Formatter,
bool
]:
if color_scheme in PIE_STYLES:
header_style, body_style = PIE_STYLES[color_scheme]
precise = True
else:
header_style = self.get_style_class(color_scheme)
body_style = header_style
precise = False
return (
Terminal256Formatter(style=header_style),
Terminal256Formatter(style=body_style),
precise
)
@staticmethod
def get_style_class(color_scheme: str) -> Type[pygments.style.Style]:
try:
return pygments.styles.get_style_by_name(color_scheme)
except ClassNotFound:
return Solarized256Style
def get_lexer(
mime: str,
explicit_json=False,
body=''
) -> Optional[Type[Lexer]]:
# Build candidate mime type and lexer names.
mime_types, lexer_names = [mime], []
type_, subtype = mime.split('/', 1)
if '+' not in subtype:
lexer_names.append(subtype)
else:
subtype_name, subtype_suffix = subtype.split('+', 1)
lexer_names.extend([subtype_name, subtype_suffix])
mime_types.extend([
f'{type_}/{subtype_name}',
f'{type_}/{subtype_suffix}',
])
# As a last resort, if no lexer feels responsible, and
# the subtype contains 'json', take the JSON lexer
if 'json' in subtype:
lexer_names.append('json')
# Try to resolve the right lexer.
lexer = None
for mime_type in mime_types:
try:
lexer = pygments.lexers.get_lexer_for_mimetype(mime_type)
break
except ClassNotFound:
pass
else:
for name in lexer_names:
try:
lexer = pygments.lexers.get_lexer_by_name(name)
except ClassNotFound:
pass
if explicit_json and body and (not lexer or isinstance(lexer, TextLexer)):
# JSON response with an incorrect Content-Type?
try:
json.loads(body) # FIXME: the body also gets parsed in json.py
except ValueError:
pass # Nope
else:
lexer = pygments.lexers.get_lexer_by_name('json')
# Use our own JSON lexer: it supports JSON bodies preceded by non-JSON data
# as well as legit JSON bodies.
if isinstance(lexer, JsonLexer):
lexer = EnhancedJsonLexer()
return lexer
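# Hedged usage sketch: for a structured-syntax suffix such as
# 'application/vnd.api+json' the candidate list includes 'application/json'
# and the lexer name 'json', so a JSON-capable lexer should come back:
lexer = get_lexer('application/vnd.api+json')
print(lexer and lexer.name)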
class Solarized256Style(pygments.style.Style):
"""
solarized256
------------
A Pygments style inspired by Solarized's 256 color mode.
:copyright: (c) 2011 by Hank Gay, (c) 2012 by John Mastro.
:license: BSD, see LICENSE for more details.
"""
BASE03 = "#1c1c1c"
BASE02 = "#262626"
BASE01 = "#4e4e4e"
BASE00 = "#585858"
BASE0 = "#808080"
BASE1 = "#8a8a8a"
BASE2 = "#d7d7af"
BASE3 = "#ffffd7"
YELLOW = "#af8700"
ORANGE = "#d75f00"
RED = "#af0000"
MAGENTA = "#af005f"
VIOLET = "#5f5faf"
BLUE = "#0087ff"
CYAN = "#00afaf"
GREEN = "#5f8700"
background_color = BASE03
styles = {
pygments.token.Keyword: GREEN,
pygments.token.Keyword.Constant: ORANGE,
pygments.token.Keyword.Declaration: BLUE,
pygments.token.Keyword.Namespace: ORANGE,
pygments.token.Keyword.Reserved: BLUE,
pygments.token.Keyword.Type: RED,
pygments.token.Name.Attribute: BASE1,
pygments.token.Name.Builtin: BLUE,
pygments.token.Name.Builtin.Pseudo: BLUE,
pygments.token.Name.Class: BLUE,
pygments.token.Name.Constant: ORANGE,
pygments.token.Name.Decorator: BLUE,
pygments.token.Name.Entity: ORANGE,
pygments.token.Name.Exception: YELLOW,
pygments.token.Name.Function: BLUE,
pygments.token.Name.Tag: BLUE,
pygments.token.Name.Variable: BLUE,
pygments.token.String: CYAN,
pygments.token.String.Backtick: BASE01,
pygments.token.String.Char: CYAN,
pygments.token.String.Doc: CYAN,
pygments.token.String.Escape: RED,
pygments.token.String.Heredoc: CYAN,
pygments.token.String.Regex: RED,
pygments.token.Number: CYAN,
pygments.token.Operator: BASE1,
pygments.token.Operator.Word: GREEN,
pygments.token.Comment: BASE01,
pygments.token.Comment.Preproc: GREEN,
pygments.token.Comment.Special: GREEN,
pygments.token.Generic.Deleted: CYAN,
pygments.token.Generic.Emph: 'italic',
pygments.token.Generic.Error: RED,
pygments.token.Generic.Heading: ORANGE,
pygments.token.Generic.Inserted: GREEN,
pygments.token.Generic.Strong: 'bold',
pygments.token.Generic.Subheading: ORANGE,
pygments.token.Token: BASE1,
pygments.token.Token.Other: ORANGE,
}
PIE_HEADER_STYLE = {
# HTTP line / Headers / Etc.
pygments.token.Name.Namespace: 'bold primary',
pygments.token.Keyword.Reserved: 'bold grey',
pygments.token.Operator: 'bold grey',
pygments.token.Number: 'bold grey',
pygments.token.Name.Function.Magic: 'bold green',
pygments.token.Name.Exception: 'bold green',
pygments.token.Name.Attribute: 'blue',
pygments.token.String: 'primary',
# HTTP Methods
pygments.token.Name.Function: 'bold grey',
pygments.token.Name.Function.HTTP.GET: 'bold green',
pygments.token.Name.Function.HTTP.HEAD: 'bold green',
pygments.token.Name.Function.HTTP.POST: 'bold yellow',
pygments.token.Name.Function.HTTP.PUT: 'bold orange',
pygments.token.Name.Function.HTTP.PATCH: 'bold orange',
pygments.token.Name.Function.HTTP.DELETE: 'bold red',
# HTTP status codes
pygments.token.Number.HTTP.INFO: 'bold aqua',
pygments.token.Number.HTTP.OK: 'bold green',
pygments.token.Number.HTTP.REDIRECT: 'bold yellow',
pygments.token.Number.HTTP.CLIENT_ERR: 'bold orange',
pygments.token.Number.HTTP.SERVER_ERR: 'bold red',
# Metadata
pygments.token.Name.Decorator: 'grey',
pygments.token.Number.SPEED.FAST: 'bold green',
pygments.token.Number.SPEED.AVG: 'bold yellow',
pygments.token.Number.SPEED.SLOW: 'bold orange',
pygments.token.Number.SPEED.VERY_SLOW: 'bold red',
}
PIE_BODY_STYLE = {
# {}[]:
pygments.token.Punctuation: 'grey',
# Keys
pygments.token.Name.Tag: 'pink',
# Values
pygments.token.Literal.String: 'green',
pygments.token.Literal.String.Double: 'green',
pygments.token.Literal.Number: 'aqua',
pygments.token.Keyword: 'orange',
# Other stuff
pygments.token.Text: 'primary',
pygments.token.Name.Attribute: 'primary',
pygments.token.Name.Builtin: 'blue',
pygments.token.Name.Builtin.Pseudo: 'blue',
pygments.token.Name.Class: 'blue',
pygments.token.Name.Constant: 'orange',
pygments.token.Name.Decorator: 'blue',
pygments.token.Name.Entity: 'orange',
pygments.token.Name.Exception: 'yellow',
pygments.token.Name.Function: 'blue',
pygments.token.Name.Variable: 'blue',
pygments.token.String: 'aqua',
pygments.token.String.Backtick: 'secondary',
pygments.token.String.Char: 'aqua',
pygments.token.String.Doc: 'aqua',
pygments.token.String.Escape: 'red',
pygments.token.String.Heredoc: 'aqua',
pygments.token.String.Regex: 'red',
pygments.token.Number: 'aqua',
pygments.token.Operator: 'primary',
pygments.token.Operator.Word: 'green',
pygments.token.Comment: 'secondary',
pygments.token.Comment.Preproc: 'green',
pygments.token.Comment.Special: 'green',
pygments.token.Generic.Deleted: 'aqua',
pygments.token.Generic.Emph: 'italic',
pygments.token.Generic.Error: 'red',
pygments.token.Generic.Heading: 'orange',
pygments.token.Generic.Inserted: 'green',
pygments.token.Generic.Strong: 'bold',
pygments.token.Generic.Subheading: 'orange',
pygments.token.Token: 'primary',
pygments.token.Token.Other: 'orange',
}
def make_style(name, raw_styles, shade):
def format_value(value):
return ' '.join(
get_color(part, shade) or part
for part in value.split()
)
bases = (pygments.style.Style,)
data = {
'styles': {
key: format_value(value)
for key, value in raw_styles.items()
}
}
return type(name, bases, data)
def make_styles():
styles = {}
for shade, name in SHADE_NAMES.items():
styles[name] = [
make_style(name, style_map, shade)
for style_name, style_map in [
(f'Pie{name}HeaderStyle', PIE_HEADER_STYLE),
(f'Pie{name}BodyStyle', PIE_BODY_STYLE),
]
]
return styles
PIE_STYLES = make_styles()
PIE_STYLE_NAMES = list(PIE_STYLES.keys())
BUNDLED_STYLES |= PIE_STYLES.keys()
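# Hedged illustration of the dynamic style creation in make_style() above:
# a Pygments style class is built at runtime with type(); the class name
# and the single hard-coded colour below are made up, unlike the real
# palette-driven values:
DemoStyle = type('DemoStyle', (pygments.style.Style,), {
    'styles': {pygments.token.Name.Tag: '#ff00ff'},
})
print(DemoStyle.styles[pygments.token.Name.Tag])  # '#ff00ff'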

View File

@ -1,18 +0,0 @@
from ...plugins import FormatterPlugin
class HeadersFormatter(FormatterPlugin):
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.enabled = self.format_options['headers']['sort']
def format_headers(self, headers: str) -> str:
"""
Sorts headers by name while retaining relative
order of multiple headers with the same name.
"""
lines = headers.splitlines()
headers = sorted(lines[1:], key=lambda h: h.split(':')[0])
return '\r\n'.join(lines[:1] + headers)
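# Hedged illustration of the stable sort above: Python's sorted() keeps the
# relative order of equal keys, so repeated headers (sample values below)
# stay in their original order after sorting by name:
lines = ['HTTP/1.1 200 OK', 'Set-Cookie: a=1', 'Date: today', 'Set-Cookie: b=2']
print('\r\n'.join(lines[:1] + sorted(lines[1:], key=lambda h: h.split(':')[0])))
# HTTP/1.1 200 OK
# Date: today
# Set-Cookie: a=1
# Set-Cookie: b=2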

Some files were not shown because too many files have changed in this diff.