forked from extern/httpie-cli
Compare commits
56 Commits
Comparing: 3.1.0 ... fix-initia

Commits (SHA1 only; the author and date columns were empty in this view):
9d2e2afede, 418b12bbd6, ecff53f2d5, 41da87f7c8, 4f172a61b4, 542a2d35de, d9e1dc08c9, 3b734fb0bc,
8abe47969e, 8173cb0337, 7fd34fc8ce, 80ae644464, 69fe5dbfd1, f09e7564e7, dc5274e491, ad2b86ccf4,
11b2af0f59, b54239b525, b0b0f3dc53, 9f7612cdeb, 5e76ebc5e1, 343a521673, 2142ae60c3, 0b6a9b23c2,
9e1c0b98c7, 003f2095d4, f9b5c2f696, 76495cbdec, c4d7d05f3b, 7a4fb5d966, f7c1bb269e, 0f9fd76852,
af1d6b1853, 419cc2c34a, 79a8ecd84b, d262181bed, 732878f1b4, 83803db14d, dd2c9513f3, 278dfc487d,
ff6f1887b0, 86f4bf4d0a, e6d0bfec7c, 9f1ec6d5cc, 85ba9ad8ea, d03e3f4e14, c157948531, 33ea977b64,
d1596dde12, af2ffb6999, 0632c4d614, 6787a2bd29, 9d2864b966, a5288f0cd6, 8efa7cb04d, baec1b2202
.github/workflows/autogenerated-files.yml (vendored, new file, +28)

@@ -0,0 +1,28 @@
name: Update Autogenerated Files

on:
  push:
    branches:
      - master

jobs:
  regen-autogenerated-files:
    runs-on: ubuntu-latest
    if: github.event.pull_request.head.repo.full_name == github.repository
    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v3
        with:
          python-version: 3.9

      - run: make regen-all

      - name: Create Pull Request
        id: cpr
        uses: peter-evans/create-pull-request@v4
        with:
          commit-message: "[automated] Update auto-generated files"
          title: "[automated] Update auto-generated files"
          delete-branch: true
          token: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/benchmark.yml (vendored, 4 changed lines)

@@ -30,7 +30,7 @@ jobs:
           echo "::set-output name=body::$body"

       - name: Find Comment
-        uses: peter-evans/find-comment@v1
+        uses: peter-evans/find-comment@v2
         id: fc
         with:
           issue-number: ${{ github.event.pull_request.number }}
@@ -38,7 +38,7 @@ jobs:
           body-includes: '# Benchmarks'

       - name: Create or update comment
-        uses: peter-evans/create-or-update-comment@v1
+        uses: peter-evans/create-or-update-comment@v2
         with:
           comment-id: ${{ steps.fc.outputs.comment-id }}
           issue-number: ${{ github.event.pull_request.number }}
.github/workflows/docs-update-install.yml (vendored, deleted, -33)

@@ -1,33 +0,0 @@
name: Update & Install Docs

on:
  push:
    branches:
      - master
    paths:
      - .github/workflows/docs-update-install.yml
      - docs/installation/*

  # Allow to call the workflow manually
  workflow_dispatch:

jobs:
  doc:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v3
        with:
          python-version: 3.9
      - run: make install
      - run: make doc-update-install
      - uses: Automattic/action-commit-to-branch@master
        with:
          branch: master
          commit_message: |
            Auto-update install docs

            Via .github/workflows/docs-update-install.yml

        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/release-brew.yml (vendored, new file, +26)

@@ -0,0 +1,26 @@
name: Release on Homebrew

on:
  workflow_dispatch:
    inputs:
      branch:
        description: "The branch, tag or SHA to release from"
        required: true
        default: "master"

jobs:
  brew-release:
    name: Release the Homebrew Package
    runs-on: macos-latest

    steps:
      - uses: actions/checkout@v3
        with:
          ref: ${{ github.event.inputs.branch }}

      - uses: mislav/bump-homebrew-formula-action@v1
        with:
          formula-name: httpie
          tag-name: ${{ github.events.inputs.branch }}
        env:
          COMMITTER_TOKEN: ${{ secrets.BREW_UPDATE_TOKEN }}
.github/workflows/release-choco.yml (vendored, new file, +61)

@@ -0,0 +1,61 @@
name: Release on Chocolatey

on:
  workflow_dispatch:
    inputs:
      branch:
        description: "The branch, tag or SHA to release from"
        required: true
        default: "master"

jobs:
  brew-release:
    name: Release the Chocolatey
    runs-on: windows-2019
    env:
      package-dir: docs\packaging\windows-chocolatey

    steps:
      - uses: actions/checkout@v3
        with:
          ref: ${{ github.event.inputs.branch }}

      # Chocolatey comes already installed on the Windows GHA image
      - name: Build the Choco package
        shell: cmd
        run: choco pack -v
        working-directory: ${{ env.package-dir }}

      - name: Check the Choco package
        run: choco info httpie -s .
        working-directory: ${{ env.package-dir }}

      - name: Local installation
        run: |
          choco install httpie -y -dv -s "'.;https://community.chocolatey.org/api/v2/'"
        working-directory: ${{ env.package-dir }}

      - name: Test the locally installed binaries
        run: |
          # Source: https://stackoverflow.com/a/46760714/15330941

          # Make `refreshenv` available right away, by defining the $env:ChocolateyInstall
          # variable and importing the Chocolatey profile module.
          $env:ChocolateyInstall = Convert-Path "$((Get-Command choco).Path)\..\.."
          Import-Module "$env:ChocolateyInstall\helpers\chocolateyProfile.psm1"
          refreshenv

          http --version
          https --version
          httpie --version
          choco uninstall -y httpie
        working-directory: ${{ env.package-dir }}

      - name: Publish on Chocolatey
        shell: bash
        env:
          CHOCO_API_KEY: ${{ secrets.CHOCO_API_KEY }}
        run: |
          choco apikey --key $CHOCO_API_KEY --source https://push.chocolatey.org/
          choco push httpie*.nupkg --source https://push.chocolatey.org/
        working-directory: ${{ env.package-dir }}
.github/workflows/release-linux-standalone.yml (vendored, new file, +77)

@@ -0,0 +1,77 @@
name: Release as Standalone Linux Package

on:
  workflow_dispatch:
    inputs:
      branch:
        description: "The branch, tag or SHA to release from"
        required: true
        default: "master"
      tag_name:
        description: "Which release to upload the artifacts to (e.g., 3.0)"
        required: true

  release:
    types: [released, prereleased]


jobs:
  binary-build-and-release:
    name: Build and Release
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
        with:
          ref: ${{ github.event.inputs.branch }}

      - uses: actions/setup-python@v3
        with:
          python-version: 3.9

      - name: Build Artifacts
        run: |
          cd extras/packaging/linux
          ./get_release_artifacts.sh

      - uses: actions/upload-artifact@v3
        with:
          name: http
          path: extras/packaging/linux/artifacts/dist/http

      - uses: actions/upload-artifact@v3
        with:
          name: httpie.deb
          path: extras/packaging/linux/artifacts/dist/*.deb

      - uses: actions/upload-artifact@v3
        with:
          name: httpie.rpm
          path: extras/packaging/linux/artifacts/dist/*.rpm

      - name: Determine the release upload upload_url
        id: release_id
        run: |
          pip install httpie
          export API_URL="api.github.com/repos/httpie/httpie/releases/tags/${{ github.event.inputs.tag_name }}"
          export UPLOAD_URL=`https --ignore-stdin GET $API_URL | jq -r ".upload_url"`
          echo "::set-output name=UPLOAD_URL::$UPLOAD_URL"

      - name: Publish Debian Package
        uses: actions/upload-release-asset@v1.0.2
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.release_id.outputs.UPLOAD_URL }}
          asset_path: extras/packaging/linux/artifacts/dist/httpie_${{ github.event.inputs.tag_name }}_amd64.deb
          asset_name: httpie-${{ github.event.inputs.tag_name }}.deb
          asset_content_type: binary/octet-stream

      - name: Publish Single Executable
        uses: actions/upload-release-asset@v1.0.2
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.release_id.outputs.UPLOAD_URL }}
          asset_path: extras/packaging/linux/artifacts/dist/http
          asset_name: http
          asset_content_type: binary/octet-stream
.github/workflows/release-pypi.yml (vendored, new file, +30)

@@ -0,0 +1,30 @@
name: Release on PyPI

on:
  workflow_dispatch:
    inputs:
      branch:
        description: "The branch, tag or SHA to release from"
        required: true
        default: "master"

jobs:
  pypi-build-and-release:
    name: Build and Release
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
        with:
          ref: ${{ github.event.inputs.branch }}

      - uses: actions/setup-python@v3
        with:
          python-version: 3.9

      - name: Build a binary wheel and a source tarball
        run: make install && make build

      - name: Release on PyPI
        uses: pypa/gh-action-pypi-publish@master
        with:
          password: ${{ secrets.PYPI_TOKEN }}
.github/workflows/release-snap.yml (vendored, 26 changed lines)

@@ -1,4 +1,4 @@
-name: Release snap
+name: Release on Snap

 on:
   workflow_dispatch:
@@ -7,22 +7,34 @@ on:
       description: "The branch, tag or SHA to release from"
       required: true
       default: "master"
-      level:
-        description: "Release level: stable, candidate, beta, edge"
-        required: true
-        default: "edge"

 jobs:
-  snap:
+  snap-build-and-release:
+    name: Build & Release the Snap Package
     runs-on: ubuntu-latest
+
+    strategy:
+      # If any of the stages fail, then we'll stop the action
+      # to give release manager time to investigate the underlying
+      # issue.
+      fail-fast: true
+      matrix:
+        level: [edge, beta, candidate, stable]
+
+    # Set the concurrency level for this version, so
+    # that we'll release one by one.
+    concurrency: ${{ github.event.inputs.branch }}

     steps:
       - uses: actions/checkout@v3
         with:
           ref: ${{ github.event.inputs.branch }}

       - uses: snapcore/action-build@v1
         id: build

       - uses: snapcore/action-publish@v1
         with:
           store_login: ${{ secrets.SNAP_STORE_LOGIN }}
           snap: ${{ steps.build.outputs.snap }}
-          release: ${{ github.event.inputs.level }}
+          release: ${{ matrix.level }}
.github/workflows/release.yml (vendored, deleted, -33)

@@ -1,33 +0,0 @@
name: Release on PyPI

on:
  # Add a "Trigger" button to manually start the workflow.
  workflow_dispatch:
    inputs:
      branch:
        description: "The branch, tag or SHA to release from"
        required: true
        default: "master"
  # It could be fully automated by uncommenting following lines.
  # Let's see later if we are confident enough to try it :)
  # release:
  #   types:
  #     - published

jobs:
  new-release:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
        with:
          ref: ${{ github.event.inputs.branch }}
      - name: PyPI configuration
        run: |
          echo "[distutils]\nindex-servers=\n httpie\n\n[httpie]\nrepository = https://upload.pypi.org/legacy/\n" > $HOME/.pypirc
      - uses: actions/setup-python@v3
        with:
          python-version: 3.9
      - run: make publish
        env:
          TWINE_USERNAME: __token__
          TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
.github/workflows/stale.yml (vendored, 2 changed lines)

@@ -10,7 +10,7 @@ jobs:
     runs-on: ubuntu-latest

     steps:
-      - uses: actions/stale@v4
+      - uses: actions/stale@v5
        with:
          close-pr-message: 'Thanks for the pull request, but since it was stale for more than a 30 days we are closing it. If you want to work back on it, feel free to re-open it or create a new one.'
          stale-pr-label: 'stale'
.github/workflows/tests.yml (vendored, 3 changed lines)

@@ -1,4 +1,7 @@
 name: Tests
+concurrency:
+  group: ${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true

 on:
   push:
.gitignore (vendored, 4 changed lines)

@@ -43,8 +43,8 @@ MANIFEST
 # PyInstaller
 # Usually these files are written by a python script from a template
 # before PyInstaller builds the exe, so as to inject date/other infos into it.
-*.manifest
 *.spec
+*.manifest

 # Installer logs
 pip-log.txt
@@ -151,3 +151,5 @@ dmypy.json

 # Windows Chocolatey
 *.nupkg
+
+artifacts/
@@ -7,11 +7,6 @@ actions:
  # Use this when the latest spec is not up-to-date.
  # post-upstream-clone: "cp docs/packaging/linux-fedora/httpie.spec.txt httpie.spec"
jobs:
  - job: copr_build
    trigger: pull_request
    metadata:
      targets:
        - fedora-all
  - job: propose_downstream
    trigger: release
    metadata:
CHANGELOG.md (19 added lines)

@@ -3,6 +3,25 @@
This document records all notable changes to [HTTPie](https://httpie.io).
This project adheres to [Semantic Versioning](https://semver.org/).

## [3.2.1](https://github.com/httpie/httpie/compare/3.1.0...3.2.1) (2022-05-06)

- Improved support for determining auto-streaming when the `Content-Type` header includes encoding information. ([#1383](https://github.com/httpie/httpie/pull/1383))
- Fixed the display of the crash happening in the secondary process for update checks. ([#1388](https://github.com/httpie/httpie/issues/1388))

## [3.2.0](https://github.com/httpie/httpie/compare/3.1.0...3.2.0) (2022-05-05)

- Added a warning for notifying the user about the new updates. ([#1336](https://github.com/httpie/httpie/pull/1336))
- Added support for single binary executables. ([#1330](https://github.com/httpie/httpie/pull/1330))
- Added support for man pages (and auto generation of them from the parser declaration). ([#1317](https://github.com/httpie/httpie/pull/1317))
- Added `http --manual` for man pages & regular manual with pager. ([#1343](https://github.com/httpie/httpie/pull/1343))
- Added support for session persistence of repeated headers with the same name. ([#1335](https://github.com/httpie/httpie/pull/1335))
- Added support for sending `Secure` cookies to the `localhost` (and `.local` suffixed domains). ([#1308](https://github.com/httpie/httpie/issues/1308))
- Improved UI for the progress bars. ([#1324](https://github.com/httpie/httpie/pull/1324))
- Fixed redundant creation of `Content-Length` header on `OPTIONS` requests. ([#1310](https://github.com/httpie/httpie/issues/1310))
- Fixed blocking of warning thread on some use cases. ([#1349](https://github.com/httpie/httpie/issues/1349))
- Changed `httpie plugins` to the new `httpie cli` namespace as `httpie cli plugins` (`httpie plugins` continues to work as a hidden alias). ([#1320](https://github.com/httpie/httpie/issues/1320))
- Soft deprecated the `--history-print`. ([#1380](https://github.com/httpie/httpie/pull/1380))

## [3.1.0](https://github.com/httpie/httpie/compare/3.0.2...3.1.0) (2022-03-08)

- **SECURITY** Fixed the [vulnerability](https://github.com/httpie/httpie/security/advisories/GHSA-9w4w-cpc8-h2fq) that caused exposure of cookies on redirects to third party hosts. ([#1312](https://github.com/httpie/httpie/pull/1312))
@@ -59,8 +59,10 @@ $ git checkout -b my_topical_branch

 #### Setup

-The [Makefile](https://github.com/httpie/httpie/blob/master/Makefile) contains a bunch of tasks to get you started. Just run
-the following command, which:
+The [Makefile](https://github.com/httpie/httpie/blob/master/Makefile) contains a bunch of tasks to get you started.
+You can run `$ make` to see all the available tasks.
+
+To get started, run the command below, which:

 - Creates an isolated Python virtual environment inside `./venv`
   (via the standard library [venv](https://docs.python.org/3/library/venv.html) tool);
@@ -70,7 +72,7 @@ the following command, which:
 - and runs tests (It is the same as running `make install test`).

 ```bash
-$ make
+$ make all
 ```

 #### Python virtual environment
Makefile (52 changed lines)

@@ -22,6 +22,26 @@ VENV_PYTHON=$(VENV_BIN)/python
 export PATH := $(VENV_BIN):$(PATH)


+default: list-tasks
+
+
+###############################################################################
+# Default task to get a list of tasks when `make' is run without args.
+# <https://stackoverflow.com/questions/4219255>
+###############################################################################
+
+list-tasks:
+	@echo Available tasks:
+	@echo ----------------
+	@$(MAKE) -pRrq -f $(lastword $(MAKEFILE_LIST)) : 2>/dev/null | awk -v RS= -F: '/^# File/,/^# Finished Make data base/ {if ($$1 !~ "^[#.]") {print $$1}}' | sort | egrep -v -e '^[^[:alnum:]]' -e '^$@$$'
+	@echo
+
+
 ###############################################################################
 # Installation
 ###############################################################################

 all: uninstall-httpie install test


@@ -30,10 +50,10 @@ install: venv install-reqs

 install-reqs:
 	@echo $(H1)Updating package tools$(H1END)
-	$(VENV_PIP) install --upgrade pip wheel
+	$(VENV_PIP) install --upgrade pip wheel build

 	@echo $(H1)Installing dev requirements$(H1END)
-	$(VENV_PIP) install --upgrade --editable '.[dev]'
+	$(VENV_PIP) install --upgrade '.[dev]' '.[test]'

 	@echo $(H1)Installing HTTPie$(H1END)
 	$(VENV_PIP) install --upgrade --editable .
@@ -147,19 +167,17 @@ doc-check:
 	mdl --git-recurse --style docs/markdownlint.rb .


-doc-update-install:
-	@echo $(H1)Updating installation instructions in the docs$(H1END)
-	$(VENV_PYTHON) docs/installation/generate.py
-
-
 ###############################################################################
 # Publishing to PyPi
 ###############################################################################


 build:
-	rm -rf build/
-	$(VENV_PYTHON) setup.py sdist bdist_wheel
+	rm -rf build/ dist/
+	mv httpie/internal/__build_channel__.py httpie/internal/__build_channel__.py.original
+	echo 'BUILD_CHANNEL = "pip"' > httpie/internal/__build_channel__.py
+	$(VENV_PYTHON) -m build --sdist --wheel --outdir dist/
+	mv httpie/internal/__build_channel__.py.original httpie/internal/__build_channel__.py


 publish: test-all publish-no-test
@@ -203,7 +221,7 @@ brew-test:
 	- brew uninstall httpie

 	@echo $(H1)Building from source…$(H1END)
-	- brew install --build-from-source ./docs/packaging/brew/httpie.rb
+	- brew install --HEAD --build-from-source ./docs/packaging/brew/httpie.rb

 	@echo $(H1)Verifying…$(H1END)
 	http --version
@@ -211,3 +229,17 @@ brew-test:

 	@echo $(H1)Auditing…$(H1END)
 	brew audit --strict httpie
+
+###############################################################################
+# Regeneration
+###############################################################################
+
+regen-all: regen-man-pages regen-install-methods
+
+regen-man-pages: install
+	@echo $(H1)Regenerate man pages$(H1END)
+	$(VENV_PYTHON) extras/scripts/generate_man_pages.py
+
+regen-install-methods:
+	@echo $(H1)Updating installation instructions in the docs$(H1END)
+	$(VENV_PYTHON) docs/installation/generate.py
@@ -21,6 +21,14 @@ They use simple and natural syntax and provide formatted and colorized output.

 <img src="https://raw.githubusercontent.com/httpie/httpie/master/docs/httpie-animation.gif" alt="HTTPie in action" width="100%"/>

+
+## We lost 54k GitHub stars
+
+Please note we recently accidentally made this repo private for a moment, and GitHub deleted our community that took a decade to build. Read the full story here: https://httpie.io/blog/stardust
+
+
+
+
 ## Getting started

 - [Installation instructions →](https://httpie.io/docs#installation)
@ -162,6 +162,8 @@ Also works for other Debian-derived distributions like MX Linux, Linux Mint, dee
|
||||
|
||||
```bash
|
||||
# Install httpie
|
||||
$ curl -SsL https://packages.httpie.io/deb/KEY.gpg | apt-key add -
|
||||
$ curl -SsL -o /etc/apt/sources.list.d/httpie.list https://packages.httpie.io/deb/httpie.list
|
||||
$ apt update
|
||||
$ apt install httpie
|
||||
```
|
||||
@ -213,6 +215,21 @@ $ pacman -Syu httpie
|
||||
$ pacman -Syu
|
||||
```
|
||||
|
||||
#### Single binary executables
|
||||
|
||||
Get the standalone HTTPie Linux executables when you don't want to go through the full installation process
|
||||
|
||||
```bash
|
||||
# Install httpie
|
||||
$ https --download packages.httpie.io/binaries/linux/http-latest -o http
|
||||
$ chmod +x ./http
|
||||
```
|
||||
|
||||
```bash
|
||||
# Upgrade httpie
|
||||
$ https --download packages.httpie.io/binaries/linux/http-latest -o http
|
||||
```
|
||||
|
||||
### FreeBSD
|
||||
|
||||
#### FreshPorts
|
||||
@ -277,7 +294,7 @@ Synopsis:
|
||||
$ http [flags] [METHOD] URL [ITEM [ITEM]]
|
||||
```
|
||||
|
||||
See also `http --help`.
|
||||
See also `http --help` (and for systems where man pages are available, you can use `man http`).
|
||||
|
||||
### Examples
|
||||
|
||||
@ -448,7 +465,7 @@ $ http https://api.github.com/search/repositories q==httpie per_page==1
|
||||
GET /search/repositories?q=httpie&per_page=1 HTTP/1.1
|
||||
```
|
||||
|
||||
You can even retrieve the `value` from a file by using the `param==@file` syntax. This would also effectively strip the newlines from the end. See [#file-based-separators] for more examples.
|
||||
You can even retrieve the `value` from a file by using the `param==@file` syntax. This would also effectively strip the newlines from the end. See [file based separators](#file-based-separators) for more examples.
|
||||
|
||||
```bash
|
||||
$ http pie.dev/get text==@files/text.txt
|
||||
@ -1653,6 +1670,10 @@ If you’d like to silence warnings as well, use `-q` or `--quiet` twice:
|
||||
Let’s say that there is an API that returns the whole resource when it is updated, but you are only interested in the response headers to see the status code after an update:
|
||||
|
||||
```bash
|
||||
$ http --headers PATCH pie.dev/patch name='New Name'
|
||||
```
|
||||
|
||||
Since you are only printing the HTTP headers here, the connection to the server is closed as soon as all the response headers have been received.
Therefore, bandwidth and time aren’t wasted downloading a body you don’t care about.
The response headers are always downloaded, even if they are not part of the output.
|
||||
|
||||
@ -1665,14 +1686,6 @@ $ http --all --follow pie.dev/redirect/3
|
||||
`--raw='data'`, and `@/file/path`.
|
||||
|
||||
### Redirected Input
|
||||
|
||||
In addition to crafting structured [JSON](#json) and [forms](#forms) requests with the [request items](#request-items) syntax, you can provide a raw request body that will be sent without further processing.
|
||||
These two approaches for specifying request data (i.e., structured and raw) cannot be combined.
|
||||
|
||||
There are three methods for passing raw request data: piping via `stdin`,
|
||||
`--raw='data'`, and `@/file/path`.
|
||||
|
||||
### Redirected Input
|
||||
|
||||
The universal method for passing request data is through redirected `stdin`
|
||||
(standard input)—piping.
|
||||
@ -2292,8 +2305,10 @@ Upgraded 'session.json' @ 'pie.dev' to v3.1.0
|
||||
$ http pie.dev/get
|
||||
```
|
||||
|
||||
Currently, HTTPie offers a single configurable option:
|
||||
|
||||
### Configurable options
|
||||
|
||||
Currently, HTTPie offers a single configurable option:
|
||||
|
||||
#### `default_options`
|
||||
|
||||
An `Array` (by default empty) of default options that should be applied to every invocation of HTTPie.
|
||||
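For illustration only (the config path shown is the common default on Linux, and the flags are merely examples of valid options, not recommendations):

```bash
$ cat ~/.config/httpie/config.json
{
    "default_options": ["--style=monokai", "--session=default"]
}
```
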
@ -2345,7 +2360,7 @@ However, it is not recommended modifying the default behavior in a way that woul
|
||||
*) echo 'Other Error!' ;;
|
||||
esac
|
||||
fi
|
||||
```
|
||||
```
|
||||
|
||||
### Best practices
|
||||
|
||||
@ -2404,15 +2419,23 @@ This command is currently in beta.
|
||||
"Program: http, Version: 0.0.1a0"
|
||||
```
|
||||
|
||||
#### `httpie cli plugins`
|
||||
|
||||
The `plugins` interface is a very simple plugin manager for installing, listing, and uninstalling HTTPie plugins.

In the past, `pip` was used to install/uninstall plugins, but in some environments (e.g., brew-installed packages) it wasn’t working properly. The new interface is a thin overlay on top of `pip` that allows plugin installation with every installation method.

By default, the plugins (and their missing dependencies) will be stored under the configuration directory, but this can be modified through the `plugins_dir` variable in the config.

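As a hedged illustration of that knob (the config file path below is the common Linux default, and the directory value is made up for the example):

```bash
$ cat ~/.config/httpie/config.json
{
    "plugins_dir": "/opt/httpie-plugins"
}
```
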
##### `httpie cli plugins install`
|
||||
|
||||
For installing plugins from [PyPI](https://pypi.org/) or from local paths, `httpie plugins install`
|
||||
|
||||
For installing plugins from [PyPI](https://pypi.org/) or from local paths, `httpie cli plugins install`
|
||||
can be used.
|
||||
```bash
|
||||
|
||||
```bash
|
||||
$ httpie cli plugins install httpie-plugin
|
||||
Installing httpie-plugin...
|
||||
Successfully installed httpie-plugin-1.0.2
|
||||
@ -2423,7 +2446,7 @@ $ httpie cli export-args | jq '"Program: " + .spec.name + ", Version: " + .vers
|
||||
|
||||
##### `httpie cli plugins list`
|
||||
|
||||
List all installed plugins.
|
||||
List all installed plugins.
|
||||
|
||||
```bash
|
||||
$ httpie cli plugins list
|
||||
@ -2434,13 +2457,13 @@ plugin installations on every installation method.
|
||||
httpie_converter (1.0.0)
|
||||
httpie_iterm_converter (httpie.plugins.converter.v1)
|
||||
httpie_konsole_konverter (httpie.plugins.converter.v1)
|
||||
```
|
||||
```
|
||||
|
||||
#### `httpie plugins upgrade`
|
||||
##### `httpie cli plugins upgrade`
|
||||
|
||||
For upgrading already installed plugins, use `httpie plugins upgrade`.
|
||||
|
||||
```bash
|
||||
```bash
|
||||
$ httpie cli plugins upgrade httpie-plugin
|
||||
```
|
||||
|
||||
@ -2448,12 +2471,12 @@ Successfully installed httpie-plugin-1.0.2
|
||||
|
||||
Uninstall plugins from the isolated plugins directory. If the plugin is not installed
|
||||
through `httpie cli plugins install`, it won’t uninstall it.
|
||||
|
||||
|
||||
```bash
|
||||
$ httpie cli plugins uninstall httpie-plugin
|
||||
```
|
||||
|
||||
## Meta
|
||||
## Meta
|
||||
|
||||
### Interface design
|
||||
|
||||
@ -2463,21 +2486,21 @@ httpie_converter (1.0.0)
|
||||
For example, compare this HTTP request:
|
||||
|
||||
```http
|
||||
POST /post HTTP/1.1
|
||||
POST /post HTTP/1.1
|
||||
Host: pie.dev
|
||||
X-API-Key: 123
|
||||
User-Agent: Bacon/1.0
|
||||
Content-Type: application/x-www-form-urlencoded
|
||||
|
||||
|
||||
name=value&name2=value2
|
||||
```
|
||||
|
||||
|
||||
with the HTTPie command that sends it:
|
||||
|
||||
```bash
|
||||
```bash
|
||||
$ http -f POST pie.dev/post \
|
||||
X-API-Key:123 \
|
||||
User-Agent:Bacon/1.0 \
|
||||
User-Agent:Bacon/1.0 \
|
||||
name=value \
|
||||
name2=value2
|
||||
```
|
||||
|
@@ -252,6 +252,7 @@ def fetch_missing_users_details(people: People) -> None:
 def save_awesome_people(people: People) -> None:
     with DB_FILE.open(mode='w', encoding='utf-8') as fh:
         json.dump(people, fh, indent=4, sort_keys=True)
+        fh.write("\n")


 def debug(*args: Any) -> None:
@@ -8,19 +8,27 @@ from jinja2 import Template
 from fetch import HERE, load_awesome_people

 TPL_FILE = HERE / 'snippet.jinja2'

 HTTPIE_TEAM = {
     'claudiatd',
     'jakubroztocil',
     'jkbr',
+    'isidentical'
 }

+BOT_ACCOUNTS = {
+    'dependabot-sr'
+}
+
+IGNORE_ACCOUNTS = HTTPIE_TEAM | BOT_ACCOUNTS
+

 def generate_snippets(release: str) -> str:
     people = load_awesome_people()
     contributors = {
         name: details
         for name, details in people.items()
-        if details['github'] not in HTTPIE_TEAM
+        if details['github'] not in IGNORE_ACCOUNTS
         and (release in details['committed'] or release in details['reported'])
     }
@ -53,11 +53,13 @@
|
||||
},
|
||||
"Batuhan Taskaya": {
|
||||
"committed": [
|
||||
"3.0.0"
|
||||
"3.0.0",
|
||||
"3.2.0"
|
||||
],
|
||||
"github": "isidentical",
|
||||
"reported": [
|
||||
"3.0.0"
|
||||
"3.0.0",
|
||||
"3.2.0"
|
||||
],
|
||||
"twitter": "isidentical"
|
||||
},
|
||||
@ -118,6 +120,14 @@
|
||||
"reported": [],
|
||||
"twitter": "elena_lape"
|
||||
},
|
||||
"Ethan Mills": {
|
||||
"committed": [
|
||||
"3.2.0"
|
||||
],
|
||||
"github": "ethanmills",
|
||||
"reported": [],
|
||||
"twitter": null
|
||||
},
|
||||
"Fabio Peruzzo": {
|
||||
"committed": [],
|
||||
"github": "peruzzof",
|
||||
@ -189,7 +199,8 @@
|
||||
"committed": [
|
||||
"2.5.0",
|
||||
"2.6.0",
|
||||
"3.0.0"
|
||||
"3.0.0",
|
||||
"3.2.0"
|
||||
],
|
||||
"github": "jakubroztocil",
|
||||
"reported": [
|
||||
@ -213,7 +224,8 @@
|
||||
],
|
||||
"github": "blyxxyz",
|
||||
"reported": [
|
||||
"3.0.0"
|
||||
"3.0.0",
|
||||
"3.2.0"
|
||||
],
|
||||
"twitter": null
|
||||
},
|
||||
@ -309,7 +321,8 @@
|
||||
"committed": [],
|
||||
"github": "ducaale",
|
||||
"reported": [
|
||||
"2.5.0"
|
||||
"2.5.0",
|
||||
"3.2.0"
|
||||
],
|
||||
"twitter": null
|
||||
},
|
||||
@ -321,6 +334,22 @@
|
||||
],
|
||||
"twitter": "sevenc_nanashi"
|
||||
},
|
||||
"Nicklas Ansman Giertz": {
|
||||
"committed": [],
|
||||
"github": "ansman",
|
||||
"reported": [
|
||||
"3.2.0"
|
||||
],
|
||||
"twitter": null
|
||||
},
|
||||
"Oliver Fish": {
|
||||
"committed": [],
|
||||
"github": "Oliver-Fish",
|
||||
"reported": [
|
||||
"3.2.0"
|
||||
],
|
||||
"twitter": null
|
||||
},
|
||||
"Omer Akram": {
|
||||
"committed": [
|
||||
"2.6.0",
|
||||
@ -357,6 +386,14 @@
|
||||
],
|
||||
"twitter": null
|
||||
},
|
||||
"Roberto L\u00f3pez L\u00f3pez": {
|
||||
"committed": [],
|
||||
"github": "robertolopezlopez",
|
||||
"reported": [
|
||||
"3.2.0"
|
||||
],
|
||||
"twitter": null
|
||||
},
|
||||
"Russell Shurts": {
|
||||
"committed": [],
|
||||
"github": "rshurts",
|
||||
@ -487,6 +524,14 @@
|
||||
],
|
||||
"twitter": null
|
||||
},
|
||||
"dependabot[bot]": {
|
||||
"committed": [
|
||||
"3.2.0"
|
||||
],
|
||||
"github": "dependabot-sr",
|
||||
"reported": [],
|
||||
"twitter": null
|
||||
},
|
||||
"dkreeft": {
|
||||
"committed": [
|
||||
"2.6.0",
|
||||
@ -553,6 +598,14 @@
|
||||
],
|
||||
"twitter": null
|
||||
},
|
||||
"luzpaz": {
|
||||
"committed": [
|
||||
"3.2.0"
|
||||
],
|
||||
"github": "luzpaz",
|
||||
"reported": [],
|
||||
"twitter": null
|
||||
},
|
||||
"nixbytes": {
|
||||
"committed": [
|
||||
"2.5.0"
|
||||
@ -593,6 +646,14 @@
|
||||
],
|
||||
"twitter": null
|
||||
},
|
||||
"zhaohanqing95": {
|
||||
"committed": [],
|
||||
"github": "zhaohanqing95",
|
||||
"reported": [
|
||||
"3.2.0"
|
||||
],
|
||||
"twitter": null
|
||||
},
|
||||
"zoulja": {
|
||||
"committed": [],
|
||||
"github": "zoulja",
|
||||
@ -627,4 +688,4 @@
|
||||
],
|
||||
"twitter": null
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -2,9 +2,10 @@

 ## Community contributions

-We’d like to thank these amazing people for their contributions to this release: {% for name, details in contributors.items() -%}
-[{{ name }}](https://github.com/{{ details.github }}){{ '' if loop.last else ', ' }}
-{%- endfor %}.
+We’d like to thank these amazing people for their contributions to this release:
+{% for name, details in contributors.items() -%}
+- [{{ name }}](https://github.com/{{ details.github }}){{ '' if loop.last else '\n' }}
+{%- endfor %}

 <!-- Twitter -->
@ -17,11 +17,12 @@ docs-structure:
|
||||
Windows:
|
||||
- chocolatey
|
||||
Linux:
|
||||
- snap-linux
|
||||
- brew-linux
|
||||
- apt
|
||||
- dnf
|
||||
- yum
|
||||
- single-binary
|
||||
- snap-linux
|
||||
- brew-linux
|
||||
- pacman
|
||||
FreeBSD:
|
||||
- pkg
|
||||
@ -36,6 +37,8 @@ tools:
|
||||
package: https://packages.debian.org/sid/web/httpie
|
||||
commands:
|
||||
install:
|
||||
- curl -SsL https://packages.httpie.io/deb/KEY.gpg | apt-key add -
|
||||
- curl -SsL -o /etc/apt/sources.list.d/httpie.list https://packages.httpie.io/deb/httpie.list
|
||||
- apt update
|
||||
- apt install httpie
|
||||
upgrade:
|
||||
@ -179,3 +182,16 @@ tools:
|
||||
- yum install httpie
|
||||
upgrade:
|
||||
- yum upgrade httpie
|
||||
|
||||
single-binary:
|
||||
title: Single binary executables
|
||||
name: Single binary executables
|
||||
note: Get the standalone HTTPie Linux executables when you don't want to go through the full installation process.
|
||||
links:
|
||||
commands:
|
||||
install:
|
||||
- https --download packages.httpie.io/binaries/linux/http-latest -o http
|
||||
- ln -ls ./http ./https
|
||||
- chmod +x ./http ./https
|
||||
upgrade:
|
||||
- https --download packages.httpie.io/binaries/linux/http-latest -o http
|
||||
|
@@ -11,6 +11,9 @@ all
 # Because we use HTML to hide them on the website.
 exclude_rule 'MD002'

+# MD007 Allow unordered list indentation
+exclude_rule 'MD007'
+
 # MD013 Line length
 exclude_rule 'MD013'
@@ -12,16 +12,18 @@ You are looking at the HTTPie packaging documentation, where you will find valua

 The overall release process starts simple:

-1. Do the [PyPI](https://pypi.org/project/httpie/) publication.
-2. Then, handle company-related tasks.
-3. Finally, follow OS-specific steps, described in documents below, to send patches downstream.
+1. Bump the version identifiers in the following places:
+    - `httpie/__init__.py`
+    - `docs/packaging/windows-chocolatey/httpie.nuspec`
+    - `CHANGELOG.md`
+2. Commit your changes and make a PR against `master`.
+3. Merge the PR, and tag the last commit with your version identifier.
+4. Make a GitHub release (by copying the text in `CHANGELOG.md`).
+5. Push that release to PyPI (dispatch the `Release PyPI` GitHub action; see the sketch after this list).
+6. Once PyPI is ready, push the release to Snap, Homebrew, and Chocolatey with their respective actions.
+7. Go to the [`httpie/debian.httpie.io`](https://github.com/httpie/debian.httpie.io) repo and trigger the package index workflow.

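A hedged sketch of steps 5 and 6 with the GitHub CLI, assuming `gh` is authenticated against this repository; the version number is a placeholder, and the workflow file names are the ones added in this compare view:

```bash
# Step 5: release on PyPI from the tagged commit
gh workflow run release-pypi.yml -f branch=3.2.1

# Step 6: once PyPI is done, fan out to the platform packages
gh workflow run release-snap.yml  -f branch=3.2.1
gh workflow run release-brew.yml  -f branch=3.2.1
gh workflow run release-choco.yml -f branch=3.2.1
```
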
-## First, PyPI
-
-Let's do the release on [PyPi](https://pypi.org/project/httpie/).
-That is done quite easily by manually triggering the [release workflow](https://github.com/httpie/httpie/actions/workflows/release.yml).
-
-## Then, company-specific tasks
+## Company-specific tasks

 - Blank the `master_and_released_docs_differ_after` value in [config.json](https://github.com/httpie/httpie/blob/master/docs/config.json).
 - Update the [contributors list](../contributors).

@@ -36,10 +38,9 @@ A more complete state of deployment can be found on [repology](https://repology.
 | -------------------------------------------: | -------------- |
 | [Arch Linux, and derived](linux-arch/)       | trusted person |
 | [CentOS, RHEL, and derived](linux-centos/)   | trusted person |
-| [Debian, Ubuntu, and derived](linux-debian/) | trusted person |
 | [Fedora](linux-fedora/)                      | trusted person |
-| :construction: [Homebrew, Linuxbrew](brew/)  | **HTTPie**     |
-| :construction: [MacPorts](mac-ports/)        | **HTTPie**     |
+| [Debian, Ubuntu, and derived](linux-debian/) | **HTTPie**     |
+| [Homebrew, Linuxbrew](brew/)                 | **HTTPie**     |
 | [Snapcraft](snapcraft/)                      | **HTTPie**     |
 | [Windows — Chocolatey](windows-chocolatey/)  | **HTTPie**     |

@@ -13,21 +13,19 @@ We will discuss setting up the environment, installing development tools, instal

 ## Overall process

-:construction: Work in progress.
+The brew deployment is completely automated, and only requires a trigger to the [`Release on Homebrew`](https://github.com/httpie/httpie/actions/workflows/release-brew.yml) action
+from the release manager.

-First, update the current Formula:
+If it needs to be done manually, the following command can be used:

-```bash
-make brew-deps
-# Copy-paste content into downstream/mac/brew/httpie.rb
-git add downstream/mac/brew/httpie.rb
-git commit -s -m 'Update brew formula to XXX'
+```console
+$ brew bump-formula-pr httpie --version={TARGET_VERSION}
 ```

-That [GitHub workflow](https://github.com/httpie/httpie/actions/workflows/test-package-mac-brew.yml) will test the formula when `downstream/mac/brew/httpie.rb` is changed in a pull request.
-
-Then, open a pull request with those changes to the [downstream file](https://github.com/Homebrew/homebrew-core/blob/master/Formula/httpie.rb).
+which will bump the formula and create a PR against the package index.

 ## Hacking

-:construction: Work in progress.
+Make your changes, test the formula through the [`Test Brew Package`](https://github.com/httpie/httpie/actions/workflows/test-package-mac-brew.yml) action,
+and then finally submit your patch to [`homebrew-core`](https://github.com/Homebrew/homebrew-core).
@ -1,81 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Generate Ruby code with URLs and file hashes for packages from PyPi
|
||||
(i.e., httpie itself as well as its dependencies) to be included
|
||||
in the Homebrew formula after a new release of HTTPie has been published
|
||||
on PyPi.
|
||||
|
||||
<https://github.com/Homebrew/homebrew-core/blob/master/Formula/httpie.rb>
|
||||
|
||||
"""
|
||||
import hashlib
|
||||
import requests
|
||||
|
||||
|
||||
VERSIONS = {
|
||||
# By default, we use the latest packages. But sometimes Requests has a maximum supported versions.
|
||||
# Take a look here before making a release: <https://github.com/psf/requests/blob/master/setup.py>
|
||||
'idna': '3.2',
|
||||
}
|
||||
|
||||
|
||||
# Note: Keep that list sorted.
|
||||
PACKAGES = [
|
||||
'certifi',
|
||||
'charset-normalizer',
|
||||
'defusedxml',
|
||||
'httpie',
|
||||
'idna',
|
||||
'Pygments',
|
||||
'PySocks',
|
||||
'requests',
|
||||
'requests-toolbelt',
|
||||
'urllib3',
|
||||
'multidict',
|
||||
]
|
||||
|
||||
|
||||
def get_package_meta(package_name):
|
||||
api_url = f'https://pypi.org/pypi/{package_name}/json'
|
||||
resp = requests.get(api_url).json()
|
||||
hasher = hashlib.sha256()
|
||||
version = VERSIONS.get(package_name)
|
||||
if package_name not in VERSIONS:
|
||||
# Latest version
|
||||
release_bundle = resp['urls']
|
||||
else:
|
||||
release_bundle = resp['releases'][version]
|
||||
|
||||
for release in release_bundle:
|
||||
download_url = release['url']
|
||||
if download_url.endswith('.tar.gz'):
|
||||
hasher.update(requests.get(download_url).content)
|
||||
return {
|
||||
'name': package_name,
|
||||
'url': download_url,
|
||||
'sha256': hasher.hexdigest(),
|
||||
}
|
||||
else:
|
||||
raise RuntimeError(f'{package_name}: download not found: {resp}')
|
||||
|
||||
|
||||
def main():
|
||||
package_meta_map = {
|
||||
package_name: get_package_meta(package_name)
|
||||
for package_name in PACKAGES
|
||||
}
|
||||
httpie_meta = package_meta_map.pop('httpie')
|
||||
print()
|
||||
print(' url "{url}"'.format(url=httpie_meta['url']))
|
||||
print(' sha256 "{sha256}"'.format(sha256=httpie_meta['sha256']))
|
||||
print()
|
||||
for dep_meta in package_meta_map.values():
|
||||
print(' resource "{name}" do'.format(name=dep_meta['name']))
|
||||
print(' url "{url}"'.format(url=dep_meta['url']))
|
||||
print(' sha256 "{sha256}"'.format(sha256=dep_meta['sha256']))
|
||||
print(' end')
|
||||
print('')
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
@ -3,18 +3,18 @@ class Httpie < Formula
|
||||
|
||||
desc "User-friendly cURL replacement (command-line HTTP client)"
|
||||
homepage "https://httpie.io/"
|
||||
url "https://files.pythonhosted.org/packages/7b/f9/13070f19226b7db3641fb787df36bb715063abe1b8ca03fbaeca0f465d27/httpie-3.0.1.tar.gz"
|
||||
sha256 "0e9bc93ebdcdd2d32ec24b8fa46cf7e4fde9eec7a6bd0c5d0ef224f25d7466b2"
|
||||
url "https://files.pythonhosted.org/packages/32/85/bb095699be20cc98731261cb80884e9458178f8fef2a38273530ce77c0a5/httpie-3.1.0.tar.gz"
|
||||
sha256 "2e4a2040b84a912e65c01fb34f7aafe88cad2a3af2da8c685ca65080f376feda"
|
||||
license "BSD-3-Clause"
|
||||
head "https://github.com/httpie/httpie.git", branch: "master"
|
||||
|
||||
bottle do
|
||||
sha256 cellar: :any_skip_relocation, arm64_monterey: "9d285fcfb55ce8ed787d1b01966d51e6e07f7e77c44a204695a2d6eee9c8698d"
|
||||
sha256 cellar: :any_skip_relocation, arm64_big_sur: "743a282b475e87a4eaf11e545f761aef1b8e4bfe49eaee47251d7629a35a8ced"
|
||||
sha256 cellar: :any_skip_relocation, monterey: "5d63ea4f47b2028b2ba68abe12a4176934193e058edd869270221b41cc946c76"
|
||||
sha256 cellar: :any_skip_relocation, big_sur: "5a53221a680a35d1aa00cbadde279dbe4f562d22ed207c15bd4221cb8c3180f1"
|
||||
sha256 cellar: :any_skip_relocation, catalina: "5feadb6d76f55d6f9681682e221008c282dccf0e46ae22a959b4bad2efde204a"
|
||||
sha256 cellar: :any_skip_relocation, x86_64_linux: "d530ddbec49588b0d481f156d35f7e5bb7d3b6427d203f04750e55cd3eecc303"
|
||||
sha256 cellar: :any_skip_relocation, arm64_monterey: "9bb6e8c1ef5ba8b019ddedd7e908dd2174da695351aa9a238dfb28b0f57ef005"
|
||||
sha256 cellar: :any_skip_relocation, arm64_big_sur: "47ffccd3241155d863e1b4f6259d538a34d42a0cdeed8152bda257ee607b51be"
|
||||
sha256 cellar: :any_skip_relocation, monterey: "dc4a04cb05a9cd1bfa6a632a0e4a21975905954af54ece41f9050c52474267be"
|
||||
sha256 cellar: :any_skip_relocation, big_sur: "ae469e37864e967e0fd99fba15a78e719dcb351b462f98f3843c78ed1473df6d"
|
||||
sha256 cellar: :any_skip_relocation, catalina: "291a3eaecb2a2cc845c1652686a9a14b21053d7e3a7d0115245b2150ca2e199e"
|
||||
sha256 cellar: :any_skip_relocation, x86_64_linux: "710836e27c44c8e3ad181d668f4a9f78c4cb4c355d7b148a397599a7cd42713d"
|
||||
end
|
||||
|
||||
depends_on "python@3.10"
|
||||
@ -25,8 +25,8 @@ class Httpie < Formula
|
||||
end
|
||||
|
||||
resource "charset-normalizer" do
|
||||
url "https://files.pythonhosted.org/packages/48/44/76b179e0d1afe6e6a91fd5661c284f60238987f3b42b676d141d01cd5b97/charset-normalizer-2.0.10.tar.gz"
|
||||
sha256 "876d180e9d7432c5d1dfd4c5d26b72f099d503e8fcc0feb7532c9289be60fcbd"
|
||||
url "https://files.pythonhosted.org/packages/56/31/7bcaf657fafb3c6db8c787a865434290b726653c912085fbd371e9b92e1c/charset-normalizer-2.0.12.tar.gz"
|
||||
sha256 "2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"
|
||||
end
|
||||
|
||||
resource "defusedxml" do
|
||||
@ -40,8 +40,8 @@ class Httpie < Formula
|
||||
end
|
||||
|
||||
resource "multidict" do
|
||||
url "https://files.pythonhosted.org/packages/8e/7c/e12a69795b7b7d5071614af2c691c97fbf16a2a513c66ec52dd7d0a115bb/multidict-5.2.0.tar.gz"
|
||||
sha256 "0dd1c93edb444b33ba2274b66f63def8a327d607c6c790772f448a53b6ea59ce"
|
||||
url "https://files.pythonhosted.org/packages/fa/a7/71c253cdb8a1528802bac7503bf82fe674367e4055b09c28846fdfa4ab90/multidict-6.0.2.tar.gz"
|
||||
sha256 "5ff3bd75f38e4c43f1f470f2df7a4d430b821c4ce22be384e1459cb57d6bb013"
|
||||
end
|
||||
|
||||
resource "Pygments" do
|
||||
|
docs/packaging/brew/update.sh (new executable file, +6)

@@ -0,0 +1,6 @@
#!/bin/bash

set -xe

rm -f httpie.rb
http --download https://raw.githubusercontent.com/Homebrew/homebrew-core/master/Formula/httpie.rb
@@ -11,19 +11,16 @@ Welcome to the documentation about **packaging HTTPie for Debian GNU/Linux**.
 This document contains technical details, where we describe how to create a patch for the latest HTTPie version for Debian GNU/Linux. They apply to Ubuntu as well, and any Debian-derived distributions like MX Linux, Linux Mint, deepin, Pop!_OS, KDE neon, Zorin OS, elementary OS, Kubuntu, Devuan, Linux Lite, Peppermint OS, Lubuntu, antiX, Xubuntu, etc.
 We will discuss setting up the environment, installing development tools, installing and testing changes before submitting a patch downstream.

-The current maintainer is Bartosz Fenski.
+We create the standalone binaries (see [this page](../../../extras/packaging/linux/) for more details) and package them with
+[FPM](https://github.com/jordansissel/fpm)'s `dir` mode. The core `http`/`https` commands don't have any dependencies, but the `httpie`
+command (due to the underlying `httpie cli plugins` interface) explicitly depends on the system Python (through `python3`/`python3-pip`).

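The real invocation lives under `extras/packaging/linux`; the following is only a hedged sketch of what FPM's `dir` mode looks like (the file paths, version, and single-package layout are placeholders, not the project's actual build script):

```bash
# Map pre-built standalone binaries into a .deb; only the `httpie` helper
# needs the system Python, the core binaries ship self-contained.
fpm --input-type dir --output-type deb \
    --name httpie --version 3.2.1 \
    --depends python3 --depends python3-pip \
    artifacts/dist/http=/usr/bin/http \
    artifacts/dist/httpie=/usr/bin/httpie
```
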
 ## Overall process

-Open a new bug on the Debian Bug Tracking System by sending an email:
+The [`Release as Standalone Linux Binary`](https://github.com/httpie/httpie/actions/workflows/release-linux-standalone.yml) workflow will be automatically
+triggered when a new release is created, and it will submit the `.deb` package as a release asset.

-- To: `Debian Bug Tracking System <submit@bugs.debian.org>`
-- Subject: `httpie: Version XXX available`
-- Message template (examples [1](https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=993937), and [2](https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=996479)):
-
-```email
-Package: httpie
-Severity: normal
-
-<MESSAGE>
-```
+To make that asset available to all Debian users, the release manager needs to go to the [`httpie/debian.httpie.io`](https://github.com/httpie/debian.httpie.io) repo
+and trigger the [`Update Index`](https://github.com/httpie/debian.httpie.io/actions/workflows/update-index.yml) action. It will automatically
+scrape all new Debian packages from the release assets, properly update the indexes, and create a new PR ([an example](https://github.com/httpie/debian.httpie.io/pull/1)),
+which will become active when merged.
@@ -1,5 +1,5 @@
 Name: httpie
-Version: 3.0.2
+Version: 3.1.0
 Release: 1%{?dist}
 Summary: A Curl-like tool for humans

@@ -78,6 +78,10 @@ help2man %{buildroot}%{_bindir}/httpie > %{buildroot}%{_mandir}/man1/httpie.1


 %changelog
+* Tue Mar 08 2022 Miro Hrončok <mhroncok@redhat.com> - 3.1.0-1
+- Update to 3.1.0
+- Fixes: rhbz#2061597
+
 * Mon Jan 24 2022 Miro Hrončok <mhroncok@redhat.com> - 3.0.2-1
 - Update to 3.0.2
 - Fixes: rhbz#2044572
docs/packaging/linux-fedora/update.sh (new executable file, +6)

@@ -0,0 +1,6 @@
#!/bin/bash

set -xe

rm -f httpie.spec.txt
https --download src.fedoraproject.org/rpms/httpie/raw/rawhide/f/httpie.spec -o httpie.spec.txt
@@ -13,7 +13,16 @@ We will discuss setting up the environment, installing development tools, instal

 ## Overall process

-Trigger a new [build](https://snapcraft.io/httpie/builds), then [promote it](https://snapcraft.io/httpie/releases). If more management is needed: [revisions supervision](https://dashboard.snapcraft.io/snaps/httpie/revisions/).
+Trigger the [`Release on Snap`](https://github.com/httpie/httpie/actions/workflows/release-snap.yml) action, which will
+create a snap package for HTTPie and then push it to the Snap Store in the following channels:
+
+- Edge
+- Beta
+- Candidate
+- Stable
+
+If a push to any of them fails, all the release tasks for the following channels will be cancelled so that the
+release manager can look into the underlying cause.

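Should a channel ever need manual attention, a rough sketch with the `snapcraft` CLI (the revision number below is just a placeholder) would be:

```bash
# See which revisions sit in which channels
snapcraft status httpie

# Promote an already-uploaded revision to a more stable channel
snapcraft release httpie 618 candidate
```
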
 ## Hacking

@@ -13,13 +13,18 @@ We will discuss setting up the environment, installing development tools, instal

 ## Overall process

-After having successfully [built and tested](#hacking) the package, push it:
+After having successfully [built and tested](#hacking) the package, either trigger the
+[`Release on Chocolatey`](https://github.com/httpie/httpie/actions/workflows/release-choco.yml) action
+to push it to the `Chocolatey` store, or use the CLI:

 ```bash
 # Replace 2.5.0 with the correct version
 choco push httpie.2.5.0.nupkg -s https://push.chocolatey.org/ --api-key=API_KEY
 ```

+Be aware that it might take multiple days until the release is approved, since it goes through multiple
+sets of reviews (some of them are done manually).
+
 ## Hacking

 ```bash
@@ -2,7 +2,7 @@
 <package xmlns="http://schemas.microsoft.com/packaging/2015/06/nuspec.xsd">
   <metadata>
     <id>httpie</id>
-    <version>3.0.2</version>
+    <version>3.2.1</version>
     <summary>Modern, user-friendly command-line HTTP client for the API era</summary>
     <description>
       HTTPie *aitch-tee-tee-pie* is a user-friendly command-line HTTP client for the API era.
@@ -33,7 +33,7 @@ Main features:
     <licenseUrl>https://raw.githubusercontent.com/httpie/httpie/master/LICENSE</licenseUrl>
     <iconUrl>https://pie-assets.s3.eu-central-1.amazonaws.com/LogoIcons/GB.png</iconUrl>
     <requireLicenseAcceptance>false</requireLicenseAcceptance>
-    <releaseNotes>See the [changelog](https://httpie.io/blog/httpie-3.0.0).</releaseNotes>
+    <releaseNotes>See the [changelog](https://github.com/httpie/httpie/releases/tag/3.2.0).</releaseNotes>
     <tags>httpie http https rest api client curl python ssl cli foss oss url</tags>
     <projectUrl>https://httpie.io</projectUrl>
     <packageSourceUrl>https://github.com/httpie/httpie/tree/master/docs/packaging/windows-chocolatey</packageSourceUrl>
docs/stardust.png (new binary file, 58 KiB, not shown)
@ -1,52 +1,24 @@
|
||||
function __fish_httpie_styles
|
||||
echo "
|
||||
abap
|
||||
algol
|
||||
algol_nu
|
||||
arduino
|
||||
auto
|
||||
autumn
|
||||
borland
|
||||
bw
|
||||
colorful
|
||||
default
|
||||
emacs
|
||||
friendly
|
||||
fruity
|
||||
gruvbox-dark
|
||||
gruvbox-light
|
||||
igor
|
||||
inkpot
|
||||
lovelace
|
||||
manni
|
||||
material
|
||||
monokai
|
||||
murphy
|
||||
native
|
||||
paraiso-dark
|
||||
paraiso-light
|
||||
pastie
|
||||
perldoc
|
||||
rainbow_dash
|
||||
rrt
|
||||
sas
|
||||
solarized
|
||||
solarized-dark
|
||||
solarized-light
|
||||
stata
|
||||
stata-dark
|
||||
stata-light
|
||||
tango
|
||||
trac
|
||||
vim
|
||||
vs
|
||||
xcode
|
||||
zenburn"
|
||||
printf '%s\n' abap algol algol_nu arduino auto autumn borland bw colorful default emacs friendly fruity gruvbox-dark gruvbox-light igor inkpot lovelace manni material monokai murphy native paraiso-dark paraiso-light pastie perldoc pie pie-dark pie-light rainbow_dash rrt sas solarized solarized-dark solarized-light stata stata-dark stata-light tango trac vim vs xcode zenburn
|
||||
end
|
||||
|
||||
function __fish_httpie_mime_types
|
||||
test -r /usr/share/mime/types && cat /usr/share/mime/types
|
||||
end
|
||||
|
||||
function __fish_httpie_print_args
|
||||
set -l arg (commandline -t)
|
||||
string match -qe H "$arg" || echo -e $arg"H\trequest headers"
|
||||
string match -qe B "$arg" || echo -e $arg"B\trequest body"
|
||||
string match -qe h "$arg" || echo -e $arg"h\tresponse headers"
|
||||
string match -qe b "$arg" || echo -e $arg"b\tresponse body"
|
||||
string match -qe m "$arg" || echo -e $arg"m\tresponse metadata"
|
||||
end
|
||||
|
||||
function __fish_httpie_auth_types
|
||||
echo -e "basic\tBasic HTTP auth"
|
||||
echo -e "digest\tDigest HTTP auth"
|
||||
echo -e "bearer\tBearer HTTP Auth"
|
||||
end
|
||||
|
||||
function __fish_http_verify_options
|
||||
@ -54,6 +26,7 @@ function __fish_http_verify_options
|
||||
echo -e "no\tDisable cert verification"
|
||||
end
|
||||
|
||||
|
||||
# Predefined Content Types
|
||||
|
||||
complete -c http -s j -l json -d 'Data items are serialized as a JSON object'
|
||||
@ -70,26 +43,28 @@ complete -c http -s x -l compress -d 'Content compressed with Deflate algorithm'
|
||||
|
||||
# Output Processing
|
||||
|
||||
complete -c http -l pretty -xa "all colors format none" -d 'Controls output processing'
|
||||
complete -c http -s s -l style -xa "(__fish_httpie_styles)" -d 'Output coloring style'
|
||||
complete -c http -l unsorted -d 'Disables all sorting while formatting output'
|
||||
complete -c http -l sorted -d 'Re-enables all sorting options while formatting output'
|
||||
complete -c http -l format-options -x -d 'Controls output formatting'
|
||||
complete -c http -l pretty -xa "all colors format none" -d 'Controls output processing'
|
||||
complete -c http -s s -l style -xa "(__fish_httpie_styles)" -d 'Output coloring style'
|
||||
complete -c http -l unsorted -d 'Disables all sorting while formatting output'
|
||||
complete -c http -l sorted -d 'Re-enables all sorting options while formatting output'
|
||||
complete -c http -l response-charset -x -d 'Override the response encoding'
|
||||
complete -c http -l response-mime -xa "(__fish_httpie_mime_types)" -d 'Override the response mime type for coloring and formatting'
|
||||
complete -c http -l format-options -x -d 'Controls output formatting'
|
||||
|
||||
|
||||
# Output Options
|
||||
|
||||
complete -c http -s p -l print -x -d 'String specifying what the output should contain'
|
||||
complete -c http -s h -l headers -d 'Print only the response headers'
|
||||
complete -c http -s b -l body -d 'Print only the response body'
|
||||
complete -c http -s v -l verbose -d 'Print the whole request as well as the response'
|
||||
complete -c http -l all -d 'Show any intermediary requests/responses'
|
||||
complete -c http -s P -l history-print -x -d 'The same as --print but applies only to intermediary requests/responses'
|
||||
complete -c http -s S -l stream -d 'Always stream the response body by line'
|
||||
complete -c http -s o -l output -F -d 'Save output to FILE'
|
||||
complete -c http -s d -l download -d 'Download a file'
|
||||
complete -c http -s c -l continue -d 'Resume an interrupted download'
|
||||
complete -c http -s q -l quiet -d 'Do not print to stdout or stderr'
|
||||
complete -c http -s p -l print -xa "(__fish_httpie_print_args)" -d 'String specifying what the output should contain'
|
||||
complete -c http -s h -l headers -d 'Print only the response headers'
|
||||
complete -c http -s m -l meta -d 'Print only the response metadata'
|
||||
complete -c http -s b -l body -d 'Print only the response body'
|
||||
complete -c http -s v -l verbose -d 'Print the whole request as well as the response'
|
||||
complete -c http -l all -d 'Show any intermediary requests/responses'
|
||||
complete -c http -s S -l stream -d 'Always stream the response body by line'
|
||||
complete -c http -s o -l output -F -d 'Save output to FILE'
|
||||
complete -c http -s d -l download -d 'Download a file'
|
||||
complete -c http -s c -l continue -d 'Resume an interrupted download'
|
||||
complete -c http -s q -l quiet -d 'Do not print to stdout or stderr'
|
||||
|
||||
|
||||
# Sessions
|
||||
@ -115,22 +90,24 @@ complete -c http -l max-headers -x -d 'Maximum number of response headers
|
||||
complete -c http -l timeout -x -d 'Connection timeout in seconds'
|
||||
complete -c http -l check-status -d 'Error with non-200 HTTP status code'
|
||||
complete -c http -l path-as-is -d 'Bypass dot segment URL squashing'
|
||||
complete -c http -l chunked -d ''
|
||||
complete -c http -l chunked -d 'Enable streaming via chunked transfer encoding'
|
||||
|
||||
|
||||
# SSL
|
||||
|
||||
complete -c http -l verify -xa "(__fish_http_verify_options)" -d 'Enable/disable cert verification'
|
||||
complete -c http -l ssl -x -d 'Desired protocol version to use'
|
||||
complete -c http -l ciphers -x -d 'String in the OpenSSL cipher list format'
|
||||
complete -c http -l cert -F -d 'Client side SSL certificate'
|
||||
complete -c http -l cert-key -F -d 'Private key to use with SSL'
|
||||
complete -c http -l verify -xa "(__fish_http_verify_options)" -d 'Enable/disable cert verification'
|
||||
complete -c http -l ssl -x -d 'Desired protocol version to use'
|
||||
complete -c http -l ciphers -x -d 'String in the OpenSSL cipher list format'
|
||||
complete -c http -l cert -F -d 'Client side SSL certificate'
|
||||
complete -c http -l cert-key -F -d 'Private key to use with SSL'
|
||||
complete -c http -l cert-key-pass -x -d 'Passphrase for the given private key'
|
||||
|
||||
|
||||
# Troubleshooting
|
||||
|
||||
complete -c http -s I -l ignore-stdin -d 'Do not attempt to read stdin'
|
||||
complete -c http -l help -d 'Show help'
|
||||
complete -c http -l manual -d 'Show the full manual'
|
||||
complete -c http -l version -d 'Show version'
|
||||
complete -c http -l traceback -d 'Prints exception traceback should one occur'
|
||||
complete -c http -l default-scheme -x -d 'The default scheme to use'
|
||||
|
598
extras/man/http.1
Normal file
@ -0,0 +1,598 @@
|
||||
.\" This file is auto-generated from the parser declaration in httpie/cli/definition.py by extras/scripts/generate_man_pages.py.
|
||||
.TH http 1 "2022-05-06" "HTTPie 3.2.1" "HTTPie Manual"
|
||||
.SH NAME
|
||||
http
|
||||
.SH SYNOPSIS
|
||||
http [METHOD] URL [REQUEST_ITEM ...]
|
||||
|
||||
.SH DESCRIPTION
|
||||
HTTPie: modern, user-friendly command-line HTTP client for the API era. <https://httpie.io>
|
||||
.SH Positional arguments
|
||||
|
||||
These arguments come after any flags and in the order they are listed here.
|
||||
Only URL is required.
|
||||
|
||||
.IP "\fB\,METHOD\/\fR"
|
||||
|
||||
|
||||
The HTTP method to be used for the request (GET, POST, PUT, DELETE, ...).
|
||||
|
||||
This argument can be omitted in which case HTTPie will use POST if there
|
||||
is some data to be sent, otherwise GET:
|
||||
|
||||
$ http example.org # => GET
|
||||
$ http example.org hello=world # => POST
|
||||
|
||||
|
||||
|
||||
.IP "\fB\,URL\/\fR"
|
||||
|
||||
|
||||
The request URL. Scheme defaults to \[aq]http://\[aq] if the URL
|
||||
does not include one. (You can override this with: \fB\,--default-scheme\/\fR=http/https)
|
||||
|
||||
You can also use a shorthand for localhost
|
||||
|
||||
$ http :3000 # => http://localhost:3000
|
||||
$ http :/foo # => http://localhost/foo
|
||||
|
||||
|
||||
|
||||
.IP "\fB\,REQUEST_ITEM\/\fR"
|
||||
|
||||
|
||||
Optional key-value pairs to be included in the request. The separator used
|
||||
determines the type:
|
||||
|
||||
\[aq]:\[aq] HTTP headers:
|
||||
|
||||
Referer:https://httpie.io Cookie:foo=bar User-Agent:bacon/1.0
|
||||
|
||||
\[aq]==\[aq] URL parameters to be appended to the request URI:
|
||||
|
||||
search==httpie
|
||||
|
||||
\[aq]=\[aq] Data fields to be serialized into a JSON object (with \fB\,--json\/\fR, \fB\,-j\/\fR)
|
||||
or form data (with \fB\,--form\/\fR, \fB\,-f\/\fR):
|
||||
|
||||
name=HTTPie language=Python description=\[aq]CLI HTTP client\[aq]
|
||||
|
||||
\[aq]:=\[aq] Non-string JSON data fields (only with \fB\,--json\/\fR, \fB\,-j\/\fR):
|
||||
|
||||
awesome:=true amount:=42 colors:=\[aq][\[dq]red\[dq], \[dq]green\[dq], \[dq]blue\[dq]]\[aq]
|
||||
|
||||
\[aq]@\[aq] Form file fields (only with \fB\,--form\/\fR or \fB\,--multipart\/\fR):
|
||||
|
||||
cv@\(ti/Documents/CV.pdf
|
||||
cv@\[aq]\(ti/Documents/CV.pdf;type=application/pdf\[aq]
|
||||
|
||||
\[aq]=@\[aq] A data field like \[aq]=\[aq], but takes a file path and embeds its content:
|
||||
|
||||
essay=@Documents/essay.txt
|
||||
|
||||
\[aq]:=@\[aq] A raw JSON field like \[aq]:=\[aq], but takes a file path and embeds its content:
|
||||
|
||||
package:=@./package.json
|
||||
|
||||
You can use a backslash to escape a colliding separator in the field name:
|
||||
|
||||
field-name-with\e:colon=value
|
||||
|
||||
|
||||
|
||||
.PP
|
||||
.SH Predefined content types
|
||||
.IP "\fB\,--json\/\fR, \fB\,-j\/\fR"
|
||||
|
||||
|
||||
(default) Data items from the command line are serialized as a JSON object.
|
||||
The Content-Type and Accept headers are set to application/json
|
||||
(if not specified).
|
||||
|
||||
|
||||
|
||||
.IP "\fB\,--form\/\fR, \fB\,-f\/\fR"
|
||||
|
||||
|
||||
Data items from the command line are serialized as form fields.
|
||||
|
||||
The Content-Type is set to application/x-www-form-urlencoded (if not
|
||||
specified). The presence of any file fields results in a
|
||||
multipart/form-data request.
|
||||
|
||||
|
||||
|
||||
.IP "\fB\,--multipart\/\fR"
|
||||
|
||||
|
||||
Similar to \fB\,--form\/\fR, but always sends a multipart/form-data request (i.e., even without files).
|
||||
|
||||
|
||||
.IP "\fB\,--boundary\/\fR"
|
||||
|
||||
|
||||
Specify a custom boundary string for multipart/form-data requests. Only has effect together with \fB\,--form\/\fR.
|
||||
|
||||
|
||||
.IP "\fB\,--raw\/\fR"
|
||||
|
||||
|
||||
This option allows you to pass raw request data without extra processing
|
||||
(as opposed to the structured request items syntax):
|
||||
|
||||
$ http \fB\,--raw\/\fR=\[aq]data\[aq] pie.dev/post
|
||||
|
||||
You can achieve the same by piping the data via stdin:
|
||||
|
||||
$ echo data | http pie.dev/post
|
||||
|
||||
Or have HTTPie load the raw data from a file:
|
||||
|
||||
$ http pie.dev/post @data.txt
|
||||
|
||||
|
||||
|
||||
|
||||
.PP
|
||||
.SH Content processing options
|
||||
.IP "\fB\,--compress\/\fR, \fB\,-x\/\fR"
|
||||
|
||||
|
||||
Content compressed (encoded) with Deflate algorithm.
|
||||
The Content-Encoding header is set to deflate.
|
||||
|
||||
Compression is skipped if it appears that compression ratio is
|
||||
negative. Compression can be forced by repeating the argument.
|
||||
|
||||
|
||||
|
||||
.PP
|
||||
.SH Output processing
|
||||
.IP "\fB\,--pretty\/\fR"
|
||||
|
||||
|
||||
Controls output processing. The value can be \[dq]none\[dq] to not prettify
|
||||
the output (default for redirected output), \[dq]all\[dq] to apply both colors
|
||||
and formatting (default for terminal output), \[dq]colors\[dq], or \[dq]format\[dq].
|
||||
|
||||
|
||||
|
||||
.IP "\fB\,--style\/\fR, \fB\,-s\/\fR \fI\,STYLE\/\fR"
|
||||
|
||||
|
||||
Output coloring style (default is \[dq]auto\[dq]). It can be one of:
|
||||
|
||||
auto, pie, pie-dark, pie-light, solarized
|
||||
|
||||
|
||||
For finding out all available styles in your system, try:
|
||||
|
||||
$ http \fB\,--style\/\fR
|
||||
|
||||
The \[dq]auto\[dq] style follows your terminal\[aq]s ANSI color styles.
|
||||
For non-auto styles to work properly, please make sure that the
|
||||
$TERM environment variable is set to \[dq]xterm-256color\[dq] or similar
|
||||
(e.g., via `export TERM=xterm-256color\[aq] in your \(ti/.bashrc).
|
||||
|
||||
.IP "\fB\,--unsorted\/\fR"
|
||||
|
||||
|
||||
Disables all sorting while formatting output. It is a shortcut for:
|
||||
|
||||
\fB\,--format-options\/\fR=headers.sort:false,json.sort_keys:false
|
||||
|
||||
|
||||
|
||||
.IP "\fB\,--sorted\/\fR"
|
||||
|
||||
|
||||
Re-enables all sorting options while formatting output. It is a shortcut for:
|
||||
|
||||
\fB\,--format-options\/\fR=headers.sort:true,json.sort_keys:true
|
||||
|
||||
|
||||
|
||||
.IP "\fB\,--response-charset\/\fR \fI\,ENCODING\/\fR"
|
||||
|
||||
|
||||
Override the response encoding for terminal display purposes, e.g.:
|
||||
|
||||
\fB\,--response-charset\/\fR=utf8
|
||||
\fB\,--response-charset\/\fR=big5
|
||||
|
||||
|
||||
|
||||
.IP "\fB\,--response-mime\/\fR \fI\,MIME_TYPE\/\fR"
|
||||
|
||||
|
||||
Override the response mime type for coloring and formatting for the terminal, e.g.:
|
||||
|
||||
\fB\,--response-mime\/\fR=application/json
|
||||
\fB\,--response-mime\/\fR=text/xml
|
||||
|
||||
|
||||
|
||||
.IP "\fB\,--format-options\/\fR"
|
||||
|
||||
|
||||
Controls output formatting. Only relevant when formatting is enabled
|
||||
through (explicit or implied) \fB\,--pretty\/\fR=all or \fB\,--pretty\/\fR=format.
|
||||
The following are the default options:
|
||||
|
||||
headers.sort:true
|
||||
json.format:true
|
||||
json.indent:4
|
||||
json.sort_keys:true
|
||||
xml.format:true
|
||||
xml.indent:2
|
||||
|
||||
You may use this option multiple times, as well as specify multiple
|
||||
comma-separated options at the same time. For example, this modifies the
|
||||
settings to disable the sorting of JSON keys, and sets the indent size to 2:
|
||||
|
||||
\fB\,--format-options\/\fR json.sort_keys:false,json.indent:2
|
||||
|
||||
This is something you will typically put into your config file.
|
||||
|
||||
|
||||
|
||||
.PP
|
||||
.SH Output options
|
||||
.IP "\fB\,--print\/\fR, \fB\,-p\/\fR \fI\,WHAT\/\fR"
|
||||
|
||||
|
||||
String specifying what the output should contain:
|
||||
|
||||
\[aq]H\[aq] request headers
|
||||
\[aq]B\[aq] request body
|
||||
\[aq]h\[aq] response headers
|
||||
\[aq]b\[aq] response body
|
||||
\[aq]m\[aq] response metadata
|
||||
|
||||
The default behaviour is \[aq]hb\[aq] (i.e., the response
|
||||
headers and body are printed), if standard output is not redirected.
|
||||
If the output is piped to another program or to a file, then only the
|
||||
response body is printed by default.
|
||||
|
||||
|
||||
|
||||
.IP "\fB\,--headers\/\fR, \fB\,-h\/\fR"
|
||||
|
||||
|
||||
Print only the response headers. Shortcut for \fB\,--print\/\fR=h.
|
||||
|
||||
|
||||
|
||||
.IP "\fB\,--meta\/\fR, \fB\,-m\/\fR"
|
||||
|
||||
|
||||
Print only the response metadata. Shortcut for \fB\,--print\/\fR=m.
|
||||
|
||||
|
||||
|
||||
.IP "\fB\,--body\/\fR, \fB\,-b\/\fR"
|
||||
|
||||
|
||||
Print only the response body. Shortcut for \fB\,--print\/\fR=b.
|
||||
|
||||
|
||||
|
||||
.IP "\fB\,--verbose\/\fR, \fB\,-v\/\fR"
|
||||
|
||||
|
||||
Verbose output. For the level one (with single `\fB\,-v\/\fR`/`\fB\,--verbose\/\fR`), print
|
||||
the whole request as well as the response. Also print any intermediary
|
||||
requests/responses (such as redirects). For the second level and higher,
|
||||
print these as well as the response metadata.
|
||||
|
||||
Level one is a shortcut for: \fB\,--all\/\fR \fB\,--print\/\fR=BHbh
|
||||
Level two is a shortcut for: \fB\,--all\/\fR \fB\,--print\/\fR=BHbhm
|
||||
|
||||
|
||||
.IP "\fB\,--all\/\fR"
|
||||
|
||||
|
||||
By default, only the final request/response is shown. Use this flag to show
|
||||
any intermediary requests/responses as well. Intermediary requests include
|
||||
followed redirects (with \fB\,--follow\/\fR), the first unauthorized request when
|
||||
Digest auth is used (\fB\,--auth\/\fR=digest), etc.
|
||||
|
||||
|
||||
|
||||
.IP "\fB\,--stream\/\fR, \fB\,-S\/\fR"
|
||||
|
||||
|
||||
Always stream the response body by line, i.e., behave like `tail \fB\,-f\/\fR\[aq].
|
||||
|
||||
Without \fB\,--stream\/\fR and with \fB\,--pretty\/\fR (either set or implied),
|
||||
HTTPie fetches the whole response before it outputs the processed data.
|
||||
|
||||
Set this option when you want to continuously display a prettified
|
||||
long-lived response, such as one from the Twitter streaming API.
|
||||
|
||||
It is useful also without \fB\,--pretty\/\fR: It ensures that the output is flushed
|
||||
more often and in smaller chunks.
|
||||
|
||||
|
||||
|
||||
.IP "\fB\,--output\/\fR, \fB\,-o\/\fR \fI\,FILE\/\fR"
|
||||
|
||||
|
||||
Save output to FILE instead of stdout. If \fB\,--download\/\fR is also set, then only
|
||||
the response body is saved to FILE. Other parts of the HTTP exchange are
|
||||
printed to stderr.
|
||||
|
||||
|
||||
|
||||
.IP "\fB\,--download\/\fR, \fB\,-d\/\fR"
|
||||
|
||||
|
||||
Do not print the response body to stdout. Rather, download it and store it
|
||||
in a file. The filename is guessed unless specified with \fB\,--output\/\fR
|
||||
[filename]. This action is similar to the default behaviour of wget.
|
||||
|
||||
|
||||
|
||||
.IP "\fB\,--continue\/\fR, \fB\,-c\/\fR"
|
||||
|
||||
|
||||
Resume an interrupted download. Note that the \fB\,--output\/\fR option needs to be
|
||||
specified as well.
|
||||
|
||||
|
||||
|
||||
.IP "\fB\,--quiet\/\fR, \fB\,-q\/\fR"
|
||||
|
||||
|
||||
Do not print to stdout or stderr, except for errors and warnings when provided once.
|
||||
Provide twice to suppress warnings as well.
|
||||
stdout is still redirected if \fB\,--output\/\fR is specified.
|
||||
Flag doesn\[aq]t affect behaviour of download beyond not printing to terminal.
|
||||
|
||||
|
||||
|
||||
.PP
|
||||
.SH Sessions
|
||||
.IP "\fB\,--session\/\fR \fI\,SESSION_NAME_OR_PATH\/\fR"
|
||||
|
||||
|
||||
Create, or reuse and update a session. Within a session, custom headers,
|
||||
auth credential, as well as any cookies sent by the server persist between
|
||||
requests.
|
||||
|
||||
Session files are stored in:
|
||||
|
||||
[HTTPIE_CONFIG_DIR]/<HOST>/<SESSION_NAME>.json.
|
||||
|
||||
See the following page to find out your default HTTPIE_CONFIG_DIR:
|
||||
|
||||
https://httpie.io/docs/cli/config-file-directory
|
||||
|
||||
|
||||
.IP "\fB\,--session-read-only\/\fR \fI\,SESSION_NAME_OR_PATH\/\fR"
|
||||
|
||||
|
||||
Create or read a session without updating it from the request/response
|
||||
exchange.
|
||||
|
||||
|
||||
|
||||
.PP
|
||||
.SH Authentication
|
||||
.IP "\fB\,--auth\/\fR, \fB\,-a\/\fR \fI\,USER[:PASS] | TOKEN\/\fR"
|
||||
|
||||
|
||||
For username/password based authentication mechanisms (e.g.,
|
||||
basic auth or digest auth), if only the username is provided
|
||||
(\fB\,-a\/\fR username), HTTPie will prompt for the password.
|
||||
|
||||
|
||||
|
||||
.IP "\fB\,--auth-type\/\fR, \fB\,-A\/\fR"
|
||||
|
||||
|
||||
The authentication mechanism to be used. Defaults to \[dq]basic\[dq].
|
||||
|
||||
\[dq]basic\[dq]: Basic HTTP auth
|
||||
|
||||
\[dq]digest\[dq]: Digest HTTP auth
|
||||
|
||||
\[dq]bearer\[dq]: Bearer HTTP Auth
|
||||
|
||||
For finding out all available authentication types in your system, try:
|
||||
|
||||
$ http \fB\,--auth-type\/\fR
|
||||
|
||||
.IP "\fB\,--ignore-netrc\/\fR"
|
||||
|
||||
|
||||
Ignore credentials from .netrc.
|
||||
|
||||
|
||||
.PP
|
||||
.SH Network
|
||||
.IP "\fB\,--offline\/\fR"
|
||||
|
||||
|
||||
Build the request and print it but don\(gat actually send it.
|
||||
|
||||
|
||||
.IP "\fB\,--proxy\/\fR \fI\,PROTOCOL:PROXY_URL\/\fR"
|
||||
|
||||
|
||||
String mapping protocol to the URL of the proxy
|
||||
(e.g. http:http://foo.bar:3128). You can specify multiple proxies with
|
||||
different protocols. The environment variables $ALL_PROXY, $HTTP_PROXY,
|
||||
and $HTTPS_proxy are supported as well.
|
||||
|
||||
|
||||
|
||||
.IP "\fB\,--follow\/\fR, \fB\,-F\/\fR"
|
||||
|
||||
|
||||
Follow 30x Location redirects.
|
||||
|
||||
|
||||
.IP "\fB\,--max-redirects\/\fR"
|
||||
|
||||
|
||||
By default, requests have a limit of 30 redirects (works with \fB\,--follow\/\fR).
|
||||
|
||||
|
||||
|
||||
.IP "\fB\,--max-headers\/\fR"
|
||||
|
||||
|
||||
The maximum number of response headers to be read before giving up (default 0, i.e., no limit).
|
||||
|
||||
|
||||
.IP "\fB\,--timeout\/\fR \fI\,SECONDS\/\fR"
|
||||
|
||||
|
||||
The connection timeout of the request in seconds.
|
||||
The default value is 0, i.e., there is no timeout limit.
|
||||
This is not a time limit on the entire response download;
|
||||
rather, an error is reported if the server has not issued a response for
|
||||
timeout seconds (more precisely, if no bytes have been received on
|
||||
the underlying socket for timeout seconds).
|
||||
|
||||
|
||||
|
||||
.IP "\fB\,--check-status\/\fR"
|
||||
|
||||
|
||||
By default, HTTPie exits with 0 when no network or other fatal errors
|
||||
occur. This flag instructs HTTPie to also check the HTTP status code and
|
||||
exit with an error if the status indicates one.
|
||||
|
||||
When the server replies with a 4xx (Client Error) or 5xx (Server Error)
|
||||
status code, HTTPie exits with 4 or 5 respectively. If the response is a
|
||||
3xx (Redirect) and \fB\,--follow\/\fR hasn\[aq]t been set, then the exit status is 3.
|
||||
Also an error message is written to stderr if stdout is redirected.
|
||||
|
||||
|
||||
|
||||
.IP "\fB\,--path-as-is\/\fR"
|
||||
|
||||
|
||||
Bypass dot segment (/../ or /./) URL squashing.
|
||||
|
||||
|
||||
.IP "\fB\,--chunked\/\fR"
|
||||
|
||||
|
||||
Enable streaming via chunked transfer encoding. The Transfer-Encoding header is set to chunked.
|
||||
|
||||
|
||||
.PP
|
||||
.SH SSL
|
||||
.IP "\fB\,--verify\/\fR"
|
||||
|
||||
|
||||
Set to \[dq]no\[dq] (or \[dq]false\[dq]) to skip checking the host\[aq]s SSL certificate.
|
||||
Defaults to \[dq]yes\[dq] (\[dq]true\[dq]). You can also pass the path to a CA_BUNDLE file
|
||||
for private certs. (Or you can set the REQUESTS_CA_BUNDLE environment
|
||||
variable instead.)
|
||||
|
||||
|
||||
.IP "\fB\,--ssl\/\fR"
|
||||
|
||||
|
||||
The desired protocol version to use. This will default to
|
||||
SSL v2.3 which will negotiate the highest protocol that both
|
||||
the server and your installation of OpenSSL support. Available protocols
|
||||
may vary depending on OpenSSL installation (only the supported ones
|
||||
are shown here).
|
||||
|
||||
|
||||
|
||||
.IP "\fB\,--ciphers\/\fR"
|
||||
|
||||
|
||||
|
||||
A string in the OpenSSL cipher list format. By default, the following
|
||||
is used:
|
||||
|
||||
ECDHE+AESGCM:ECDHE+CHACHA20:DHE+AESGCM:DHE+CHACHA20:ECDH+AESGCM:DH+AESGCM:ECDH+AES:DH+AES:RSA+AESGCM:RSA+AES:!aNULL:!eNULL:!MD5:!DSS
|
||||
|
||||
|
||||
|
||||
.IP "\fB\,--cert\/\fR"
|
||||
|
||||
|
||||
You can specify a local cert to use as client side SSL certificate.
|
||||
This file may either contain both private key and certificate or you may
|
||||
specify \fB\,--cert-key\/\fR separately.
|
||||
|
||||
|
||||
|
||||
.IP "\fB\,--cert-key\/\fR"
|
||||
|
||||
|
||||
The private key to use with SSL. Only needed if \fB\,--cert\/\fR is given and the
|
||||
certificate file does not contain the private key.
|
||||
|
||||
|
||||
|
||||
.IP "\fB\,--cert-key-pass\/\fR"
|
||||
|
||||
|
||||
The passphrase to be used with the given private key. Only needed if \fB\,--cert-key\/\fR
|
||||
is given and the key file requires a passphrase.
|
||||
If not provided, you\(gall be prompted interactively.
|
||||
|
||||
|
||||
.PP
|
||||
.SH Troubleshooting
|
||||
.IP "\fB\,--ignore-stdin\/\fR, \fB\,-I\/\fR"
|
||||
|
||||
|
||||
Do not attempt to read stdin.
|
||||
|
||||
|
||||
.IP "\fB\,--help\/\fR"
|
||||
|
||||
|
||||
Show this help message and exit.
|
||||
|
||||
|
||||
.IP "\fB\,--manual\/\fR"
|
||||
|
||||
|
||||
Show the full manual.
|
||||
|
||||
|
||||
.IP "\fB\,--version\/\fR"
|
||||
|
||||
|
||||
Show version and exit.
|
||||
|
||||
|
||||
.IP "\fB\,--traceback\/\fR"
|
||||
|
||||
|
||||
Prints the exception traceback should one occur.
|
||||
|
||||
|
||||
.IP "\fB\,--default-scheme\/\fR"
|
||||
|
||||
|
||||
The default scheme to use if not specified in the URL.
|
||||
|
||||
|
||||
.IP "\fB\,--debug\/\fR"
|
||||
|
||||
|
||||
Prints the exception traceback should one occur, as well as other
|
||||
information useful for debugging HTTPie itself and for reporting bugs.
|
||||
|
||||
|
||||
|
||||
.PP
|
||||
.SH SEE ALSO
|
||||
|
||||
For every \fB\,--OPTION\/\fR there is also a \fB\,--no-OPTION\/\fR that reverts OPTION
|
||||
to its default value.
|
||||
|
||||
Suggestions and bug reports are greatly appreciated:
|
||||
https://github.com/httpie/httpie/issues
|
100
extras/man/httpie.1
Normal file
@ -0,0 +1,100 @@
|
||||
.\" This file is auto-generated from the parser declaration in httpie/manager/cli.py by extras/scripts/generate_man_pages.py.
|
||||
.TH httpie 1 "2022-05-06" "HTTPie 3.2.1" "HTTPie Manual"
|
||||
.SH NAME
|
||||
httpie
|
||||
.SH SYNOPSIS
|
||||
httpie
|
||||
.SH DESCRIPTION
|
||||
|
||||
Managing interface for the HTTPie itself. <https://httpie.io/docs#manager>
|
||||
|
||||
Be aware that you might be looking for http/https commands for sending
|
||||
HTTP requests. This command is only available for managing the HTTPie
|
||||
plugins and the configuration around it.
|
||||
|
||||
|
||||
If you are looking for the man pages of http/https commands, try one of the following:
|
||||
$ man http
|
||||
$ man https
|
||||
|
||||
|
||||
.SH httpie cli export-args
|
||||
Export available options for the CLI
|
||||
.IP "\fB\,-f\/\fR, \fB\,--format\/\fR"
|
||||
|
||||
Format to export in.
|
||||
|
||||
.PP
|
||||
.SH httpie cli check-updates
|
||||
Check for updates
|
||||
.PP
|
||||
.SH httpie cli sessions upgrade
|
||||
Upgrade the given HTTPie session with the latest layout. A list of changes between different session versions can be found in the official documentation.
|
||||
.IP "\fB\,HOSTNAME\/\fR"
|
||||
|
||||
The host this session belongs to.
|
||||
|
||||
.IP "\fB\,SESSION_NAME_OR_PATH\/\fR"
|
||||
|
||||
The name or the path for the session that will be upgraded.
|
||||
|
||||
.IP "\fB\,--bind-cookies\/\fR"
|
||||
|
||||
Bind domainless cookies to the host that the session belongs to.
|
||||
|
||||
.PP
|
||||
.SH httpie cli sessions upgrade-all
|
||||
Upgrade all named sessions with the latest layout. A list of changes between different session versions can be found in the official documentation.
|
||||
.IP "\fB\,--bind-cookies\/\fR"
|
||||
|
||||
Bind domainless cookies to the host that the session belongs to.
|
||||
|
||||
.PP
|
||||
.SH httpie cli plugins install
|
||||
Install the given targets from PyPI or from local paths.
|
||||
.IP "\fB\,TARGET\/\fR"
|
||||
|
||||
targets to install
|
||||
|
||||
.PP
|
||||
.SH httpie cli plugins upgrade
|
||||
Upgrade the given plugins
|
||||
.IP "\fB\,TARGET\/\fR"
|
||||
|
||||
targets to upgrade
|
||||
|
||||
.PP
|
||||
.SH httpie cli plugins uninstall
|
||||
Uninstall the given HTTPie plugins.
|
||||
.IP "\fB\,TARGET\/\fR"
|
||||
|
||||
targets to uninstall
|
||||
|
||||
.PP
|
||||
.SH httpie cli plugins list
|
||||
List all installed HTTPie plugins.
|
||||
.PP
|
||||
.SH httpie plugins install
|
||||
Install the given targets from PyPI or from local paths.
|
||||
.IP "\fB\,TARGET\/\fR"
|
||||
|
||||
targets to install
|
||||
|
||||
.PP
|
||||
.SH httpie plugins upgrade
|
||||
Upgrade the given plugins
|
||||
.IP "\fB\,TARGET\/\fR"
|
||||
|
||||
targets to upgrade
|
||||
|
||||
.PP
|
||||
.SH httpie plugins uninstall
|
||||
Uninstall the given HTTPie plugins.
|
||||
.IP "\fB\,TARGET\/\fR"
|
||||
|
||||
targets to uninstall
|
||||
|
||||
.PP
|
||||
.SH httpie plugins list
|
||||
List all installed HTTPie plugins.
|
||||
.PP
|
598
extras/man/https.1
Normal file
@ -0,0 +1,598 @@
|
||||
.\" This file is auto-generated from the parser declaration in httpie/cli/definition.py by extras/scripts/generate_man_pages.py.
|
||||
.TH https 1 "2022-05-06" "HTTPie 3.2.1" "HTTPie Manual"
|
||||
.SH NAME
|
||||
https
|
||||
.SH SYNOPSIS
|
||||
https [METHOD] URL [REQUEST_ITEM ...]
|
||||
|
||||
.SH DESCRIPTION
|
||||
HTTPie: modern, user-friendly command-line HTTP client for the API era. <https://httpie.io>
|
||||
.SH Positional arguments
|
||||
|
||||
These arguments come after any flags and in the order they are listed here.
|
||||
Only URL is required.
|
||||
|
||||
.IP "\fB\,METHOD\/\fR"
|
||||
|
||||
|
||||
The HTTP method to be used for the request (GET, POST, PUT, DELETE, ...).
|
||||
|
||||
This argument can be omitted in which case HTTPie will use POST if there
|
||||
is some data to be sent, otherwise GET:
|
||||
|
||||
$ http example.org # => GET
|
||||
$ http example.org hello=world # => POST
|
||||
|
||||
|
||||
|
||||
.IP "\fB\,URL\/\fR"
|
||||
|
||||
|
||||
The request URL. Scheme defaults to \[aq]http://\[aq] if the URL
|
||||
does not include one. (You can override this with: \fB\,--default-scheme\/\fR=http/https)
|
||||
|
||||
You can also use a shorthand for localhost
|
||||
|
||||
$ http :3000 # => http://localhost:3000
|
||||
$ http :/foo # => http://localhost/foo
|
||||
|
||||
|
||||
|
||||
.IP "\fB\,REQUEST_ITEM\/\fR"
|
||||
|
||||
|
||||
Optional key-value pairs to be included in the request. The separator used
|
||||
determines the type:
|
||||
|
||||
\[aq]:\[aq] HTTP headers:
|
||||
|
||||
Referer:https://httpie.io Cookie:foo=bar User-Agent:bacon/1.0
|
||||
|
||||
\[aq]==\[aq] URL parameters to be appended to the request URI:
|
||||
|
||||
search==httpie
|
||||
|
||||
\[aq]=\[aq] Data fields to be serialized into a JSON object (with \fB\,--json\/\fR, \fB\,-j\/\fR)
|
||||
or form data (with \fB\,--form\/\fR, \fB\,-f\/\fR):
|
||||
|
||||
name=HTTPie language=Python description=\[aq]CLI HTTP client\[aq]
|
||||
|
||||
\[aq]:=\[aq] Non-string JSON data fields (only with \fB\,--json\/\fR, \fB\,-j\/\fR):
|
||||
|
||||
awesome:=true amount:=42 colors:=\[aq][\[dq]red\[dq], \[dq]green\[dq], \[dq]blue\[dq]]\[aq]
|
||||
|
||||
\[aq]@\[aq] Form file fields (only with \fB\,--form\/\fR or \fB\,--multipart\/\fR):
|
||||
|
||||
cv@\(ti/Documents/CV.pdf
|
||||
cv@\[aq]\(ti/Documents/CV.pdf;type=application/pdf\[aq]
|
||||
|
||||
\[aq]=@\[aq] A data field like \[aq]=\[aq], but takes a file path and embeds its content:
|
||||
|
||||
essay=@Documents/essay.txt
|
||||
|
||||
\[aq]:=@\[aq] A raw JSON field like \[aq]:=\[aq], but takes a file path and embeds its content:
|
||||
|
||||
package:=@./package.json
|
||||
|
||||
You can use a backslash to escape a colliding separator in the field name:
|
||||
|
||||
field-name-with\e:colon=value
|
||||
|
||||
|
||||
|
||||
.PP
|
||||
.SH Predefined content types
|
||||
.IP "\fB\,--json\/\fR, \fB\,-j\/\fR"
|
||||
|
||||
|
||||
(default) Data items from the command line are serialized as a JSON object.
|
||||
The Content-Type and Accept headers are set to application/json
|
||||
(if not specified).
|
||||
|
||||
|
||||
|
||||
.IP "\fB\,--form\/\fR, \fB\,-f\/\fR"
|
||||
|
||||
|
||||
Data items from the command line are serialized as form fields.
|
||||
|
||||
The Content-Type is set to application/x-www-form-urlencoded (if not
|
||||
specified). The presence of any file fields results in a
|
||||
multipart/form-data request.
|
||||
|
||||
|
||||
|
||||
.IP "\fB\,--multipart\/\fR"
|
||||
|
||||
|
||||
Similar to \fB\,--form\/\fR, but always sends a multipart/form-data request (i.e., even without files).
|
||||
|
||||
|
||||
.IP "\fB\,--boundary\/\fR"
|
||||
|
||||
|
||||
Specify a custom boundary string for multipart/form-data requests. Only has effect together with \fB\,--form\/\fR.
|
||||
|
||||
|
||||
.IP "\fB\,--raw\/\fR"
|
||||
|
||||
|
||||
This option allows you to pass raw request data without extra processing
|
||||
(as opposed to the structured request items syntax):
|
||||
|
||||
$ http \fB\,--raw\/\fR=\[aq]data\[aq] pie.dev/post
|
||||
|
||||
You can achieve the same by piping the data via stdin:
|
||||
|
||||
$ echo data | http pie.dev/post
|
||||
|
||||
Or have HTTPie load the raw data from a file:
|
||||
|
||||
$ http pie.dev/post @data.txt
|
||||
|
||||
|
||||
|
||||
|
||||
.PP
|
||||
.SH Content processing options
|
||||
.IP "\fB\,--compress\/\fR, \fB\,-x\/\fR"
|
||||
|
||||
|
||||
Content compressed (encoded) with Deflate algorithm.
|
||||
The Content-Encoding header is set to deflate.
|
||||
|
||||
Compression is skipped if it appears that compression ratio is
|
||||
negative. Compression can be forced by repeating the argument.
|
||||
|
||||
|
||||
|
||||
.PP
|
||||
.SH Output processing
|
||||
.IP "\fB\,--pretty\/\fR"
|
||||
|
||||
|
||||
Controls output processing. The value can be \[dq]none\[dq] to not prettify
|
||||
the output (default for redirected output), \[dq]all\[dq] to apply both colors
|
||||
and formatting (default for terminal output), \[dq]colors\[dq], or \[dq]format\[dq].
|
||||
|
||||
|
||||
|
||||
.IP "\fB\,--style\/\fR, \fB\,-s\/\fR \fI\,STYLE\/\fR"
|
||||
|
||||
|
||||
Output coloring style (default is \[dq]auto\[dq]). It can be one of:
|
||||
|
||||
auto, pie, pie-dark, pie-light, solarized
|
||||
|
||||
|
||||
For finding out all available styles in your system, try:
|
||||
|
||||
$ http \fB\,--style\/\fR
|
||||
|
||||
The \[dq]auto\[dq] style follows your terminal\[aq]s ANSI color styles.
|
||||
For non-auto styles to work properly, please make sure that the
|
||||
$TERM environment variable is set to \[dq]xterm-256color\[dq] or similar
|
||||
(e.g., via `export TERM=xterm-256color\[aq] in your \(ti/.bashrc).
|
||||
|
||||
.IP "\fB\,--unsorted\/\fR"
|
||||
|
||||
|
||||
Disables all sorting while formatting output. It is a shortcut for:
|
||||
|
||||
\fB\,--format-options\/\fR=headers.sort:false,json.sort_keys:false
|
||||
|
||||
|
||||
|
||||
.IP "\fB\,--sorted\/\fR"
|
||||
|
||||
|
||||
Re-enables all sorting options while formatting output. It is a shortcut for:
|
||||
|
||||
\fB\,--format-options\/\fR=headers.sort:true,json.sort_keys:true
|
||||
|
||||
|
||||
|
||||
.IP "\fB\,--response-charset\/\fR \fI\,ENCODING\/\fR"
|
||||
|
||||
|
||||
Override the response encoding for terminal display purposes, e.g.:
|
||||
|
||||
\fB\,--response-charset\/\fR=utf8
|
||||
\fB\,--response-charset\/\fR=big5
|
||||
|
||||
|
||||
|
||||
.IP "\fB\,--response-mime\/\fR \fI\,MIME_TYPE\/\fR"
|
||||
|
||||
|
||||
Override the response mime type for coloring and formatting for the terminal, e.g.:
|
||||
|
||||
\fB\,--response-mime\/\fR=application/json
|
||||
\fB\,--response-mime\/\fR=text/xml
|
||||
|
||||
|
||||
|
||||
.IP "\fB\,--format-options\/\fR"
|
||||
|
||||
|
||||
Controls output formatting. Only relevant when formatting is enabled
|
||||
through (explicit or implied) \fB\,--pretty\/\fR=all or \fB\,--pretty\/\fR=format.
|
||||
The following are the default options:
|
||||
|
||||
headers.sort:true
|
||||
json.format:true
|
||||
json.indent:4
|
||||
json.sort_keys:true
|
||||
xml.format:true
|
||||
xml.indent:2
|
||||
|
||||
You may use this option multiple times, as well as specify multiple
|
||||
comma-separated options at the same time. For example, this modifies the
|
||||
settings to disable the sorting of JSON keys, and sets the indent size to 2:
|
||||
|
||||
\fB\,--format-options\/\fR json.sort_keys:false,json.indent:2
|
||||
|
||||
This is something you will typically put into your config file.
|
||||
|
||||
|
||||
|
||||
.PP
|
||||
.SH Output options
|
||||
.IP "\fB\,--print\/\fR, \fB\,-p\/\fR \fI\,WHAT\/\fR"
|
||||
|
||||
|
||||
String specifying what the output should contain:
|
||||
|
||||
\[aq]H\[aq] request headers
|
||||
\[aq]B\[aq] request body
|
||||
\[aq]h\[aq] response headers
|
||||
\[aq]b\[aq] response body
|
||||
\[aq]m\[aq] response metadata
|
||||
|
||||
The default behaviour is \[aq]hb\[aq] (i.e., the response
|
||||
headers and body are printed), if standard output is not redirected.
|
||||
If the output is piped to another program or to a file, then only the
|
||||
response body is printed by default.
|
||||
|
||||
|
||||
|
||||
.IP "\fB\,--headers\/\fR, \fB\,-h\/\fR"
|
||||
|
||||
|
||||
Print only the response headers. Shortcut for \fB\,--print\/\fR=h.
|
||||
|
||||
|
||||
|
||||
.IP "\fB\,--meta\/\fR, \fB\,-m\/\fR"
|
||||
|
||||
|
||||
Print only the response metadata. Shortcut for \fB\,--print\/\fR=m.
|
||||
|
||||
|
||||
|
||||
.IP "\fB\,--body\/\fR, \fB\,-b\/\fR"
|
||||
|
||||
|
||||
Print only the response body. Shortcut for \fB\,--print\/\fR=b.
|
||||
|
||||
|
||||
|
||||
.IP "\fB\,--verbose\/\fR, \fB\,-v\/\fR"
|
||||
|
||||
|
||||
Verbose output. For the level one (with single `\fB\,-v\/\fR`/`\fB\,--verbose\/\fR`), print
|
||||
the whole request as well as the response. Also print any intermediary
|
||||
requests/responses (such as redirects). For the second level and higher,
|
||||
print these as well as the response metadata.
|
||||
|
||||
Level one is a shortcut for: \fB\,--all\/\fR \fB\,--print\/\fR=BHbh
|
||||
Level two is a shortcut for: \fB\,--all\/\fR \fB\,--print\/\fR=BHbhm
|
||||
|
||||
|
||||
.IP "\fB\,--all\/\fR"
|
||||
|
||||
|
||||
By default, only the final request/response is shown. Use this flag to show
|
||||
any intermediary requests/responses as well. Intermediary requests include
|
||||
followed redirects (with \fB\,--follow\/\fR), the first unauthorized request when
|
||||
Digest auth is used (\fB\,--auth\/\fR=digest), etc.
|
||||
|
||||
|
||||
|
||||
.IP "\fB\,--stream\/\fR, \fB\,-S\/\fR"
|
||||
|
||||
|
||||
Always stream the response body by line, i.e., behave like `tail \fB\,-f\/\fR\[aq].
|
||||
|
||||
Without \fB\,--stream\/\fR and with \fB\,--pretty\/\fR (either set or implied),
|
||||
HTTPie fetches the whole response before it outputs the processed data.
|
||||
|
||||
Set this option when you want to continuously display a prettified
|
||||
long-lived response, such as one from the Twitter streaming API.
|
||||
|
||||
It is useful also without \fB\,--pretty\/\fR: It ensures that the output is flushed
|
||||
more often and in smaller chunks.
|
||||
|
||||
|
||||
|
||||
.IP "\fB\,--output\/\fR, \fB\,-o\/\fR \fI\,FILE\/\fR"
|
||||
|
||||
|
||||
Save output to FILE instead of stdout. If \fB\,--download\/\fR is also set, then only
|
||||
the response body is saved to FILE. Other parts of the HTTP exchange are
|
||||
printed to stderr.
|
||||
|
||||
|
||||
|
||||
.IP "\fB\,--download\/\fR, \fB\,-d\/\fR"
|
||||
|
||||
|
||||
Do not print the response body to stdout. Rather, download it and store it
|
||||
in a file. The filename is guessed unless specified with \fB\,--output\/\fR
|
||||
[filename]. This action is similar to the default behaviour of wget.
|
||||
|
||||
|
||||
|
||||
.IP "\fB\,--continue\/\fR, \fB\,-c\/\fR"
|
||||
|
||||
|
||||
Resume an interrupted download. Note that the \fB\,--output\/\fR option needs to be
|
||||
specified as well.
|
||||
|
||||
|
||||
|
||||
.IP "\fB\,--quiet\/\fR, \fB\,-q\/\fR"
|
||||
|
||||
|
||||
Do not print to stdout or stderr, except for errors and warnings when provided once.
|
||||
Provide twice to suppress warnings as well.
|
||||
stdout is still redirected if \fB\,--output\/\fR is specified.
|
||||
Flag doesn\[aq]t affect behaviour of download beyond not printing to terminal.
|
||||
|
||||
|
||||
|
||||
.PP
|
||||
.SH Sessions
|
||||
.IP "\fB\,--session\/\fR \fI\,SESSION_NAME_OR_PATH\/\fR"
|
||||
|
||||
|
||||
Create, or reuse and update a session. Within a session, custom headers,
|
||||
auth credential, as well as any cookies sent by the server persist between
|
||||
requests.
|
||||
|
||||
Session files are stored in:
|
||||
|
||||
[HTTPIE_CONFIG_DIR]/<HOST>/<SESSION_NAME>.json.
|
||||
|
||||
See the following page to find out your default HTTPIE_CONFIG_DIR:
|
||||
|
||||
https://httpie.io/docs/cli/config-file-directory
|
||||
|
||||
|
||||
.IP "\fB\,--session-read-only\/\fR \fI\,SESSION_NAME_OR_PATH\/\fR"
|
||||
|
||||
|
||||
Create or read a session without updating it from the request/response
|
||||
exchange.
|
||||
|
||||
|
||||
|
||||
.PP
|
||||
.SH Authentication
|
||||
.IP "\fB\,--auth\/\fR, \fB\,-a\/\fR \fI\,USER[:PASS] | TOKEN\/\fR"
|
||||
|
||||
|
||||
For username/password based authentication mechanisms (e.g.,
|
||||
basic auth or digest auth), if only the username is provided
|
||||
(\fB\,-a\/\fR username), HTTPie will prompt for the password.
|
||||
|
||||
|
||||
|
||||
.IP "\fB\,--auth-type\/\fR, \fB\,-A\/\fR"
|
||||
|
||||
|
||||
The authentication mechanism to be used. Defaults to \[dq]basic\[dq].
|
||||
|
||||
\[dq]basic\[dq]: Basic HTTP auth
|
||||
|
||||
\[dq]digest\[dq]: Digest HTTP auth
|
||||
|
||||
\[dq]bearer\[dq]: Bearer HTTP Auth
|
||||
|
||||
For finding out all available authentication types in your system, try:
|
||||
|
||||
$ http \fB\,--auth-type\/\fR
|
||||
|
||||
.IP "\fB\,--ignore-netrc\/\fR"
|
||||
|
||||
|
||||
Ignore credentials from .netrc.
|
||||
|
||||
|
||||
.PP
|
||||
.SH Network
|
||||
.IP "\fB\,--offline\/\fR"
|
||||
|
||||
|
||||
Build the request and print it but don\(gat actually send it.
|
||||
|
||||
|
||||
.IP "\fB\,--proxy\/\fR \fI\,PROTOCOL:PROXY_URL\/\fR"
|
||||
|
||||
|
||||
String mapping protocol to the URL of the proxy
|
||||
(e.g. http:http://foo.bar:3128). You can specify multiple proxies with
|
||||
different protocols. The environment variables $ALL_PROXY, $HTTP_PROXY,
|
||||
and $HTTPS_proxy are supported as well.
|
||||
|
||||
|
||||
|
||||
.IP "\fB\,--follow\/\fR, \fB\,-F\/\fR"
|
||||
|
||||
|
||||
Follow 30x Location redirects.
|
||||
|
||||
|
||||
.IP "\fB\,--max-redirects\/\fR"
|
||||
|
||||
|
||||
By default, requests have a limit of 30 redirects (works with \fB\,--follow\/\fR).
|
||||
|
||||
|
||||
|
||||
.IP "\fB\,--max-headers\/\fR"
|
||||
|
||||
|
||||
The maximum number of response headers to be read before giving up (default 0, i.e., no limit).
|
||||
|
||||
|
||||
.IP "\fB\,--timeout\/\fR \fI\,SECONDS\/\fR"
|
||||
|
||||
|
||||
The connection timeout of the request in seconds.
|
||||
The default value is 0, i.e., there is no timeout limit.
|
||||
This is not a time limit on the entire response download;
|
||||
rather, an error is reported if the server has not issued a response for
|
||||
timeout seconds (more precisely, if no bytes have been received on
|
||||
the underlying socket for timeout seconds).
|
||||
|
||||
|
||||
|
||||
.IP "\fB\,--check-status\/\fR"
|
||||
|
||||
|
||||
By default, HTTPie exits with 0 when no network or other fatal errors
|
||||
occur. This flag instructs HTTPie to also check the HTTP status code and
|
||||
exit with an error if the status indicates one.
|
||||
|
||||
When the server replies with a 4xx (Client Error) or 5xx (Server Error)
|
||||
status code, HTTPie exits with 4 or 5 respectively. If the response is a
|
||||
3xx (Redirect) and \fB\,--follow\/\fR hasn\[aq]t been set, then the exit status is 3.
|
||||
Also an error message is written to stderr if stdout is redirected.
|
||||
|
||||
|
||||
|
||||
.IP "\fB\,--path-as-is\/\fR"
|
||||
|
||||
|
||||
Bypass dot segment (/../ or /./) URL squashing.
|
||||
|
||||
|
||||
.IP "\fB\,--chunked\/\fR"
|
||||
|
||||
|
||||
Enable streaming via chunked transfer encoding. The Transfer-Encoding header is set to chunked.
|
||||
|
||||
|
||||
.PP
|
||||
.SH SSL
|
||||
.IP "\fB\,--verify\/\fR"
|
||||
|
||||
|
||||
Set to \[dq]no\[dq] (or \[dq]false\[dq]) to skip checking the host\[aq]s SSL certificate.
|
||||
Defaults to \[dq]yes\[dq] (\[dq]true\[dq]). You can also pass the path to a CA_BUNDLE file
|
||||
for private certs. (Or you can set the REQUESTS_CA_BUNDLE environment
|
||||
variable instead.)
|
||||
|
||||
|
||||
.IP "\fB\,--ssl\/\fR"
|
||||
|
||||
|
||||
The desired protocol version to use. This will default to
|
||||
SSL v2.3 which will negotiate the highest protocol that both
|
||||
the server and your installation of OpenSSL support. Available protocols
|
||||
may vary depending on OpenSSL installation (only the supported ones
|
||||
are shown here).
|
||||
|
||||
|
||||
|
||||
.IP "\fB\,--ciphers\/\fR"
|
||||
|
||||
|
||||
|
||||
A string in the OpenSSL cipher list format. By default, the following
|
||||
is used:
|
||||
|
||||
ECDHE+AESGCM:ECDHE+CHACHA20:DHE+AESGCM:DHE+CHACHA20:ECDH+AESGCM:DH+AESGCM:ECDH+AES:DH+AES:RSA+AESGCM:RSA+AES:!aNULL:!eNULL:!MD5:!DSS
|
||||
|
||||
|
||||
|
||||
.IP "\fB\,--cert\/\fR"
|
||||
|
||||
|
||||
You can specify a local cert to use as client side SSL certificate.
|
||||
This file may either contain both private key and certificate or you may
|
||||
specify \fB\,--cert-key\/\fR separately.
|
||||
|
||||
|
||||
|
||||
.IP "\fB\,--cert-key\/\fR"
|
||||
|
||||
|
||||
The private key to use with SSL. Only needed if \fB\,--cert\/\fR is given and the
|
||||
certificate file does not contain the private key.
|
||||
|
||||
|
||||
|
||||
.IP "\fB\,--cert-key-pass\/\fR"
|
||||
|
||||
|
||||
The passphrase to be used with the given private key. Only needed if \fB\,--cert-key\/\fR
|
||||
is given and the key file requires a passphrase.
|
||||
If not provided, you\(gall be prompted interactively.
|
||||
|
||||
|
||||
.PP
|
||||
.SH Troubleshooting
|
||||
.IP "\fB\,--ignore-stdin\/\fR, \fB\,-I\/\fR"
|
||||
|
||||
|
||||
Do not attempt to read stdin.
|
||||
|
||||
|
||||
.IP "\fB\,--help\/\fR"
|
||||
|
||||
|
||||
Show this help message and exit.
|
||||
|
||||
|
||||
.IP "\fB\,--manual\/\fR"
|
||||
|
||||
|
||||
Show the full manual.
|
||||
|
||||
|
||||
.IP "\fB\,--version\/\fR"
|
||||
|
||||
|
||||
Show version and exit.
|
||||
|
||||
|
||||
.IP "\fB\,--traceback\/\fR"
|
||||
|
||||
|
||||
Prints the exception traceback should one occur.
|
||||
|
||||
|
||||
.IP "\fB\,--default-scheme\/\fR"
|
||||
|
||||
|
||||
The default scheme to use if not specified in the URL.
|
||||
|
||||
|
||||
.IP "\fB\,--debug\/\fR"
|
||||
|
||||
|
||||
Prints the exception traceback should one occur, as well as other
|
||||
information useful for debugging HTTPie itself and for reporting bugs.
|
||||
|
||||
|
||||
|
||||
.PP
|
||||
.SH SEE ALSO
|
||||
|
||||
For every \fB\,--OPTION\/\fR there is also a \fB\,--no-OPTION\/\fR that reverts OPTION
|
||||
to its default value.
|
||||
|
||||
Suggestions and bug reports are greatly appreciated:
|
||||
https://github.com/httpie/httpie/issues
|
33
extras/packaging/linux/Dockerfile
Normal file
@ -0,0 +1,33 @@
|
||||
# Use the oldest (but still supported) Ubuntu as the base for PyInstaller
|
||||
# packages. This will prevent stuff like glibc from conflicting.
|
||||
FROM ubuntu:18.04
|
||||
|
||||
RUN apt-get update
|
||||
RUN apt-get install -y software-properties-common binutils
|
||||
RUN apt-get install -y ruby-dev
|
||||
RUN gem install fpm
|
||||
|
||||
# Use deadsnakes for the latest Pythons (e.g 3.9)
|
||||
RUN add-apt-repository ppa:deadsnakes/ppa
|
||||
RUN apt-get update && apt-get install -y python3.9 python3.9-dev python3.9-venv
|
||||
|
||||
# Install rpm as well, since we are going to build fedora dists too
|
||||
RUN apt-get install -y rpm
|
||||
|
||||
ADD . /app
|
||||
WORKDIR /app/extras/packaging/linux
|
||||
|
||||
ENV VIRTUAL_ENV=/opt/venv
|
||||
RUN python3.9 -m venv $VIRTUAL_ENV
|
||||
ENV PATH="$VIRTUAL_ENV/bin:$PATH"
|
||||
|
||||
# Ensure that pip is renewed, otherwise we would be using distro-provided pip
|
||||
# which strips vendored packages and doesn't work with PyInstaller.
|
||||
RUN python -m pip install /app
|
||||
RUN python -m pip install pyinstaller wheel
|
||||
RUN python -m pip install --force-reinstall --upgrade pip
|
||||
|
||||
RUN echo 'BUILD_CHANNEL="pypi"' > /app/httpie/internal/__build_channel__.py
|
||||
RUN python build.py
|
||||
|
||||
ENTRYPOINT ["mv", "/app/extras/packaging/linux/dist/", "/artifacts"]
|
52
extras/packaging/linux/README.md
Normal file
@ -0,0 +1,52 @@
|
||||
# Standalone Linux Packages
|
||||
|
||||

|
||||
|
||||
This directory contains the build scripts for creating:
|
||||
|
||||
- A self-contained binary executable for HTTPie itself
|
||||
- `httpie.deb` and `httpie.rpm` packages for Debian and Fedora.
|
||||
|
||||
The process of constructing them is fully automated, and can be easily done through the [`Release as Standalone Linux Package`](https://github.com/httpie/httpie/actions/workflows/release-linux-standalone.yml)
|
||||
action. Once it finishes, the release artifacts will be attached to the summary page of the triggered run.
|
||||
|
||||
|
||||
## Hacking
|
||||
|
||||
The main entry point for the package builder is the [`build.py`](https://github.com/httpie/httpie/blob/master/extras/packaging/linux/build.py). It
|
||||
contains two major functions:
|
||||
|
||||
- `build_binaries`, for the self-contained executables
|
||||
- `build_packages`, for the OS-specific packages (which wrap the binaries)
|
||||
|
||||
### `build_binaries`
|
||||
|
||||
We use [PyInstaller](https://pyinstaller.readthedocs.io/en/stable/) for the binaries. PyInstaller offers two different modes:
|
||||
|
||||
- Single directory (harder to distribute, low redundancy. Library files are shared across different executables)
|
||||
- Single binary (easier to distribute, higher redundancy. Same libraries are statically linked to different executables, so higher total size)
|
||||
|
||||
Since our total binary size (about 20 MiB) is not that big, we have chosen the single-binary mode for the sake of easier distribution.
|
||||
|
||||
We also disable `UPX`, a runtime decompression method, since it adds some startup cost.
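
For orientation, each entry point is compiled with an invocation roughly like the one below. This is only a sketch of the call that `build.py` (shown later in this diff) makes; the real call passes resolved absolute paths, and the `httpie` entry point additionally gets `--hidden-import=pip`.

```bash
# Mirrors the subprocess call in build.py (run from extras/packaging/linux).
# --onefile -> one self-contained executable per entry-point script
# --noupx   -> skip UPX runtime decompression to keep startup fast
pyinstaller --onefile --noupx -p ../../.. \
    --additional-hooks-dir scripts/hooks scripts/http_cli.py
```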
|
||||
|
||||
### `build_packages`
|
||||
|
||||
We build our OS-specific packages with [FPM](https://github.com/jordansissel/fpm), which offers a really nice abstraction. We use the `dir` mode,
|
||||
and package `http`, `https` and `httpie` commands. More can be added to the `files` option.
|
||||
|
||||
Since `httpie` depends on having a `pip` executable, we explicitly depend on the system Python even though the core does not use it.
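
Per target format, `build.py` (later in this diff) ends up invoking something along these lines. This is a sketch only: the absolute source paths, version string, description, licence, and man-page mappings are filled in at build time.

```bash
# Sketch of the fpm call assembled in build.py (deb shown; the rpm variant is produced the same way).
fpm --force -s dir -t deb \
    --name httpie --version 3.2.1 \
    --depends 'python3 >= 3.7' --depends python3-pip \
    dist/http_cli=/usr/bin/http \
    dist/http_cli=/usr/bin/https \
    dist/httpie_cli=/usr/bin/httpie
```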
|
||||
|
||||
### Docker Image
|
||||
|
||||
This directory also contains a [docker image](https://github.com/httpie/httpie/blob/master/extras/packaging/linux/Dockerfile) which helps with
|
||||
building our standalone binaries in an isolated environment with the lowest possible library versions. This is important, since even though
|
||||
the executables are standalone, they still depend on some core system C libraries (like `glibc`), so we need to create our executables inside
|
||||
an environment with a very old (but not deprecated) glibc version. This keeps them compatible with all active Ubuntu/Debian versions.
|
||||
|
||||
It also pins the Python version we package HTTPie with, so it is the place to change if you need a different one.
|
||||
|
||||
### `./get_release_artifacts.sh`
|
||||
|
||||
If you make a change in `build.py`, run the following script to test it out. It will leave multiple files under `artifacts/dist`, which
|
||||
you can then inspect and verify (it is also the script that we use in our automation).
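
Assuming Docker is available locally, the flow is roughly the following; exact artifact names depend on the HTTPie version being built.

```bash
cd extras/packaging/linux
./get_release_artifacts.sh   # builds the Docker image and runs the packaging inside it
ls artifacts/dist            # standalone binaries plus the generated .deb/.rpm packages
```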
|
109
extras/packaging/linux/build.py
Normal file
@ -0,0 +1,109 @@
|
||||
import stat
|
||||
import subprocess
|
||||
from pathlib import Path
|
||||
from typing import Iterator, Tuple
|
||||
|
||||
BUILD_DIR = Path(__file__).parent
|
||||
HTTPIE_DIR = BUILD_DIR.parent.parent.parent
|
||||
|
||||
EXTRAS_DIR = HTTPIE_DIR / 'extras'
|
||||
MAN_PAGES_DIR = EXTRAS_DIR / 'man'
|
||||
|
||||
SCRIPT_DIR = BUILD_DIR / Path('scripts')
|
||||
HOOKS_DIR = SCRIPT_DIR / 'hooks'
|
||||
|
||||
DIST_DIR = BUILD_DIR / 'dist'
|
||||
|
||||
TARGET_SCRIPTS = {
|
||||
SCRIPT_DIR / 'http_cli.py': [],
|
||||
SCRIPT_DIR / 'httpie_cli.py': ['--hidden-import=pip'],
|
||||
}
|
||||
|
||||
|
||||
def build_binaries() -> Iterator[Tuple[str, Path]]:
|
||||
for target_script, extra_args in TARGET_SCRIPTS.items():
|
||||
subprocess.check_call(
|
||||
[
|
||||
'pyinstaller',
|
||||
'--onefile',
|
||||
'--noupx',
|
||||
'-p',
|
||||
HTTPIE_DIR,
|
||||
'--additional-hooks-dir',
|
||||
HOOKS_DIR,
|
||||
*extra_args,
|
||||
target_script,
|
||||
]
|
||||
)
|
||||
|
||||
for executable_path in DIST_DIR.iterdir():
|
||||
if executable_path.suffix:
|
||||
continue
|
||||
stat_r = executable_path.stat()
|
||||
executable_path.chmod(stat_r.st_mode | stat.S_IEXEC)
|
||||
yield executable_path.stem, executable_path
|
||||
|
||||
|
||||
def build_packages(http_binary: Path, httpie_binary: Path) -> None:
|
||||
import httpie
|
||||
|
||||
# Mapping of src_file -> dst_file
|
||||
files = [
|
||||
(http_binary, '/usr/bin/http'),
|
||||
(http_binary, '/usr/bin/https'),
|
||||
(httpie_binary, '/usr/bin/httpie'),
|
||||
]
|
||||
files.extend(
|
||||
(man_page, f'/usr/share/man/man1/{man_page.name}')
|
||||
for man_page in MAN_PAGES_DIR.glob('*.1')
|
||||
)
|
||||
|
||||
# A list of additional dependencies
|
||||
deps = [
|
||||
'python3 >= 3.7',
|
||||
'python3-pip'
|
||||
]
|
||||
|
||||
processed_deps = [
|
||||
f'--depends={dep}'
|
||||
for dep in deps
|
||||
]
|
||||
processed_files = [
|
||||
'='.join([str(src.resolve()), dst]) for src, dst in files
|
||||
]
|
||||
for target in ['deb', 'rpm']:
|
||||
subprocess.check_call(
|
||||
[
|
||||
'fpm',
|
||||
'--force',
|
||||
'-s',
|
||||
'dir',
|
||||
'-t',
|
||||
target,
|
||||
'--name',
|
||||
'httpie',
|
||||
'--version',
|
||||
httpie.__version__,
|
||||
'--description',
|
||||
httpie.__doc__.strip(),
|
||||
'--license',
|
||||
httpie.__licence__,
|
||||
*processed_deps,
|
||||
*processed_files,
|
||||
],
|
||||
cwd=DIST_DIR,
|
||||
)
|
||||
|
||||
|
||||
def main():
|
||||
binaries = dict(build_binaries())
|
||||
build_packages(binaries['http_cli'], binaries['httpie_cli'])
|
||||
|
||||
# Rename http_cli/httpie_cli to http/httpie
|
||||
binaries['http_cli'].rename(DIST_DIR / 'http')
|
||||
binaries['httpie_cli'].rename(DIST_DIR / 'httpie')
|
||||
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
22
extras/packaging/linux/get_release_artifacts.sh
Executable file
@ -0,0 +1,22 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -xe
|
||||
|
||||
REPO_ROOT=../../../
|
||||
ARTIFACTS_DIR=$(pwd)/artifacts
|
||||
|
||||
# Reset the ARTIFACTS_DIR.
|
||||
rm -rf $ARTIFACTS_DIR
|
||||
mkdir -p $ARTIFACTS_DIR
|
||||
|
||||
# Operate on the repository root to have the proper
|
||||
# docker context.
|
||||
pushd $REPO_ROOT
|
||||
|
||||
# Build the PyInstaller image
|
||||
docker build -t pyinstaller-httpie -f extras/packaging/linux/Dockerfile .
|
||||
|
||||
# Copy the artifacts to the designated directory.
|
||||
docker run --rm -i -v $ARTIFACTS_DIR:/artifacts pyinstaller-httpie:latest
|
||||
|
||||
popd
|
14
extras/packaging/linux/scripts/hooks/hook-pip.py
Normal file
@ -0,0 +1,14 @@
|
||||
from pathlib import Path
|
||||
from PyInstaller.utils.hooks import collect_all
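# PyInstaller hook: collect pip, setuptools, and their data files into the frozen app,
# so the standalone `httpie` binary can drive plugin installs (build.py also passes
# --hidden-import=pip for the httpie_cli entry point).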
|
||||
|
||||
def hook(hook_api):
|
||||
for pkg in [
|
||||
'pip',
|
||||
'setuptools',
|
||||
'distutils',
|
||||
'pkg_resources'
|
||||
]:
|
||||
datas, binaries, hiddenimports = collect_all(pkg)
|
||||
hook_api.add_datas(datas)
|
||||
hook_api.add_binaries(binaries)
|
||||
hook_api.add_imports(*hiddenimports)
|
5
extras/packaging/linux/scripts/http_cli.py
Normal file
@ -0,0 +1,5 @@
|
||||
from httpie.__main__ import main
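# Thin wrapper entry point; build.py points PyInstaller at this script to produce the
# standalone binary that gets installed as both `http` and `https`.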
|
||||
|
||||
if __name__ == '__main__':
|
||||
import sys
|
||||
sys.exit(main())
|
5
extras/packaging/linux/scripts/httpie_cli.py
Normal file
@ -0,0 +1,5 @@
|
||||
from httpie.manager.__main__ import main
|
||||
|
||||
if __name__ == '__main__':
|
||||
import sys
|
||||
sys.exit(main())
|
@ -1,7 +1,7 @@
|
||||
# HTTPie Benchmarking Infrastructure
|
||||
|
||||
This directory includes the benchmarks we use for testing HTTPie's speed and the
|
||||
infrastructure to automate this testing accross versions.
|
||||
infrastructure to automate this testing across versions.
|
||||
|
||||
## Usage
|
||||
|
||||
@ -35,5 +35,5 @@ You can customize these branches by passing `--local-repo`/`--target-branch`,
|
||||
and customize the repos by passing `--local-repo`/`--target-repo` (can either
|
||||
take a URL or a path).
|
||||
|
||||
If you want to run a third enviroment with additional dependencies (such as
|
||||
If you want to run a third environment with additional dependencies (such as
|
||||
`pyOpenSSL`), you can pass `--complex`.
|
||||
|
@ -21,7 +21,7 @@ Examples:
|
||||
$ python extras/profiling/benchmarks.py --fast
|
||||
|
||||
# For verify everything works as expected, pass --debug-single-value.
|
||||
# It will only run everything once, so the resuls are not realiable. But
|
||||
# It will only run everything once, so the resuls are not reliable. But
|
||||
# very useful when iterating on a benchmark
|
||||
$ python extras/profiling/benchmarks.py --debug-single-value
|
||||
|
||||
|
183
extras/scripts/generate_man_pages.py
Normal file
@ -0,0 +1,183 @@
|
||||
import re
|
||||
from contextlib import contextmanager
|
||||
from pathlib import Path
|
||||
from typing import Optional, Iterator, Iterable
|
||||
|
||||
import httpie
|
||||
from httpie.cli.definition import options as core_options
|
||||
from httpie.cli.options import ParserSpec
|
||||
from httpie.manager.cli import options as manager_options
|
||||
from httpie.output.ui.rich_help import OptionsHighlighter, to_usage
|
||||
from httpie.output.ui.rich_utils import render_as_string
|
||||
from httpie.utils import split
|
||||
|
||||
|
||||
# Escape certain characters so they are rendered properly on
|
||||
# all terminals.
|
||||
# https://man7.org/linux/man-pages/man7/groff_char.7.html
|
||||
ESCAPE_MAP = {
|
||||
'"': '\[dq]',
|
||||
"'": '\[aq]',
|
||||
'~': '\(ti',
|
||||
'’': "\(ga",
|
||||
'\\': '\e',
|
||||
}
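# str.translate() expects a table keyed by Unicode code points, hence the ord() conversion below.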
|
||||
ESCAPE_MAP = {ord(key): value for key, value in ESCAPE_MAP.items()}
|
||||
|
||||
EXTRAS_DIR = Path(__file__).parent.parent
|
||||
MAN_PAGE_PATH = EXTRAS_DIR / 'man'
|
||||
PROJECT_ROOT = EXTRAS_DIR.parent
|
||||
|
||||
OPTION_HIGHLIGHT_RE = re.compile(
|
||||
OptionsHighlighter.highlights[0]
|
||||
)
|
||||
|
||||
class ManPageBuilder:
|
||||
def __init__(self):
|
||||
self.source = []
|
||||
|
||||
def title_line(
|
||||
self,
|
||||
full_name: str,
|
||||
program_name: str,
|
||||
program_version: str,
|
||||
last_edit_date: str,
|
||||
) -> None:
|
||||
self.source.append(
|
||||
f'.TH {program_name} 1 "{last_edit_date}" '
|
||||
f'"{full_name} {program_version}" "{full_name} Manual"'
|
||||
)
|
||||
|
||||
def set_name(self, program_name: str) -> None:
|
||||
with self.section('NAME'):
|
||||
self.write(program_name)
|
||||
|
||||
def write(self, text: str, *, bold: bool = False) -> None:
|
||||
if bold:
|
||||
text = '.B ' + text
|
||||
self.source.append(text)
|
||||
|
||||
def separate(self) -> None:
|
||||
self.source.append('.PP')
|
||||
|
||||
def format_desc(self, desc: str) -> str:
|
||||
description = _escape_and_dedent(desc)
|
||||
description = OPTION_HIGHLIGHT_RE.sub(
|
||||
# Boldify the option part, but don't remove the prefix (start of the match).
|
||||
lambda match: match[1] + self.boldify(match['option']),
|
||||
description
|
||||
)
|
||||
return description
|
||||
|
||||
def add_comment(self, comment: str) -> None:
|
||||
self.source.append(f'.\\" {comment}')
|
||||
|
||||
def add_options(self, options: Iterable[str], *, metavar: Optional[str] = None) -> None:
|
||||
text = ", ".join(map(self.boldify, options))
|
||||
if metavar:
|
||||
text += f' {self.underline(metavar)}'
|
||||
self.write(f'.IP "{text}"')
|
||||
|
||||
def build(self) -> str:
|
||||
return '\n'.join(self.source)
|
||||
|
||||
@contextmanager
|
||||
def section(self, section_name: str) -> Iterator[None]:
|
||||
self.write(f'.SH {section_name}')
|
||||
self.in_section = True
|
||||
yield
|
||||
self.in_section = False
|
||||
|
||||
def underline(self, text: str) -> str:
|
||||
return r'\fI\,{}\/\fR'.format(text)
|
||||
|
||||
def boldify(self, text: str) -> str:
|
||||
return r'\fB\,{}\/\fR'.format(text)
|
||||
|
||||
|
||||
def _escape_and_dedent(text: str) -> str:
|
||||
lines = []
|
||||
for should_act, line in enumerate(text.splitlines()):
|
||||
# Only dedent after the first line.
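# Strip one 4-space indent level from continuation lines so the man page text is flush left.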
|
||||
if should_act:
|
||||
if line.startswith(' '):
|
||||
line = line[4:]
|
||||
|
||||
lines.append(line)
|
||||
return '\n'.join(lines).translate(ESCAPE_MAP)
|
||||
|
||||
|
||||
def to_man_page(program_name: str, spec: ParserSpec, *, is_top_level_cmd: bool = False) -> str:
|
||||
builder = ManPageBuilder()
|
||||
builder.add_comment(
|
||||
f"This file is auto-generated from the parser declaration "
|
||||
+ (f"in {Path(spec.source_file).relative_to(PROJECT_ROOT)} " if spec.source_file else "")
|
||||
+ f"by {Path(__file__).relative_to(PROJECT_ROOT)}."
|
||||
)
|
||||
|
||||
builder.title_line(
|
||||
full_name='HTTPie',
|
||||
program_name=program_name,
|
||||
program_version=httpie.__version__,
|
||||
last_edit_date=httpie.__date__,
|
||||
)
|
||||
builder.set_name(program_name)
|
||||
|
||||
with builder.section('SYNOPSIS'):
|
||||
# `http` and `https` are commands that can be used directly, so they can
|
||||
# have a valid usage. But `httpie` is a top-level command with multiple subcommands,
|
||||
# so for the synopsis we'll only reference the `httpie` name.
|
||||
if is_top_level_cmd:
|
||||
synopsis = program_name
|
||||
else:
|
||||
synopsis = render_as_string(to_usage(spec, program_name=program_name))
|
||||
builder.write(synopsis)
|
||||
|
||||
with builder.section('DESCRIPTION'):
|
||||
builder.write(spec.description)
|
||||
if spec.man_page_hint:
|
||||
builder.write(spec.man_page_hint)
|
||||
|
||||
for index, group in enumerate(spec.groups, 1):
|
||||
with builder.section(group.name):
|
||||
if group.description:
|
||||
builder.write(group.description)
|
||||
|
||||
for argument in group.arguments:
|
||||
if argument.is_hidden:
|
||||
continue
|
||||
|
||||
raw_arg = argument.serialize(isolation_mode=True)
|
||||
|
||||
metavar = raw_arg.get('metavar')
|
||||
if raw_arg.get('is_positional'):
|
||||
# In case of positional arguments, metavar is always equal
|
||||
# to the list of options (e.g `METHOD`).
|
||||
metavar = None
|
||||
builder.add_options(raw_arg['options'], metavar=metavar)
|
||||
|
||||
desc = builder.format_desc(raw_arg.get('description', ''))
|
||||
builder.write('\n' + desc + '\n')
|
||||
|
||||
builder.separate()
|
||||
|
||||
if spec.epilog:
|
||||
with builder.section('SEE ALSO'):
|
||||
builder.write(builder.format_desc(spec.epilog))
|
||||
|
||||
return builder.build()
|
||||
|
||||
|
||||
def main() -> None:
|
||||
for program_name, spec, config in [
|
||||
('http', core_options, {}),
|
||||
('https', core_options, {}),
|
||||
('httpie', manager_options, {'is_top_level_cmd': True}),
|
||||
]:
|
||||
with open((MAN_PAGE_PATH / program_name).with_suffix('.1'), 'w') as stream:
|
||||
stream.write(to_man_page(program_name, spec, **config))
|
||||
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
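For orientation, a tiny sketch of the builder's output format (version and date taken from the release metadata below; everything else is invented):

builder = ManPageBuilder()
builder.title_line(full_name='HTTPie', program_name='http',
                   program_version='3.2.1', last_edit_date='2022-05-06')
builder.set_name('http')
print(builder.build())
# .TH http 1 "2022-05-06" "HTTPie 3.2.1" "HTTPie Manual"
# .SH NAME
# http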
|
@ -3,6 +3,7 @@ HTTPie: modern, user-friendly command-line HTTP client for the API era.
|
||||
|
||||
"""
|
||||
|
||||
__version__ = '3.1.0'
|
||||
__version__ = '3.2.1'
|
||||
__date__ = '2022-05-06'
|
||||
__author__ = 'Jakub Roztocil'
|
||||
__licence__ = 'BSD'
|
||||
|
@ -155,6 +155,7 @@ class HTTPieArgumentParser(BaseHTTPieArgumentParser):
|
||||
namespace=None
|
||||
) -> argparse.Namespace:
|
||||
self.env = env
|
||||
self.env.args = namespace = namespace or argparse.Namespace()
|
||||
self.args, no_options = super().parse_known_args(args, namespace)
|
||||
if self.args.debug:
|
||||
self.args.traceback = True
|
||||
@ -557,19 +558,56 @@ class HTTPieArgumentParser(BaseHTTPieArgumentParser):
|
||||
parsed_options = parse_format_options(options_group, defaults=parsed_options)
|
||||
self.args.format_options = parsed_options
|
||||
|
||||
def print_manual(self):
|
||||
from httpie.output.ui import man_pages
|
||||
|
||||
if man_pages.is_available(self.env.program_name):
|
||||
man_pages.display_for(self.env, self.env.program_name)
|
||||
return None
|
||||
|
||||
text = self.format_help()
|
||||
with self.env.rich_console.pager():
|
||||
self.env.rich_console.print(
|
||||
text,
|
||||
highlight=False
|
||||
)
|
||||
|
||||
def print_usage(self, file):
|
||||
from rich.text import Text
|
||||
from httpie.output.ui import rich_help
|
||||
|
||||
whitelist = set()
|
||||
_, exception, _ = sys.exc_info()
|
||||
if (
|
||||
isinstance(exception, argparse.ArgumentError)
|
||||
and len(exception.args) >= 1
|
||||
and isinstance(exception.args[0], argparse.Action)
|
||||
and exception.args[0].option_strings
|
||||
):
|
||||
# add_usage path is also taken when you pass an invalid option,
|
||||
# e.g --style=invalid. If something like that happens, we want
|
||||
# to include the action that caused the invalid usage in
|
||||
# the list of actions we are displaying.
|
||||
whitelist.add(exception.args[0].option_strings[0])
|
||||
|
||||
usage_text = Text('usage', style='bold')
|
||||
usage_text.append(':\n ')
|
||||
usage_text.append(rich_help.to_usage(self.spec, whitelist=whitelist))
|
||||
self.env.rich_error_console.print(usage_text)
|
||||
|
||||
def error(self, message):
|
||||
"""Prints a usage message incorporating the message to stderr and
|
||||
exits."""
|
||||
self.print_usage(sys.stderr)
|
||||
self.exit(
|
||||
2,
|
||||
self.env.rich_error_console.print(
|
||||
dedent(
|
||||
f'''
|
||||
error:
|
||||
[bold]error[/bold]:
|
||||
{message}
|
||||
|
||||
for more information:
|
||||
[bold]for more information[/bold]:
|
||||
run '{self.prog} --help' or visit https://httpie.io/docs/cli
|
||||
'''
|
||||
'''.rstrip()
|
||||
)
|
||||
)
|
||||
self.exit(2)
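A rough rendering sketch (console creation simplified, message invented): with markup enabled, Rich turns the [bold]…[/bold] tags into styling instead of printing them literally.

from rich.console import Console

console = Console(stderr=True)  # the parser uses env.rich_error_console
console.print('[bold]error[/bold]:\n    unrecognized arguments: --styl')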
|
||||
|
@ -9,6 +9,7 @@ URL_SCHEME_RE = re.compile(r'^[a-z][a-z0-9.+-]*://', re.IGNORECASE)
|
||||
|
||||
HTTP_POST = 'POST'
|
||||
HTTP_GET = 'GET'
|
||||
HTTP_OPTIONS = 'OPTIONS'
|
||||
|
||||
# Various separators used in args
|
||||
SEPARATOR_HEADER = ':'
|
||||
@ -131,10 +132,3 @@ class RequestType(enum.Enum):
|
||||
FORM = enum.auto()
|
||||
MULTIPART = enum.auto()
|
||||
JSON = enum.auto()
|
||||
|
||||
|
||||
EMPTY_STRING = ''
|
||||
OPEN_BRACKET = '['
|
||||
CLOSE_BRACKET = ']'
|
||||
BACKSLASH = '\\'
|
||||
HIGHLIGHTER = '^'
|
||||
|
@ -16,11 +16,10 @@ from httpie.cli.constants import (BASE_OUTPUT_OPTIONS, DEFAULT_FORMAT_OPTIONS,
|
||||
SORTED_FORMAT_OPTIONS_STRING,
|
||||
UNSORTED_FORMAT_OPTIONS_STRING, RequestType)
|
||||
from httpie.cli.options import ParserSpec, Qualifiers, to_argparse
|
||||
from httpie.output.formatters.colors import (AUTO_STYLE, DEFAULT_STYLE,
|
||||
from httpie.output.formatters.colors import (AUTO_STYLE, DEFAULT_STYLE, BUNDLED_STYLES,
|
||||
get_available_styles)
|
||||
from httpie.plugins.builtin import BuiltinAuthPlugin
|
||||
from httpie.plugins.registry import plugin_manager
|
||||
from httpie.sessions import DEFAULT_SESSIONS_DIR
|
||||
from httpie.ssl_ import AVAILABLE_SSL_VERSION_ARG_MAPPING, DEFAULT_SSL_CIPHERS
|
||||
|
||||
options = ParserSpec(
|
||||
@ -29,9 +28,11 @@ options = ParserSpec(
|
||||
epilog="""
|
||||
For every --OPTION there is also a --no-OPTION that reverts OPTION
|
||||
to its default value.
|
||||
|
||||
Suggestions and bug reports are greatly appreciated:
|
||||
https://github.com/httpie/httpie/issues
|
||||
""",
|
||||
source_file=__file__
|
||||
)
|
||||
|
||||
|
||||
@ -40,7 +41,7 @@ options = ParserSpec(
|
||||
#######################################################################
|
||||
|
||||
positional_arguments = options.add_group(
|
||||
'Positional Arguments',
|
||||
'Positional arguments',
|
||||
description="""
|
||||
These arguments come after any flags and in the order they are listed here.
|
||||
Only URL is required.
|
||||
@ -52,6 +53,7 @@ positional_arguments.add_argument(
|
||||
metavar='METHOD',
|
||||
nargs=Qualifiers.OPTIONAL,
|
||||
default=None,
|
||||
short_help='The HTTP method to be used for the request (GET, POST, PUT, DELETE, ...).',
|
||||
help="""
|
||||
The HTTP method to be used for the request (GET, POST, PUT, DELETE, ...).
|
||||
|
||||
@ -66,9 +68,10 @@ positional_arguments.add_argument(
|
||||
positional_arguments.add_argument(
|
||||
dest='url',
|
||||
metavar='URL',
|
||||
short_help='The request URL.',
|
||||
help="""
|
||||
The scheme defaults to 'http://' if the URL does not include one.
|
||||
(You can override this with: --default-scheme=https)
|
||||
The request URL. Scheme defaults to 'http://' if the URL
|
||||
does not include one. (You can override this with: --default-scheme=http/https)
|
||||
|
||||
You can also use a shorthand for localhost
|
||||
|
||||
@ -83,6 +86,17 @@ positional_arguments.add_argument(
|
||||
nargs=Qualifiers.ZERO_OR_MORE,
|
||||
default=None,
|
||||
type=KeyValueArgType(*SEPARATOR_GROUP_ALL_ITEMS),
|
||||
short_help=(
|
||||
'HTTPie’s request items syntax for specifying HTTP headers, JSON/Form '
|
||||
'data, files, and URL parameters.'
|
||||
),
|
||||
nested_options=[
|
||||
('HTTP Headers', 'Name:Value', 'Arbitrary HTTP header, e.g X-API-Token:123'),
|
||||
('URL Parameters', 'name==value', 'Querystring parameter to the URL, e.g limit==50'),
|
||||
('Data Fields', 'field=value', 'Data fields to be serialized as JSON (default) or Form Data (with --form)'),
|
||||
('Raw JSON Fields', 'field:=json', 'Data field for real JSON types.'),
|
||||
('File upload Fields', 'field@/dir/file', 'Path field for uploading a file.'),
|
||||
],
|
||||
help=r"""
|
||||
Optional key-value pairs to be included in the request. The separator used
|
||||
determines the type:
|
||||
@ -128,7 +142,7 @@ positional_arguments.add_argument(
|
||||
# Content type.
|
||||
#######################################################################
|
||||
|
||||
content_types = options.add_group('Predefined Content Types')
|
||||
content_types = options.add_group('Predefined content types')
|
||||
|
||||
content_types.add_argument(
|
||||
'--json',
|
||||
@ -136,6 +150,7 @@ content_types.add_argument(
|
||||
action='store_const',
|
||||
const=RequestType.JSON,
|
||||
dest='request_type',
|
||||
short_help='(default) Serialize data items from the command line as a JSON object.',
|
||||
help="""
|
||||
(default) Data items from the command line are serialized as a JSON object.
|
||||
The Content-Type and Accept headers are set to application/json
|
||||
@ -149,6 +164,7 @@ content_types.add_argument(
|
||||
action='store_const',
|
||||
const=RequestType.FORM,
|
||||
dest='request_type',
|
||||
short_help='Serialize data items from the command line as form field data.',
|
||||
help="""
|
||||
Data items from the command line are serialized as form fields.
|
||||
|
||||
@ -163,22 +179,21 @@ content_types.add_argument(
|
||||
action='store_const',
|
||||
const=RequestType.MULTIPART,
|
||||
dest='request_type',
|
||||
help="""
|
||||
Similar to --form, but always sends a multipart/form-data
|
||||
request (i.e., even without files).
|
||||
|
||||
""",
|
||||
short_help=(
|
||||
'Similar to --form, but always sends a multipart/form-data '
|
||||
'request (i.e., even without files).'
|
||||
)
|
||||
)
|
||||
content_types.add_argument(
|
||||
'--boundary',
|
||||
help="""
|
||||
Specify a custom boundary string for multipart/form-data requests.
|
||||
Has effect only together with --form.
|
||||
|
||||
""",
|
||||
short_help=(
|
||||
'Specify a custom boundary string for multipart/form-data requests. '
|
||||
'Has effect only together with --form.'
|
||||
)
|
||||
)
|
||||
content_types.add_argument(
|
||||
'--raw',
|
||||
short_help='Pass raw request data without extra processing.',
|
||||
help="""
|
||||
This option allows you to pass raw request data without extra processing
|
||||
(as opposed to the structured request items syntax):
|
||||
@ -201,13 +216,14 @@ content_types.add_argument(
|
||||
# Content processing.
|
||||
#######################################################################
|
||||
|
||||
processing_options = options.add_group('Content Processing Options')
|
||||
processing_options = options.add_group('Content processing options')
|
||||
|
||||
processing_options.add_argument(
|
||||
'--compress',
|
||||
'-x',
|
||||
action='count',
|
||||
default=0,
|
||||
short_help='Compress the content with Deflate algorithm.',
|
||||
help="""
|
||||
Content compressed (encoded) with Deflate algorithm.
|
||||
The Content-Encoding header is set to deflate.
|
||||
@ -223,22 +239,33 @@ processing_options.add_argument(
|
||||
#######################################################################
|
||||
|
||||
|
||||
def format_style_help(available_styles):
|
||||
return """
|
||||
def format_style_help(available_styles, *, isolation_mode: bool = False):
|
||||
text = """
|
||||
Output coloring style (default is "{default}"). It can be one of:
|
||||
|
||||
{available_styles}
|
||||
"""
|
||||
if isolation_mode:
|
||||
text += '\n\n'
|
||||
text += 'To find out all available styles in your system, try:\n\n'
|
||||
text += ' $ http --style\n'
|
||||
text += textwrap.dedent("""
|
||||
The "{auto_style}" style follows your terminal's ANSI color styles.
|
||||
For non-{auto_style} styles to work properly, please make sure that the
|
||||
$TERM environment variable is set to "xterm-256color" or similar
|
||||
(e.g., via `export TERM=xterm-256color' in your ~/.bashrc).
|
||||
""")
|
||||
|
||||
The "{auto_style}" style follows your terminal's ANSI color styles.
|
||||
For non-{auto_style} styles to work properly, please make sure that the
|
||||
$TERM environment variable is set to "xterm-256color" or similar
|
||||
(e.g., via `export TERM=xterm-256color' in your ~/.bashrc).
|
||||
""".format(
|
||||
if isolation_mode:
|
||||
available_styles = sorted(BUNDLED_STYLES)
|
||||
|
||||
available_styles_text = '\n'.join(
|
||||
f' {line.strip()}'
|
||||
for line in textwrap.wrap(', '.join(available_styles), 60)
|
||||
).strip()
|
||||
return text.format(
|
||||
default=DEFAULT_STYLE,
|
||||
available_styles='\n'.join(
|
||||
f' {line.strip()}'
|
||||
for line in textwrap.wrap(', '.join(available_styles), 60)
|
||||
).strip(),
|
||||
available_styles=available_styles_text,
|
||||
auto_style=AUTO_STYLE,
|
||||
)
|
||||
|
||||
@ -254,13 +281,14 @@ _unsorted_kwargs = {
|
||||
'dest': 'format_options',
|
||||
}
|
||||
|
||||
output_processing = options.add_group('Output Processing')
|
||||
output_processing = options.add_group('Output processing')
|
||||
|
||||
output_processing.add_argument(
|
||||
'--pretty',
|
||||
dest='prettify',
|
||||
default=PRETTY_STDOUT_TTY_ONLY,
|
||||
choices=sorted(PRETTY_MAP.keys()),
|
||||
short_help='Control the processing of console outputs.',
|
||||
help="""
|
||||
Controls output processing. The value can be "none" to not prettify
|
||||
the output (default for redirected output), "all" to apply both colors
|
||||
@ -276,6 +304,7 @@ output_processing.add_argument(
|
||||
default=DEFAULT_STYLE,
|
||||
action='lazy_choices',
|
||||
getter=get_available_styles,
|
||||
short_help=f'Output coloring style (default is "{DEFAULT_STYLE}").',
|
||||
help_formatter=format_style_help,
|
||||
)
|
||||
|
||||
@ -291,6 +320,7 @@ output_processing.add_argument(
|
||||
output_processing.add_argument(
|
||||
'--unsorted',
|
||||
**_unsorted_kwargs,
|
||||
short_help='Disables all sorting while formatting output.',
|
||||
help=f"""
|
||||
Disables all sorting while formatting output. It is a shortcut for:
|
||||
|
||||
@ -301,6 +331,7 @@ output_processing.add_argument(
|
||||
output_processing.add_argument(
|
||||
'--sorted',
|
||||
**_sorted_kwargs,
|
||||
short_help='Re-enables all sorting options while formatting output.',
|
||||
help=f"""
|
||||
Re-enables all sorting options while formatting output. It is a shortcut for:
|
||||
|
||||
@ -312,6 +343,7 @@ output_processing.add_argument(
|
||||
'--response-charset',
|
||||
metavar='ENCODING',
|
||||
type=response_charset_type,
|
||||
short_help='Override the response encoding for terminal display purposes.',
|
||||
help="""
|
||||
Override the response encoding for terminal display purposes, e.g.:
|
||||
|
||||
@ -324,6 +356,7 @@ output_processing.add_argument(
|
||||
'--response-mime',
|
||||
metavar='MIME_TYPE',
|
||||
type=response_mime_type,
|
||||
short_help='Override the response mime type for coloring and formatting for the terminal.',
|
||||
help="""
|
||||
Override the response mime type for coloring and formatting for the terminal, e.g.:
|
||||
|
||||
@ -335,6 +368,7 @@ output_processing.add_argument(
|
||||
output_processing.add_argument(
|
||||
'--format-options',
|
||||
action='append',
|
||||
short_help='Controls output formatting.',
|
||||
help="""
|
||||
Controls output formatting. Only relevant when formatting is enabled
|
||||
through (explicit or implied) --pretty=all or --pretty=format.
|
||||
@ -361,13 +395,14 @@ output_processing.add_argument(
|
||||
# Output options
|
||||
#######################################################################
|
||||
|
||||
output_options = options.add_group('Output Options')
|
||||
output_options = options.add_group('Output options')
|
||||
|
||||
output_options.add_argument(
|
||||
'--print',
|
||||
'-p',
|
||||
dest='output_options',
|
||||
metavar='WHAT',
|
||||
short_help='Options to specify what the console output should contain.',
|
||||
help=f"""
|
||||
String specifying what the output should contain:
|
||||
|
||||
@ -390,6 +425,7 @@ output_options.add_argument(
|
||||
dest='output_options',
|
||||
action='store_const',
|
||||
const=OUT_RESP_HEAD,
|
||||
short_help='Print only the response headers.',
|
||||
help=f"""
|
||||
Print only the response headers. Shortcut for --print={OUT_RESP_HEAD}.
|
||||
|
||||
@ -401,6 +437,7 @@ output_options.add_argument(
|
||||
dest='output_options',
|
||||
action='store_const',
|
||||
const=OUT_RESP_META,
|
||||
short_help='Print only the response metadata.',
|
||||
help=f"""
|
||||
Print only the response metadata. Shortcut for --print={OUT_RESP_META}.
|
||||
|
||||
@ -412,6 +449,7 @@ output_options.add_argument(
|
||||
dest='output_options',
|
||||
action='store_const',
|
||||
const=OUT_RESP_BODY,
|
||||
short_help='Print only the response body.',
|
||||
help=f"""
|
||||
Print only the response body. Shortcut for --print={OUT_RESP_BODY}.
|
||||
|
||||
@ -424,20 +462,22 @@ output_options.add_argument(
|
||||
dest='verbose',
|
||||
action='count',
|
||||
default=0,
|
||||
short_help='Make output more verbose.',
|
||||
help=f"""
|
||||
Verbose output. For the level one (with single `-v`/`--verbose`), print
|
||||
the whole request as well as the response. Also print any intermediary
|
||||
requests/responses (such as redirects). For the second level and higher,
|
||||
print these as well as the response metadata.
|
||||
|
||||
Level one is a shortcut for: --all --print={''.join(BASE_OUTPUT_OPTIONS)}
|
||||
Level two is a shortcut for: --all --print={''.join(OUTPUT_OPTIONS)}
|
||||
Level one is a shortcut for: --all --print={''.join(sorted(BASE_OUTPUT_OPTIONS))}
|
||||
Level two is a shortcut for: --all --print={''.join(sorted(OUTPUT_OPTIONS))}
|
||||
""",
|
||||
)
|
||||
output_options.add_argument(
|
||||
'--all',
|
||||
default=False,
|
||||
action='store_true',
|
||||
short_help='Show any intermediary requests/responses.',
|
||||
help="""
|
||||
By default, only the final request/response is shown. Use this flag to show
|
||||
any intermediary requests/responses as well. Intermediary requests include
|
||||
@ -451,19 +491,14 @@ output_options.add_argument(
|
||||
'-P',
|
||||
dest='output_options_history',
|
||||
metavar='WHAT',
|
||||
help="""
|
||||
The same as --print, -p but applies only to intermediary requests/responses
|
||||
(such as redirects) when their inclusion is enabled with --all. If this
|
||||
options is not specified, then they are formatted the same way as the final
|
||||
response.
|
||||
|
||||
""",
|
||||
help=Qualifiers.SUPPRESS,
|
||||
)
|
||||
output_options.add_argument(
|
||||
'--stream',
|
||||
'-S',
|
||||
action='store_true',
|
||||
default=False,
|
||||
short_help='Always stream the response body by line, i.e., behave like `tail -f`.',
|
||||
help="""
|
||||
Always stream the response body by line, i.e., behave like `tail -f'.
|
||||
|
||||
@ -484,6 +519,7 @@ output_options.add_argument(
|
||||
type=FileType('a+b'),
|
||||
dest='output_file',
|
||||
metavar='FILE',
|
||||
short_help='Save output to FILE instead of stdout.',
|
||||
help="""
|
||||
Save output to FILE instead of stdout. If --download is also set, then only
|
||||
the response body is saved to FILE. Other parts of the HTTP exchange are
|
||||
@ -497,6 +533,7 @@ output_options.add_argument(
|
||||
'-d',
|
||||
action='store_true',
|
||||
default=False,
|
||||
short_help='Download the body to a file instead of printing it to stdout.',
|
||||
help="""
|
||||
Do not print the response body to stdout. Rather, download it and store it
|
||||
in a file. The filename is guessed unless specified with --output
|
||||
@ -510,6 +547,7 @@ output_options.add_argument(
|
||||
dest='download_resume',
|
||||
action='store_true',
|
||||
default=False,
|
||||
short_help='Resume an interrupted download (--output needs to be specified).',
|
||||
help="""
|
||||
Resume an interrupted download. Note that the --output option needs to be
|
||||
specified as well.
|
||||
@ -521,6 +559,7 @@ output_options.add_argument(
|
||||
'-q',
|
||||
action='count',
|
||||
default=0,
|
||||
short_help='Do not print to stdout or stderr, except for errors and warnings when provided once.',
|
||||
help="""
|
||||
Do not print to stdout or stderr, except for errors and warnings when provided once.
|
||||
Provide twice to suppress warnings as well.
|
||||
@ -544,21 +583,26 @@ sessions.add_argument(
|
||||
'--session',
|
||||
metavar='SESSION_NAME_OR_PATH',
|
||||
type=session_name_validator,
|
||||
help=f"""
|
||||
short_help='Create, or reuse and update a session.',
|
||||
help="""
|
||||
Create, or reuse and update a session. Within a session, custom headers,
|
||||
auth credential, as well as any cookies sent by the server persist between
|
||||
requests.
|
||||
|
||||
Session files are stored in:
|
||||
|
||||
{DEFAULT_SESSIONS_DIR}/<HOST>/<SESSION_NAME>.json.
|
||||
[HTTPIE_CONFIG_DIR]/<HOST>/<SESSION_NAME>.json.
|
||||
|
||||
See the following page to find out your default HTTPIE_CONFIG_DIR:
|
||||
|
||||
https://httpie.io/docs/cli/config-file-directory
|
||||
""",
|
||||
)
|
||||
sessions.add_argument(
|
||||
'--session-read-only',
|
||||
metavar='SESSION_NAME_OR_PATH',
|
||||
type=session_name_validator,
|
||||
short_help='Create or read a session without updating it',
|
||||
help="""
|
||||
Create or read a session without updating it from the request/response
|
||||
exchange.
|
||||
@ -571,33 +615,46 @@ sessions.add_argument(
|
||||
#######################################################################
|
||||
|
||||
|
||||
def format_auth_help(auth_plugins_mapping):
|
||||
auth_plugins = list(auth_plugins_mapping.values())
|
||||
return """
|
||||
def format_auth_help(auth_plugins_mapping, *, isolation_mode: bool = False):
|
||||
text = """
|
||||
The authentication mechanism to be used. Defaults to "{default}".
|
||||
|
||||
{types}
|
||||
{auth_types}
|
||||
"""
|
||||
|
||||
""".format(
|
||||
auth_plugins = list(auth_plugins_mapping.values())
|
||||
if isolation_mode:
|
||||
auth_plugins = [
|
||||
auth_plugin
|
||||
for auth_plugin in auth_plugins
|
||||
if issubclass(auth_plugin, BuiltinAuthPlugin)
|
||||
]
|
||||
text += '\n'
|
||||
text += 'To find out all available authentication types in your system, try:\n\n'
|
||||
text += ' $ http --auth-type'
|
||||
|
||||
auth_types = '\n\n '.join(
|
||||
'"{type}": {name}{package}{description}'.format(
|
||||
type=plugin.auth_type,
|
||||
name=plugin.name,
|
||||
package=(
|
||||
''
|
||||
if issubclass(plugin, BuiltinAuthPlugin)
|
||||
else f' (provided by {plugin.package_name})'
|
||||
),
|
||||
description=(
|
||||
''
|
||||
if not plugin.description
|
||||
else '\n '
|
||||
+ ('\n '.join(textwrap.wrap(plugin.description)))
|
||||
),
|
||||
)
|
||||
for plugin in auth_plugins
|
||||
)
|
||||
|
||||
return text.format(
|
||||
default=auth_plugins[0].auth_type,
|
||||
types='\n '.join(
|
||||
'"{type}": {name}{package}{description}'.format(
|
||||
type=plugin.auth_type,
|
||||
name=plugin.name,
|
||||
package=(
|
||||
''
|
||||
if issubclass(plugin, BuiltinAuthPlugin)
|
||||
else f' (provided by {plugin.package_name})'
|
||||
),
|
||||
description=(
|
||||
''
|
||||
if not plugin.description
|
||||
else '\n '
|
||||
+ ('\n '.join(textwrap.wrap(plugin.description)))
|
||||
),
|
||||
)
|
||||
for plugin in auth_plugins
|
||||
),
|
||||
auth_types=auth_types,
|
||||
)
|
||||
|
||||
|
||||
@ -608,6 +665,7 @@ authentication.add_argument(
|
||||
'-a',
|
||||
default=None,
|
||||
metavar='USER[:PASS] | TOKEN',
|
||||
short_help='Credentials for the selected (-A) authentication method.',
|
||||
help="""
|
||||
For username/password based authentication mechanisms (e.g
|
||||
basic auth or digest auth) if only the username is provided
|
||||
@ -623,16 +681,14 @@ authentication.add_argument(
|
||||
getter=plugin_manager.get_auth_plugin_mapping,
|
||||
sort=True,
|
||||
cache=False,
|
||||
short_help='The authentication mechanism to be used.',
|
||||
help_formatter=format_auth_help,
|
||||
)
|
||||
authentication.add_argument(
|
||||
'--ignore-netrc',
|
||||
default=False,
|
||||
action='store_true',
|
||||
help="""
|
||||
Ignore credentials from .netrc.
|
||||
|
||||
""",
|
||||
short_help='Ignore credentials from .netrc.'
|
||||
)
|
||||
|
||||
#######################################################################
|
||||
@ -645,9 +701,7 @@ network.add_argument(
|
||||
'--offline',
|
||||
default=False,
|
||||
action='store_true',
|
||||
help="""
|
||||
Build the request and print it but don’t actually send it.
|
||||
""",
|
||||
short_help='Build the request and print it but don’t actually send it.'
|
||||
)
|
||||
network.add_argument(
|
||||
'--proxy',
|
||||
@ -655,6 +709,7 @@ network.add_argument(
|
||||
action='append',
|
||||
metavar='PROTOCOL:PROXY_URL',
|
||||
type=KeyValueArgType(SEPARATOR_PROXY),
|
||||
short_help='String mapping of protocol to the URL of the proxy.',
|
||||
help="""
|
||||
String mapping protocol to the URL of the proxy
|
||||
(e.g. http:http://foo.bar:3128). You can specify multiple proxies with
|
||||
@ -668,16 +723,14 @@ network.add_argument(
|
||||
'-F',
|
||||
default=False,
|
||||
action='store_true',
|
||||
help="""
|
||||
Follow 30x Location redirects.
|
||||
|
||||
""",
|
||||
short_help='Follow 30x Location redirects.'
|
||||
)
|
||||
|
||||
network.add_argument(
|
||||
'--max-redirects',
|
||||
type=int,
|
||||
default=30,
|
||||
short_help='The maximum number of redirects that should be followed (with --follow).',
|
||||
help="""
|
||||
By default, requests have a limit of 30 redirects (works with --follow).
|
||||
|
||||
@ -687,11 +740,10 @@ network.add_argument(
|
||||
'--max-headers',
|
||||
type=int,
|
||||
default=0,
|
||||
help="""
|
||||
The maximum number of response headers to be read before giving up
|
||||
(default 0, i.e., no limit).
|
||||
|
||||
""",
|
||||
short_help=(
|
||||
'The maximum number of response headers to be read before '
|
||||
'giving up (default 0, i.e., no limit).'
|
||||
)
|
||||
)
|
||||
|
||||
network.add_argument(
|
||||
@ -699,6 +751,7 @@ network.add_argument(
|
||||
type=float,
|
||||
default=0,
|
||||
metavar='SECONDS',
|
||||
short_help='The connection timeout of the request in seconds.',
|
||||
help="""
|
||||
The connection timeout of the request in seconds.
|
||||
The default value is 0, i.e., there is no timeout limit.
|
||||
@ -713,6 +766,7 @@ network.add_argument(
|
||||
'--check-status',
|
||||
default=False,
|
||||
action='store_true',
|
||||
short_help='Exit with an error status code if the server replies with an error.',
|
||||
help="""
|
||||
By default, HTTPie exits with 0 when no network or other fatal errors
|
||||
occur. This flag instructs HTTPie to also check the HTTP status code and
|
||||
@ -729,20 +783,16 @@ network.add_argument(
|
||||
'--path-as-is',
|
||||
default=False,
|
||||
action='store_true',
|
||||
help="""
|
||||
Bypass dot segment (/../ or /./) URL squashing.
|
||||
|
||||
""",
|
||||
short_help='Bypass dot segment (/../ or /./) URL squashing.'
|
||||
)
|
||||
network.add_argument(
|
||||
'--chunked',
|
||||
default=False,
|
||||
action='store_true',
|
||||
help="""
|
||||
Enable streaming via chunked transfer encoding.
|
||||
The Transfer-Encoding header is set to chunked.
|
||||
|
||||
""",
|
||||
short_help=(
|
||||
'Enable streaming via chunked transfer encoding. '
|
||||
'The Transfer-Encoding header is set to chunked.'
|
||||
)
|
||||
)
|
||||
|
||||
#######################################################################
|
||||
@ -754,6 +804,7 @@ ssl = options.add_group('SSL')
|
||||
ssl.add_argument(
|
||||
'--verify',
|
||||
default='yes',
|
||||
short_help='If "no", skip SSL verification. If a file path, use it as a CA bundle.',
|
||||
help="""
|
||||
Set to "no" (or "false") to skip checking the host's SSL certificate.
|
||||
Defaults to "yes" ("true"). You can also pass the path to a CA_BUNDLE file
|
||||
@ -765,6 +816,7 @@ ssl.add_argument(
|
||||
'--ssl',
|
||||
dest='ssl_version',
|
||||
choices=sorted(AVAILABLE_SSL_VERSION_ARG_MAPPING.keys()),
|
||||
short_help='The desired protocol version to use.',
|
||||
help="""
|
||||
The desired protocol version to use. This will default to
|
||||
SSL v2.3 which will negotiate the highest protocol that both
|
||||
@ -776,6 +828,7 @@ ssl.add_argument(
|
||||
)
|
||||
ssl.add_argument(
|
||||
'--ciphers',
|
||||
short_help='A string in the OpenSSL cipher list format.',
|
||||
help=f"""
|
||||
|
||||
A string in the OpenSSL cipher list format. By default, the following
|
||||
@ -789,6 +842,7 @@ ssl.add_argument(
|
||||
'--cert',
|
||||
default=None,
|
||||
type=readable_file_arg,
|
||||
short_help='Specifies a local cert to use as client side SSL certificate.',
|
||||
help="""
|
||||
You can specify a local cert to use as client side SSL certificate.
|
||||
This file may either contain both private key and certificate or you may
|
||||
@ -800,6 +854,7 @@ ssl.add_argument(
|
||||
'--cert-key',
|
||||
default=None,
|
||||
type=readable_file_arg,
|
||||
short_help='The private key to use with SSL. Only needed if --cert is given.',
|
||||
help="""
|
||||
The private key to use with SSL. Only needed if --cert is given and the
|
||||
certificate file does not contain the private key.
|
||||
@ -811,11 +866,12 @@ ssl.add_argument(
|
||||
'--cert-key-pass',
|
||||
default=None,
|
||||
type=SSLCredentials,
|
||||
help='''
|
||||
short_help='The passphrase to be used with the given private key.',
|
||||
help="""
|
||||
The passphrase to be used with the given private key. Only needed if --cert-key
|
||||
is given and the key file requires a passphrase.
|
||||
If not provided, you’ll be prompted interactively.
|
||||
'''
|
||||
"""
|
||||
)
|
||||
|
||||
#######################################################################
|
||||
@ -828,50 +884,42 @@ troubleshooting.add_argument(
|
||||
'-I',
|
||||
action='store_true',
|
||||
default=False,
|
||||
help="""
|
||||
Do not attempt to read stdin.
|
||||
|
||||
""",
|
||||
short_help='Do not attempt to read stdin.'
|
||||
)
|
||||
troubleshooting.add_argument(
|
||||
'--help',
|
||||
action='help',
|
||||
default=Qualifiers.SUPPRESS,
|
||||
help="""
|
||||
Show this help message and exit.
|
||||
|
||||
""",
|
||||
short_help='Show this help message and exit.',
|
||||
)
|
||||
troubleshooting.add_argument(
|
||||
'--manual',
|
||||
action='manual',
|
||||
default=Qualifiers.SUPPRESS,
|
||||
short_help='Show the full manual.',
|
||||
)
|
||||
troubleshooting.add_argument(
|
||||
'--version',
|
||||
action='version',
|
||||
version=__version__,
|
||||
help="""
|
||||
Show version and exit.
|
||||
|
||||
""",
|
||||
short_help='Show version and exit.',
|
||||
)
|
||||
troubleshooting.add_argument(
|
||||
'--traceback',
|
||||
action='store_true',
|
||||
default=False,
|
||||
help="""
|
||||
Prints the exception traceback should one occur.
|
||||
|
||||
""",
|
||||
short_help='Prints the exception traceback should one occur.',
|
||||
)
|
||||
troubleshooting.add_argument(
|
||||
'--default-scheme',
|
||||
default='http',
|
||||
help="""
|
||||
The default scheme to use if not specified in the URL.
|
||||
|
||||
""",
|
||||
short_help='The default scheme to use if not specified in the URL.'
|
||||
)
|
||||
troubleshooting.add_argument(
|
||||
'--debug',
|
||||
action='store_true',
|
||||
default=False,
|
||||
short_help='Print useful diagnostic information for bug reports.',
|
||||
help="""
|
||||
Prints the exception traceback should one occur, as well as other
|
||||
information useful for debugging HTTPie itself and for reporting bugs.
|
||||
|
@ -35,6 +35,16 @@ class HTTPHeadersDict(CIMultiDict, BaseMultiDict):
|
||||
|
||||
super().add(key, value)
|
||||
|
||||
def remove_item(self, key, value):
|
||||
"""
|
||||
Remove a (key, value) pair from the dict.
|
||||
"""
|
||||
existing_values = self.popall(key)
|
||||
existing_values.remove(value)
|
||||
|
||||
for value in existing_values:
|
||||
self.add(key, value)
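A minimal usage sketch of the method above (header name and values are invented): one value of a repeated header is removed while the remaining values are re-added in order.

headers = HTTPHeadersDict()
headers.add('Cookie', 'a=1')
headers.add('Cookie', 'b=2')
headers.remove_item('Cookie', 'a=1')
# Only 'b=2' remains under 'Cookie'.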
|
||||
|
||||
|
||||
class RequestJSONDataDict(OrderedDict):
|
||||
pass
|
||||
|
@ -9,8 +9,14 @@ from typing import (
|
||||
Type,
|
||||
Union,
|
||||
)
|
||||
from httpie.cli.dicts import NestedJSONArray
|
||||
from httpie.cli.constants import EMPTY_STRING, OPEN_BRACKET, CLOSE_BRACKET, BACKSLASH, HIGHLIGHTER
|
||||
from .dicts import NestedJSONArray
|
||||
|
||||
|
||||
EMPTY_STRING = ''
|
||||
HIGHLIGHTER = '^'
|
||||
OPEN_BRACKET = '['
|
||||
CLOSE_BRACKET = ']'
|
||||
BACKSLASH = '\\'
|
||||
|
||||
|
||||
class HTTPieSyntaxError(ValueError):
|
||||
@ -31,7 +37,7 @@ class HTTPieSyntaxError(ValueError):
|
||||
if self.token is not None:
|
||||
lines.append(self.source)
|
||||
lines.append(
|
||||
' ' * (self.token.start)
|
||||
' ' * self.token.start
|
||||
+ HIGHLIGHTER * (self.token.end - self.token.start)
|
||||
)
|
||||
return '\n'.join(lines)
|
||||
@ -51,9 +57,15 @@ class TokenKind(Enum):
|
||||
return 'a ' + self.name.lower()
|
||||
|
||||
|
||||
OPERATORS = {OPEN_BRACKET: TokenKind.LEFT_BRACKET, CLOSE_BRACKET: TokenKind.RIGHT_BRACKET}
|
||||
OPERATORS = {
|
||||
OPEN_BRACKET: TokenKind.LEFT_BRACKET,
|
||||
CLOSE_BRACKET: TokenKind.RIGHT_BRACKET,
|
||||
}
|
||||
SPECIAL_CHARS = OPERATORS.keys() | {BACKSLASH}
|
||||
LITERAL_TOKENS = [TokenKind.TEXT, TokenKind.NUMBER]
|
||||
LITERAL_TOKENS = [
|
||||
TokenKind.TEXT,
|
||||
TokenKind.NUMBER,
|
||||
]
|
||||
|
||||
|
||||
class Token(NamedTuple):
|
||||
|
@ -3,15 +3,16 @@ import textwrap
|
||||
import typing
|
||||
from dataclasses import dataclass, field
|
||||
from enum import Enum, auto
|
||||
from typing import Any, Optional, Dict, List, Type, TypeVar
|
||||
from typing import Any, Optional, Dict, List, Tuple, Type, TypeVar
|
||||
|
||||
from httpie.cli.argparser import HTTPieArgumentParser
|
||||
from httpie.cli.utils import LazyChoices
|
||||
from httpie.cli.utils import Manual, LazyChoices
|
||||
|
||||
|
||||
class Qualifiers(Enum):
|
||||
OPTIONAL = auto()
|
||||
ZERO_OR_MORE = auto()
|
||||
ONE_OR_MORE = auto()
|
||||
SUPPRESS = auto()
|
||||
|
||||
|
||||
@ -24,6 +25,16 @@ def map_qualifiers(
|
||||
}
|
||||
|
||||
|
||||
def drop_keys(
|
||||
configuration: Dict[str, Any], key_blacklist: Tuple[str, ...]
|
||||
):
|
||||
return {
|
||||
key: value
|
||||
for key, value in configuration.items()
|
||||
if key not in key_blacklist
|
||||
}
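An illustrative call (values invented); with ARGPARSE_IGNORE_KEYS further below being ('short_help', 'nested_options'), the HTTPie-specific keys are stripped before the configuration reaches argparse.

drop_keys(
    {'help': 'Show version.', 'short_help': 'Version.', 'nested_options': []},
    ('short_help', 'nested_options'),
)
# -> {'help': 'Show version.'}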
|
||||
|
||||
|
||||
PARSER_SPEC_VERSION = '0.0.1a0'
|
||||
|
||||
|
||||
@ -33,6 +44,8 @@ class ParserSpec:
|
||||
description: Optional[str] = None
|
||||
epilog: Optional[str] = None
|
||||
groups: List['Group'] = field(default_factory=list)
|
||||
man_page_hint: Optional[str] = None
|
||||
source_file: Optional[str] = None
|
||||
|
||||
def finalize(self) -> 'ParserSpec':
|
||||
if self.description:
|
||||
@ -69,6 +82,7 @@ class Group:
|
||||
|
||||
def add_argument(self, *args, **kwargs):
|
||||
argument = Argument(list(args), kwargs.copy())
|
||||
argument.post_init()
|
||||
self.arguments.append(argument)
|
||||
return argument
|
||||
|
||||
@ -85,14 +99,32 @@ class Argument(typing.NamedTuple):
|
||||
aliases: List[str]
|
||||
configuration: Dict[str, Any]
|
||||
|
||||
def serialize(self) -> Dict[str, Any]:
|
||||
def post_init(self):
|
||||
"""Run a bunch of post-init hooks."""
|
||||
# If there is a short help, then create the longer version from it.
|
||||
short_help = self.configuration.get('short_help')
|
||||
if (
|
||||
short_help
|
||||
and 'help' not in self.configuration
|
||||
and self.configuration.get('action') != 'lazy_choices'
|
||||
):
|
||||
self.configuration['help'] = f'\n{short_help}\n\n'
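A small sketch of the hook above (the option name is invented): an argument declared only with short_help gets an auto-generated long help, padded with blank lines for --help output.

arg = Argument(['--demo'], {'short_help': 'Demo flag.'})
arg.post_init()
arg.configuration['help']  # '\nDemo flag.\n\n'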
|
||||
|
||||
def serialize(self, *, isolation_mode: bool = False) -> Dict[str, Any]:
|
||||
configuration = self.configuration.copy()
|
||||
|
||||
# Unpack the dynamically computed choices, since we
|
||||
# will need to store the actual values somewhere.
|
||||
action = configuration.pop('action', None)
|
||||
short_help = configuration.pop('short_help', None)
|
||||
nested_options = configuration.pop('nested_options', None)
|
||||
|
||||
if action == 'lazy_choices':
|
||||
choices = LazyChoices(self.aliases, **{'dest': None, **configuration})
|
||||
choices = LazyChoices(
|
||||
self.aliases,
|
||||
**{'dest': None, **configuration},
|
||||
isolation_mode=isolation_mode
|
||||
)
|
||||
configuration['choices'] = list(choices.load())
|
||||
configuration['help'] = choices.help
|
||||
|
||||
@ -106,9 +138,13 @@ class Argument(typing.NamedTuple):
|
||||
qualifiers = JSON_QUALIFIER_TO_OPTIONS[configuration.get('nargs', Qualifiers.SUPPRESS)]
|
||||
result.update(qualifiers)
|
||||
|
||||
help_msg = configuration.get('help')
|
||||
if help_msg and help_msg is not Qualifiers.SUPPRESS:
|
||||
result['description'] = help_msg.strip()
|
||||
description = configuration.get('help')
|
||||
if description and description is not Qualifiers.SUPPRESS:
|
||||
result['short_description'] = short_help
|
||||
result['description'] = description
|
||||
|
||||
if nested_options:
|
||||
result['nested_options'] = nested_options
|
||||
|
||||
python_type = configuration.get('type')
|
||||
if python_type is not None:
|
||||
@ -123,10 +159,19 @@ class Argument(typing.NamedTuple):
|
||||
key: value
|
||||
for key, value in configuration.items()
|
||||
if key in JSON_DIRECT_MIRROR_OPTIONS
|
||||
if value is not Qualifiers.SUPPRESS
|
||||
})
|
||||
|
||||
return result
|
||||
|
||||
@property
|
||||
def is_positional(self):
|
||||
return len(self.aliases) == 0
|
||||
|
||||
@property
|
||||
def is_hidden(self):
|
||||
return self.configuration.get('help') is Qualifiers.SUPPRESS
|
||||
|
||||
def __getattr__(self, attribute_name):
|
||||
if attribute_name in self.configuration:
|
||||
return self.configuration[attribute_name]
|
||||
@ -140,7 +185,9 @@ ARGPARSE_QUALIFIER_MAP = {
|
||||
Qualifiers.OPTIONAL: argparse.OPTIONAL,
|
||||
Qualifiers.SUPPRESS: argparse.SUPPRESS,
|
||||
Qualifiers.ZERO_OR_MORE: argparse.ZERO_OR_MORE,
|
||||
Qualifiers.ONE_OR_MORE: argparse.ONE_OR_MORE
|
||||
}
|
||||
ARGPARSE_IGNORE_KEYS = ('short_help', 'nested_options')
|
||||
|
||||
|
||||
def to_argparse(
|
||||
@ -152,7 +199,9 @@ def to_argparse(
|
||||
description=abstract_options.description,
|
||||
epilog=abstract_options.epilog,
|
||||
)
|
||||
concrete_parser.spec = abstract_options
|
||||
concrete_parser.register('action', 'lazy_choices', LazyChoices)
|
||||
concrete_parser.register('action', 'manual', Manual)
|
||||
|
||||
for abstract_group in abstract_options.groups:
|
||||
concrete_group = concrete_parser.add_argument_group(
|
||||
@ -164,9 +213,9 @@ def to_argparse(
|
||||
for abstract_argument in abstract_group.arguments:
|
||||
concrete_group.add_argument(
|
||||
*abstract_argument.aliases,
|
||||
**map_qualifiers(
|
||||
**drop_keys(map_qualifiers(
|
||||
abstract_argument.configuration, ARGPARSE_QUALIFIER_MAP
|
||||
)
|
||||
), ARGPARSE_IGNORE_KEYS)
|
||||
)
|
||||
|
||||
return concrete_parser
|
||||
@ -181,9 +230,20 @@ JSON_DIRECT_MIRROR_OPTIONS = (
|
||||
JSON_QUALIFIER_TO_OPTIONS = {
|
||||
Qualifiers.OPTIONAL: {'is_optional': True},
|
||||
Qualifiers.ZERO_OR_MORE: {'is_optional': True, 'is_variadic': True},
|
||||
Qualifiers.ONE_OR_MORE: {'is_optional': False, 'is_variadic': True},
|
||||
Qualifiers.SUPPRESS: {}
|
||||
}
|
||||
|
||||
|
||||
def to_data(abstract_options: ParserSpec) -> Dict[str, Any]:
|
||||
return {'version': PARSER_SPEC_VERSION, 'spec': abstract_options.serialize()}
|
||||
|
||||
|
||||
def parser_to_parser_spec(parser: argparse.ArgumentParser, **kwargs) -> ParserSpec:
|
||||
"""Take an existing argparse parser, and create a spec from it."""
|
||||
return ParserSpec(
|
||||
program=parser.prog,
|
||||
description=parser.description,
|
||||
epilog=parser.epilog,
|
||||
**kwargs
|
||||
)
|
||||
|
@ -4,20 +4,43 @@ from typing import Any, Callable, Generic, Iterator, Iterable, Optional, TypeVar
|
||||
T = TypeVar('T')
|
||||
|
||||
|
||||
class Manual(argparse.Action):
|
||||
def __init__(
|
||||
self,
|
||||
option_strings,
|
||||
dest=argparse.SUPPRESS,
|
||||
default=argparse.SUPPRESS,
|
||||
help=None
|
||||
):
|
||||
super().__init__(
|
||||
option_strings=option_strings,
|
||||
dest=dest,
|
||||
default=default,
|
||||
nargs=0,
|
||||
help=help
|
||||
)
|
||||
|
||||
def __call__(self, parser, namespace, values, option_string=None):
|
||||
parser.print_manual()
|
||||
parser.exit()
|
||||
|
||||
|
||||
class LazyChoices(argparse.Action, Generic[T]):
|
||||
def __init__(
|
||||
self,
|
||||
*args,
|
||||
getter: Callable[[], Iterable[T]],
|
||||
help_formatter: Optional[Callable[[T], str]] = None,
|
||||
help_formatter: Optional[Callable[[T, bool], str]] = None,
|
||||
sort: bool = False,
|
||||
cache: bool = True,
|
||||
isolation_mode: bool = False,
|
||||
**kwargs
|
||||
) -> None:
|
||||
self.getter = getter
|
||||
self.help_formatter = help_formatter
|
||||
self.sort = sort
|
||||
self.cache = cache
|
||||
self.isolation_mode = isolation_mode
|
||||
self._help: Optional[str] = None
|
||||
self._obj: Optional[Iterable[T]] = None
|
||||
super().__init__(*args, **kwargs)
|
||||
@ -33,7 +56,10 @@ class LazyChoices(argparse.Action, Generic[T]):
|
||||
@property
|
||||
def help(self) -> str:
|
||||
if self._help is None and self.help_formatter is not None:
|
||||
self._help = self.help_formatter(self.load())
|
||||
self._help = self.help_formatter(
|
||||
self.load(),
|
||||
isolation_mode=self.isolation_mode
|
||||
)
|
||||
return self._help
|
||||
|
||||
@help.setter
|
||||
|
@ -13,7 +13,8 @@ import urllib3
|
||||
from . import __version__
|
||||
from .adapters import HTTPieHTTPAdapter
|
||||
from .context import Environment
|
||||
from .cli.constants import EMPTY_STRING
|
||||
from .cli.constants import HTTP_OPTIONS
|
||||
from .cli.nested_json import EMPTY_STRING
|
||||
from .cli.dicts import HTTPHeadersDict, NestedJSONArray
|
||||
from .encoding import UTF8
|
||||
from .models import RequestsMessage
|
||||
@ -34,6 +35,8 @@ JSON_CONTENT_TYPE = 'application/json'
|
||||
JSON_ACCEPT = f'{JSON_CONTENT_TYPE}, */*;q=0.5'
|
||||
DEFAULT_UA = f'HTTPie/{__version__}'
|
||||
|
||||
IGNORE_CONTENT_LENGTH_METHODS = frozenset([HTTP_OPTIONS])
|
||||
|
||||
|
||||
def collect_messages(
|
||||
env: Environment,
|
||||
@ -85,7 +88,7 @@ def collect_messages(
|
||||
|
||||
request = requests.Request(**request_kwargs)
|
||||
prepared_request = requests_session.prepare_request(request)
|
||||
apply_missing_repeated_headers(prepared_request, request.headers)
|
||||
transform_headers(request, prepared_request)
|
||||
if args.path_as_is:
|
||||
prepared_request.url = ensure_path_as_is(
|
||||
orig_url=args.url,
|
||||
@ -200,9 +203,30 @@ def finalize_headers(headers: HTTPHeadersDict) -> HTTPHeadersDict:
|
||||
return final_headers
|
||||
|
||||
|
||||
def transform_headers(
|
||||
request: requests.Request,
|
||||
prepared_request: requests.PreparedRequest
|
||||
) -> None:
|
||||
"""Apply various transformations on top of the `prepared_requests`'s
|
||||
headers to change the request preparation behavior."""
|
||||
|
||||
# Remove 'Content-Length' when it is misplaced by requests.
|
||||
if (
|
||||
prepared_request.method in IGNORE_CONTENT_LENGTH_METHODS
|
||||
and prepared_request.headers.get('Content-Length') == '0'
|
||||
and request.headers.get('Content-Length') != '0'
|
||||
):
|
||||
prepared_request.headers.pop('Content-Length')
|
||||
|
||||
apply_missing_repeated_headers(
|
||||
request.headers,
|
||||
prepared_request
|
||||
)
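For context, a rough sketch of the case the Content-Length guard above handles (URL invented): requests adds a zero Content-Length while preparing bodyless non-GET requests, and for OPTIONS it is dropped again unless the user set it explicitly.

import requests

request = requests.Request('OPTIONS', 'https://example.org')
prepared = requests.Session().prepare_request(request)
# prepared.headers typically now carries 'Content-Length: 0' even though
# request.headers does not -- exactly the situation transform_headers() undoes.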
|
||||
|
||||
|
||||
def apply_missing_repeated_headers(
|
||||
prepared_request: requests.PreparedRequest,
|
||||
original_headers: HTTPHeadersDict
|
||||
original_headers: HTTPHeadersDict,
|
||||
prepared_request: requests.PreparedRequest
|
||||
) -> None:
|
||||
"""Update the given `prepared_request`'s headers with the original
|
||||
ones. This allows the requests to be prepared as usual, and then later
|
||||
|
@ -1,9 +1,21 @@
|
||||
import sys
|
||||
from typing import Any, Optional, Iterable
|
||||
|
||||
from httpie.cookies import HTTPieCookiePolicy
|
||||
from http import cookiejar # noqa
|
||||
|
||||
|
||||
# Requests does not carry the original policy attached to the
|
||||
# cookie jar, so until it is resolved we change the global cookie
|
||||
# policy. <https://github.com/psf/requests/issues/5449>
|
||||
cookiejar.DefaultCookiePolicy = HTTPieCookiePolicy
|
||||
|
||||
|
||||
is_windows = 'win32' in str(sys.platform).lower()
|
||||
is_frozen = getattr(sys, 'frozen', False)
|
||||
|
||||
MIN_SUPPORTED_PY_VERSION = (3, 7)
|
||||
MAX_SUPPORTED_PY_VERSION = (3, 11)
|
||||
|
||||
try:
|
||||
from functools import cached_property
|
||||
|
@ -149,6 +149,24 @@ class Config(BaseConfigDict):
|
||||
def default_options(self) -> list:
|
||||
return self['default_options']
|
||||
|
||||
def _configured_path(self, config_option: str, default: str) -> None:
|
||||
return Path(
|
||||
self.get(config_option, self.directory / default)
|
||||
).expanduser().resolve()
|
||||
|
||||
@property
|
||||
def plugins_dir(self) -> Path:
|
||||
return Path(self.get('plugins_dir', self.directory / 'plugins')).resolve()
|
||||
return self._configured_path('plugins_dir', 'plugins')
|
||||
|
||||
@property
|
||||
def version_info_file(self) -> Path:
|
||||
return self._configured_path('version_info_file', 'version_info.json')
|
||||
|
||||
@property
|
||||
def developer_mode(self) -> bool:
|
||||
"""This is a special setting for the development environment. It is
|
||||
different from the --debug mode in that it might change
|
||||
the behavior for certain parameters (e.g updater system) that
|
||||
we usually ignore."""
|
||||
|
||||
return self.get('developer_mode')
|
||||
|
@ -1,9 +1,10 @@
|
||||
import argparse
|
||||
import sys
|
||||
import os
|
||||
import warnings
|
||||
from contextlib import contextmanager
|
||||
from pathlib import Path
|
||||
from typing import Iterator, IO, Optional
|
||||
from typing import Iterator, IO, Optional, TYPE_CHECKING
|
||||
from enum import Enum
|
||||
|
||||
|
||||
@ -12,21 +13,33 @@ try:
|
||||
except ImportError:
|
||||
curses = None # Compiled w/o curses
|
||||
|
||||
from .compat import is_windows
|
||||
from .compat import is_windows, cached_property
|
||||
from .config import DEFAULT_CONFIG_DIR, Config, ConfigFileError
|
||||
from .encoding import UTF8
|
||||
|
||||
from .utils import repr_dict
|
||||
from .output.ui.palette import GenericColor
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from rich.console import Console
|
||||
|
||||
|
||||
class Levels(str, Enum):
|
||||
class LogLevel(str, Enum):
|
||||
INFO = 'info'
|
||||
WARNING = 'warning'
|
||||
ERROR = 'error'
|
||||
|
||||
|
||||
DISPLAY_THRESHOLDS = {
|
||||
Levels.WARNING: 2,
|
||||
Levels.ERROR: float('inf'), # Never hide errors.
|
||||
LOG_LEVEL_COLORS = {
|
||||
LogLevel.INFO: GenericColor.PINK,
|
||||
LogLevel.WARNING: GenericColor.ORANGE,
|
||||
LogLevel.ERROR: GenericColor.RED,
|
||||
}
|
||||
|
||||
LOG_LEVEL_DISPLAY_THRESHOLDS = {
|
||||
LogLevel.INFO: 1,
|
||||
LogLevel.WARNING: 2,
|
||||
LogLevel.ERROR: float('inf'), # Never hide errors.
|
||||
}
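A small sketch of how the thresholds interact with repeated -q flags (the helper name is invented; log_error() below applies the same comparison):

def is_silenced(quiet: int, level: LogLevel) -> bool:
    return quiet >= LOG_LEVEL_DISPLAY_THRESHOLDS[level]

# is_silenced(1, LogLevel.INFO)     -> True   (a single -q hides info messages)
# is_silenced(1, LogLevel.WARNING)  -> False  (warnings need -qq)
# is_silenced(9, LogLevel.ERROR)    -> False  (errors are never hidden)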
|
||||
|
||||
|
||||
@ -40,6 +53,7 @@ class Environment:
|
||||
is used by the test suite to simulate various scenarios.
|
||||
|
||||
"""
|
||||
args = argparse.Namespace()
|
||||
is_windows: bool = is_windows
|
||||
config_dir: Path = DEFAULT_CONFIG_DIR
|
||||
stdin: Optional[IO] = sys.stdin # `None` when closed fd (#791)
|
||||
@ -52,6 +66,10 @@ class Environment:
|
||||
stderr_isatty: bool = stderr.isatty()
|
||||
colors = 256
|
||||
program_name: str = 'http'
|
||||
|
||||
# Whether to show progress bars / status spinners etc.
|
||||
show_displays: bool = True
|
||||
|
||||
if not is_windows:
|
||||
if curses:
|
||||
try:
|
||||
@ -149,14 +167,51 @@ class Environment:
|
||||
self.stdout = original_stdout
|
||||
self.stderr = original_stderr
|
||||
|
||||
def log_error(self, msg: str, level: Levels = Levels.ERROR) -> None:
|
||||
if self.stdout_isatty and self.quiet >= DISPLAY_THRESHOLDS[level]:
|
||||
def log_error(self, msg: str, level: LogLevel = LogLevel.ERROR) -> None:
|
||||
if self.stdout_isatty and self.quiet >= LOG_LEVEL_DISPLAY_THRESHOLDS[level]:
|
||||
stderr = self.stderr # Not directly /dev/null, since stderr might be mocked
|
||||
else:
|
||||
stderr = self._orig_stderr
|
||||
|
||||
stderr.write(f'\n{self.program_name}: {level}: {msg}\n\n')
|
||||
rich_console = self._make_rich_console(file=stderr, force_terminal=stderr.isatty())
|
||||
rich_console.print(
|
||||
f'\n{self.program_name}: {level}: {msg}\n\n',
|
||||
style=LOG_LEVEL_COLORS[level],
|
||||
markup=False,
|
||||
highlight=False,
|
||||
soft_wrap=True
|
||||
)
|
||||
|
||||
def apply_warnings_filter(self) -> None:
|
||||
if self.quiet >= DISPLAY_THRESHOLDS[Levels.WARNING]:
|
||||
if self.quiet >= LOG_LEVEL_DISPLAY_THRESHOLDS[LogLevel.WARNING]:
|
||||
warnings.simplefilter("ignore")
|
||||
|
||||
def _make_rich_console(
|
||||
self,
|
||||
file: IO[str],
|
||||
force_terminal: bool
|
||||
) -> 'Console':
|
||||
from rich.console import Console
|
||||
from httpie.output.ui.rich_palette import _make_rich_color_theme
|
||||
|
||||
style = getattr(self.args, 'style', None)
|
||||
theme = _make_rich_color_theme(style)
|
||||
# Rich infers the rest of the knowledge (e.g encoding)
|
||||
# dynamically by looking at the file/stderr.
|
||||
return Console(
|
||||
file=file,
|
||||
force_terminal=force_terminal,
|
||||
no_color=(self.colors == 0),
|
||||
theme=theme
|
||||
)
|
||||
|
||||
# Rich recommends separating the actual console (stdout) from
|
||||
# the error (stderr) console for better isolation between parts.
|
||||
# https://rich.readthedocs.io/en/stable/console.html#error-console
|
||||
|
||||
@cached_property
|
||||
def rich_console(self):
|
||||
return self._make_rich_console(self.stdout, self.stdout_isatty)
|
||||
|
||||
@cached_property
|
||||
def rich_error_console(self):
|
||||
return self._make_rich_console(self.stderr, self.stderr_isatty)
|
||||
|
25
httpie/cookies.py
Normal file
@ -0,0 +1,25 @@
|
||||
from http import cookiejar
|
||||
|
||||
|
||||
_LOCALHOST = 'localhost'
|
||||
_LOCALHOST_SUFFIX = '.localhost'
|
||||
|
||||
|
||||
class HTTPieCookiePolicy(cookiejar.DefaultCookiePolicy):
|
||||
def return_ok_secure(self, cookie, request):
|
||||
"""Check whether the given cookie is sent to a secure host."""
|
||||
|
||||
is_secure_protocol = super().return_ok_secure(cookie, request)
|
||||
if is_secure_protocol:
|
||||
return True
|
||||
|
||||
# The original implementation of this method only takes secure protocols
|
||||
# (e.g., https) into account, but the latest developments in modern browsers
|
||||
# (chrome, firefox) assume 'localhost' is also a secure location. So we
|
||||
# override it with our own strategy.
|
||||
return self._is_local_host(cookiejar.request_host(request))
|
||||
|
||||
def _is_local_host(self, hostname):
|
||||
# Implements the static localhost detection algorithm in firefox.
|
||||
# <https://searchfox.org/mozilla-central/rev/d4d7611ee4dd0003b492b865bc5988a4e6afc985/netwerk/dns/DNS.cpp#205-218>
|
||||
return hostname == _LOCALHOST or hostname.endswith(_LOCALHOST_SUFFIX)
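A quick behaviour sketch (hostnames invented): with this policy, secure cookies are also returned for localhost and its subdomains over plain HTTP.

policy = HTTPieCookiePolicy()
policy._is_local_host('localhost')       # True
policy._is_local_host('api.localhost')   # True
policy._is_local_host('example.com')     # False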
|
@ -13,7 +13,7 @@ from . import __version__ as httpie_version
|
||||
from .cli.constants import OUT_REQ_BODY
|
||||
from .cli.nested_json import HTTPieSyntaxError
|
||||
from .client import collect_messages
|
||||
from .context import Environment, Levels
|
||||
from .context import Environment, LogLevel
|
||||
from .downloads import Downloader
|
||||
from .models import (
|
||||
RequestsMessageKind,
|
||||
@ -24,6 +24,8 @@ from .output.writer import write_message, write_stream, write_raw_data, MESSAGE_
|
||||
from .plugins.registry import plugin_manager
|
||||
from .status import ExitStatus, http_status_to_exit_status
|
||||
from .utils import unwrap_context
|
||||
from .internal.update_warnings import check_updates
|
||||
from .internal.daemon_runner import is_daemon_mode, run_daemon_task
|
||||
|
||||
|
||||
# noinspection PyDefaultArgument
|
||||
@ -37,6 +39,10 @@ def raw_main(
|
||||
program_name, *args = args
|
||||
env.program_name = os.path.basename(program_name)
|
||||
args = decode_raw_args(args, env.stdin_encoding)
|
||||
|
||||
if is_daemon_mode(args):
|
||||
return run_daemon_task(env, args)
|
||||
|
||||
plugin_manager.load_installed_plugins(env.config.plugins_dir)
|
||||
|
||||
if use_default_options and env.config.default_options:
|
||||
@ -89,6 +95,7 @@ def raw_main(
|
||||
raise
|
||||
exit_status = ExitStatus.ERROR
|
||||
else:
|
||||
check_updates(env)
|
||||
try:
|
||||
exit_status = main_program(
|
||||
args=parsed_args,
|
||||
@ -195,7 +202,7 @@ def program(args: argparse.Namespace, env: Environment) -> ExitStatus:
|
||||
try:
|
||||
if args.download:
|
||||
args.follow = True # --download implies --follow.
|
||||
downloader = Downloader(output_file=args.output_file, progress_file=env.stderr, resume=args.download_resume)
|
||||
downloader = Downloader(env, output_file=args.output_file, resume=args.download_resume)
|
||||
downloader.pre_request(args.headers)
|
||||
messages = collect_messages(env, args=args,
|
||||
request_body_read_callback=request_body_read_callback)
|
||||
@ -223,7 +230,7 @@ def program(args: argparse.Namespace, env: Environment) -> ExitStatus:
|
||||
if args.check_status or downloader:
|
||||
exit_status = http_status_to_exit_status(http_status=message.status_code, follow=args.follow)
|
||||
if exit_status != ExitStatus.SUCCESS and (not env.stdout_isatty or args.quiet == 1):
|
||||
env.log_error(f'HTTP {message.raw.status} {message.raw.reason}', level=Levels.WARNING)
|
||||
env.log_error(f'HTTP {message.raw.status} {message.raw.reason}', level=LogLevel.WARNING)
|
||||
write_message(
|
||||
requests_message=message,
|
||||
env=env,
|
||||
|
@ -5,10 +5,8 @@ Download mode implementation.
|
||||
import mimetypes
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import threading
|
||||
from mailbox import Message
|
||||
from time import sleep, monotonic
|
||||
from time import monotonic
|
||||
from typing import IO, Optional, Tuple
|
||||
from urllib.parse import urlsplit
|
||||
|
||||
@ -16,22 +14,11 @@ import requests
|
||||
|
||||
from .models import HTTPResponse, OutputOptions
|
||||
from .output.streams import RawStream
|
||||
from .utils import humanize_bytes
|
||||
from .context import Environment
|
||||
|
||||
|
||||
PARTIAL_CONTENT = 206
|
||||
|
||||
CLEAR_LINE = '\r\033[K'
|
||||
PROGRESS = (
|
||||
'{percentage: 6.2f} %'
|
||||
' {downloaded: >10}'
|
||||
' {speed: >10}/s'
|
||||
' {eta: >8} ETA'
|
||||
)
|
||||
PROGRESS_NO_CONTENT_LENGTH = '{downloaded: >10} {speed: >10}/s'
|
||||
SUMMARY = 'Done. {downloaded} in {time:0.5f}s ({speed}/s)\n'
|
||||
SPINNER = '|/-\\'
|
||||
|
||||
|
||||
class ContentRangeError(ValueError):
|
||||
pass
|
||||
@ -176,9 +163,9 @@ class Downloader:
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
env: Environment,
|
||||
output_file: IO = None,
|
||||
resume: bool = False,
|
||||
progress_file: IO = sys.stderr
|
||||
resume: bool = False
|
||||
):
|
||||
"""
|
||||
:param resume: Should the download resume if partial download
|
||||
@ -191,14 +178,10 @@ class Downloader:
|
||||
|
||||
"""
|
||||
self.finished = False
|
||||
self.status = DownloadStatus()
|
||||
self.status = DownloadStatus(env=env)
|
||||
self._output_file = output_file
|
||||
self._resume = resume
|
||||
self._resumed_from = 0
|
||||
self._progress_reporter = ProgressReporterThread(
|
||||
status=self.status,
|
||||
output=progress_file
|
||||
)
|
||||
|
||||
def pre_request(self, request_headers: dict):
|
||||
"""Called just before the HTTP request is sent.
|
||||
@ -261,11 +244,6 @@ class Downloader:
|
||||
except OSError:
|
||||
pass # stdout
|
||||
|
||||
self.status.started(
|
||||
resumed_from=self._resumed_from,
|
||||
total_size=total_size
|
||||
)
|
||||
|
||||
output_options = OutputOptions.from_message(final_response, headers=False, body=True)
|
||||
stream = RawStream(
|
||||
msg=HTTPResponse(final_response),
|
||||
@ -273,11 +251,11 @@ class Downloader:
|
||||
on_body_chunk_downloaded=self.chunk_downloaded,
|
||||
)
|
||||
|
||||
self._progress_reporter.output.write(
|
||||
f'Downloading {humanize_bytes(total_size) + " " if total_size is not None else ""}'
|
||||
f'to "{self._output_file.name}"\n'
|
||||
self.status.started(
|
||||
output_file=self._output_file,
|
||||
resumed_from=self._resumed_from,
|
||||
total_size=total_size
|
||||
)
|
||||
self._progress_reporter.start()
|
||||
|
||||
return stream, self._output_file
|
||||
|
||||
@ -287,7 +265,7 @@ class Downloader:
|
||||
self.status.finished()
|
||||
|
||||
def failed(self):
|
||||
self._progress_reporter.stop()
|
||||
self.status.terminate()
|
||||
|
||||
@property
|
||||
def interrupted(self) -> bool:
|
||||
@ -329,127 +307,71 @@ class Downloader:
|
||||
class DownloadStatus:
|
||||
"""Holds details about the download status."""
|
||||
|
||||
def __init__(self):
|
||||
def __init__(self, env):
|
||||
self.env = env
|
||||
self.downloaded = 0
|
||||
self.total_size = None
|
||||
self.resumed_from = 0
|
||||
self.time_started = None
|
||||
self.time_finished = None
|
||||
|
||||
def started(self, resumed_from=0, total_size=None):
|
||||
def started(self, output_file, resumed_from=0, total_size=None):
|
||||
assert self.time_started is None
|
||||
self.total_size = total_size
|
||||
self.downloaded = self.resumed_from = resumed_from
|
||||
self.time_started = monotonic()
|
||||
self.start_display(output_file=output_file)
|
||||
|
||||
def start_display(self, output_file):
|
||||
from httpie.output.ui.rich_progress import (
|
||||
DummyDisplay,
|
||||
StatusDisplay,
|
||||
ProgressDisplay
|
||||
)
|
||||
|
||||
message = f'Downloading to {output_file.name}'
|
||||
if self.env.show_displays:
|
||||
if self.total_size is None:
|
||||
# Rich does not support progress bars without a total
|
||||
# size given. Instead we use status objects.
|
||||
self.display = StatusDisplay(self.env)
|
||||
else:
|
||||
self.display = ProgressDisplay(self.env)
|
||||
else:
|
||||
self.display = DummyDisplay(self.env)
|
||||
|
||||
self.display.start(
|
||||
total=self.total_size,
|
||||
at=self.downloaded,
|
||||
description=message
|
||||
)
|
||||
|
||||
def chunk_downloaded(self, size):
|
||||
assert self.time_finished is None
|
||||
self.downloaded += size
|
||||
self.display.update(size)
|
||||
|
||||
@property
|
||||
def has_finished(self):
|
||||
return self.time_finished is not None
|
||||
|
||||
@property
|
||||
def time_spent(self):
|
||||
if (
|
||||
self.time_started is not None
|
||||
and self.time_finished is not None
|
||||
):
|
||||
return self.time_finished - self.time_started
|
||||
else:
|
||||
return None
|
||||
|
||||
def finished(self):
|
||||
assert self.time_started is not None
|
||||
assert self.time_finished is None
|
||||
self.time_finished = monotonic()
|
||||
if hasattr(self, 'display'):
|
||||
self.display.stop(self.time_spent)
|
||||
|
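The DownloadStatus above only relies on three methods of the display objects it imports from httpie.output.ui.rich_progress (start, update, stop). That module is not part of this excerpt, so the following is a minimal hypothetical sketch of a display honouring the calls made here; the real DummyDisplay/StatusDisplay/ProgressDisplay are assumed to be richer.

    # Hypothetical minimal display matching the interface used by DownloadStatus:
    # start(total=..., at=..., description=...), update(size), stop(time_spent).
    class MinimalDisplay:
        def __init__(self, env):
            self.env = env
            self.observed = 0

        def start(self, *, total, at, description):
            self.total = total
            self.observed = at
            self.env.stderr.write(description + '\n')

        def update(self, size):
            self.observed += size

        def stop(self, time_spent):
            if time_spent is not None:
                self.env.stderr.write(f'Done in {time_spent:0.2f}s\n')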
||||
|
||||
class ProgressReporterThread(threading.Thread):
|
||||
"""
|
||||
Reports download progress based on its status.
|
||||
|
||||
Uses threading to periodically update the status (speed, ETA, etc.).
|
||||
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
status: DownloadStatus,
|
||||
output: IO,
|
||||
tick=.1,
|
||||
update_interval=1
|
||||
):
|
||||
super().__init__()
|
||||
self.status = status
|
||||
self.output = output
|
||||
self._tick = tick
|
||||
self._update_interval = update_interval
|
||||
self._spinner_pos = 0
|
||||
self._status_line = ''
|
||||
self._prev_bytes = 0
|
||||
self._prev_time = monotonic()
|
||||
self._should_stop = threading.Event()
|
||||
|
||||
def stop(self):
|
||||
"""Stop reporting on next tick."""
|
||||
self._should_stop.set()
|
||||
|
||||
def run(self):
|
||||
while not self._should_stop.is_set():
|
||||
if self.status.has_finished:
|
||||
self.sum_up()
|
||||
break
|
||||
|
||||
self.report_speed()
|
||||
sleep(self._tick)
|
||||
|
||||
def report_speed(self):
|
||||
now = monotonic()
|
||||
if now - self._prev_time >= self._update_interval:
|
||||
downloaded = self.status.downloaded
|
||||
speed = ((downloaded - self._prev_bytes)
|
||||
/ (now - self._prev_time))
|
||||
|
||||
if not self.status.total_size:
|
||||
self._status_line = PROGRESS_NO_CONTENT_LENGTH.format(
|
||||
downloaded=humanize_bytes(downloaded),
|
||||
speed=humanize_bytes(speed),
|
||||
)
|
||||
else:
|
||||
percentage = (downloaded / self.status.total_size * 100
|
||||
if self.status.total_size
|
||||
else 0)
|
||||
|
||||
if not speed:
|
||||
eta = '-:--:--'
|
||||
else:
|
||||
s = int((self.status.total_size - downloaded) / speed)
|
||||
h, s = divmod(s, 60 * 60)
|
||||
m, s = divmod(s, 60)
|
||||
eta = f'{h}:{m:0>2}:{s:0>2}'
|
||||
|
||||
self._status_line = PROGRESS.format(
|
||||
percentage=percentage,
|
||||
downloaded=humanize_bytes(downloaded),
|
||||
speed=humanize_bytes(speed),
|
||||
eta=eta,
|
||||
)
|
||||
|
||||
self._prev_time = now
|
||||
self._prev_bytes = downloaded
|
||||
|
||||
self.output.write(
|
||||
f'{CLEAR_LINE} {SPINNER[self._spinner_pos]} {self._status_line}'
|
||||
)
|
||||
self.output.flush()
|
||||
|
||||
self._spinner_pos = (self._spinner_pos + 1) % len(SPINNER)
|
||||
|
||||
def sum_up(self):
|
||||
actually_downloaded = (
|
||||
self.status.downloaded - self.status.resumed_from)
|
||||
time_taken = self.status.time_finished - self.status.time_started
|
||||
speed = actually_downloaded / time_taken if time_taken else actually_downloaded
|
||||
|
||||
self.output.write(CLEAR_LINE)
|
||||
|
||||
self.output.write(SUMMARY.format(
|
||||
downloaded=humanize_bytes(actually_downloaded),
|
||||
total=(self.status.total_size
|
||||
and humanize_bytes(self.status.total_size)),
|
||||
speed=humanize_bytes(speed),
|
||||
time=time_taken,
|
||||
))
|
||||
self.output.flush()
|
||||
def terminate(self):
|
||||
if hasattr(self, 'display'):
|
||||
self.display.stop(self.time_spent)
|
||||
|
5
httpie/internal/__build_channel__.py
Normal file
@ -0,0 +1,5 @@
|
||||
# Represents the packaging method. This file should
|
||||
# be overridden by every build system we support during
|
||||
# the packaging step.
|
||||
|
||||
BUILD_CHANNEL = 'unknown'
|
0
httpie/internal/__init__.py
Normal file
50
httpie/internal/daemon_runner.py
Normal file
@ -0,0 +1,50 @@
|
||||
import argparse
|
||||
from contextlib import redirect_stderr, redirect_stdout
|
||||
from typing import List
|
||||
|
||||
from httpie.context import Environment
|
||||
from httpie.internal.update_warnings import _fetch_updates, _get_suppress_context
|
||||
from httpie.status import ExitStatus
|
||||
|
||||
STATUS_FILE = '.httpie-test-daemon-status'
|
||||
|
||||
|
||||
def _check_status(env):
|
||||
# This function is used only for testing (test_update_warnings).
|
||||
# Since we don't want to trigger the fetch_updates (which would interact
|
||||
# with real world resources), we'll only trigger this pseudo task
|
||||
# and check whether the STATUS_FILE is created or not.
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
|
||||
status_file = Path(tempfile.gettempdir()) / STATUS_FILE
|
||||
status_file.touch()
|
||||
|
||||
|
||||
DAEMONIZED_TASKS = {
|
||||
'check_status': _check_status,
|
||||
'fetch_updates': _fetch_updates,
|
||||
}
|
||||
|
||||
|
||||
def _parse_options(args: List[str]) -> argparse.Namespace:
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument('task_id')
|
||||
parser.add_argument('--daemon', action='store_true')
|
||||
return parser.parse_known_args(args)[0]
|
||||
|
||||
|
||||
def is_daemon_mode(args: List[str]) -> bool:
|
||||
return '--daemon' in args
|
||||
|
||||
|
||||
def run_daemon_task(env: Environment, args: List[str]) -> ExitStatus:
|
||||
options = _parse_options(args)
|
||||
|
||||
assert options.daemon
|
||||
assert options.task_id in DAEMONIZED_TASKS
|
||||
with redirect_stdout(env.devnull), redirect_stderr(env.devnull):
|
||||
with _get_suppress_context(env):
|
||||
DAEMONIZED_TASKS[options.task_id](env)
|
||||
|
||||
return ExitStatus.SUCCESS
|
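For illustration, here is how the runner above could be exercised directly; the argument list mirrors what httpie.internal.daemons builds (the task id followed by --daemon). This is a hypothetical snippet assuming a default Environment(), not code from the diff.

    from httpie.context import Environment
    from httpie.internal.daemon_runner import is_daemon_mode, run_daemon_task

    args = ['fetch_updates', '--daemon']
    if is_daemon_mode(args):
        # Dispatches to DAEMONIZED_TASKS['fetch_updates'] with stdout/stderr silenced.
        exit_status = run_daemon_task(Environment(), args)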
121
httpie/internal/daemons.py
Normal file
@ -0,0 +1,121 @@
|
||||
"""
|
||||
This module provides an interface to spawn a detached task to be
|
||||
run with httpie.internal.daemon_runner on a separate process. It is
|
||||
based on DVC's daemon system.
|
||||
https://github.com/iterative/dvc/blob/main/dvc/daemon.py
|
||||
"""
|
||||
|
||||
import inspect
|
||||
import os
|
||||
import platform
|
||||
import sys
|
||||
import httpie.__main__
|
||||
from contextlib import suppress
|
||||
from subprocess import Popen, DEVNULL
|
||||
from typing import Dict, List
|
||||
from httpie.compat import is_frozen, is_windows
|
||||
|
||||
|
||||
ProcessContext = Dict[str, str]
|
||||
|
||||
|
||||
def _start_process(cmd: List[str], **kwargs) -> Popen:
|
||||
prefix = [sys.executable]
|
||||
# If it is frozen, sys.executable points to the binary (http).
|
||||
# Otherwise it points to the python interpreter.
|
||||
if not is_frozen:
|
||||
main_entrypoint = httpie.__main__.__file__
|
||||
prefix += [main_entrypoint]
|
||||
return Popen(prefix + cmd, close_fds=True, shell=False, stdout=DEVNULL, stderr=DEVNULL, **kwargs)
|
||||
|
||||
|
||||
def _spawn_windows(cmd: List[str], process_context: ProcessContext) -> None:
|
||||
from subprocess import (
|
||||
CREATE_NEW_PROCESS_GROUP,
|
||||
CREATE_NO_WINDOW,
|
||||
STARTF_USESHOWWINDOW,
|
||||
STARTUPINFO,
|
||||
)
|
||||
|
||||
# https://stackoverflow.com/a/7006424
|
||||
# https://bugs.python.org/issue41619
|
||||
creationflags = CREATE_NEW_PROCESS_GROUP | CREATE_NO_WINDOW
|
||||
|
||||
startupinfo = STARTUPINFO()
|
||||
startupinfo.dwFlags |= STARTF_USESHOWWINDOW
|
||||
|
||||
_start_process(
|
||||
cmd,
|
||||
env=process_context,
|
||||
creationflags=creationflags,
|
||||
startupinfo=startupinfo,
|
||||
)
|
||||
|
||||
|
||||
def _spawn_posix(args: List[str], process_context: ProcessContext) -> None:
|
||||
"""
|
||||
Perform a double fork procedure [1] to detach from the parent
|
||||
process so that we don't block the user even if their original
|
||||
command's execution is done but the release fetcher is not.
|
||||
|
||||
[1]: https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap11.html#tag_11_01_03
|
||||
"""
|
||||
|
||||
from httpie.core import main
|
||||
|
||||
try:
|
||||
pid = os.fork()
|
||||
if pid > 0:
|
||||
return
|
||||
except OSError:
|
||||
os._exit(1)
|
||||
|
||||
os.setsid()
|
||||
|
||||
try:
|
||||
pid = os.fork()
|
||||
if pid > 0:
|
||||
os._exit(0)
|
||||
except OSError:
|
||||
os._exit(1)
|
||||
|
||||
# Close all standard inputs/outputs
|
||||
sys.stdin.close()
|
||||
sys.stdout.close()
|
||||
sys.stderr.close()
|
||||
|
||||
if platform.system() == 'Darwin':
|
||||
# Double-fork is not reliable on macOS, so we'll use a subprocess
|
||||
# to ensure the task is isolated properly.
|
||||
process = _start_process(args, env=process_context)
|
||||
# Unlike windows, since we already completed the fork procedure
|
||||
# we can simply join the process and wait for it.
|
||||
process.communicate()
|
||||
else:
|
||||
os.environ.update(process_context)
|
||||
with suppress(BaseException):
|
||||
main(['http'] + args)
|
||||
|
||||
os._exit(0)
|
||||
|
||||
|
||||
def _spawn(args: List[str], process_context: ProcessContext) -> None:
|
||||
"""
|
||||
Spawn a new process to run the given command.
|
||||
"""
|
||||
if is_windows:
|
||||
_spawn_windows(args, process_context)
|
||||
else:
|
||||
_spawn_posix(args, process_context)
|
||||
|
||||
|
||||
def spawn_daemon(task: str) -> None:
|
||||
args = [task, '--daemon']
|
||||
process_context = os.environ.copy()
|
||||
if not is_frozen:
|
||||
file_path = os.path.abspath(inspect.stack()[0][1])
|
||||
process_context['PYTHONPATH'] = os.path.dirname(
|
||||
os.path.dirname(os.path.dirname(file_path))
|
||||
)
|
||||
|
||||
_spawn(args, process_context)
|
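Putting the pieces together, spawn_daemon('fetch_updates') detaches the update fetch from the user's command. On Windows and macOS it spawns roughly [sys.executable, httpie.__main__.__file__, 'fetch_updates', '--daemon'] (the interpreter prefix is dropped in frozen builds); on other POSIX systems it double-forks and calls main(['http', 'fetch_updates', '--daemon']) in the child. A sketch of the call site:

    from httpie.internal.daemons import spawn_daemon

    spawn_daemon('fetch_updates')   # returns immediately; the work happens detached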
171
httpie/internal/update_warnings.py
Normal file
@ -0,0 +1,171 @@
|
||||
import json
|
||||
from contextlib import nullcontext, suppress
|
||||
from datetime import datetime, timedelta
|
||||
from pathlib import Path
|
||||
from typing import Any, Optional, Callable
|
||||
|
||||
import requests
|
||||
|
||||
import httpie
|
||||
from httpie.context import Environment, LogLevel
|
||||
from httpie.internal.__build_channel__ import BUILD_CHANNEL
|
||||
from httpie.internal.daemons import spawn_daemon
|
||||
from httpie.utils import is_version_greater, open_with_lockfile
|
||||
|
||||
# Automatically updated package version index.
|
||||
PACKAGE_INDEX_LINK = 'https://packages.httpie.io/latest.json'
|
||||
|
||||
FETCH_INTERVAL = timedelta(weeks=2)
|
||||
WARN_INTERVAL = timedelta(weeks=1)
|
||||
|
||||
UPDATE_MESSAGE_FORMAT = """\
|
||||
A new HTTPie release ({last_released_version}) is available.
|
||||
To see how you can update, please visit https://httpie.io/docs/cli/{installation_method}
|
||||
"""
|
||||
|
||||
ALREADY_UP_TO_DATE_MESSAGE = """\
|
||||
You are already up-to-date.
|
||||
"""
|
||||
|
||||
|
||||
def _read_data_error_free(file: Path) -> Any:
|
||||
# If the file is broken / non-existent, ignore it.
|
||||
try:
|
||||
with open(file) as stream:
|
||||
return json.load(stream)
|
||||
except (ValueError, OSError):
|
||||
return {}
|
||||
|
||||
|
||||
def _fetch_updates(env: Environment) -> str:
|
||||
file = env.config.version_info_file
|
||||
data = _read_data_error_free(file)
|
||||
|
||||
response = requests.get(PACKAGE_INDEX_LINK, verify=False)
|
||||
response.raise_for_status()
|
||||
|
||||
data.setdefault('last_warned_date', None)
|
||||
data['last_fetched_date'] = datetime.now().isoformat()
|
||||
data['last_released_versions'] = response.json()
|
||||
|
||||
with open_with_lockfile(file, 'w') as stream:
|
||||
json.dump(data, stream)
|
||||
|
||||
|
||||
def fetch_updates(env: Environment, lazy: bool = True):
|
||||
if lazy:
|
||||
spawn_daemon('fetch_updates')
|
||||
else:
|
||||
_fetch_updates(env)
|
||||
|
||||
|
||||
def maybe_fetch_updates(env: Environment) -> None:
|
||||
if env.config.get('disable_update_warnings'):
|
||||
return None
|
||||
|
||||
data = _read_data_error_free(env.config.version_info_file)
|
||||
|
||||
if data:
|
||||
current_date = datetime.now()
|
||||
last_fetched_date = datetime.fromisoformat(data['last_fetched_date'])
|
||||
earliest_fetch_date = last_fetched_date + FETCH_INTERVAL
|
||||
if current_date < earliest_fetch_date:
|
||||
return None
|
||||
|
||||
fetch_updates(env)
|
||||
|
||||
|
||||
def _get_suppress_context(env: Environment) -> Any:
|
||||
"""Return a context manager that suppress
|
||||
all possible errors.
|
||||
|
||||
Note: if you have set the developer_mode=True in
|
||||
your config, then it will show all errors for easier
|
||||
debugging."""
|
||||
if env.config.developer_mode:
|
||||
return nullcontext()
|
||||
else:
|
||||
return suppress(BaseException)
|
||||
|
||||
|
||||
def _update_checker(
|
||||
func: Callable[[Environment], None]
|
||||
) -> Callable[[Environment], None]:
|
||||
"""Control the execution of the update checker (suppress errors, trigger
|
||||
auto updates etc.)"""
|
||||
|
||||
def wrapper(env: Environment) -> None:
|
||||
with _get_suppress_context(env):
|
||||
func(env)
|
||||
|
||||
with _get_suppress_context(env):
|
||||
maybe_fetch_updates(env)
|
||||
|
||||
return wrapper
|
||||
|
||||
|
||||
def _get_update_status(env: Environment) -> Optional[str]:
|
||||
"""If there is a new update available, return the warning text.
|
||||
Otherwise just return None."""
|
||||
file = env.config.version_info_file
|
||||
if not file.exists():
|
||||
return None
|
||||
|
||||
with _get_suppress_context(env):
|
||||
# If the user quickly spawns multiple httpie processes
|
||||
# we don't want to end in a race.
|
||||
with open_with_lockfile(file) as stream:
|
||||
version_info = json.load(stream)
|
||||
|
||||
available_channels = version_info['last_released_versions']
|
||||
if BUILD_CHANNEL not in available_channels:
|
||||
return None
|
||||
|
||||
current_version = httpie.__version__
|
||||
last_released_version = available_channels[BUILD_CHANNEL]
|
||||
if not is_version_greater(last_released_version, current_version):
|
||||
return None
|
||||
|
||||
text = UPDATE_MESSAGE_FORMAT.format(
|
||||
last_released_version=last_released_version,
|
||||
installation_method=BUILD_CHANNEL,
|
||||
)
|
||||
return text
|
||||
|
||||
|
||||
def get_update_status(env: Environment) -> str:
|
||||
return _get_update_status(env) or ALREADY_UP_TO_DATE_MESSAGE
|
||||
|
||||
|
||||
@_update_checker
|
||||
def check_updates(env: Environment) -> None:
|
||||
if env.config.get('disable_update_warnings'):
|
||||
return None
|
||||
|
||||
file = env.config.version_info_file
|
||||
update_status = _get_update_status(env)
|
||||
|
||||
if not update_status:
|
||||
return None
|
||||
|
||||
# If the user quickly spawns multiple httpie processes
|
||||
# we don't want to end in a race.
|
||||
with open_with_lockfile(file) as stream:
|
||||
version_info = json.load(stream)
|
||||
|
||||
# We don't want to spam the user with too many warnings,
|
||||
# so we'll only warn once in a while (WARN_INTERVAL).
|
||||
current_date = datetime.now()
|
||||
last_warned_date = version_info['last_warned_date']
|
||||
if last_warned_date is not None:
|
||||
earliest_warn_date = (
|
||||
datetime.fromisoformat(last_warned_date) + WARN_INTERVAL
|
||||
)
|
||||
if current_date < earliest_warn_date:
|
||||
return None
|
||||
|
||||
env.log_error(update_status, level=LogLevel.INFO)
|
||||
version_info['last_warned_date'] = current_date.isoformat()
|
||||
|
||||
with open_with_lockfile(file, 'w') as stream:
|
||||
json.dump(version_info, stream)
|
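Putting _fetch_updates() and check_updates() together, the version_info_file is a small JSON document. Assuming a pip-based install (BUILD_CHANNEL == 'pip'), it would look roughly like the sketch below; the field values are only illustrative.

    # Illustrative contents of env.config.version_info_file:
    {
        "last_fetched_date": "2022-04-01T12:00:00",   # refreshed at most every FETCH_INTERVAL
        "last_warned_date": "2022-04-03T09:30:00",    # warnings rate-limited by WARN_INTERVAL
        "last_released_versions": {"pip": "3.2.0"}    # keyed by build channel
    }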
@ -4,6 +4,7 @@ from typing import Any, Type, List, Dict, TYPE_CHECKING
|
||||
if TYPE_CHECKING:
|
||||
from httpie.sessions import Session
|
||||
|
||||
|
||||
INSECURE_COOKIE_JAR_WARNING = '''\
|
||||
Outdated layout detected for the current session. Please consider updating it,
|
||||
in order to not get affected by potential security problems.
|
||||
@ -53,16 +54,12 @@ def pre_process(session: 'Session', cookies: Any) -> List[Dict[str, Any]]:
|
||||
for cookie in normalized_cookies
|
||||
)
|
||||
|
||||
if should_issue_warning and not session.refactor_mode:
|
||||
if should_issue_warning:
|
||||
warning = INSECURE_COOKIE_JAR_WARNING.format(hostname=session.bound_host, session_id=session.session_id)
|
||||
if not session.is_anonymous:
|
||||
warning += INSECURE_COOKIE_JAR_WARNING_FOR_NAMED_SESSIONS
|
||||
warning += INSECURE_COOKIE_SECURITY_LINK
|
||||
|
||||
session.env.log_error(
|
||||
warning,
|
||||
level='warning'
|
||||
)
|
||||
session.warn_legacy_usage(warning)
|
||||
|
||||
return normalized_cookies
|
||||
|
73
httpie/legacy/v3_2_0_session_header_format.py
Normal file
@ -0,0 +1,73 @@
|
||||
from typing import Any, Type, List, Dict, TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from httpie.sessions import Session
|
||||
|
||||
|
||||
OLD_HEADER_STORE_WARNING = '''\
|
||||
Outdated layout detected for the current session. Please consider updating it,
|
||||
in order to use the latest features regarding the header layout.
|
||||
|
||||
For fixing the current session:
|
||||
|
||||
$ httpie cli sessions upgrade {hostname} {session_id}
|
||||
'''
|
||||
|
||||
OLD_HEADER_STORE_WARNING_FOR_NAMED_SESSIONS = '''\
|
||||
|
||||
For fixing all named sessions:
|
||||
|
||||
$ httpie cli sessions upgrade-all
|
||||
'''
|
||||
|
||||
OLD_HEADER_STORE_LINK = '\nSee $INSERT_LINK for more information.'
|
||||
|
||||
|
||||
def pre_process(session: 'Session', headers: Any) -> List[Dict[str, Any]]:
|
||||
"""Serialize the headers into a unified form and issue a warning if
|
||||
the session file is using the old layout."""
|
||||
|
||||
is_old_style = isinstance(headers, dict)
|
||||
if is_old_style:
|
||||
normalized_headers = list(headers.items())
|
||||
else:
|
||||
normalized_headers = [
|
||||
(item['name'], item['value'])
|
||||
for item in headers
|
||||
]
|
||||
|
||||
if is_old_style:
|
||||
warning = OLD_HEADER_STORE_WARNING.format(hostname=session.bound_host, session_id=session.session_id)
|
||||
if not session.is_anonymous:
|
||||
warning += OLD_HEADER_STORE_WARNING_FOR_NAMED_SESSIONS
|
||||
warning += OLD_HEADER_STORE_LINK
|
||||
session.warn_legacy_usage(warning)
|
||||
|
||||
return normalized_headers
|
||||
|
||||
|
||||
def post_process(
|
||||
normalized_headers: List[Dict[str, Any]],
|
||||
*,
|
||||
original_type: Type[Any]
|
||||
) -> Any:
|
||||
"""Deserialize given header store into the original form it was
|
||||
used in."""
|
||||
|
||||
if issubclass(original_type, dict):
|
||||
# For the legacy behavior, preserve the last value.
|
||||
return {
|
||||
item['name']: item['value']
|
||||
for item in normalized_headers
|
||||
}
|
||||
else:
|
||||
return normalized_headers
|
||||
|
||||
|
||||
def fix_layout(session: 'Session', *args, **kwargs) -> None:
|
||||
from httpie.sessions import materialize_headers
|
||||
|
||||
if not isinstance(session['headers'], dict):
|
||||
return None
|
||||
|
||||
session['headers'] = materialize_headers(session['headers'])
|
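A quick worked example of the round-trip above, using hypothetical header values; post_process() simply folds the normalized entries back into the legacy dict layout, keeping the last value per name:

    from httpie.legacy import v3_2_0_session_header_format as header_format

    new_style = [{'name': 'Accept', 'value': 'application/json'}]
    assert header_format.post_process(new_style, original_type=dict) == {
        'Accept': 'application/json'
    }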
@ -1,5 +1,6 @@
|
||||
from textwrap import dedent
|
||||
from httpie.cli.argparser import HTTPieManagerArgumentParser
|
||||
from httpie.cli.options import Qualifiers, ARGPARSE_QUALIFIER_MAP, map_qualifiers, parser_to_parser_spec
|
||||
from httpie import __version__
|
||||
|
||||
CLI_SESSION_UPGRADE_FLAGS = [
|
||||
@ -12,37 +13,6 @@ CLI_SESSION_UPGRADE_FLAGS = [
|
||||
]
|
||||
|
||||
COMMANDS = {
|
||||
'plugins': {
|
||||
'help': 'Manage HTTPie plugins.',
|
||||
'install': [
|
||||
'Install the given targets from PyPI '
|
||||
'or from local paths.',
|
||||
{
|
||||
'dest': 'targets',
|
||||
'nargs': '+',
|
||||
'help': 'targets to install'
|
||||
}
|
||||
],
|
||||
'upgrade': [
|
||||
'Upgrade the given plugins',
|
||||
{
|
||||
'dest': 'targets',
|
||||
'nargs': '+',
|
||||
'help': 'targets to upgrade'
|
||||
}
|
||||
],
|
||||
'uninstall': [
|
||||
'Uninstall the given HTTPie plugins.',
|
||||
{
|
||||
'dest': 'targets',
|
||||
'nargs': '+',
|
||||
'help': 'targets to install'
|
||||
}
|
||||
],
|
||||
'list': [
|
||||
'List all installed HTTPie plugins.'
|
||||
],
|
||||
},
|
||||
'cli': {
|
||||
'help': 'Manage HTTPie for Terminal',
|
||||
'export-args': [
|
||||
@ -50,9 +20,13 @@ COMMANDS = {
|
||||
{
|
||||
'flags': ['-f', '--format'],
|
||||
'choices': ['json'],
|
||||
'help': 'Format to export in.',
|
||||
'default': 'json'
|
||||
}
|
||||
],
|
||||
'check-updates': [
|
||||
'Check for updates'
|
||||
],
|
||||
'sessions': {
|
||||
'help': 'Manage HTTPie sessions',
|
||||
'upgrade': [
|
||||
@ -82,6 +56,42 @@ COMMANDS = {
|
||||
}
|
||||
|
||||
|
||||
COMMANDS['plugins'] = COMMANDS['cli']['plugins'] = {
|
||||
'help': 'Manage HTTPie plugins.',
|
||||
'install': [
|
||||
'Install the given targets from PyPI '
|
||||
'or from local paths.',
|
||||
{
|
||||
'dest': 'targets',
|
||||
'metavar': 'TARGET',
|
||||
'nargs': Qualifiers.ONE_OR_MORE,
|
||||
'help': 'targets to install'
|
||||
}
|
||||
],
|
||||
'upgrade': [
|
||||
'Upgrade the given plugins',
|
||||
{
|
||||
'dest': 'targets',
|
||||
'metavar': 'TARGET',
|
||||
'nargs': Qualifiers.ONE_OR_MORE,
|
||||
'help': 'targets to upgrade'
|
||||
}
|
||||
],
|
||||
'uninstall': [
|
||||
'Uninstall the given HTTPie plugins.',
|
||||
{
|
||||
'dest': 'targets',
|
||||
'metavar': 'TARGET',
|
||||
'nargs': Qualifiers.ONE_OR_MORE,
|
||||
'help': 'targets to uninstall'
|
||||
}
|
||||
],
|
||||
'list': [
|
||||
'List all installed HTTPie plugins.'
|
||||
],
|
||||
}
|
||||
|
||||
|
||||
def missing_subcommand(*args) -> str:
|
||||
base = COMMANDS
|
||||
for arg in args:
|
||||
@ -92,7 +102,7 @@ def missing_subcommand(*args) -> str:
|
||||
return f'Please specify one of these: {subcommands}'
|
||||
|
||||
|
||||
def generate_subparsers(root, parent_parser, definitions):
|
||||
def generate_subparsers(root, parent_parser, definitions, spec):
|
||||
action_dest = '_'.join(parent_parser.prog.split()[1:] + ['action'])
|
||||
actions = parent_parser.add_subparsers(
|
||||
dest=action_dest
|
||||
@ -105,13 +115,15 @@ def generate_subparsers(root, parent_parser, definitions):
|
||||
command_parser = actions.add_parser(command, description=descr)
|
||||
command_parser.root = root
|
||||
if is_subparser:
|
||||
generate_subparsers(root, command_parser, properties)
|
||||
generate_subparsers(root, command_parser, properties, spec)
|
||||
continue
|
||||
|
||||
group = spec.add_group(parent_parser.prog + ' ' + command, description=descr)
|
||||
for argument in properties:
|
||||
argument = argument.copy()
|
||||
flags = argument.pop('flags', [])
|
||||
command_parser.add_argument(*flags, **argument)
|
||||
command_parser.add_argument(*flags, **map_qualifiers(argument, ARGPARSE_QUALIFIER_MAP))
|
||||
group.add_argument(*flags, **argument)
|
||||
|
||||
|
||||
parser = HTTPieManagerArgumentParser(
|
||||
@ -158,4 +170,12 @@ parser.add_argument(
|
||||
'''
|
||||
)
|
||||
|
||||
generate_subparsers(parser, parser, COMMANDS)
|
||||
man_page_hint = '''
|
||||
If you are looking for the man pages of http/https commands, try one of the following:
|
||||
$ man http
|
||||
$ man https
|
||||
|
||||
'''
|
||||
|
||||
options = parser_to_parser_spec(parser, man_page_hint=man_page_hint, source_file=__file__)
|
||||
generate_subparsers(parser, parser, COMMANDS, options)
|
||||
|
69
httpie/manager/compat.py
Normal file
@ -0,0 +1,69 @@
|
||||
import sys
|
||||
import shutil
|
||||
import subprocess
|
||||
|
||||
from contextlib import suppress
|
||||
from typing import List, Optional
|
||||
from httpie.compat import is_frozen
|
||||
|
||||
|
||||
class PipError(Exception):
|
||||
"""An exception that occurs when pip exits with an error status code."""
|
||||
|
||||
def __init__(self, stdout, stderr):
|
||||
self.stdout = stdout
|
||||
self.stderr = stderr
|
||||
|
||||
|
||||
def _discover_system_pip() -> List[str]:
|
||||
# When we are running inside of a frozen binary, we need the system
|
||||
# pip to install plugins since there is no way for us to execute any
|
||||
# code outside of HTTPie.
|
||||
#
|
||||
# We explicitly depend on system pip, so the SystemError should not
|
||||
# be raised (except for broken installations).
|
||||
def _check_pip_version(pip_location: Optional[str]) -> bool:
|
||||
if not pip_location:
|
||||
return False
|
||||
|
||||
with suppress(subprocess.CalledProcessError):
|
||||
stdout = subprocess.check_output([pip_location, "--version"], text=True)
|
||||
return "python 3" in stdout
|
||||
|
||||
targets = [
|
||||
"pip",
|
||||
"pip3"
|
||||
]
|
||||
for target in targets:
|
||||
pip_location = shutil.which(target)
|
||||
if _check_pip_version(pip_location):
|
||||
return pip_location
|
||||
|
||||
raise SystemError("Couldn't find 'pip' executable. Please ensure that pip is available on your system.")
|
||||
|
||||
|
||||
def _run_pip_subprocess(pip_executable: List[str], args: List[str]) -> bytes:
|
||||
import subprocess
|
||||
|
||||
cmd = [*pip_executable, *args]
|
||||
try:
|
||||
process = subprocess.run(
|
||||
cmd,
|
||||
check=True,
|
||||
shell=False,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE
|
||||
)
|
||||
except subprocess.CalledProcessError as error:
|
||||
raise PipError(error.stdout, error.stderr) from error
|
||||
else:
|
||||
return process.stdout
|
||||
|
||||
|
||||
def run_pip(args: List[str]) -> bytes:
|
||||
if is_frozen:
|
||||
pip_executable = [_discover_system_pip()]
|
||||
else:
|
||||
pip_executable = [sys.executable, '-m', 'pip']
|
||||
|
||||
return _run_pip_subprocess(pip_executable, args)
|
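As a sketch of how the helper above is meant to be consumed (mirroring the plugin installer further down in this diff; the plugin name is hypothetical):

    from httpie.manager.compat import PipError, run_pip

    try:
        stdout = run_pip(['install', '--prefer-binary', 'httpie-some-plugin'])
    except PipError as error:
        print(error.stderr.decode())
    else:
        print(stdout.decode())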
@ -2,7 +2,6 @@ import argparse
|
||||
from typing import Optional
|
||||
|
||||
from httpie.context import Environment
|
||||
from httpie.manager.plugins import PluginInstaller
|
||||
from httpie.status import ExitStatus
|
||||
from httpie.manager.cli import missing_subcommand, parser
|
||||
from httpie.manager.tasks import CLI_TASKS
|
||||
@ -36,8 +35,7 @@ def program(args: argparse.Namespace, env: Environment) -> ExitStatus:
|
||||
parser.error(MSG_NAKED_INVOCATION)
|
||||
|
||||
if args.action == 'plugins':
|
||||
plugins = PluginInstaller(env, debug=args.debug)
|
||||
return plugins.run(args.plugins_action, args)
|
||||
return dispatch_cli_task(env, args.action, args)
|
||||
elif args.action == 'cli':
|
||||
return dispatch_cli_task(env, args.cli_action, args)
|
||||
|
||||
|
11
httpie/manager/tasks/__init__.py
Normal file
@ -0,0 +1,11 @@
|
||||
from httpie.manager.tasks.sessions import cli_sessions
|
||||
from httpie.manager.tasks.export_args import cli_export_args
|
||||
from httpie.manager.tasks.plugins import cli_plugins
|
||||
from httpie.manager.tasks.check_updates import cli_check_updates
|
||||
|
||||
CLI_TASKS = {
|
||||
'sessions': cli_sessions,
|
||||
'export-args': cli_export_args,
|
||||
'plugins': cli_plugins,
|
||||
'check-updates': cli_check_updates
|
||||
}
|
10
httpie/manager/tasks/check_updates.py
Normal file
@ -0,0 +1,10 @@
|
||||
import argparse
|
||||
from httpie.context import Environment
|
||||
from httpie.status import ExitStatus
|
||||
from httpie.internal.update_warnings import fetch_updates, get_update_status
|
||||
|
||||
|
||||
def cli_check_updates(env: Environment, args: argparse.Namespace) -> ExitStatus:
|
||||
fetch_updates(env, lazy=False)
|
||||
env.stdout.write(get_update_status(env))
|
||||
return ExitStatus.SUCCESS
|
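This task backs the new `httpie cli check-updates` command. Running it from a script is equivalent to the following sketch (assuming a default Environment()):

    from httpie.context import Environment
    from httpie.internal.update_warnings import fetch_updates, get_update_status

    env = Environment()
    fetch_updates(env, lazy=False)           # synchronous fetch, no daemon
    env.stdout.write(get_update_status(env))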
27
httpie/manager/tasks/export_args.py
Normal file
@ -0,0 +1,27 @@
|
||||
import argparse
|
||||
import json
|
||||
|
||||
from httpie.cli.definition import options
|
||||
from httpie.cli.options import to_data
|
||||
from httpie.output.writer import write_raw_data
|
||||
from httpie.status import ExitStatus
|
||||
from httpie.context import Environment
|
||||
|
||||
|
||||
FORMAT_TO_CONTENT_TYPE = {
|
||||
'json': 'application/json'
|
||||
}
|
||||
|
||||
|
||||
def cli_export_args(env: Environment, args: argparse.Namespace) -> ExitStatus:
|
||||
if args.format == 'json':
|
||||
data = json.dumps(to_data(options))
|
||||
else:
|
||||
raise NotImplementedError(f'Unexpected format value: {args.format}')
|
||||
|
||||
write_raw_data(
|
||||
env,
|
||||
data,
|
||||
stream_kwargs={'mime_overwrite': FORMAT_TO_CONTENT_TYPE[args.format]},
|
||||
)
|
||||
return ExitStatus.SUCCESS
|
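Per the COMMANDS table earlier in this change, the task above is what runs when the manager is invoked as below; the output is the parser specification serialized as application/json on stdout.

    $ httpie cli export-args --format=json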
@ -1,20 +1,19 @@
|
||||
import argparse
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
import textwrap
|
||||
import re
|
||||
import shutil
|
||||
from collections import defaultdict
|
||||
from contextlib import suppress
|
||||
from pathlib import Path
|
||||
from typing import Tuple, Optional, List
|
||||
from typing import List, Optional, Tuple
|
||||
|
||||
from httpie.manager.compat import PipError, run_pip
|
||||
from httpie.manager.cli import parser, missing_subcommand
|
||||
from httpie.compat import importlib_metadata, get_dist_name
|
||||
from httpie.compat import get_dist_name, importlib_metadata
|
||||
from httpie.context import Environment
|
||||
from httpie.status import ExitStatus
|
||||
from httpie.utils import as_site
|
||||
from httpie.utils import get_site_paths
|
||||
|
||||
PEP_503 = re.compile(r"[-_.]+")
|
||||
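The PEP_503 pattern matches runs of the separator characters that PEP 503 treats as equivalent in project names. The canonical normalisation, presumably what this regex is used for inside the installer, looks like this (helper name is hypothetical):

    import re

    PEP_503 = re.compile(r"[-_.]+")

    def pep_503_normalize(name: str) -> str:
        # e.g. 'HTTPie_Plugin.Example' -> 'httpie-plugin-example'
        return PEP_503.sub('-', name).lower()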
|
||||
@ -58,46 +57,37 @@ class PluginInstaller:
|
||||
self.env.stderr.write(message + '\n')
|
||||
return ExitStatus.ERROR
|
||||
|
||||
def pip(self, *args, **kwargs) -> subprocess.CompletedProcess:
|
||||
options = {
|
||||
'check': True,
|
||||
'shell': False,
|
||||
'stdout': self.env.stdout,
|
||||
'stderr': subprocess.PIPE,
|
||||
}
|
||||
options.update(kwargs)
|
||||
|
||||
cmd = [sys.executable, '-m', 'pip', *args]
|
||||
return subprocess.run(
|
||||
cmd,
|
||||
**options
|
||||
)
|
||||
|
||||
def _install(self, targets: List[str], mode='install', **process_options) -> Tuple[
|
||||
Optional[bytes], ExitStatus
|
||||
def _install(self, targets: List[str], mode='install') -> Tuple[
|
||||
bytes, ExitStatus
|
||||
]:
|
||||
pip_args = [
|
||||
'install',
|
||||
'--prefer-binary',
|
||||
f'--prefix={self.dir}',
|
||||
'--no-warn-script-location',
|
||||
]
|
||||
if mode == 'upgrade':
|
||||
pip_args.append('--upgrade')
|
||||
pip_args.extend(targets)
|
||||
|
||||
try:
|
||||
process = self.pip(
|
||||
*pip_args,
|
||||
*targets,
|
||||
**process_options,
|
||||
)
|
||||
except subprocess.CalledProcessError as error:
|
||||
stdout = run_pip(pip_args)
|
||||
except PipError as pip_error:
|
||||
error = pip_error
|
||||
stdout = pip_error.stdout
|
||||
else:
|
||||
error = None
|
||||
|
||||
self.env.stdout.write(stdout.decode())
|
||||
|
||||
if error:
|
||||
reason = None
|
||||
if error.stderr:
|
||||
stderr = error.stderr.decode()
|
||||
|
||||
if self.debug:
|
||||
self.env.stderr.write('Command failed: ')
|
||||
self.env.stderr.write(' '.join(error.cmd) + '\n')
|
||||
self.env.stderr.write('pip ' + ' '.join(pip_args) + '\n')
|
||||
self.env.stderr.write(textwrap.indent(' ', stderr))
|
||||
|
||||
last_line = stderr.strip().splitlines()[-1]
|
||||
@ -108,7 +98,6 @@ class PluginInstaller:
|
||||
stdout = error.stdout
|
||||
exit_status = self.fail(mode, ', '.join(targets), reason)
|
||||
else:
|
||||
stdout = process.stdout
|
||||
exit_status = ExitStatus.SUCCESS
|
||||
|
||||
return stdout, exit_status
|
||||
@ -124,10 +113,11 @@ class PluginInstaller:
|
||||
# existing metadata for old versions manually.
|
||||
# [0]: https://github.com/pypa/pip/issues/10727
|
||||
result_deps = defaultdict(list)
|
||||
for child in as_site(self.dir).iterdir():
|
||||
if child.suffix in {'.dist-info', '.egg-info'}:
|
||||
name, _, version = child.stem.rpartition('-')
|
||||
result_deps[name].append((version, child))
|
||||
for site_dir in get_site_paths(self.dir):
|
||||
for child in site_dir.iterdir():
|
||||
if child.suffix in {'.dist-info', '.egg-info'}:
|
||||
name, _, version = child.stem.rpartition('-')
|
||||
result_deps[name].append((version, child))
|
||||
|
||||
for target in targets:
|
||||
name, _, version = target.rpartition('-')
|
||||
@ -145,15 +135,12 @@ class PluginInstaller:
|
||||
|
||||
raw_stdout, exit_status = self._install(
|
||||
targets,
|
||||
mode='upgrade',
|
||||
stdout=subprocess.PIPE
|
||||
mode='upgrade'
|
||||
)
|
||||
if not raw_stdout:
|
||||
return exit_status
|
||||
|
||||
stdout = raw_stdout.decode()
|
||||
self.env.stdout.write(stdout)
|
||||
|
||||
installation_line = stdout.splitlines()[-1]
|
||||
if installation_line.startswith('Successfully installed'):
|
||||
self._clear_metadata(installation_line.split()[2:])
|
||||
@ -178,7 +165,7 @@ class PluginInstaller:
|
||||
return self.fail('uninstall', target, 'couldn\'t locate the package')
|
||||
|
||||
# TODO: Consider handling failures here (e.g if it fails,
|
||||
# just rever the operation and leave the site-packages
|
||||
# just revert the operation and leave the site-packages
|
||||
# in a proper shape).
|
||||
for file in files:
|
||||
with suppress(FileNotFoundError):
|
||||
@ -248,3 +235,14 @@ class PluginInstaller:
|
||||
status = self.list()
|
||||
|
||||
return status or ExitStatus.SUCCESS
|
||||
|
||||
|
||||
def cli_plugins(env: Environment, args: argparse.Namespace) -> ExitStatus:
|
||||
plugins = PluginInstaller(env, debug=args.debug)
|
||||
|
||||
try:
|
||||
action = args.cli_plugins_action
|
||||
except AttributeError:
|
||||
action = args.plugins_action
|
||||
|
||||
return plugins.run(action, args)
|
@ -1,25 +1,19 @@
|
||||
import argparse
|
||||
from typing import TypeVar, Callable, Tuple
|
||||
|
||||
from httpie.sessions import SESSIONS_DIR_NAME, get_httpie_session
|
||||
from httpie.status import ExitStatus
|
||||
from httpie.context import Environment
|
||||
from httpie.legacy import cookie_format as legacy_cookies
|
||||
from httpie.legacy import v3_1_0_session_cookie_format, v3_2_0_session_header_format
|
||||
from httpie.manager.cli import missing_subcommand, parser
|
||||
|
||||
T = TypeVar('T')
|
||||
|
||||
CLI_TASKS = {}
|
||||
from httpie.utils import is_version_greater
|
||||
|
||||
|
||||
def task(name: str) -> Callable[[T], T]:
|
||||
def wrapper(func: T) -> T:
|
||||
CLI_TASKS[name] = func
|
||||
return func
|
||||
return wrapper
|
||||
FIXERS_TO_VERSIONS = {
|
||||
'3.1.0': v3_1_0_session_cookie_format.fix_layout,
|
||||
'3.2.0': v3_2_0_session_header_format.fix_layout,
|
||||
}
|
||||
|
||||
|
||||
@task('sessions')
|
||||
def cli_sessions(env: Environment, args: argparse.Namespace) -> ExitStatus:
|
||||
action = args.cli_sessions_action
|
||||
if action is None:
|
||||
@ -33,30 +27,6 @@ def cli_sessions(env: Environment, args: argparse.Namespace) -> ExitStatus:
|
||||
raise ValueError(f'Unexpected action: {action}')
|
||||
|
||||
|
||||
def is_version_greater(version_1: str, version_2: str) -> bool:
|
||||
# In an ideal scenario, we would depend on `packaging` in order
|
||||
# to offer PEP 440 compatible parsing. But since it might not be
|
||||
# commonly available for outside packages, and since we are only
|
||||
# going to parse HTTPie's own version it should be fine to compare
|
||||
# this in a SemVer subset fashion.
|
||||
|
||||
def split_version(version: str) -> Tuple[int, ...]:
|
||||
parts = []
|
||||
for part in version.split('.')[:3]:
|
||||
try:
|
||||
parts.append(int(part))
|
||||
except ValueError:
|
||||
break
|
||||
return tuple(parts)
|
||||
|
||||
return split_version(version_1) > split_version(version_2)
|
||||
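To make the comparison rule concrete (this helper is being moved to httpie.utils in this change, but its behaviour stays the same):

    # split_version() keeps at most the first three numeric components and stops
    # at the first non-numeric part, so:
    #   split_version('3.2.0')    -> (3, 2, 0)
    #   split_version('3.2.0rc1') -> (3, 2)        # '0rc1' is not an int
    # and therefore:
    #   is_version_greater('3.2.0', '3.1.0')  -> True
    #   is_version_greater('3.1.0', '3.1.0')  -> False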
|
||||
|
||||
FIXERS_TO_VERSIONS = {
|
||||
'3.1.0': legacy_cookies.fix_layout
|
||||
}
|
||||
|
||||
|
||||
def upgrade_session(env: Environment, args: argparse.Namespace, hostname: str, session_name: str):
|
||||
session = get_httpie_session(
|
||||
env=env,
|
||||
@ -64,7 +34,7 @@ def upgrade_session(env: Environment, args: argparse.Namespace, hostname: str, s
|
||||
session_name=session_name,
|
||||
host=hostname,
|
||||
url=hostname,
|
||||
refactor_mode=True
|
||||
suppress_legacy_warnings=True
|
||||
)
|
||||
|
||||
session_name = session.path.stem
|
||||
@ -114,28 +84,3 @@ def cli_upgrade_all_sessions(env: Environment, args: argparse.Namespace) -> Exit
|
||||
session_name=session_name
|
||||
)
|
||||
return status
|
||||
|
||||
|
||||
FORMAT_TO_CONTENT_TYPE = {
|
||||
'json': 'application/json'
|
||||
}
|
||||
|
||||
|
||||
@task('export-args')
|
||||
def cli_export(env: Environment, args: argparse.Namespace) -> ExitStatus:
|
||||
import json
|
||||
from httpie.cli.definition import options
|
||||
from httpie.cli.options import to_data
|
||||
from httpie.output.writer import write_raw_data
|
||||
|
||||
if args.format == 'json':
|
||||
data = json.dumps(to_data(options))
|
||||
else:
|
||||
raise NotImplementedError(f'Unexpected format value: {args.format}')
|
||||
|
||||
write_raw_data(
|
||||
env,
|
||||
data,
|
||||
stream_kwargs={'mime_overwrite': FORMAT_TO_CONTENT_TYPE[args.format]},
|
||||
)
|
||||
return ExitStatus.SUCCESS
|
@ -71,7 +71,11 @@ class HTTPResponse(HTTPMessage):
|
||||
@property
|
||||
def headers(self):
|
||||
try:
|
||||
raw_version = self._orig.raw._original_response.version
|
||||
raw = self._orig.raw
|
||||
if getattr(raw, '_original_response', None):
|
||||
raw_version = raw._original_response.version
|
||||
else:
|
||||
raw_version = raw.version
|
||||
except AttributeError:
|
||||
# Assume HTTP/1.1
|
||||
raw_version = 11
|
||||
@ -79,7 +83,7 @@ class HTTPResponse(HTTPMessage):
|
||||
9: '0.9',
|
||||
10: '1.0',
|
||||
11: '1.1',
|
||||
20: '2',
|
||||
20: '2.0',
|
||||
}[raw_version]
|
||||
|
||||
original = self._orig
|
||||
|
@ -17,14 +17,15 @@ from pygments.util import ClassNotFound
|
||||
|
||||
from ..lexers.json import EnhancedJsonLexer
|
||||
from ..lexers.metadata import MetadataLexer
|
||||
from ..ui.palette import SHADE_NAMES, get_color
|
||||
from ..ui.palette import AUTO_STYLE, SHADE_TO_PIE_STYLE, PieColor, ColorString, get_color
|
||||
from ...context import Environment
|
||||
from ...plugins import FormatterPlugin
|
||||
|
||||
|
||||
AUTO_STYLE = 'auto' # Follows terminal ANSI color styles
|
||||
DEFAULT_STYLE = AUTO_STYLE
|
||||
SOLARIZED_STYLE = 'solarized' # Bundled here
|
||||
PYGMENTS_BOLD = ColorString('bold')
|
||||
PYGMENTS_ITALIC = ColorString('italic')
|
||||
|
||||
BUNDLED_STYLES = {
|
||||
SOLARIZED_STYLE,
|
||||
@ -33,7 +34,7 @@ BUNDLED_STYLES = {
|
||||
|
||||
|
||||
def get_available_styles():
|
||||
return BUNDLED_STYLES | set(pygments.styles.get_all_styles())
|
||||
return sorted(BUNDLED_STYLES | set(pygments.styles.get_all_styles()))
|
||||
|
||||
|
||||
class ColorFormatter(FormatterPlugin):
|
||||
@ -254,11 +255,11 @@ class Solarized256Style(pygments.style.Style):
|
||||
pygments.token.Comment.Preproc: GREEN,
|
||||
pygments.token.Comment.Special: GREEN,
|
||||
pygments.token.Generic.Deleted: CYAN,
|
||||
pygments.token.Generic.Emph: 'italic',
|
||||
pygments.token.Generic.Emph: PYGMENTS_ITALIC,
|
||||
pygments.token.Generic.Error: RED,
|
||||
pygments.token.Generic.Heading: ORANGE,
|
||||
pygments.token.Generic.Inserted: GREEN,
|
||||
pygments.token.Generic.Strong: 'bold',
|
||||
pygments.token.Generic.Strong: PYGMENTS_BOLD,
|
||||
pygments.token.Generic.Subheading: ORANGE,
|
||||
pygments.token.Token: BASE1,
|
||||
pygments.token.Token.Other: ORANGE,
|
||||
@ -267,86 +268,86 @@ class Solarized256Style(pygments.style.Style):
|
||||
|
||||
PIE_HEADER_STYLE = {
|
||||
# HTTP line / Headers / Etc.
|
||||
pygments.token.Name.Namespace: 'bold primary',
|
||||
pygments.token.Keyword.Reserved: 'bold grey',
|
||||
pygments.token.Operator: 'bold grey',
|
||||
pygments.token.Number: 'bold grey',
|
||||
pygments.token.Name.Function.Magic: 'bold green',
|
||||
pygments.token.Name.Exception: 'bold green',
|
||||
pygments.token.Name.Attribute: 'blue',
|
||||
pygments.token.String: 'primary',
|
||||
pygments.token.Name.Namespace: PYGMENTS_BOLD | PieColor.PRIMARY,
|
||||
pygments.token.Keyword.Reserved: PYGMENTS_BOLD | PieColor.GREY,
|
||||
pygments.token.Operator: PYGMENTS_BOLD | PieColor.GREY,
|
||||
pygments.token.Number: PYGMENTS_BOLD | PieColor.GREY,
|
||||
pygments.token.Name.Function.Magic: PYGMENTS_BOLD | PieColor.GREEN,
|
||||
pygments.token.Name.Exception: PYGMENTS_BOLD | PieColor.GREEN,
|
||||
pygments.token.Name.Attribute: PieColor.BLUE,
|
||||
pygments.token.String: PieColor.PRIMARY,
|
||||
|
||||
# HTTP Methods
|
||||
pygments.token.Name.Function: 'bold grey',
|
||||
pygments.token.Name.Function.HTTP.GET: 'bold green',
|
||||
pygments.token.Name.Function.HTTP.HEAD: 'bold green',
|
||||
pygments.token.Name.Function.HTTP.POST: 'bold yellow',
|
||||
pygments.token.Name.Function.HTTP.PUT: 'bold orange',
|
||||
pygments.token.Name.Function.HTTP.PATCH: 'bold orange',
|
||||
pygments.token.Name.Function.HTTP.DELETE: 'bold red',
|
||||
pygments.token.Name.Function: PYGMENTS_BOLD | PieColor.GREY,
|
||||
pygments.token.Name.Function.HTTP.GET: PYGMENTS_BOLD | PieColor.GREEN,
|
||||
pygments.token.Name.Function.HTTP.HEAD: PYGMENTS_BOLD | PieColor.GREEN,
|
||||
pygments.token.Name.Function.HTTP.POST: PYGMENTS_BOLD | PieColor.YELLOW,
|
||||
pygments.token.Name.Function.HTTP.PUT: PYGMENTS_BOLD | PieColor.ORANGE,
|
||||
pygments.token.Name.Function.HTTP.PATCH: PYGMENTS_BOLD | PieColor.ORANGE,
|
||||
pygments.token.Name.Function.HTTP.DELETE: PYGMENTS_BOLD | PieColor.RED,
|
||||
|
||||
# HTTP status codes
|
||||
pygments.token.Number.HTTP.INFO: 'bold aqua',
|
||||
pygments.token.Number.HTTP.OK: 'bold green',
|
||||
pygments.token.Number.HTTP.REDIRECT: 'bold yellow',
|
||||
pygments.token.Number.HTTP.CLIENT_ERR: 'bold orange',
|
||||
pygments.token.Number.HTTP.SERVER_ERR: 'bold red',
|
||||
pygments.token.Number.HTTP.INFO: PYGMENTS_BOLD | PieColor.AQUA,
|
||||
pygments.token.Number.HTTP.OK: PYGMENTS_BOLD | PieColor.GREEN,
|
||||
pygments.token.Number.HTTP.REDIRECT: PYGMENTS_BOLD | PieColor.YELLOW,
|
||||
pygments.token.Number.HTTP.CLIENT_ERR: PYGMENTS_BOLD | PieColor.ORANGE,
|
||||
pygments.token.Number.HTTP.SERVER_ERR: PYGMENTS_BOLD | PieColor.RED,
|
||||
|
||||
# Metadata
|
||||
pygments.token.Name.Decorator: 'grey',
|
||||
pygments.token.Number.SPEED.FAST: 'bold green',
|
||||
pygments.token.Number.SPEED.AVG: 'bold yellow',
|
||||
pygments.token.Number.SPEED.SLOW: 'bold orange',
|
||||
pygments.token.Number.SPEED.VERY_SLOW: 'bold red',
|
||||
pygments.token.Name.Decorator: PieColor.GREY,
|
||||
pygments.token.Number.SPEED.FAST: PYGMENTS_BOLD | PieColor.GREEN,
|
||||
pygments.token.Number.SPEED.AVG: PYGMENTS_BOLD | PieColor.YELLOW,
|
||||
pygments.token.Number.SPEED.SLOW: PYGMENTS_BOLD | PieColor.ORANGE,
|
||||
pygments.token.Number.SPEED.VERY_SLOW: PYGMENTS_BOLD | PieColor.RED,
|
||||
}
|
||||
|
||||
PIE_BODY_STYLE = {
|
||||
# {}[]:
|
||||
pygments.token.Punctuation: 'grey',
|
||||
pygments.token.Punctuation: PieColor.GREY,
|
||||
|
||||
# Keys
|
||||
pygments.token.Name.Tag: 'pink',
|
||||
pygments.token.Name.Tag: PieColor.PINK,
|
||||
|
||||
# Values
|
||||
pygments.token.Literal.String: 'green',
|
||||
pygments.token.Literal.String.Double: 'green',
|
||||
pygments.token.Literal.Number: 'aqua',
|
||||
pygments.token.Keyword: 'orange',
|
||||
pygments.token.Literal.String: PieColor.GREEN,
|
||||
pygments.token.Literal.String.Double: PieColor.GREEN,
|
||||
pygments.token.Literal.Number: PieColor.AQUA,
|
||||
pygments.token.Keyword: PieColor.ORANGE,
|
||||
|
||||
# Other stuff
|
||||
pygments.token.Text: 'primary',
|
||||
pygments.token.Name.Attribute: 'primary',
|
||||
pygments.token.Name.Builtin: 'blue',
|
||||
pygments.token.Name.Builtin.Pseudo: 'blue',
|
||||
pygments.token.Name.Class: 'blue',
|
||||
pygments.token.Name.Constant: 'orange',
|
||||
pygments.token.Name.Decorator: 'blue',
|
||||
pygments.token.Name.Entity: 'orange',
|
||||
pygments.token.Name.Exception: 'yellow',
|
||||
pygments.token.Name.Function: 'blue',
|
||||
pygments.token.Name.Variable: 'blue',
|
||||
pygments.token.String: 'aqua',
|
||||
pygments.token.String.Backtick: 'secondary',
|
||||
pygments.token.String.Char: 'aqua',
|
||||
pygments.token.String.Doc: 'aqua',
|
||||
pygments.token.String.Escape: 'red',
|
||||
pygments.token.String.Heredoc: 'aqua',
|
||||
pygments.token.String.Regex: 'red',
|
||||
pygments.token.Number: 'aqua',
|
||||
pygments.token.Operator: 'primary',
|
||||
pygments.token.Operator.Word: 'green',
|
||||
pygments.token.Comment: 'secondary',
|
||||
pygments.token.Comment.Preproc: 'green',
|
||||
pygments.token.Comment.Special: 'green',
|
||||
pygments.token.Generic.Deleted: 'aqua',
|
||||
pygments.token.Generic.Emph: 'italic',
|
||||
pygments.token.Generic.Error: 'red',
|
||||
pygments.token.Generic.Heading: 'orange',
|
||||
pygments.token.Generic.Inserted: 'green',
|
||||
pygments.token.Generic.Strong: 'bold',
|
||||
pygments.token.Generic.Subheading: 'orange',
|
||||
pygments.token.Token: 'primary',
|
||||
pygments.token.Token.Other: 'orange',
|
||||
pygments.token.Text: PieColor.PRIMARY,
|
||||
pygments.token.Name.Attribute: PieColor.PRIMARY,
|
||||
pygments.token.Name.Builtin: PieColor.BLUE,
|
||||
pygments.token.Name.Builtin.Pseudo: PieColor.BLUE,
|
||||
pygments.token.Name.Class: PieColor.BLUE,
|
||||
pygments.token.Name.Constant: PieColor.ORANGE,
|
||||
pygments.token.Name.Decorator: PieColor.BLUE,
|
||||
pygments.token.Name.Entity: PieColor.ORANGE,
|
||||
pygments.token.Name.Exception: PieColor.YELLOW,
|
||||
pygments.token.Name.Function: PieColor.BLUE,
|
||||
pygments.token.Name.Variable: PieColor.BLUE,
|
||||
pygments.token.String: PieColor.AQUA,
|
||||
pygments.token.String.Backtick: PieColor.SECONDARY,
|
||||
pygments.token.String.Char: PieColor.AQUA,
|
||||
pygments.token.String.Doc: PieColor.AQUA,
|
||||
pygments.token.String.Escape: PieColor.RED,
|
||||
pygments.token.String.Heredoc: PieColor.AQUA,
|
||||
pygments.token.String.Regex: PieColor.RED,
|
||||
pygments.token.Number: PieColor.AQUA,
|
||||
pygments.token.Operator: PieColor.PRIMARY,
|
||||
pygments.token.Operator.Word: PieColor.GREEN,
|
||||
pygments.token.Comment: PieColor.SECONDARY,
|
||||
pygments.token.Comment.Preproc: PieColor.GREEN,
|
||||
pygments.token.Comment.Special: PieColor.GREEN,
|
||||
pygments.token.Generic.Deleted: PieColor.AQUA,
|
||||
pygments.token.Generic.Emph: PYGMENTS_ITALIC,
|
||||
pygments.token.Generic.Error: PieColor.RED,
|
||||
pygments.token.Generic.Heading: PieColor.ORANGE,
|
||||
pygments.token.Generic.Inserted: PieColor.GREEN,
|
||||
pygments.token.Generic.Strong: PYGMENTS_BOLD,
|
||||
pygments.token.Generic.Subheading: PieColor.ORANGE,
|
||||
pygments.token.Token: PieColor.PRIMARY,
|
||||
pygments.token.Token.Other: PieColor.ORANGE,
|
||||
}
|
||||
|
||||
|
||||
@ -370,7 +371,7 @@ def make_style(name, raw_styles, shade):
|
||||
def make_styles():
|
||||
styles = {}
|
||||
|
||||
for shade, name in SHADE_NAMES.items():
|
||||
for shade, name in SHADE_TO_PIE_STYLE.items():
|
||||
styles[name] = [
|
||||
make_style(name, style_map, shade)
|
||||
for style_name, style_map in [
|
||||
|
47
httpie/output/ui/man_pages.py
Normal file
@ -0,0 +1,47 @@
|
||||
"""Logic for checking and displaying man pages."""
|
||||
|
||||
import subprocess
|
||||
import os
|
||||
from httpie.context import Environment
|
||||
|
||||
MAN_COMMAND = 'man'
|
||||
NO_MAN_PAGES = os.getenv('HTTPIE_NO_MAN_PAGES', False)
|
||||
|
||||
# On some systems, HTTP(n) might exist but we are only
|
||||
# interested in HTTP(1).
|
||||
#
|
||||
# For more information on man page sections: https://unix.stackexchange.com/a/138643
|
||||
|
||||
MAN_PAGE_SECTION = '1'
|
||||
|
||||
|
||||
def is_available(program: str) -> bool:
|
||||
"""Check whether HTTPie's man pages are available in this system."""
|
||||
|
||||
if NO_MAN_PAGES or os.name == 'nt':
|
||||
return False
|
||||
|
||||
try:
|
||||
process = subprocess.run(
|
||||
[MAN_COMMAND, MAN_PAGE_SECTION, program],
|
||||
shell=False,
|
||||
stdout=subprocess.DEVNULL,
|
||||
stderr=subprocess.DEVNULL
|
||||
)
|
||||
except Exception:
|
||||
# There might be some errors outside of the process, e.g
|
||||
# a permission error to execute something that is not an
|
||||
# executable.
|
||||
return False
|
||||
else:
|
||||
return process.returncode == 0
|
||||
|
||||
|
||||
def display_for(env: Environment, program: str) -> None:
|
||||
"""Display the man page for the given command (http/https)."""
|
||||
|
||||
subprocess.run(
|
||||
[MAN_COMMAND, MAN_PAGE_SECTION, program],
|
||||
stdout=env.stdout,
|
||||
stderr=env.stderr
|
||||
)
|
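A minimal sketch of how these two helpers fit together; the wiring from the http/https entry points is not part of this excerpt, and the fallback branch is only illustrative:

    from httpie.context import Environment
    from httpie.output.ui.man_pages import is_available, display_for

    env = Environment()
    if is_available('http'):          # `man 1 http` exits with 0
        display_for(env, 'http')      # page goes to env.stdout / env.stderr
    else:
        print('Falling back to --help style output')  # hypothetical fallback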
@ -1,17 +1,118 @@
|
||||
from typing import Optional
|
||||
|
||||
STYLE_PIE = 'pie'
|
||||
STYLE_PIE_DARK = 'pie-dark'
|
||||
STYLE_PIE_LIGHT = 'pie-light'
|
||||
from dataclasses import dataclass, field
|
||||
from enum import Enum, auto
|
||||
from typing import Optional, List
|
||||
|
||||
|
||||
PYGMENTS_BRIGHT_BLACK = 'ansibrightblack'
|
||||
|
||||
AUTO_STYLE = 'auto' # Follows terminal ANSI color styles
|
||||
|
||||
|
||||
class Styles(Enum):
|
||||
PIE = auto()
|
||||
ANSI = auto()
|
||||
|
||||
|
||||
class PieStyle(str, Enum):
|
||||
UNIVERSAL = 'pie'
|
||||
DARK = 'pie-dark'
|
||||
LIGHT = 'pie-light'
|
||||
|
||||
|
||||
PIE_STYLE_TO_SHADE = {
|
||||
PieStyle.DARK: '500',
|
||||
PieStyle.UNIVERSAL: '600',
|
||||
PieStyle.LIGHT: '700',
|
||||
}
|
||||
SHADE_TO_PIE_STYLE = {
|
||||
shade: style for style, shade in PIE_STYLE_TO_SHADE.items()
|
||||
}
|
||||
|
||||
|
||||
class ColorString(str):
|
||||
def __or__(self, other: str) -> 'ColorString':
|
||||
"""Combine a style with a property.
|
||||
|
||||
E.g: PieColor.BLUE | BOLD | ITALIC
|
||||
"""
|
||||
if isinstance(other, str):
|
||||
# In case of PieColor.BLUE | SOMETHING
|
||||
# we just create a new string.
|
||||
return ColorString(self + ' ' + other)
|
||||
elif isinstance(other, GenericColor):
|
||||
# If we see a GenericColor, then we'll wrap it
|
||||
# in a different class together with the desired property.
|
||||
return _StyledGenericColor(other, styles=self.split())
|
||||
elif isinstance(other, _StyledGenericColor):
|
||||
# And if it is already wrapped, we'll just extend the
|
||||
# list of properties.
|
||||
other.styles.extend(self.split())
|
||||
return other
|
||||
else:
|
||||
return NotImplemented
|
||||
|
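Concretely, this __or__ overload is what lets the styles later in this diff be written as enum pipelines instead of raw strings; the results below follow directly from the branches above:

    # With PYGMENTS_BOLD = ColorString('bold') as defined in the pie formatter:
    PYGMENTS_BOLD | PieColor.GREEN       # -> ColorString('bold green')
    PYGMENTS_BOLD | GenericColor.GREEN   # -> _StyledGenericColor(GenericColor.GREEN, styles=['bold'])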
||||
|
||||
class PieColor(ColorString, Enum):
|
||||
"""Styles that are available only in Pie themes."""
|
||||
|
||||
PRIMARY = 'primary'
|
||||
SECONDARY = 'secondary'
|
||||
|
||||
WHITE = 'white'
|
||||
BLACK = 'black'
|
||||
GREY = 'grey'
|
||||
AQUA = 'aqua'
|
||||
PURPLE = 'purple'
|
||||
ORANGE = 'orange'
|
||||
RED = 'red'
|
||||
BLUE = 'blue'
|
||||
PINK = 'pink'
|
||||
GREEN = 'green'
|
||||
YELLOW = 'yellow'
|
||||
|
||||
|
||||
class GenericColor(Enum):
|
||||
"""Generic colors that are safe to use everywhere."""
|
||||
|
||||
# <https://rich.readthedocs.io/en/stable/appendix/colors.html>
|
||||
|
||||
WHITE = {Styles.PIE: PieColor.WHITE, Styles.ANSI: 'white'}
|
||||
BLACK = {Styles.PIE: PieColor.BLACK, Styles.ANSI: 'black'}
|
||||
GREEN = {Styles.PIE: PieColor.GREEN, Styles.ANSI: 'green'}
|
||||
ORANGE = {Styles.PIE: PieColor.ORANGE, Styles.ANSI: 'yellow'}
|
||||
YELLOW = {Styles.PIE: PieColor.YELLOW, Styles.ANSI: 'bright_yellow'}
|
||||
BLUE = {Styles.PIE: PieColor.BLUE, Styles.ANSI: 'blue'}
|
||||
PINK = {Styles.PIE: PieColor.PINK, Styles.ANSI: 'bright_magenta'}
|
||||
PURPLE = {Styles.PIE: PieColor.PURPLE, Styles.ANSI: 'magenta'}
|
||||
RED = {Styles.PIE: PieColor.RED, Styles.ANSI: 'red'}
|
||||
AQUA = {Styles.PIE: PieColor.AQUA, Styles.ANSI: 'cyan'}
|
||||
GREY = {Styles.PIE: PieColor.GREY, Styles.ANSI: 'bright_black'}
|
||||
|
||||
def apply_style(
|
||||
self, style: Styles, *, style_name: Optional[str] = None
|
||||
) -> str:
|
||||
"""Apply the given style to a particular value."""
|
||||
exposed_color = self.value[style]
|
||||
if style is Styles.PIE:
|
||||
assert style_name is not None
|
||||
shade = PIE_STYLE_TO_SHADE[PieStyle(style_name)]
|
||||
return get_color(exposed_color, shade)
|
||||
else:
|
||||
return exposed_color
|
||||
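For instance, the ANSI value comes straight from the mapping above, while the PIE value depends on COLOR_PALETTE further down (its exact hex is therefore only described, not spelled out):

    GenericColor.RED.apply_style(Styles.ANSI)
    # -> 'red'

    GenericColor.RED.apply_style(Styles.PIE, style_name='pie-dark')
    # -> get_color(PieColor.RED, '500'), i.e. the '500' shade of red in COLOR_PALETTE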
|
||||
|
||||
@dataclass
|
||||
class _StyledGenericColor:
|
||||
color: 'GenericColor'
|
||||
styles: List[str] = field(default_factory=list)
|
||||
|
||||
|
||||
# noinspection PyDictCreation
|
||||
COLOR_PALETTE = {
|
||||
# Copy the brand palette
|
||||
'transparent': 'transparent',
|
||||
'current': 'currentColor',
|
||||
'white': '#F5F5F0',
|
||||
'black': '#1C1818',
|
||||
'grey': {
|
||||
PieColor.WHITE: '#F5F5F0',
|
||||
PieColor.BLACK: '#1C1818',
|
||||
PieColor.GREY: {
|
||||
'50': '#F5F5F0',
|
||||
'100': '#EDEDEB',
|
||||
'200': '#D1D1CF',
|
||||
@ -24,7 +125,7 @@ COLOR_PALETTE = {
|
||||
'900': '#1C1818',
|
||||
'DEFAULT': '#7D7D7D',
|
||||
},
|
||||
'aqua': {
|
||||
PieColor.AQUA: {
|
||||
'50': '#E8F0F5',
|
||||
'100': '#D6E3ED',
|
||||
'200': '#C4D9E5',
|
||||
@ -37,7 +138,7 @@ COLOR_PALETTE = {
|
||||
'900': '#455966',
|
||||
'DEFAULT': '#8CB4CD',
|
||||
},
|
||||
'purple': {
|
||||
PieColor.PURPLE: {
|
||||
'50': '#F0E0FC',
|
||||
'100': '#E3C7FA',
|
||||
'200': '#D9ADF7',
|
||||
@ -50,7 +151,7 @@ COLOR_PALETTE = {
|
||||
'900': '#5C2982',
|
||||
'DEFAULT': '#B464F0',
|
||||
},
|
||||
'orange': {
|
||||
PieColor.ORANGE: {
|
||||
'50': '#FFEDDB',
|
||||
'100': '#FFDEBF',
|
||||
'200': '#FFCFA3',
|
||||
@ -63,7 +164,7 @@ COLOR_PALETTE = {
|
||||
'900': '#C75E0A',
|
||||
'DEFAULT': '#FFA24E',
|
||||
},
|
||||
'red': {
|
||||
PieColor.RED: {
|
||||
'50': '#FFE0DE',
|
||||
'100': '#FFC7C4',
|
||||
'200': '#FFB0AB',
|
||||
@ -76,7 +177,7 @@ COLOR_PALETTE = {
|
||||
'900': '#910A00',
|
||||
'DEFAULT': '#FF665B',
|
||||
},
|
||||
'blue': {
|
||||
PieColor.BLUE: {
|
||||
'50': '#DBE3FA',
|
||||
'100': '#BFCFF5',
|
||||
'200': '#A1B8F2',
|
||||
@ -89,7 +190,7 @@ COLOR_PALETTE = {
|
||||
'900': '#2B478F',
|
||||
'DEFAULT': '#4B78E6',
|
||||
},
|
||||
'pink': {
|
||||
PieColor.PINK: {
|
||||
'50': '#FFEBFF',
|
||||
'100': '#FCDBFC',
|
||||
'200': '#FCCCFC',
|
||||
@ -102,7 +203,7 @@ COLOR_PALETTE = {
|
||||
'900': '#8C3D8A',
|
||||
'DEFAULT': '#FA9BFA',
|
||||
},
|
||||
'green': {
|
||||
PieColor.GREEN: {
|
||||
'50': '#E3F7E8',
|
||||
'100': '#CCF2D6',
|
||||
'200': '#B5EDC4',
|
||||
@ -115,7 +216,7 @@ COLOR_PALETTE = {
|
||||
'900': '#307842',
|
||||
'DEFAULT': '#73DC8C',
|
||||
},
|
||||
'yellow': {
|
||||
PieColor.YELLOW: {
|
||||
'50': '#F7F7DB',
|
||||
'100': '#F2F2BF',
|
||||
'200': '#EDEDA6',
|
||||
@ -129,38 +230,39 @@ COLOR_PALETTE = {
|
||||
'DEFAULT': '#DBDE52',
|
||||
},
|
||||
}
|
||||
|
||||
# Grey is the same no matter shade for the colors
|
||||
COLOR_PALETTE['grey'] = {
|
||||
shade: COLOR_PALETTE['grey']['500'] for shade in COLOR_PALETTE['grey'].keys()
|
||||
}
|
||||
|
||||
COLOR_PALETTE['primary'] = {
|
||||
'700': COLOR_PALETTE['black'],
|
||||
'600': 'ansibrightblack',
|
||||
'500': COLOR_PALETTE['white'],
|
||||
}
|
||||
|
||||
COLOR_PALETTE['secondary'] = {'700': '#37523C', '600': '#6c6969', '500': '#6c6969'}
|
||||
COLOR_PALETTE.update(
|
||||
{
|
||||
# Terminal-specific palette customizations.
|
||||
PieColor.GREY: {
|
||||
# Grey is the same no matter shade for the colors
|
||||
shade: COLOR_PALETTE[PieColor.GREY]['500']
|
||||
for shade in COLOR_PALETTE[PieColor.GREY].keys()
|
||||
},
|
||||
PieColor.PRIMARY: {
|
||||
'700': COLOR_PALETTE[PieColor.BLACK],
|
||||
'600': PYGMENTS_BRIGHT_BLACK,
|
||||
'500': COLOR_PALETTE[PieColor.WHITE],
|
||||
},
|
||||
PieColor.SECONDARY: {
|
||||
'700': '#37523C',
|
||||
'600': '#6c6969',
|
||||
'500': '#6c6969',
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
SHADE_NAMES = {
|
||||
'500': STYLE_PIE_DARK,
|
||||
'600': STYLE_PIE,
|
||||
'700': STYLE_PIE_LIGHT
|
||||
}
|
||||
|
||||
SHADES = [
|
||||
'50',
|
||||
*map(str, range(100, 1000, 100))
|
||||
]
|
||||
def boldify(color: PieColor) -> str:
|
||||
return f'bold {color}'
|
||||
|
||||
|
||||
def get_color(color: str, shade: str) -> Optional[str]:
|
||||
if color not in COLOR_PALETTE:
|
||||
# noinspection PyDefaultArgument
|
||||
def get_color(
|
||||
color: PieColor, shade: str, *, palette=COLOR_PALETTE
|
||||
) -> Optional[str]:
|
||||
if color not in palette:
|
||||
return None
|
||||
|
||||
color_code = COLOR_PALETTE[color]
|
||||
color_code = palette[color]
|
||||
if isinstance(color_code, dict) and shade in color_code:
|
||||
return color_code[shade]
|
||||
else:
|
||||
|
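The hunk above is truncated, but the lookup itself is simple enough to sketch in isolation. A minimal, self-contained stand-in (toy palette rather than HTTPie's real data; the final fall-through return is assumed from context):

from typing import Optional

TOY_PALETTE = {
    'blue': {'500': '#4B78E6', '600': '#426BD1', 'DEFAULT': '#4B78E6'},
    'white': '#F5F5F0',
}

def lookup(color: str, shade: str, *, palette=TOY_PALETTE) -> Optional[str]:
    # Mirrors the new get_color(): unknown colors yield None, nested
    # entries are resolved per shade, flat entries ignore the shade.
    if color not in palette:
        return None
    color_code = palette[color]
    if isinstance(color_code, dict) and shade in color_code:
        return color_code[shade]
    return color_code  # assumed fall-through for flat (non-dict) entries

print(lookup('blue', '600'))   # -> '#426BD1'
print(lookup('white', '500'))  # -> '#F5F5F0'
print(lookup('teal', '500'))   # -> None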
230
httpie/output/ui/rich_help.py
Normal file
@ -0,0 +1,230 @@
|
||||
import re
|
||||
import textwrap
|
||||
from typing import AbstractSet, Iterable, Optional, Tuple
|
||||
|
||||
from rich.console import RenderableType
|
||||
from rich.highlighter import RegexHighlighter
|
||||
from rich.padding import Padding
|
||||
from rich.table import Table
|
||||
from rich.text import Text
|
||||
|
||||
from httpie.cli.constants import SEPARATOR_GROUP_ALL_ITEMS
|
||||
from httpie.cli.options import Argument, ParserSpec, Qualifiers
|
||||
from httpie.output.ui.palette import GenericColor
|
||||
|
||||
SEPARATORS = '|'.join(map(re.escape, SEPARATOR_GROUP_ALL_ITEMS))
|
||||
|
||||
STYLE_METAVAR = GenericColor.YELLOW
|
||||
STYLE_SWITCH = GenericColor.GREEN
|
||||
STYLE_PROGRAM_NAME = GenericColor.GREEN # .boldify()
|
||||
STYLE_USAGE_OPTIONAL = GenericColor.GREY
|
||||
STYLE_USAGE_REGULAR = GenericColor.WHITE
|
||||
STYLE_USAGE_ERROR = GenericColor.RED
|
||||
STYLE_USAGE_MISSING = GenericColor.YELLOW
|
||||
STYLE_BOLD = 'bold'
|
||||
|
||||
MAX_CHOICE_CHARS = 80
|
||||
|
||||
LEFT_PADDING_2 = (0, 0, 0, 2)
|
||||
LEFT_PADDING_3 = (0, 0, 0, 3)
|
||||
LEFT_PADDING_4 = (0, 0, 0, 4)
|
||||
LEFT_PADDING_5 = (0, 0, 0, 4)
|
||||
|
||||
LEFT_INDENT_2 = (1, 0, 0, 2)
|
||||
LEFT_INDENT_3 = (1, 0, 0, 3)
|
||||
LEFT_INDENT_BOTTOM_3 = (0, 0, 1, 3)
|
||||
|
||||
MORE_INFO_COMMANDS = """
|
||||
To learn more, you can try:
|
||||
-> running 'http --manual'
|
||||
-> visiting our full documentation at https://httpie.io/docs/cli
|
||||
"""
|
||||
|
||||
|
||||
class OptionsHighlighter(RegexHighlighter):
|
||||
highlights = [
|
||||
r'(^|\W)(?P<option>\-{1,2}[\w|-]+)(?![a-zA-Z0-9])',
|
||||
r'(?P<bold>HTTPie)',
|
||||
]
|
||||
|
||||
|
||||
options_highlighter = OptionsHighlighter()
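The 'option' pattern above is easiest to understand by running it on a plain string; a quick sanity check with re alone (the example text is made up):

import re

OPTION_RE = r'(^|\W)(?P<option>\-{1,2}[\w|-]+)(?![a-zA-Z0-9])'
text = 'Use --verbose or -v to get more output from HTTPie.'
print([m.group('option') for m in re.finditer(OPTION_RE, text)])
# -> ['--verbose', '-v']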
|
||||
|
||||
|
||||
def unpack_argument(
|
||||
argument: Argument,
|
||||
) -> Tuple[Text, Text]:
|
||||
opt1 = opt2 = ''
|
||||
|
||||
style = None
|
||||
if argument.aliases:
|
||||
if len(argument.aliases) >= 2:
|
||||
opt2, opt1 = argument.aliases
|
||||
else:
|
||||
(opt1,) = argument.aliases
|
||||
else:
|
||||
opt1 = argument.metavar
|
||||
style = STYLE_USAGE_REGULAR
|
||||
|
||||
return Text(opt1, style=style), Text(opt2)
|
||||
|
||||
|
||||
def to_usage(
|
||||
spec: ParserSpec,
|
||||
*,
|
||||
program_name: Optional[str] = None,
|
||||
whitelist: AbstractSet[str] = frozenset()
|
||||
) -> RenderableType:
|
||||
shown_arguments = [
|
||||
argument
|
||||
for group in spec.groups
|
||||
for argument in group.arguments
|
||||
if (not argument.aliases or whitelist.intersection(argument.aliases))
|
||||
]
|
||||
|
||||
# Sort the shown_arguments so that --dash options are
|
||||
# shown first
|
||||
shown_arguments.sort(key=lambda argument: argument.aliases, reverse=True)
|
||||
|
||||
text = Text(program_name or spec.program, style=STYLE_BOLD)
|
||||
for argument in shown_arguments:
|
||||
text.append(' ')
|
||||
|
||||
is_whitelisted = whitelist.intersection(argument.aliases)
|
||||
if argument.aliases:
|
||||
name = '/'.join(sorted(argument.aliases, key=len))
|
||||
else:
|
||||
name = argument.metavar
|
||||
|
||||
nargs = argument.configuration.get('nargs')
|
||||
if nargs is Qualifiers.OPTIONAL:
|
||||
text.append('[' + name + ']', style=STYLE_USAGE_OPTIONAL)
|
||||
elif nargs is Qualifiers.ZERO_OR_MORE:
|
||||
text.append(
|
||||
'[' + name + ' ...]',
|
||||
style=STYLE_USAGE_OPTIONAL,
|
||||
)
|
||||
else:
|
||||
text.append(
|
||||
name,
|
||||
style=STYLE_USAGE_ERROR
|
||||
if is_whitelisted
|
||||
else STYLE_USAGE_REGULAR,
|
||||
)
|
||||
|
||||
raw_form = argument.serialize()
|
||||
if raw_form.get('choices'):
|
||||
text.append(' ')
|
||||
text.append(
|
||||
'{' + ', '.join(raw_form['choices']) + '}',
|
||||
style=STYLE_USAGE_MISSING,
|
||||
)
|
||||
|
||||
return text
|
||||
|
||||
|
||||
# This part is loosely based on rich-click's help message
|
||||
# generation.
|
||||
def to_help_message(
|
||||
spec: ParserSpec,
|
||||
) -> Iterable[RenderableType]:
|
||||
yield Padding(
|
||||
options_highlighter(spec.description),
|
||||
LEFT_INDENT_2,
|
||||
)
|
||||
|
||||
yield Padding(
|
||||
Text('Usage', style=STYLE_SWITCH),
|
||||
LEFT_INDENT_2,
|
||||
)
|
||||
yield Padding(to_usage(spec), LEFT_INDENT_3)
|
||||
|
||||
group_rows = {}
|
||||
for group in spec.groups:
|
||||
options_rows = []
|
||||
|
||||
for argument in group.arguments:
|
||||
if argument.is_hidden:
|
||||
continue
|
||||
|
||||
opt1, opt2 = unpack_argument(argument)
|
||||
if opt2:
|
||||
opt1.append('/')
|
||||
opt1.append(opt2)
|
||||
|
||||
# Column for a metavar, if we have one
|
||||
metavar = Text(style=STYLE_METAVAR)
|
||||
metavar.append(argument.configuration.get('metavar', ''))
|
||||
|
||||
if opt1 == metavar:
|
||||
metavar = Text('')
|
||||
|
||||
raw_form = argument.serialize()
|
||||
desc = raw_form.get('short_description', '')
|
||||
if raw_form.get('choices'):
|
||||
desc += ' (choices: '
|
||||
desc += textwrap.shorten(
|
||||
', '.join(raw_form.get('choices')),
|
||||
MAX_CHOICE_CHARS,
|
||||
)
|
||||
desc += ')'
|
||||
|
||||
rows = [
|
||||
Padding(
|
||||
options_highlighter(opt1),
|
||||
LEFT_PADDING_2,
|
||||
),
|
||||
metavar,
|
||||
options_highlighter(desc),
|
||||
]
|
||||
|
||||
options_rows.append(rows)
|
||||
if argument.configuration.get('nested_options'):
|
||||
options_rows.extend(
|
||||
[
|
||||
(
|
||||
Padding(
|
||||
Text(
|
||||
key,
|
||||
style=STYLE_USAGE_OPTIONAL,
|
||||
),
|
||||
LEFT_PADDING_4,
|
||||
),
|
||||
value,
|
||||
dec,
|
||||
)
|
||||
for key, value, dec in argument.nested_options
|
||||
]
|
||||
)
|
||||
|
||||
group_rows[group.name] = options_rows
|
||||
|
||||
options_table = Table(highlight=False, box=None, show_header=False)
|
||||
for group_name, options_rows in group_rows.items():
|
||||
options_table.add_row(Text(), Text(), Text())
|
||||
options_table.add_row(
|
||||
Text(group_name, style=STYLE_SWITCH),
|
||||
Text(),
|
||||
Text(),
|
||||
)
|
||||
options_table.add_row(Text(), Text(), Text())
|
||||
for row in options_rows:
|
||||
options_table.add_row(*row)
|
||||
|
||||
yield Padding(
|
||||
Text('Options', style=STYLE_SWITCH),
|
||||
LEFT_INDENT_2,
|
||||
)
|
||||
yield Padding(options_table, LEFT_PADDING_2)
|
||||
yield Padding(
|
||||
Text('More Information', style=STYLE_SWITCH),
|
||||
LEFT_INDENT_2,
|
||||
)
|
||||
yield Padding(
|
||||
MORE_INFO_COMMANDS.rstrip('\n'),
|
||||
LEFT_PADDING_3
|
||||
)
|
||||
yield Padding(
|
||||
spec.epilog.rstrip('\n'),
|
||||
LEFT_INDENT_BOTTOM_3,
|
||||
)
|
73
httpie/output/ui/rich_palette.py
Normal file
@ -0,0 +1,73 @@
|
||||
from collections import ChainMap
|
||||
from typing import TYPE_CHECKING, Any, Optional
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from rich.theme import Theme
|
||||
|
||||
from httpie.output.ui.palette import GenericColor, PieStyle, Styles, ColorString, _StyledGenericColor # noqa
|
||||
|
||||
RICH_BOLD = ColorString('bold')
|
||||
|
||||
# Rich-specific color code declarations
|
||||
# <https://github.com/Textualize/rich/blob/fcd684dd3a482977cab620e71ccaebb94bf13ac9/rich/default_styles.py>
|
||||
CUSTOM_STYLES = {
|
||||
'progress.description': RICH_BOLD | GenericColor.WHITE,
|
||||
'progress.data.speed': RICH_BOLD | GenericColor.GREEN,
|
||||
'progress.percentage': RICH_BOLD | GenericColor.AQUA,
|
||||
'progress.download': RICH_BOLD | GenericColor.AQUA,
|
||||
'progress.remaining': RICH_BOLD | GenericColor.ORANGE,
|
||||
'bar.complete': RICH_BOLD | GenericColor.PURPLE,
|
||||
'bar.finished': RICH_BOLD | GenericColor.GREEN,
|
||||
'bar.pulse': RICH_BOLD | GenericColor.PURPLE,
|
||||
'option': RICH_BOLD | GenericColor.PINK,
|
||||
}
|
||||
|
||||
|
||||
class _GenericColorCaster(dict):
|
||||
"""
|
||||
Translate GenericColor to a regular string on the attribute access
|
||||
phase.
|
||||
"""
|
||||
|
||||
def _translate(self, key: Any) -> Any:
|
||||
if isinstance(key, GenericColor):
|
||||
return key.name.lower()
|
||||
else:
|
||||
return key
|
||||
|
||||
def __getitem__(self, key: Any) -> Any:
|
||||
return super().__getitem__(self._translate(key))
|
||||
|
||||
def get(self, key: Any) -> Any:
|
||||
return super().get(self._translate(key))
|
||||
|
||||
|
||||
def _make_rich_color_theme(style_name: Optional[str] = None) -> 'Theme':
|
||||
from rich.style import Style
|
||||
from rich.theme import Theme
|
||||
|
||||
try:
|
||||
PieStyle(style_name)
|
||||
except ValueError:
|
||||
style = Styles.ANSI
|
||||
else:
|
||||
style = Styles.PIE
|
||||
|
||||
theme = Theme()
|
||||
for color, color_set in ChainMap(
|
||||
GenericColor.__members__, CUSTOM_STYLES
|
||||
).items():
|
||||
if isinstance(color_set, _StyledGenericColor):
|
||||
properties = dict.fromkeys(color_set.styles, True)
|
||||
color_set = color_set.color
|
||||
else:
|
||||
properties = {}
|
||||
|
||||
theme.styles[color.lower()] = Style(
|
||||
color=color_set.apply_style(style, style_name=style_name),
|
||||
**properties,
|
||||
)
|
||||
|
||||
# E.g. translate GenericColor.BLUE into blue on key access
|
||||
theme.styles = _GenericColorCaster(theme.styles)
|
||||
return theme
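The `_GenericColorCaster` trick is independent of rich and can be sketched with any Enum; a self-contained stand-in (the names here are illustrative, not HTTPie's):

from enum import Enum

class Color(Enum):        # stand-in for GenericColor
    BLUE = 'blue'
    WHITE = 'white'

class CastingDict(dict):
    # Translate enum members to their lowercase names on key access,
    # so Color.BLUE and the plain string 'blue' hit the same entry.
    def _translate(self, key):
        return key.name.lower() if isinstance(key, Color) else key

    def __getitem__(self, key):
        return super().__getitem__(self._translate(key))

    def get(self, key, default=None):
        return super().get(self._translate(key), default)

styles = CastingDict({'blue': 'ansi blue', 'white': 'ansi white'})
print(styles[Color.BLUE], styles.get('white'))
# -> ansi blue ansi white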
|
141
httpie/output/ui/rich_progress.py
Normal file
@ -0,0 +1,141 @@
|
||||
from dataclasses import dataclass
|
||||
from typing import TYPE_CHECKING, Optional
|
||||
|
||||
from httpie.context import Environment
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from rich.console import Console
|
||||
|
||||
|
||||
@dataclass
|
||||
class BaseDisplay:
|
||||
env: Environment
|
||||
|
||||
def start(
|
||||
self, *, total: Optional[float], at: float, description: str
|
||||
) -> None:
|
||||
...
|
||||
|
||||
def update(self, steps: float) -> None:
|
||||
...
|
||||
|
||||
def stop(self, time_spent: float) -> None:
|
||||
...
|
||||
|
||||
@property
|
||||
def console(self) -> 'Console':
|
||||
"""Returns the default console to be used with displays (stderr)."""
|
||||
return self.env.rich_error_console
|
||||
|
||||
def _print_summary(
|
||||
self, is_finished: bool, observed_steps: int, time_spent: float
|
||||
):
|
||||
from rich import filesize
|
||||
|
||||
if is_finished:
|
||||
verb = 'Done'
|
||||
else:
|
||||
verb = 'Interrupted'
|
||||
|
||||
total_size = filesize.decimal(observed_steps)
|
||||
avg_speed = filesize.decimal(observed_steps / time_spent)
|
||||
|
||||
minutes, seconds = divmod(time_spent, 60)
|
||||
hours, minutes = divmod(int(minutes), 60)
|
||||
if hours:
|
||||
total_time = f'{hours:d}:{minutes:02d}:{seconds:0.5f}'
|
||||
else:
|
||||
total_time = f'{minutes:02d}:{seconds:0.5f}'
|
||||
|
||||
self.console.print(
|
||||
f'[progress.description]{verb}. {total_size} in {total_time} ({avg_speed}/s)'
|
||||
)
|
||||
|
||||
|
||||
class DummyDisplay(BaseDisplay):
|
||||
"""
|
||||
A dummy display object to be used when the progress bars,
|
||||
spinners etc. are disabled globally (or during tests).
|
||||
"""
|
||||
|
||||
|
||||
class StatusDisplay(BaseDisplay):
|
||||
def start(
|
||||
self, *, total: Optional[float], at: float, description: str
|
||||
) -> None:
|
||||
self.observed = at
|
||||
self.description = (
|
||||
f'[progress.description]{description}[/progress.description]'
|
||||
)
|
||||
|
||||
self.status = self.console.status(self.description, spinner='line')
|
||||
self.status.start()
|
||||
|
||||
def update(self, steps: float) -> None:
|
||||
from rich import filesize
|
||||
|
||||
self.observed += steps
|
||||
|
||||
observed_amount, observed_unit = filesize.decimal(
|
||||
self.observed
|
||||
).split()
|
||||
self.status.update(
|
||||
status=f'{self.description} [progress.download]{observed_amount}/? {observed_unit}[/progress.download]'
|
||||
)
|
||||
|
||||
def stop(self, time_spent: float) -> None:
|
||||
self.status.stop()
|
||||
self.console.print(self.description)
|
||||
if time_spent:
|
||||
self._print_summary(
|
||||
is_finished=True,
|
||||
observed_steps=self.observed,
|
||||
time_spent=time_spent,
|
||||
)
|
||||
|
||||
|
||||
class ProgressDisplay(BaseDisplay):
|
||||
def start(
|
||||
self, *, total: Optional[float], at: float, description: str
|
||||
) -> None:
|
||||
from rich.progress import (
|
||||
Progress,
|
||||
BarColumn,
|
||||
DownloadColumn,
|
||||
TimeRemainingColumn,
|
||||
TransferSpeedColumn,
|
||||
)
|
||||
|
||||
assert total is not None
|
||||
self.console.print(f'[progress.description]{description}')
|
||||
self.progress_bar = Progress(
|
||||
'[',
|
||||
BarColumn(),
|
||||
']',
|
||||
'[progress.percentage]{task.percentage:>3.0f}%',
|
||||
'(',
|
||||
DownloadColumn(),
|
||||
')',
|
||||
TimeRemainingColumn(),
|
||||
TransferSpeedColumn(),
|
||||
console=self.console,
|
||||
transient=True,
|
||||
)
|
||||
self.progress_bar.start()
|
||||
self.transfer_task = self.progress_bar.add_task(
|
||||
description, completed=at, total=total
|
||||
)
|
||||
|
||||
def update(self, steps: float) -> None:
|
||||
self.progress_bar.advance(self.transfer_task, steps)
|
||||
|
||||
def stop(self, time_spent: Optional[float]) -> None:
|
||||
self.progress_bar.stop()
|
||||
|
||||
if time_spent:
|
||||
[task] = self.progress_bar.tasks
|
||||
self._print_summary(
|
||||
is_finished=task.finished,
|
||||
observed_steps=task.completed,
|
||||
time_spent=time_spent,
|
||||
)
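The elapsed-time formatting in `_print_summary` is plain Python and can be checked on its own; a small pull-out of just that logic:

def format_time(time_spent: float) -> str:
    minutes, seconds = divmod(time_spent, 60)
    hours, minutes = divmod(int(minutes), 60)
    if hours:
        return f'{hours:d}:{minutes:02d}:{seconds:0.5f}'
    return f'{minutes:02d}:{seconds:0.5f}'

print(format_time(7.25))     # -> 00:7.25000
print(format_time(3725.5))   # -> 1:02:5.50000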
|
34
httpie/output/ui/rich_utils.py
Normal file
@ -0,0 +1,34 @@
import os

from typing import Iterator
from contextlib import contextmanager

from rich.console import Console, RenderableType
from rich.highlighter import Highlighter

from httpie.output.ui.rich_palette import _make_rich_color_theme


def render_as_string(renderable: RenderableType) -> str:
    """Render any `rich` object in a fake console and
    return a *style-less* version of it as a string."""

    with open(os.devnull, 'w') as null_stream:
        fake_console = Console(file=null_stream, record=True, theme=_make_rich_color_theme())
        fake_console.print(renderable)
        return fake_console.export_text()


@contextmanager
def enable_highlighter(
    console: Console,
    highlighter: Highlighter,
) -> Iterator[Console]:
    """Enable a highlighter temporarily."""

    original_highlighter = console.highlighter
    try:
        console.highlighter = highlighter
        yield console
    finally:
        console.highlighter = original_highlighter
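The record-and-export trick in `render_as_string` also works with a bare rich Console (assuming rich is installed, as the updated setup.py below requires; default theme here instead of HTTPie's):

import os
from rich.console import Console
from rich.table import Table

with open(os.devnull, 'w') as null_stream:
    fake_console = Console(file=null_stream, record=True)
    table = Table('name', 'value')
    table.add_row('--json', 'Serialize data items as JSON')
    fake_console.print(table)
    print(fake_console.export_text())   # plain, style-less text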
|
@ -17,6 +17,7 @@ from .processing import Conversion, Formatting
|
||||
from .streams import (
|
||||
BaseStream, BufferedPrettyStream, EncodedStream, PrettyStream, RawStream,
|
||||
)
|
||||
from ..utils import parse_content_type_header
|
||||
|
||||
|
||||
MESSAGE_SEPARATOR = '\n\n'
|
||||
@ -163,7 +164,10 @@ def get_stream_type_and_kwargs(
|
||||
if not is_stream and message_type is HTTPResponse:
|
||||
# If this is a response, then check the headers for determining
|
||||
# auto-streaming.
|
||||
is_stream = headers.get('Content-Type') == 'text/event-stream'
|
||||
raw_content_type_header = headers.get('Content-Type', None)
|
||||
if raw_content_type_header:
|
||||
content_type_header, _ = parse_content_type_header(raw_content_type_header)
|
||||
is_stream = (content_type_header == 'text/event-stream')
|
||||
|
||||
if not env.stdout_isatty and not prettify_groups:
|
||||
stream_class = RawStream
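The motivation for parsing the header first: a charset parameter makes a plain equality check fail. A minimal stand-in for `parse_content_type_header` (the real helper lives in httpie.utils and also returns the parameters):

def content_type_only(header: str) -> str:
    return header.split(';')[0].strip()

raw = 'text/event-stream; charset=utf-8'
print(raw == 'text/event-stream')                      # False -> no auto-streaming before
print(content_type_only(raw) == 'text/event-stream')   # True  -> streams after this change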
|
||||
|
@ -4,13 +4,13 @@ import warnings
|
||||
|
||||
from itertools import groupby
|
||||
from operator import attrgetter
|
||||
from typing import Dict, List, Type, Iterator, Optional, ContextManager
|
||||
from typing import Dict, List, Type, Iterator, Iterable, Optional, ContextManager
|
||||
from pathlib import Path
|
||||
from contextlib import contextmanager, nullcontext
|
||||
|
||||
from ..compat import importlib_metadata, find_entry_points, get_dist_name
|
||||
|
||||
from ..utils import repr_dict, as_site
|
||||
from ..utils import repr_dict, get_site_paths
|
||||
from . import AuthPlugin, ConverterPlugin, FormatterPlugin, TransportPlugin
|
||||
from .base import BasePlugin
|
||||
|
||||
@ -25,20 +25,24 @@ ENTRY_POINT_NAMES = list(ENTRY_POINT_CLASSES.keys())
|
||||
|
||||
|
||||
@contextmanager
|
||||
def _load_directory(plugins_dir: Path) -> Iterator[None]:
|
||||
plugins_path = os.fspath(plugins_dir)
|
||||
sys.path.insert(0, plugins_path)
|
||||
def _load_directories(site_dirs: Iterable[Path]) -> Iterator[None]:
|
||||
plugin_dirs = [
|
||||
os.fspath(site_dir)
|
||||
for site_dir in site_dirs
|
||||
]
|
||||
sys.path.extend(plugin_dirs)
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
sys.path.remove(plugins_path)
|
||||
for plugin_dir in plugin_dirs:
|
||||
sys.path.remove(plugin_dir)
|
||||
|
||||
|
||||
def enable_plugins(plugins_dir: Optional[Path]) -> ContextManager[None]:
|
||||
if plugins_dir is None:
|
||||
return nullcontext()
|
||||
else:
|
||||
return _load_directory(as_site(plugins_dir))
|
||||
return _load_directories(get_site_paths(plugins_dir))
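A stripped-down version of the `_load_directories` pattern, shown as a standalone context manager (the directory path is hypothetical):

import sys
from contextlib import contextmanager
from typing import Iterable, Iterator

@contextmanager
def load_directories(paths: Iterable[str]) -> Iterator[None]:
    # Keep the plugin directories on sys.path only while loading.
    paths = list(paths)
    sys.path.extend(paths)
    try:
        yield
    finally:
        for path in paths:
            sys.path.remove(path)

with load_directories(['/tmp/example-plugins']):
    pass  # entry points would be discovered here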
|
||||
|
||||
|
||||
class PluginManager(list):
|
||||
@ -66,7 +70,7 @@ class PluginManager(list):
|
||||
plugin = entry_point.load()
|
||||
except BaseException as exc:
|
||||
warnings.warn(
|
||||
f'While loading "{plugin_name}", an error ocurred: {exc}\n'
|
||||
f'While loading "{plugin_name}", an error occurred: {exc}\n'
|
||||
f'For uninstallations, please use either "httpie plugins uninstall {plugin_name}" '
|
||||
f'or "pip uninstall {plugin_name}" (depending on how you installed it in the first '
|
||||
'place).'
|
||||
|
@ -13,12 +13,17 @@ from typing import Any, Dict, List, Optional, Union
|
||||
from requests.auth import AuthBase
|
||||
from requests.cookies import RequestsCookieJar, remove_cookie_by_name
|
||||
|
||||
from .context import Environment
|
||||
from .context import Environment, LogLevel
|
||||
from .cookies import HTTPieCookiePolicy
|
||||
from .cli.dicts import HTTPHeadersDict
|
||||
from .config import BaseConfigDict, DEFAULT_CONFIG_DIR
|
||||
from .utils import url_as_host
|
||||
from .plugins.registry import plugin_manager
|
||||
from .legacy import cookie_format as legacy_cookies
|
||||
|
||||
from .legacy import (
|
||||
v3_1_0_session_cookie_format as legacy_cookies,
|
||||
v3_2_0_session_header_format as legacy_headers
|
||||
)
|
||||
|
||||
|
||||
SESSIONS_DIR_NAME = 'sessions'
|
||||
@ -67,6 +72,23 @@ def materialize_cookie(cookie: Cookie) -> Dict[str, Any]:
|
||||
return materialized_cookie
|
||||
|
||||
|
||||
def materialize_cookies(jar: RequestsCookieJar) -> List[Dict[str, Any]]:
|
||||
return [
|
||||
materialize_cookie(cookie)
|
||||
for cookie in jar
|
||||
]
|
||||
|
||||
|
||||
def materialize_headers(headers: Dict[str, str]) -> List[Dict[str, Any]]:
|
||||
return [
|
||||
{
|
||||
'name': name,
|
||||
'value': value
|
||||
}
|
||||
for name, value in headers.copy().items()
|
||||
]
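This is the shape change behind the session-format updates further down: headers move from a plain mapping to a list of name/value records. The same transformation, minus the `.copy()`, in isolation:

def materialize_headers(headers):
    return [{'name': name, 'value': value} for name, value in headers.items()]

print(materialize_headers({'X-Data': 'value', 'X-Foo': 'bar'}))
# [{'name': 'X-Data', 'value': 'value'}, {'name': 'X-Foo', 'value': 'bar'}]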
|
||||
|
||||
|
||||
def get_httpie_session(
|
||||
env: Environment,
|
||||
config_dir: Path,
|
||||
@ -74,7 +96,7 @@ def get_httpie_session(
|
||||
host: Optional[str],
|
||||
url: str,
|
||||
*,
|
||||
refactor_mode: bool = False
|
||||
suppress_legacy_warnings: bool = False
|
||||
) -> 'Session':
|
||||
bound_hostname = host or url_as_host(url)
|
||||
if not bound_hostname:
|
||||
@ -93,7 +115,7 @@ def get_httpie_session(
|
||||
env=env,
|
||||
session_id=session_id,
|
||||
bound_host=strip_port(bound_hostname),
|
||||
refactor_mode=refactor_mode
|
||||
suppress_legacy_warnings=suppress_legacy_warnings
|
||||
)
|
||||
session.load()
|
||||
return session
|
||||
@ -109,30 +131,32 @@ class Session(BaseConfigDict):
|
||||
env: Environment,
|
||||
bound_host: str,
|
||||
session_id: str,
|
||||
refactor_mode: bool = False,
|
||||
suppress_legacy_warnings: bool = False,
|
||||
):
|
||||
super().__init__(path=Path(path))
|
||||
self['headers'] = {}
|
||||
|
||||
# Default values for the session files
|
||||
self['headers'] = []
|
||||
self['cookies'] = []
|
||||
self['auth'] = {
|
||||
'type': None,
|
||||
'username': None,
|
||||
'password': None
|
||||
}
|
||||
|
||||
# Runtime state of the Session objects.
|
||||
self.env = env
|
||||
self.cookie_jar = RequestsCookieJar()
|
||||
self._headers = HTTPHeadersDict()
|
||||
self.cookie_jar = RequestsCookieJar(
|
||||
# See also a temporary workaround for a Requests bug in `compat.py`.
|
||||
policy=HTTPieCookiePolicy(),
|
||||
)
|
||||
self.session_id = session_id
|
||||
self.bound_host = bound_host
|
||||
self.refactor_mode = refactor_mode
|
||||
self.suppress_legacy_warnings = suppress_legacy_warnings
|
||||
|
||||
def pre_process_data(self, data: Dict[str, Any]) -> Dict[str, Any]:
|
||||
cookies = data.get('cookies')
|
||||
if cookies:
|
||||
normalized_cookies = legacy_cookies.pre_process(self, cookies)
|
||||
else:
|
||||
normalized_cookies = []
|
||||
|
||||
for cookie in normalized_cookies:
|
||||
def _add_cookies(self, cookies: List[Dict[str, Any]]) -> None:
|
||||
for cookie in cookies:
|
||||
domain = cookie.get('domain', '')
|
||||
if domain is None:
|
||||
# domain = None means an explicit lack of a cookie, though
|
||||
@ -143,29 +167,38 @@ class Session(BaseConfigDict):
|
||||
|
||||
self.cookie_jar.set(**cookie)
|
||||
|
||||
def pre_process_data(self, data: Dict[str, Any]) -> Dict[str, Any]:
|
||||
for key, deserializer, importer in [
|
||||
('cookies', legacy_cookies.pre_process, self._add_cookies),
|
||||
('headers', legacy_headers.pre_process, self._headers.update),
|
||||
]:
|
||||
values = data.get(key)
|
||||
if values:
|
||||
normalized_values = deserializer(self, values)
|
||||
else:
|
||||
normalized_values = []
|
||||
|
||||
importer(normalized_values)
|
||||
|
||||
return data
|
||||
|
||||
def post_process_data(self, data: Dict[str, Any]) -> Dict[str, Any]:
|
||||
cookies = data.get('cookies')
|
||||
for key, store, serializer, exporter in [
|
||||
('cookies', self.cookie_jar, materialize_cookies, legacy_cookies.post_process),
|
||||
('headers', self._headers, materialize_headers, legacy_headers.post_process),
|
||||
]:
|
||||
original_type = type(data.get(key))
|
||||
values = serializer(store)
|
||||
|
||||
normalized_cookies = [
|
||||
materialize_cookie(cookie)
|
||||
for cookie in self.cookie_jar
|
||||
]
|
||||
data['cookies'] = legacy_cookies.post_process(
|
||||
normalized_cookies,
|
||||
original_type=type(cookies)
|
||||
)
|
||||
data[key] = exporter(
|
||||
values,
|
||||
original_type=original_type
|
||||
)
|
||||
|
||||
return data
|
||||
|
||||
def update_headers(self, request_headers: HTTPHeadersDict):
|
||||
"""
|
||||
Update the session headers with the request ones while ignoring
|
||||
certain name prefixes.
|
||||
|
||||
"""
|
||||
headers = self.headers
|
||||
def _compute_new_headers(self, request_headers: HTTPHeadersDict) -> HTTPHeadersDict:
|
||||
new_headers = HTTPHeadersDict()
|
||||
for name, value in request_headers.copy().items():
|
||||
if value is None:
|
||||
continue # Ignore explicitly unset headers
|
||||
@ -183,24 +216,40 @@ class Session(BaseConfigDict):
|
||||
morsel['path'] = DEFAULT_COOKIE_PATH
|
||||
self.cookie_jar.set(cookie_name, morsel)
|
||||
|
||||
all_cookie_headers = request_headers.getall(name)
|
||||
if len(all_cookie_headers) > 1:
|
||||
all_cookie_headers.remove(original_value)
|
||||
else:
|
||||
request_headers.popall(name)
|
||||
request_headers.remove_item(name, original_value)
|
||||
continue
|
||||
|
||||
for prefix in SESSION_IGNORED_HEADER_PREFIXES:
|
||||
if name.lower().startswith(prefix.lower()):
|
||||
break
|
||||
else:
|
||||
headers[name] = value
|
||||
new_headers.add(name, value)
|
||||
|
||||
self['headers'] = dict(headers)
|
||||
return new_headers
|
||||
|
||||
def update_headers(self, request_headers: HTTPHeadersDict):
|
||||
"""
|
||||
Update the session headers with the request ones while ignoring
|
||||
certain name prefixes.
|
||||
|
||||
"""
|
||||
|
||||
new_headers = self._compute_new_headers(request_headers)
|
||||
new_keys = new_headers.copy().keys()
|
||||
|
||||
# New headers will take priority over the existing ones, and override
|
||||
# them directly instead of extending them.
|
||||
for key, value in self._headers.copy().items():
|
||||
if key in new_keys:
|
||||
continue
|
||||
|
||||
new_headers.add(key, value)
|
||||
|
||||
self._headers = new_headers
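The merge rule above in plain-dict terms: request headers win, and session headers survive only where the request did not set them (the real code uses HTTPHeadersDict, which can hold repeated names):

session_headers = {'User-Agent': 'httpie-session', 'X-Token': 'old'}
request_headers = {'X-Token': 'new', 'Accept': 'application/json'}

merged = dict(request_headers)
for key, value in session_headers.items():
    if key not in merged:
        merged[key] = value

print(merged)
# {'X-Token': 'new', 'Accept': 'application/json', 'User-Agent': 'httpie-session'}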
|
||||
|
||||
@property
|
||||
def headers(self) -> HTTPHeadersDict:
|
||||
return HTTPHeadersDict(self['headers'])
|
||||
return self._headers.copy()
|
||||
|
||||
@property
|
||||
def cookies(self) -> RequestsCookieJar:
|
||||
@ -257,3 +306,17 @@ class Session(BaseConfigDict):
|
||||
@property
|
||||
def is_anonymous(self):
|
||||
return is_anonymous_session(self.session_id)
|
||||
|
||||
def warn_legacy_usage(self, warning: str) -> None:
|
||||
if self.suppress_legacy_warnings:
|
||||
return None
|
||||
|
||||
self.env.log_error(
|
||||
warning,
|
||||
level=LogLevel.WARNING
|
||||
)
|
||||
|
||||
# We don't want to spam multiple warnings on each usage,
|
||||
# so if there is already a warning for the legacy usage
|
||||
# we'll skip the next ones.
|
||||
self.suppress_legacy_warnings = True
|
||||
|
@ -2,7 +2,6 @@ import sys
|
||||
import os
|
||||
import zlib
|
||||
import functools
|
||||
import time
|
||||
import threading
|
||||
from typing import Any, Callable, IO, Iterable, Optional, Tuple, Union, TYPE_CHECKING
|
||||
from urllib.parse import urlencode
|
||||
@ -110,17 +109,20 @@ def observe_stdin_for_data_thread(env: Environment, file: IO, read_event: thread
|
||||
return None
|
||||
|
||||
def worker(event: threading.Event) -> None:
|
||||
time.sleep(READ_THRESHOLD)
|
||||
if not event.is_set():
|
||||
if not event.wait(timeout=READ_THRESHOLD):
|
||||
env.stderr.write(
|
||||
f'> warning: no stdin data read in {READ_THRESHOLD}s '
|
||||
f'(perhaps you want to --ignore-stdin)\n'
|
||||
f'> See: https://httpie.io/docs/cli/best-practices\n'
|
||||
)
|
||||
|
||||
# Making it a daemon ensures that if the user exits from the main program
|
||||
# (e.g. either regularly or with Ctrl-C), the thread will not
|
||||
# block them.
|
||||
thread = threading.Thread(
|
||||
target=worker,
|
||||
args=(read_event,)
|
||||
args=(read_event,),
|
||||
daemon=True
|
||||
)
|
||||
thread.start()
|
||||
|
||||
|
@ -1,16 +1,20 @@
|
||||
import os
|
||||
import base64
|
||||
import json
|
||||
import mimetypes
|
||||
import re
|
||||
import sys
|
||||
import time
|
||||
import tempfile
|
||||
import sysconfig
|
||||
|
||||
from collections import OrderedDict
|
||||
from contextlib import contextmanager
|
||||
from http.cookiejar import parse_ns_headers
|
||||
from pathlib import Path
|
||||
from pprint import pformat
|
||||
from urllib.parse import urlsplit
|
||||
from typing import Any, List, Optional, Tuple, Callable, Iterable, TypeVar
|
||||
from typing import Any, List, Optional, Tuple, Generator, Callable, Iterable, IO, TypeVar
|
||||
|
||||
import requests.auth
|
||||
|
||||
@ -214,14 +218,33 @@ def parse_content_type_header(header):
|
||||
return content_type, params_dict
|
||||
|
||||
|
||||
def as_site(path: Path) -> Path:
|
||||
def as_site(path: Path, **extra_vars) -> Path:
|
||||
site_packages_path = sysconfig.get_path(
|
||||
'purelib',
|
||||
vars={'base': str(path)}
|
||||
vars={'base': str(path), **extra_vars}
|
||||
)
|
||||
return Path(site_packages_path)
|
||||
|
||||
|
||||
def get_site_paths(path: Path) -> Iterable[Path]:
|
||||
from httpie.compat import (
|
||||
MIN_SUPPORTED_PY_VERSION,
|
||||
MAX_SUPPORTED_PY_VERSION,
|
||||
is_frozen
|
||||
)
|
||||
|
||||
if is_frozen:
|
||||
[major, min_minor] = MIN_SUPPORTED_PY_VERSION
|
||||
[major, max_minor] = MAX_SUPPORTED_PY_VERSION
|
||||
for minor in range(min_minor, max_minor + 1):
|
||||
yield as_site(
|
||||
path,
|
||||
py_version_short=f'{major}.{minor}'
|
||||
)
|
||||
else:
|
||||
yield as_site(path)
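What `as_site` computes under the hood is just a sysconfig path with a substituted base; the exact result depends on the local platform and Python version (the directory below is hypothetical):

import sysconfig

print(sysconfig.get_path('purelib', vars={'base': '/tmp/httpie-plugins'}))
# e.g. /tmp/httpie-plugins/lib/python3.11/site-packages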
|
||||
|
||||
|
||||
def split(iterable: Iterable[T], key: Callable[[T], bool]) -> Tuple[List[T], List[T]]:
|
||||
left, right = [], []
|
||||
for item in iterable:
|
||||
@ -242,3 +265,45 @@ def unwrap_context(exc: Exception) -> Optional[Exception]:
|
||||
|
||||
def url_as_host(url: str) -> str:
|
||||
return urlsplit(url).netloc.split('@')[-1]
|
||||
|
||||
|
||||
class LockFileError(ValueError):
|
||||
pass
|
||||
|
||||
|
||||
@contextmanager
|
||||
def open_with_lockfile(file: Path, *args, **kwargs) -> Generator[IO[Any], None, None]:
|
||||
file_id = base64.b64encode(os.fsencode(file)).decode()
|
||||
target_file = Path(tempfile.gettempdir()) / file_id
|
||||
|
||||
# Have an atomic-like touch here, so we'll tighten the possibility of
|
||||
# a race occurring between multiple processes accessing the same file.
|
||||
try:
|
||||
target_file.touch(exist_ok=False)
|
||||
except FileExistsError as exc:
|
||||
raise LockFileError("Can't modify a locked file.") from exc
|
||||
|
||||
try:
|
||||
with open(file, *args, **kwargs) as stream:
|
||||
yield stream
|
||||
finally:
|
||||
target_file.unlink()
|
||||
|
||||
|
||||
def is_version_greater(version_1: str, version_2: str) -> bool:
|
||||
# In an ideal scenario, we would depend on `packaging` in order
|
||||
# to offer PEP 440 compatible parsing. But since it might not be
|
||||
# commonly available for outside packages, and since we are only
|
||||
# going to parse HTTPie's own version it should be fine to compare
|
||||
# this in a SemVer subset fashion.
|
||||
|
||||
def split_version(version: str) -> Tuple[int, ...]:
|
||||
parts = []
|
||||
for part in version.split('.')[:3]:
|
||||
try:
|
||||
parts.append(int(part))
|
||||
except ValueError:
|
||||
break
|
||||
return tuple(parts)
|
||||
|
||||
return split_version(version_1) > split_version(version_2)
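The comparison only looks at the first three numeric, dot-separated components, so pre-release suffixes are simply dropped; for example:

def split_version(version):
    parts = []
    for part in version.split('.')[:3]:
        try:
            parts.append(int(part))
        except ValueError:
            break
    return tuple(parts)

print(split_version('3.2.0.dev0'))                       # (3, 2, 0)
print(split_version('3.2.0') > split_version('3.1.0'))   # True
print(split_version('3.2.0') > split_version('3.2'))     # True -> (3, 2, 0) > (3, 2)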
|
||||
|
9
setup.py
@ -13,6 +13,8 @@ tests_require = [
|
||||
'pytest-httpbin>=0.0.6',
|
||||
'pytest-lazy-fixture>=0.0.6',
|
||||
'responses',
|
||||
'pytest-mock',
|
||||
'werkzeug<2.1.0'
|
||||
]
|
||||
dev_require = [
|
||||
*tests_require,
|
||||
@ -29,6 +31,7 @@ dev_require = [
|
||||
'Jinja2'
|
||||
]
|
||||
install_requires = [
|
||||
'pip',
|
||||
'charset_normalizer>=2.0.0',
|
||||
'defusedxml>=0.6.0',
|
||||
'requests[socks]>=2.22.0',
|
||||
@ -37,6 +40,7 @@ install_requires = [
|
||||
'multidict>=4.7.0',
|
||||
'setuptools',
|
||||
'importlib-metadata>=1.4.0; python_version < "3.8"',
|
||||
'rich>=9.10.0'
|
||||
]
|
||||
install_requires_win_only = [
|
||||
'colorama>=0.2.4',
|
||||
@ -110,4 +114,9 @@ setup(
|
||||
'Documentation': 'https://httpie.io/docs',
|
||||
'Online Demo': 'https://httpie.io/run',
|
||||
},
|
||||
data_files=[
|
||||
('share/man/man1', ['extras/man/http.1']),
|
||||
('share/man/man1', ['extras/man/https.1']),
|
||||
('share/man/man1', ['extras/man/httpie.1']),
|
||||
]
|
||||
)
|
||||
|
@ -1,4 +1,3 @@
|
||||
import os
|
||||
import socket
|
||||
|
||||
import pytest
|
||||
@ -7,6 +6,8 @@ from pytest_httpbin import certs
|
||||
from .utils import ( # noqa
|
||||
HTTPBIN_WITH_CHUNKED_SUPPORT_DOMAIN,
|
||||
HTTPBIN_WITH_CHUNKED_SUPPORT,
|
||||
REMOTE_HTTPBIN_DOMAIN,
|
||||
IS_PYOPENSSL,
|
||||
mock_env
|
||||
)
|
||||
from .utils.plugins_cli import ( # noqa
|
||||
@ -17,7 +18,7 @@ from .utils.plugins_cli import ( # noqa
|
||||
httpie_plugins_success,
|
||||
interface,
|
||||
)
|
||||
from .utils.http_server import http_server # noqa
|
||||
from .utils.http_server import http_server, localhost_http_server # noqa
|
||||
|
||||
|
||||
@pytest.fixture(scope='function', autouse=True)
|
||||
@ -58,13 +59,29 @@ def httpbin_with_chunked_support(_httpbin_with_chunked_support_available):
|
||||
pytest.skip(f'{HTTPBIN_WITH_CHUNKED_SUPPORT_DOMAIN} not resolvable')
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def _remote_httpbin_available():
|
||||
try:
|
||||
socket.gethostbyname(REMOTE_HTTPBIN_DOMAIN)
|
||||
return True
|
||||
except OSError:
|
||||
return False
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def remote_httpbin(_remote_httpbin_available):
|
||||
if _remote_httpbin_available:
|
||||
return 'http://' + REMOTE_HTTPBIN_DOMAIN
|
||||
pytest.skip(f'{REMOTE_HTTPBIN_DOMAIN} not resolvable')
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True, scope='session')
|
||||
def pyopenssl_inject():
|
||||
"""
|
||||
Injects `pyOpenSSL` module to make sure `requests` will use it.
|
||||
<https://github.com/psf/requests/pull/5443#issuecomment-645740394>
|
||||
"""
|
||||
if os.getenv('HTTPIE_TEST_WITH_PYOPENSSL', '0') == '1':
|
||||
if IS_PYOPENSSL:
|
||||
try:
|
||||
import urllib3.contrib.pyopenssl
|
||||
urllib3.contrib.pyopenssl.inject_into_urllib3()
|
||||
|
@ -27,5 +27,5 @@
|
||||
"value": "bar"
|
||||
}
|
||||
],
|
||||
"headers": {}
|
||||
"headers": []
|
||||
}
|
||||
|
@ -27,5 +27,5 @@
|
||||
"value": "bar"
|
||||
}
|
||||
],
|
||||
"headers": {}
|
||||
"headers": []
|
||||
}
|
||||
|
@ -26,8 +26,14 @@
|
||||
"value": "bar"
|
||||
}
|
||||
],
|
||||
"headers": {
|
||||
"X-Data": "value",
|
||||
"X-Foo": "bar"
|
||||
}
|
||||
"headers": [
|
||||
{
|
||||
"name": "X-Data",
|
||||
"value": "value"
|
||||
},
|
||||
{
|
||||
"name": "X-Foo",
|
||||
"value": "bar"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
@ -10,5 +10,5 @@
|
||||
"username": null
|
||||
},
|
||||
"cookies": [],
|
||||
"headers": {}
|
||||
"headers": []
|
||||
}
|
||||
|
@ -10,5 +10,5 @@
|
||||
"username": null
|
||||
},
|
||||
"cookies": [],
|
||||
"headers": {}
|
||||
"headers": []
|
||||
}
|
||||
|
Some files were not shown because too many files have changed in this diff.