Mirror of https://github.com/httpie/cli.git, synced 2025-07-13 20:45:06 +02:00

Automatic release update warnings. (#1336)

* Hide pretty help
* Automatic release update warnings.
* `httpie cli check-updates`
* Adapt to the new loglevel construct
* Don't make the pie colors bold
* Apply review feedback.

Co-authored-by: Jakub Roztocil <jakub@roztocil.co>

.github/workflows/release-pypi.yml (5 changes, vendored)

@@ -21,11 +21,8 @@ jobs:
         with:
           python-version: 3.9
 
-      - name: Install pypa/build
-        run: python -m pip install build
-
       - name: Build a binary wheel and a source tarball
-        run: python -m build --sdist --wheel --outdir dist/
+        run: make build
 
       - name: Release on PyPI
         uses: pypa/gh-action-pypi-publish@master

Makefile (11 changes)

@@ -30,7 +30,7 @@ install: venv install-reqs
 
 install-reqs:
 	@echo $(H1)Updating package tools$(H1END)
-	$(VENV_PIP) install --upgrade pip wheel
+	$(VENV_PIP) install --upgrade pip wheel build
 
 	@echo $(H1)Installing dev requirements$(H1END)
 	$(VENV_PIP) install --upgrade --editable '.[dev]'

@@ -153,8 +153,11 @@ doc-check:
 
 
 build:
-	rm -rf build/
-	$(VENV_PYTHON) setup.py sdist bdist_wheel
+	rm -rf build/ dist/
+	mv httpie/internal/__build_channel__.py httpie/internal/__build_channel__.py.original
+	echo 'BUILD_CHANNEL = "pip"' > httpie/internal/__build_channel__.py
+	$(VENV_PYTHON) -m build --sdist --wheel --outdir dist/
+	mv httpie/internal/__build_channel__.py.original httpie/internal/__build_channel__.py
 
 
 publish: test-all publish-no-test

@@ -198,7 +201,7 @@ brew-test:
 	- brew uninstall httpie
 
 	@echo $(H1)Building from source…$(H1END)
-	- brew install --build-from-source ./docs/packaging/brew/httpie.rb
+	- brew install --HEAD --build-from-source ./docs/packaging/brew/httpie.rb
 
 	@echo $(H1)Verifying…$(H1END)
 	http --version

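The reworked `build` target temporarily swaps `httpie/internal/__build_channel__.py` for a copy that says `pip` before building, so wheels produced this way identify their packaging channel at runtime. A minimal sketch of how that baked-in constant is read back (the printed values are illustrative, not part of the diff):

import sys

# Sketch: the update checker imports this constant to decide which
# release channel's version to compare against.
from httpie.internal.__build_channel__ import BUILD_CHANNEL

# 'pip' in wheels produced via `make build`, 'unknown' in a plain source checkout.
sys.stdout.write(BUILD_CHANNEL + '\n')
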
@@ -1655,6 +1655,10 @@ If you’d like to silence warnings as well, use `-q` or `--quiet` twice:
 Let’s say that there is an API that returns the whole resource when it is updated, but you are only interested in the response headers to see the status code after an update:
 
 ```bash
+$ http --headers PATCH pie.dev/patch name='New Name'
+```
+
+Since you are only printing the HTTP headers here, the connection to the server is closed as soon as all the response headers have been received.
 Therefore, bandwidth and time isn’t wasted downloading the body which you don’t care about.
 The response headers are downloaded always, even if they are not part of the output
 

@@ -2400,6 +2404,14 @@ This command is currently in beta.
 "Program: http, Version: 0.0.1a0"
 ```
 
+#### `httpie cli plugins`
+
+`plugins` interface is a very simple plugin manager for installing, listing and uninstalling HTTPie plugins.
+
+In the past `pip` was used to install/uninstall plugins, but on some environments (e.g., brew installed
+packages) it wasn’t working properly. The new interface is a very simple overlay on top of `pip` to allow
+plugin installations on every installation method.
+
 By default, the plugins (and their missing dependencies) will be stored under the configuration directory,
 but this can be modified through `plugins_dir` variable on the config.
 

@@ -27,6 +27,7 @@ RUN python -m pip install /app
 RUN python -m pip install pyinstaller wheel
 RUN python -m pip install --force-reinstall --upgrade pip
 
+RUN echo 'BUILD_CHANNEL="pypi"' > /app/httpie/internal/__build_channel__.py
 RUN python build.py
 
 ENTRYPOINT ["mv", "/app/extras/packaging/linux/dist/", "/artifacts"]

@@ -92,8 +92,9 @@ def main():
     build_packages(binaries['http_cli'], binaries['httpie_cli'])
 
     # Rename http_cli/httpie_cli to http/httpie
-    binaries['http_cli'].rename('http')
-    binaries['httpie_cli'].rename('httpie')
+    binaries['http_cli'].rename(DIST_DIR / 'http')
+    binaries['httpie_cli'].rename(DIST_DIR / 'httpie')
+
 
 
 if __name__ == '__main__':

@@ -149,6 +149,24 @@ class Config(BaseConfigDict):
     def default_options(self) -> list:
         return self['default_options']
 
+    def _configured_path(self, config_option: str, default: str) -> None:
+        return Path(
+            self.get(config_option, self.directory / default)
+        ).expanduser().resolve()
+
     @property
     def plugins_dir(self) -> Path:
-        return Path(self.get('plugins_dir', self.directory / 'plugins')).resolve()
+        return self._configured_path('plugins_dir', 'plugins')
+
+    @property
+    def version_info_file(self) -> Path:
+        return self._configured_path('version_info_file', 'version_info.json')
+
+    @property
+    def developer_mode(self) -> bool:
+        """This is a special setting for the development environment. It is
+        different from the --debug mode in the terms that it might change
+        the behavior for certain parameters (e.g updater system) that
+        we usually ignore."""
+
+        return self.get('developer_mode')

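The new `Config._configured_path()` helper resolves a config option to a path, falling back to a file inside the config directory; both `plugins_dir` and the new `version_info_file` go through it. A standalone sketch of the same lookup, with a hypothetical `config` dict and directory standing in for the real `Config` object:

from pathlib import Path


def configured_path(config: dict, directory: Path, option: str, default: str) -> Path:
    # Same pattern as Config._configured_path(): an explicit setting wins,
    # otherwise fall back to a file inside the configuration directory.
    return Path(config.get(option, directory / default)).expanduser().resolve()


config = {'plugins_dir': '~/my-httpie-plugins'}     # hypothetical user setting
directory = Path('~/.config/httpie').expanduser()   # stand-in for Config.directory
print(configured_path(config, directory, 'plugins_dir', 'plugins'))
print(configured_path(config, directory, 'version_info_file', 'version_info.json'))
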
@@ -24,6 +24,8 @@ from .output.writer import write_message, write_stream, write_raw_data, MESSAGE_
 from .plugins.registry import plugin_manager
 from .status import ExitStatus, http_status_to_exit_status
 from .utils import unwrap_context
+from .internal.update_warnings import check_updates
+from .internal.daemon_runner import is_daemon_mode, run_daemon_task
 
 
 # noinspection PyDefaultArgument

@@ -37,6 +39,10 @@ def raw_main(
     program_name, *args = args
     env.program_name = os.path.basename(program_name)
     args = decode_raw_args(args, env.stdin_encoding)
+
+    if is_daemon_mode(args):
+        return run_daemon_task(env, args)
+
     plugin_manager.load_installed_plugins(env.config.plugins_dir)
 
     if use_default_options and env.config.default_options:

@@ -89,6 +95,7 @@ def raw_main(
                 raise
             exit_status = ExitStatus.ERROR
     else:
+        check_updates(env)
         try:
             exit_status = main_program(
                 args=parsed_args,

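With this wiring, a re-invocation such as `http fetch_updates --daemon` (which `spawn_daemon()` produces, see `httpie/internal/daemons.py` below) never reaches the regular argument parser; `raw_main()` short-circuits into the task runner, while ordinary requests get a post-parse `check_updates()` call. A small sketch of the dispatch check using the new helper:

from httpie.internal.daemon_runner import is_daemon_mode

# The argument vector spawn_daemon() re-invokes HTTPie with:
assert is_daemon_mode(['fetch_updates', '--daemon'])

# Ordinary requests are unaffected and fall through to the normal code path.
assert not is_daemon_mode(['GET', 'pie.dev/get'])
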
httpie/internal/__build_channel__.py (new file, 5 lines)

# Represents the packaging method. This file should
# be overridden by every build system we support on
# the packaging step.

BUILD_CHANNEL = 'unknown'

httpie/internal/__init__.py (new file, empty)

httpie/internal/daemon_runner.py (new file, 49 lines)

import argparse
from contextlib import redirect_stderr, redirect_stdout
from typing import List

from httpie.context import Environment
from httpie.internal.update_warnings import _fetch_updates
from httpie.status import ExitStatus

STATUS_FILE = '.httpie-test-daemon-status'


def _check_status(env):
    # This function is used only for the testing (test_update_warnings).
    # Since we don't want to trigger the fetch_updates (which would interact
    # with real world resources), we'll only trigger this pseudo task
    # and check whether the STATUS_FILE is created or not.
    import tempfile
    from pathlib import Path

    status_file = Path(tempfile.gettempdir()) / STATUS_FILE
    status_file.touch()


DAEMONIZED_TASKS = {
    'check_status': _check_status,
    'fetch_updates': _fetch_updates,
}


def _parse_options(args: List[str]) -> argparse.Namespace:
    parser = argparse.ArgumentParser()
    parser.add_argument('task_id')
    parser.add_argument('--daemon', action='store_true')
    return parser.parse_known_args(args)[0]


def is_daemon_mode(args: List[str]) -> bool:
    return '--daemon' in args


def run_daemon_task(env: Environment, args: List[str]) -> ExitStatus:
    options = _parse_options(args)

    assert options.daemon
    assert options.task_id in DAEMONIZED_TASKS
    with redirect_stdout(env.devnull), redirect_stderr(env.devnull):
        DAEMONIZED_TASKS[options.task_id](env)

    return ExitStatus.SUCCESS

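The task id is the first positional argument, `--daemon` is just a marker, and anything unknown is tolerated via `parse_known_args()`. A short sketch exercising the module's (private) parser and task table:

from httpie.internal.daemon_runner import DAEMONIZED_TASKS, _parse_options

options = _parse_options(['fetch_updates', '--daemon'])
assert options.daemon
assert options.task_id in DAEMONIZED_TASKS  # maps to _fetch_updates()
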
httpie/internal/daemons.py (new file, 121 lines)

"""
This module provides an interface to spawn a detached task to be
runned with httpie.internal.daemon_runner on a separate process. It is
based on DVC's daemon system.
https://github.com/iterative/dvc/blob/main/dvc/daemon.py
"""

import inspect
import os
import platform
import sys
import httpie.__main__
from contextlib import suppress
from subprocess import Popen
from typing import Dict, List
from httpie.compat import is_frozen, is_windows


ProcessContext = Dict[str, str]


def _start_process(cmd: List[str], **kwargs) -> Popen:
    prefix = [sys.executable]
    # If it is frozen, sys.executable points to the binary (http).
    # Otherwise it points to the python interpreter.
    if not is_frozen:
        main_entrypoint = httpie.__main__.__file__
        prefix += [main_entrypoint]
    return Popen(prefix + cmd, close_fds=True, shell=False, **kwargs)


def _spawn_windows(cmd: List[str], process_context: ProcessContext) -> None:
    from subprocess import (
        CREATE_NEW_PROCESS_GROUP,
        CREATE_NO_WINDOW,
        STARTF_USESHOWWINDOW,
        STARTUPINFO,
    )

    # https://stackoverflow.com/a/7006424
    # https://bugs.python.org/issue41619
    creationflags = CREATE_NEW_PROCESS_GROUP | CREATE_NO_WINDOW

    startupinfo = STARTUPINFO()
    startupinfo.dwFlags |= STARTF_USESHOWWINDOW

    _start_process(
        cmd,
        env=process_context,
        creationflags=creationflags,
        startupinfo=startupinfo,
    )


def _spawn_posix(args: List[str], process_context: ProcessContext) -> None:
    """
    Perform a double fork procedure* to detach from the parent
    process so that we don't block the user even if their original
    command's execution is done but the release fetcher is not.

    [1]: https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap11.html#tag_11_01_03
    """

    from httpie.core import main

    try:
        pid = os.fork()
        if pid > 0:
            return
    except OSError:
        os._exit(1)

    os.setsid()

    try:
        pid = os.fork()
        if pid > 0:
            os._exit(0)
    except OSError:
        os._exit(1)

    # Close all standard inputs/outputs
    sys.stdin.close()
    sys.stdout.close()
    sys.stderr.close()

    if platform.system() == 'Darwin':
        # Double-fork is not reliable on MacOS, so we'll use a subprocess
        # to ensure the task is isolated properly.
        process = _start_process(args, env=process_context)
        # Unlike windows, since we already completed the fork procedure
        # we can simply join the process and wait for it.
        process.communicate()
    else:
        os.environ.update(process_context)
        with suppress(BaseException):
            main(['http'] + args)

    os._exit(0)


def _spawn(args: List[str], process_context: ProcessContext) -> None:
    """
    Spawn a new process to run the given command.
    """
    if is_windows:
        _spawn_windows(args, process_context)
    else:
        _spawn_posix(args, process_context)


def spawn_daemon(task: str) -> None:
    args = [task, '--daemon']
    process_context = os.environ.copy()
    if not is_frozen:
        file_path = os.path.abspath(inspect.stack()[0][1])
        process_context['PYTHONPATH'] = os.path.dirname(
            os.path.dirname(os.path.dirname(file_path))
        )

    _spawn(args, process_context)

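Putting the two halves together: `spawn_daemon('fetch_updates')` re-executes HTTPie in a detached child process with the task id plus the `--daemon` marker. For a non-frozen installation the command it assembles looks roughly like the sketch below (illustrative, not output copied from the module):

import sys
import httpie.__main__

# Equivalent of what _start_process() builds for spawn_daemon('fetch_updates')
# when HTTPie is not a frozen binary; PYTHONPATH is set so the child can import httpie.
cmd = [sys.executable, httpie.__main__.__file__, 'fetch_updates', '--daemon']
print(cmd)
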
httpie/internal/update_warnings.py (new file, 171 lines)

import json
from contextlib import nullcontext, suppress
from datetime import datetime, timedelta
from pathlib import Path
from typing import Any, Optional, Callable

import requests

import httpie
from httpie.context import Environment, LogLevel
from httpie.internal.__build_channel__ import BUILD_CHANNEL
from httpie.internal.daemons import spawn_daemon
from httpie.utils import is_version_greater, open_with_lockfile

# Automatically updated package version index.
PACKAGE_INDEX_LINK = 'https://packages.httpie.io/latest.json'

FETCH_INTERVAL = timedelta(weeks=2)
WARN_INTERVAL = timedelta(weeks=1)

UPDATE_MESSAGE_FORMAT = """\
A new HTTPie release ({last_released_version}) is available.
To see how you can update, please visit https://httpie.io/docs/cli/{installation_method}
"""

ALREADY_UP_TO_DATE_MESSAGE = """\
You are already up-to-date.
"""


def _read_data_error_free(file: Path) -> Any:
    # If the file is broken / non-existent, ignore it.
    try:
        with open(file) as stream:
            return json.load(stream)
    except (ValueError, OSError):
        return {}


def _fetch_updates(env: Environment) -> str:
    file = env.config.version_info_file
    data = _read_data_error_free(file)

    response = requests.get(PACKAGE_INDEX_LINK, verify=False)
    response.raise_for_status()

    data.setdefault('last_warned_date', None)
    data['last_fetched_date'] = datetime.now().isoformat()
    data['last_released_versions'] = response.json()

    with open_with_lockfile(file, 'w') as stream:
        json.dump(data, stream)


def fetch_updates(env: Environment, lazy: bool = True):
    if lazy:
        spawn_daemon('fetch_updates')
    else:
        _fetch_updates(env)


def maybe_fetch_updates(env: Environment) -> None:
    if env.config.get('disable_update_warnings'):
        return None

    data = _read_data_error_free(env.config.version_info_file)

    if data:
        current_date = datetime.now()
        last_fetched_date = datetime.fromisoformat(data['last_fetched_date'])
        earliest_fetch_date = last_fetched_date + FETCH_INTERVAL
        if current_date < earliest_fetch_date:
            return None

    fetch_updates(env)


def _get_suppress_context(env: Environment) -> Any:
    """Return a context manager that suppress
    all possible errors.

    Note: if you have set the developer_mode=True in
    your config, then it will show all errors for easier
    debugging."""
    if env.config.developer_mode:
        return nullcontext()
    else:
        return suppress(BaseException)


def _update_checker(
    func: Callable[[Environment], None]
) -> Callable[[Environment], None]:
    """Control the execution of the update checker (suppress errors, trigger
    auto updates etc.)"""

    def wrapper(env: Environment) -> None:
        with _get_suppress_context(env):
            func(env)

        with _get_suppress_context(env):
            maybe_fetch_updates(env)

    return wrapper


def _get_update_status(env: Environment) -> Optional[str]:
    """If there is a new update available, return the warning text.
    Otherwise just return None."""
    file = env.config.version_info_file
    if not file.exists():
        return None

    with _get_suppress_context(env):
        # If the user quickly spawns multiple httpie processes
        # we don't want to end in a race.
        with open_with_lockfile(file) as stream:
            version_info = json.load(stream)

        available_channels = version_info['last_released_versions']
        if BUILD_CHANNEL not in available_channels:
            return None

        current_version = httpie.__version__
        last_released_version = available_channels[BUILD_CHANNEL]
        if not is_version_greater(last_released_version, current_version):
            return None

        text = UPDATE_MESSAGE_FORMAT.format(
            last_released_version=last_released_version,
            installation_method=BUILD_CHANNEL,
        )
        return text


def get_update_status(env: Environment) -> str:
    return _get_update_status(env) or ALREADY_UP_TO_DATE_MESSAGE


@_update_checker
def check_updates(env: Environment) -> None:
    if env.config.get('disable_update_warnings'):
        return None

    file = env.config.version_info_file
    update_status = _get_update_status(env)

    if not update_status:
        return None

    # If the user quickly spawns multiple httpie processes
    # we don't want to end in a race.
    with open_with_lockfile(file) as stream:
        version_info = json.load(stream)

    # We don't want to spam the user with too many warnings,
    # so we'll only warn every once a while (WARN_INTERNAL).
    current_date = datetime.now()
    last_warned_date = version_info['last_warned_date']
    if last_warned_date is not None:
        earliest_warn_date = (
            datetime.fromisoformat(last_warned_date) + WARN_INTERVAL
        )
        if current_date < earliest_warn_date:
            return None

    env.log_error(update_status, level=LogLevel.INFO)
    version_info['last_warned_date'] = current_date.isoformat()

    with open_with_lockfile(file, 'w') as stream:
        json.dump(version_info, stream)

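The fetch path persists its state into `version_info_file` (by default `version_info.json` in the config directory), and the warning path reads it back under the same lockfile. An illustrative layout of that file follows; the channel names and version numbers are made up for the example:

import json
from datetime import datetime

version_info = {
    'last_warned_date': None,                       # set once a warning is shown
    'last_fetched_date': datetime.now().isoformat(),
    'last_released_versions': {                     # mirror of PACKAGE_INDEX_LINK
        'pip': '3.1.0',                             # example values only
        'brew': '3.1.0',
    },
}
print(json.dumps(version_info, indent=4))
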
@@ -24,6 +24,9 @@ COMMANDS = {
                 'default': 'json'
             }
         ],
+        'check-updates': [
+            'Check for updates'
+        ],
         'sessions': {
             'help': 'Manage HTTPie sessions',
             'upgrade': [

@@ -1,9 +1,11 @@
 from httpie.manager.tasks.sessions import cli_sessions
 from httpie.manager.tasks.export_args import cli_export_args
 from httpie.manager.tasks.plugins import cli_plugins
+from httpie.manager.tasks.check_updates import cli_check_updates
 
 CLI_TASKS = {
     'sessions': cli_sessions,
     'export-args': cli_export_args,
     'plugins': cli_plugins,
+    'check-updates': cli_check_updates
 }

httpie/manager/tasks/check_updates.py (new file, 10 lines)

import argparse
from httpie.context import Environment
from httpie.status import ExitStatus
from httpie.internal.update_warnings import fetch_updates, get_update_status


def cli_check_updates(env: Environment, args: argparse.Namespace) -> ExitStatus:
    fetch_updates(env, lazy=False)
    env.stdout.write(get_update_status(env))
    return ExitStatus.SUCCESS

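The manager task performs a synchronous (non-lazy) fetch and prints either the update warning or the up-to-date message; it backs the new `httpie cli check-updates` command. In the test suite it is exercised roughly like this (using the `httpie()` helper from `tests/utils`, shown further down):

from httpie.status import ExitStatus
from tests.utils import httpie  # test-suite helper that runs the `httpie` manager entry point

r = httpie('cli', 'check-updates')
assert r.exit_status == ExitStatus.SUCCESS
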
@@ -1,11 +1,11 @@
 import argparse
-from typing import Tuple
 
 from httpie.sessions import SESSIONS_DIR_NAME, get_httpie_session
 from httpie.status import ExitStatus
 from httpie.context import Environment
 from httpie.legacy import v3_1_0_session_cookie_format, v3_2_0_session_header_format
 from httpie.manager.cli import missing_subcommand, parser
+from httpie.utils import is_version_greater
 
 
 FIXERS_TO_VERSIONS = {

@@ -27,25 +27,6 @@ def cli_sessions(env: Environment, args: argparse.Namespace) -> ExitStatus:
         raise ValueError(f'Unexpected action: {action}')
 
 
-def is_version_greater(version_1: str, version_2: str) -> bool:
-    # In an ideal scenario, we would depend on `packaging` in order
-    # to offer PEP 440 compatible parsing. But since it might not be
-    # commonly available for outside packages, and since we are only
-    # going to parse HTTPie's own version it should be fine to compare
-    # this in a SemVer subset fashion.
-
-    def split_version(version: str) -> Tuple[int, ...]:
-        parts = []
-        for part in version.split('.')[:3]:
-            try:
-                parts.append(int(part))
-            except ValueError:
-                break
-        return tuple(parts)
-
-    return split_version(version_1) > split_version(version_2)
-
-
 def upgrade_session(env: Environment, args: argparse.Namespace, hostname: str, session_name: str):
     session = get_httpie_session(
         env=env,

@@ -1,16 +1,20 @@
+import os
+import base64
 import json
 import mimetypes
 import re
 import sys
 import time
+import tempfile
 import sysconfig
 
 from collections import OrderedDict
+from contextlib import contextmanager
 from http.cookiejar import parse_ns_headers
 from pathlib import Path
 from pprint import pformat
 from urllib.parse import urlsplit
-from typing import Any, List, Optional, Tuple, Callable, Iterable, TypeVar
+from typing import Any, List, Optional, Tuple, Generator, Callable, Iterable, IO, TypeVar
 
 import requests.auth
 

@@ -261,3 +265,45 @@ def unwrap_context(exc: Exception) -> Optional[Exception]:
 
 def url_as_host(url: str) -> str:
     return urlsplit(url).netloc.split('@')[-1]
+
+
+class LockFileError(ValueError):
+    pass
+
+
+@contextmanager
+def open_with_lockfile(file: Path, *args, **kwargs) -> Generator[IO[Any], None, None]:
+    file_id = base64.b64encode(os.fsencode(file)).decode()
+    target_file = Path(tempfile.gettempdir()) / file_id
+
+    # Have an atomic-like touch here, so we'll tighten the possibility of
+    # a race occuring between multiple processes accessing the same file.
+    try:
+        target_file.touch(exist_ok=False)
+    except FileExistsError as exc:
+        raise LockFileError("Can't modify a locked file.") from exc
+
+    try:
+        with open(file, *args, **kwargs) as stream:
+            yield stream
+    finally:
+        target_file.unlink()
+
+
+def is_version_greater(version_1: str, version_2: str) -> bool:
+    # In an ideal scenario, we would depend on `packaging` in order
+    # to offer PEP 440 compatible parsing. But since it might not be
+    # commonly available for outside packages, and since we are only
+    # going to parse HTTPie's own version it should be fine to compare
+    # this in a SemVer subset fashion.
+
+    def split_version(version: str) -> Tuple[int, ...]:
+        parts = []
+        for part in version.split('.')[:3]:
+            try:
+                parts.append(int(part))
+            except ValueError:
+                break
+        return tuple(parts)
+
+    return split_version(version_1) > split_version(version_2)

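`is_version_greater()` (moved here from the sessions task, see above) compares dotted versions as integer tuples, and `open_with_lockfile()` guards concurrent access with a sentinel file in the temp directory. A quick usage sketch (the JSON path is hypothetical):

from pathlib import Path

from httpie.utils import is_version_greater, open_with_lockfile

assert is_version_greater('3.1.0', '3.0.2')
assert not is_version_greater('3.0.2', '3.0.2')

# Concurrent writers hit LockFileError instead of clobbering each other.
with open_with_lockfile(Path('/tmp/example-version-info.json'), 'w') as stream:
    stream.write('{}')
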
setup.py (1 change)

@@ -13,6 +13,7 @@ tests_require = [
     'pytest-httpbin>=0.0.6',
     'pytest-lazy-fixture>=0.0.6',
     'responses',
+    'pytest-mock',
     'werkzeug<2.1.0'
 ]
 dev_require = [

tests/test_update_warnings.py (new file, 237 lines)

import json
import tempfile
import time
from contextlib import suppress
from datetime import datetime
from pathlib import Path

import pytest

from httpie.internal.daemon_runner import STATUS_FILE
from httpie.internal.daemons import spawn_daemon
from httpie.status import ExitStatus

from .utils import PersistentMockEnvironment, http, httpie

BUILD_CHANNEL = 'test'
BUILD_CHANNEL_2 = 'test2'
UNKNOWN_BUILD_CHANNEL = 'test3'

HIGHEST_VERSION = '999.999.999'
LOWEST_VERSION = '1.1.1'

FIXED_DATE = datetime(1970, 1, 1).isoformat()

MAX_ATTEMPT = 40
MAX_TIMEOUT = 2.0


def check_update_warnings(text):
    return 'A new HTTPie release' in text


@pytest.mark.requires_external_processes
def test_daemon_runner():
    # We have a pseudo daemon task called 'check_status'
    # which creates a temp file called STATUS_FILE under
    # user's temp directory. This test simply ensures that
    # we create a daemon that successfully performs the
    # external task.

    status_file = Path(tempfile.gettempdir()) / STATUS_FILE
    with suppress(FileNotFoundError):
        status_file.unlink()

    spawn_daemon('check_status')

    for attempt in range(MAX_ATTEMPT):
        time.sleep(MAX_TIMEOUT / MAX_ATTEMPT)
        if status_file.exists():
            break
    else:
        pytest.fail(
            'Maximum number of attempts failed for daemon status check.'
        )

    assert status_file.exists()


def test_fetch(static_fetch_data, without_warnings):
    http('fetch_updates', '--daemon', env=without_warnings)

    with open(without_warnings.config.version_info_file) as stream:
        version_data = json.load(stream)

    assert version_data['last_warned_date'] is None
    assert version_data['last_fetched_date'] is not None
    assert (
        version_data['last_released_versions'][BUILD_CHANNEL]
        == HIGHEST_VERSION
    )
    assert (
        version_data['last_released_versions'][BUILD_CHANNEL_2]
        == LOWEST_VERSION
    )


def test_fetch_dont_override_existing_layout(
    static_fetch_data, without_warnings
):
    with open(without_warnings.config.version_info_file, 'w') as stream:
        existing_layout = {
            'last_warned_date': FIXED_DATE,
            'last_fetched_date': FIXED_DATE,
            'last_released_versions': {BUILD_CHANNEL: LOWEST_VERSION},
        }
        json.dump(existing_layout, stream)

    http('fetch_updates', '--daemon', env=without_warnings)

    with open(without_warnings.config.version_info_file) as stream:
        version_data = json.load(stream)

    # The "last updated at" field should not be modified, but the
    # rest need to be updated.
    assert version_data['last_warned_date'] == FIXED_DATE
    assert version_data['last_fetched_date'] != FIXED_DATE
    assert (
        version_data['last_released_versions'][BUILD_CHANNEL]
        == HIGHEST_VERSION
    )


def test_fetch_broken_json(static_fetch_data, without_warnings):
    with open(without_warnings.config.version_info_file, 'w') as stream:
        stream.write('$$broken$$')

    http('fetch_updates', '--daemon', env=without_warnings)

    with open(without_warnings.config.version_info_file) as stream:
        version_data = json.load(stream)

    assert (
        version_data['last_released_versions'][BUILD_CHANNEL]
        == HIGHEST_VERSION
    )


def test_check_updates_disable_warnings(
    without_warnings, httpbin, fetch_update_mock
):
    r = http(httpbin + '/get', env=without_warnings)
    assert not fetch_update_mock.called
    assert not check_update_warnings(r.stderr)


def test_check_updates_first_invocation(
    with_warnings, httpbin, fetch_update_mock
):
    r = http(httpbin + '/get', env=with_warnings)
    assert fetch_update_mock.called
    assert not check_update_warnings(r.stderr)


@pytest.mark.parametrize(
    'should_issue_warning, build_channel',
    [
        (False, pytest.lazy_fixture('lower_build_channel')),
        (True, pytest.lazy_fixture('higher_build_channel')),
    ],
)
def test_check_updates_first_time_after_data_fetch(
    with_warnings,
    httpbin,
    fetch_update_mock,
    static_fetch_data,
    should_issue_warning,
    build_channel,
):
    http('fetch_updates', '--daemon', env=with_warnings)
    r = http(httpbin + '/get', env=with_warnings)

    assert not fetch_update_mock.called
    assert (not should_issue_warning) or check_update_warnings(r.stderr)


def test_check_updates_first_time_after_data_fetch_unknown_build_channel(
    with_warnings,
    httpbin,
    fetch_update_mock,
    static_fetch_data,
    unknown_build_channel,
):
    http('fetch_updates', '--daemon', env=with_warnings)
    r = http(httpbin + '/get', env=with_warnings)

    assert not fetch_update_mock.called
    assert not check_update_warnings(r.stderr)


def test_cli_check_updates(
    static_fetch_data, higher_build_channel
):
    r = httpie('cli', 'check-updates')
    assert r.exit_status == ExitStatus.SUCCESS
    assert check_update_warnings(r)


@pytest.mark.parametrize(
    "build_channel", [
        pytest.lazy_fixture("lower_build_channel"),
        pytest.lazy_fixture("unknown_build_channel")
    ]
)
def test_cli_check_updates_not_shown(
    static_fetch_data, build_channel
):
    r = httpie('cli', 'check-updates')
    assert r.exit_status == ExitStatus.SUCCESS
    assert not check_update_warnings(r)


@pytest.fixture
def with_warnings(tmp_path):
    env = PersistentMockEnvironment()
    env.config['version_info_file'] = tmp_path / 'version.json'
    env.config['disable_update_warnings'] = False
    return env


@pytest.fixture
def without_warnings(tmp_path):
    env = PersistentMockEnvironment()
    env.config['version_info_file'] = tmp_path / 'version.json'
    env.config['disable_update_warnings'] = True
    return env


@pytest.fixture
def fetch_update_mock(mocker):
    mock_fetch = mocker.patch('httpie.internal.update_warnings.fetch_updates')
    return mock_fetch


@pytest.fixture
def static_fetch_data(mocker):
    mock_get = mocker.patch('requests.get')
    mock_get.return_value.status_code = 200
    mock_get.return_value.json.return_value = {
        BUILD_CHANNEL: HIGHEST_VERSION,
        BUILD_CHANNEL_2: LOWEST_VERSION,
    }
    return mock_get


@pytest.fixture
def unknown_build_channel(mocker):
    mocker.patch('httpie.internal.update_warnings.BUILD_CHANNEL', UNKNOWN_BUILD_CHANNEL)


@pytest.fixture
def higher_build_channel(mocker):
    mocker.patch('httpie.internal.update_warnings.BUILD_CHANNEL', BUILD_CHANNEL)


@pytest.fixture
def lower_build_channel(mocker):
    mocker.patch('httpie.internal.update_warnings.BUILD_CHANNEL', BUILD_CHANNEL_2)

@@ -18,7 +18,7 @@ from .utils import (
 )
 from .fixtures import FILE_PATH_ARG, FILE_PATH, FILE_CONTENT
 
-MAX_RESPONSE_WAIT_TIME = 2
+MAX_RESPONSE_WAIT_TIME = 5
 
 
 def test_chunked_json(httpbin_with_chunked_support):

@@ -49,6 +49,10 @@ HTTP_OK_COLOR = (
 DUMMY_URL = 'http://this-should.never-resolve'  # Note: URL never fetched
 DUMMY_HOST = url_as_host(DUMMY_URL)
 
+# We don't want hundreds of subprocesses trying to access GitHub API
+# during the tests.
+Config.DEFAULTS['disable_update_warnings'] = True
+
 
 def strip_colors(colorized_msg: str) -> str:
     return COLOR_RE.sub('', colorized_msg)

@@ -163,6 +167,7 @@ class MockEnvironment(Environment):
         self._delete_config_dir = True
 
     def cleanup(self):
+        self.devnull.close()
         self.stdout.close()
         self.stderr.close()
         warnings.resetwarnings()

@@ -179,6 +184,11 @@ class MockEnvironment(Environment):
             pass
 
 
+class PersistentMockEnvironment(MockEnvironment):
+    def cleanup(self):
+        pass
+
+
 class BaseCLIResponse:
     """
     Represents the result of simulated `$ http' invocation via `http()`.

@@ -442,7 +452,4 @@ def http(
         return r
 
     finally:
-        devnull.close()
-        stdout.close()
-        stderr.close()
         env.cleanup()
