# coding=utf-8
"""
Download mode implementation.

"""
from __future__ import division
import os
import re
import sys
import errno
import mimetypes
import threading
from time import sleep, time
from mailbox import Message

from httpie.output.streams import RawStream
from httpie.models import HTTPResponse
from httpie.utils import humanize_bytes
from httpie.compat import urlsplit


PARTIAL_CONTENT = 206
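# 206 "Partial Content" is the status code a server returns when it honours
# the Range header sent by `Downloader.pre_request()` below.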


CLEAR_LINE = '\r\033[K'
PROGRESS = (
    '{percentage: 6.2f} %'
    ' {downloaded: >10}'
    ' {speed: >10}/s'
    ' {eta: >8} ETA'
)
PROGRESS_NO_CONTENT_LENGTH = '{downloaded: >10} {speed: >10}/s'
SUMMARY = 'Done. {downloaded} in {time:0.5f}s ({speed}/s)\n'
SPINNER = '|/-\\'
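# Illustrative only -- a rendered PROGRESS line looks roughly like:
#    42.00 %     1.2 MB   250.0 kB/s  0:00:12 ETA
# (the human-readable sizes come from humanize_bytes()).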


class ContentRangeError(ValueError):
    pass


def parse_content_range(content_range, resumed_from):
    """
    Parse and validate Content-Range header.

    <http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html>

    :param content_range: the value of a Content-Range response header
                          e.g. "bytes 21010-47021/47022"
    :param resumed_from: first byte pos. from the Range request header
    :return: total size of the response body when fully downloaded.

    """
    if content_range is None:
        raise ContentRangeError('Missing Content-Range')

    pattern = (
        r'^bytes (?P<first_byte_pos>\d+)-(?P<last_byte_pos>\d+)'
        r'/(\*|(?P<instance_length>\d+))$'
    )
    match = re.match(pattern, content_range)

    if not match:
        raise ContentRangeError(
            'Invalid Content-Range format %r' % content_range)

    content_range_dict = match.groupdict()
    first_byte_pos = int(content_range_dict['first_byte_pos'])
    last_byte_pos = int(content_range_dict['last_byte_pos'])
    instance_length = (
        int(content_range_dict['instance_length'])
        if content_range_dict['instance_length']
        else None
    )

    # "A byte-content-range-spec with a byte-range-resp-spec whose
    # last-byte-pos value is less than its first-byte-pos value,
    # or whose instance-length value is less than or equal to its
    # last-byte-pos value, is invalid. The recipient of an invalid
    # byte-content-range-spec MUST ignore it and any content
    # transferred along with it."
    if (first_byte_pos >= last_byte_pos or
            (instance_length is not None and
             instance_length <= last_byte_pos)):
        raise ContentRangeError(
            'Invalid Content-Range returned: %r' % content_range)

    if (first_byte_pos != resumed_from or
            (instance_length is not None and
             last_byte_pos + 1 != instance_length)):
        # Not what we asked for.
        raise ContentRangeError(
            'Unexpected Content-Range returned (%r)'
            ' for the requested Range ("bytes=%d-")'
            % (content_range, resumed_from)
        )

    return last_byte_pos + 1
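
# Illustrative example (not executed): for a request resumed with
# "Range: bytes=21010-", a response header of "bytes 21010-47021/47022"
# yields first_byte_pos=21010, last_byte_pos=47021, instance_length=47022,
# and parse_content_range() returns 47022 -- the total body size.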


def filename_from_content_disposition(content_disposition):
    """
    Extract and validate filename from a Content-Disposition header.

    :param content_disposition: Content-Disposition value
    :return: the filename if present and valid, otherwise `None`

    """
    # attachment; filename=jakubroztocil-httpie-0.4.1-20-g40bd8f6.tar.gz

    msg = Message('Content-Disposition: %s' % content_disposition)
    filename = msg.get_filename()
    if filename:
        # Basic sanitization.
        filename = os.path.basename(filename).lstrip('.').strip()
        if filename:
            return filename
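
# Illustrative example (not executed):
#   filename_from_content_disposition('attachment; filename="foo.tar.gz"')
#   -> 'foo.tar.gz'
# Headers without a usable filename (e.g. plain "inline") yield None.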


def filename_from_url(url, content_type):
    fn = urlsplit(url).path.rstrip('/')
    fn = os.path.basename(fn) if fn else 'index'
    if '.' not in fn and content_type:
        content_type = content_type.split(';')[0]
        if content_type == 'text/plain':
            # mimetypes returns '.ksh'
            ext = '.txt'
        else:
            ext = mimetypes.guess_extension(content_type)

        if ext == '.htm':  # Python 3
            ext = '.html'

        if ext:
            fn += ext

    return fn
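
# Illustrative examples (not executed):
#   filename_from_url('http://example.org/foo.tar.gz', None) -> 'foo.tar.gz'
#   filename_from_url('http://example.org/', 'text/html')    -> 'index.html'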


def trim_filename(filename, max_len):
    if len(filename) > max_len:
        trim_by = len(filename) - max_len
        name, ext = os.path.splitext(filename)
        if trim_by >= len(name):
            filename = filename[:-trim_by]
        else:
            filename = name[:-trim_by] + ext
    return filename
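
# Illustrative example (not executed): the name part is shortened while the
# extension is kept when possible:
#   trim_filename('example-document.txt', max_len=10) -> 'exampl.txt'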


def get_filename_max_length(directory):
    max_len = 255
    try:
        pathconf = os.pathconf
    except AttributeError:
        pass  # non-posix
    else:
        try:
            max_len = pathconf(directory, 'PC_NAME_MAX')
        except OSError as e:
            if e.errno != errno.EINVAL:
                raise
    return max_len
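
# Note: on most POSIX filesystems PC_NAME_MAX is 255 bytes, matching the
# fallback above; the pathconf() call mainly matters on more restrictive
# filesystems (and is skipped entirely on platforms without os.pathconf).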


def trim_filename_if_needed(filename, directory='.', extra=0):
    max_len = get_filename_max_length(directory) - extra
    if len(filename) > max_len:
        filename = trim_filename(filename, max_len)
    return filename


def get_unique_filename(filename, exists=os.path.exists):
    attempt = 0
    while True:
        suffix = '-' + str(attempt) if attempt > 0 else ''
        try_filename = trim_filename_if_needed(filename, extra=len(suffix))
        try_filename += suffix
        if not exists(try_filename):
            return try_filename
        attempt += 1
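
# Illustrative example (not executed): if 'index.html' and 'index.html-1'
# already exist on disk, get_unique_filename('index.html') -> 'index.html-2'.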


class Downloader(object):

    def __init__(self, output_file=None,
                 resume=False, progress_file=sys.stderr):
        """
        :param resume: Should the download resume if a partial download
                       already exists.
        :type resume: bool

        :param output_file: The file to store the response body in. If not
                            provided, it will be guessed from the response.

        :param progress_file: Where to report download progress.

        """
        self._output_file = output_file
        self._resume = resume
        self._resumed_from = 0
        self.finished = False

        self.status = Status()
        self._progress_reporter = ProgressReporterThread(
            status=self.status,
            output=progress_file
        )

    def pre_request(self, request_headers):
        """Called just before the HTTP request is sent.

        Might alter `request_headers`.

        :type request_headers: dict

        """
        # Ask the server not to encode the content so that we can resume, etc.
        request_headers['Accept-Encoding'] = 'identity'
        if self._resume:
            bytes_have = os.path.getsize(self._output_file.name)
            if bytes_have:
                # Set ``Range`` header to resume the download
                # TODO: Use "If-Range: mtime" to make sure it's fresh?
                request_headers['Range'] = 'bytes=%d-' % bytes_have
                self._resumed_from = bytes_have
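
    # Illustrative example (not executed): resuming a download with 1000
    # bytes already on disk sends
    #     Accept-Encoding: identity
    #     Range: bytes=1000-
    # and the expected reply is a 206 response whose Content-Range is then
    # validated by parse_content_range() in start() below.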

    def start(self, response):
        """
        Initiate and return a stream for `response` body with progress
        callback attached. Can be called only once.

        :param response: Initiated response object with headers already fetched
        :type response: requests.models.Response

        :return: RawStream, output_file

        """
        assert not self.status.time_started

        # FIXME: some servers still might send Content-Encoding: gzip
        # <https://github.com/jakubroztocil/httpie/issues/423>
        try:
            total_size = int(response.headers['Content-Length'])
        except (KeyError, ValueError, TypeError):
            total_size = None

        if self._output_file:
            if self._resume and response.status_code == PARTIAL_CONTENT:
                total_size = parse_content_range(
                    response.headers.get('Content-Range'),
                    self._resumed_from
                )

            else:
                self._resumed_from = 0
                try:
                    self._output_file.seek(0)
                    self._output_file.truncate()
                except IOError:
                    pass  # stdout
        else:
            # TODO: Should the filename be taken from response.history[0].url?
            # Output file not specified. Pick a name that doesn't exist yet.
            filename = None
            if 'Content-Disposition' in response.headers:
                filename = filename_from_content_disposition(
                    response.headers['Content-Disposition'])
            if not filename:
                filename = filename_from_url(
                    url=response.url,
                    content_type=response.headers.get('Content-Type'),
                )
            self._output_file = open(get_unique_filename(filename), mode='a+b')

        self.status.started(
            resumed_from=self._resumed_from,
            total_size=total_size
        )

        stream = RawStream(
            msg=HTTPResponse(response),
            with_headers=False,
            with_body=True,
            on_body_chunk_downloaded=self.chunk_downloaded,
            chunk_size=1024 * 8
        )

        self._progress_reporter.output.write(
            'Downloading %sto "%s"\n' % (
                (humanize_bytes(total_size) + ' '
                 if total_size is not None
                 else ''),
                self._output_file.name
            )
        )
        self._progress_reporter.start()

        return stream, self._output_file

    def finish(self):
        assert not self.finished
        self.finished = True
        self.status.finished()

    def failed(self):
        self._progress_reporter.stop()

    @property
    def interrupted(self):
        return (
            self.finished and
            self.status.total_size and
            self.status.total_size != self.status.downloaded
        )

    def chunk_downloaded(self, chunk):
        """
        A download progress callback.

        :param chunk: A chunk of response body data that has just
                      been downloaded and written to the output.
        :type chunk: bytes

        """
        self.status.chunk_downloaded(len(chunk))
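
# Rough usage sketch (illustrative only; the real call site lives in the
# httpie client code):
#   downloader = Downloader(resume=False)
#   downloader.pre_request(request_headers)
#   stream, output_file = downloader.start(response)
#   # the caller writes `stream` to `output_file`; each chunk triggers
#   # chunk_downloaded() and thereby updates the shared Status object
#   downloader.finish()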


class Status(object):
    """Holds details about the download status."""

    def __init__(self):
        self.downloaded = 0
        self.total_size = None
        self.resumed_from = 0
        self.time_started = None
        self.time_finished = None

    def started(self, resumed_from=0, total_size=None):
        assert self.time_started is None
        self.total_size = total_size
        self.downloaded = self.resumed_from = resumed_from
        self.time_started = time()

    def chunk_downloaded(self, size):
        assert self.time_finished is None
        self.downloaded += size

    @property
    def has_finished(self):
        return self.time_finished is not None

    def finished(self):
        assert self.time_started is not None
        assert self.time_finished is None
        self.time_finished = time()
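
# The asserts above encode the expected call order: started() exactly once,
# then chunk_downloaded() any number of times, then finished() exactly once.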


class ProgressReporterThread(threading.Thread):
    """
    Reports download progress based on its status.

    Uses a separate thread to periodically report the status (speed, ETA, etc.).

    """
    def __init__(self, status, output, tick=.1, update_interval=1):
        """

        :type status: Status
        :type output: file
        """
        super(ProgressReporterThread, self).__init__()
        self.status = status
        self.output = output
        self._tick = tick
        self._update_interval = update_interval
        self._spinner_pos = 0
        self._status_line = ''
        self._prev_bytes = 0
        self._prev_time = time()
        self._should_stop = threading.Event()

    def stop(self):
        """Stop reporting on next tick."""
        self._should_stop.set()

    def run(self):
        while not self._should_stop.is_set():
            if self.status.has_finished:
                self.sum_up()
                break

            self.report_speed()
            sleep(self._tick)

    def report_speed(self):

        now = time()

        if now - self._prev_time >= self._update_interval:
            downloaded = self.status.downloaded
            try:
                speed = ((downloaded - self._prev_bytes) /
                         (now - self._prev_time))
            except ZeroDivisionError:
                speed = 0

            if not self.status.total_size:
                self._status_line = PROGRESS_NO_CONTENT_LENGTH.format(
                    downloaded=humanize_bytes(downloaded),
                    speed=humanize_bytes(speed),
                )
            else:
                try:
                    percentage = downloaded / self.status.total_size * 100
                except ZeroDivisionError:
                    percentage = 0

                if not speed:
                    eta = '-:--:--'
                else:
                    s = int((self.status.total_size - downloaded) / speed)
                    h, s = divmod(s, 60 * 60)
                    m, s = divmod(s, 60)
                    eta = '{0}:{1:0>2}:{2:0>2}'.format(h, m, s)

                self._status_line = PROGRESS.format(
                    percentage=percentage,
                    downloaded=humanize_bytes(downloaded),
                    speed=humanize_bytes(speed),
                    eta=eta,
                )

            self._prev_time = now
            self._prev_bytes = downloaded

        self.output.write(
            CLEAR_LINE +
            ' ' +
            SPINNER[self._spinner_pos] +
            ' ' +
            self._status_line
        )
        self.output.flush()

        self._spinner_pos = (self._spinner_pos + 1
                             if self._spinner_pos + 1 != len(SPINNER)
                             else 0)
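
    # Illustrative ETA math (not executed): with total_size=10 MB,
    # downloaded=1 MB and speed ~1 MB/s, s = int(9 MB / 1 MB/s) = 9,
    # giving eta == '0:00:09'.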

    def sum_up(self):
        actually_downloaded = (
            self.status.downloaded - self.status.resumed_from)
        time_taken = self.status.time_finished - self.status.time_started

        self.output.write(CLEAR_LINE)

        try:
            speed = actually_downloaded / time_taken
        except ZeroDivisionError:
            # time_taken can be 0 (not all systems provide `time.time`
            # with sub-second precision), in which case fall back to
            # reporting the byte count itself as the speed.
            speed = actually_downloaded

        self.output.write(SUMMARY.format(
            downloaded=humanize_bytes(actually_downloaded),
            total=(self.status.total_size and
                   humanize_bytes(self.status.total_size)),
            speed=humanize_bytes(speed),
            time=time_taken,
        ))
        self.output.flush()