Introduce --http-chunk-size
parent e2e18694db
commit ba515388b8

test/test_downloader_http.py
@@ -0,0 +1,123 @@
+#!/usr/bin/env python
+# coding: utf-8
+from __future__ import unicode_literals
+
+# Allow direct execution
+import os
+import re
+import sys
+import unittest
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+from youtube_dl import YoutubeDL
+from youtube_dl.compat import compat_http_server
+from youtube_dl.downloader.http import HttpFD
+from youtube_dl.utils import encodeFilename
+import ssl
+import threading
+
+TEST_DIR = os.path.dirname(os.path.abspath(__file__))
+
+
+def http_server_port(httpd):
+    if os.name == 'java' and isinstance(httpd.socket, ssl.SSLSocket):
+        # In Jython SSLSocket is not a subclass of socket.socket
+        sock = httpd.socket.sock
+    else:
+        sock = httpd.socket
+    return sock.getsockname()[1]
+
+
+TEST_SIZE = 10 * 1024
+
+
+class HTTPTestRequestHandler(compat_http_server.BaseHTTPRequestHandler):
+    def log_message(self, format, *args):
+        pass
+
+    def send_content_range(self, total=None):
+        range_header = self.headers.get('Range')
+        start = end = None
+        if range_header:
+            mobj = re.search(r'^bytes=(\d+)-(\d+)', range_header)
+            if mobj:
+                start = int(mobj.group(1))
+                end = int(mobj.group(2))
+        valid_range = start is not None and end is not None
+        if valid_range:
+            content_range = 'bytes %d-%d' % (start, end)
+            if total:
+                content_range += '/%d' % total
+            self.send_header('Content-Range', content_range)
+        return (end - start + 1) if valid_range else total
+
+    def serve(self, range=True, content_length=True):
+        self.send_response(200)
+        self.send_header('Content-Type', 'video/mp4')
+        size = TEST_SIZE
+        if range:
+            size = self.send_content_range(TEST_SIZE)
+        if content_length:
+            self.send_header('Content-Length', size)
+        self.end_headers()
+        self.wfile.write(b'#' * size)
+
+    def do_GET(self):
+        if self.path == '/regular':
+            self.serve()
+        elif self.path == '/no-content-length':
+            self.serve(content_length=False)
+        elif self.path == '/no-range':
+            self.serve(range=False)
+        elif self.path == '/no-range-no-content-length':
+            self.serve(range=False, content_length=False)
+        else:
+            assert False
+
+
+class FakeLogger(object):
+    def debug(self, msg):
+        pass
+
+    def warning(self, msg):
+        pass
+
+    def error(self, msg):
+        pass
+
+
+class TestHttpFD(unittest.TestCase):
+    def setUp(self):
+        self.httpd = compat_http_server.HTTPServer(
+            ('127.0.0.1', 0), HTTPTestRequestHandler)
+        self.port = http_server_port(self.httpd)
+        self.server_thread = threading.Thread(target=self.httpd.serve_forever)
+        self.server_thread.daemon = True
+        self.server_thread.start()
+
+    def download(self, params, ep):
+        params['logger'] = FakeLogger()
+        ydl = YoutubeDL(params)
+        downloader = HttpFD(ydl, params)
+        filename = 'testfile.mp4'
+        self.assertTrue(downloader.real_download(filename, {
+            'url': 'http://127.0.0.1:%d/%s' % (self.port, ep),
+        }))
+        self.assertEqual(os.path.getsize(encodeFilename(filename)), TEST_SIZE)
+        os.remove(encodeFilename(filename))
+
+    def download_all(self, params):
+        for ep in ('regular', 'no-content-length', 'no-range', 'no-range-no-content-length'):
+            self.download(params, ep)
+
+    def test_regular(self):
+        self.download_all({})
+
+    def test_chunked(self):
+        self.download_all({
+            'http_chunk_size': 1000,
+        })
+
+
+if __name__ == '__main__':
+    unittest.main()
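Note (editor's sketch, not part of the commit): the new test drives HttpFD directly with an http_chunk_size of 1000 bytes. Once this commit is applied, the same key can be passed through the public YoutubeDL options, which the _real_main() hunks below wire from the new --http-chunk-size flag into ydl_opts. The URL here is a placeholder.

    # Hypothetical usage of the new option via the embedding API.
    from youtube_dl import YoutubeDL

    ydl_opts = {
        'http_chunk_size': 10485760,  # request the file in 10 MiB Range chunks
    }
    with YoutubeDL(ydl_opts) as ydl:
        ydl.download(['https://example.com/some-video'])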
youtube_dl/__init__.py
@@ -191,6 +191,11 @@ def _real_main(argv=None):
         if numeric_buffersize is None:
             parser.error('invalid buffer size specified')
         opts.buffersize = numeric_buffersize
+    if opts.http_chunk_size is not None:
+        numeric_chunksize = FileDownloader.parse_bytes(opts.http_chunk_size)
+        if not numeric_chunksize:
+            parser.error('invalid http chunk size specified')
+        opts.http_chunk_size = numeric_chunksize
     if opts.playliststart <= 0:
         raise ValueError('Playlist start must be positive')
     if opts.playlistend not in (-1, None) and opts.playlistend < opts.playliststart:
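The hunk above validates the new option with FileDownloader.parse_bytes, the same helper already used for --buffer-size just before it, so values such as '10M' are accepted alongside plain byte counts. A minimal sketch of that behaviour (assuming parse_bytes works as it does for --buffer-size: a size suffix is expanded, unparsable input returns None and triggers parser.error):

    from youtube_dl.downloader.common import FileDownloader

    for value in ('10485760', '10M', 'not-a-size'):
        # '10M' is expected to expand to 10485760; the last value should yield None
        print(value, '->', FileDownloader.parse_bytes(value))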
@@ -346,6 +351,7 @@ def _real_main(argv=None):
         'keep_fragments': opts.keep_fragments,
         'buffersize': opts.buffersize,
         'noresizebuffer': opts.noresizebuffer,
+        'http_chunk_size': opts.http_chunk_size,
         'continuedl': opts.continue_dl,
         'noprogress': opts.noprogress,
         'progress_with_newline': opts.progress_with_newline,
youtube_dl/downloader/http.py
@@ -7,10 +7,14 @@ import time
 import re

 from .common import FileDownloader
-from ..compat import compat_urllib_error
+from ..compat import (
+    compat_str,
+    compat_urllib_error,
+)
 from ..utils import (
     ContentTooShortError,
     encodeFilename,
+    int_or_none,
     sanitize_open,
     sanitized_Request,
     write_xattr,
@@ -42,17 +46,22 @@ class HttpFD(FileDownloader):
         request = sanitized_Request(url, None, headers)

         is_test = self.params.get('test', False)
-
-        if is_test:
-            request.add_header('Range', 'bytes=0-%s' % str(self._TEST_FILE_SIZE - 1))
+        chunk_size = self._TEST_FILE_SIZE if is_test else (
+            self.params.get('http_chunk_size') or 0)

         ctx.open_mode = 'wb'
         ctx.resume_len = 0
+        ctx.data_len = None
+        ctx.block_size = self.params.get('buffersize', 1024)
+        ctx.start_time = time.time()

         if self.params.get('continuedl', True):
             # Establish possible resume length
             if os.path.isfile(encodeFilename(ctx.tmpfilename)):
-                ctx.resume_len = os.path.getsize(encodeFilename(ctx.tmpfilename))
+                ctx.resume_len = os.path.getsize(
+                    encodeFilename(ctx.tmpfilename))
+
+        ctx.is_resume = ctx.resume_len > 0

         count = 0
         retries = self.params.get('retries', 0)
@@ -64,11 +73,33 @@ class HttpFD(FileDownloader):
             def __init__(self, source_error):
                 self.source_error = source_error

+        class NextFragment(Exception):
+            pass
+
+        def set_range(req, start, end):
+            range_header = 'bytes=%d-' % start
+            if end:
+                range_header += compat_str(end)
+            req.add_header('Range', range_header)
+
         def establish_connection():
-            if ctx.resume_len != 0:
-                self.report_resuming_byte(ctx.resume_len)
-                request.add_header('Range', 'bytes=%d-' % ctx.resume_len)
+            if ctx.resume_len > 0:
+                range_start = ctx.resume_len
+                if ctx.is_resume:
+                    self.report_resuming_byte(ctx.resume_len)
                 ctx.open_mode = 'ab'
+            elif chunk_size > 0:
+                range_start = 0
+            else:
+                range_start = None
+            ctx.is_resume = False
+            range_end = range_start + chunk_size - 1 if chunk_size else None
+            if range_end and ctx.data_len is not None and range_end >= ctx.data_len:
+                range_end = ctx.data_len - 1
+            has_range = range_start is not None
+            ctx.has_range = has_range
+            if has_range:
+                set_range(request, range_start, range_end)
             # Establish connection
             try:
                 ctx.data = self.ydl.urlopen(request)
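Editor's note: with a non-zero chunk_size, establish_connection() above asks for one byte range per connection, [range_start, range_start + chunk_size - 1], clamped to the known data_len. A standalone sketch of that arithmetic (not the downloader itself), using the sizes the new test exercises (a 10 * 1024 byte file fetched in 1000-byte chunks):

    def chunk_ranges(data_len, chunk_size):
        # Yield the successive Range header values a chunked download would send.
        start = 0
        while start < data_len:
            end = min(start + chunk_size - 1, data_len - 1)
            yield 'bytes=%d-%d' % (start, end)
            start = end + 1

    for header in chunk_ranges(10 * 1024, 1000):
        print(header)  # bytes=0-999, bytes=1000-1999, ..., bytes=10000-10239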
@@ -77,25 +108,35 @@ class HttpFD(FileDownloader):
                 # that don't support resuming and serve a whole file with no Content-Range
                 # set in response despite of requested Range (see
                 # https://github.com/rg3/youtube-dl/issues/6057#issuecomment-126129799)
-                if ctx.resume_len > 0:
+                if has_range:
                     content_range = ctx.data.headers.get('Content-Range')
                     if content_range:
-                        content_range_m = re.search(r'bytes (\d+)-', content_range)
+                        content_range_m = re.search(r'bytes (\d+)-(\d+)?(?:/(\d+))?', content_range)
                         # Content-Range is present and matches requested Range, resume is possible
-                        if content_range_m and ctx.resume_len == int(content_range_m.group(1)):
-                            return
+                        if content_range_m:
+                            if range_start == int(content_range_m.group(1)):
+                                content_range_end = int_or_none(content_range_m.group(2))
+                                content_len = int_or_none(content_range_m.group(3))
+                                accept_content_len = (
+                                    # Non-chunked download
+                                    not chunk_size or
+                                    # Chunked download and requested piece or
+                                    # its part is promised to be served
+                                    content_range_end == range_end or
+                                    content_len < range_end)
+                                if accept_content_len:
+                                    ctx.data_len = content_len
+                                    return
                         # Content-Range is either not present or invalid. Assuming remote webserver is
                         # trying to send the whole file, resume is not possible, so wiping the local file
                         # and performing entire redownload
                         self.report_unable_to_resume()
                         ctx.resume_len = 0
                         ctx.open_mode = 'wb'
+                ctx.data_len = int_or_none(ctx.data.info().get('Content-length', None))
                 return
             except (compat_urllib_error.HTTPError, ) as err:
-                if (err.code < 500 or err.code >= 600) and err.code != 416:
-                    # Unexpected HTTP error
-                    raise
-                elif err.code == 416:
+                if err.code == 416:
                     # Unable to resume (requested range not satisfiable)
                     try:
                         # Open the connection again without the range header
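Editor's note: the widened regular expression above also captures the end offset and the total length, so a chunked response is accepted when the server echoes the requested start and either serves the requested end or reports a total shorter than it. A small sketch of the parsing on an invented header value:

    import re

    # Groups: 1 = start offset, 2 = end offset (may be absent), 3 = total length (may be absent).
    m = re.search(r'bytes (\d+)-(\d+)?(?:/(\d+))?', 'bytes 0-999/10240')
    print(m.group(1), m.group(2), m.group(3))  # -> 0 999 10240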
@@ -130,6 +171,15 @@ class HttpFD(FileDownloader):
                             ctx.resume_len = 0
                             ctx.open_mode = 'wb'
                             return
+                elif err.code == 302:
+                    if not chunk_size:
+                        raise
+                    # HTTP Error 302: The HTTP server returned a redirect error that would lead to an infinite loop.
+                    # may happen during chunk downloading. This is usually fixed
+                    # with a retry.
+                elif err.code < 500 or err.code >= 600:
+                    # Unexpected HTTP error
+                    raise
                 raise RetryDownload(err)
             except socket.error as err:
                 if err.errno != errno.ECONNRESET:
@@ -160,7 +210,7 @@ class HttpFD(FileDownloader):
                     return False

             byte_counter = 0 + ctx.resume_len
-            block_size = self.params.get('buffersize', 1024)
+            block_size = ctx.block_size
             start = time.time()

             # measure time over whole while-loop, so slow_down() and best_block_size() work together properly
@@ -233,25 +283,30 @@ class HttpFD(FileDownloader):

                 # Progress message
                 speed = self.calc_speed(start, now, byte_counter - ctx.resume_len)
-                if data_len is None:
+                if ctx.data_len is None:
                     eta = None
                 else:
-                    eta = self.calc_eta(start, time.time(), data_len - ctx.resume_len, byte_counter - ctx.resume_len)
+                    eta = self.calc_eta(start, time.time(), ctx.data_len - ctx.resume_len, byte_counter - ctx.resume_len)

                 self._hook_progress({
                     'status': 'downloading',
                     'downloaded_bytes': byte_counter,
-                    'total_bytes': data_len,
+                    'total_bytes': ctx.data_len,
                     'tmpfilename': ctx.tmpfilename,
                     'filename': ctx.filename,
                     'eta': eta,
                     'speed': speed,
-                    'elapsed': now - start,
+                    'elapsed': now - ctx.start_time,
                 })

                 if is_test and byte_counter == data_len:
                     break

+            if not is_test and chunk_size and ctx.data_len is not None and byte_counter < ctx.data_len:
+                ctx.resume_len = byte_counter
+                # ctx.block_size = block_size
+                raise NextFragment()
+
             if ctx.stream is None:
                 self.to_stderr('\n')
                 self.report_error('Did not get any data blocks')
@@ -276,7 +331,7 @@ class HttpFD(FileDownloader):
                 'total_bytes': byte_counter,
                 'filename': ctx.filename,
                 'status': 'finished',
-                'elapsed': time.time() - start,
+                'elapsed': time.time() - ctx.start_time,
             })

             return True
@@ -290,6 +345,8 @@ class HttpFD(FileDownloader):
                 if count <= retries:
                     self.report_retry(e.source_error, count, retries)
                 continue
+            except NextFragment:
+                continue
             except SucceedDownload:
                 return True

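Editor's note: NextFragment is what turns the retry loop above into a chunk loop. download() raises it when a chunk completes while byte_counter is still below ctx.data_len; real_download() catches it and calls establish_connection() and download() again, and because ctx.resume_len has been advanced the next connection requests the following byte range. A simplified, self-contained sketch of that control flow (not the real HttpFD code):

    class NextFragment(Exception):
        pass

    def download(ctx, chunk_size, data_len):
        # Pretend one chunk was fetched starting at ctx['resume_len'].
        ctx['resume_len'] = min(ctx['resume_len'] + chunk_size, data_len)
        if ctx['resume_len'] < data_len:
            raise NextFragment()
        return True

    ctx = {'resume_len': 0}
    while True:
        try:
            download(ctx, 1000, 10 * 1024)
            break
        except NextFragment:
            continue
    print(ctx['resume_len'])  # -> 10240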
youtube_dl/options.py
@@ -478,6 +478,11 @@ def parseOpts(overrideArguments=None):
         '--no-resize-buffer',
         action='store_true', dest='noresizebuffer', default=False,
         help='Do not automatically adjust the buffer size. By default, the buffer size is automatically resized from an initial value of SIZE.')
+    downloader.add_option(
+        '--http-chunk-size',
+        dest='http_chunk_size', metavar='SIZE', default=None,
+        help='Size of a chunk for chunk-based HTTP downloading (e.g. 10485760 or 10M) (default is disabled). '
+             'May be useful for bypassing bandwidth throttling imposed by a webserver (experimental)')
     downloader.add_option(
         '--test',
         action='store_true', dest='test', default=False,