2014-09-24 08:16:56 -04:00
|
|
|
from __future__ import unicode_literals
|
|
|
|
|
2021-04-25 21:52:24 -04:00
|
|
|
import errno
|
2014-09-24 08:16:56 -04:00
|
|
|
import re
|
2016-01-10 14:09:53 -05:00
|
|
|
import binascii
|
|
|
|
# pycryptodome (Crypto) is an optional dependency; it is only needed to
# decrypt AES-128 encrypted HLS fragments natively.  Record availability so
# can_download() can fall back to ffmpeg when it is missing.
try:
    from Crypto.Cipher import AES
    can_decrypt_frag = True
except ImportError:
    can_decrypt_frag = False
|
2021-03-12 23:46:58 -05:00
|
|
|
# concurrent.futures is stdlib on Python 3 but absent on old Python 2
# installs; threaded fragment downloads are only enabled when it imports.
try:
    import concurrent.futures
    can_threaded_download = True
except ImportError:
    can_threaded_download = False
|
2013-09-23 11:59:27 -04:00
|
|
|
|
2021-02-08 11:46:01 -05:00
|
|
|
from ..downloader import _get_real_downloader
|
2015-07-28 16:28:30 -04:00
|
|
|
from .fragment import FragmentFD
|
2016-05-01 03:56:51 -04:00
|
|
|
from .external import FFmpegFD
|
2015-07-28 16:28:30 -04:00
|
|
|
|
2016-01-10 14:09:53 -05:00
|
|
|
from ..compat import (
|
2016-08-26 17:55:55 -04:00
|
|
|
compat_urllib_error,
|
2016-01-10 14:09:53 -05:00
|
|
|
compat_urlparse,
|
|
|
|
compat_struct_pack,
|
|
|
|
)
|
2014-12-13 06:24:42 -05:00
|
|
|
from ..utils import (
|
2016-01-10 14:09:53 -05:00
|
|
|
parse_m3u8_attributes,
|
2021-03-12 23:46:58 -05:00
|
|
|
sanitize_open,
|
2016-08-13 17:53:07 -04:00
|
|
|
update_url_query,
|
2013-09-23 11:59:27 -04:00
|
|
|
)
|
|
|
|
|
|
|
|
|
2016-02-19 13:29:24 -05:00
|
|
|
class HlsFD(FragmentFD):
    """
    Download segments in a m3u8 manifest. External downloaders can take over
    the fragment downloads by supporting the 'm3u8_frag_urls' protocol and
    re-defining 'supports_manifest' function
    """

    FD_NAME = 'hlsnative'

    @staticmethod
    def can_download(manifest, info_dict, allow_unplayable_formats=False, with_crypto=can_decrypt_frag):
        """Return True if the native downloader can handle this manifest.

        manifest -- m3u8 playlist text
        info_dict -- extraction metadata; only 'is_live' is consulted here
        allow_unplayable_formats -- when falsy, reject encryption methods
            other than NONE/AES-128
        with_crypto -- whether AES-128 decryption support is available
        """
        UNSUPPORTED_FEATURES = [
            # r'#EXT-X-BYTERANGE',  # playlists composed of byte ranges of media files [2]

            # Live streams heuristic does not always work (e.g. geo restricted to Germany
            # http://hls-geo.daserste.de/i/videoportal/Film/c_620000/622873/format,716451,716457,716450,716458,716459,.mp4.csmil/index_4_av.m3u8?null=0)
            # r'#EXT-X-MEDIA-SEQUENCE:(?!0$)',  # live streams [3]

            # This heuristic also is not correct since segments may not be appended as well.
            # Twitch vods of finished streams have EXT-X-PLAYLIST-TYPE:EVENT despite
            # no segments will definitely be appended to the end of the playlist.
            # r'#EXT-X-PLAYLIST-TYPE:EVENT',  # media segments may be appended to the end of
            #                                 # event media playlists [4]
            # r'#EXT-X-MAP:',  # media initialization [5]

            # 1. https://tools.ietf.org/html/draft-pantos-http-live-streaming-17#section-4.3.2.4
            # 2. https://tools.ietf.org/html/draft-pantos-http-live-streaming-17#section-4.3.2.2
            # 3. https://tools.ietf.org/html/draft-pantos-http-live-streaming-17#section-4.3.3.2
            # 4. https://tools.ietf.org/html/draft-pantos-http-live-streaming-17#section-4.3.3.5
            # 5. https://tools.ietf.org/html/draft-pantos-http-live-streaming-17#section-4.3.2.5
        ]
        if not allow_unplayable_formats:
            UNSUPPORTED_FEATURES += [
                r'#EXT-X-KEY:METHOD=(?!NONE|AES-128)',  # encrypted streams [1]
            ]

        def check_results():
            # Each yielded value is one "this feature is supported" check;
            # all() below short-circuits on the first failure.
            yield not info_dict.get('is_live')
            is_aes128_enc = '#EXT-X-KEY:METHOD=AES-128' in manifest
            yield with_crypto or not is_aes128_enc
            # AES-128 combined with byte ranges is not supported natively.
            yield not (is_aes128_enc and r'#EXT-X-BYTERANGE' in manifest)
            for feature in UNSUPPORTED_FEATURES:
                yield not re.search(feature, manifest)
        return all(check_results())

    def real_download(self, filename, info_dict):
        """Download the m3u8 manifest at info_dict['url'] and all its
        fragments into `filename`.

        Delegates to FFmpegFD when the manifest uses features the native
        downloader cannot handle, or to an external fragment downloader
        when one claims the 'm3u8_frag_urls' protocol.  Returns True on
        success, False on failure.
        """
        man_url = info_dict['url']
        self.to_screen('[%s] Downloading m3u8 manifest' % self.FD_NAME)

        urlh = self.ydl.urlopen(self._prepare_url(info_dict, man_url))
        # The manifest URL may have been redirected; relative fragment URLs
        # must be resolved against the final URL.
        man_url = urlh.geturl()
        s = urlh.read().decode('utf-8', 'ignore')

        if not self.can_download(s, info_dict, self.params.get('allow_unplayable_formats')):
            if info_dict.get('extra_param_to_segment_url') or info_dict.get('_decryption_key_url'):
                # ffmpeg cannot forward these, so there is no fallback.
                self.report_error('pycryptodome not found. Please install')
                return False
            if self.can_download(s, info_dict, with_crypto=True):
                self.report_warning('pycryptodome is needed to download this file natively')
            fd = FFmpegFD(self.ydl, self.params)
            self.report_warning(
                '%s detected unsupported features; extraction will be delegated to %s' % (self.FD_NAME, fd.get_basename()))
            # TODO: Make progress updates work without hooking twice
            # for ph in self._progress_hooks:
            #     fd.add_progress_hook(ph)
            return fd.real_download(filename, info_dict)

        real_downloader = _get_real_downloader(info_dict, 'm3u8_frag_urls', self.params, None)
        if real_downloader and not real_downloader.supports_manifest(s):
            real_downloader = None
        if real_downloader:
            self.to_screen(
                '[%s] Fragment downloads will be delegated to %s' % (self.FD_NAME, real_downloader.get_basename()))

        def is_ad_fragment_start(s):
            # Anvato/Uplynk SSAI markers open an ad break.
            return (s.startswith('#ANVATO-SEGMENT-INFO') and 'type=ad' in s
                    or s.startswith('#UPLYNK-SEGMENT') and s.endswith(',ad'))

        def is_ad_fragment_end(s):
            # Anvato/Uplynk SSAI markers close an ad break.
            return (s.startswith('#ANVATO-SEGMENT-INFO') and 'type=master' in s
                    or s.startswith('#UPLYNK-SEGMENT') and s.endswith(',segment'))

        fragments = []

        # First pass: count media vs. ad fragments for progress reporting.
        media_frags = 0
        ad_frags = 0
        ad_frag_next = False
        for line in s.splitlines():
            line = line.strip()
            if not line:
                continue
            if line.startswith('#'):
                if is_ad_fragment_start(line):
                    ad_frag_next = True
                elif is_ad_fragment_end(line):
                    ad_frag_next = False
                continue
            if ad_frag_next:
                ad_frags += 1
                continue
            media_frags += 1

        ctx = {
            'filename': filename,
            'total_frags': media_frags,
            'ad_frags': ad_frags,
        }

        if real_downloader:
            self._prepare_external_frag_download(ctx)
        else:
            self._prepare_and_start_frag_download(ctx)

        fragment_retries = self.params.get('fragment_retries', 0)
        skip_unavailable_fragments = self.params.get('skip_unavailable_fragments', True)
        test = self.params.get('test', False)

        format_index = info_dict.get('format_index')
        extra_query = None
        extra_param_to_segment_url = info_dict.get('extra_param_to_segment_url')
        if extra_param_to_segment_url:
            extra_query = compat_urlparse.parse_qs(extra_param_to_segment_url)
        # Second pass: build the fragment list, tracking the parser state
        # (encryption key, byte range, media sequence, discontinuities).
        media_sequence = 0
        decrypt_info = {'METHOD': 'NONE'}
        byte_range = {}
        discontinuity_count = 0
        frag_index = 0
        ad_frag_next = False
        for line in s.splitlines():
            line = line.strip()
            if line:
                if not line.startswith('#'):
                    # NOTE(review): format_index == 0 is falsy, so the
                    # first discontinuity sequence can never be selected
                    # explicitly — assumed intentional upstream, confirm.
                    if format_index and discontinuity_count != format_index:
                        continue
                    if ad_frag_next:
                        continue
                    frag_index += 1
                    # Skip fragments already downloaded in a resumed run.
                    if frag_index <= ctx['fragment_index']:
                        continue
                    frag_url = (
                        line
                        if re.match(r'^https?://', line)
                        else compat_urlparse.urljoin(man_url, line))
                    if extra_query:
                        frag_url = update_url_query(frag_url, extra_query)

                    fragments.append({
                        'frag_index': frag_index,
                        'url': frag_url,
                        'decrypt_info': decrypt_info,
                        'byte_range': byte_range,
                        'media_sequence': media_sequence,
                    })

                elif line.startswith('#EXT-X-MAP'):
                    if format_index and discontinuity_count != format_index:
                        continue
                    if frag_index > 0:
                        self.report_error(
                            'Initialization fragment found after media fragments, unable to download')
                        return False
                    frag_index += 1
                    map_info = parse_m3u8_attributes(line[11:])
                    frag_url = (
                        map_info.get('URI')
                        if re.match(r'^https?://', map_info.get('URI'))
                        else compat_urlparse.urljoin(man_url, map_info.get('URI')))
                    if extra_query:
                        frag_url = update_url_query(frag_url, extra_query)

                    fragments.append({
                        'frag_index': frag_index,
                        'url': frag_url,
                        'decrypt_info': decrypt_info,
                        'byte_range': byte_range,
                        'media_sequence': media_sequence,
                    })

                    if map_info.get('BYTERANGE'):
                        splitted_byte_range = map_info.get('BYTERANGE').split('@')
                        # Without an explicit offset, the range continues
                        # where the previous one ended.
                        sub_range_start = int(splitted_byte_range[1]) if len(splitted_byte_range) == 2 else byte_range['end']
                        byte_range = {
                            'start': sub_range_start,
                            'end': sub_range_start + int(splitted_byte_range[0]),
                        }

                elif line.startswith('#EXT-X-KEY'):
                    decrypt_url = decrypt_info.get('URI')
                    decrypt_info = parse_m3u8_attributes(line[11:])
                    if decrypt_info['METHOD'] == 'AES-128':
                        if 'IV' in decrypt_info:
                            # IV is '0x' + hex; pad to 128 bits.
                            decrypt_info['IV'] = binascii.unhexlify(decrypt_info['IV'][2:].zfill(32))
                        if not re.match(r'^https?://', decrypt_info['URI']):
                            decrypt_info['URI'] = compat_urlparse.urljoin(
                                man_url, decrypt_info['URI'])
                        if extra_query:
                            decrypt_info['URI'] = update_url_query(decrypt_info['URI'], extra_query)
                        # A new key URI invalidates any cached key bytes.
                        if decrypt_url != decrypt_info['URI']:
                            decrypt_info['KEY'] = None

                elif line.startswith('#EXT-X-MEDIA-SEQUENCE'):
                    media_sequence = int(line[22:])
                elif line.startswith('#EXT-X-BYTERANGE'):
                    splitted_byte_range = line[17:].split('@')
                    sub_range_start = int(splitted_byte_range[1]) if len(splitted_byte_range) == 2 else byte_range['end']
                    byte_range = {
                        'start': sub_range_start,
                        'end': sub_range_start + int(splitted_byte_range[0]),
                    }
                elif is_ad_fragment_start(line):
                    ad_frag_next = True
                elif is_ad_fragment_end(line):
                    ad_frag_next = False
                elif line.startswith('#EXT-X-DISCONTINUITY'):
                    discontinuity_count += 1
                media_sequence += 1

        # We only download the first fragment during the test.
        # (Was `[fragments[0] if fragments else None]`, which injected a
        # bare None into the list and crashed downstream on an empty
        # playlist.)
        if test:
            fragments = [fragments[0]] if fragments else []

        if real_downloader:
            info_copy = info_dict.copy()
            info_copy['fragments'] = fragments
            fd = real_downloader(self.ydl, self.params)
            # TODO: Make progress updates work without hooking twice
            # for ph in self._progress_hooks:
            #     fd.add_progress_hook(ph)
            success = fd.real_download(filename, info_copy)
            if not success:
                return False
        else:
            def download_fragment(fragment):
                # Fetch (and if needed decrypt) one fragment; returns
                # (content_or_False, frag_index).
                frag_index = fragment['frag_index']
                frag_url = fragment['url']
                decrypt_info = fragment['decrypt_info']
                byte_range = fragment['byte_range']
                media_sequence = fragment['media_sequence']

                ctx['fragment_index'] = frag_index

                count = 0
                # Copy: the 'Range' header below must not leak into the
                # shared info_dict headers (or race between worker threads).
                headers = dict(info_dict.get('http_headers', {}))
                if byte_range:
                    headers['Range'] = 'bytes=%d-%d' % (byte_range['start'], byte_range['end'] - 1)
                while count <= fragment_retries:
                    try:
                        success, frag_content = self._download_fragment(
                            ctx, frag_url, info_dict, headers)
                        if not success:
                            return False, frag_index
                        break
                    except compat_urllib_error.HTTPError as err:
                        # Unavailable (possibly temporary) fragments may be served.
                        # First we try to retry then either skip or abort.
                        # See https://github.com/ytdl-org/youtube-dl/issues/10165,
                        # https://github.com/ytdl-org/youtube-dl/issues/10448).
                        count += 1
                        if count <= fragment_retries:
                            self.report_retry_fragment(err, frag_index, count, fragment_retries)
                if count > fragment_retries:
                    self.report_error('Giving up after %s fragment retries' % fragment_retries)
                    return False, frag_index

                if decrypt_info['METHOD'] == 'AES-128':
                    # Per spec, a missing IV defaults to the big-endian
                    # media sequence number.
                    iv = decrypt_info.get('IV') or compat_struct_pack('>8xq', media_sequence)
                    decrypt_info['KEY'] = decrypt_info.get('KEY') or self.ydl.urlopen(
                        self._prepare_url(info_dict, info_dict.get('_decryption_key_url') or decrypt_info['URI'])).read()
                    # Don't decrypt the content in tests since the data is explicitly truncated and it's not to a valid block
                    # size (see https://github.com/ytdl-org/youtube-dl/pull/27660). Tests only care that the correct data downloaded,
                    # not what it decrypts to.
                    if not test:
                        frag_content = AES.new(
                            decrypt_info['KEY'], AES.MODE_CBC, iv).decrypt(frag_content)

                return frag_content, frag_index

            def append_fragment(frag_content, frag_index):
                # Append downloaded content to the output file; returns
                # False only when the fragment is missing and
                # --skip-unavailable-fragments is off.
                if frag_content:
                    fragment_filename = '%s-Frag%d' % (ctx['tmpfilename'], frag_index)
                    try:
                        file, frag_sanitized = sanitize_open(fragment_filename, 'rb')
                        ctx['fragment_filename_sanitized'] = frag_sanitized
                        file.close()
                        self._append_fragment(ctx, frag_content)
                        return True
                    except EnvironmentError as ose:
                        if ose.errno != errno.ENOENT:
                            raise
                        # FileNotFoundError
                        if skip_unavailable_fragments:
                            self.report_skip_fragment(frag_index)
                            return True
                        else:
                            self.report_error(
                                'fragment %s not found, unable to continue' % frag_index)
                            return False
                else:
                    if skip_unavailable_fragments:
                        self.report_skip_fragment(frag_index)
                        return True
                    else:
                        self.report_error(
                            'fragment %s not found, unable to continue' % frag_index)
                        return False

            max_workers = self.params.get('concurrent_fragment_downloads', 1)
            if can_threaded_download and max_workers > 1:
                self.report_warning('The download speed shown is only of one thread. This is a known issue')
                with concurrent.futures.ThreadPoolExecutor(max_workers) as pool:
                    futures = [pool.submit(download_fragment, fragment) for fragment in fragments]
                    # timeout must be 0 to return instantly
                    done, not_done = concurrent.futures.wait(futures, timeout=0)
                    try:
                        while not_done:
                            # Check every 1 second for KeyboardInterrupt
                            freshly_done, not_done = concurrent.futures.wait(not_done, timeout=1)
                            done |= freshly_done
                    except KeyboardInterrupt:
                        for future in not_done:
                            future.cancel()
                        # timeout must be none to cancel
                        concurrent.futures.wait(not_done, timeout=None)
                        raise
                results = [future.result() for future in futures]

                # Fragments must be appended in playlist order, so this
                # happens after all downloads complete.
                for frag_content, frag_index in results:
                    result = append_fragment(frag_content, frag_index)
                    if not result:
                        return False
            else:
                for fragment in fragments:
                    frag_content, frag_index = download_fragment(fragment)
                    result = append_fragment(frag_content, frag_index)
                    if not result:
                        return False

        self._finish_frag_download(ctx)
        return True