from __future__ import unicode_literals

from ..downloader import _get_real_downloader
from .fragment import FragmentFD

from ..utils import urljoin


class DashSegmentsFD(FragmentFD):
    """
    Download segments in a DASH manifest. External downloaders can take over
    the fragment downloads by supporting the 'dash_frag_urls' protocol
    """

    FD_NAME = 'dashsegments'

    def real_download(self, filename, info_dict):
        fragment_base_url = info_dict.get('fragment_base_url')
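        # In test mode ('test' param), only the first fragment is downloaded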
        fragments = info_dict['fragments'][:1] if self.params.get(
            'test', False) else info_dict['fragments']
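        # Look for an external downloader that supports the 'dash_frag_urls'
        # protocol; if one is available, fragment downloads are delegated to it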
        real_downloader = _get_real_downloader(info_dict, 'dash_frag_urls', self.params, None)
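        # Download context shared with the fragment helpers; the _prepare_*
        # calls below add further state such as ctx['fragment_index']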
        ctx = {
            'filename': filename,
            'total_frags': len(fragments),
        }

        if real_downloader:
            self._prepare_external_frag_download(ctx)
        else:
            self._prepare_and_start_frag_download(ctx)
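        # Collect the fragments that still need to be fetched, skipping any
        # already completed by an earlier, resumed run (ctx['fragment_index'])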
        fragments_to_download = []
        frag_index = 0
        for i, fragment in enumerate(fragments):
            frag_index += 1
            if frag_index <= ctx['fragment_index']:
                continue
            fragment_url = fragment.get('url')
            if not fragment_url:
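                # No absolute URL: the fragment only carries a path relative
                # to the manifest's fragment base URL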
                assert fragment_base_url
                fragment_url = urljoin(fragment_base_url, fragment['path'])

            fragments_to_download.append({
                'frag_index': frag_index,
                'index': i,
                'url': fragment_url,
            })

        if real_downloader:
            self.to_screen(
                '[%s] Fragment downloads will be delegated to %s' % (self.FD_NAME, real_downloader.get_basename()))
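            # Hand the external downloader a copy of the info dict that lists
            # only the remaining fragments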
            info_copy = info_dict.copy()
            info_copy['fragments'] = fragments_to_download
            fd = real_downloader(self.ydl, self.params)
            # TODO: Make progress updates work without hooking twice
            # for ph in self._progress_hooks:
            #     fd.add_progress_hook(ph)
            return fd.real_download(filename, info_copy)
        else:
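            # No external downloader: download the fragments natively and
            # append them to the output file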
            return self.download_and_append_fragments(ctx, fragments_to_download, info_dict)