from __future__ import unicode_literals

from ..downloader import _get_real_downloader
from .fragment import FragmentFD

from ..utils import urljoin


class DashSegmentsFD(FragmentFD):
    """
    Download segments in a DASH manifest. External downloaders can take over
    the fragment downloads by supporting the 'dash_frag_urls' protocol
    """

    FD_NAME = 'dashsegments'

    def real_download(self, filename, info_dict):
        fragment_base_url = info_dict.get('fragment_base_url')
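        # With the 'test' param set, only the first fragment is downloaded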
        fragments = info_dict['fragments'][:1] if self.params.get(
            'test', False) else info_dict['fragments']
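
        # Look for an external downloader that supports the 'dash_frag_urls'
        # protocol; None (the default) falls back to the native fragment downloader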
        real_downloader = _get_real_downloader(info_dict, 'dash_frag_urls', self.params, None)
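
        # Download context: output filename and total fragment count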
        ctx = {
            'filename': filename,
            'total_frags': len(fragments),
        }
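
        # An external downloader only needs the context prepared; the native
        # path prepares it and starts the fragment download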
        if real_downloader:
            self._prepare_external_frag_download(ctx)
        else:
            self._prepare_and_start_frag_download(ctx, info_dict)
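
        # Collect the fragments that still need to be fetched; fragments up to
        # ctx['fragment_index'] have already been downloaded (e.g. when resuming
        # a partial download)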
        fragments_to_download = []
        frag_index = 0
        for i, fragment in enumerate(fragments):
            frag_index += 1
            if frag_index <= ctx['fragment_index']:
                continue
            fragment_url = fragment.get('url')
            if not fragment_url:
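                # Fragments without an absolute URL only carry a path relative
                # to fragment_base_url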
                assert fragment_base_url
                fragment_url = urljoin(fragment_base_url, fragment['path'])

            fragments_to_download.append({
                'frag_index': frag_index,
                'index': i,
                'url': fragment_url,
            })
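
        # If an external downloader was selected, hand it the remaining fragments
        # through a copy of the info dict and let it write the output file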
        if real_downloader:
            self.to_screen(
                '[%s] Fragment downloads will be delegated to %s' % (self.FD_NAME, real_downloader.get_basename()))
            info_copy = info_dict.copy()
            info_copy['fragments'] = fragments_to_download
            fd = real_downloader(self.ydl, self.params)
            # TODO: Make progress updates work without hooking twice
            # for ph in self._progress_hooks:
            #     fd.add_progress_hook(ph)
            return fd.real_download(filename, info_copy)
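
        # Otherwise, download each fragment natively and append it to the output file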
        return self.download_and_append_fragments(ctx, fragments_to_download, info_dict)