import collections
import contextvars
import functools
import itertools
import json
import os
import re
import subprocess
import time

from .common import PostProcessor
from ..compat import imghdr
from ..utils import (
    MEDIA_EXTENSIONS,
    ISO639Utils,
    Popen,
    PostProcessingError,
    _get_exe_version_output,
    deprecation_warning,
    detect_exe_version,
    determine_ext,
    dfxp2srt,
    encodeArgument,
    filter_dict,
    float_or_none,
    is_outdated_version,
    orderedSet,
    prepend_extension,
    replace_extension,
    shell_quote,
    traverse_obj,
    variadic,
    write_json_file,
)

EXT_TO_OUT_FORMATS = {
    'aac': 'adts',
    'flac': 'flac',
    'm4a': 'ipod',
    'mka': 'matroska',
    'mkv': 'matroska',
    'mpg': 'mpeg',
    'ogv': 'ogg',
    'ts': 'mpegts',
    'wma': 'asf',
    'wmv': 'asf',
    'weba': 'webm',
    'vtt': 'webvtt',
}
ACODECS = {
    # name: (ext, encoder, opts)
    'mp3': ('mp3', 'libmp3lame', ()),
    'aac': ('m4a', 'aac', ('-f', 'adts')),
    'm4a': ('m4a', 'aac', ('-bsf:a', 'aac_adtstoasc')),
    'opus': ('opus', 'libopus', ()),
    'vorbis': ('ogg', 'libvorbis', ()),
    'flac': ('flac', 'flac', ()),
    'alac': ('m4a', None, ('-acodec', 'alac')),
    'wav': ('wav', None, ('-f', 'wav')),
}


def create_mapping_re(supported):
    return re.compile(r'{0}(?:/{0})*$'.format(r'(?:\s*\w+\s*>)?\s*(?:{})\s*'.format('|'.join(supported))))


def resolve_mapping(source, mapping):
    """
    Get corresponding item from a mapping string like 'A>B/C>D/E'
    @returns (target, error_message)
    """
    for pair in mapping.lower().split('/'):
        kv = pair.split('>', 1)
        if len(kv) == 1 or kv[0].strip() == source:
            target = kv[-1].strip()
            if target == source:
                return target, f'already is in target format {source}'
            return target, None
    return None, f'could not find a mapping for {source}'
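
# Illustrative example: with the mapping string 'aac>m4a/mp3', resolve_mapping('aac', 'aac>m4a/mp3')
# returns ('m4a', None); any other source falls through to the unkeyed 'mp3' entry; and a source that
# is already 'mp3' is returned together with a skip message instead of being converted again.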


class FFmpegPostProcessorError(PostProcessingError):
    pass


class FFmpegPostProcessor(PostProcessor):
    _ffmpeg_location = contextvars.ContextVar('ffmpeg_location', default=None)

    def __init__(self, downloader=None):
        PostProcessor.__init__(self, downloader)
        self._prefer_ffmpeg = self.get_param('prefer_ffmpeg', True)
        self._paths = self._determine_executables()

    @staticmethod
    def get_versions_and_features(downloader=None):
        pp = FFmpegPostProcessor(downloader)
        return pp._versions, pp._features

    @staticmethod
    def get_versions(downloader=None):
        return FFmpegPostProcessor.get_versions_and_features(downloader)[0]

    _ffmpeg_to_avconv = {'ffmpeg': 'avconv', 'ffprobe': 'avprobe'}

    def _determine_executables(self):
        programs = [*self._ffmpeg_to_avconv.keys(), *self._ffmpeg_to_avconv.values()]

        location = self.get_param('ffmpeg_location', self._ffmpeg_location.get())
        if location is None:
            return {p: p for p in programs}

        if not os.path.exists(location):
            self.report_warning(
                f'ffmpeg-location {location} does not exist! Continuing without ffmpeg', only_once=True)
            return {}
        elif os.path.isdir(location):
            dirname, basename, filename = location, None, None
        else:
            filename = os.path.basename(location)
            basename = next((p for p in programs if p in filename), 'ffmpeg')
            dirname = os.path.dirname(os.path.abspath(location))
            if basename in self._ffmpeg_to_avconv:
                self._prefer_ffmpeg = True

        paths = {p: os.path.join(dirname, p) for p in programs}
        if basename and basename in filename:
            for p in programs:
                path = os.path.join(dirname, filename.replace(basename, p))
                if os.path.exists(path):
                    paths[p] = path
        if basename:
            paths[basename] = location
        return paths
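        # Illustrative: if --ffmpeg-location points at a file such as '/opt/ffmpeg/bin/ffmpeg'
        # (hypothetical path), dirname becomes '/opt/ffmpeg/bin' and the related executables
        # (ffprobe, avconv, avprobe) are resolved relative to that same directory.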

    _version_cache, _features_cache = {None: None}, {}

    def _get_ffmpeg_version(self, prog):
        path = self._paths.get(prog)
        if path in self._version_cache:
            return self._version_cache[path], self._features_cache.get(path, {})
        out = _get_exe_version_output(path, ['-bsfs'])
        ver = detect_exe_version(out) if out else False
        if ver:
            regexs = [
                r'(?:\d+:)?([0-9.]+)-[0-9]+ubuntu[0-9.]+$',  # Ubuntu, see [1]
                r'n([0-9.]+)$',  # Arch Linux
                # 1. http://www.ducea.com/2006/06/17/ubuntu-package-version-naming-explanation/
            ]
            for regex in regexs:
                mobj = re.match(regex, ver)
                if mobj:
                    ver = mobj.group(1)
        self._version_cache[path] = ver
        if prog != 'ffmpeg' or not out:
            return ver, {}

        mobj = re.search(r'(?m)^\s+libavformat\s+(?:[0-9. ]+)\s+/\s+(?P<runtime>[0-9. ]+)', out)
        lavf_runtime_version = mobj.group('runtime').replace(' ', '') if mobj else None
        self._features_cache[path] = features = {
            'fdk': '--enable-libfdk-aac' in out,
            'setts': 'setts' in out.splitlines(),
            'needs_adtstoasc': is_outdated_version(lavf_runtime_version, '57.56.100', False),
        }
        return ver, features
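        # Illustrative: distro-specific version strings such as '4.4.2-0ubuntu0.22.04.1' (Ubuntu)
        # or 'n6.0' (Arch Linux) are normalized by the regexes above to plain '4.4.2' / '6.0'.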

    @property
    def _versions(self):
        return filter_dict({self.basename: self._version, self.probe_basename: self._probe_version})

    @functools.cached_property
    def basename(self):
        _ = self._version  # run property
        return self.basename

    @functools.cached_property
    def probe_basename(self):
        _ = self._probe_version  # run property
        return self.probe_basename

    def _get_version(self, kind):
        executables = (kind, )
        if not self._prefer_ffmpeg:
            executables = (kind, self._ffmpeg_to_avconv[kind])
        basename, version, features = next(filter(
            lambda x: x[1], ((p, *self._get_ffmpeg_version(p)) for p in executables)), (None, None, {}))
        if kind == 'ffmpeg':
            self.basename, self._features = basename, features
        else:
            self.probe_basename = basename
        if basename == self._ffmpeg_to_avconv[kind]:
            self.deprecated_feature(f'Support for {self._ffmpeg_to_avconv[kind]} is deprecated and '
                                    f'may be removed in a future version. Use {kind} instead')
        return version

    @functools.cached_property
    def _version(self):
        return self._get_version('ffmpeg')

    @functools.cached_property
    def _probe_version(self):
        return self._get_version('ffprobe')

    @property
    def available(self):
        return self.basename is not None

    @property
    def executable(self):
        return self._paths.get(self.basename)

    @property
    def probe_available(self):
        return self.probe_basename is not None

    @property
    def probe_executable(self):
        return self._paths.get(self.probe_basename)

    @staticmethod
    def stream_copy_opts(copy=True, *, ext=None):
        yield from ('-map', '0')
        # Don't copy Apple TV chapters track, bin_data
        # See https://github.com/yt-dlp/yt-dlp/issues/2, #19042, #19024, https://trac.ffmpeg.org/ticket/6016
        yield from ('-dn', '-ignore_unknown')
        if copy:
            yield from ('-c', 'copy')
        if ext in ('mp4', 'mov', 'm4a'):
            yield from ('-c:s', 'mov_text')
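        # Illustrative: list(FFmpegPostProcessor.stream_copy_opts(ext='mp4')) ==
        # ['-map', '0', '-dn', '-ignore_unknown', '-c', 'copy', '-c:s', 'mov_text']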

    def check_version(self):
        if not self.available:
            raise FFmpegPostProcessorError('ffmpeg not found. Please install or provide the path using --ffmpeg-location')

        required_version = '10-0' if self.basename == 'avconv' else '1.0'
        if is_outdated_version(self._version, required_version):
            self.report_warning(f'Your copy of {self.basename} is outdated, update {self.basename} '
                                f'to version {required_version} or newer if you encounter any errors')

    def get_audio_codec(self, path):
        if not self.probe_available and not self.available:
            raise PostProcessingError('ffprobe and ffmpeg not found. Please install or provide the path using --ffmpeg-location')
        try:
            if self.probe_available:
                cmd = [
                    self.probe_executable,
                    encodeArgument('-show_streams')]
            else:
                cmd = [
                    self.executable,
                    encodeArgument('-i')]
            cmd.append(self._ffmpeg_filename_argument(path))
            self.write_debug(f'{self.basename} command line: {shell_quote(cmd)}')
            stdout, stderr, returncode = Popen.run(
                cmd, text=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            if returncode != (0 if self.probe_available else 1):
                return None
        except OSError:
            return None
        output = stdout if self.probe_available else stderr
        if self.probe_available:
            audio_codec = None
            for line in output.split('\n'):
                if line.startswith('codec_name='):
                    audio_codec = line.split('=')[1].strip()
                elif line.strip() == 'codec_type=audio' and audio_codec is not None:
                    return audio_codec
        else:
            # Stream #FILE_INDEX:STREAM_INDEX[STREAM_ID](LANGUAGE): CODEC_TYPE: CODEC_NAME
            mobj = re.search(
                r'Stream\s*#\d+:\d+(?:\[0x[0-9a-f]+\])?(?:\([a-z]{3}\))?:\s*Audio:\s*([0-9a-z]+)',
                output)
            if mobj:
                return mobj.group(1)
        return None

    def get_metadata_object(self, path, opts=[]):
        if self.probe_basename != 'ffprobe':
            if self.probe_available:
                self.report_warning('Only ffprobe is supported for metadata extraction')
            raise PostProcessingError('ffprobe not found. Please install or provide the path using --ffmpeg-location')
        self.check_version()

        cmd = [
            self.probe_executable,
            encodeArgument('-hide_banner'),
            encodeArgument('-show_format'),
            encodeArgument('-show_streams'),
            encodeArgument('-print_format'),
            encodeArgument('json'),
        ]

        cmd += opts
        cmd.append(self._ffmpeg_filename_argument(path))
        self.write_debug(f'ffprobe command line: {shell_quote(cmd)}')
        stdout, _, _ = Popen.run(cmd, text=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
        return json.loads(stdout)

    def get_stream_number(self, path, keys, value):
        streams = self.get_metadata_object(path)['streams']
        num = next(
            (i for i, stream in enumerate(streams) if traverse_obj(stream, keys, casesense=False) == value),
            None)
        return num, len(streams)

    def _fixup_chapters(self, info):
        last_chapter = traverse_obj(info, ('chapters', -1))
        if last_chapter and not last_chapter.get('end_time'):
            last_chapter['end_time'] = self._get_real_video_duration(info['filepath'])

    def _get_real_video_duration(self, filepath, fatal=True):
        try:
            duration = float_or_none(
                traverse_obj(self.get_metadata_object(filepath), ('format', 'duration')))
            if not duration:
                raise PostProcessingError('ffprobe returned empty duration')
            return duration
        except PostProcessingError as e:
            if fatal:
                raise PostProcessingError(f'Unable to determine video duration: {e.msg}')

    def _duration_mismatch(self, d1, d2, tolerance=2):
        if not d1 or not d2:
            return None
        # The duration is often only known to the nearest second, so there can be a <1sec disparity naturally.
        # Further excuse an additional <1sec difference.
        return abs(d1 - d2) > tolerance

    def run_ffmpeg_multiple_files(self, input_paths, out_path, opts, **kwargs):
        return self.real_run_ffmpeg(
            [(path, []) for path in input_paths],
            [(out_path, opts)], **kwargs)

    def real_run_ffmpeg(self, input_path_opts, output_path_opts, *, expected_retcodes=(0,)):
        self.check_version()

        oldest_mtime = min(
            os.stat(path).st_mtime for path, _ in input_path_opts if path)

        cmd = [self.executable, encodeArgument('-y')]
        # avconv does not have repeat option
        if self.basename == 'ffmpeg':
            cmd += [encodeArgument('-loglevel'), encodeArgument('repeat+info')]

        def make_args(file, args, name, number):
            keys = [f'_{name}{number}', f'_{name}']
            if name == 'o':
                args += ['-movflags', '+faststart']
                if number == 1:
                    keys.append('')
            args += self._configuration_args(self.basename, keys)
            if name == 'i':
                args.append('-i')
            return (
                [encodeArgument(arg) for arg in args]
                + [self._ffmpeg_filename_argument(file)])

        for arg_type, path_opts in (('i', input_path_opts), ('o', output_path_opts)):
            cmd += itertools.chain.from_iterable(
                make_args(path, list(opts), arg_type, i + 1)
                for i, (path, opts) in enumerate(path_opts) if path)

        self.write_debug(f'ffmpeg command line: {shell_quote(cmd)}')
        _, stderr, returncode = Popen.run(
            cmd, text=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
        if returncode not in variadic(expected_retcodes):
            self.write_debug(stderr)
            raise FFmpegPostProcessorError(stderr.strip().splitlines()[-1])
        for out_path, _ in output_path_opts:
            if out_path:
                self.try_utime(out_path, oldest_mtime, oldest_mtime)
        return stderr
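        # Rough shape of the command assembled above for a single-input call like
        # run_ffmpeg('in.mp4', 'out.mkv', ['-c', 'copy']) (hypothetical filenames,
        # no --postprocessor-args configured):
        #   ffmpeg -y -loglevel repeat+info -i file:in.mp4 -c copy -movflags +faststart file:out.mkv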

    def run_ffmpeg(self, path, out_path, opts, **kwargs):
        return self.run_ffmpeg_multiple_files([path], out_path, opts, **kwargs)

    @staticmethod
    def _ffmpeg_filename_argument(fn):
        # Always use 'file:' because the filename may contain ':' (ffmpeg
        # interprets that as a protocol) or can start with '-' (-- is broken in
        # ffmpeg, see https://ffmpeg.org/trac/ffmpeg/ticket/2127 for details)
        # Also leave '-' intact in order not to break streaming to stdout.
        if fn.startswith(('http://', 'https://')):
            return fn
        return 'file:' + fn if fn != '-' else fn

    @staticmethod
    def _quote_for_ffmpeg(string):
        # See https://ffmpeg.org/ffmpeg-utils.html#toc-Quoting-and-escaping
        # A sequence of '' produces '\'''\'';
        # final replace removes the empty '' between \' \'.
        string = string.replace("'", r"'\''").replace("'''", "'")
        # Handle potential ' at string boundaries.
        string = string[1:] if string[0] == "'" else "'" + string
        return string[:-1] if string[-1] == "'" else string + "'"
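        # Illustrative: _quote_for_ffmpeg("it's") returns 'it'\''s' - ffmpeg-style single
        # quoting as used when writing the concat spec below.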

    def force_keyframes(self, filename, timestamps):
        timestamps = orderedSet(timestamps)
        if timestamps[0] == 0:
            timestamps = timestamps[1:]
        keyframe_file = prepend_extension(filename, 'keyframes.temp')
        self.to_screen(f'Re-encoding "{filename}" with appropriate keyframes')
        self.run_ffmpeg(filename, keyframe_file, [
            *self.stream_copy_opts(False, ext=determine_ext(filename)),
            '-force_key_frames', ','.join(f'{t:.6f}' for t in timestamps)])
        return keyframe_file
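        # Stream-copied cuts can only land on existing keyframes, so callers that need exact
        # boundaries (e.g. FFmpegSplitChaptersPP with force_keyframes) re-encode first with
        # keyframes forced at the requested timestamps.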

    def concat_files(self, in_files, out_file, concat_opts=None):
        """
        Use concat demuxer to concatenate multiple files having identical streams.

        Only inpoint, outpoint, and duration concat options are supported.
        See https://ffmpeg.org/ffmpeg-formats.html#concat-1 for details
        """
        concat_file = f'{out_file}.concat'
        self.write_debug(f'Writing concat spec to {concat_file}')
        with open(concat_file, 'w', encoding='utf-8') as f:
            f.writelines(self._concat_spec(in_files, concat_opts))

        out_flags = list(self.stream_copy_opts(ext=determine_ext(out_file)))

        self.real_run_ffmpeg(
            [(concat_file, ['-hide_banner', '-nostdin', '-f', 'concat', '-safe', '0'])],
            [(out_file, out_flags)])
        self._delete_downloaded_files(concat_file)

    @classmethod
    def _concat_spec(cls, in_files, concat_opts=None):
        if concat_opts is None:
            concat_opts = [{}] * len(in_files)
        yield 'ffconcat version 1.0\n'
        for file, opts in zip(in_files, concat_opts):
            yield f'file {cls._quote_for_ffmpeg(cls._ffmpeg_filename_argument(file))}\n'
            # Iterate explicitly to yield the following directives in order, ignoring the rest.
            for directive in 'inpoint', 'outpoint', 'duration':
                if directive in opts:
                    yield f'{directive} {opts[directive]}\n'
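        # Illustrative spec for two hypothetical inputs, the second starting at 10.5s:
        #   ffconcat version 1.0
        #   file 'file:part1.mp4'
        #   file 'file:part2.mp4'
        #   inpoint 10.5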


class FFmpegExtractAudioPP(FFmpegPostProcessor):
    COMMON_AUDIO_EXTS = (*MEDIA_EXTENSIONS.common_audio, 'wma')
    SUPPORTED_EXTS = tuple(ACODECS.keys())
    FORMAT_RE = create_mapping_re(('best', *SUPPORTED_EXTS))

    def __init__(self, downloader=None, preferredcodec=None, preferredquality=None, nopostoverwrites=False):
        FFmpegPostProcessor.__init__(self, downloader)
        self.mapping = preferredcodec or 'best'
        self._preferredquality = float_or_none(preferredquality)
        self._nopostoverwrites = nopostoverwrites

    def _quality_args(self, codec):
        if self._preferredquality is None:
            return []
        elif self._preferredquality > 10:
            return ['-b:a', f'{self._preferredquality}k']

        limits = {
            'libmp3lame': (10, 0),
            'libvorbis': (0, 10),
            # FFmpeg's AAC encoder does not have an upper limit for the value of -q:a.
            # Experimentally, with values over 4, bitrate changes were minimal or non-existent
            'aac': (0.1, 4),
            'libfdk_aac': (1, 5),
        }.get(codec)
        if not limits:
            return []

        q = limits[1] + (limits[0] - limits[1]) * (self._preferredquality / 10)
        if codec == 'libfdk_aac':
            return ['-vbr', f'{int(q)}']
        return ['-q:a', f'{q}']
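        # Illustrative: a preferred quality of 5 with libmp3lame yields ['-q:a', '5.0'];
        # values above 10 are treated as a bitrate, e.g. 192 -> ['-b:a', '192.0k'].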

    def run_ffmpeg(self, path, out_path, codec, more_opts):
        if codec is None:
            acodec_opts = []
        else:
            acodec_opts = ['-acodec', codec]
        opts = ['-vn', *acodec_opts, *more_opts]
        try:
            FFmpegPostProcessor.run_ffmpeg(self, path, out_path, opts)
        except FFmpegPostProcessorError as err:
            raise PostProcessingError(f'audio conversion failed: {err.msg}')

    @PostProcessor._restrict_to(images=False)
    def run(self, information):
        orig_path = path = information['filepath']
        target_format, _skip_msg = resolve_mapping(information['ext'], self.mapping)
        if target_format == 'best' and information['ext'] in self.COMMON_AUDIO_EXTS:
            target_format, _skip_msg = None, 'the file is already in a common audio format'
        if not target_format:
            self.to_screen(f'Not converting audio {orig_path}; {_skip_msg}')
            return [], information

        filecodec = self.get_audio_codec(path)
        if filecodec is None:
            raise PostProcessingError('WARNING: unable to obtain file audio codec with ffprobe')

        if filecodec == 'aac' and target_format in ('m4a', 'best'):
            # Lossless, but in another container
            extension, _, more_opts, acodec = *ACODECS['m4a'], 'copy'
        elif target_format == 'best' or target_format == filecodec:
            # Lossless if possible
            try:
                extension, _, more_opts, acodec = *ACODECS[filecodec], 'copy'
            except KeyError:
                extension, acodec, more_opts = ACODECS['mp3']
        else:
            # We convert the audio (lossy if codec is lossy)
            extension, acodec, more_opts = ACODECS[target_format]
            if acodec == 'aac' and self._features.get('fdk'):
                acodec, more_opts = 'libfdk_aac', []

        more_opts = list(more_opts)
        if acodec != 'copy':
            more_opts = self._quality_args(acodec)

        temp_path = new_path = replace_extension(path, extension, information['ext'])

        if new_path == path:
            if acodec == 'copy':
                self.to_screen(f'Not converting audio {orig_path}; file is already in target format {target_format}')
                return [], information
            orig_path = prepend_extension(path, 'orig')
            temp_path = prepend_extension(path, 'temp')
        if (self._nopostoverwrites and os.path.exists(new_path)
                and os.path.exists(orig_path)):
            self.to_screen(f'Post-process file {new_path} exists, skipping')
            return [], information

        self.to_screen(f'Destination: {new_path}')
        self.run_ffmpeg(path, temp_path, acodec, more_opts)

        os.replace(path, orig_path)
        os.replace(temp_path, new_path)
        information['filepath'] = new_path
        information['ext'] = extension

        # Try to update the date time for extracted audio file.
        if information.get('filetime') is not None:
            self.try_utime(
                new_path, time.time(), information['filetime'], errnote='Cannot update utime of audio file')

        return [orig_path], information


class FFmpegVideoConvertorPP(FFmpegPostProcessor):
    SUPPORTED_EXTS = (
        *sorted((*MEDIA_EXTENSIONS.common_video, 'gif')),
        *sorted((*MEDIA_EXTENSIONS.common_audio, 'aac', 'vorbis')),
    )
    FORMAT_RE = create_mapping_re(SUPPORTED_EXTS)
    _ACTION = 'converting'

    def __init__(self, downloader=None, preferedformat=None):
        super().__init__(downloader)
        self.mapping = preferedformat

    @staticmethod
    def _options(target_ext):
        yield from FFmpegPostProcessor.stream_copy_opts(False)
        if target_ext == 'avi':
            yield from ('-c:v', 'libxvid', '-vtag', 'XVID')
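        # Note: stream_copy_opts(False) only maps/keeps streams without forcing '-c copy',
        # so conversion re-encodes with ffmpeg's default codecs for the target container;
        # only avi gets an explicit video encoder here.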

    @PostProcessor._restrict_to(images=False)
    def run(self, info):
        filename, source_ext = info['filepath'], info['ext'].lower()
        target_ext, _skip_msg = resolve_mapping(source_ext, self.mapping)
        if _skip_msg:
            self.to_screen(f'Not {self._ACTION} media file "{filename}"; {_skip_msg}')
            return [], info

        outpath = replace_extension(filename, target_ext, source_ext)
        self.to_screen(f'{self._ACTION.title()} video from {source_ext} to {target_ext}; Destination: {outpath}')
        self.run_ffmpeg(filename, outpath, self._options(target_ext))

        info['filepath'] = outpath
        info['format'] = info['ext'] = target_ext
        return [filename], info


class FFmpegVideoRemuxerPP(FFmpegVideoConvertorPP):
    _ACTION = 'remuxing'

    @staticmethod
    def _options(target_ext):
        return FFmpegPostProcessor.stream_copy_opts()


class FFmpegEmbedSubtitlePP(FFmpegPostProcessor):
    SUPPORTED_EXTS = ('mp4', 'mov', 'm4a', 'webm', 'mkv', 'mka')

    def __init__(self, downloader=None, already_have_subtitle=False):
        super().__init__(downloader)
        self._already_have_subtitle = already_have_subtitle

    @PostProcessor._restrict_to(images=False)
    def run(self, info):
        if info['ext'] not in self.SUPPORTED_EXTS:
            self.to_screen(f'Subtitles can only be embedded in {", ".join(self.SUPPORTED_EXTS)} files')
            return [], info
        subtitles = info.get('requested_subtitles')
        if not subtitles:
            self.to_screen('There aren\'t any subtitles to embed')
            return [], info

        filename = info['filepath']

        # Disabled temporarily. There needs to be a way to override this
        # in case of duration actually mismatching in extractor
        # See: https://github.com/yt-dlp/yt-dlp/issues/1870, https://github.com/yt-dlp/yt-dlp/issues/1385
        '''
        if info.get('duration') and not info.get('__real_download') and self._duration_mismatch(
                self._get_real_video_duration(filename, False), info['duration']):
            self.to_screen(f'Skipping {self.pp_key()} since the real and expected durations mismatch')
            return [], info
        '''

        ext = info['ext']
        sub_langs, sub_names, sub_filenames = [], [], []
        webm_vtt_warn = False
        mp4_ass_warn = False

        for lang, sub_info in subtitles.items():
            if not os.path.exists(sub_info.get('filepath', '')):
                self.report_warning(f'Skipping embedding {lang} subtitle because the file is missing')
                continue
            sub_ext = sub_info['ext']
            if sub_ext == 'json':
                self.report_warning('JSON subtitles cannot be embedded')
            elif ext != 'webm' or ext == 'webm' and sub_ext == 'vtt':
                sub_langs.append(lang)
                sub_names.append(sub_info.get('name'))
                sub_filenames.append(sub_info['filepath'])
            else:
                if not webm_vtt_warn and ext == 'webm' and sub_ext != 'vtt':
                    webm_vtt_warn = True
                    self.report_warning('Only WebVTT subtitles can be embedded in webm files')
                if not mp4_ass_warn and ext == 'mp4' and sub_ext == 'ass':
                    mp4_ass_warn = True
                    self.report_warning('ASS subtitles cannot be properly embedded in mp4 files; expect issues')

        if not sub_langs:
            return [], info

        input_files = [filename, *sub_filenames]

        opts = [
            *self.stream_copy_opts(ext=info['ext']),
            # Don't copy the existing subtitles, we may be running the
            # postprocessor a second time
            '-map', '-0:s',
        ]
        for i, (lang, name) in enumerate(zip(sub_langs, sub_names)):
            opts.extend(['-map', f'{i + 1}:0'])
            lang_code = ISO639Utils.short2long(lang) or lang
            opts.extend([f'-metadata:s:s:{i}', f'language={lang_code}'])
            if name:
                opts.extend([f'-metadata:s:s:{i}', f'handler_name={name}',
                             f'-metadata:s:s:{i}', f'title={name}'])

        temp_filename = prepend_extension(filename, 'temp')
        self.to_screen(f'Embedding subtitles in "{filename}"')
        self.run_ffmpeg_multiple_files(input_files, temp_filename, opts)
        os.replace(temp_filename, filename)

        files_to_delete = [] if self._already_have_subtitle else sub_filenames
        return files_to_delete, info


class FFmpegMetadataPP(FFmpegPostProcessor):

    def __init__(self, downloader, add_metadata=True, add_chapters=True, add_infojson='if_exists'):
        FFmpegPostProcessor.__init__(self, downloader)
        self._add_metadata = add_metadata
        self._add_chapters = add_chapters
        self._add_infojson = add_infojson

    @staticmethod
    def _options(target_ext):
        audio_only = target_ext == 'm4a'
        yield from FFmpegPostProcessor.stream_copy_opts(not audio_only)
        if audio_only:
            yield from ('-vn', '-acodec', 'copy')

    @PostProcessor._restrict_to(images=False)
    def run(self, info):
        self._fixup_chapters(info)
        filename, metadata_filename = info['filepath'], None
        files_to_delete, options = [], []
        if self._add_chapters and info.get('chapters'):
            metadata_filename = replace_extension(filename, 'meta')
            options.extend(self._get_chapter_opts(info['chapters'], metadata_filename))
            files_to_delete.append(metadata_filename)
        if self._add_metadata:
            options.extend(self._get_metadata_opts(info))

        if self._add_infojson:
            if info['ext'] in ('mkv', 'mka'):
                infojson_filename = info.get('infojson_filename')
                options.extend(self._get_infojson_opts(info, infojson_filename))
                if not infojson_filename:
                    files_to_delete.append(info.get('infojson_filename'))
            elif self._add_infojson is True:
                self.to_screen('The info-json can only be attached to mkv/mka files')

        if not options:
            self.to_screen('There isn\'t any metadata to add')
            return [], info

        temp_filename = prepend_extension(filename, 'temp')
        self.to_screen(f'Adding metadata to "{filename}"')
        self.run_ffmpeg_multiple_files(
            (filename, metadata_filename), temp_filename,
            itertools.chain(self._options(info['ext']), *options))
        self._delete_downloaded_files(*files_to_delete)
        os.replace(temp_filename, filename)
        return [], info

    @staticmethod
    def _get_chapter_opts(chapters, metadata_filename):
        with open(metadata_filename, 'w', encoding='utf-8') as f:
            def ffmpeg_escape(text):
                return re.sub(r'([\\=;#\n])', r'\\\1', text)

            metadata_file_content = ';FFMETADATA1\n'
            for chapter in chapters:
                metadata_file_content += '[CHAPTER]\nTIMEBASE=1/1000\n'
                metadata_file_content += 'START=%d\n' % (chapter['start_time'] * 1000)
                metadata_file_content += 'END=%d\n' % (chapter['end_time'] * 1000)
                chapter_title = chapter.get('title')
                if chapter_title:
                    metadata_file_content += f'title={ffmpeg_escape(chapter_title)}\n'
            f.write(metadata_file_content)
        yield ('-map_metadata', '1')
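        # The generated FFMETADATA file looks roughly like this (hypothetical 60s chapter):
        #   ;FFMETADATA1
        #   [CHAPTER]
        #   TIMEBASE=1/1000
        #   START=0
        #   END=60000
        #   title=Intro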

    def _get_metadata_opts(self, info):
        meta_prefix = 'meta'
        metadata = collections.defaultdict(dict)

        def add(meta_list, info_list=None):
            value = next((
                info[key] for key in [f'{meta_prefix}_', *variadic(info_list or meta_list)]
                if info.get(key) is not None), None)
            if value not in ('', None):
                value = ', '.join(map(str, variadic(value)))
                value = value.replace('\0', '')  # nul character cannot be passed in command line
                metadata['common'].update({meta_f: value for meta_f in variadic(meta_list)})

        # Info on media metadata/metadata supported by ffmpeg:
        # https://wiki.multimedia.cx/index.php/FFmpeg_Metadata
        # https://kdenlive.org/en/project/adding-meta-data-to-mp4-video/
        # https://kodi.wiki/view/Video_file_tagging

        add('title', ('track', 'title'))
        add('date', 'upload_date')
        add(('description', 'synopsis'), 'description')
        add(('purl', 'comment'), 'webpage_url')
        add('track', 'track_number')
        add('artist', ('artist', 'artists', 'creator', 'creators', 'uploader', 'uploader_id'))
        add('composer', ('composer', 'composers'))
        add('genre', ('genre', 'genres'))
        add('album')
        add('album_artist', ('album_artist', 'album_artists'))
        add('disc', 'disc_number')
        add('show', 'series')
        add('season_number')
        add('episode_id', ('episode', 'episode_id'))
        add('episode_sort', 'episode_number')
        if 'embed-metadata' in self.get_param('compat_opts', []):
            add('comment', 'description')
            metadata['common'].pop('synopsis', None)

        meta_regex = rf'{re.escape(meta_prefix)}(?P<i>\d+)?_(?P<key>.+)'
        for key, value in info.items():
            mobj = re.fullmatch(meta_regex, key)
            if value is not None and mobj:
                metadata[mobj.group('i') or 'common'][mobj.group('key')] = value.replace('\0', '')

        # Write id3v1 metadata also since Windows Explorer can't handle id3v2 tags
        yield ('-write_id3v1', '1')

        for name, value in metadata['common'].items():
            yield ('-metadata', f'{name}={value}')

        stream_idx = 0
        for fmt in info.get('requested_formats') or [info]:
            stream_count = 2 if 'none' not in (fmt.get('vcodec'), fmt.get('acodec')) else 1
            lang = ISO639Utils.short2long(fmt.get('language') or '') or fmt.get('language')
            for i in range(stream_idx, stream_idx + stream_count):
                if lang:
                    metadata[str(i)].setdefault('language', lang)
                for name, value in metadata[str(i)].items():
                    yield (f'-metadata:s:{i}', f'{name}={value}')
            stream_idx += stream_count
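        # Illustrative: an info field 'meta_album' (hypothetical value 'X') becomes
        # '-metadata album=X', while 'meta1_language' = 'eng' only targets stream 1,
        # i.e. '-metadata:s:1 language=eng'.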

    def _get_infojson_opts(self, info, infofn):
        if not infofn or not os.path.exists(infofn):
            if self._add_infojson is not True:
                return
            infofn = infofn or '%s.temp' % (
                self._downloader.prepare_filename(info, 'infojson')
                or replace_extension(self._downloader.prepare_filename(info), 'info.json', info['ext']))
            if not self._downloader._ensure_dir_exists(infofn):
                return
            self.write_debug(f'Writing info-json to: {infofn}')
            write_json_file(self._downloader.sanitize_info(info, self.get_param('clean_infojson', True)), infofn)
            info['infojson_filename'] = infofn

        old_stream, new_stream = self.get_stream_number(info['filepath'], ('tags', 'mimetype'), 'application/json')
        if old_stream is not None:
            yield ('-map', f'-0:{old_stream}')
            new_stream -= 1

        yield (
            '-attach', self._ffmpeg_filename_argument(infofn),
            f'-metadata:s:{new_stream}', 'mimetype=application/json',
            f'-metadata:s:{new_stream}', 'filename=info.json',
        )


class FFmpegMergerPP(FFmpegPostProcessor):
    SUPPORTED_EXTS = MEDIA_EXTENSIONS.common_video

    @PostProcessor._restrict_to(images=False)
    def run(self, info):
        filename = info['filepath']
        temp_filename = prepend_extension(filename, 'temp')
        args = ['-c', 'copy']
        audio_streams = 0
        for (i, fmt) in enumerate(info['requested_formats']):
            if fmt.get('acodec') != 'none':
                args.extend(['-map', f'{i}:a:0'])
                aac_fixup = fmt['protocol'].startswith('m3u8') and self.get_audio_codec(fmt['filepath']) == 'aac'
                if aac_fixup:
                    args.extend([f'-bsf:a:{audio_streams}', 'aac_adtstoasc'])
                audio_streams += 1
            if fmt.get('vcodec') != 'none':
                args.extend(['-map', f'{i}:v:0'])
        self.to_screen(f'Merging formats into "{filename}"')
        self.run_ffmpeg_multiple_files(info['__files_to_merge'], temp_filename, args)
        os.rename(temp_filename, filename)
        return info['__files_to_merge'], info
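        # Mapping scheme above: each requested format contributes its first audio ('-map i:a:0')
        # and/or video ('-map i:v:0') stream; HLS AAC audio additionally gets the aac_adtstoasc
        # bitstream filter applied per output audio stream.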

    def can_merge(self):
        # TODO: figure out merge-capable ffmpeg version
        if self.basename != 'avconv':
            return True

        required_version = '10-0'
        if is_outdated_version(
                self._versions[self.basename], required_version):
            warning = (f'Your copy of {self.basename} is outdated and unable to properly mux separate video and audio files, '
                       'yt-dlp will download single file media. '
                       f'Update {self.basename} to version {required_version} or newer to fix this.')
            self.report_warning(warning)
            return False
        return True


class FFmpegFixupPostProcessor(FFmpegPostProcessor):
    def _fixup(self, msg, filename, options):
        temp_filename = prepend_extension(filename, 'temp')

        self.to_screen(f'{msg} of "{filename}"')
        self.run_ffmpeg(filename, temp_filename, options)

        os.replace(temp_filename, filename)


class FFmpegFixupStretchedPP(FFmpegFixupPostProcessor):
    @PostProcessor._restrict_to(images=False, audio=False)
    def run(self, info):
        stretched_ratio = info.get('stretched_ratio')
        if stretched_ratio not in (None, 1):
            self._fixup('Fixing aspect ratio', info['filepath'], [
                *self.stream_copy_opts(), '-aspect', f'{stretched_ratio:f}'])
        return [], info


class FFmpegFixupM4aPP(FFmpegFixupPostProcessor):
    @PostProcessor._restrict_to(images=False, video=False)
    def run(self, info):
        if info.get('container') == 'm4a_dash':
            self._fixup('Correcting container', info['filepath'], [*self.stream_copy_opts(), '-f', 'mp4'])
        return [], info


class FFmpegFixupM3u8PP(FFmpegFixupPostProcessor):
    def _needs_fixup(self, info):
        yield info['ext'] in ('mp4', 'm4a')
        yield info['protocol'].startswith('m3u8')
        try:
            metadata = self.get_metadata_object(info['filepath'])
        except PostProcessingError as e:
            self.report_warning(f'Unable to extract metadata: {e.msg}')
            yield True
        else:
            yield traverse_obj(metadata, ('format', 'format_name'), casesense=False) == 'mpegts'
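        # All three checks above must pass (see run below): an mp4/m4a extension, an m3u8-based
        # protocol, and the file actually probing as MPEG-TS (or the probe failing outright).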

    @PostProcessor._restrict_to(images=False)
    def run(self, info):
        if all(self._needs_fixup(info)):
            args = ['-f', 'mp4']
            if self.get_audio_codec(info['filepath']) == 'aac':
                args.extend(['-bsf:a', 'aac_adtstoasc'])
            self._fixup('Fixing MPEG-TS in MP4 container', info['filepath'], [
                *self.stream_copy_opts(), *args])
        return [], info


class FFmpegFixupTimestampPP(FFmpegFixupPostProcessor):

    def __init__(self, downloader=None, trim=0.001):
        # "trim" should be used when the video contains unintended packets
        super().__init__(downloader)
        assert isinstance(trim, (int, float))
        self.trim = str(trim)

    @PostProcessor._restrict_to(images=False)
    def run(self, info):
        if not self._features.get('setts'):
            self.report_warning(
                'A re-encode is needed to fix timestamps in older versions of ffmpeg. '
                'Please install ffmpeg 4.4 or later to fixup without re-encoding')
            opts = ['-vf', 'setpts=PTS-STARTPTS']
        else:
            opts = ['-c', 'copy', '-bsf', 'setts=ts=TS-STARTPTS']
        self._fixup('Fixing frame timestamp', info['filepath'], [*opts, *self.stream_copy_opts(False), '-ss', self.trim])
        return [], info


class FFmpegCopyStreamPP(FFmpegFixupPostProcessor):
    MESSAGE = 'Copying stream'

    @PostProcessor._restrict_to(images=False)
    def run(self, info):
        self._fixup(self.MESSAGE, info['filepath'], self.stream_copy_opts())
        return [], info


class FFmpegFixupDurationPP(FFmpegCopyStreamPP):
    MESSAGE = 'Fixing video duration'


class FFmpegFixupDuplicateMoovPP(FFmpegCopyStreamPP):
    MESSAGE = 'Fixing duplicate MOOV atoms'


class FFmpegSubtitlesConvertorPP(FFmpegPostProcessor):
    SUPPORTED_EXTS = MEDIA_EXTENSIONS.subtitles

    def __init__(self, downloader=None, format=None):
        super().__init__(downloader)
        self.format = format

    def run(self, info):
        subs = info.get('requested_subtitles')
        new_ext = self.format
        new_format = new_ext
        if new_format == 'vtt':
            new_format = 'webvtt'
        if subs is None:
            self.to_screen('There aren\'t any subtitles to convert')
            return [], info
        self.to_screen('Converting subtitles')
        sub_filenames = []
        for lang, sub in subs.items():
            if not os.path.exists(sub.get('filepath', '')):
                self.report_warning(f'Skipping embedding {lang} subtitle because the file is missing')
                continue
            ext = sub['ext']
            if ext == new_ext:
                self.to_screen(f'Subtitle file for {new_ext} is already in the requested format')
                continue
            elif ext == 'json':
                self.to_screen(
                    'You have requested to convert json subtitles into another format, '
                    'which is currently not possible')
                continue
            old_file = sub['filepath']
            sub_filenames.append(old_file)
            new_file = replace_extension(old_file, new_ext)

            if ext in ('dfxp', 'ttml', 'tt'):
                self.report_warning(
                    'You have requested to convert dfxp (TTML) subtitles into another format, '
                    'which results in style information loss')

                dfxp_file = old_file
                srt_file = replace_extension(old_file, 'srt')

                with open(dfxp_file, 'rb') as f:
                    srt_data = dfxp2srt(f.read())

                with open(srt_file, 'w', encoding='utf-8') as f:
                    f.write(srt_data)
                old_file = srt_file

                subs[lang] = {
                    'ext': 'srt',
                    'data': srt_data,
                    'filepath': srt_file,
                }

                if new_ext == 'srt':
                    continue
                else:
                    sub_filenames.append(srt_file)

            self.run_ffmpeg(old_file, new_file, ['-f', new_format])

            with open(new_file, encoding='utf-8') as f:
                subs[lang] = {
                    'ext': new_ext,
                    'data': f.read(),
                    'filepath': new_file,
                }

            info['__files_to_move'][new_file] = replace_extension(
                info['__files_to_move'][sub['filepath']], new_ext)

        return sub_filenames, info
|
2021-03-14 17:02:13 -06:00
|
|
|
|
|
|
|
|
|
|
|


class FFmpegSplitChaptersPP(FFmpegPostProcessor):
    def __init__(self, downloader, force_keyframes=False):
        FFmpegPostProcessor.__init__(self, downloader)
        self._force_keyframes = force_keyframes

    def _prepare_filename(self, number, chapter, info):
        info = info.copy()
        info.update({
            'section_number': number,
            'section_title': chapter.get('title'),
            'section_start': chapter.get('start_time'),
            'section_end': chapter.get('end_time'),
        })
        return self._downloader.prepare_filename(info, 'chapter')

    def _ffmpeg_args_for_chapter(self, number, chapter, info):
        destination = self._prepare_filename(number, chapter, info)
        if not self._downloader._ensure_dir_exists(destination):
            return

        chapter['filepath'] = destination
        self.to_screen('Chapter %03d; Destination: %s' % (number, destination))
        return (
            destination,
            ['-ss', str(chapter['start_time']),
             '-t', str(chapter['end_time'] - chapter['start_time'])])
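
    # Illustrative note (not part of the upstream code): for a hypothetical
    # chapter spanning 0-30 seconds, the tuple returned above causes run()
    # below to build roughly `ffmpeg -ss 0 -t 30 -i INPUT <stream-copy opts>
    # DESTINATION`, with the exact copy options supplied by stream_copy_opts().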

    @PostProcessor._restrict_to(images=False)
    def run(self, info):
        self._fixup_chapters(info)
        chapters = info.get('chapters') or []
        if not chapters:
            self.to_screen('Chapter information is unavailable')
            return [], info

        in_file = info['filepath']
        if self._force_keyframes and len(chapters) > 1:
            in_file = self.force_keyframes(in_file, (c['start_time'] for c in chapters))
        self.to_screen(f'Splitting video by chapters; {len(chapters)} chapters found')
        for idx, chapter in enumerate(chapters):
            destination, opts = self._ffmpeg_args_for_chapter(idx + 1, chapter, info)
            self.real_run_ffmpeg([(in_file, opts)], [(destination, self.stream_copy_opts())])
        if in_file != info['filepath']:
            self._delete_downloaded_files(in_file, msg=None)
        return [], info
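

# Example (illustrative only): this post-processor backs the --split-chapters
# CLI option. Attaching it programmatically looks roughly like
#
#   ydl.add_post_processor(FFmpegSplitChaptersPP(ydl, force_keyframes=False))
#
# where `ydl` is an existing YoutubeDL instance; treat the call shape as a
# sketch rather than a stable API contract.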


class FFmpegThumbnailsConvertorPP(FFmpegPostProcessor):
    SUPPORTED_EXTS = MEDIA_EXTENSIONS.thumbnails
    FORMAT_RE = create_mapping_re(SUPPORTED_EXTS)

    def __init__(self, downloader=None, format=None):
        super().__init__(downloader)
        self.mapping = format

    @classmethod
    def is_webp(cls, path):
        deprecation_warning(f'{cls.__module__}.{cls.__name__}.is_webp is deprecated')
        return imghdr.what(path) == 'webp'

    def fixup_webp(self, info, idx=-1):
        thumbnail_filename = info['thumbnails'][idx]['filepath']
        _, thumbnail_ext = os.path.splitext(thumbnail_filename)
        if thumbnail_ext:
            if thumbnail_ext.lower() != '.webp' and imghdr.what(thumbnail_filename) == 'webp':
                self.to_screen(f'Correcting thumbnail "{thumbnail_filename}" extension to webp')
                webp_filename = replace_extension(thumbnail_filename, 'webp')
                os.replace(thumbnail_filename, webp_filename)
                info['thumbnails'][idx]['filepath'] = webp_filename
                info['__files_to_move'][webp_filename] = replace_extension(
                    info['__files_to_move'].pop(thumbnail_filename), 'webp')

    @staticmethod
    def _options(target_ext):
        yield from ('-update', '1')
        if target_ext == 'jpg':
            yield from ('-bsf:v', 'mjpeg2jpeg')
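
    # For reference (derived from the generator above, no extra behaviour):
    # converting to jpg yields ('-update', '1', '-bsf:v', 'mjpeg2jpeg'),
    # while other targets such as png or webp yield just ('-update', '1').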

    def convert_thumbnail(self, thumbnail_filename, target_ext):
        thumbnail_conv_filename = replace_extension(thumbnail_filename, target_ext)

        self.to_screen(f'Converting thumbnail "{thumbnail_filename}" to {target_ext}')
        _, source_ext = os.path.splitext(thumbnail_filename)
        self.real_run_ffmpeg(
            [(thumbnail_filename, [] if source_ext == '.gif' else ['-f', 'image2', '-pattern_type', 'none'])],
            [(thumbnail_conv_filename, self._options(target_ext))])
        return thumbnail_conv_filename

    def run(self, info):
        files_to_delete = []
        has_thumbnail = False

        for idx, thumbnail_dict in enumerate(info.get('thumbnails') or []):
            original_thumbnail = thumbnail_dict.get('filepath')
            if not original_thumbnail:
                continue
            has_thumbnail = True
            self.fixup_webp(info, idx)
            original_thumbnail = thumbnail_dict['filepath']  # Path can change during fixup
            thumbnail_ext = os.path.splitext(original_thumbnail)[1][1:].lower()
            # Normalise jpeg -> jpg so the extension matches the mapping keys
            if thumbnail_ext == 'jpeg':
                thumbnail_ext = 'jpg'
            target_ext, _skip_msg = resolve_mapping(thumbnail_ext, self.mapping)
            if _skip_msg:
                self.to_screen(f'Not converting thumbnail "{original_thumbnail}"; {_skip_msg}')
                continue
            thumbnail_dict['filepath'] = self.convert_thumbnail(original_thumbnail, target_ext)
            files_to_delete.append(original_thumbnail)
            info['__files_to_move'][thumbnail_dict['filepath']] = replace_extension(
                info['__files_to_move'][original_thumbnail], target_ext)

        if not has_thumbnail:
            self.to_screen('There aren\'t any thumbnails to convert')
        return files_to_delete, info
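

# Illustrative mapping example (mirrors --convert-thumbnails): with
# self.mapping == 'webp>png/jpg', a downloaded "thumb.webp" is converted to
# "thumb.png", while an existing "thumb.jpg" is skipped because
# resolve_mapping() reports it is already in the target format. The filenames
# are hypothetical.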


class FFmpegConcatPP(FFmpegPostProcessor):
    def __init__(self, downloader, only_multi_video=False):
        self._only_multi_video = only_multi_video
        super().__init__(downloader)

    def _get_codecs(self, file):
        codecs = traverse_obj(self.get_metadata_object(file), ('streams', ..., 'codec_name'))
        self.write_debug(f'Codecs = {", ".join(codecs)}')
        return tuple(codecs)
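
    # Example of the value built above (hypothetical probe result): a file with
    # one h264 video stream and one aac audio stream gives ('h264', 'aac');
    # concat_files() below compares these tuples to reject mismatched inputs.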

    def concat_files(self, in_files, out_file):
        if not self._downloader._ensure_dir_exists(out_file):
            return
        if len(in_files) == 1:
            if os.path.realpath(in_files[0]) != os.path.realpath(out_file):
                self.to_screen(f'Moving "{in_files[0]}" to "{out_file}"')
            os.replace(in_files[0], out_file)
            return []

        if len(set(map(self._get_codecs, in_files))) > 1:
            raise PostProcessingError(
                'The files have different streams/codecs and cannot be concatenated. '
                'Either select different formats or --recode-video them to a common format')

        self.to_screen(f'Concatenating {len(in_files)} files; Destination: {out_file}')
        super().concat_files(in_files, out_file)
        return in_files
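
    # Note on the underlying mechanism (a hand-written approximation, not the
    # literal command): super().concat_files() drives ffmpeg's concat demuxer
    # with stream copying, conceptually
    #
    #   ffmpeg -f concat -safe 0 -i filelist.txt -c copy out_file
    #
    # which is why inputs with mismatched codecs are rejected above.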

    @PostProcessor._restrict_to(images=False, simulated=False)
    def run(self, info):
        entries = info.get('entries') or []
        if not any(entries) or (self._only_multi_video and info['_type'] != 'multi_video'):
            return [], info
        elif traverse_obj(entries, (..., lambda k, v: k == 'requested_downloads' and len(v) > 1)):
            raise PostProcessingError('Concatenation is not supported when downloading multiple separate formats')

        in_files = traverse_obj(entries, (..., 'requested_downloads', 0, 'filepath')) or []
        if len(in_files) < len(entries):
            raise PostProcessingError('Aborting concatenation because some downloads failed')

        exts = traverse_obj(entries, (..., 'requested_downloads', 0, 'ext'), (..., 'ext'))
        # Use a single container for the merged file; fall back to mkv when the sources differ
        ie_copy = collections.ChainMap({'ext': exts[0] if len(set(exts)) == 1 else 'mkv'},
                                       info, self._downloader._playlist_infodict(info))
        out_file = self._downloader.prepare_filename(ie_copy, 'pl_video')

        files_to_delete = self.concat_files(in_files, out_file)

        info['requested_downloads'] = [{
            'filepath': out_file,
            'ext': ie_copy['ext'],
        }]
        return files_to_delete, info
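
# Illustrative behaviour (not a doctest): for a multi_video playlist whose two
# entries downloaded to '1.mp4' and '2.mp4', run() above concatenates them into
# the file named by the 'pl_video' output template and rewrites
# info['requested_downloads'] to point at that single output; when the source
# extensions differ, the merged file falls back to an mkv container.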