
[devscripts/cli_to_api] Add script

pukkandan 2023-05-24 23:29:30 +05:30
parent 69a40e4a7f
commit 46f1370e9a
5 changed files with 62 additions and 11 deletions

devscripts/cli_to_api.py (new file, 48 lines)

@@ -0,0 +1,48 @@
# Allow direct execution
import os
import sys

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

import yt_dlp
import yt_dlp.options

create_parser = yt_dlp.options.create_parser


def parse_patched_options(opts):
    patched_parser = create_parser()
    patched_parser.defaults.update({
        'ignoreerrors': False,
        'retries': 0,
        'fragment_retries': 0,
        'extract_flat': False,
        'concat_playlist': 'never',
    })
    yt_dlp.options.__dict__['create_parser'] = lambda: patched_parser
    try:
        return yt_dlp.parse_options(opts)
    finally:
        yt_dlp.options.__dict__['create_parser'] = create_parser


default_opts = parse_patched_options([]).ydl_opts


def cli_to_api(opts, cli_defaults=False):
    opts = (yt_dlp.parse_options if cli_defaults else parse_patched_options)(opts).ydl_opts
    diff = {k: v for k, v in opts.items() if default_opts[k] != v}
    if 'postprocessors' in diff:
        diff['postprocessors'] = [pp for pp in diff['postprocessors']
                                  if pp not in default_opts['postprocessors']]
    return diff


if __name__ == '__main__':
    from pprint import pprint

    print('\nThe arguments passed translate to:\n')
    pprint(cli_to_api(sys.argv[1:]))
    print('\nCombining these with the CLI defaults gives:\n')
    pprint(cli_to_api(sys.argv[1:], True))
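For context, the same translation can be reproduced interactively with the public yt_dlp.parse_options() helper that this script wraps. The sketch below is illustrative only; the flags shown are arbitrary examples, not taken from the commit.

import yt_dlp

# parse_options() returns a namedtuple whose .ydl_opts field is the dict that
# would be passed to yt_dlp.YoutubeDL() for the given CLI arguments. Unlike
# cli_to_api(), this includes every CLI default, not just the differences.
ydl_opts = yt_dlp.parse_options(['-x', '--audio-format', 'mp3']).ydl_opts
print(ydl_opts['postprocessors'])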

yt_dlp/YoutubeDL.py

@@ -280,7 +280,7 @@ class YoutubeDL:
subtitles. The language can be prefixed with a "-" to
exclude it from the requested languages, e.g. ['all', '-live_chat']
keepvideo: Keep the video file after post-processing
-daterange: A DateRange object, download only if the upload_date is in the range.
+daterange: A utils.DateRange object, download only if the upload_date is in the range.
skip_download: Skip the actual download of the video file
cachedir: Location of the cache files in the filesystem.
False to disable filesystem cache.
@@ -329,13 +329,13 @@ class YoutubeDL:
'auto' for elaborate guessing
encoding: Use this encoding instead of the system-specified.
extract_flat: Whether to resolve and process url_results further
-* False: Always process (default)
+* False: Always process. Default for API
* True: Never process
* 'in_playlist': Do not process inside playlist/multi_video
* 'discard': Always process, but don't return the result
from inside playlist/multi_video
* 'discard_in_playlist': Same as "discard", but only for
-playlists (not multi_video)
+playlists (not multi_video). Default for CLI
wait_for_video: If given, wait for scheduled streams to become available.
The value should be a tuple containing the range
(min_secs, max_secs) to wait between retries
@@ -472,7 +472,7 @@ class YoutubeDL:
can also be used
The following options are used by the extractors:
-extractor_retries: Number of times to retry for known errors
+extractor_retries: Number of times to retry for known errors (default: 3)
dynamic_mpd: Whether to process dynamic DASH manifests (default: True)
hls_split_discontinuity: Split HLS playlists to different formats at
discontinuities such as ad breaks (default: False)
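To make the API/CLI split documented above concrete, here is a minimal hypothetical snippet (not part of this commit; the URL is a placeholder) of an API caller opting into the CLI-style behaviour explicitly:

import yt_dlp
from yt_dlp.utils import DateRange

ydl_opts = {
    'daterange': DateRange('20230101', '20231231'),  # a utils.DateRange, as documented
    'extract_flat': 'discard_in_playlist',           # the CLI default; the API default is False
    'extractor_retries': 3,                          # matches the documented default
}
with yt_dlp.YoutubeDL(ydl_opts) as ydl:
    ydl.download(['https://www.youtube.com/@example/videos'])  # placeholder URL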

yt_dlp/downloader/common.py

@@ -51,8 +51,9 @@ class FileDownloader:
ratelimit: Download speed limit, in bytes/sec.
continuedl: Attempt to continue downloads if possible
throttledratelimit: Assume the download is being throttled below this speed (bytes/sec)
-retries: Number of times to retry for HTTP error 5xx
-file_access_retries: Number of times to retry on file access error
+retries: Number of times to retry for expected network errors.
+                    Default is 0 for API, but 10 for CLI
+file_access_retries: Number of times to retry on file access error (default: 3)
buffersize: Size of download buffer in bytes.
noresizebuffer: Do not automatically resize the download buffer.
continuedl: Try to continue downloads if possible.
@@ -225,7 +226,7 @@ class FileDownloader:
sleep_func=fd.params.get('retry_sleep_functions', {}).get('file_access'))
def wrapper(self, func, *args, **kwargs):
-for retry in RetryManager(self.params.get('file_access_retries'), error_callback, fd=self):
+for retry in RetryManager(self.params.get('file_access_retries', 3), error_callback, fd=self):
try:
return func(self, *args, **kwargs)
except OSError as err:
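As a hedged illustration of the defaults above (not from the commit): the downloader reads these values from the same options dict handed to YoutubeDL, so an API caller wanting the CLI's resilience sets them explicitly, while an unset file_access_retries now falls back to 3 inside the wrapper.

import yt_dlp

ydl_opts = {
    'retries': 10,             # CLI default; the API default is 0
    'file_access_retries': 3,  # explicit, though the code above now falls back to 3 anyway
}
ydl = yt_dlp.YoutubeDL(ydl_opts)
print(ydl.params['retries'], ydl.params['file_access_retries'])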

yt_dlp/downloader/fragment.py

@@ -34,8 +34,8 @@ class FragmentFD(FileDownloader):
Available options:
-fragment_retries: Number of times to retry a fragment for HTTP error (DASH
-                    and hlsnative only)
+fragment_retries: Number of times to retry a fragment for HTTP error
+                    (DASH and hlsnative only). Default is 0 for API, but 10 for CLI
skip_unavailable_fragments:
Skip unavailable fragments (DASH and hlsnative only)
keep_fragments: Keep downloaded fragments on disk after downloading is
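The fragment retry count is likewise one of the defaults devscripts/cli_to_api.py patches out when computing its diff. A small sketch (not from the commit) of how the CLI flag maps onto the API option:

import yt_dlp

# With --fragment-retries 10 on the command line, the corresponding API option is:
opts = yt_dlp.parse_options(['--fragment-retries', '10']).ydl_opts
print(opts['fragment_retries'])  # 10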

yt_dlp/utils/_utils.py

@@ -60,6 +60,8 @@ from ..compat import (
from ..dependencies import brotli, certifi, websockets, xattr
from ..socks import ProxyType, sockssocket
+__name__ = __name__.rsplit('.', 1)[0]  # Pretend to be the parent module
# This is not clearly defined otherwise
compiled_regex_type = type(re.compile(''))
@@ -1957,8 +1959,8 @@ class DateRange:
date = date_from_str(date)
return self.start <= date <= self.end
-def __str__(self):
-    return f'{self.start.isoformat()} - {self.end.isoformat()}'
+def __repr__(self):
+    return f'{__name__}.{type(self).__name__}({self.start.isoformat()!r}, {self.end.isoformat()!r})'
def __eq__(self, other):
return (isinstance(other, DateRange)
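Finally, an illustrative check (not part of the diff) of what the new __repr__ yields, which is what pprint in devscripts/cli_to_api.py displays for date-range options; the module prefix comes from the __name__ override added above.

from yt_dlp.utils import DateRange

print(repr(DateRange('20230101', '20231231')))
# expected: yt_dlp.utils.DateRange('2023-01-01', '2023-12-31')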