2017-08-02 20:24:42 +02:00
|
|
|
import ast
|
2022-08-25 11:26:25 +02:00
|
|
|
import inspect
|
2018-05-30 03:15:11 +02:00
|
|
|
import logging
|
2013-08-16 00:46:47 +02:00
|
|
|
import operator
|
2013-08-08 15:01:32 +02:00
|
|
|
import re
|
2018-06-03 20:17:11 +02:00
|
|
|
import time
|
2023-01-06 22:54:16 +01:00
|
|
|
import warnings
|
2020-10-26 14:46:17 +01:00
|
|
|
from functools import partial
|
2022-08-20 14:23:22 +02:00
|
|
|
from http.cookiejar import Cookie
|
2022-09-04 16:15:21 +02:00
|
|
|
from typing import (
|
2023-02-09 15:44:12 +01:00
|
|
|
TYPE_CHECKING,
|
2022-09-04 16:15:21 +02:00
|
|
|
Any,
|
|
|
|
Callable,
|
|
|
|
ClassVar,
|
|
|
|
Dict,
|
|
|
|
List,
|
|
|
|
Match,
|
|
|
|
NamedTuple,
|
|
|
|
Optional,
|
|
|
|
Pattern,
|
|
|
|
Sequence,
|
2023-01-20 01:35:30 +01:00
|
|
|
Tuple,
|
2022-09-04 16:15:21 +02:00
|
|
|
Type,
|
2023-01-20 01:35:30 +01:00
|
|
|
TypeVar,
|
2022-09-04 16:15:21 +02:00
|
|
|
Union,
|
|
|
|
)
|
2020-10-26 14:46:17 +01:00
|
|
|
|
|
|
|
import requests.cookies
|
2013-08-08 15:01:32 +02:00
|
|
|
|
2018-06-03 20:17:11 +02:00
|
|
|
from streamlink.cache import Cache
|
2020-10-26 14:46:17 +01:00
|
|
|
from streamlink.exceptions import FatalPluginError, NoStreamsError, PluginError
|
2022-08-19 00:34:08 +02:00
|
|
|
from streamlink.options import Argument, Arguments, Options
|
2022-08-18 16:32:28 +02:00
|
|
|
from streamlink.user_input import UserInputRequester
|
|
|
|
|
2023-02-09 15:44:12 +01:00
|
|
|
|
2022-09-04 16:15:21 +02:00
|
|
|
if TYPE_CHECKING: # pragma: no cover
|
|
|
|
from streamlink.session import Streamlink
|
|
|
|
|
2013-02-08 02:00:44 +01:00
|
|
|
|
2018-06-22 01:15:21 +02:00
|
|
|
log = logging.getLogger(__name__)

# FIXME: This is a crude attempt at making a bitrate's
# weight end up similar to the weight of a resolution.
# Someone who knows math, please fix.
BIT_RATE_WEIGHT_RATIO = 2.8

# Weight penalty applied per "_alt" level of a stream name (see stream_weight)
ALT_WEIGHT_MOD = 0.01

# Named qualities mapped to fixed weights, grouped by the weight group they belong to.
# NOTE(review): the name contains a typo ("WEIGTHS") — kept as-is for backward compatibility,
# since it is a public module-level name.
QUALITY_WEIGTHS_EXTRA = {
    "other": {
        "live": 1080,
    },
    "tv": {
        "hd": 1080,
        "sd": 576,
    },
    "quality": {
        "ehq": 720,
        "hq": 576,
        "sq": 360,
    },
}

# Comparison operators accepted in stream sorting filter expressions (see stream_sorting_filter);
# expressions without an operator fall back to operator.eq
FILTER_OPERATORS = {
    "<": operator.lt,
    "<=": operator.le,
    ">": operator.gt,
    ">=": operator.ge,
}

# Matches "key=value" pairs where the value may be a dict/list/tuple/string literal or a bare token
PARAMS_REGEX = r"(\w+)=({.+?}|\[.+?\]|\(.+?\)|'(?:[^'\\]|\\')*'|\"(?:[^\"\\]|\\\")*\"|\S+)"

# Plugin matcher priority constants
HIGH_PRIORITY = 30
NORMAL_PRIORITY = 20
LOW_PRIORITY = 10
NO_PRIORITY = 0

# Cookie attributes copied into the plugin cache by Plugin.save_cookies()
_COOKIE_KEYS = \
    "version", "name", "value", "port", "domain", "path", "secure", "expires", "discard", "comment", "comment_url", "rfc2109"
2013-08-29 01:49:49 +02:00
|
|
|
def stream_weight(stream):
    """
    Calculate a sortable weight for a stream name.

    Returns a ``(weight, group)`` tuple, where group is the name of a
    QUALITY_WEIGTHS_EXTRA group ("other"/"tv"/"quality") for named qualities,
    "bitrate" for names like "3000k", "pixels" for names like "1080p60",
    or ("none" with weight 0) when the name cannot be parsed.
    """
    # Named qualities (e.g. "live", "hd", "sq") have predefined weights
    for group, weights in QUALITY_WEIGTHS_EXTRA.items():
        if stream in weights:
            return weights[stream], group

    # Parses names such as "720p", "1080p60", "480p+", "3000k", "720p_alt2", "1080p_a3000k":
    # g1=number, g2=unit (k/p), g3=fps, g4="+", g5=bitrate classifier, g6="alt", g7=alt index
    match = re.match(r"^(\d+)(k|p)?(\d+)?(\+)?(?:[a_](\d+)k)?(?:_(alt)(\d)?)?$", stream)

    if match:
        weight = 0

        # "_alt" variants weigh slightly below the primary stream of the same quality,
        # with higher alt indices weighing progressively less
        if match.group(6):
            if match.group(7):
                weight -= ALT_WEIGHT_MOD * int(match.group(7))
            else:
                weight -= ALT_WEIGHT_MOD

        name_type = match.group(2)
        if name_type == "k":  # bit rate
            bitrate = int(match.group(1))
            # scale bitrates down so they rank roughly like pixel heights
            weight += bitrate / BIT_RATE_WEIGHT_RATIO

            return weight, "bitrate"

        elif name_type == "p":  # resolution
            weight += int(match.group(1))

            if match.group(3):  # fps eg. 60p or 50p
                weight += int(match.group(3))

            # "+" marks a slightly better variant of the same resolution
            if match.group(4) == "+":
                weight += 1

            if match.group(5):  # bit rate classifier for resolution
                weight += int(match.group(5)) / BIT_RATE_WEIGHT_RATIO

            return weight, "pixels"

    return 0, "none"
|
2013-02-08 02:00:44 +01:00
|
|
|
|
2013-08-16 00:46:47 +02:00
|
|
|
|
2013-08-01 22:19:16 +02:00
|
|
|
def iterate_streams(streams):
    """Yield ``(name, stream)`` pairs, flattening list-valued streams into one pair per item."""
    for name, stream in streams:
        # a list value means several alternative streams share the same name
        sub_streams = stream if isinstance(stream, list) else [stream]
        for sub_stream in sub_streams:
            yield name, sub_stream
|
2013-08-01 22:19:16 +02:00
|
|
|
|
2013-08-08 15:01:32 +02:00
|
|
|
|
2013-08-01 22:19:16 +02:00
|
|
|
def stream_type_priority(stream_types, stream):
    """
    Rank a ``(name, stream)`` pair by the position of its stream type in *stream_types*.

    Types not listed fall back to the position of the ``"*"`` wildcard entry,
    or to 99 (lowest priority) when no wildcard is present either.
    """
    shortname = type(stream[1]).shortname()

    for candidate in (shortname, "*"):
        try:
            return stream_types.index(candidate)
        except ValueError:
            continue

    return 99
|
|
|
|
|
|
|
|
|
2013-08-29 01:49:49 +02:00
|
|
|
def stream_sorting_filter(expr, stream_weight):
    """
    Build a predicate from a filter expression like ``>480p`` or ``720p``.

    The returned function excludes (returns False for) stream names in the same
    weight group whose weight satisfies the comparison; names from other weight
    groups always pass.

    :param expr: a ``[operator]<value>`` expression (operators: <, <=, >, >=)
    :param stream_weight: callable returning ``(weight, group)`` for a stream name
    :raises PluginError: when the expression cannot be parsed
    """
    parsed = re.match(r"(?P<op><=|>=|<|>)?(?P<value>[\w+]+)", expr)

    if not parsed:
        raise PluginError("Invalid filter expression: {0}".format(expr))

    op_token, value = parsed.group("op", "value")
    # no explicit operator means equality
    operation = FILTER_OPERATORS.get(op_token, operator.eq)
    filter_weight, filter_group = stream_weight(value)

    def predicate(quality):
        weight, group = stream_weight(quality)
        # only filter within the same weight group (pixels vs. bitrate etc.)
        return group != filter_group or not operation(weight, filter_weight)

    return predicate
|
2013-08-08 15:01:32 +02:00
|
|
|
|
|
|
|
|
2021-06-22 22:37:04 +02:00
|
|
|
def parse_params(params: Optional[str] = None) -> Dict[str, Any]:
    """
    Parse a ``key=value key2=value2 ...`` string into a dict.

    Values are evaluated as Python literals where possible; values that are not
    valid literals are kept as raw strings.
    """
    if not params:
        return {}

    result: Dict[str, Any] = {}
    for key, value in re.findall(PARAMS_REGEX, params):
        try:
            # accept dict/list/tuple/str/number literals
            result[key] = ast.literal_eval(value)
        except Exception:
            # not a literal: keep the raw token
            result[key] = value

    return result
|
|
|
|
|
|
|
|
|
2021-06-22 22:25:35 +02:00
|
|
|
class Matcher(NamedTuple):
    """A plugin URL matcher: a compiled pattern, its priority, and an optional unique name."""

    pattern: Pattern
    priority: int
    name: Optional[str] = None
|
|
|
|
|
|
|
|
|
|
|
|
# Type variable for the item type stored in _MCollection subclasses
MType = TypeVar("MType")
|
|
|
|
|
|
|
|
|
|
|
|
class _MCollection(List[MType]):
    """A list that additionally supports item lookups by a registered string name."""

    def __init__(self, **kwargs) -> None:
        super().__init__(**kwargs)
        # maps registered names to their items (populated by subclasses)
        self._names: Dict[str, MType] = {}

    def __getitem__(self, item):
        # string keys go through the name mapping; everything else behaves like a list
        if type(item) is str:
            return self._names[item]
        return super().__getitem__(item)
|
|
|
|
|
|
|
|
|
|
|
|
class Matchers(_MCollection[Matcher]):
    """The collection of a plugin's matchers, with lookups by index or matcher name."""

    def register(self, matcher: Matcher) -> None:
        # later registrations take precedence, so prepend
        super().insert(0, matcher)

        if not matcher.name:
            return
        if matcher.name in self._names:
            raise ValueError(f"A matcher named '{matcher.name}' has already been registered")
        self._names[matcher.name] = matcher
|
|
|
|
|
|
|
|
|
|
|
|
class Matches(_MCollection[Optional[Match]]):
    """The list of optional match results of all matchers, with lookups by index or matcher name."""

    def update(self, matchers: Matchers, value: str) -> Tuple[Optional[Pattern], Optional[Match]]:
        """Re-match *value* against all *matchers* and return the first successful (pattern, match) pair."""
        results = [(matcher, matcher.pattern.match(value)) for matcher in matchers]

        # rebuild both the positional list and the name mapping from scratch
        self.clear()
        self._names.clear()
        for matcher, match in results:
            self.append(match)
            if matcher.name:
                self._names[matcher.name] = match

        # first matcher (in registration-priority order) that matched wins
        for matcher, match in results:
            if match is not None:
                return matcher.pattern, match

        return None, None
|
2021-06-22 22:25:35 +02:00
|
|
|
|
|
|
|
|
2022-08-25 11:26:25 +02:00
|
|
|
# Add front- and back-wrappers to the deprecated plugin's method resolution order (see Plugin.__new__)
class PluginWrapperMeta(type):
    def mro(cls):
        """Build a custom MRO that slots the back-wrapper in right after the deprecated plugin class."""
        # cls.__base__ is the PluginWrapperFront which is based on the deprecated plugin class
        mro = list(cls.__base__.__mro__)
        # cls is the PluginWrapperBack and needs to be inserted after the deprecated plugin class
        # (index 2: after PluginWrapperFront and the deprecated plugin class itself)
        mro.insert(2, cls)
        return mro
|
|
|
|
|
|
|
|
|
2013-02-08 02:00:44 +01:00
|
|
|
class Plugin:
    """
    Plugin base class for retrieving streams and metadata from the URL specified.
    """

    matchers: ClassVar[Optional[Matchers]] = None
    """
    The list of plugin matchers (URL pattern + priority + optional name).
    This list supports matcher lookups both by matcher index, as well as matcher name, if defined.

    Use the :func:`pluginmatcher` decorator to initialize plugin matchers.
    """

    arguments: ClassVar[Optional[Arguments]] = None
    """
    The plugin's :class:`Arguments <streamlink.options.Arguments>` collection.

    Use the :func:`pluginargument` decorator to initialize plugin arguments.
    """

    matches: Matches
    """
    A list of optional :class:`re.Match` results of all defined matchers.
    This list supports match lookups both by the respective matcher index, as well as matcher name, if defined.
    """

    matcher: Optional[Pattern] = None
    """A reference to the compiled :class:`re.Pattern` of the first matching matcher"""

    match: Optional[Match] = None
    """A reference to the :class:`re.Match` result of the first matching matcher"""

    # plugin metadata attributes
    id: Optional[str] = None
    """Metadata 'id' attribute: unique stream ID, etc."""
    title: Optional[str] = None
    """Metadata 'title' attribute: the stream's short descriptive title"""
    author: Optional[str] = None
    """Metadata 'author' attribute: the channel or broadcaster name, etc."""
    category: Optional[str] = None
    """Metadata 'category' attribute: name of a game being played, a music genre, etc."""

    # class-level plugin options, shared by all instances (see set_option/get_option)
    options = Options()
    # backing field of the ``url`` property
    _url: str = ""

    # deprecated
    can_handle_url: Callable[[str], bool]
    # deprecated
    priority: Callable[[str], int]
|
|
|
|
|
2022-08-25 11:26:25 +02:00
|
|
|
    # Handle deprecated plugin constructors which only take the url argument
    def __new__(cls, *args, **kwargs):
        """Wrap deprecated single-argument plugin constructors so they accept (session, url)."""
        # Ignore plugins without custom constructors or wrappers
        if cls.__init__ is Plugin.__init__ or hasattr(cls, "_IS_DEPRECATED_PLUGIN_WRAPPER"):
            return super().__new__(cls)

        # Ignore custom constructors which have a formal "session" parameter or a variable positional parameter
        sig = inspect.signature(cls.__init__).parameters
        if "session" in sig or any(param.kind == inspect.Parameter.VAR_POSITIONAL for param in sig.values()):
            return super().__new__(cls)

        # Wrapper class which overrides the very first constructor in the MRO
        # noinspection PyAbstractClass
        class PluginWrapperFront(cls):
            # marker checked above so already-wrapped classes are not wrapped again
            _IS_DEPRECATED_PLUGIN_WRAPPER = True

            # The __module__ value needs to be copied
            __module__ = cls.__module__

            def __init__(self, session, url):
                # Take any arguments, but only pass the URL to the custom constructor of the deprecated plugin
                # noinspection PyArgumentList
                super().__init__(url)
                warnings.warn(
                    f"Initialized {self.module} plugin with deprecated constructor",
                    FutureWarning,
                    stacklevel=2,
                )

        # Wrapper class which comes after the deprecated plugin in the MRO
        # noinspection PyAbstractClass
        class PluginWrapperBack(PluginWrapperFront, metaclass=PluginWrapperMeta):
            def __init__(self, *_, **__):
                # Take any arguments from the super() call of the constructor of the deprecated plugin,
                # but pass the right args and keywords to the Plugin constructor
                # (args/kwargs are captured from the enclosing __new__ call)
                super().__init__(*args, **kwargs)

        return cls.__new__(PluginWrapperBack, *args, **kwargs)
|
|
|
|
|
2022-09-04 16:15:21 +02:00
|
|
|
    def __init__(self, session: "Streamlink", url: str):
        """
        :param session: The Streamlink session instance
        :param url: The input URL used for finding and resolving streams
        """

        # Derive the plugin name from the Python module the plugin class is defined in
        modulename = self.__class__.__module__
        self.module = modulename.split(".")[-1]
        self.logger = logging.getLogger(modulename)
        # per-plugin persistent cache, namespaced by the plugin's module name
        self.cache = Cache(
            filename="plugin-cache.json",
            key_prefix=self.module,
        )

        self.session: "Streamlink" = session
        self.matches = Matches()
        # assigning the URL invokes the ``url`` property setter,
        # which updates matches/matcher/match
        self.url: str = url

        # presumably restores cookies previously stored via save_cookies()
        # (load_cookies is defined elsewhere in this class)
        self.load_cookies()
|
2013-02-08 02:00:44 +01:00
|
|
|
|
2021-06-22 22:25:35 +02:00
|
|
|
    @property
    def url(self) -> str:
        """
        The plugin's input URL.
        Setting a new value will automatically update the :attr:`matches`, :attr:`matcher` and :attr:`match` data.
        """

        return self._url

    @url.setter
    def url(self, value: str):
        self._url = value

        # re-run all matchers against the new URL; matcher/match point to the first successful one
        if self.matchers:
            self.matcher, self.match = self.matches.update(self.matchers, value)
|
2021-06-22 22:25:35 +02:00
|
|
|
|
2013-02-08 02:00:44 +01:00
|
|
|
    @classmethod
    def set_option(cls, key, value):
        """Set a plugin option on the class-level :class:`Options` instance (shared by all instances)."""
        cls.options.set(key, value)

    @classmethod
    def get_option(cls, key):
        """Get a plugin option from the class-level :class:`Options` instance."""
        return cls.options.get(key)

    @classmethod
    def get_argument(cls, key):
        """Look up a plugin argument by key; falsy when the plugin defines no arguments."""
        return cls.arguments and cls.arguments.get(key)

    @classmethod
    def stream_weight(cls, stream):
        """Weigh a stream name for sorting; hook for plugins to override the module-level default."""
        return stream_weight(stream)
|
|
|
|
|
2013-10-20 15:39:56 +02:00
|
|
|
@classmethod
|
|
|
|
def default_stream_types(cls, streams):
|
2021-11-13 11:36:32 +01:00
|
|
|
stream_types = ["hls", "http"]
|
2013-10-20 15:39:56 +02:00
|
|
|
|
2023-03-24 14:22:33 +01:00
|
|
|
for _name, stream in iterate_streams(streams):
|
2013-10-20 15:39:56 +02:00
|
|
|
stream_type = type(stream).shortname()
|
|
|
|
|
|
|
|
if stream_type not in stream_types:
|
|
|
|
stream_types.append(stream_type)
|
|
|
|
|
|
|
|
return stream_types
|
|
|
|
|
2014-06-06 18:13:43 +02:00
|
|
|
    def streams(self, stream_types=None, sorting_excludes=None):
        """
        Attempts to extract available streams.

        Returns a :class:`dict` containing the streams, where the key is
        the name of the stream (most commonly the quality name), with the value
        being a :class:`Stream` instance.

        The result can contain the synonyms **best** and **worst** which
        point to the streams which are likely to be of highest and
        lowest quality respectively.

        If multiple streams with the same name are found, the order of
        streams specified in *stream_types* will determine which stream
        gets to keep the name while the rest will be renamed to
        "<name>_<stream type>".

        The synonyms can be fine-tuned with the *sorting_excludes*
        parameter, which can be one of these types:

        - A list of filter expressions in the format
          ``[operator]<value>``. For example the filter ">480p" will
          exclude streams ranked higher than "480p" from the list
          used in the synonyms ranking. Valid operators are ``>``, ``>=``, ``<``
          and ``<=``. If no operator is specified then equality will be tested.

        - A function that is passed to :meth:`filter` with a list of
          stream names as input.


        :param stream_types: A list of stream types to return
        :param sorting_excludes: Specify which streams to exclude from the best/worst synonyms
        :returns: A :class:`dict` of stream names and :class:`streamlink.stream.Stream` instances
        """

        try:
            ostreams = self._get_streams()
            if isinstance(ostreams, dict):
                ostreams = ostreams.items()

            # Flatten the iterator to a list so we can reuse it.
            if ostreams:
                ostreams = list(ostreams)
        except NoStreamsError:
            return {}
        except (OSError, ValueError) as err:
            raise PluginError(err) from err

        if not ostreams:
            return {}

        if stream_types is None:
            stream_types = self.default_stream_types(ostreams)

        # Add streams depending on stream type and priorities
        sorted_streams = sorted(iterate_streams(ostreams),
                                key=partial(stream_type_priority,
                                            stream_types))

        streams = {}
        for name, stream in sorted_streams:
            stream_type = type(stream).shortname()

            # Use * as wildcard to match other stream types
            if "*" not in stream_types and stream_type not in stream_types:
                continue

            # drop _alt from any stream names
            if name.endswith("_alt"):
                name = name[:-len("_alt")]

            # On a name clash, the higher-priority stream type keeps the plain name;
            # others get a "_<type>" and then "_alt"/"_alt<n>" suffix
            existing = streams.get(name)
            if existing:
                existing_stream_type = type(existing).shortname()
                if existing_stream_type != stream_type:
                    name = "{0}_{1}".format(name, stream_type)

                if name in streams:
                    name = "{0}_alt".format(name)
                    num_alts = len(list(filter(lambda n: n.startswith(name), streams.keys())))

                    # We shouldn't need more than 2 alt streams
                    if num_alts >= 2:
                        continue
                    elif num_alts > 0:
                        name = "{0}{1}".format(name, num_alts + 1)

            # Validate stream name and discard the stream if it's bad.
            # NOTE(review): "[A-z]" also matches "[", "\\", "]", "^", "_" and "`" —
            # possibly "[A-Za-z]" was intended; kept as-is to preserve behavior.
            match = re.match("([A-z0-9_+]+)", name)
            if match:
                name = match.group(1)
            else:
                self.logger.debug(f"The stream '{name}' has been ignored since it is badly named.")
                continue

            # Force lowercase name and replace space with underscore.
            streams[name.lower()] = stream

        # Create the best/worst synonyms
        def stream_weight_only(s):
            # single remaining stream still gets a nonzero weight so it can become best/worst
            return self.stream_weight(s)[0] or (len(streams) == 1 and 1)

        stream_names = filter(stream_weight_only, streams.keys())
        sorted_streams = sorted(stream_names, key=stream_weight_only)
        # keep the unfiltered ranking for the best-/worst-unfiltered fallbacks
        unfiltered_sorted_streams = sorted_streams

        if isinstance(sorting_excludes, list):
            for expr in sorting_excludes:
                filter_func = stream_sorting_filter(expr, self.stream_weight)
                sorted_streams = list(filter(filter_func, sorted_streams))
        elif callable(sorting_excludes):
            sorted_streams = list(filter(sorting_excludes, sorted_streams))

        final_sorted_streams = {}

        for stream_name in sorted(streams, key=stream_weight_only):
            final_sorted_streams[stream_name] = streams[stream_name]

        if len(sorted_streams) > 0:
            best = sorted_streams[-1]
            worst = sorted_streams[0]
            final_sorted_streams["worst"] = streams[worst]
            final_sorted_streams["best"] = streams[best]
        elif len(unfiltered_sorted_streams) > 0:
            # all streams were excluded by the filters: expose unfiltered synonyms instead
            best = unfiltered_sorted_streams[-1]
            worst = unfiltered_sorted_streams[0]
            final_sorted_streams["worst-unfiltered"] = streams[worst]
            final_sorted_streams["best-unfiltered"] = streams[best]

        return final_sorted_streams
|
2013-02-08 02:00:44 +01:00
|
|
|
|
|
|
|
    def _get_streams(self):
        """
        Implement the stream and metadata retrieval here.

        Needs to return either a dict of :class:`streamlink.stream.Stream` instances mapped by stream name, or needs to act
        as a generator which yields tuples of stream names and :class:`streamlink.stream.Stream` instances.

        :raises NotImplementedError: when not overridden by the plugin subclass
        """

        raise NotImplementedError
|
|
|
|
|
2021-09-03 14:05:00 +02:00
|
|
|
def get_metadata(self) -> Dict[str, Optional[str]]:
|
|
|
|
return dict(
|
2021-11-23 12:25:32 +01:00
|
|
|
id=self.get_id(),
|
2021-09-03 14:05:00 +02:00
|
|
|
author=self.get_author(),
|
|
|
|
category=self.get_category(),
|
|
|
|
title=self.get_title(),
|
|
|
|
)
|
|
|
|
|
2021-11-23 12:25:32 +01:00
|
|
|
def get_id(self) -> Optional[str]:
|
|
|
|
return None if self.id is None else str(self.id).strip()
|
|
|
|
|
2021-08-30 06:20:29 +02:00
|
|
|
def get_title(self) -> Optional[str]:
|
2021-10-25 00:01:04 +02:00
|
|
|
return None if self.title is None else str(self.title).strip()
|
2018-07-11 20:52:17 +02:00
|
|
|
|
2021-08-30 06:20:29 +02:00
|
|
|
def get_author(self) -> Optional[str]:
|
2021-10-25 00:01:04 +02:00
|
|
|
return None if self.author is None else str(self.author).strip()
|
2018-07-11 20:52:17 +02:00
|
|
|
|
2021-08-30 06:20:29 +02:00
|
|
|
def get_category(self) -> Optional[str]:
|
2021-10-25 00:01:04 +02:00
|
|
|
return None if self.category is None else str(self.category).strip()
|
2018-07-11 20:52:17 +02:00
|
|
|
|
2022-04-03 20:58:13 +02:00
|
|
|
def save_cookies(
|
|
|
|
self,
|
2022-08-20 14:23:22 +02:00
|
|
|
cookie_filter: Optional[Callable[[Cookie], bool]] = None,
|
|
|
|
default_expires: int = 60 * 60 * 24 * 7,
|
2022-04-03 20:58:13 +02:00
|
|
|
) -> List[str]:
|
2018-06-03 20:17:11 +02:00
|
|
|
"""
|
2022-04-03 20:58:13 +02:00
|
|
|
Store the cookies from :attr:`session.http` in the plugin cache until they expire. The cookies can be filtered
|
|
|
|
by supplying a filter method. e.g. ``lambda c: "auth" in c.name``. If no expiry date is given in the
|
2018-06-03 20:17:11 +02:00
|
|
|
cookie then the ``default_expires`` value will be used.
|
|
|
|
|
|
|
|
:param cookie_filter: a function to filter the cookies
|
|
|
|
:param default_expires: time (in seconds) until cookies with no expiry will expire
|
|
|
|
:return: list of the saved cookie names
|
|
|
|
"""
|
2022-04-03 20:58:13 +02:00
|
|
|
|
2018-06-03 20:17:11 +02:00
|
|
|
cookie_filter = cookie_filter or (lambda c: True)
|
|
|
|
saved = []
|
|
|
|
|
|
|
|
for cookie in filter(cookie_filter, self.session.http.cookies):
|
|
|
|
cookie_dict = {}
|
2022-08-20 14:23:22 +02:00
|
|
|
for key in _COOKIE_KEYS:
|
|
|
|
cookie_dict[key] = getattr(cookie, key, None)
|
2018-06-03 20:17:11 +02:00
|
|
|
cookie_dict["rest"] = getattr(cookie, "rest", getattr(cookie, "_rest", None))
|
|
|
|
|
|
|
|
expires = default_expires
|
2022-08-20 14:23:22 +02:00
|
|
|
if cookie_dict["expires"]:
|
|
|
|
expires = int(cookie_dict["expires"] - time.time())
|
|
|
|
key = "__cookie:{0}:{1}:{2}:{3}".format(
|
|
|
|
cookie.name,
|
|
|
|
cookie.domain,
|
|
|
|
cookie.port_specified and cookie.port or "80",
|
|
|
|
cookie.path_specified and cookie.path or "*",
|
|
|
|
)
|
2018-06-03 20:17:11 +02:00
|
|
|
self.cache.set(key, cookie_dict, expires)
|
|
|
|
saved.append(cookie.name)
|
|
|
|
|
2022-08-20 14:23:22 +02:00
|
|
|
if saved: # pragma: no branch
|
|
|
|
self.logger.debug(f"Saved cookies: {', '.join(saved)}")
|
|
|
|
|
2018-06-03 20:17:11 +02:00
|
|
|
return saved
|
|
|
|
|
2022-04-03 20:58:13 +02:00
|
|
|
def load_cookies(self) -> List[str]:
|
2018-06-03 20:17:11 +02:00
|
|
|
"""
|
|
|
|
Load any stored cookies for the plugin that have not expired.
|
|
|
|
|
|
|
|
:return: list of the restored cookie names
|
|
|
|
"""
|
2022-04-03 20:58:13 +02:00
|
|
|
|
2018-06-03 20:17:11 +02:00
|
|
|
restored = []
|
|
|
|
|
|
|
|
for key, value in self.cache.get_all().items():
|
|
|
|
if key.startswith("__cookie"):
|
|
|
|
cookie = requests.cookies.create_cookie(**value)
|
|
|
|
self.session.http.cookies.set_cookie(cookie)
|
|
|
|
restored.append(cookie.name)
|
|
|
|
|
2022-08-20 14:23:22 +02:00
|
|
|
if restored: # pragma: no branch
|
|
|
|
self.logger.debug(f"Restored cookies: {', '.join(restored)}")
|
|
|
|
|
2018-06-03 20:17:11 +02:00
|
|
|
return restored
|
|
|
|
|
2022-04-03 20:58:13 +02:00
|
|
|
def clear_cookies(self, cookie_filter: Optional[Callable] = None) -> List[str]:
|
2018-06-03 20:17:11 +02:00
|
|
|
"""
|
2022-04-03 20:58:13 +02:00
|
|
|
Removes all saved cookies for this plugin. To filter the cookies that are deleted
|
2018-06-03 20:17:11 +02:00
|
|
|
specify the ``cookie_filter`` argument (see :func:`save_cookies`).
|
|
|
|
|
|
|
|
:param cookie_filter: a function to filter the cookies
|
|
|
|
:type cookie_filter: function
|
|
|
|
:return: list of the removed cookie names
|
|
|
|
"""
|
2022-04-03 20:58:13 +02:00
|
|
|
|
2018-06-03 20:17:11 +02:00
|
|
|
cookie_filter = cookie_filter or (lambda c: True)
|
|
|
|
removed = []
|
|
|
|
|
|
|
|
for key, value in sorted(self.cache.get_all().items(), key=operator.itemgetter(0), reverse=True):
|
|
|
|
if key.startswith("__cookie"):
|
|
|
|
cookie = requests.cookies.create_cookie(**value)
|
|
|
|
if cookie_filter(cookie):
|
|
|
|
del self.session.http.cookies[cookie.name]
|
|
|
|
self.cache.set(key, None, 0)
|
|
|
|
removed.append(key)
|
|
|
|
|
|
|
|
return removed
|
|
|
|
|
2022-08-18 16:32:28 +02:00
|
|
|
def input_ask(self, prompt: str) -> str:
|
plugin: remove Plugin.bind()
This changes the way how the Streamlink session and other objects like
the plugin cache and logger are stored on each plugin.
Previously, those objects were set as class attributes on every `Plugin`
class via `Plugin.bind()` when loading plugins via the session's
`load_plugins()` method that gets called on initialization.
This meant that whenever a new Streamlink session was initialized,
references to it (including a dict of every loaded plugin) were set
on each `Plugin` class as a class attribute, and Python's garbage
collector could not get rid of this memory when deleting the session
instance that was created last.
Removing `Plugin.bind()`, passing the session via the `Plugin.__init__`
constructor, and setting the cache, logger, etc. on `Plugin` instances
instead (only one gets initialized by `streamlink_cli`), removes those
static references that prevent the garbage collector to work.
Since the plugin "module" name now doesn't get set via `Plugin.bind()`
anymore, it derives its name via `self.__class__.__module__` on its own,
which means a change of the return type of `Streamlink.resolve_url()`
is necessary in order to pass the plugin name to `streamlink_cli`,
so that it can load config files and initialize plugin arguments, etc.
Breaking changes:
- Remove `Plugin.bind()`
- Pass the `session` instance via the Plugin constructor and set the
`module`, `cache` and `logger` on the plugin instance instead.
Derive `module` from the actual module name.
- Change the return type of `Session.resolve_url()` and include the
resolved plugin name in the returned tuple
Other changes:
- Remove `pluginclass.bind()` call from `Session.load_plugins()` and
use the loader's module name directly on the `Session.plugins` dict
- Remove initialization check from `Plugin` cookie methods
- Update streamlink_cli.main module according to breaking changes
- Update tests respectively
- Add explicit plugin initialization test
- Update tests with plugin constructors and custom plugin names
- Move testplugin override module, so that it shares the same module
name as the main testplugin module. Rel `Session.load_plugins()`
- Refactor most session tests and replace unneeded `resolve_url()`
wrappers in favor of calling `session.streams()`
2022-08-25 10:55:38 +02:00
|
|
|
user_input_requester: Optional[UserInputRequester] = self.session.get_option("user-input-requester")
|
2022-08-18 16:32:28 +02:00
|
|
|
if user_input_requester:
|
2018-06-22 01:15:21 +02:00
|
|
|
try:
|
2022-08-18 16:32:28 +02:00
|
|
|
return user_input_requester.ask(prompt)
|
|
|
|
except OSError as err:
|
2023-03-24 14:22:33 +01:00
|
|
|
raise FatalPluginError(f"User input error: {err}") from err
|
2018-06-22 01:15:21 +02:00
|
|
|
raise FatalPluginError("This plugin requires user input, however it is not supported on this platform")
|
|
|
|
|
2022-08-18 16:32:28 +02:00
|
|
|
def input_ask_password(self, prompt: str) -> str:
|
plugin: remove Plugin.bind()
This changes the way how the Streamlink session and other objects like
the plugin cache and logger are stored on each plugin.
Previously, those objects were set as class attributes on every `Plugin`
class via `Plugin.bind()` when loading plugins via the session's
`load_plugins()` method that gets called on initialization.
This meant that whenever a new Streamlink session was initialized,
references to it (including a dict of every loaded plugin) were set
on each `Plugin` class as a class attribute, and Python's garbage
collector could not get rid of this memory when deleting the session
instance that was created last.
Removing `Plugin.bind()`, passing the session via the `Plugin.__init__`
constructor, and setting the cache, logger, etc. on `Plugin` instances
instead (only one gets initialized by `streamlink_cli`), removes those
static references that prevent the garbage collector to work.
Since the plugin "module" name now doesn't get set via `Plugin.bind()`
anymore, it derives its name via `self.__class__.__module__` on its own,
which means a change of the return type of `Streamlink.resolve_url()`
is necessary in order to pass the plugin name to `streamlink_cli`,
so that it can load config files and initialize plugin arguments, etc.
Breaking changes:
- Remove `Plugin.bind()`
- Pass the `session` instance via the Plugin constructor and set the
`module`, `cache` and `logger` on the plugin instance instead.
Derive `module` from the actual module name.
- Change the return type of `Session.resolve_url()` and include the
resolved plugin name in the returned tuple
Other changes:
- Remove `pluginclass.bind()` call from `Session.load_plugins()` and
use the loader's module name directly on the `Session.plugins` dict
- Remove initialization check from `Plugin` cookie methods
- Update streamlink_cli.main module according to breaking changes
- Update tests respectively
- Add explicit plugin initialization test
- Update tests with plugin constructors and custom plugin names
- Move testplugin override module, so that it shares the same module
name as the main testplugin module. Rel `Session.load_plugins()`
- Refactor most session tests and replace unneeded `resolve_url()`
wrappers in favor of calling `session.streams()`
2022-08-25 10:55:38 +02:00
|
|
|
user_input_requester: Optional[UserInputRequester] = self.session.get_option("user-input-requester")
|
2022-08-18 16:32:28 +02:00
|
|
|
if user_input_requester:
|
2018-06-22 01:15:21 +02:00
|
|
|
try:
|
2022-08-18 16:32:28 +02:00
|
|
|
return user_input_requester.ask_password(prompt)
|
|
|
|
except OSError as err:
|
2023-03-24 14:22:33 +01:00
|
|
|
raise FatalPluginError(f"User input error: {err}") from err
|
2018-06-22 01:15:21 +02:00
|
|
|
raise FatalPluginError("This plugin requires user input, however it is not supported on this platform")
|
|
|
|
|
2020-02-19 05:01:35 +01:00
|
|
|
|
2022-08-19 17:27:03 +02:00
|
|
|
def pluginmatcher(
    pattern: Pattern,
    priority: int = NORMAL_PRIORITY,
    name: Optional[str] = None,
) -> Callable[[Type[Plugin]], Type[Plugin]]:
    """
    Decorator for defining plugin URL matchers.

    A matcher is a compiled regular expression pattern for the plugin's input URL,
    combined with a priority value and an optional name.
    The priority value decides which plugin gets chosen by
    :meth:`Streamlink.resolve_url <streamlink.Streamlink.resolve_url>` when more than one plugin matches the input URL.
    The name allows accessing a specific matcher and its matching result when a plugin defines several matchers.

    Every plugin needs at least one matcher. When multiple matchers are defined, the first matching one
    (in top-to-bottom order of definition) is responsible for setting the
    :attr:`Plugin.matcher` and :attr:`Plugin.match` attributes on the :class:`Plugin` instance.
    The :attr:`Plugin.matchers` and :attr:`Plugin.matches` attributes cover all defined matchers,
    and both support referencing matchers and matches by matcher index and name.

    .. code-block:: python

        import re

        from streamlink.plugin import HIGH_PRIORITY, Plugin, pluginmatcher


        @pluginmatcher(re.compile("https?://example:1234/(?:foo|bar)/(?P<name>[^/]+)"))
        @pluginmatcher(priority=HIGH_PRIORITY, pattern=re.compile(\"\"\"
            https?://(?:
                 sitenumberone
                |adifferentsite
                |somethingelse
            )
            /.+\\.m3u8
        \"\"\", re.VERBOSE))
        class MyPlugin(Plugin):
            ...
    """

    matcher = Matcher(pattern, priority, name)

    def apply(cls: Type[Plugin]) -> Type[Plugin]:
        if not issubclass(cls, Plugin):
            raise TypeError(f"{cls.__name__} is not a Plugin")
        # lazily create the matchers collection on first registration
        if cls.matchers is None:
            cls.matchers = Matchers()
        cls.matchers.register(matcher)

        return cls

    return apply
|
|
|
|
|
|
|
|
|
2022-08-19 00:34:08 +02:00
|
|
|
def pluginargument(
    name: str,
    required: bool = False,
    requires: Optional[Union[str, Sequence[str]]] = None,
    prompt: Optional[str] = None,
    sensitive: bool = False,
    argument_name: Optional[str] = None,
    dest: Optional[str] = None,
    is_global: bool = False,
    **options,
) -> Callable[[Type[Plugin]], Type[Plugin]]:
    """
    Decorator for defining plugin arguments. Accepts the same arguments as :class:`streamlink.options.Argument`.

    .. code-block:: python

        from streamlink.plugin import Plugin, pluginargument


        @pluginargument(
            "username",
            requires=["password"],
            metavar="EMAIL",
            help="The username for your account.",
        )
        @pluginargument(
            "password",
            sensitive=True,
            metavar="PASSWORD",
            help="The password for your account.",
        )
        class MyPlugin(Plugin):
            ...

    This will add the ``--myplugin-username`` and ``--myplugin-password`` arguments to the CLI,
    assuming the plugin's module name is ``myplugin``.
    """

    argument = Argument(
        name,
        required=required,
        requires=requires,
        prompt=prompt,
        sensitive=sensitive,
        argument_name=argument_name,
        dest=dest,
        is_global=is_global,
        **options,
    )

    def apply(cls: Type[Plugin]) -> Type[Plugin]:
        if not issubclass(cls, Plugin):
            raise TypeError(f"{repr(cls)} is not a Plugin")
        # lazily create the arguments collection on first registration
        if cls.arguments is None:
            cls.arguments = Arguments()
        cls.arguments.add(argument)

        return cls

    return apply
|
|
|
|
|
|
|
|
|
2021-06-22 22:25:35 +02:00
|
|
|
__all__ = [
    "HIGH_PRIORITY",
    "NORMAL_PRIORITY",
    "LOW_PRIORITY",
    "NO_PRIORITY",
    "Plugin",
    "Matcher",
    "pluginmatcher",
    "pluginargument",
]
|