2017-08-02 20:24:42 +02:00
|
|
|
import ast
|
2018-05-30 03:15:11 +02:00
|
|
|
import logging
|
2013-08-16 00:46:47 +02:00
|
|
|
import operator
|
2013-08-08 15:01:32 +02:00
|
|
|
import re
|
2018-06-03 20:17:11 +02:00
|
|
|
import time
|
|
|
|
import requests.cookies
|
|
|
|
|
2013-08-08 15:01:32 +02:00
|
|
|
from functools import partial
|
2018-06-03 20:17:11 +02:00
|
|
|
from collections import OrderedDict
|
2013-08-08 15:01:32 +02:00
|
|
|
|
2018-06-03 20:17:11 +02:00
|
|
|
from streamlink.cache import Cache
|
2018-06-22 01:15:21 +02:00
|
|
|
from streamlink.exceptions import PluginError, NoStreamsError, FatalPluginError
|
2018-06-03 20:17:11 +02:00
|
|
|
from streamlink.options import Options, Arguments
|
2013-02-08 02:00:44 +01:00
|
|
|
|
2018-06-22 01:15:21 +02:00
|
|
|
log = logging.getLogger(__name__)

# FIXME: This is a crude attempt at making a bitrate's
# weight end up similar to the weight of a resolution.
# Someone who knows math, please fix.
BIT_RATE_WEIGHT_RATIO = 2.8

# Weight penalty applied per "_alt" suffix so that an alternate stream ranks
# just below the primary stream of the same quality (see stream_weight).
ALT_WEIGHT_MOD = 0.01

# Hard-coded weights for stream names that don't encode a quality
# numerically, grouped by the ranking group they belong to.
# NOTE(review): "WEIGTHS" looks like a typo of "WEIGHTS"; renaming would
# break any external code referencing this name — confirm before changing.
QUALITY_WEIGTHS_EXTRA = {
    "other": {
        "live": 1080,
    },
    "tv": {
        "hd": 1080,
        "sd": 576,
    },
    "quality": {
        "ehq": 720,
        "hq": 576,
        "sq": 360,
    },
}

# Comparison operators accepted in sorting filter expressions such as
# ">480p" or "<=1080p" (see stream_sorting_filter).
FILTER_OPERATORS = {
    "<": operator.lt,
    "<=": operator.le,
    ">": operator.gt,
    ">=": operator.ge,
}

# Matches "key=value" pairs where the value may be a {dict}, [list],
# (tuple), quoted string, or any bare token (used by parse_params).
PARAMS_REGEX = r"(\w+)=({.+?}|\[.+?\]|\(.+?\)|'(?:[^'\\]|\\')*'|\"(?:[^\"\\]|\\\")*\"|\S+)"

# Plugin priority levels returned by Plugin.priority() — used to decide
# which plugin gets to handle a URL when several can.
HIGH_PRIORITY = 30
NORMAL_PRIORITY = 20
LOW_PRIORITY = 10
NO_PRIORITY = 0
|
|
|
|
|
2017-08-02 20:24:42 +02:00
|
|
|
|
2013-08-29 01:49:49 +02:00
|
|
|
def stream_weight(stream):
    """Rank a stream name for quality sorting.

    :param stream: stream name, eg. "720p", "1080p60", "1500k", "live",
                   "720p_alt", "720p_3000k"
    :return: tuple of (weight, group) where group is one of the
             QUALITY_WEIGTHS_EXTRA groups, "bitrate", "pixels" or "none"
    """
    # Names with a hard-coded ranking (eg. "live", "hd") take precedence
    # over the numeric parsing below.
    for group, mapping in QUALITY_WEIGTHS_EXTRA.items():
        if stream in mapping:
            return mapping[stream], group

    match = re.match(r"^(\d+)(k|p)?(\d+)?(\+)?(?:_(\d+)k)?(?:_(alt)(\d)?)?$", stream)
    if not match:
        return 0, "none"

    base, kind, fps, plus, rate, alt, alt_num = match.group(1, 2, 3, 4, 5, 6, 7)

    weight = 0
    # Alternate streams ("_alt", "_alt2", ...) rank slightly below their
    # primary counterpart of the same quality.
    if alt:
        weight -= ALT_WEIGHT_MOD * (int(alt_num) if alt_num else 1)

    if kind == "k":  # pure bit rate, eg. "1500k"
        weight += int(base) / BIT_RATE_WEIGHT_RATIO
        return weight, "bitrate"

    if kind == "p":  # resolution, eg. "720p", "1080p60", "720p+"
        weight += int(base)
        if fps:  # fps suffix, eg. 60p or 50p
            weight += int(fps)
        if plus == "+":
            weight += 1
        if rate:  # bit rate classifier attached to a resolution
            weight += int(rate) / BIT_RATE_WEIGHT_RATIO
        return weight, "pixels"

    # Matched but carries no "k"/"p" marker — cannot classify it.
    return 0, "none"
|
2013-02-08 02:00:44 +01:00
|
|
|
|
2013-08-16 00:46:47 +02:00
|
|
|
|
2013-08-01 22:19:16 +02:00
|
|
|
def iterate_streams(streams):
    """Yield flattened (name, stream) pairs from *streams*.

    A plugin may map one name to a list of streams; each list entry is
    yielded as its own (name, stream) pair.
    """
    for name, value in streams:
        # Normalise single streams to a one-element list so both cases
        # share the same yield path.
        substreams = value if isinstance(value, list) else [value]
        for substream in substreams:
            yield (name, substream)
|
|
|
|
|
2013-08-08 15:01:32 +02:00
|
|
|
|
2013-08-01 22:19:16 +02:00
|
|
|
def stream_type_priority(stream_types, stream):
    """Return the sort rank of a (name, stream) pair by stream type.

    Lower values sort first. A type not present in *stream_types* falls
    back to the position of the "*" wildcard, or to 99 (sorts last) when
    no wildcard is given either.
    """
    shortname = type(stream[1]).shortname()

    # Prefer an exact type match; the "*" wildcard catches everything else.
    for candidate in (shortname, "*"):
        if candidate in stream_types:
            return stream_types.index(candidate)

    return 99
|
|
|
|
|
|
|
|
|
2013-08-29 01:49:49 +02:00
|
|
|
def stream_sorting_filter(expr, stream_weight):
    """Build a predicate that drops streams matched by the filter *expr*.

    *expr* has the form "[operator]<value>", eg. ">480p" or "720p"; valid
    operators are <, <=, > and >= — with no operator, equality is tested.

    :param expr: filter expression
    :param stream_weight: weighting function, called as stream_weight(name)
    :raises PluginError: when *expr* cannot be parsed
    :return: function suitable for use with filter() over stream names
    """
    parsed = re.match(r"(?P<op><=|>=|<|>)?(?P<value>[\w+]+)", expr)
    if not parsed:
        raise PluginError("Invalid filter expression: {0}".format(expr))

    op_token, value = parsed.group("op", "value")
    compare = FILTER_OPERATORS.get(op_token, operator.eq)
    threshold, threshold_group = stream_weight(value)

    def excluded(quality):
        weight, group = stream_weight(quality)
        # Only filter within the same ranking group; streams from other
        # groups are always kept.
        return group != threshold_group or not compare(weight, threshold)

    return excluded
|
2013-08-08 15:01:32 +02:00
|
|
|
|
|
|
|
|
2017-08-02 20:24:42 +02:00
|
|
|
def parse_url_params(url):
    """Split a "<url> key=value ..." string into the URL and parsed params.

    :param url: URL optionally followed by space-separated key=value pairs
    :return: tuple of (url, params dict)
    """
    # Everything after the first space is the parameter string; with no
    # space, partition() leaves it as the empty string.
    base_url, _, params = url.partition(" ")
    return base_url, parse_params(params)
|
|
|
|
|
|
|
|
|
|
|
|
def parse_params(params):
    """Parse a "key=value key2=value2 ..." string into a dict.

    Values that are valid Python literals (numbers, strings, lists, ...)
    are evaluated with ast.literal_eval; anything else is kept as the raw
    string.
    """
    parsed = {}

    for key, raw in re.findall(PARAMS_REGEX, params):
        try:
            parsed[key] = ast.literal_eval(raw)
        except Exception:
            # Not a Python literal — keep the raw text as-is.
            parsed[key] = raw

    return parsed
|
|
|
|
|
|
|
|
|
2018-06-22 01:15:21 +02:00
|
|
|
class UserInputRequester(object):
    """Interface for requesting text input from the user.

    eg. From the console. Subclasses implement :func:`ask` and
    :func:`ask_password`; the base methods only raise NotImplementedError.
    """

    def ask(self, prompt):
        """Ask the user for a text input; the input is not sensitive and
        can be echoed back to the user.

        :param prompt: message to display when asking for the input
        :return: the value the user input
        """
        raise NotImplementedError

    def ask_password(self, prompt):
        """Ask the user for a text input; the input _is_ sensitive and
        should be masked while the user types it.

        :param prompt: message to display when asking for the input
        :return: the value the user input
        """
        raise NotImplementedError
|
|
|
|
|
|
|
|
|
2013-02-08 02:00:44 +01:00
|
|
|
class Plugin(object):
    """A plugin can retrieve stream information from the URL specified.

    :param url: URL that the plugin will operate on
    """

    # Class-level state populated by bind(); an "unbound" plugin class
    # has cache/session left as None and cannot load or save cookies.
    cache = None
    logger = None
    module = "unknown"
    options = Options()
    arguments = Arguments()
    session = None
    _user_input_requester = None

    @classmethod
    def bind(cls, session, module, user_input_requester=None):
        """Bind this plugin class to a session.

        Sets up the plugin's cache, logger and, optionally, the user input
        requester used by :func:`input_ask` / :func:`input_ask_password`.

        :param session: session to bind the plugin to
        :param module: plugin module name, used for logging and as the
                       cache key prefix
        :param user_input_requester: optional UserInputRequester instance
        :raises RuntimeError: if *user_input_requester* has the wrong type
        """
        cls.cache = Cache(filename="plugin-cache.json",
                          key_prefix=module)
        cls.logger = logging.getLogger("streamlink.plugin." + module)
        cls.module = module
        cls.session = session
        if user_input_requester is not None:
            if isinstance(user_input_requester, UserInputRequester):
                cls._user_input_requester = user_input_requester
            else:
                raise RuntimeError("user-input-requester must be an instance of UserInputRequester")

    def __init__(self, url):
        self.url = url
        try:
            # Restore any cookies previously saved for this plugin.
            self.load_cookies()
        except RuntimeError:
            pass  # unbound cannot load

    @classmethod
    def can_handle_url(cls, url):
        """Return whether this plugin can handle *url*; must be overridden."""
        raise NotImplementedError

    @classmethod
    def set_option(cls, key, value):
        """Set a plugin option value."""
        cls.options.set(key, value)

    @classmethod
    def get_option(cls, key):
        """Return a plugin option value."""
        return cls.options.get(key)

    @classmethod
    def get_argument(cls, key):
        """Return a plugin argument definition."""
        return cls.arguments.get(key)

    @classmethod
    def stream_weight(cls, stream):
        """Rank a stream name; plugins may override for custom sorting."""
        return stream_weight(stream)

    @classmethod
    def default_stream_types(cls, streams):
        """Return the default stream type ordering, extended with any
        extra types found in *streams* (appended in discovery order)."""
        stream_types = ["rtmp", "hls", "hds", "http"]

        for name, stream in iterate_streams(streams):
            stream_type = type(stream).shortname()

            if stream_type not in stream_types:
                stream_types.append(stream_type)

        return stream_types

    @classmethod
    def broken(cls, issue=None):
        """Decorator that marks a plugin method as broken.

        Calling the decorated method raises a PluginError pointing at the
        tracking *issue*, if one is given.
        """
        def func(*args, **kwargs):
            msg = (
                "This plugin has been marked as broken. This is likely due to "
                "changes to the service preventing a working implementation. "
            )

            if issue:
                msg += "More info: https://github.com/streamlink/streamlink/issues/{0}".format(issue)

            raise PluginError(msg)

        def decorator(*args, **kwargs):
            # Replace the decorated callable entirely with the raising stub.
            return func

        return decorator

    @classmethod
    def priority(cls, url):
        """
        Return the plugin priority for a given URL, by default it returns
        NORMAL priority.
        :return: priority level
        """
        return NORMAL_PRIORITY

    def streams(self, stream_types=None, sorting_excludes=None):
        """Attempts to extract available streams.

        Returns a :class:`dict` containing the streams, where the key is
        the name of the stream, most commonly the quality and the value
        is a :class:`Stream` object.

        The result can contain the synonyms **best** and **worst** which
        points to the streams which are likely to be of highest and
        lowest quality respectively.

        If multiple streams with the same name are found, the order of
        streams specified in *stream_types* will determine which stream
        gets to keep the name while the rest will be renamed to
        "<name>_<stream type>".

        The synonyms can be fine tuned with the *sorting_excludes*
        parameter. This can be either of these types:

            - A list of filter expressions in the format
              *[operator]<value>*. For example the filter ">480p" will
              exclude streams ranked higher than "480p" from the list
              used in the synonyms ranking. Valid operators are >, >=, <
              and <=. If no operator is specified then equality will be
              tested.

            - A function that is passed to filter() with a list of
              stream names as input.


        :param stream_types: A list of stream types to return.
        :param sorting_excludes: Specify which streams to exclude from
                                 the best/worst synonyms.

        .. versionchanged:: 1.4.2
           Added *priority* parameter.

        .. versionchanged:: 1.5.0
           Renamed *priority* to *stream_types* and changed behaviour
           slightly.

        .. versionchanged:: 1.5.0
           Added *sorting_excludes* parameter.

        .. versionchanged:: 1.6.0
           *sorting_excludes* can now be a list of filter expressions
           or a function that is passed to filter().
        """

        try:
            ostreams = self._get_streams()
            if isinstance(ostreams, dict):
                ostreams = ostreams.items()

            # Flatten the iterator to a list so we can reuse it.
            if ostreams:
                ostreams = list(ostreams)
        except NoStreamsError:
            return {}
        except (IOError, OSError, ValueError) as err:
            raise PluginError(err)

        if not ostreams:
            return {}

        if stream_types is None:
            stream_types = self.default_stream_types(ostreams)

        # Add streams depending on stream type and priorities
        sorted_streams = sorted(iterate_streams(ostreams),
                                key=partial(stream_type_priority,
                                            stream_types))

        streams = {}
        for name, stream in sorted_streams:
            stream_type = type(stream).shortname()

            # Use * as wildcard to match other stream types
            if "*" not in stream_types and stream_type not in stream_types:
                continue

            # drop _alt from any stream names
            if name.endswith("_alt"):
                name = name[:-len("_alt")]

            existing = streams.get(name)
            if existing:
                existing_stream_type = type(existing).shortname()
                if existing_stream_type != stream_type:
                    name = "{0}_{1}".format(name, stream_type)

                if name in streams:
                    name = "{0}_alt".format(name)
                    num_alts = len(list(filter(lambda n: n.startswith(name), streams.keys())))

                    # We shouldn't need more than 2 alt streams
                    if num_alts >= 2:
                        continue
                    elif num_alts > 0:
                        name = "{0}{1}".format(name, num_alts + 1)

            # Validate stream name and discard the stream if it's bad.
            # Fixed: the previous class "[A-z0-9_+]" also matched the ASCII
            # range between "Z" and "a" ("[", "\", "]", "^", "`").
            match = re.match("([A-Za-z0-9_+]+)", name)
            if match:
                name = match.group(1)
            else:
                self.logger.debug("The stream '{0}' has been ignored "
                                  "since it is badly named.", name)
                continue

            # Force lowercase name and replace space with underscore.
            streams[name.lower()] = stream

        # Create the best/worst synonmys
        def stream_weight_only(s):
            # With a single stream, give it a non-zero weight so it still
            # qualifies for the best/worst synonyms.
            return (self.stream_weight(s)[0] or
                    (len(streams) == 1 and 1))

        stream_names = filter(stream_weight_only, streams.keys())
        sorted_streams = sorted(stream_names, key=stream_weight_only)

        if isinstance(sorting_excludes, list):
            for expr in sorting_excludes:
                filter_func = stream_sorting_filter(expr, self.stream_weight)
                sorted_streams = list(filter(filter_func, sorted_streams))
        elif callable(sorting_excludes):
            sorted_streams = list(filter(sorting_excludes, sorted_streams))

        final_sorted_streams = OrderedDict()

        for stream_name in sorted(streams, key=stream_weight_only):
            final_sorted_streams[stream_name] = streams[stream_name]

        if len(sorted_streams) > 0:
            best = sorted_streams[-1]
            worst = sorted_streams[0]
            final_sorted_streams["worst"] = streams[worst]
            final_sorted_streams["best"] = streams[best]

        return final_sorted_streams

    def get_streams(self, *args, **kwargs):
        """Deprecated since version 1.9.0.

        Has been renamed to :func:`Plugin.streams`, this is an alias
        for backwards compatibility.
        """

        return self.streams(*args, **kwargs)

    def _get_streams(self):
        """Return the available streams; must be overridden by plugins."""
        raise NotImplementedError

    def save_cookies(self, cookie_filter=None, default_expires=60 * 60 * 24 * 7):
        """
        Store the cookies from ``http`` in the plugin cache until they expire. The cookies can be filtered
        by supplying a filter method. eg. ``lambda c: "auth" in c.name``. If no expiry date is given in the
        cookie then the ``default_expires`` value will be used.

        :param cookie_filter: a function to filter the cookies
        :type cookie_filter: function
        :param default_expires: time (in seconds) until cookies with no expiry will expire
        :type default_expires: int
        :return: list of the saved cookie names
        """
        if not self.session or not self.cache:
            raise RuntimeError("Cannot cache cookies in unbound plugin")

        cookie_filter = cookie_filter or (lambda c: True)
        saved = []

        for cookie in filter(cookie_filter, self.session.http.cookies):
            cookie_dict = {}
            # Serialise the standard cookie attributes so the cookie can be
            # rebuilt later via requests.cookies.create_cookie(**value).
            for attr in ("version", "name", "value", "port", "domain", "path", "secure", "expires", "discard",
                         "comment", "comment_url", "rfc2109"):
                cookie_dict[attr] = getattr(cookie, attr, None)
            cookie_dict["rest"] = getattr(cookie, "rest", getattr(cookie, "_rest", None))

            expires = default_expires
            if cookie_dict['expires']:
                # Honour the cookie's own expiry (absolute epoch -> relative).
                expires = int(cookie_dict['expires'] - time.time())
            key = "__cookie:{0}:{1}:{2}:{3}".format(cookie.name,
                                                    cookie.domain,
                                                    cookie.port_specified and cookie.port or "80",
                                                    cookie.path_specified and cookie.path or "*")
            self.cache.set(key, cookie_dict, expires)
            saved.append(cookie.name)

        if saved:
            self.logger.debug("Saved cookies: {0}".format(", ".join(saved)))
        return saved

    def load_cookies(self):
        """
        Load any stored cookies for the plugin that have not expired.

        :return: list of the restored cookie names
        """
        if not self.session or not self.cache:
            raise RuntimeError("Cannot load cached cookies in unbound plugin")

        restored = []

        for key, value in self.cache.get_all().items():
            if key.startswith("__cookie"):
                cookie = requests.cookies.create_cookie(**value)
                self.session.http.cookies.set_cookie(cookie)
                restored.append(cookie.name)

        if restored:
            self.logger.debug("Restored cookies: {0}".format(", ".join(restored)))
        return restored

    def clear_cookies(self, cookie_filter=None):
        """
        Removes all of the saved cookies for this Plugin. To filter the cookies that are deleted
        specify the ``cookie_filter`` argument (see :func:`save_cookies`).

        :param cookie_filter: a function to filter the cookies
        :type cookie_filter: function
        :return: list of the removed cookie names
        """
        if not self.session or not self.cache:
            raise RuntimeError("Cannot load cached cookies in unbound plugin")

        cookie_filter = cookie_filter or (lambda c: True)
        removed = []

        for key, value in sorted(self.cache.get_all().items(), key=operator.itemgetter(0), reverse=True):
            if key.startswith("__cookie"):
                cookie = requests.cookies.create_cookie(**value)
                if cookie_filter(cookie):
                    del self.session.http.cookies[cookie.name]
                    # Setting a zero expiry evicts the entry from the cache.
                    self.cache.set(key, None, 0)
                    removed.append(key)

        return removed

    def input_ask(self, prompt):
        """Ask the user for a (non-sensitive) text input.

        :param prompt: message to display when asking for the input
        :raises FatalPluginError: when no input requester is available or
                                  the input fails
        """
        if self._user_input_requester:
            try:
                return self._user_input_requester.ask(prompt)
            except IOError as e:
                raise FatalPluginError("User input error: {0}".format(e))
            except NotImplementedError:  # ignore this and raise a FatalPluginError
                pass
        raise FatalPluginError("This plugin requires user input, however it is not supported on this platform")

    def input_ask_password(self, prompt):
        """Ask the user for a sensitive (masked) text input.

        :param prompt: message to display when asking for the input
        :raises FatalPluginError: when no input requester is available or
                                  the input fails
        """
        if self._user_input_requester:
            try:
                return self._user_input_requester.ask_password(prompt)
            except IOError as e:
                raise FatalPluginError("User input error: {0}".format(e))
            except NotImplementedError:  # ignore this and raise a FatalPluginError
                pass
        raise FatalPluginError("This plugin requires user input, however it is not supported on this platform")
|
|
|
|
|
|
|
|
|
2013-02-08 02:00:44 +01:00
|
|
|
|
|
|
|
# Only the Plugin class is public API; the module-level helpers are internal.
__all__ = ["Plugin"]
|