2018-06-30 23:59:17 +02:00
|
|
|
import argparse
|
2013-01-31 00:06:27 +01:00
|
|
|
import errno
|
2018-05-30 03:15:11 +02:00
|
|
|
import logging
|
2012-08-23 22:46:06 +02:00
|
|
|
import os
|
2017-12-13 19:12:03 +01:00
|
|
|
import platform
|
2013-06-09 07:50:22 +02:00
|
|
|
import signal
|
2020-10-26 14:46:17 +01:00
|
|
|
import sys
|
|
|
|
from collections import OrderedDict
|
2013-11-15 06:27:08 +01:00
|
|
|
from contextlib import closing
|
2013-08-31 14:24:12 +02:00
|
|
|
from distutils.version import StrictVersion
|
2014-08-12 21:10:20 +02:00
|
|
|
from functools import partial
|
2020-10-26 14:46:17 +01:00
|
|
|
from gettext import gettext
|
2014-08-12 21:10:20 +02:00
|
|
|
from itertools import chain
|
2021-05-28 20:54:42 +02:00
|
|
|
from pathlib import Path
|
2014-08-12 21:10:20 +02:00
|
|
|
from time import sleep
|
2021-11-10 11:20:14 +01:00
|
|
|
from typing import Dict, List, Type
|
2020-10-26 14:46:17 +01:00
|
|
|
|
|
|
|
import requests
|
|
|
|
from socks import __version__ as socks_version
|
2017-12-13 19:12:03 +01:00
|
|
|
from websocket import __version__ as websocket_version
|
2013-09-19 22:52:34 +02:00
|
|
|
|
2020-10-26 14:46:17 +01:00
|
|
|
import streamlink.logger as logger
|
|
|
|
from streamlink import NoPluginError, PluginError, StreamError, Streamlink, __version__ as streamlink_version
|
2016-09-19 21:46:06 +02:00
|
|
|
from streamlink.cache import Cache
|
2018-06-22 01:15:21 +02:00
|
|
|
from streamlink.exceptions import FatalPluginError
|
2021-09-02 17:35:59 +02:00
|
|
|
from streamlink.plugin import Plugin, PluginOptions
|
2021-11-10 11:20:14 +01:00
|
|
|
from streamlink.stream.stream import Stream, StreamIO
|
2021-09-17 16:19:46 +02:00
|
|
|
from streamlink.utils.named_pipe import NamedPipe
|
2020-10-26 14:46:17 +01:00
|
|
|
from streamlink_cli.argparser import build_parser
|
2021-06-05 22:17:42 +02:00
|
|
|
from streamlink_cli.compat import DeprecatedPath, is_win32, stdout
|
2020-10-26 14:46:17 +01:00
|
|
|
from streamlink_cli.console import ConsoleOutput, ConsoleUserInputRequester
|
2021-06-01 17:47:06 +02:00
|
|
|
from streamlink_cli.constants import CONFIG_FILES, DEFAULT_STREAM_METADATA, LOG_DIR, PLUGIN_DIRS, STREAM_SYNONYMS
|
2021-09-02 17:35:59 +02:00
|
|
|
from streamlink_cli.output import FileOutput, Output, PlayerOutput
|
2021-09-05 06:22:03 +02:00
|
|
|
from streamlink_cli.utils import Formatter, HTTPServer, datetime, ignored, progress, stream_to_url
|
2013-09-19 22:52:34 +02:00
|
|
|
|
|
|
|
# Errno values treated as a normal "reader went away" condition (player or
# HTTP client closed its end) rather than a real I/O failure.
ACCEPTABLE_ERRNO = (errno.EPIPE, errno.EINVAL, errno.ECONNRESET)
try:
    # WSAECONNABORTED only exists on Windows; elsewhere this raises AttributeError.
    ACCEPTABLE_ERRNO += (errno.WSAECONNABORTED,)
except AttributeError:
    pass  # Not windows

# CLI options under which regular console messages should be suppressed
QUIET_OPTIONS = ("json", "stream_url", "quiet")


# Module-level mutable state shared by the CLI entry points below.
# These are populated during startup (presumably by setup_* helpers defined
# elsewhere in this module — confirm against the full file).
args = None
console: ConsoleOutput = None
output: Output = None
stream_fd: StreamIO = None
streamlink: Streamlink = None

# Type alias: mapping of stream name -> Stream instance, as returned by plugins
Streams = Dict[str, Stream]


log = logging.getLogger("streamlink.cli")
|
2018-05-30 03:15:11 +02:00
|
|
|
|
2012-08-15 19:49:54 +02:00
|
|
|
|
2021-09-05 06:22:03 +02:00
|
|
|
def get_formatter(plugin: Plugin):
    """Build a Formatter for output paths/titles.

    All variables are resolved lazily via lambdas, so plugin metadata is
    only queried when a template actually references it.
    """
    variables = {
        "url": lambda: args.url,
        "id": lambda: plugin.get_id(),
        "author": lambda: plugin.get_author(),
        "category": lambda: plugin.get_category(),
        "game": lambda: plugin.get_category(),
        "title": lambda: plugin.get_title(),
        "time": lambda: datetime.now(),
    }
    # Per-variable formatter hooks: "{time:%Y}" style format specs
    formatter_specs = {
        "time": lambda dt, fmt: dt.strftime(fmt),
    }
    return Formatter(variables, formatter_specs)
|
|
|
|
|
|
|
|
|
2021-10-10 15:30:01 +02:00
|
|
|
def check_file_output(path: Path, force):
    """Return a FileOutput for *path*, guarding against accidental overwrite.

    If the file exists and --force was not given, either prompt the user
    (interactive stdin) or exit with an error (non-interactive).
    """
    log.debug("Checking file output")

    if path.is_file() and not force:
        if not sys.stdin.isatty():
            # No TTY to ask on: refuse to clobber the file.
            log.error(f"File {path} already exists, use --force to overwrite it.")
            sys.exit()
        answer = console.ask(f"File {path} already exists! Overwrite it? [y/N] ")
        if answer.lower() != "y":
            sys.exit()

    return FileOutput(path)
|
2012-05-25 17:26:11 +02:00
|
|
|
|
|
|
|
|
2021-08-29 23:57:55 +02:00
|
|
|
def create_output(formatter: Formatter):
    """Decides where to write the stream.

    Depending on arguments it can be one of these:
     - The stdout pipe
     - A subprocess' stdin pipe
     - A named pipe that the subprocess reads from
     - A regular file

    """

    # --record/--record-and-pipe are mutually exclusive with --output/--stdout
    if (args.output or args.stdout) and (args.record or args.record_and_pipe):
        console.exit("Cannot use record options with other file output options.")

    if args.output:
        # "-" is the conventional alias for stdout
        if args.output == "-":
            out = FileOutput(fd=stdout)
        else:
            out = check_file_output(formatter.path(args.output, args.fs_safe_rules), args.force)
    elif args.stdout:
        out = FileOutput(fd=stdout)
    elif args.record_and_pipe:
        # Write to stdout while simultaneously recording to a file
        record = check_file_output(formatter.path(args.record_and_pipe, args.fs_safe_rules), args.force)
        out = FileOutput(fd=stdout, record=record)
    else:
        # Player output path: optionally via named pipe or local HTTP server
        http = namedpipe = record = None

        if not args.player:
            console.exit("The default player (VLC) does not seem to be "
                         "installed. You must specify the path to a player "
                         "executable with --player.")

        if args.player_fifo:
            try:
                namedpipe = NamedPipe()
            except OSError as err:
                console.exit(f"Failed to create pipe: {err}")
        elif args.player_http:
            http = create_http_server()

        # --record may be combined with player output
        if args.record:
            record = check_file_output(formatter.path(args.record, args.fs_safe_rules), args.force)

        log.info(f"Starting player: {args.player}")

        out = PlayerOutput(
            args.player,
            args=args.player_args,
            quiet=not args.verbose_player,
            kill=not args.player_no_close,
            namedpipe=namedpipe,
            http=http,
            record=record,
            title=formatter.title(args.title, defaults=DEFAULT_STREAM_METADATA) if args.title else args.url
        )

    return out
|
2012-05-25 17:26:11 +02:00
|
|
|
|
2012-05-28 01:11:52 +02:00
|
|
|
|
2020-12-26 17:02:57 +01:00
|
|
|
def create_http_server(*_args, **_kwargs):
    """Creates a HTTP server listening on a given host and port.

    If host is empty, listen on all available interfaces, and if port is 0,
    listen on a random high port.
    """
    try:
        server = HTTPServer()
        server.bind(*_args, **_kwargs)
        return server
    except OSError as err:
        # console.exit terminates the process; no server is returned.
        console.exit(f"Failed to create HTTP server: {err}")
|
|
|
|
|
|
|
|
|
|
|
|
def iter_http_requests(server, player):
    """Yield accepted HTTP connections from *server*.

    Accepts forever when no player is given (external serving mode);
    otherwise keeps accepting only while the player is still running.
    Accept timeouts / socket errors are silently retried.
    """
    while not player or player.running:
        try:
            conn = server.open(timeout=2.5)
        except OSError:
            continue
        yield conn
|
|
|
|
|
|
|
|
|
2021-11-10 11:20:14 +01:00
|
|
|
def output_stream_http(plugin: Plugin, initial_streams: Streams, formatter: Formatter, external=False, port=0):
    """Continuously output the stream over HTTP.

    In non-external mode a local HTTP server is created and a player is
    launched pointing at it; in external mode the server listens on the given
    port for any client. For each accepted connection, streams are
    (re-)fetched and served until the client or player goes away.
    """
    global output

    if not external:
        if not args.player:
            console.exit("The default player (VLC) does not seem to be "
                         "installed. You must specify the path to a player "
                         "executable with --player.")

        server = create_http_server()
        player = output = PlayerOutput(
            args.player,
            args=args.player_args,
            filename=server.url,
            quiet=not args.verbose_player,
            title=formatter.title(args.title, defaults=DEFAULT_STREAM_METADATA) if args.title else args.url
        )

        try:
            log.info(f"Starting player: {args.player}")
            if player:
                player.open()
        except OSError as err:
            console.exit(f"Failed to start player: {args.player} ({err})")
    else:
        # External mode: no player process, just serve over HTTP
        server = create_http_server(host=None, port=port)
        player = None

        log.info("Starting server, access with one of:")
        for url in server.urls:
            log.info(" " + url)

    for req in iter_http_requests(server, player):
        user_agent = req.headers.get("User-Agent") or "unknown player"
        log.info(f"Got HTTP request from {user_agent}")

        # Keep trying to open a usable stream while the player is alive.
        stream_fd = prebuffer = None
        while not stream_fd and (not player or player.running):
            try:
                # Use the initially fetched streams once, then re-fetch
                streams = initial_streams or fetch_streams(plugin)
                initial_streams = None

                for stream_name in (resolve_stream_name(streams, s) for s in args.stream):
                    if stream_name in streams:
                        stream = streams[stream_name]
                        break
                else:
                    log.info("Stream not available, will re-fetch streams in 10 sec")
                    sleep(10)
                    continue
            except PluginError as err:
                log.error(f"Unable to fetch new streams: {err}")
                continue

            try:
                log.info(f"Opening stream: {stream_name} ({type(stream).shortname()})")
                stream_fd, prebuffer = open_stream(stream)
            except StreamError as err:
                log.error(err)

        if stream_fd and prebuffer:
            log.debug("Writing stream to player")
            read_stream(stream_fd, server, prebuffer, formatter)

        # Close the client connection but keep the server socket open
        server.close(True)

    player.close()
    server.close()
|
2013-09-19 22:52:34 +02:00
|
|
|
|
|
|
|
|
2021-08-29 23:57:55 +02:00
|
|
|
def output_stream_passthrough(stream, formatter: Formatter):
    """Hand the stream's URL directly to the player instead of piping data.

    Returns True if the player was started successfully, False otherwise.
    """
    global output

    # Quote the URL so the player receives it as a single argument
    filename = f'"{stream_to_url(stream)}"'
    output = PlayerOutput(
        args.player,
        args=args.player_args,
        filename=filename,
        call=True,
        quiet=not args.verbose_player,
        title=formatter.title(args.title, defaults=DEFAULT_STREAM_METADATA) if args.title else args.url
    )

    try:
        log.info(f"Starting player: {args.player}")
        output.open()
        return True
    except OSError as err:
        console.exit(f"Failed to start player: {args.player} ({err})")
        return False
|
|
|
|
|
|
|
|
|
|
|
|
def open_stream(stream):
    """Opens a stream and reads 8192 bytes from it.

    This is useful to check if a stream actually has data
    before opening the output.

    Returns a (stream_fd, prebuffer) tuple.
    Raises StreamError if the stream can't be opened or yields no data.
    """
    global stream_fd

    # Attempts to open the stream
    try:
        stream_fd = stream.open()
    except StreamError as err:
        # Chain the original error so its traceback/cause isn't lost
        raise StreamError(f"Could not open stream: {err}") from err

    # Read 8192 bytes before proceeding to check for errors.
    # This is to avoid opening the output unnecessarily.
    try:
        log.debug("Pre-buffering 8192 bytes")
        prebuffer = stream_fd.read(8192)
    except OSError as err:
        stream_fd.close()
        raise StreamError(f"Failed to read data from stream: {err}") from err

    if not prebuffer:
        # Nothing came back: treat an empty stream as an error
        stream_fd.close()
        raise StreamError("No data returned from stream")

    return stream_fd, prebuffer
|
2012-08-16 18:08:03 +02:00
|
|
|
|
2013-09-19 22:52:34 +02:00
|
|
|
|
2021-08-29 23:57:55 +02:00
|
|
|
def output_stream(stream, formatter: Formatter):
    """Open stream, create output and finally write the stream to output."""
    global output

    # create output before opening the stream, so file outputs can prompt on existing output
    output = create_output(formatter)

    # Retry opening the stream up to --retry-open times
    for attempt in range(args.retry_open):
        try:
            stream_fd, prebuffer = open_stream(stream)
            break
        except StreamError as err:
            log.error(f"Try {attempt + 1}/{args.retry_open}: Could not open stream {stream} ({err})")
    else:
        # Loop finished without a successful break
        return console.exit(f"Could not open stream {stream}, tried {args.retry_open} times, exiting")

    try:
        output.open()
    except OSError as err:
        if isinstance(output, PlayerOutput):
            console.exit(f"Failed to start player: {args.player} ({err})")
        else:
            console.exit(f"Failed to open output: {output.filename} ({err})")

    # Ensure the output is closed even if read_stream raises
    with closing(output):
        log.debug("Writing stream to output")
        read_stream(stream_fd, output, prebuffer, formatter)

    return True
|
|
|
|
|
|
|
|
|
2021-08-29 23:57:55 +02:00
|
|
|
def read_stream(stream, output, prebuffer, formatter: Formatter, chunk_size=8192):
    """Reads data from stream and then writes it to the output.

    The previously pre-buffered chunk is written first, then the stream is
    read in chunk_size pieces until exhausted, the reader disappears, or an
    unrecoverable error occurs. The stream is always closed on exit.
    """
    is_player = isinstance(output, PlayerOutput)
    is_http = isinstance(output, HTTPServer)
    is_fifo = is_player and output.namedpipe
    # Show a progress bar only for real file outputs attached to a TTY
    # (or when --force-progress is set)
    show_progress = (
        isinstance(output, FileOutput)
        and output.fd is not stdout
        and (sys.stdout.isatty() or args.force_progress)
    )
    # Same, but for the secondary --record file of a piped/player output
    show_record_progress = (
        hasattr(output, "record")
        and isinstance(output.record, FileOutput)
        and output.record.fd is not stdout
        and (sys.stdout.isatty() or args.force_progress)
    )

    # Prepend the prebuffer, then read until stream.read returns b""
    stream_iterator = chain(
        [prebuffer],
        iter(partial(stream.read, chunk_size), b"")
    )
    if show_progress:
        stream_iterator = progress(
            stream_iterator,
            prefix=os.path.basename(output.filename)
        )
    elif show_record_progress:
        stream_iterator = progress(
            stream_iterator,
            prefix=os.path.basename(output.record.filename)
        )

    try:
        for data in stream_iterator:
            # We need to check if the player process still exists when
            # using named pipes on Windows since the named pipe is not
            # automatically closed by the player.
            if is_win32 and is_fifo:
                output.player.poll()

                if output.player.returncode is not None:
                    log.info("Player closed")
                    break

            try:
                output.write(data)
            except OSError as err:
                # Broken pipe / connection reset from the reader side is a
                # normal shutdown, anything else is a fatal output error.
                if is_player and err.errno in ACCEPTABLE_ERRNO:
                    log.info("Player closed")
                elif is_http and err.errno in ACCEPTABLE_ERRNO:
                    log.info("HTTP connection closed")
                else:
                    console.exit(f"Error when writing to output: {err}, exiting")

                break
    except OSError as err:
        console.exit(f"Error when reading from stream: {err}, exiting")
    finally:
        stream.close()
        log.info("Stream ended")
|
2012-09-18 22:38:07 +02:00
|
|
|
|
2012-11-22 11:43:32 +01:00
|
|
|
|
2021-11-10 11:20:14 +01:00
|
|
|
def handle_stream(plugin: Plugin, streams: Streams, stream_name: str) -> None:
    """Decides what to do with the selected stream.

    Depending on arguments it can be one of these:
     - Output JSON representation
     - Output the stream URL
     - Continuously output the stream over HTTP
     - Output stream data to selected output

    """

    # Resolve synonyms like "best"/"worst" to the concrete stream name
    stream_name = resolve_stream_name(streams, stream_name)
    stream = streams[stream_name]

    # Print JSON representation of the stream
    if args.json:
        console.msg_json(
            stream,
            metadata=plugin.get_metadata()
        )

    elif args.stream_url:
        try:
            console.msg(stream.to_url())
        except TypeError:
            console.exit("The stream specified cannot be translated to a URL")

    # Output the stream
    else:
        # Find any streams with a '_alt' suffix and attempt
        # to use these in case the main stream is not usable.
        alt_streams = list(filter(lambda k: stream_name + "_alt" in k,
                                  sorted(streams.keys())))
        file_output = args.output or args.stdout

        formatter = get_formatter(plugin)

        # Try the selected stream first, then its "_alt" fallbacks
        for stream_name in [stream_name] + alt_streams:
            stream = streams[stream_name]
            stream_type = type(stream).shortname()

            if stream_type in args.player_passthrough and not file_output:
                log.info(f"Opening stream: {stream_name} ({stream_type})")
                success = output_stream_passthrough(stream, formatter)
            elif args.player_external_http:
                return output_stream_http(plugin, streams, formatter, external=True,
                                          port=args.player_external_http_port)
            elif args.player_continuous_http and not file_output:
                return output_stream_http(plugin, streams, formatter)
            else:
                log.info(f"Opening stream: {stream_name} ({stream_type})")
                success = output_stream(stream, formatter)

            if success:
                break
|
|
|
|
|
2011-08-15 04:37:22 +02:00
|
|
|
|
2021-11-10 11:20:14 +01:00
|
|
|
def fetch_streams(plugin: Plugin) -> Streams:
    """Fetch the plugin's streams, honoring the CLI stream-type and sorting options."""
    return plugin.streams(
        stream_types=args.stream_types,
        sorting_excludes=args.stream_sorting_excludes,
    )
|
2013-09-19 22:52:34 +02:00
|
|
|
|
|
|
|
|
2021-11-10 11:20:14 +01:00
|
|
|
def fetch_streams_with_retry(plugin: Plugin, interval: float, count: int) -> Streams:
    """Fetch streams, retrying every *interval* seconds until some are found.

    A positive *count* caps the number of retries; zero retries forever.
    Fatal plugin errors are propagated immediately.
    """
    try:
        streams = fetch_streams(plugin)
    except PluginError as err:
        log.error(err)
        streams = None

    if not streams:
        log.info(f"Waiting for streams, retrying every {interval} second(s)")

    retries = 0
    while not streams:
        sleep(interval)

        try:
            streams = fetch_streams(plugin)
        except FatalPluginError:
            # Fatal errors must not be swallowed by the retry loop
            raise
        except PluginError as err:
            log.error(err)

        if count > 0:
            retries += 1
            if retries >= count:
                break

    return streams
|
|
|
|
|
|
|
|
|
2021-11-10 11:20:14 +01:00
|
|
|
def resolve_stream_name(streams: Streams, stream_name: str) -> str:
    """Map a synonym name (e.g. "best"/"worst") to the concrete stream it points at.

    Non-synonym names, or synonyms not present in *streams*, are returned unchanged.
    """
    if stream_name not in STREAM_SYNONYMS or stream_name not in streams:
        return stream_name

    target = streams[stream_name]
    for name, stream in streams.items():
        # The real name is the non-synonym key referring to the same object
        if stream is target and name not in STREAM_SYNONYMS:
            return name

    return stream_name
|
|
|
|
|
|
|
|
|
2021-11-10 11:20:14 +01:00
|
|
|
def format_valid_streams(plugin: Plugin, streams: Streams) -> str:
    """Formats a dict of streams as a single comma-separated listing.

    Filters out synonyms and displays them next to
    the stream they point to.

    Streams are sorted according to their quality
    (based on plugin.stream_weight).
    """
    delimiter = ", "
    validstreams = []

    # Sort by the plugin's quality weighting of each stream name
    for name, stream in sorted(streams.items(),
                               key=lambda item: plugin.stream_weight(item[0])):
        if name in STREAM_SYNONYMS:
            continue

        # Collect all other names referring to this exact stream object.
        # Compare names with != rather than the fragile identity check
        # ("n is not name") the original used, which relied on dict keys
        # being the same string objects.
        synonyms = [n for n in streams.keys() if streams[n] is stream and n != name]

        if synonyms:
            name = f"{name} ({delimiter.join(synonyms)})"

        validstreams.append(name)

    return delimiter.join(validstreams)
|
2013-03-19 04:44:41 +01:00
|
|
|
|
|
|
|
|
2013-07-17 01:44:42 +02:00
|
|
|
def handle_url():
    """The URL handler.

    Attempts to resolve the URL to a plugin and then attempts
    to fetch a list of available streams.

    Proceeds to handle stream if user specified a valid one,
    otherwise output list of valid streams.

    """

    try:
        pluginclass, resolved_url = streamlink.resolve_url(args.url)
        setup_plugin_options(streamlink, pluginclass)
        plugin = pluginclass(resolved_url)
        log.info(f"Found matching plugin {plugin.module} for URL {args.url}")

        # Retry fetching if either retry option is set; otherwise fetch once
        if args.retry_max or args.retry_streams:
            retry_streams = 1
            retry_max = 0
            if args.retry_streams:
                retry_streams = args.retry_streams
            if args.retry_max:
                retry_max = args.retry_max
            streams = fetch_streams_with_retry(plugin, retry_streams, retry_max)
        else:
            streams = fetch_streams(plugin)
    except NoPluginError:
        console.exit(f"No plugin can handle URL: {args.url}")
    except PluginError as err:
        console.exit(err)

    if not streams:
        console.exit(f"No playable streams found on this URL: {args.url}")

    # Fall back to --default-stream unless a stream or JSON output was requested
    if args.default_stream and not args.stream and not args.json:
        args.stream = args.default_stream

    if args.stream:
        validstreams = format_valid_streams(plugin, streams)
        # Use the first requested stream that is actually available
        for stream_name in args.stream:
            if stream_name in streams:
                log.info(f"Available streams: {validstreams}")
                handle_stream(plugin, streams, stream_name)
                return

        err = f"The specified stream(s) '{', '.join(args.stream)}' could not be found"
        if args.json:
            console.msg_json(
                plugin=plugin.module,
                metadata=plugin.get_metadata(),
                streams=streams,
                error=err
            )
        else:
            console.exit(f"{err}.\n Available streams: {validstreams}")
    elif args.json:
        console.msg_json(
            plugin=plugin.module,
            metadata=plugin.get_metadata(),
            streams=streams
        )
    elif args.stream_url:
        # No stream selected: use the last (highest-weighted) stream's manifest URL
        try:
            console.msg(streams[list(streams)[-1]].to_manifest_url())
        except TypeError:
            console.exit("The stream specified cannot be translated to a URL")
    else:
        validstreams = format_valid_streams(plugin, streams)
        console.msg(f"Available streams: {validstreams}")
|
2013-03-19 04:44:41 +01:00
|
|
|
|
2011-08-15 04:37:22 +02:00
|
|
|
|
2013-07-17 01:44:42 +02:00
|
|
|
def print_plugins():
    """Outputs a list of all plugins Streamlink has loaded."""
    loaded = list(streamlink.get_plugins().keys())

    if args.json:
        console.msg_json(loaded)
    else:
        formatted = ", ".join(sorted(loaded))
        console.msg(f"Loaded plugins: {formatted}")
|
2013-03-19 04:44:41 +01:00
|
|
|
|
2011-08-15 04:37:22 +02:00
|
|
|
|
2021-06-01 17:47:06 +02:00
|
|
|
def load_plugins(dirs: List[Path], showwarning: bool = True):
    """Attempts to load plugins from a list of directories.

    Non-directory paths are skipped (optionally with a warning); loading
    from a deprecated path logs a migration hint.
    """
    for path in dirs:
        if not path.is_dir():
            if showwarning:
                log.warning(f"Plugin path {path} does not exist or is not a directory!")
            continue

        loaded = streamlink.load_plugins(str(path))
        if loaded and type(path) is DeprecatedPath:
            log.info(f"Loaded plugins from deprecated path, see CLI docs for how to migrate: {path}")
|
2013-03-19 04:44:41 +01:00
|
|
|
|
2011-08-15 04:37:22 +02:00
|
|
|
|
2021-06-01 17:47:06 +02:00
|
|
|
def setup_args(parser: argparse.ArgumentParser, config_files: List[Path] = None, ignore_unknown: bool = False):
    """Parses arguments into the global ``args`` namespace."""
    global args

    # Config files are injected via argparse "@file" references, placed before
    # the CLI arguments so that explicitly given CLI values take precedence.
    fileargs = [f"@{config_file}" for config_file in (config_files or [])]
    args, unknown = parser.parse_known_args(fileargs + sys.argv[1:])

    if unknown and not ignore_unknown:
        parser.error(gettext("unrecognized arguments: %s") % " ".join(unknown))

    # Force lowercase to allow case-insensitive lookup
    if args.stream:
        args.stream = [name.lower() for name in args.stream]

    # fall back to the keyword-style URL argument when no positional URL was given
    if not args.url and args.url_param:
        args.url = args.url_param
|
|
|
|
|
2013-07-17 01:44:42 +02:00
|
|
|
|
2020-10-17 22:17:20 +02:00
|
|
|
def setup_config_args(parser, ignore_unknown=False):
    """Collects config files to load and re-parses arguments with them applied.

    Three sources are considered, in this order:
    1. explicitly given ``--config`` files (all that exist),
    2. otherwise the first existing default config from ``CONFIG_FILES``,
    3. additionally, the first existing plugin-specific config for the
       plugin that matches ``args.url`` (if any).
    If anything was found, ``setup_args`` is called again with the list.
    """
    config_files = []

    if args.config:
        # We want the config specified last to get highest priority
        for config_file in map(lambda path: Path(path).expanduser(), reversed(args.config)):
            if config_file.is_file():
                config_files.append(config_file)
    else:
        # Only load first available default config
        for config_file in filter(lambda path: path.is_file(), CONFIG_FILES):
            if type(config_file) is DeprecatedPath:
                log.info(f"Loaded config from deprecated path, see CLI docs for how to migrate: {config_file}")
            config_files.append(config_file)
            break

    if streamlink and args.url:
        # Only load first available plugin config
        with ignored(NoPluginError):
            # resolve the plugin for the URL; NoPluginError is swallowed above
            pluginclass, resolved_url = streamlink.resolve_url(args.url)
            for config_file in CONFIG_FILES:
                # plugin configs are named "<config>.<plugin module name>"
                config_file = config_file.with_name(f"{config_file.name}.{pluginclass.module}")
                if not config_file.is_file():
                    continue
                if type(config_file) is DeprecatedPath:
                    log.info(f"Loaded plugin config from deprecated path, see CLI docs for how to migrate: {config_file}")
                config_files.append(config_file)
                break

    if config_files:
        # re-parse so values from the collected config files are applied
        setup_args(parser, config_files, ignore_unknown=ignore_unknown)
|
2014-06-04 23:20:33 +02:00
|
|
|
|
|
|
|
|
2021-05-28 20:54:42 +02:00
|
|
|
def setup_signals():
    """Restore the default KeyboardInterrupt-raising handler for SIGINT (and
    SIGTERM), so cleanup code can be run when the user stops execution."""
    for signum in (signal.SIGINT, signal.SIGTERM):
        signal.signal(signum, signal.default_int_handler)
|
|
|
|
|
|
|
|
|
2014-03-14 21:02:34 +01:00
|
|
|
def setup_http_session():
    """Sets the global HTTP settings, such as proxy and headers."""
    # Each entry: (args attribute, session option name, value transform).
    # An option is only set when the corresponding argument is truthy; the
    # transform (if any) converts the argument value into the option value.
    settings = (
        ("http_proxy", "http-proxy", None),
        ("https_proxy", "https-proxy", None),
        ("http_cookie", "http-cookies", dict),
        ("http_header", "http-headers", dict),
        ("http_query_param", "http-query-params", dict),
        ("http_ignore_env", "http-trust-env", lambda _: False),
        ("http_no_ssl_verify", "http-ssl-verify", lambda _: False),
        ("http_disable_dh", "http-disable-dh", lambda _: True),
        ("http_ssl_cert", "http-ssl-cert", None),
        ("http_ssl_cert_crt_key", "http-ssl-cert", tuple),
        ("http_timeout", "http-timeout", None),
    )
    for attr, option, transform in settings:
        value = getattr(args, attr)
        if value:
            streamlink.set_option(option, value if transform is None else transform(value))
|
2013-10-26 02:36:16 +02:00
|
|
|
|
2014-12-06 17:07:14 +01:00
|
|
|
|
2018-06-01 00:16:52 +02:00
|
|
|
def setup_plugins(extra_plugin_dir=None):
    """Loads any additional plugins."""
    # built-in plugin directories never warn about missing paths
    load_plugins(PLUGIN_DIRS, showwarning=False)

    if not extra_plugin_dir:
        return
    extra_dirs = [Path(directory).expanduser() for directory in extra_plugin_dir]
    load_plugins(extra_dirs)
|
2013-07-17 01:44:42 +02:00
|
|
|
|
|
|
|
|
2016-09-19 21:46:06 +02:00
|
|
|
def setup_streamlink():
    """Creates the Streamlink session."""
    global streamlink

    # route plugin input prompts (e.g. credentials) through the CLI console
    session_options = {"user-input-requester": ConsoleUserInputRequester(console)}
    streamlink = Streamlink(session_options)
|
2013-07-17 01:44:42 +02:00
|
|
|
|
2013-03-19 04:44:41 +01:00
|
|
|
|
2013-07-17 01:44:42 +02:00
|
|
|
def setup_options():
    """Sets Streamlink options from the parsed CLI arguments.

    Every argument listed below maps to the session option of the same name
    with underscores replaced by dashes, and is only applied when truthy.
    The order of the entries is significant: the generic ``stream_*``
    arguments come after the deprecated stream-type specific ones, so the
    generic values take precedence.
    """
    ordered_args = (
        "interface",
        "ipv4",
        "ipv6",
        "ringbuffer_size",
        "mux_subtitles",
        "hls_live_edge",
        "hls_segment_stream_data",
        "hls_playlist_reload_attempts",
        "hls_playlist_reload_time",
        "hls_segment_ignore_names",
        "hls_segment_key_uri",
        "hls_audio_select",
        "hls_start_offset",
        "hls_duration",
        "hls_live_restart",
        # deprecated stream-type specific arguments
        "hls_segment_attempts",
        "hls_segment_threads",
        "hls_segment_timeout",
        "hls_timeout",
        "http_stream_timeout",
        # generic stream- arguments take precedence over deprecated stream-type arguments
        "stream_segment_attempts",
        "stream_segment_threads",
        "stream_segment_timeout",
        "stream_timeout",
        "ffmpeg_ffmpeg",
        "ffmpeg_verbose",
        "ffmpeg_verbose_path",
        "ffmpeg_fout",
        "ffmpeg_video_transcode",
        "ffmpeg_audio_transcode",
        "ffmpeg_copyts",
        "ffmpeg_start_at_zero",
    )
    for argname in ordered_args:
        value = getattr(args, argname)
        if value:
            streamlink.set_option(argname.replace("_", "-"), value)

    # locale is always applied, even when unset
    streamlink.set_option("locale", args.locale)
|
|
|
|
|
2015-02-03 20:18:40 +01:00
|
|
|
|
2018-05-25 23:25:15 +02:00
|
|
|
def setup_plugin_args(session, parser):
    """Registers all plugin-specific arguments with the argument parser.

    Each plugin gets its own argument group. Non-global plugin arguments are
    added as new CLI options; global ones are linked to the already-existing
    parser action of the same dest. The collected per-argument defaults are
    stored back on each plugin as its ``PluginOptions``.
    """
    plugin_args = parser.add_argument_group("Plugin options")
    for pname, plugin in session.plugins.items():
        defaults = {}
        group = plugin_args.add_argument_group(pname.capitalize())

        for parg in plugin.arguments:
            if not parg.is_global:
                group.add_argument(parg.argument_name(pname), **parg.options)
                defaults[parg.dest] = parg.default
            else:
                pargdest = parg.dest
                # NOTE: relies on the private argparse attribute `_actions`
                for action in parser._actions:
                    # find matching global argument
                    if pargdest != action.dest:
                        continue
                    defaults[pargdest] = action.default

                    # add plugin to global argument
                    plugins = getattr(action, "plugins", [])
                    plugins.append(pname)
                    setattr(action, "plugins", plugins)

        plugin.options = PluginOptions(defaults)
|
2017-03-02 12:13:41 +01:00
|
|
|
|
2017-03-03 20:15:08 +01:00
|
|
|
|
2021-11-10 11:20:14 +01:00
|
|
|
def setup_plugin_options(session: Streamlink, plugin: Type[Plugin]):
    """Applies parsed argument values as plugin options on the session.

    Suppressed arguments are skipped. Required arguments (and arguments that
    other set arguments depend on, via ``arguments.requires``) which still
    have no value are prompted for interactively on the console, using a
    hidden prompt for sensitive values.
    """
    pname = plugin.module
    # preserves prompt order while de-duplicating required arguments by name
    required = OrderedDict({})

    for parg in plugin.arguments:
        # hidden/suppressed arguments are never applied or prompted for
        if parg.options.get("help") == argparse.SUPPRESS:
            continue

        value = getattr(args, parg.dest if parg.is_global else parg.namespace_dest(pname))
        session.set_plugin_option(pname, parg.dest, value)

        if not parg.is_global:
            if parg.required:
                required[parg.name] = parg
            # if the value is set, check to see if any of the required arguments are not set
            if parg.required or value:
                try:
                    for rparg in plugin.arguments.requires(parg.name):
                        required[rparg.name] = rparg
                except RuntimeError:
                    # circular/invalid argument dependency declared by the plugin
                    log.error(f"{pname} plugin has a configuration error and the arguments cannot be parsed")
                    break

    if required:
        for req in required.values():
            if not session.get_plugin_option(pname, req.dest):
                prompt = f"{req.prompt or f'Enter {pname} {req.name}'}: "
                session.set_plugin_option(
                    pname,
                    req.dest,
                    # sensitive values (e.g. passwords) are read without echo
                    console.askpass(prompt) if req.sensitive else console.ask(prompt)
                )
|
2018-04-30 04:29:24 +02:00
|
|
|
|
2013-04-11 13:28:12 +02:00
|
|
|
|
2021-05-28 20:28:23 +02:00
|
|
|
def log_root_warning():
    """Logs an informational warning when running with root privileges.

    No-op on platforms without effective-UID support (e.g. Windows).
    """
    # Fix: the original guarded with hasattr(os, "getuid") but then called
    # os.geteuid() — check for the attribute that is actually used.
    if hasattr(os, "geteuid"):
        if os.geteuid() == 0:
            log.info("streamlink is running as root! Be careful!")
|
2013-07-17 02:14:47 +02:00
|
|
|
|
2013-11-03 23:04:47 +01:00
|
|
|
|
2018-01-16 19:11:58 +01:00
|
|
|
def log_current_versions():
    """Show current installed versions"""
    # these are debug-level messages only; skip all work otherwise
    if not logger.root.isEnabledFor(logging.DEBUG):
        return

    if sys.platform == "darwin":
        # macOS
        os_version = f"macOS {platform.mac_ver()[0]}"
    elif sys.platform == "win32":
        # Windows
        os_version = f"{platform.system()} {platform.release()}"
    else:
        # Linux / other
        os_version = platform.platform()

    for message in (
        f"OS: {os_version}",
        f"Python: {platform.python_version()}",
        f"Streamlink: {streamlink_version}",
        f"Requests({requests.__version__}), Socks({socks_version}), Websocket({websocket_version})",
    ):
        log.debug(message)
|
2017-12-13 19:12:03 +01:00
|
|
|
|
|
|
|
|
2021-03-21 02:00:01 +01:00
|
|
|
def log_current_arguments(session, parser):
    """Logs all arguments whose values differ from their defaults (debug level).

    Values of arguments marked sensitive by any plugin are masked in the output.
    """
    global args
    # debug-level output only; skip all work otherwise
    if not logger.root.isEnabledFor(logging.DEBUG):
        return

    # collect the option names of all sensitive plugin arguments
    sensitive = set()
    for pname, plugin in session.plugins.items():
        for parg in plugin.arguments:
            if parg.sensitive:
                sensitive.add(parg.argument_name(pname))

    log.debug("Arguments:")
    # NOTE: relies on the private argparse attribute `_actions`
    for action in parser._actions:
        if not hasattr(args, action.dest):
            continue
        value = getattr(args, action.dest)
        if action.default != value:
            # prefer the long "--option" name; fall back to the first option
            # string, or the dest for positional arguments
            name = next(  # pragma: no branch
                (option for option in action.option_strings if option.startswith("--")),
                action.option_strings[0]
            ) if action.option_strings else action.dest
            # mask sensitive values with "********"
            log.debug(f" {name}={value if name not in sensitive else '*' * 8}")
|
|
|
|
|
|
|
|
|
2015-02-03 20:18:40 +01:00
|
|
|
def check_version(force=False):
    """Compares the installed version against the latest release on PyPI.

    Logs an info message when an update is available (at most once per cache
    window). When forced, always logs the result and exits the process.
    """
    cache = Cache(filename="cli.json")
    latest_version = cache.get("latest_version")

    if force or not latest_version:
        # refresh the cached latest-version info from PyPI (kept for one day)
        res = requests.get("https://pypi.python.org/pypi/streamlink/json")
        latest_version = res.json().get("info").get("version")
        cache.set("latest_version", latest_version, 60 * 60 * 24)

    # avoid repeating the notice unless explicitly requested
    if not force and cache.get("version_info_printed"):
        return

    installed_version = StrictVersion(streamlink.version)
    latest_version = StrictVersion(latest_version)

    if latest_version > installed_version:
        log.info(f"A new version of Streamlink ({latest_version}) is available!")
        # suppress the notice for the next six hours
        cache.set("version_info_printed", True, 60 * 60 * 6)
    elif force:
        log.info(f"Your Streamlink version ({installed_version}) is up to date!")

    if force:
        sys.exit()
|
2013-08-31 14:24:12 +02:00
|
|
|
|
2013-08-08 15:01:32 +02:00
|
|
|
|
2021-05-28 20:54:42 +02:00
|
|
|
def setup_logger_and_console(stream=sys.stdout, filename=None, level="info", json=False):
    """Configures the root logger and creates the global console output."""
    global console

    # "-" selects an automatically named log file inside LOG_DIR
    if filename == "-":
        filename = LOG_DIR / f"{datetime.now()}.log"
    elif filename:
        filename = Path(filename).expanduser().resolve()
    if filename:
        filename.parent.mkdir(parents=True, exist_ok=True)

    # trace level gets timestamps (with microseconds) prepended
    trace = level == "trace"
    streamhandler = logger.basicConfig(
        stream=stream,
        filename=filename,
        level=level,
        style="{",
        format=("[{asctime}]" if trace else "") + "[{name}][{levelname}] {message}",
        datefmt="%H:%M:%S" + (".%f" if trace else "")
    )

    console = ConsoleOutput(streamhandler.stream, json)
|
|
|
|
|
2018-05-30 03:15:11 +02:00
|
|
|
|
2011-08-15 04:37:22 +02:00
|
|
|
def main():
    """CLI entry point: parses arguments, builds the session, and dispatches
    the requested action, exiting with an appropriate error code."""
    error_code = 0
    parser = build_parser()

    setup_args(parser, ignore_unknown=True)
    # call argument set up as early as possible to load args from config files
    setup_config_args(parser, ignore_unknown=True)

    # Console output should be on stderr if we are outputting
    # a stream to stdout.
    if args.stdout or args.output == "-" or args.record_and_pipe:
        console_out = sys.stderr
    else:
        console_out = sys.stdout

    # We don't want log output when we are printing JSON or a command-line.
    silent_log = any(getattr(args, attr) for attr in QUIET_OPTIONS)
    log_level = args.loglevel if not silent_log else "none"
    log_file = args.logfile if log_level != "none" else None
    setup_logger_and_console(console_out, log_file, log_level, args.json)

    setup_streamlink()
    # load additional plugins
    setup_plugins(args.plugin_dirs)
    setup_plugin_args(streamlink, parser)
    # call setup args again once the plugin specific args have been added
    setup_args(parser)
    setup_config_args(parser)

    # update the logging level if changed by a plugin specific config
    log_level = args.loglevel if not silent_log else "none"
    logger.root.setLevel(log_level)

    setup_http_session()

    log_root_warning()
    log_current_versions()
    log_current_arguments(streamlink, parser)

    setup_signals()

    # version check against PyPI; any failure (e.g. network) is ignored
    if args.version_check or args.auto_version_check:
        with ignored(Exception):
            check_version(force=args.version_check)

    # dispatch exactly one action based on the parsed arguments
    if args.help:
        parser.print_help()
    elif args.plugins:
        print_plugins()
    elif args.can_handle_url:
        # exit code 1 signals that no plugin matches the URL
        try:
            streamlink.resolve_url(args.can_handle_url)
        except NoPluginError:
            error_code = 1
        except KeyboardInterrupt:
            error_code = 130
    elif args.can_handle_url_no_redirect:
        try:
            streamlink.resolve_url_no_redirect(args.can_handle_url_no_redirect)
        except NoPluginError:
            error_code = 1
        except KeyboardInterrupt:
            error_code = 130
    elif args.url:
        try:
            setup_options()
            handle_url()
        except KeyboardInterrupt:
            # Close output
            if output:
                output.close()
            console.msg("Interrupted! Exiting...")
            # 130 is the conventional exit code for SIGINT
            error_code = 130
        finally:
            if stream_fd:
                try:
                    log.info("Closing currently open stream...")
                    stream_fd.close()
                except KeyboardInterrupt:
                    error_code = 130
    else:
        # no action requested: print usage and point at the docs
        usage = parser.format_usage()
        console.msg(
            f"{usage}\n"
            f"Use -h/--help to see the available options or read the manual at https://streamlink.github.io"
        )

    sys.exit(error_code)
|
2018-05-25 23:25:15 +02:00
|
|
|
|
|
|
|
|
|
|
|
def parser_helper():
    """Returns an argument parser populated with all plugin-specific arguments."""
    # a temporary session is needed only to enumerate the loaded plugins
    temp_session = Streamlink()
    complete_parser = build_parser()
    setup_plugin_args(temp_session, complete_parser)
    return complete_parser
|