chore: add "ISC" rules to ruff config

This commit is contained in:
bastimeyer 2023-02-09 20:06:20 +01:00 committed by Forrest
parent 17daf563a3
commit 4622c9728e
20 changed files with 144 additions and 124 deletions

View File

@@ -82,9 +82,12 @@ select = [
"COM",
# flake8-comprehensions
"C4",
# flake8-implicit-str-concat
"ISC",
]
extend-ignore = [
"C408", # unnecessary-collection-call
"ISC003", # explicit-string-concatenation
]
extend-exclude = [
"docs/conf.py",
@@ -109,6 +112,9 @@ ban-relative-imports = "all"
[tool.ruff.flake8-quotes]
avoid-escape = false
[tool.ruff.flake8-implicit-str-concat]
allow-multiline = false
# https://mypy.readthedocs.io/en/stable/config_file.html
[tool.mypy]

View File

@@ -141,9 +141,11 @@ class AbemaTV(Plugin):
_SLOTM3U8 = "https://vod-abematv.akamaized.net/slot/{0}/playlist.m3u8"
SECRETKEY = (b"v+Gjs=25Aw5erR!J8ZuvRrCx*rGswhB&qdHd_SYerEWdU&a?3DzN9B"
b"Rbp5KwY4hEmcj5#fykMjJ=AuWz5GSMY-d@H7DMEh3M@9n2G552Us$$"
b"k9cD=3TxwWe86!x#Zyhe")
SECRETKEY = (
b"v+Gjs=25Aw5erR!J8ZuvRrCx*rGswhB&qdHd_SYerEWdU&a?3DzN9B"
+ b"Rbp5KwY4hEmcj5#fykMjJ=AuWz5GSMY-d@H7DMEh3M@9n2G552Us$$"
+ b"k9cD=3TxwWe86!x#Zyhe"
)
_USER_SCHEMA = validate.Schema({"profile": {"userId": str}, "token": str})

View File

@@ -58,8 +58,10 @@ class BBCiPlayer(Plugin):
state_re = re.compile(r"window.__IPLAYER_REDUX_STATE__\s*=\s*({.*?});</script>")
account_locals_re = re.compile(r"window.bbcAccount.locals\s*=\s*({.*?});")
hash = base64.b64decode(b"N2RmZjc2NzFkMGM2OTdmZWRiMWQ5MDVkOWExMjE3MTk5MzhiOTJiZg==")
api_url = "https://open.live.bbc.co.uk/mediaselector/6/select/version/2.0/mediaset/" \
"{platform}/vpid/{vpid}/format/json/atk/{vpid_hash}/asn/1/"
api_url = (
"https://open.live.bbc.co.uk/mediaselector/6/select/version/2.0/mediaset/"
+ "{platform}/vpid/{vpid}/format/json/atk/{vpid_hash}/asn/1/"
)
platforms = ("pc", "iptv-all")
session_url = "https://session.bbc.com/session"
auth_url = "https://account.bbc.com/signin"
@@ -184,11 +186,13 @@ class BBCiPlayer(Plugin):
if not self.get_option("username"):
log.error(
"BBC iPlayer requires an account you must login using "
"--bbciplayer-username and --bbciplayer-password")
+ "--bbciplayer-username and --bbciplayer-password",
)
return
log.info(
"A TV License is required to watch BBC iPlayer streams, see the BBC website for more "
"information: https://www.bbc.co.uk/iplayer/help/tvlicence")
+ "information: https://www.bbc.co.uk/iplayer/help/tvlicence",
)
if not self.login(self.url):
log.error(
"Could not authenticate, check your username and password")

View File

@@ -105,7 +105,7 @@ class CMMedia(Plugin):
self.session,
(
f"https://cdnapisec.kaltura.com/p/{p}/sp/{sp}/playManifest/entryId/{json['meta']['id']}"
f"/flavorIds/{asset['id']}/format/applehttp/protocol/https/a.m3u8"
+ f"/flavorIds/{asset['id']}/format/applehttp/protocol/https/a.m3u8"
),
name_fmt="{pixels}_{bitrate}",
).items()

View File

@@ -401,10 +401,7 @@ class Crunchyroll(Plugin):
except CrunchyrollAPIError as err:
raise PluginError(f"Authentication error: {err.msg}")
if not api.auth:
log.warning(
"No authentication provided, you won't be able to access "
"premium restricted content",
)
log.warning("No authentication provided, you won't be able to access premium restricted content")
return api

View File

@@ -186,8 +186,7 @@ class NicoLive(Plugin):
if not wss_api_url:
log.error(
"Failed to get wss_api_url. "
"Please check if the URL is correct, "
"and make sure your account has access to the video.",
+ "Please check if the URL is correct, and make sure your account has access to the video.",
)
return

View File

@@ -6,6 +6,7 @@ $type live, vod
import logging
import re
from textwrap import dedent
from urllib.parse import urlparse
from streamlink.plugin import Plugin, pluginmatcher
@@ -89,19 +90,19 @@ class Picarto(Plugin):
def get_vod(self, vod_id):
data = {
"query": (
"query ($videoId: ID!) {\n"
" video(id: $videoId) {\n"
" id\n"
" title\n"
" file_name\n"
" video_recording_image_url\n"
" channel {\n"
" name\n"
" }"
" }\n"
"}\n"
),
"query": dedent("""
query ($videoId: ID!) {
video(id: $videoId) {
id
title
file_name
video_recording_image_url
channel {
name
}
}
}
""").lstrip(),
"variables": {"videoId": vod_id},
}
vod_data = self.session.http.post(self.API_URL_VOD, json=data, schema=validate.Schema(

View File

@@ -51,9 +51,12 @@ class Streann(Plugin):
base_url = "https://ott.streann.com"
get_time_url = base_url + "/web/services/public/get-server-time"
token_url = base_url + "/loadbalancer/services/web-players/{playerId}/token/{type}/{dataId}/{deviceId}"
stream_url = base_url + "/loadbalancer/services/web-players/{type}s-reseller-secure/{dataId}/{playerId}" \
"/{token}/{resellerId}/playlist.m3u8?date={time}&device-type=web&device-name=web" \
"&device-os=web&device-id={deviceId}"
stream_url = (
base_url
+ "/loadbalancer/services/web-players/{type}s-reseller-secure/{dataId}/{playerId}"
+ "/{token}/{resellerId}/playlist.m3u8?date={time}&device-type=web&device-name=web"
+ "&device-os=web&device-id={deviceId}"
)
passphrase_re = re.compile(r"""CryptoJS\.AES\.decrypt\(.*?,\s*(['"])(?P<passphrase>(?:(?!\1).)*)\1\s*?\);""")
_device_id = None

View File

@@ -522,11 +522,12 @@ class UStreamTV(Plugin):
password=self.get_option("password"),
)
log.debug(
f"Connecting to UStream API:"
f" media_id={media_id},"
f" application={application},"
f" referrer={self.url},"
f" cluster=live",
"Connecting to UStream API: " + ", ".join([
f"media_id={media_id}",
f"application={application}",
f"referrer={self.url}",
f"cluster={'live'}",
]),
)
wsclient.start()

View File

@@ -347,9 +347,7 @@ class Zattoo(Plugin):
self._authed = False
if not self._authed and (not email and not password):
log.error(
"A login for Zattoo is required, use --zattoo-email EMAIL"
" --zattoo-password PASSWORD to set them")
log.error("A login for Zattoo is required, use --zattoo-email EMAIL --zattoo-password PASSWORD to set them")
return
if not self._authed:

View File

@@ -218,8 +218,7 @@ class HLSStreamWriter(SegmentedStreamWriter):
# Also check if the output will be resumed after data has already been written to the buffer before.
if sequence.segment.discontinuity or is_paused and written_once:
log.warning(
"Encountered a stream discontinuity. "
"This is unsupported and will result in incoherent output data.",
"Encountered a stream discontinuity. This is unsupported and will result in incoherent output data.",
)
# unblock reader thread after writing data to the buffer
@@ -422,12 +421,16 @@ class HLSStreamWorker(SegmentedStreamWorker):
self.playlist_sequence = self.duration_to_sequence(self.duration_offset_start, self.playlist_sequences)
if self.playlist_sequences:
log.debug(f"First Sequence: {self.playlist_sequences[0].num}; "
f"Last Sequence: {self.playlist_sequences[-1].num}")
log.debug(f"Start offset: {self.duration_offset_start}; "
f"Duration: {self.duration_limit}; "
f"Start Sequence: {self.playlist_sequence}; "
f"End Sequence: {self.playlist_end}")
log.debug("; ".join([
f"First Sequence: {self.playlist_sequences[0].num}",
f"Last Sequence: {self.playlist_sequences[-1].num}",
]))
log.debug("; ".join([
f"Start offset: {self.duration_offset_start}",
f"Duration: {self.duration_limit}",
f"Start Sequence: {self.playlist_sequence}",
f"End Sequence: {self.playlist_end}",
]))
total_duration = 0
while not self.closed:

View File

@@ -108,11 +108,10 @@ def create_output(formatter: Formatter) -> Union[FileOutput, PlayerOutput]:
elif not args.player:
console.exit(
"The default player (VLC) does not seem to be "
"installed. You must specify the path to a player "
"executable with --player, a file path to save the "
"stream with --output, or pipe the stream to "
"another program with --stdout.",
"The default player (VLC) does not seem to be installed."
+ " You must specify the path to a player executable with --player,"
+ " a file path to save the stream with --output,"
+ " or pipe the stream to another program with --stdout.",
)
return # type: ignore
@@ -192,9 +191,10 @@ def output_stream_http(
if not external:
if not args.player:
console.exit("The default player (VLC) does not seem to be "
"installed. You must specify the path to a player "
"executable with --player.")
console.exit(
"The default player (VLC) does not seem to be installed."
+ " You must specify the path to a player executable with --player.",
)
server = create_http_server()
player = output = PlayerOutput(
@@ -943,7 +943,7 @@ def main():
usage = parser.format_usage()
console.msg(
f"{usage}\n"
f"Use -h/--help to see the available options or read the manual at https://streamlink.github.io",
+ "Use -h/--help to see the available options or read the manual at https://streamlink.github.io",
)
sys.exit(error_code)

View File

@@ -1,5 +1,6 @@
import unittest
from io import StringIO
from textwrap import dedent
from unittest.mock import Mock, patch
from streamlink_cli.console import ConsoleOutput
@@ -39,14 +40,13 @@ class TestConsoleOutput(unittest.TestCase):
test_obj1 = {"test": 1, "foo": "foo"}
test_obj2 = Mock(__json__=Mock(return_value={"test": 2}))
console.msg_json(test_obj1, test_obj2, ["qux"], foo="bar", baz="qux")
self.assertEqual(
'{\n'
' "test": 2,\n'
' "foo": "bar",\n'
' "baz": "qux"\n'
'}\n',
output.getvalue(),
)
assert output.getvalue() == dedent("""
{
"test": 2,
"foo": "bar",
"baz": "qux"
}
""").lstrip()
self.assertEqual([("test", 1), ("foo", "foo")], list(test_obj1.items()))
def test_msg_json_merge_list(self):
@@ -55,17 +55,23 @@ class TestConsoleOutput(unittest.TestCase):
test_list1 = ["foo", "bar"]
test_list2 = Mock(__json__=Mock(return_value={"foo": "bar"}))
console.msg_json(test_list1, ["baz"], test_list2, {"foo": "bar"}, foo="bar", baz="qux")
self.assertEqual(
'[\n'
' "foo",\n'
' "bar",\n'
' "baz",\n'
' {\n "foo": "bar"\n },\n'
' {\n "foo": "bar"\n },\n'
' {\n "foo": "bar",\n "baz": "qux"\n }\n'
']\n',
output.getvalue(),
)
assert output.getvalue() == dedent("""
[
"foo",
"bar",
"baz",
{
"foo": "bar"
},
{
"foo": "bar"
},
{
"foo": "bar",
"baz": "qux"
}
]
""").lstrip()
self.assertEqual(["foo", "bar"], test_list1)
@patch("streamlink_cli.console.sys.exit")

View File

@@ -6,7 +6,7 @@ class TestPluginCanHandleUrlApp17(PluginCanHandleUrl):
__plugin__ = App17
should_match = [
"https://17.live/en-US/live/123123"
"https://17.live/en-US/live/123123",
"https://17.live/en/live/123123",
"https://17.live/ja/live/123123",
]

View File

@@ -22,7 +22,7 @@ class TestPluginCanHandleUrlBigo(PluginCanHandleUrl):
"http://www.bigoweb.co/show/00000000",
"https://www.bigoweb.co/show/00000000",
"http://bigoweb.co/show/00000000",
"https://bigoweb.co/show/00000000"
"https://bigoweb.co/show/00000000",
# Wrong URL structure
"https://www.bigo.tv/show/00000000",

View File

@@ -13,9 +13,9 @@ class TestPluginCanHandleUrlN13TV(PluginCanHandleUrl):
"http://13tv.co.il/item/entertainment/ambush/season-02/episodes/ffuk3-2026112/",
"https://13tv.co.il/item/entertainment/ambush/season-02/episodes/ffuk3-2026112/",
"http://www.13tv.co.il/item/entertainment/ambush/season-02/episodes/ffuk3-2026112/",
"https://www.13tv.co.il/item/entertainment/ambush/season-02/episodes/ffuk3-2026112/"
"https://www.13tv.co.il/item/entertainment/ambush/season-02/episodes/ffuk3-2026112/",
"http://13tv.co.il/item/entertainment/tzhok-mehamatzav/season-01/episodes/vkdoc-2023442/",
"https://13tv.co.il/item/entertainment/tzhok-mehamatzav/season-01/episodes/vkdoc-2023442/",
"http://www.13tv.co.il/item/entertainment/tzhok-mehamatzav/season-01/episodes/vkdoc-2023442/"
"http://www.13tv.co.il/item/entertainment/tzhok-mehamatzav/season-01/episodes/vkdoc-2023442/",
"https://www.13tv.co.il/item/entertainment/tzhok-mehamatzav/season-01/episodes/vkdoc-2023442/",
]

View File

@@ -11,9 +11,9 @@ class TestPluginCanHandleUrlOlympicChannel(PluginCanHandleUrl):
"https://www.olympicchannel.com/en/live/video/detail/olympic-ceremonies-channel/",
"https://www.olympicchannel.com/de/video/detail/stefanidi-husband-coach-krier-relationship/",
"https://www.olympicchannel.com/de/original-series/detail/body/body-season-season-1/episodes/"
"treffen-sie-aaron-wheelz-fotheringham-den-paten-des-rollstuhl-extremsports/",
+ "treffen-sie-aaron-wheelz-fotheringham-den-paten-des-rollstuhl-extremsports/",
"https://olympics.com/en/sport-events/2021-fiba-3x3-olympic-qualifier-graz/?"
"slug=final-day-fiba-3x3-olympic-qualifier-graz",
+ "slug=final-day-fiba-3x3-olympic-qualifier-graz",
"https://olympics.com/en/video/spider-woman-shauna-coxsey-great-britain-climbing-interview",
"https://olympics.com/en/original-series/episode/how-fun-fuels-this-para-taekwondo-world-champion-unleash-the-new",
"https://olympics.com/tokyo-2020/en/news/videos/tokyo-2020-1-message",

View File

@@ -27,37 +27,39 @@ def test_translate_no_content():
def test_translate_no_streams():
# real payload without any tEXt chunks that match the expected format
data = \
"iVBORw0KGgoAAAANSUhEUgAAAsAAAAGMAQMAAADuk4YmAAAAA1BMVEX///+nxBvIAAAAAXRSTlMA" \
"QObYZgAAADlJREFUeF7twDEBAAAAwiD7p7bGDlgYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" \
"AAAAAAAAwAGJrAABgPqdWQAAAcp0RVh0ak9lNmRyNkUtV2hmeEE0dERMdS9FOTlCT2d3MF9HMDdG" \
"RmxQNy1ZLTdFOFRac0MmbD93VEp5SENvUUlseVY1bjdrYmF2ZkhUUjc4aTBHAEBxY08zdk4yYldE" \
"bm09TDVaNGMyVVpNdklVbS5LVUNCUTdZNVpfSUZMVmRMNlN0VE14TmFPLUFGaTF6ai9YenE6PVg9" \
"dnJBb3BFU3BBJlpoWFViSER3MCZxbj9AS0d1Si5OSnAudiMwMTYxNDA2MDU2NjcyMDE3MzI4NTcw" \
"ODcwMDc3MDI3NjIwNjczMTA0ODEyNDY3MzMwNzgxMDQwMTE4NzQ4MDYwMjIwODgxNTI0ODEzNjQ4" \
"MjU0MTEyMzEyNjUxMzc2NTM3MTMzNzgwNTYwNDE0NjI4NDM1NjIzNTA1MTAxNjYwMDExNzE4MDQx" \
"MTc3MDMxNTQ2MDEzNDUwMDQ2MTg4MDgwNzMxNDM3MjgwMDQ4NDA3Mzg0MzYxODA0NjU0NDYzMTY1" \
"NDIxMzY4ODAzNTQ3MjMyMjYzODUwMzY5MTE3MTMwOTMzMjAwNDg1MDExNTE4MTgxMTgwMTAwNjU0" \
"NTg1MzcxNDQ5MDM5MzY2ODMxNTc0MjUyNDVZsdrfAAAAAElFTkSuQmCC"
data = (
"iVBORw0KGgoAAAANSUhEUgAAAsAAAAGMAQMAAADuk4YmAAAAA1BMVEX///+nxBvIAAAAAXRSTlMA"
+ "QObYZgAAADlJREFUeF7twDEBAAAAwiD7p7bGDlgYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
+ "AAAAAAAAwAGJrAABgPqdWQAAAcp0RVh0ak9lNmRyNkUtV2hmeEE0dERMdS9FOTlCT2d3MF9HMDdG"
+ "RmxQNy1ZLTdFOFRac0MmbD93VEp5SENvUUlseVY1bjdrYmF2ZkhUUjc4aTBHAEBxY08zdk4yYldE"
+ "bm09TDVaNGMyVVpNdklVbS5LVUNCUTdZNVpfSUZMVmRMNlN0VE14TmFPLUFGaTF6ai9YenE6PVg9"
+ "dnJBb3BFU3BBJlpoWFViSER3MCZxbj9AS0d1Si5OSnAudiMwMTYxNDA2MDU2NjcyMDE3MzI4NTcw"
+ "ODcwMDc3MDI3NjIwNjczMTA0ODEyNDY3MzMwNzgxMDQwMTE4NzQ4MDYwMjIwODgxNTI0ODEzNjQ4"
+ "MjU0MTEyMzEyNjUxMzc2NTM3MTMzNzgwNTYwNDE0NjI4NDM1NjIzNTA1MTAxNjYwMDExNzE4MDQx"
+ "MTc3MDMxNTQ2MDEzNDUwMDQ2MTg4MDgwNzMxNDM3MjgwMDQ4NDA3Mzg0MzYxODA0NjU0NDYzMTY1"
+ "NDIxMzY4ODAzNTQ3MjMyMjYzODUwMzY5MTE3MTMwOTMzMjAwNDg1MDExNTE4MTgxMTgwMTAwNjU0"
+ "NTg1MzcxNDQ5MDM5MzY2ODMxNTc0MjUyNDVZsdrfAAAAAElFTkSuQmCC"
)
assert list(ZTNR.translate(data)) == []
def test_translate_has_streams():
# real payload with modified end (IEND chunk of size 0), to reduce test size
data = \
"iVBORw0KGgoAAAANSUhEUgAAAVQAAAFUCAIAAAD08FPiAAACr3RFWHRXczlVSWdtM2ZPTGY4b2R4" \
"dWo5aHZnRlRhOndvZEtxN3pLOG5oNGRpbT1vREBTWHhOMGtzUVomNndAWkV5cz1GOUlCSiYxdDcy" \
"QmdDOFM2NGFVJmh1Nzk2bUpwOFVJOE1DJlpAY2lzdGcmbEUmRE5DZFV4SHpEOFgvLmppZ1l4b3M1" \
"QU1lOnl3ZS04VlBwQkZvLlFMUWZHTy1vQjNVeHhfVDF1JkRSQTpPP2J4Wm0zbFlxS3IjAEhEX1JF" \
"QURZJSUwNTYwNzI4Mjg4MzUyNjQyMzUxMTA0Mzg0NzI4NzY4NDEyODAzODU0ODMwMDQ3NzcwNDEx" \
"MDAyODE1MzM3NDU3ODAxMDg3MjgxNTg1MzMzNDE3MTYxMTE4NzQ1MTU3MjYxOTUwNzI4NzEyNDgw" \
"MzI4NTM1ODM1ODU3MzQyNzE0NjcyODE2NTgzNDI4NTE0NTg1MzIwMzgxODU3NDY0NzUwODI3OTQ0" \
"ODg3NjEzMTUzNDMxMTUxNzYzNDU1NzE0MDA1MDUzNDIxODE0ODYyNDIzODM2MTczMzQ0NjAwNTIw" \
"NTU2NDYyNDgxODYzNDA2MzA4MTE0ODUxMTQ2Mzg2MzYyMjQ4Mjc3MjIyMjUzNjMxMjI1MjEzMTU0" \
"NjI1NjIyMjM3MTA4NjEwNjI0NTYyNTMxNTA2ODEyMjQ2MzYzNzE0MzY4MDU1MTgxNTQ2NTU3MTMx" \
"NTI0NzU4MTU2NjAxMjY0MjA1MDU2MzcwMDM3NzcwMjA0MTYxMzE3MjQxMTI2NzYzMzUyNjY3NTQ1" \
"NTA1MTUxNTc2NTEzMTUwNjcxNDcyMDI2MTQyMjczNTI4NzExNjA4NTU3NjIzMzMxMzU0NDM1Mzgw" \
"MTI0MTQzMTU1MTMyNzc4ODI1MjcyMjUwMjY4MzYyMDUzMjQzNjA0MTYyMzkhB8fSAAAAAElFTkQAAAAACg=="
data = (
"iVBORw0KGgoAAAANSUhEUgAAAVQAAAFUCAIAAAD08FPiAAACr3RFWHRXczlVSWdtM2ZPTGY4b2R4"
+ "dWo5aHZnRlRhOndvZEtxN3pLOG5oNGRpbT1vREBTWHhOMGtzUVomNndAWkV5cz1GOUlCSiYxdDcy"
+ "QmdDOFM2NGFVJmh1Nzk2bUpwOFVJOE1DJlpAY2lzdGcmbEUmRE5DZFV4SHpEOFgvLmppZ1l4b3M1"
+ "QU1lOnl3ZS04VlBwQkZvLlFMUWZHTy1vQjNVeHhfVDF1JkRSQTpPP2J4Wm0zbFlxS3IjAEhEX1JF"
+ "QURZJSUwNTYwNzI4Mjg4MzUyNjQyMzUxMTA0Mzg0NzI4NzY4NDEyODAzODU0ODMwMDQ3NzcwNDEx"
+ "MDAyODE1MzM3NDU3ODAxMDg3MjgxNTg1MzMzNDE3MTYxMTE4NzQ1MTU3MjYxOTUwNzI4NzEyNDgw"
+ "MzI4NTM1ODM1ODU3MzQyNzE0NjcyODE2NTgzNDI4NTE0NTg1MzIwMzgxODU3NDY0NzUwODI3OTQ0"
+ "ODg3NjEzMTUzNDMxMTUxNzYzNDU1NzE0MDA1MDUzNDIxODE0ODYyNDIzODM2MTczMzQ0NjAwNTIw"
+ "NTU2NDYyNDgxODYzNDA2MzA4MTE0ODUxMTQ2Mzg2MzYyMjQ4Mjc3MjIyMjUzNjMxMjI1MjEzMTU0"
+ "NjI1NjIyMjM3MTA4NjEwNjI0NTYyNTMxNTA2ODEyMjQ2MzYzNzE0MzY4MDU1MTgxNTQ2NTU3MTMx"
+ "NTI0NzU4MTU2NjAxMjY0MjA1MDU2MzcwMDM3NzcwMjA0MTYxMzE3MjQxMTI2NzYzMzUyNjY3NTQ1"
+ "NTA1MTUxNTc2NTEzMTUwNjcxNDcyMDI2MTQyMjczNTI4NzExNjA4NTU3NjIzMzMxMzU0NDM1Mzgw"
+ "MTI0MTQzMTU1MTMyNzc4ODI1MjcyMjUwMjY4MzYyMDUzMjQzNjA0MTYyMzkhB8fSAAAAAElFTkQAAAAACg=="
)
assert list(ZTNR.translate(data)) == [
(

View File

@@ -704,26 +704,26 @@ class TestXmlElementSchema:
validate.xml_element(),
(
"<parent attrkey1=\"attrval1\" attrkey2=\"attrval2\">"
"parenttext"
"<childA a=\"1\">childAtext</childA>"
"childAtail"
"<childB b=\"2\">childBtext<childC/></childB>"
"childBtail"
"</parent>"
"parenttail"
+ "parenttext"
+ "<childA a=\"1\">childAtext</childA>"
+ "childAtail"
+ "<childB b=\"2\">childBtext<childC/></childB>"
+ "childBtail"
+ "</parent>"
+ "parenttail"
),
),
(
validate.xml_element(tag=upper, attrib={upper: upper}, text=upper, tail=upper),
(
"<PARENT ATTRKEY1=\"ATTRVAL1\" ATTRKEY2=\"ATTRVAL2\">"
"PARENTTEXT"
"<childA a=\"1\">childAtext</childA>"
"childAtail"
"<childB b=\"2\">childBtext<childC/></childB>"
"childBtail"
"</PARENT>"
"PARENTTAIL"
+ "PARENTTEXT"
+ "<childA a=\"1\">childAtext</childA>"
+ "childAtail"
+ "<childB b=\"2\">childBtext<childC/></childB>"
+ "childBtail"
+ "</PARENT>"
+ "PARENTTAIL"
),
),
],

View File

@@ -76,10 +76,8 @@ class TestUtilsArgs(unittest.TestCase):
("X-Forwarded-For=127.0.0.1", ("X-Forwarded-For", "127.0.0.1")),
("Referer=https://foo.bar", ("Referer", "https://foo.bar")),
(
"User-Agent=Mozilla/5.0 (X11; Linux x86_64; rv:60.0)"
" Gecko/20100101 Firefox/60.0",
("User-Agent", "Mozilla/5.0 (X11; Linux x86_64; rv:60.0) "
"Gecko/20100101 Firefox/60.0"),
"User-Agent=Mozilla/5.0 (X11; Linux x86_64; rv:60.0) Gecko/20100101 Firefox/60.0",
("User-Agent", "Mozilla/5.0 (X11; Linux x86_64; rv:60.0) Gecko/20100101 Firefox/60.0"),
),
("domain=example.com", ("domain", "example.com")),
]