#!/usr/bin/env python3

# Allow direct execution
import os
import re
import sys
import unittest

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))


import contextlib
import io
import itertools
import json
import xml.etree.ElementTree

from yt_dlp.compat import (
    compat_etree_fromstring,
    compat_HTMLParseError,
    compat_os_name,
)
from yt_dlp.utils import (
    Config,
    DateRange,
    ExtractorError,
    InAdvancePagedList,
    LazyList,
    OnDemandPagedList,
    age_restricted,
    args_to_str,
    base_url,
    caesar,
    clean_html,
    clean_podcast_url,
    cli_bool_option,
    cli_option,
    cli_valueless_option,
    date_from_str,
    datetime_from_str,
    detect_exe_version,
    determine_ext,
    determine_file_encoding,
    dfxp2srt,
    dict_get,
    encode_base_n,
    encode_compat_str,
    encodeFilename,
    escape_rfc3986,
    escape_url,
    expand_path,
    extract_attributes,
    find_xpath_attr,
    fix_xml_ampersands,
    float_or_none,
    format_bytes,
    get_compatible_ext,
    get_element_by_attribute,
    get_element_by_class,
    get_element_html_by_attribute,
    get_element_html_by_class,
    get_element_text_and_html_by_tag,
    get_elements_by_attribute,
    get_elements_by_class,
    get_elements_html_by_attribute,
    get_elements_html_by_class,
    get_elements_text_and_html_by_attribute,
    int_or_none,
    intlist_to_bytes,
    iri_to_uri,
    is_html,
    js_to_json,
    limit_length,
    locked_file,
    lowercase_escape,
    match_str,
    merge_dicts,
    mimetype2ext,
    month_by_name,
    multipart_encode,
    ohdave_rsa_encrypt,
    orderedSet,
    parse_age_limit,
    parse_bitrate,
    parse_codecs,
    parse_count,
    parse_dfxp_time_expr,
    parse_duration,
    parse_filesize,
    parse_iso8601,
    parse_qs,
    parse_resolution,
    pkcs1pad,
    prepend_extension,
    read_batch_urls,
    remove_end,
    remove_quotes,
    remove_start,
    render_table,
    replace_extension,
    rot47,
    sanitize_filename,
    sanitize_path,
    sanitize_url,
    sanitized_Request,
    shell_quote,
    smuggle_url,
    str_or_none,
    str_to_int,
    strip_jsonp,
    strip_or_none,
    subtitles_filename,
    timeconvert,
    traverse_obj,
    unescapeHTML,
    unified_strdate,
    unified_timestamp,
    unsmuggle_url,
    update_url_query,
    uppercase_escape,
    url_basename,
    url_or_none,
    urlencode_postdata,
    urljoin,
    urshift,
    version_tuple,
    xpath_attr,
    xpath_element,
    xpath_text,
    xpath_with_ns,
)


class TestUtil(unittest.TestCase):
    def test_timeconvert(self):
        self.assertTrue(timeconvert('') is None)
        self.assertTrue(timeconvert('bougrg') is None)

    def test_sanitize_filename(self):
        self.assertEqual(sanitize_filename(''), '')
        self.assertEqual(sanitize_filename('abc'), 'abc')
        self.assertEqual(sanitize_filename('abc_d-e'), 'abc_d-e')

        self.assertEqual(sanitize_filename('123'), '123')

        self.assertEqual('abc⧸de', sanitize_filename('abc/de'))
        self.assertFalse('/' in sanitize_filename('abc/de///'))

        self.assertEqual('abc_de', sanitize_filename('abc/<>\\*|de', is_id=False))
        self.assertEqual('xxx', sanitize_filename('xxx/<>\\*|', is_id=False))
        self.assertEqual('yes no', sanitize_filename('yes? no', is_id=False))
        self.assertEqual('this - that', sanitize_filename('this: that', is_id=False))

        self.assertEqual(sanitize_filename('AT&T'), 'AT&T')
        aumlaut = 'ä'
        self.assertEqual(sanitize_filename(aumlaut), aumlaut)
        tests = '\u043a\u0438\u0440\u0438\u043b\u043b\u0438\u0446\u0430'
        self.assertEqual(sanitize_filename(tests), tests)

        self.assertEqual(
            sanitize_filename('New World record at 0:12:34'),
            'New World record at 0_12_34')

        self.assertEqual(sanitize_filename('--gasdgf'), '--gasdgf')
        self.assertEqual(sanitize_filename('--gasdgf', is_id=True), '--gasdgf')
        self.assertEqual(sanitize_filename('--gasdgf', is_id=False), '_-gasdgf')
        self.assertEqual(sanitize_filename('.gasdgf'), '.gasdgf')
        self.assertEqual(sanitize_filename('.gasdgf', is_id=True), '.gasdgf')
        self.assertEqual(sanitize_filename('.gasdgf', is_id=False), 'gasdgf')

        forbidden = '"\0\\/'
        for fc in forbidden:
            for fbc in forbidden:
                self.assertTrue(fbc not in sanitize_filename(fc))

    def test_sanitize_filename_restricted(self):
        self.assertEqual(sanitize_filename('abc', restricted=True), 'abc')
        self.assertEqual(sanitize_filename('abc_d-e', restricted=True), 'abc_d-e')

        self.assertEqual(sanitize_filename('123', restricted=True), '123')

        self.assertEqual('abc_de', sanitize_filename('abc/de', restricted=True))
        self.assertFalse('/' in sanitize_filename('abc/de///', restricted=True))

        self.assertEqual('abc_de', sanitize_filename('abc/<>\\*|de', restricted=True))
        self.assertEqual('xxx', sanitize_filename('xxx/<>\\*|', restricted=True))
        self.assertEqual('yes_no', sanitize_filename('yes? no', restricted=True))
        self.assertEqual('this_-_that', sanitize_filename('this: that', restricted=True))

        tests = 'aäb\u4e2d\u56fd\u7684c'
        self.assertEqual(sanitize_filename(tests, restricted=True), 'aab_c')
        self.assertTrue(sanitize_filename('\xf6', restricted=True) != '')  # No empty filename

        forbidden = '"\0\\/&!: \'\t\n()[]{}$;`^,#'
        for fc in forbidden:
            for fbc in forbidden:
                self.assertTrue(fbc not in sanitize_filename(fc, restricted=True))

        # Handle a common case more neatly
        self.assertEqual(sanitize_filename('\u5927\u58f0\u5e26 - Song', restricted=True), 'Song')
        self.assertEqual(sanitize_filename('\u603b\u7edf: Speech', restricted=True), 'Speech')
        # .. but make sure the file name is never empty
        self.assertTrue(sanitize_filename('-', restricted=True) != '')
        self.assertTrue(sanitize_filename(':', restricted=True) != '')

        self.assertEqual(sanitize_filename(
            'ÂÃÄÀÁÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖŐØŒÙÚÛÜŰÝÞßàáâãäåæçèéêëìíîïðñòóôõöőøœùúûüűýþÿ', restricted=True),
            'AAAAAAAECEEEEIIIIDNOOOOOOOOEUUUUUYTHssaaaaaaaeceeeeiiiionooooooooeuuuuuythy')

    def test_sanitize_ids(self):
        self.assertEqual(sanitize_filename('_n_cd26wFpw', is_id=True), '_n_cd26wFpw')
        self.assertEqual(sanitize_filename('_BD_eEpuzXw', is_id=True), '_BD_eEpuzXw')
        self.assertEqual(sanitize_filename('N0Y__7-UOdI', is_id=True), 'N0Y__7-UOdI')

    def test_sanitize_path(self):
        if sys.platform != 'win32':
            return

        self.assertEqual(sanitize_path('abc'), 'abc')
        self.assertEqual(sanitize_path('abc/def'), 'abc\\def')
        self.assertEqual(sanitize_path('abc\\def'), 'abc\\def')
        self.assertEqual(sanitize_path('abc|def'), 'abc#def')
        self.assertEqual(sanitize_path('<>:"|?*'), '#######')
        self.assertEqual(sanitize_path('C:/abc/def'), 'C:\\abc\\def')
        self.assertEqual(sanitize_path('C?:/abc/def'), 'C##\\abc\\def')

        self.assertEqual(sanitize_path('\\\\?\\UNC\\ComputerName\\abc'), '\\\\?\\UNC\\ComputerName\\abc')
        self.assertEqual(sanitize_path('\\\\?\\UNC/ComputerName/abc'), '\\\\?\\UNC\\ComputerName\\abc')

        self.assertEqual(sanitize_path('\\\\?\\C:\\abc'), '\\\\?\\C:\\abc')
        self.assertEqual(sanitize_path('\\\\?\\C:/abc'), '\\\\?\\C:\\abc')
        self.assertEqual(sanitize_path('\\\\?\\C:\\ab?c\\de:f'), '\\\\?\\C:\\ab#c\\de#f')
        self.assertEqual(sanitize_path('\\\\?\\C:\\abc'), '\\\\?\\C:\\abc')

        self.assertEqual(
            sanitize_path('youtube/%(uploader)s/%(autonumber)s-%(title)s-%(upload_date)s.%(ext)s'),
            'youtube\\%(uploader)s\\%(autonumber)s-%(title)s-%(upload_date)s.%(ext)s')

        self.assertEqual(
            sanitize_path('youtube/TheWreckingYard ./00001-Not bad, Especially for Free! (1987 Yamaha 700)-20141116.mp4.part'),
            'youtube\\TheWreckingYard #\\00001-Not bad, Especially for Free! (1987 Yamaha 700)-20141116.mp4.part')
        self.assertEqual(sanitize_path('abc/def...'), 'abc\\def..#')
        self.assertEqual(sanitize_path('abc.../def'), 'abc..#\\def')
        self.assertEqual(sanitize_path('abc.../def...'), 'abc..#\\def..#')

        self.assertEqual(sanitize_path('../abc'), '..\\abc')
        self.assertEqual(sanitize_path('../../abc'), '..\\..\\abc')
        self.assertEqual(sanitize_path('./abc'), 'abc')
        self.assertEqual(sanitize_path('./../abc'), '..\\abc')

    def test_sanitize_url(self):
        self.assertEqual(sanitize_url('//foo.bar'), 'http://foo.bar')
        self.assertEqual(sanitize_url('httpss://foo.bar'), 'https://foo.bar')
        self.assertEqual(sanitize_url('rmtps://foo.bar'), 'rtmps://foo.bar')
        self.assertEqual(sanitize_url('https://foo.bar'), 'https://foo.bar')
        self.assertEqual(sanitize_url('foo bar'), 'foo bar')

    def test_extract_basic_auth(self):
        auth_header = lambda url: sanitized_Request(url).get_header('Authorization')
        self.assertFalse(auth_header('http://foo.bar'))
        self.assertFalse(auth_header('http://:foo.bar'))
        self.assertEqual(auth_header('http://@foo.bar'), 'Basic Og==')
        self.assertEqual(auth_header('http://:pass@foo.bar'), 'Basic OnBhc3M=')
        self.assertEqual(auth_header('http://user:@foo.bar'), 'Basic dXNlcjo=')
        self.assertEqual(auth_header('http://user:pass@foo.bar'), 'Basic dXNlcjpwYXNz')

    def test_expand_path(self):
        def env(var):
            return f'%{var}%' if sys.platform == 'win32' else f'${var}'

        os.environ['yt_dlp_EXPATH_PATH'] = 'expanded'
        self.assertEqual(expand_path(env('yt_dlp_EXPATH_PATH')), 'expanded')

        old_home = os.environ.get('HOME')
        test_str = R'C:\Documents and Settings\тест\Application Data'
        try:
            os.environ['HOME'] = test_str
            self.assertEqual(expand_path(env('HOME')), os.getenv('HOME'))
            self.assertEqual(expand_path('~'), os.getenv('HOME'))
            self.assertEqual(
                expand_path('~/%s' % env('yt_dlp_EXPATH_PATH')),
                '%s/expanded' % os.getenv('HOME'))
        finally:
            os.environ['HOME'] = old_home or ''

    def test_prepend_extension(self):
        self.assertEqual(prepend_extension('abc.ext', 'temp'), 'abc.temp.ext')
        self.assertEqual(prepend_extension('abc.ext', 'temp', 'ext'), 'abc.temp.ext')
        self.assertEqual(prepend_extension('abc.unexpected_ext', 'temp', 'ext'), 'abc.unexpected_ext.temp')
        self.assertEqual(prepend_extension('abc', 'temp'), 'abc.temp')
        self.assertEqual(prepend_extension('.abc', 'temp'), '.abc.temp')
        self.assertEqual(prepend_extension('.abc.ext', 'temp'), '.abc.temp.ext')

    def test_replace_extension(self):
        self.assertEqual(replace_extension('abc.ext', 'temp'), 'abc.temp')
        self.assertEqual(replace_extension('abc.ext', 'temp', 'ext'), 'abc.temp')
        self.assertEqual(replace_extension('abc.unexpected_ext', 'temp', 'ext'), 'abc.unexpected_ext.temp')
        self.assertEqual(replace_extension('abc', 'temp'), 'abc.temp')
        self.assertEqual(replace_extension('.abc', 'temp'), '.abc.temp')
        self.assertEqual(replace_extension('.abc.ext', 'temp'), '.abc.temp')

    def test_subtitles_filename(self):
        self.assertEqual(subtitles_filename('abc.ext', 'en', 'vtt'), 'abc.en.vtt')
        self.assertEqual(subtitles_filename('abc.ext', 'en', 'vtt', 'ext'), 'abc.en.vtt')
        self.assertEqual(subtitles_filename('abc.unexpected_ext', 'en', 'vtt', 'ext'), 'abc.unexpected_ext.en.vtt')

    def test_remove_start(self):
        self.assertEqual(remove_start(None, 'A - '), None)
        self.assertEqual(remove_start('A - B', 'A - '), 'B')
        self.assertEqual(remove_start('B - A', 'A - '), 'B - A')

    def test_remove_end(self):
        self.assertEqual(remove_end(None, ' - B'), None)
        self.assertEqual(remove_end('A - B', ' - B'), 'A')
        self.assertEqual(remove_end('B - A', ' - B'), 'B - A')

    def test_remove_quotes(self):
        self.assertEqual(remove_quotes(None), None)
        self.assertEqual(remove_quotes('"'), '"')
        self.assertEqual(remove_quotes("'"), "'")
        self.assertEqual(remove_quotes(';'), ';')
        self.assertEqual(remove_quotes('";'), '";')
        self.assertEqual(remove_quotes('""'), '')
        self.assertEqual(remove_quotes('";"'), ';')

    def test_ordered_set(self):
        self.assertEqual(orderedSet([1, 1, 2, 3, 4, 4, 5, 6, 7, 3, 5]), [1, 2, 3, 4, 5, 6, 7])
        self.assertEqual(orderedSet([]), [])
        self.assertEqual(orderedSet([1]), [1])
        # keep the list ordered
        self.assertEqual(orderedSet([135, 1, 1, 1]), [135, 1])

    def test_unescape_html(self):
        self.assertEqual(unescapeHTML('%20;'), '%20;')
        self.assertEqual(unescapeHTML('&#x2F;'), '/')
        self.assertEqual(unescapeHTML('&#47;'), '/')
        self.assertEqual(unescapeHTML('&eacute;'), 'é')
        self.assertEqual(unescapeHTML('&#2013266066;'), '&#2013266066;')
        self.assertEqual(unescapeHTML('&a&quot;'), '&a"')
        # HTML5 entities
        self.assertEqual(unescapeHTML('&period;&apos;'), '.\'')

    def test_date_from_str(self):
        self.assertEqual(date_from_str('yesterday'), date_from_str('now-1day'))
        self.assertEqual(date_from_str('now+7day'), date_from_str('now+1week'))
        self.assertEqual(date_from_str('now+14day'), date_from_str('now+2week'))
        self.assertEqual(date_from_str('20200229+365day'), date_from_str('20200229+1year'))
        self.assertEqual(date_from_str('20210131+28day'), date_from_str('20210131+1month'))

    def test_datetime_from_str(self):
        self.assertEqual(datetime_from_str('yesterday', precision='day'), datetime_from_str('now-1day', precision='auto'))
        self.assertEqual(datetime_from_str('now+7day', precision='day'), datetime_from_str('now+1week', precision='auto'))
        self.assertEqual(datetime_from_str('now+14day', precision='day'), datetime_from_str('now+2week', precision='auto'))
        self.assertEqual(datetime_from_str('20200229+365day', precision='day'), datetime_from_str('20200229+1year', precision='auto'))
        self.assertEqual(datetime_from_str('20210131+28day', precision='day'), datetime_from_str('20210131+1month', precision='auto'))
        self.assertEqual(datetime_from_str('20210131+59day', precision='day'), datetime_from_str('20210131+2month', precision='auto'))
        self.assertEqual(datetime_from_str('now+1day', precision='hour'), datetime_from_str('now+24hours', precision='auto'))
        self.assertEqual(datetime_from_str('now+23hours', precision='hour'), datetime_from_str('now+23hours', precision='auto'))

    def test_daterange(self):
        _20century = DateRange("19000101", "20000101")
        self.assertFalse("17890714" in _20century)
        _ac = DateRange("00010101")
        self.assertTrue("19690721" in _ac)
        _firstmilenium = DateRange(end="10000101")
        self.assertTrue("07110427" in _firstmilenium)

    def test_unified_dates(self):
        self.assertEqual(unified_strdate('December 21, 2010'), '20101221')
        self.assertEqual(unified_strdate('8/7/2009'), '20090708')
        self.assertEqual(unified_strdate('Dec 14, 2012'), '20121214')
        self.assertEqual(unified_strdate('2012/10/11 01:56:38 +0000'), '20121011')
        self.assertEqual(unified_strdate('1968 12 10'), '19681210')
        self.assertEqual(unified_strdate('1968-12-10'), '19681210')
        self.assertEqual(unified_strdate('31-07-2022 20:00'), '20220731')
        self.assertEqual(unified_strdate('28/01/2014 21:00:00 +0100'), '20140128')
        self.assertEqual(
            unified_strdate('11/26/2014 11:30:00 AM PST', day_first=False),
            '20141126')
        self.assertEqual(
            unified_strdate('2/2/2015 6:47:40 PM', day_first=False),
            '20150202')
        self.assertEqual(unified_strdate('Feb 14th 2016 5:45PM'), '20160214')
        self.assertEqual(unified_strdate('25-09-2014'), '20140925')
        self.assertEqual(unified_strdate('27.02.2016 17:30'), '20160227')
        self.assertEqual(unified_strdate('UNKNOWN DATE FORMAT'), None)
        self.assertEqual(unified_strdate('Feb 7, 2016 at 6:35 pm'), '20160207')
        self.assertEqual(unified_strdate('July 15th, 2013'), '20130715')
        self.assertEqual(unified_strdate('September 1st, 2013'), '20130901')
        self.assertEqual(unified_strdate('Sep 2nd, 2013'), '20130902')
        self.assertEqual(unified_strdate('November 3rd, 2019'), '20191103')
        self.assertEqual(unified_strdate('October 23rd, 2005'), '20051023')

    def test_unified_timestamps(self):
        self.assertEqual(unified_timestamp('December 21, 2010'), 1292889600)
        self.assertEqual(unified_timestamp('8/7/2009'), 1247011200)
        self.assertEqual(unified_timestamp('Dec 14, 2012'), 1355443200)
        self.assertEqual(unified_timestamp('2012/10/11 01:56:38 +0000'), 1349920598)
        self.assertEqual(unified_timestamp('1968 12 10'), -33436800)
        self.assertEqual(unified_timestamp('1968-12-10'), -33436800)
        self.assertEqual(unified_timestamp('28/01/2014 21:00:00 +0100'), 1390939200)
        self.assertEqual(
            unified_timestamp('11/26/2014 11:30:00 AM PST', day_first=False),
            1417001400)
        self.assertEqual(
            unified_timestamp('2/2/2015 6:47:40 PM', day_first=False),
            1422902860)
        self.assertEqual(unified_timestamp('Feb 14th 2016 5:45PM'), 1455471900)
        self.assertEqual(unified_timestamp('25-09-2014'), 1411603200)
        self.assertEqual(unified_timestamp('27.02.2016 17:30'), 1456594200)
        self.assertEqual(unified_timestamp('UNKNOWN DATE FORMAT'), None)
        self.assertEqual(unified_timestamp('May 16, 2016 11:15 PM'), 1463440500)
        self.assertEqual(unified_timestamp('Feb 7, 2016 at 6:35 pm'), 1454870100)
        self.assertEqual(unified_timestamp('2017-03-30T17:52:41Q'), 1490896361)
        self.assertEqual(unified_timestamp('Sep 11, 2013 | 5:49 AM'), 1378878540)
        self.assertEqual(unified_timestamp('December 15, 2017 at 7:49 am'), 1513324140)
        self.assertEqual(unified_timestamp('2018-03-14T08:32:43.1493874+00:00'), 1521016363)

        self.assertEqual(unified_timestamp('December 31 1969 20:00:01 EDT'), 1)
        self.assertEqual(unified_timestamp('Wednesday 31 December 1969 18:01:26 MDT'), 86)
        self.assertEqual(unified_timestamp('12/31/1969 20:01:18 EDT', False), 78)

    def test_determine_ext(self):
        self.assertEqual(determine_ext('http://example.com/foo/bar.mp4/?download'), 'mp4')
        self.assertEqual(determine_ext('http://example.com/foo/bar/?download', None), None)
        self.assertEqual(determine_ext('http://example.com/foo/bar.nonext/?download', None), None)
        self.assertEqual(determine_ext('http://example.com/foo/bar/mp4?download', None), None)
        self.assertEqual(determine_ext('http://example.com/foo/bar.m3u8//?download'), 'm3u8')
        self.assertEqual(determine_ext('foobar', None), None)

    def test_find_xpath_attr(self):
        testxml = '''<root>
            <node/>
            <node x="a"/>
            <node x="a" y="c" />
            <node x="b" y="d" />
            <node x="" />
        </root>'''
        doc = compat_etree_fromstring(testxml)

        self.assertEqual(find_xpath_attr(doc, './/fourohfour', 'n'), None)
        self.assertEqual(find_xpath_attr(doc, './/fourohfour', 'n', 'v'), None)
        self.assertEqual(find_xpath_attr(doc, './/node', 'n'), None)
        self.assertEqual(find_xpath_attr(doc, './/node', 'n', 'v'), None)
        self.assertEqual(find_xpath_attr(doc, './/node', 'x'), doc[1])
        self.assertEqual(find_xpath_attr(doc, './/node', 'x', 'a'), doc[1])
        self.assertEqual(find_xpath_attr(doc, './/node', 'x', 'b'), doc[3])
        self.assertEqual(find_xpath_attr(doc, './/node', 'y'), doc[2])
        self.assertEqual(find_xpath_attr(doc, './/node', 'y', 'c'), doc[2])
        self.assertEqual(find_xpath_attr(doc, './/node', 'y', 'd'), doc[3])
        self.assertEqual(find_xpath_attr(doc, './/node', 'x', ''), doc[4])

    def test_xpath_with_ns(self):
        testxml = '''<root xmlns:media="http://example.com/">
            <media:song>
                <media:author>The Author</media:author>
                <url>http://server.com/download.mp3</url>
            </media:song>
        </root>'''
        doc = compat_etree_fromstring(testxml)
        find = lambda p: doc.find(xpath_with_ns(p, {'media': 'http://example.com/'}))
        self.assertTrue(find('media:song') is not None)
        self.assertEqual(find('media:song/media:author').text, 'The Author')
        self.assertEqual(find('media:song/url').text, 'http://server.com/download.mp3')

    def test_xpath_element(self):
        doc = xml.etree.ElementTree.Element('root')
        div = xml.etree.ElementTree.SubElement(doc, 'div')
        p = xml.etree.ElementTree.SubElement(div, 'p')
        p.text = 'Foo'
        self.assertEqual(xpath_element(doc, 'div/p'), p)
        self.assertEqual(xpath_element(doc, ['div/p']), p)
        self.assertEqual(xpath_element(doc, ['div/bar', 'div/p']), p)
        self.assertEqual(xpath_element(doc, 'div/bar', default='default'), 'default')
        self.assertEqual(xpath_element(doc, ['div/bar'], default='default'), 'default')
        self.assertTrue(xpath_element(doc, 'div/bar') is None)
        self.assertTrue(xpath_element(doc, ['div/bar']) is None)
        self.assertTrue(xpath_element(doc, ['div/bar'], 'div/baz') is None)
        self.assertRaises(ExtractorError, xpath_element, doc, 'div/bar', fatal=True)
        self.assertRaises(ExtractorError, xpath_element, doc, ['div/bar'], fatal=True)
        self.assertRaises(ExtractorError, xpath_element, doc, ['div/bar', 'div/baz'], fatal=True)

    def test_xpath_text(self):
        testxml = '''<root>
            <div>
                <p>Foo</p>
            </div>
        </root>'''
        doc = compat_etree_fromstring(testxml)
        self.assertEqual(xpath_text(doc, 'div/p'), 'Foo')
        self.assertEqual(xpath_text(doc, 'div/bar', default='default'), 'default')
        self.assertTrue(xpath_text(doc, 'div/bar') is None)
        self.assertRaises(ExtractorError, xpath_text, doc, 'div/bar', fatal=True)

    def test_xpath_attr(self):
        testxml = '''<root>
            <div>
                <p x="a">Foo</p>
            </div>
        </root>'''
        doc = compat_etree_fromstring(testxml)
        self.assertEqual(xpath_attr(doc, 'div/p', 'x'), 'a')
        self.assertEqual(xpath_attr(doc, 'div/bar', 'x'), None)
        self.assertEqual(xpath_attr(doc, 'div/p', 'y'), None)
        self.assertEqual(xpath_attr(doc, 'div/bar', 'x', default='default'), 'default')
        self.assertEqual(xpath_attr(doc, 'div/p', 'y', default='default'), 'default')
        self.assertRaises(ExtractorError, xpath_attr, doc, 'div/bar', 'x', fatal=True)
        self.assertRaises(ExtractorError, xpath_attr, doc, 'div/p', 'y', fatal=True)

    def test_smuggle_url(self):
        data = {"ö": "ö", "abc": [3]}
        url = 'https://foo.bar/baz?x=y#a'
        smug_url = smuggle_url(url, data)
        unsmug_url, unsmug_data = unsmuggle_url(smug_url)
        self.assertEqual(url, unsmug_url)
        self.assertEqual(data, unsmug_data)

        res_url, res_data = unsmuggle_url(url)
        self.assertEqual(res_url, url)
        self.assertEqual(res_data, None)

        smug_url = smuggle_url(url, {'a': 'b'})
        smug_smug_url = smuggle_url(smug_url, {'c': 'd'})
        res_url, res_data = unsmuggle_url(smug_smug_url)
        self.assertEqual(res_url, url)
        self.assertEqual(res_data, {'a': 'b', 'c': 'd'})

    def test_shell_quote(self):
        args = ['ffmpeg', '-i', encodeFilename('ñ€ß\'.mp4')]
        self.assertEqual(
            shell_quote(args),
            """ffmpeg -i 'ñ€ß'"'"'.mp4'""" if compat_os_name != 'nt' else '''ffmpeg -i "ñ€ß'.mp4"''')

    def test_float_or_none(self):
        self.assertEqual(float_or_none('42.42'), 42.42)
        self.assertEqual(float_or_none('42'), 42.0)
        self.assertEqual(float_or_none(''), None)
        self.assertEqual(float_or_none(None), None)
        self.assertEqual(float_or_none([]), None)
        self.assertEqual(float_or_none(set()), None)

    def test_int_or_none(self):
        self.assertEqual(int_or_none('42'), 42)
        self.assertEqual(int_or_none(''), None)
        self.assertEqual(int_or_none(None), None)
        self.assertEqual(int_or_none([]), None)
        self.assertEqual(int_or_none(set()), None)

    def test_str_to_int(self):
        self.assertEqual(str_to_int('123,456'), 123456)
        self.assertEqual(str_to_int('123.456'), 123456)
        self.assertEqual(str_to_int(523), 523)
        self.assertEqual(str_to_int('noninteger'), None)
        self.assertEqual(str_to_int([]), None)

    def test_url_basename(self):
        self.assertEqual(url_basename('http://foo.de/'), '')
        self.assertEqual(url_basename('http://foo.de/bar/baz'), 'baz')
        self.assertEqual(url_basename('http://foo.de/bar/baz?x=y'), 'baz')
        self.assertEqual(url_basename('http://foo.de/bar/baz#x=y'), 'baz')
        self.assertEqual(url_basename('http://foo.de/bar/baz/'), 'baz')
        self.assertEqual(
            url_basename('http://media.w3.org/2010/05/sintel/trailer.mp4'),
            'trailer.mp4')

    def test_base_url(self):
        self.assertEqual(base_url('http://foo.de/'), 'http://foo.de/')
        self.assertEqual(base_url('http://foo.de/bar'), 'http://foo.de/')
        self.assertEqual(base_url('http://foo.de/bar/'), 'http://foo.de/bar/')
        self.assertEqual(base_url('http://foo.de/bar/baz'), 'http://foo.de/bar/')
        self.assertEqual(base_url('http://foo.de/bar/baz?x=z/x/c'), 'http://foo.de/bar/')
        self.assertEqual(base_url('http://foo.de/bar/baz&x=z&w=y/x/c'), 'http://foo.de/bar/baz&x=z&w=y/x/')

    def test_urljoin(self):
        self.assertEqual(urljoin('http://foo.de/', '/a/b/c.txt'), 'http://foo.de/a/b/c.txt')
        self.assertEqual(urljoin(b'http://foo.de/', '/a/b/c.txt'), 'http://foo.de/a/b/c.txt')
        self.assertEqual(urljoin('http://foo.de/', b'/a/b/c.txt'), 'http://foo.de/a/b/c.txt')
        self.assertEqual(urljoin(b'http://foo.de/', b'/a/b/c.txt'), 'http://foo.de/a/b/c.txt')
        self.assertEqual(urljoin('//foo.de/', '/a/b/c.txt'), '//foo.de/a/b/c.txt')
        self.assertEqual(urljoin('http://foo.de/', 'a/b/c.txt'), 'http://foo.de/a/b/c.txt')
        self.assertEqual(urljoin('http://foo.de', '/a/b/c.txt'), 'http://foo.de/a/b/c.txt')
        self.assertEqual(urljoin('http://foo.de', 'a/b/c.txt'), 'http://foo.de/a/b/c.txt')
        self.assertEqual(urljoin('http://foo.de/', 'http://foo.de/a/b/c.txt'), 'http://foo.de/a/b/c.txt')
        self.assertEqual(urljoin('http://foo.de/', '//foo.de/a/b/c.txt'), '//foo.de/a/b/c.txt')
        self.assertEqual(urljoin(None, 'http://foo.de/a/b/c.txt'), 'http://foo.de/a/b/c.txt')
        self.assertEqual(urljoin(None, '//foo.de/a/b/c.txt'), '//foo.de/a/b/c.txt')
        self.assertEqual(urljoin('', 'http://foo.de/a/b/c.txt'), 'http://foo.de/a/b/c.txt')
        self.assertEqual(urljoin(['foobar'], 'http://foo.de/a/b/c.txt'), 'http://foo.de/a/b/c.txt')
        self.assertEqual(urljoin('http://foo.de/', None), None)
        self.assertEqual(urljoin('http://foo.de/', ''), None)
        self.assertEqual(urljoin('http://foo.de/', ['foobar']), None)
        self.assertEqual(urljoin('http://foo.de/a/b/c.txt', '.././../d.txt'), 'http://foo.de/d.txt')
        self.assertEqual(urljoin('http://foo.de/a/b/c.txt', 'rtmp://foo.de'), 'rtmp://foo.de')
        self.assertEqual(urljoin(None, 'rtmp://foo.de'), 'rtmp://foo.de')

    def test_url_or_none(self):
        self.assertEqual(url_or_none(None), None)
        self.assertEqual(url_or_none(''), None)
        self.assertEqual(url_or_none('foo'), None)
        self.assertEqual(url_or_none('http://foo.de'), 'http://foo.de')
        self.assertEqual(url_or_none('https://foo.de'), 'https://foo.de')
        self.assertEqual(url_or_none('http$://foo.de'), None)
        self.assertEqual(url_or_none('http://foo.de'), 'http://foo.de')
        self.assertEqual(url_or_none('//foo.de'), '//foo.de')
        self.assertEqual(url_or_none('s3://foo.de'), None)
        self.assertEqual(url_or_none('rtmpte://foo.de'), 'rtmpte://foo.de')
        self.assertEqual(url_or_none('mms://foo.de'), 'mms://foo.de')
        self.assertEqual(url_or_none('rtspu://foo.de'), 'rtspu://foo.de')
        self.assertEqual(url_or_none('ftps://foo.de'), 'ftps://foo.de')

    def test_parse_age_limit(self):
        self.assertEqual(parse_age_limit(None), None)
        self.assertEqual(parse_age_limit(False), None)
        self.assertEqual(parse_age_limit('invalid'), None)
        self.assertEqual(parse_age_limit(0), 0)
        self.assertEqual(parse_age_limit(18), 18)
        self.assertEqual(parse_age_limit(21), 21)
        self.assertEqual(parse_age_limit(22), None)
        self.assertEqual(parse_age_limit('18'), 18)
        self.assertEqual(parse_age_limit('18+'), 18)
        self.assertEqual(parse_age_limit('PG-13'), 13)
        self.assertEqual(parse_age_limit('TV-14'), 14)
        self.assertEqual(parse_age_limit('TV-MA'), 17)
        self.assertEqual(parse_age_limit('TV14'), 14)
        self.assertEqual(parse_age_limit('TV_G'), 0)

    def test_parse_duration(self):
        self.assertEqual(parse_duration(None), None)
        self.assertEqual(parse_duration(False), None)
        self.assertEqual(parse_duration('invalid'), None)
        self.assertEqual(parse_duration('1'), 1)
        self.assertEqual(parse_duration('1337:12'), 80232)
        self.assertEqual(parse_duration('9:12:43'), 33163)
        self.assertEqual(parse_duration('12:00'), 720)
        self.assertEqual(parse_duration('00:01:01'), 61)
        self.assertEqual(parse_duration('x:y'), None)
        self.assertEqual(parse_duration('3h11m53s'), 11513)
        self.assertEqual(parse_duration('3h 11m 53s'), 11513)
        self.assertEqual(parse_duration('3 hours 11 minutes 53 seconds'), 11513)
        self.assertEqual(parse_duration('3 hours 11 mins 53 secs'), 11513)
        self.assertEqual(parse_duration('3 hours, 11 minutes, 53 seconds'), 11513)
        self.assertEqual(parse_duration('3 hours, 11 mins, 53 secs'), 11513)
        self.assertEqual(parse_duration('62m45s'), 3765)
        self.assertEqual(parse_duration('6m59s'), 419)
        self.assertEqual(parse_duration('49s'), 49)
        self.assertEqual(parse_duration('0h0m0s'), 0)
        self.assertEqual(parse_duration('0m0s'), 0)
        self.assertEqual(parse_duration('0s'), 0)
        self.assertEqual(parse_duration('01:02:03.05'), 3723.05)
        self.assertEqual(parse_duration('T30M38S'), 1838)
        self.assertEqual(parse_duration('5 s'), 5)
        self.assertEqual(parse_duration('3 min'), 180)
        self.assertEqual(parse_duration('2.5 hours'), 9000)
        self.assertEqual(parse_duration('02:03:04'), 7384)
        self.assertEqual(parse_duration('01:02:03:04'), 93784)
        self.assertEqual(parse_duration('1 hour 3 minutes'), 3780)
        self.assertEqual(parse_duration('87 Min.'), 5220)
        self.assertEqual(parse_duration('PT1H0.040S'), 3600.04)
        self.assertEqual(parse_duration('PT00H03M30SZ'), 210)
        self.assertEqual(parse_duration('P0Y0M0DT0H4M20.880S'), 260.88)
        self.assertEqual(parse_duration('01:02:03:050'), 3723.05)
        self.assertEqual(parse_duration('103:050'), 103.05)

    def test_fix_xml_ampersands(self):
        self.assertEqual(
            fix_xml_ampersands('"&x=y&z=a'), '"&amp;x=y&amp;z=a')
        self.assertEqual(
            fix_xml_ampersands('"&amp;x=y&wrong;&z=a'),
            '"&amp;x=y&amp;wrong;&amp;z=a')
        self.assertEqual(
            fix_xml_ampersands('&amp;&apos;&gt;&lt;&quot;'),
            '&amp;&apos;&gt;&lt;&quot;')
        self.assertEqual(
            fix_xml_ampersands('&#1234;&#x1abC;'), '&#1234;&#x1abC;')
        self.assertEqual(fix_xml_ampersands('&#&#'), '&amp;#&amp;#')

    def test_paged_list(self):
        def testPL(size, pagesize, sliceargs, expected):
            def get_page(pagenum):
                firstid = pagenum * pagesize
                upto = min(size, pagenum * pagesize + pagesize)
                yield from range(firstid, upto)

            pl = OnDemandPagedList(get_page, pagesize)
            got = pl.getslice(*sliceargs)
            self.assertEqual(got, expected)

            iapl = InAdvancePagedList(get_page, size // pagesize + 1, pagesize)
            got = iapl.getslice(*sliceargs)
            self.assertEqual(got, expected)

        testPL(5, 2, (), [0, 1, 2, 3, 4])
        testPL(5, 2, (1,), [1, 2, 3, 4])
        testPL(5, 2, (2,), [2, 3, 4])
        testPL(5, 2, (4,), [4])
        testPL(5, 2, (0, 3), [0, 1, 2])
        testPL(5, 2, (1, 4), [1, 2, 3])
        testPL(5, 2, (2, 99), [2, 3, 4])
        testPL(5, 2, (20, 99), [])

    def test_read_batch_urls(self):
        f = io.StringIO('''\xef\xbb\xbf foo
            bar\r
            baz
            # More after this line\r
            ; or after this
            bam''')
        self.assertEqual(read_batch_urls(f), ['foo', 'bar', 'baz', 'bam'])

    def test_urlencode_postdata(self):
        data = urlencode_postdata({'username': 'foo@bar.com', 'password': '1234'})
        self.assertTrue(isinstance(data, bytes))

    def test_update_url_query(self):
        self.assertEqual(parse_qs(update_url_query(
            'http://example.com/path', {'quality': ['HD'], 'format': ['mp4']})),
            parse_qs('http://example.com/path?quality=HD&format=mp4'))
        self.assertEqual(parse_qs(update_url_query(
            'http://example.com/path', {'system': ['LINUX', 'WINDOWS']})),
            parse_qs('http://example.com/path?system=LINUX&system=WINDOWS'))
        self.assertEqual(parse_qs(update_url_query(
            'http://example.com/path', {'fields': 'id,formats,subtitles'})),
            parse_qs('http://example.com/path?fields=id,formats,subtitles'))
        self.assertEqual(parse_qs(update_url_query(
            'http://example.com/path', {'fields': ('id,formats,subtitles', 'thumbnails')})),
            parse_qs('http://example.com/path?fields=id,formats,subtitles&fields=thumbnails'))
        self.assertEqual(parse_qs(update_url_query(
            'http://example.com/path?manifest=f4m', {'manifest': []})),
            parse_qs('http://example.com/path'))
        self.assertEqual(parse_qs(update_url_query(
            'http://example.com/path?system=LINUX&system=WINDOWS', {'system': 'LINUX'})),
            parse_qs('http://example.com/path?system=LINUX'))
        self.assertEqual(parse_qs(update_url_query(
            'http://example.com/path', {'fields': b'id,formats,subtitles'})),
            parse_qs('http://example.com/path?fields=id,formats,subtitles'))
        self.assertEqual(parse_qs(update_url_query(
            'http://example.com/path', {'width': 1080, 'height': 720})),
            parse_qs('http://example.com/path?width=1080&height=720'))
        self.assertEqual(parse_qs(update_url_query(
            'http://example.com/path', {'bitrate': 5020.43})),
            parse_qs('http://example.com/path?bitrate=5020.43'))
        self.assertEqual(parse_qs(update_url_query(
            'http://example.com/path', {'test': '第二行тест'})),
            parse_qs('http://example.com/path?test=%E7%AC%AC%E4%BA%8C%E8%A1%8C%D1%82%D0%B5%D1%81%D1%82'))

    def test_multipart_encode(self):
        self.assertEqual(
            multipart_encode({b'field': b'value'}, boundary='AAAAAA')[0],
            b'--AAAAAA\r\nContent-Disposition: form-data; name="field"\r\n\r\nvalue\r\n--AAAAAA--\r\n')
        self.assertEqual(
            multipart_encode({'欄位'.encode(): '值'.encode()}, boundary='AAAAAA')[0],
            b'--AAAAAA\r\nContent-Disposition: form-data; name="\xe6\xac\x84\xe4\xbd\x8d"\r\n\r\n\xe5\x80\xbc\r\n--AAAAAA--\r\n')
        self.assertRaises(
            ValueError, multipart_encode, {b'field': b'value'}, boundary='value')

    def test_dict_get(self):
        FALSE_VALUES = {
            'none': None,
            'false': False,
            'zero': 0,
            'empty_string': '',
            'empty_list': [],
        }
        d = FALSE_VALUES.copy()
        d['a'] = 42
        self.assertEqual(dict_get(d, 'a'), 42)
        self.assertEqual(dict_get(d, 'b'), None)
        self.assertEqual(dict_get(d, 'b', 42), 42)
        self.assertEqual(dict_get(d, ('a', )), 42)
        self.assertEqual(dict_get(d, ('b', 'a', )), 42)
        self.assertEqual(dict_get(d, ('b', 'c', 'a', 'd', )), 42)
        self.assertEqual(dict_get(d, ('b', 'c', )), None)
        self.assertEqual(dict_get(d, ('b', 'c', ), 42), 42)
        for key, false_value in FALSE_VALUES.items():
            self.assertEqual(dict_get(d, ('b', 'c', key, )), None)
            self.assertEqual(dict_get(d, ('b', 'c', key, ), skip_false_values=False), false_value)

    def test_merge_dicts(self):
        self.assertEqual(merge_dicts({'a': 1}, {'b': 2}), {'a': 1, 'b': 2})
        self.assertEqual(merge_dicts({'a': 1}, {'a': 2}), {'a': 1})
        self.assertEqual(merge_dicts({'a': 1}, {'a': None}), {'a': 1})
        self.assertEqual(merge_dicts({'a': 1}, {'a': ''}), {'a': 1})
        self.assertEqual(merge_dicts({'a': 1}, {}), {'a': 1})
        self.assertEqual(merge_dicts({'a': None}, {'a': 1}), {'a': 1})
        self.assertEqual(merge_dicts({'a': ''}, {'a': 1}), {'a': ''})
        self.assertEqual(merge_dicts({'a': ''}, {'a': 'abc'}), {'a': 'abc'})
        self.assertEqual(merge_dicts({'a': None}, {'a': ''}, {'a': 'abc'}), {'a': 'abc'})

    def test_encode_compat_str(self):
        self.assertEqual(encode_compat_str(b'\xd1\x82\xd0\xb5\xd1\x81\xd1\x82', 'utf-8'), 'тест')
        self.assertEqual(encode_compat_str('тест', 'utf-8'), 'тест')

    def test_parse_iso8601(self):
        self.assertEqual(parse_iso8601('2014-03-23T23:04:26+0100'), 1395612266)
        self.assertEqual(parse_iso8601('2014-03-23T22:04:26+0000'), 1395612266)
        self.assertEqual(parse_iso8601('2014-03-23T22:04:26Z'), 1395612266)
        self.assertEqual(parse_iso8601('2014-03-23T22:04:26.1234Z'), 1395612266)
        self.assertEqual(parse_iso8601('2015-09-29T08:27:31.727'), 1443515251)
        self.assertEqual(parse_iso8601('2015-09-29T08-27-31.727'), None)

    def test_strip_jsonp(self):
        stripped = strip_jsonp('cb ([ {"id":"532cb",\n\n\n"x":\n3}\n]\n);')
        d = json.loads(stripped)
        self.assertEqual(d, [{"id": "532cb", "x": 3}])

        stripped = strip_jsonp('parseMetadata({"STATUS":"OK"})\n\n\n//epc')
        d = json.loads(stripped)
        self.assertEqual(d, {'STATUS': 'OK'})

        stripped = strip_jsonp('ps.embedHandler({"status": "success"});')
        d = json.loads(stripped)
        self.assertEqual(d, {'status': 'success'})

        stripped = strip_jsonp('window.cb && window.cb({"status": "success"});')
        d = json.loads(stripped)
        self.assertEqual(d, {'status': 'success'})

        stripped = strip_jsonp('window.cb && cb({"status": "success"});')
        d = json.loads(stripped)
        self.assertEqual(d, {'status': 'success'})

        stripped = strip_jsonp('({"status": "success"});')
        d = json.loads(stripped)
        self.assertEqual(d, {'status': 'success'})

    def test_strip_or_none(self):
        self.assertEqual(strip_or_none(' abc'), 'abc')
        self.assertEqual(strip_or_none('abc '), 'abc')
        self.assertEqual(strip_or_none(' abc '), 'abc')
        self.assertEqual(strip_or_none('\tabc\t'), 'abc')
        self.assertEqual(strip_or_none('\n\tabc\n\t'), 'abc')
        self.assertEqual(strip_or_none('abc'), 'abc')
        self.assertEqual(strip_or_none(''), '')
        self.assertEqual(strip_or_none(None), None)
        self.assertEqual(strip_or_none(42), None)
        self.assertEqual(strip_or_none([]), None)

    def test_uppercase_escape(self):
        self.assertEqual(uppercase_escape('aä'), 'aä')
        self.assertEqual(uppercase_escape('\\U0001d550'), '𝕐')

    def test_lowercase_escape(self):
        self.assertEqual(lowercase_escape('aä'), 'aä')
        self.assertEqual(lowercase_escape('\\u0026'), '&')

    def test_limit_length(self):
        self.assertEqual(limit_length(None, 12), None)
        self.assertEqual(limit_length('foo', 12), 'foo')
        self.assertTrue(
            limit_length('foo bar baz asd', 12).startswith('foo bar'))
        self.assertTrue('...' in limit_length('foo bar baz asd', 12))

    def test_mimetype2ext(self):
        self.assertEqual(mimetype2ext(None), None)
        self.assertEqual(mimetype2ext('video/x-flv'), 'flv')
        self.assertEqual(mimetype2ext('application/x-mpegURL'), 'm3u8')
        self.assertEqual(mimetype2ext('text/vtt'), 'vtt')
        self.assertEqual(mimetype2ext('text/vtt;charset=utf-8'), 'vtt')
        self.assertEqual(mimetype2ext('text/html; charset=utf-8'), 'html')
        self.assertEqual(mimetype2ext('audio/x-wav'), 'wav')
        self.assertEqual(mimetype2ext('audio/x-wav;codec=pcm'), 'wav')

    def test_month_by_name(self):
        self.assertEqual(month_by_name(None), None)
        self.assertEqual(month_by_name('December', 'en'), 12)
        self.assertEqual(month_by_name('décembre', 'fr'), 12)
        self.assertEqual(month_by_name('December'), 12)
        self.assertEqual(month_by_name('décembre'), None)
        self.assertEqual(month_by_name('Unknown', 'unknown'), None)

    def test_parse_codecs(self):
        self.assertEqual(parse_codecs(''), {})
        self.assertEqual(parse_codecs('avc1.77.30, mp4a.40.2'), {
            'vcodec': 'avc1.77.30',
            'acodec': 'mp4a.40.2',
            'dynamic_range': None,
        })
        self.assertEqual(parse_codecs('mp4a.40.2'), {
            'vcodec': 'none',
            'acodec': 'mp4a.40.2',
            'dynamic_range': None,
        })
        self.assertEqual(parse_codecs('mp4a.40.5,avc1.42001e'), {
            'vcodec': 'avc1.42001e',
            'acodec': 'mp4a.40.5',
            'dynamic_range': None,
        })
        self.assertEqual(parse_codecs('avc3.640028'), {
            'vcodec': 'avc3.640028',
            'acodec': 'none',
            'dynamic_range': None,
        })
        self.assertEqual(parse_codecs(', h264,,newcodec,aac'), {
            'vcodec': 'h264',
            'acodec': 'aac',
            'dynamic_range': None,
        })
        self.assertEqual(parse_codecs('av01.0.05M.08'), {
            'vcodec': 'av01.0.05M.08',
            'acodec': 'none',
            'dynamic_range': None,
        })
        self.assertEqual(parse_codecs('vp9.2'), {
            'vcodec': 'vp9.2',
            'acodec': 'none',
            'dynamic_range': 'HDR10',
        })
        self.assertEqual(parse_codecs('av01.0.12M.10.0.110.09.16.09.0'), {
            'vcodec': 'av01.0.12M.10.0.110.09.16.09.0',
            'acodec': 'none',
            'dynamic_range': 'HDR10',
        })
        self.assertEqual(parse_codecs('dvhe'), {
            'vcodec': 'dvhe',
            'acodec': 'none',
            'dynamic_range': 'DV',
        })
        self.assertEqual(parse_codecs('theora, vorbis'), {
            'vcodec': 'theora',
            'acodec': 'vorbis',
            'dynamic_range': None,
        })
        self.assertEqual(parse_codecs('unknownvcodec, unknownacodec'), {
            'vcodec': 'unknownvcodec',
            'acodec': 'unknownacodec',
        })
        self.assertEqual(parse_codecs('unknown'), {})
2016-03-16 13:48:06 -04:00
|
|
|
|
|
2014-09-13 09:59:16 -04:00
|
|
|
|
def test_escape_rfc3986(self):
|
|
|
|
|
reserved = "!*'();:@&=+$,/?#[]"
|
|
|
|
|
unreserved = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_.~'
|
|
|
|
|
self.assertEqual(escape_rfc3986(reserved), reserved)
|
|
|
|
|
self.assertEqual(escape_rfc3986(unreserved), unreserved)
|
|
|
|
|
self.assertEqual(escape_rfc3986('тест'), '%D1%82%D0%B5%D1%81%D1%82')
|
|
|
|
|
self.assertEqual(escape_rfc3986('%D1%82%D0%B5%D1%81%D1%82'), '%D1%82%D0%B5%D1%81%D1%82')
|
|
|
|
|
self.assertEqual(escape_rfc3986('foo bar'), 'foo%20bar')
|
|
|
|
|
self.assertEqual(escape_rfc3986('foo%20bar'), 'foo%20bar')
|
|
|
|
|
|
|
|
|
|
def test_escape_url(self):
|
|
|
|
|
self.assertEqual(
|
|
|
|
|
escape_url('http://wowza.imust.org/srv/vod/telemb/new/UPLOAD/UPLOAD/20224_IncendieHavré_FD.mp4'),
|
|
|
|
|
'http://wowza.imust.org/srv/vod/telemb/new/UPLOAD/UPLOAD/20224_IncendieHavre%CC%81_FD.mp4'
|
|
|
|
|
)
|
|
|
|
|
self.assertEqual(
|
|
|
|
|
escape_url('http://www.ardmediathek.de/tv/Sturm-der-Liebe/Folge-2036-Zu-Mann-und-Frau-erklärt/Das-Erste/Video?documentId=22673108&bcastId=5290'),
|
|
|
|
|
'http://www.ardmediathek.de/tv/Sturm-der-Liebe/Folge-2036-Zu-Mann-und-Frau-erkl%C3%A4rt/Das-Erste/Video?documentId=22673108&bcastId=5290'
|
|
|
|
|
)
|
|
|
|
|
self.assertEqual(
|
|
|
|
|
escape_url('http://тест.рф/фрагмент'),
|
2016-03-23 11:20:28 -04:00
|
|
|
|
'http://xn--e1aybc.xn--p1ai/%D1%84%D1%80%D0%B0%D0%B3%D0%BC%D0%B5%D0%BD%D1%82'
|
2014-09-13 09:59:16 -04:00
|
|
|
|
)
|
|
|
|
|
self.assertEqual(
|
|
|
|
|
escape_url('http://тест.рф/абв?абв=абв#абв'),
|
2016-03-23 11:23:26 -04:00
|
|
|
|
'http://xn--e1aybc.xn--p1ai/%D0%B0%D0%B1%D0%B2?%D0%B0%D0%B1%D0%B2=%D0%B0%D0%B1%D0%B2#%D0%B0%D0%B1%D0%B2'
|
2014-09-13 09:59:16 -04:00
|
|
|
|
)
|
|
|
|
|
self.assertEqual(escape_url('http://vimeo.com/56015672#at=0'), 'http://vimeo.com/56015672#at=0')
|
|
|
|
|
|
2022-12-30 01:38:38 -05:00
|
|
|
|
def test_js_to_json_vars_strings(self):
|
|
|
|
|
self.assertDictEqual(
|
|
|
|
|
json.loads(js_to_json(
|
|
|
|
|
'''{
|
|
|
|
|
'null': a,
|
|
|
|
|
'nullStr': b,
|
|
|
|
|
'true': c,
|
|
|
|
|
'trueStr': d,
|
|
|
|
|
'false': e,
|
|
|
|
|
'falseStr': f,
|
|
|
|
|
'unresolvedVar': g,
|
|
|
|
|
}''',
|
|
|
|
|
{
|
|
|
|
|
'a': 'null',
|
|
|
|
|
'b': '"null"',
|
|
|
|
|
'c': 'true',
|
|
|
|
|
'd': '"true"',
|
|
|
|
|
'e': 'false',
|
|
|
|
|
'f': '"false"',
|
|
|
|
|
'g': 'var',
|
|
|
|
|
}
|
|
|
|
|
)),
|
|
|
|
|
{
|
|
|
|
|
'null': None,
|
|
|
|
|
'nullStr': 'null',
|
|
|
|
|
'true': True,
|
|
|
|
|
'trueStr': 'true',
|
|
|
|
|
'false': False,
|
|
|
|
|
'falseStr': 'false',
|
|
|
|
|
'unresolvedVar': 'var'
|
|
|
|
|
}
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
self.assertDictEqual(
|
|
|
|
|
json.loads(js_to_json(
|
|
|
|
|
'''{
|
|
|
|
|
'int': a,
|
|
|
|
|
'intStr': b,
|
|
|
|
|
'float': c,
|
|
|
|
|
'floatStr': d,
|
|
|
|
|
}''',
|
|
|
|
|
{
|
|
|
|
|
'a': '123',
|
|
|
|
|
'b': '"123"',
|
|
|
|
|
'c': '1.23',
|
|
|
|
|
'd': '"1.23"',
|
|
|
|
|
}
|
|
|
|
|
)),
|
|
|
|
|
{
|
|
|
|
|
'int': 123,
|
|
|
|
|
'intStr': '123',
|
|
|
|
|
'float': 1.23,
|
|
|
|
|
'floatStr': '1.23',
|
|
|
|
|
}
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
self.assertDictEqual(
|
|
|
|
|
json.loads(js_to_json(
|
|
|
|
|
'''{
|
|
|
|
|
'object': a,
|
|
|
|
|
'objectStr': b,
|
|
|
|
|
'array': c,
|
|
|
|
|
'arrayStr': d,
|
|
|
|
|
}''',
|
|
|
|
|
{
|
|
|
|
|
'a': '{}',
|
|
|
|
|
'b': '"{}"',
|
|
|
|
|
'c': '[]',
|
|
|
|
|
'd': '"[]"',
|
|
|
|
|
}
|
|
|
|
|
)),
|
|
|
|
|
{
|
|
|
|
|
'object': {},
|
|
|
|
|
'objectStr': '{}',
|
|
|
|
|
'array': [],
|
|
|
|
|
'arrayStr': '[]',
|
|
|
|
|
}
|
|
|
|
|
)
|
|
|
|
|
|
2014-09-30 05:12:59 -04:00
|
|
|
|
def test_js_to_json_realworld(self):
|
2014-09-30 01:56:24 -04:00
|
|
|
|
inp = '''{
|
2014-09-30 05:12:59 -04:00
|
|
|
|
'clip':{'provider':'pseudo'}
|
2014-09-30 01:56:24 -04:00
|
|
|
|
}'''
|
|
|
|
|
self.assertEqual(js_to_json(inp), '''{
|
2014-09-30 05:12:59 -04:00
|
|
|
|
"clip":{"provider":"pseudo"}
|
2014-09-30 01:56:24 -04:00
|
|
|
|
}''')
|
|
|
|
|
json.loads(js_to_json(inp))
|
|
|
|
|
|
2014-09-30 05:12:59 -04:00
|
|
|
|
inp = '''{
|
|
|
|
|
'playlist':[{'controls':{'all':null}}]
|
|
|
|
|
}'''
|
|
|
|
|
self.assertEqual(js_to_json(inp), '''{
|
|
|
|
|
"playlist":[{"controls":{"all":null}}]
|
|
|
|
|
}''')
|
|
|
|
|
|
2015-10-20 13:09:51 -04:00
|
|
|
|
inp = '''"The CW\\'s \\'Crazy Ex-Girlfriend\\'"'''
|
|
|
|
|
self.assertEqual(js_to_json(inp), '''"The CW's 'Crazy Ex-Girlfriend'"''')
|
|
|
|
|
|
2015-02-18 17:59:50 -05:00
|
|
|
|
inp = '"SAND Number: SAND 2013-7800P\\nPresenter: Tom Russo\\nHabanero Software Training - Xyce Software\\nXyce, Sandia\\u0027s"'
|
|
|
|
|
json_code = js_to_json(inp)
|
|
|
|
|
self.assertEqual(json.loads(json_code), json.loads(inp))
|
|
|
|
|
|
2016-05-14 10:39:58 -04:00
|
|
|
|
inp = '''{
|
|
|
|
|
0:{src:'skipped', type: 'application/dash+xml'},
|
|
|
|
|
1:{src:'skipped', type: 'application/vnd.apple.mpegURL'},
|
|
|
|
|
}'''
|
|
|
|
|
self.assertEqual(js_to_json(inp), '''{
|
|
|
|
|
"0":{"src":"skipped", "type": "application/dash+xml"},
|
|
|
|
|
"1":{"src":"skipped", "type": "application/vnd.apple.mpegURL"}
|
|
|
|
|
}''')
|
|
|
|
|
|
2016-06-15 23:00:54 -04:00
|
|
|
|
inp = '''{"foo":101}'''
|
|
|
|
|
self.assertEqual(js_to_json(inp), '''{"foo":101}''')
|
|
|
|
|
|
2016-08-19 08:37:17 -04:00
|
|
|
|
inp = '''{"duration": "00:01:07"}'''
|
|
|
|
|
self.assertEqual(js_to_json(inp), '''{"duration": "00:01:07"}''')
|
|
|
|
|
|
2018-01-20 10:54:21 -05:00
|
|
|
|
inp = '''{segments: [{"offset":-3.885780586188048e-16,"duration":39.75000000000001}]}'''
|
|
|
|
|
self.assertEqual(js_to_json(inp), '''{"segments": [{"offset":-3.885780586188048e-16,"duration":39.75000000000001}]}''')
|
|
|
|
|
|
2014-09-30 05:12:59 -04:00
|
|
|
|
def test_js_to_json_edgecases(self):
|
|
|
|
|
on = js_to_json("{abc_def:'1\\'\\\\2\\\\\\'3\"4'}")
|
|
|
|
|
self.assertEqual(json.loads(on), {"abc_def": "1'\\2\\'3\"4"})
|
|
|
|
|
|
|
|
|
|
on = js_to_json('{"abc": true}')
|
|
|
|
|
self.assertEqual(json.loads(on), {'abc': True})
|
|
|
|
|
|
2015-02-02 15:48:54 -05:00
|
|
|
|
# Ignore JavaScript code as well
|
|
|
|
|
on = js_to_json('''{
|
|
|
|
|
"x": 1,
|
|
|
|
|
y: "a",
|
|
|
|
|
z: some.code
|
|
|
|
|
}''')
|
|
|
|
|
d = json.loads(on)
|
|
|
|
|
self.assertEqual(d['x'], 1)
|
|
|
|
|
self.assertEqual(d['y'], 'a')
|
|
|
|
|
|
2020-11-19 14:22:59 -05:00
|
|
|
|
# Just drop ! prefix for now though this results in a wrong value
|
|
|
|
|
on = js_to_json('''{
|
|
|
|
|
a: !0,
|
|
|
|
|
b: !1,
|
|
|
|
|
c: !!0,
|
|
|
|
|
d: !!42.42,
|
|
|
|
|
e: !!![],
|
|
|
|
|
f: !"abc",
|
|
|
|
|
g: !"",
|
|
|
|
|
!42: 42
|
|
|
|
|
}''')
|
|
|
|
|
self.assertEqual(json.loads(on), {
|
|
|
|
|
'a': 0,
|
|
|
|
|
'b': 1,
|
|
|
|
|
'c': 0,
|
|
|
|
|
'd': 42.42,
|
|
|
|
|
'e': [],
|
|
|
|
|
'f': "abc",
|
|
|
|
|
'g': "",
|
|
|
|
|
'42': 42
|
|
|
|
|
})
|
|
|
|
|
|
2015-04-04 07:48:55 -04:00
|
|
|
|
on = js_to_json('["abc", "def",]')
|
|
|
|
|
self.assertEqual(json.loads(on), ['abc', 'def'])
|
|
|
|
|
|
2017-02-02 14:55:06 -05:00
|
|
|
|
on = js_to_json('[/*comment\n*/"abc"/*comment\n*/,/*comment\n*/"def",/*comment\n*/]')
|
|
|
|
|
self.assertEqual(json.loads(on), ['abc', 'def'])
|
|
|
|
|
|
|
|
|
|
on = js_to_json('[//comment\n"abc" //comment\n,//comment\n"def",//comment\n]')
|
|
|
|
|
self.assertEqual(json.loads(on), ['abc', 'def'])
|
|
|
|
|
|
2015-04-04 07:48:55 -04:00
|
|
|
|
on = js_to_json('{"abc": "def",}')
|
|
|
|
|
self.assertEqual(json.loads(on), {'abc': 'def'})
|
|
|
|
|
|
2017-02-02 14:55:06 -05:00
|
|
|
|
on = js_to_json('{/*comment\n*/"abc"/*comment\n*/:/*comment\n*/"def"/*comment\n*/,/*comment\n*/}')
|
|
|
|
|
self.assertEqual(json.loads(on), {'abc': 'def'})
|
|
|
|
|
|
2016-03-13 07:29:15 -04:00
|
|
|
|
on = js_to_json('{ 0: /* " \n */ ",]" , }')
|
|
|
|
|
self.assertEqual(json.loads(on), {'0': ',]'})
|
|
|
|
|
|
2017-02-02 14:55:06 -05:00
|
|
|
|
on = js_to_json('{ /*comment\n*/0/*comment\n*/: /* " \n */ ",]" , }')
|
|
|
|
|
self.assertEqual(json.loads(on), {'0': ',]'})
|
|
|
|
|
|
2017-01-31 01:54:53 -05:00
|
|
|
|
on = js_to_json('{ 0: // comment\n1 }')
|
|
|
|
|
self.assertEqual(json.loads(on), {'0': 1})
|
|
|
|
|
|
2016-03-13 07:29:15 -04:00
|
|
|
|
on = js_to_json(r'["<p>x<\/p>"]')
|
|
|
|
|
self.assertEqual(json.loads(on), ['<p>x</p>'])
|
|
|
|
|
|
|
|
|
|
on = js_to_json(r'["\xaa"]')
|
|
|
|
|
self.assertEqual(json.loads(on), ['\u00aa'])
|
|
|
|
|
|
|
|
|
|
on = js_to_json("['a\\\nb']")
|
|
|
|
|
self.assertEqual(json.loads(on), ['ab'])
|
|
|
|
|
|
2017-02-02 14:55:06 -05:00
|
|
|
|
on = js_to_json("/*comment\n*/[/*comment\n*/'a\\\nb'/*comment\n*/]/*comment\n*/")
|
|
|
|
|
self.assertEqual(json.loads(on), ['ab'])
|
|
|
|
|
|
2016-05-14 10:39:58 -04:00
|
|
|
|
on = js_to_json('{0xff:0xff}')
|
|
|
|
|
self.assertEqual(json.loads(on), {'255': 255})
|
|
|
|
|
|
2017-02-02 14:55:06 -05:00
|
|
|
|
on = js_to_json('{/*comment\n*/0xff/*comment\n*/:/*comment\n*/0xff/*comment\n*/}')
|
|
|
|
|
self.assertEqual(json.loads(on), {'255': 255})
|
|
|
|
|
|
2016-05-14 10:39:58 -04:00
|
|
|
|
on = js_to_json('{077:077}')
|
|
|
|
|
self.assertEqual(json.loads(on), {'63': 63})
|
|
|
|
|
|
2017-02-02 14:55:06 -05:00
|
|
|
|
on = js_to_json('{/*comment\n*/077/*comment\n*/:/*comment\n*/077/*comment\n*/}')
|
|
|
|
|
self.assertEqual(json.loads(on), {'63': 63})
|
|
|
|
|
|
2016-05-14 10:39:58 -04:00
|
|
|
|
on = js_to_json('{42:42}')
|
|
|
|
|
self.assertEqual(json.loads(on), {'42': 42})
|
|
|
|
|
|
2017-02-02 14:55:06 -05:00
|
|
|
|
on = js_to_json('{/*comment\n*/42/*comment\n*/:/*comment\n*/42/*comment\n*/}')
|
|
|
|
|
self.assertEqual(json.loads(on), {'42': 42})
|
|
|
|
|
|
2018-01-20 10:54:21 -05:00
|
|
|
|
on = js_to_json('{42:4.2e1}')
|
|
|
|
|
self.assertEqual(json.loads(on), {'42': 42.0})
|
|
|
|
|
|
2020-11-19 14:22:59 -05:00
|
|
|
|
on = js_to_json('{ "0x40": "0x40" }')
|
|
|
|
|
self.assertEqual(json.loads(on), {'0x40': '0x40'})
|
|
|
|
|
|
|
|
|
|
on = js_to_json('{ "040": "040" }')
|
|
|
|
|
self.assertEqual(json.loads(on), {'040': '040'})
|
|
|
|
|
|
2021-07-13 03:18:20 -04:00
|
|
|
|
on = js_to_json('[1,//{},\n2]')
|
|
|
|
|
self.assertEqual(json.loads(on), [1, 2])
|
|
|
|
|
|
2022-10-12 16:22:17 -04:00
|
|
|
|
on = js_to_json(R'"\^\$\#"')
|
|
|
|
|
self.assertEqual(json.loads(on), R'^$#', msg='Unnecessary escapes should be stripped')
|
|
|
|
|
|
|
|
|
|
on = js_to_json('\'"\\""\'')
|
|
|
|
|
self.assertEqual(json.loads(on), '"""', msg='Unnecessary quote escape should be escaped')
|
|
|
|
|
|
2018-01-20 10:58:48 -05:00
|
|
|
|
def test_js_to_json_malformed(self):
|
|
|
|
|
self.assertEqual(js_to_json('42a1'), '42"a1"')
|
|
|
|
|
self.assertEqual(js_to_json('42a-1'), '42"a"-1')
|
|
|
|
|
|
2016-01-02 14:49:59 -05:00
|
|
|
|
def test_extract_attributes(self):
|
|
|
|
|
self.assertEqual(extract_attributes('<e x="y">'), {'x': 'y'})
|
|
|
|
|
self.assertEqual(extract_attributes("<e x='y'>"), {'x': 'y'})
|
|
|
|
|
self.assertEqual(extract_attributes('<e x=y>'), {'x': 'y'})
|
|
|
|
|
self.assertEqual(extract_attributes('<e x="a \'b\' c">'), {'x': "a 'b' c"})
|
|
|
|
|
self.assertEqual(extract_attributes('<e x=\'a "b" c\'>'), {'x': 'a "b" c'})
|
|
|
|
|
self.assertEqual(extract_attributes('<e x="y">'), {'x': 'y'})
|
|
|
|
|
self.assertEqual(extract_attributes('<e x="y">'), {'x': 'y'})
|
|
|
|
|
self.assertEqual(extract_attributes('<e x="&">'), {'x': '&'}) # XML
|
|
|
|
|
self.assertEqual(extract_attributes('<e x=""">'), {'x': '"'})
|
2016-03-16 11:50:04 -04:00
|
|
|
|
self.assertEqual(extract_attributes('<e x="£">'), {'x': '£'}) # HTML 3.2
|
|
|
|
|
self.assertEqual(extract_attributes('<e x="λ">'), {'x': 'λ'}) # HTML 4.0
|
2016-01-02 14:49:59 -05:00
|
|
|
|
self.assertEqual(extract_attributes('<e x="&foo">'), {'x': '&foo'})
|
|
|
|
|
self.assertEqual(extract_attributes('<e x="\'">'), {'x': "'"})
|
|
|
|
|
self.assertEqual(extract_attributes('<e x=\'"\'>'), {'x': '"'})
|
|
|
|
|
self.assertEqual(extract_attributes('<e x >'), {'x': None})
|
|
|
|
|
self.assertEqual(extract_attributes('<e x=y a>'), {'x': 'y', 'a': None})
|
|
|
|
|
self.assertEqual(extract_attributes('<e x= y>'), {'x': 'y'})
|
|
|
|
|
self.assertEqual(extract_attributes('<e x=1 y=2 x=3>'), {'y': '2', 'x': '3'})
|
|
|
|
|
self.assertEqual(extract_attributes('<e \nx=\ny\n>'), {'x': 'y'})
|
|
|
|
|
self.assertEqual(extract_attributes('<e \nx=\n"y"\n>'), {'x': 'y'})
|
|
|
|
|
self.assertEqual(extract_attributes("<e \nx=\n'y'\n>"), {'x': 'y'})
|
|
|
|
|
self.assertEqual(extract_attributes('<e \nx="\ny\n">'), {'x': '\ny\n'})
|
2016-03-16 11:50:04 -04:00
|
|
|
|
self.assertEqual(extract_attributes('<e CAPS=x>'), {'caps': 'x'}) # Names lowercased
|
2016-01-02 14:49:59 -05:00
|
|
|
|
self.assertEqual(extract_attributes('<e x=1 X=2>'), {'x': '2'})
|
|
|
|
|
self.assertEqual(extract_attributes('<e X=1 x=2>'), {'x': '2'})
|
|
|
|
|
self.assertEqual(extract_attributes('<e _:funny-name1=1>'), {'_:funny-name1': '1'})
|
|
|
|
|
self.assertEqual(extract_attributes('<e x="Fáilte 世界 \U0001f600">'), {'x': 'Fáilte 世界 \U0001f600'})
|
|
|
|
|
self.assertEqual(extract_attributes('<e x="décomposé">'), {'x': 'décompose\u0301'})
|
|
|
|
|
# "Narrow" Python builds don't support unicode code points outside BMP.
|
|
|
|
|
try:
|
2022-06-24 04:10:17 -04:00
|
|
|
|
chr(0x10000)
|
2016-01-02 14:49:59 -05:00
|
|
|
|
supports_outside_bmp = True
|
|
|
|
|
except ValueError:
|
|
|
|
|
supports_outside_bmp = False
|
|
|
|
|
if supports_outside_bmp:
|
|
|
|
|
self.assertEqual(extract_attributes('<e x="Smile 😀!">'), {'x': 'Smile \U0001f600!'})
|
2017-06-11 14:52:24 -04:00
|
|
|
|
# Malformed HTML should not break attributes extraction on older Python
|
|
|
|
|
self.assertEqual(extract_attributes('<mal"formed/>'), {})
|
2016-01-02 14:49:59 -05:00
|
|
|
|
|
2014-11-13 09:02:31 -05:00
|
|
|
|
def test_clean_html(self):
|
|
|
|
|
self.assertEqual(clean_html('a:\nb'), 'a: b')
|
2022-02-03 12:44:46 -05:00
|
|
|
|
self.assertEqual(clean_html('a:\n "b"'), 'a: "b"')
|
2017-04-28 12:05:14 -04:00
|
|
|
|
self.assertEqual(clean_html('a<br>\xa0b'), 'a\nb')
|
2014-11-13 09:02:31 -05:00
|
|
|
|
|
2014-11-13 09:28:42 -05:00
|
|
|
|
def test_intlist_to_bytes(self):
|
|
|
|
|
self.assertEqual(
|
|
|
|
|
intlist_to_bytes([0, 1, 127, 128, 255]),
|
|
|
|
|
b'\x00\x01\x7f\x80\xff')
|
|
|
|
|
|
2014-11-23 04:49:19 -05:00
|
|
|
|
def test_args_to_str(self):
|
|
|
|
|
self.assertEqual(
|
|
|
|
|
args_to_str(['foo', 'ba/r', '-baz', '2 be', '']),
|
2017-07-05 13:25:37 -04:00
|
|
|
|
'foo ba/r -baz \'2 be\' \'\'' if compat_os_name != 'nt' else 'foo ba/r -baz "2 be" ""'
|
2014-11-23 04:49:19 -05:00
|
|
|
|
)
|
|
|
|
|
|
2014-11-25 03:54:54 -05:00
|
|
|
|
def test_parse_filesize(self):
|
|
|
|
|
self.assertEqual(parse_filesize(None), None)
|
|
|
|
|
self.assertEqual(parse_filesize(''), None)
|
|
|
|
|
self.assertEqual(parse_filesize('91 B'), 91)
|
|
|
|
|
self.assertEqual(parse_filesize('foobar'), None)
|
|
|
|
|
self.assertEqual(parse_filesize('2 MiB'), 2097152)
|
|
|
|
|
self.assertEqual(parse_filesize('5 GB'), 5000000000)
|
|
|
|
|
self.assertEqual(parse_filesize('1.2Tb'), 1200000000000)
|
2016-08-18 12:32:00 -04:00
|
|
|
|
self.assertEqual(parse_filesize('1.2tb'), 1200000000000)
|
2014-12-04 11:02:05 -05:00
|
|
|
|
self.assertEqual(parse_filesize('1,24 KB'), 1240)
|
2016-08-18 12:32:00 -04:00
|
|
|
|
self.assertEqual(parse_filesize('1,24 kb'), 1240)
|
2016-08-19 12:12:32 -04:00
|
|
|
|
self.assertEqual(parse_filesize('8.5 megabytes'), 8500000)
|
2014-11-25 03:54:54 -05:00
|
|
|
|
|
2016-03-13 06:27:20 -04:00
|
|
|
|
def test_parse_count(self):
|
|
|
|
|
self.assertEqual(parse_count(None), None)
|
|
|
|
|
self.assertEqual(parse_count(''), None)
|
|
|
|
|
self.assertEqual(parse_count('0'), 0)
|
|
|
|
|
self.assertEqual(parse_count('1000'), 1000)
|
|
|
|
|
self.assertEqual(parse_count('1.000'), 1000)
|
|
|
|
|
self.assertEqual(parse_count('1.1k'), 1100)
|
2021-12-23 16:32:50 -05:00
|
|
|
|
self.assertEqual(parse_count('1.1 k'), 1100)
|
|
|
|
|
self.assertEqual(parse_count('1,1 k'), 1100)
|
2016-03-13 06:27:20 -04:00
|
|
|
|
self.assertEqual(parse_count('1.1kk'), 1100000)
|
2016-03-19 06:42:35 -04:00
|
|
|
|
self.assertEqual(parse_count('1.1kk '), 1100000)
|
2021-12-23 16:32:50 -05:00
|
|
|
|
self.assertEqual(parse_count('1,1kk'), 1100000)
|
|
|
|
|
self.assertEqual(parse_count('100 views'), 100)
|
|
|
|
|
self.assertEqual(parse_count('1,100 views'), 1100)
|
2016-03-19 06:42:35 -04:00
|
|
|
|
self.assertEqual(parse_count('1.1kk views'), 1100000)
|
2021-12-23 16:32:50 -05:00
|
|
|
|
self.assertEqual(parse_count('10M views'), 10000000)
|
|
|
|
|
self.assertEqual(parse_count('has 10M views'), 10000000)
|
2016-03-13 06:27:20 -04:00
|
|
|
|
|
2018-03-02 11:39:04 -05:00
|
|
|
|
def test_parse_resolution(self):
|
|
|
|
|
self.assertEqual(parse_resolution(None), {})
|
|
|
|
|
self.assertEqual(parse_resolution(''), {})
|
2021-10-21 20:04:00 -04:00
|
|
|
|
self.assertEqual(parse_resolution(' 1920x1080'), {'width': 1920, 'height': 1080})
|
|
|
|
|
self.assertEqual(parse_resolution('1920×1080 '), {'width': 1920, 'height': 1080})
|
2018-03-02 11:39:04 -05:00
|
|
|
|
self.assertEqual(parse_resolution('1920 x 1080'), {'width': 1920, 'height': 1080})
|
|
|
|
|
self.assertEqual(parse_resolution('720p'), {'height': 720})
|
|
|
|
|
self.assertEqual(parse_resolution('4k'), {'height': 2160})
|
|
|
|
|
self.assertEqual(parse_resolution('8K'), {'height': 4320})
|
2021-10-21 20:04:00 -04:00
|
|
|
|
self.assertEqual(parse_resolution('pre_1920x1080_post'), {'width': 1920, 'height': 1080})
|
|
|
|
|
self.assertEqual(parse_resolution('ep1x2'), {})
|
|
|
|
|
self.assertEqual(parse_resolution('1920, 1080'), {'width': 1920, 'height': 1080})
|
2018-03-02 11:39:04 -05:00
|
|
|
|
|
2019-03-16 22:07:47 -04:00
|
|
|
|
def test_parse_bitrate(self):
|
|
|
|
|
self.assertEqual(parse_bitrate(None), None)
|
|
|
|
|
self.assertEqual(parse_bitrate(''), None)
|
|
|
|
|
self.assertEqual(parse_bitrate('300kbps'), 300)
|
|
|
|
|
self.assertEqual(parse_bitrate('1500kbps'), 1500)
|
|
|
|
|
self.assertEqual(parse_bitrate('300 kbps'), 300)
|
|
|
|
|
|
2014-12-06 06:14:26 -05:00
|
|
|
|
def test_version_tuple(self):
|
|
|
|
|
self.assertEqual(version_tuple('1'), (1,))
|
|
|
|
|
self.assertEqual(version_tuple('10.23.344'), (10, 23, 344))
|
2014-12-06 06:36:23 -05:00
|
|
|
|
self.assertEqual(version_tuple('10.1-6'), (10, 1, 6)) # avconv style
|
2014-12-06 06:14:26 -05:00
|
|
|
|
|
2014-12-14 15:59:59 -05:00
|
|
|
|
def test_detect_exe_version(self):
|
|
|
|
|
self.assertEqual(detect_exe_version('''ffmpeg version 1.2.1
|
|
|
|
|
built on May 27 2013 08:37:26 with gcc 4.7 (Debian 4.7.3-4)
|
|
|
|
|
configuration: --prefix=/usr --extra-'''), '1.2.1')
|
|
|
|
|
self.assertEqual(detect_exe_version('''ffmpeg version N-63176-g1fb4685
|
|
|
|
|
built on May 15 2014 22:09:06 with gcc 4.8.2 (GCC)'''), 'N-63176-g1fb4685')
|
|
|
|
|
self.assertEqual(detect_exe_version('''X server found. dri2 connection failed!
|
|
|
|
|
Trying to open render node...
|
|
|
|
|
Success at /dev/dri/renderD128.
|
|
|
|
|
ffmpeg version 2.4.4 Copyright (c) 2000-2014 the FFmpeg ...'''), '2.4.4')
|
|
|
|
|
|
2015-01-07 01:20:20 -05:00
|
|
|
|
def test_age_restricted(self):
|
|
|
|
|
self.assertFalse(age_restricted(None, 10)) # unrestricted content
|
|
|
|
|
self.assertFalse(age_restricted(1, None)) # unrestricted policy
|
|
|
|
|
self.assertFalse(age_restricted(8, 10))
|
|
|
|
|
self.assertTrue(age_restricted(18, 14))
|
|
|
|
|
self.assertFalse(age_restricted(18, 18))
|
|
|
|
|
|
2015-01-22 19:21:30 -05:00
|
|
|
|
def test_is_html(self):
|
|
|
|
|
self.assertFalse(is_html(b'\x49\x44\x43<html'))
|
|
|
|
|
self.assertTrue(is_html(b'<!DOCTYPE foo>\xaaa'))
|
|
|
|
|
self.assertTrue(is_html( # UTF-8 with BOM
|
|
|
|
|
b'\xef\xbb\xbf<!DOCTYPE foo>\xaaa'))
|
|
|
|
|
self.assertTrue(is_html( # UTF-16-LE
|
|
|
|
|
b'\xff\xfe<\x00h\x00t\x00m\x00l\x00>\x00\xe4\x00'
|
|
|
|
|
))
|
|
|
|
|
self.assertTrue(is_html( # UTF-16-BE
|
|
|
|
|
b'\xfe\xff\x00<\x00h\x00t\x00m\x00l\x00>\x00\xe4'
|
|
|
|
|
))
|
|
|
|
|
self.assertTrue(is_html( # UTF-32-BE
|
|
|
|
|
b'\x00\x00\xFE\xFF\x00\x00\x00<\x00\x00\x00h\x00\x00\x00t\x00\x00\x00m\x00\x00\x00l\x00\x00\x00>\x00\x00\x00\xe4'))
|
|
|
|
|
self.assertTrue(is_html( # UTF-32-LE
|
|
|
|
|
b'\xFF\xFE\x00\x00<\x00\x00\x00h\x00\x00\x00t\x00\x00\x00m\x00\x00\x00l\x00\x00\x00>\x00\x00\x00\xe4\x00\x00\x00'))
|
|
|
|
|
|
2015-01-24 20:38:47 -05:00
|
|
|
|
def test_render_table(self):
|
2021-11-19 22:03:51 -05:00
|
|
|
|
self.assertEqual(
|
|
|
|
|
render_table(
|
|
|
|
|
['a', 'empty', 'bcd'],
|
|
|
|
|
[[123, '', 4], [9999, '', 51]]),
|
|
|
|
|
'a empty bcd\n'
|
|
|
|
|
'123 4\n'
|
|
|
|
|
'9999 51')
|
|
|
|
|
|
|
|
|
|
self.assertEqual(
|
|
|
|
|
render_table(
|
|
|
|
|
['a', 'empty', 'bcd'],
|
|
|
|
|
[[123, '', 4], [9999, '', 51]],
|
|
|
|
|
hide_empty=True),
|
|
|
|
|
'a bcd\n'
|
|
|
|
|
'123 4\n'
|
|
|
|
|
'9999 51')
|
|
|
|
|
|
|
|
|
|
self.assertEqual(
|
|
|
|
|
render_table(
|
|
|
|
|
['\ta', 'bcd'],
|
|
|
|
|
[['1\t23', 4], ['\t9999', 51]]),
|
|
|
|
|
' a bcd\n'
|
|
|
|
|
'1 23 4\n'
|
|
|
|
|
'9999 51')
|
|
|
|
|
|
2015-01-24 20:38:47 -05:00
|
|
|
|
self.assertEqual(
|
|
|
|
|
render_table(
|
|
|
|
|
['a', 'bcd'],
|
2021-11-19 22:03:51 -05:00
|
|
|
|
[[123, 4], [9999, 51]],
|
|
|
|
|
delim='-'),
|
2015-01-24 20:38:47 -05:00
|
|
|
|
'a bcd\n'
|
2021-11-19 22:03:51 -05:00
|
|
|
|
'--------\n'
|
2015-01-24 20:38:47 -05:00
|
|
|
|
'123 4\n'
|
|
|
|
|
'9999 51')
|
|
|
|
|
|
2021-11-19 22:03:51 -05:00
|
|
|
|
self.assertEqual(
|
|
|
|
|
render_table(
|
|
|
|
|
['a', 'bcd'],
|
|
|
|
|
[[123, 4], [9999, 51]],
|
|
|
|
|
delim='-', extra_gap=2),
|
|
|
|
|
'a bcd\n'
|
|
|
|
|
'----------\n'
|
|
|
|
|
'123 4\n'
|
|
|
|
|
'9999 51')
|
|
|
|
|
|
2015-02-09 21:32:21 -05:00
|
|
|
|
def test_match_str(self):
|
2021-08-04 17:31:23 -04:00
|
|
|
|
# Unary
|
2015-02-09 21:32:21 -05:00
|
|
|
|
self.assertFalse(match_str('xy', {'x': 1200}))
|
|
|
|
|
self.assertTrue(match_str('!xy', {'x': 1200}))
|
|
|
|
|
self.assertTrue(match_str('x', {'x': 1200}))
|
|
|
|
|
self.assertFalse(match_str('!x', {'x': 1200}))
|
|
|
|
|
self.assertTrue(match_str('x', {'x': 0}))
|
2021-08-04 17:31:23 -04:00
|
|
|
|
self.assertTrue(match_str('is_live', {'is_live': True}))
|
|
|
|
|
self.assertFalse(match_str('is_live', {'is_live': False}))
|
|
|
|
|
self.assertFalse(match_str('is_live', {'is_live': None}))
|
|
|
|
|
self.assertFalse(match_str('is_live', {}))
|
|
|
|
|
self.assertFalse(match_str('!is_live', {'is_live': True}))
|
|
|
|
|
self.assertTrue(match_str('!is_live', {'is_live': False}))
|
|
|
|
|
self.assertTrue(match_str('!is_live', {'is_live': None}))
|
|
|
|
|
self.assertTrue(match_str('!is_live', {}))
|
|
|
|
|
self.assertTrue(match_str('title', {'title': 'abc'}))
|
|
|
|
|
self.assertTrue(match_str('title', {'title': ''}))
|
|
|
|
|
self.assertFalse(match_str('!title', {'title': 'abc'}))
|
|
|
|
|
self.assertFalse(match_str('!title', {'title': ''}))
|
|
|
|
|
|
|
|
|
|
# Numeric
|
2015-02-09 21:32:21 -05:00
|
|
|
|
self.assertFalse(match_str('x>0', {'x': 0}))
|
|
|
|
|
self.assertFalse(match_str('x>0', {}))
|
|
|
|
|
self.assertTrue(match_str('x>?0', {}))
|
|
|
|
|
self.assertTrue(match_str('x>1K', {'x': 1200}))
|
|
|
|
|
self.assertFalse(match_str('x>2K', {'x': 1200}))
|
|
|
|
|
self.assertTrue(match_str('x>=1200 & x < 1300', {'x': 1200}))
|
|
|
|
|
self.assertFalse(match_str('x>=1100 & x < 1200', {'x': 1200}))
|
2021-10-16 15:34:00 -04:00
|
|
|
|
self.assertTrue(match_str('x > 1:0:0', {'x': 3700}))
|
2021-08-04 17:31:23 -04:00
|
|
|
|
|
|
|
|
|
# String
|
2015-02-09 21:32:21 -05:00
|
|
|
|
self.assertFalse(match_str('y=a212', {'y': 'foobar42'}))
|
|
|
|
|
self.assertTrue(match_str('y=foobar42', {'y': 'foobar42'}))
|
|
|
|
|
self.assertFalse(match_str('y!=foobar42', {'y': 'foobar42'}))
|
|
|
|
|
self.assertTrue(match_str('y!=foobar2', {'y': 'foobar42'}))
|
2021-06-13 10:25:19 -04:00
|
|
|
|
self.assertTrue(match_str('y^=foo', {'y': 'foobar42'}))
|
|
|
|
|
self.assertFalse(match_str('y!^=foo', {'y': 'foobar42'}))
|
|
|
|
|
self.assertFalse(match_str('y^=bar', {'y': 'foobar42'}))
|
|
|
|
|
self.assertTrue(match_str('y!^=bar', {'y': 'foobar42'}))
|
|
|
|
|
self.assertRaises(ValueError, match_str, 'x^=42', {'x': 42})
|
|
|
|
|
self.assertTrue(match_str('y*=bar', {'y': 'foobar42'}))
|
|
|
|
|
self.assertFalse(match_str('y!*=bar', {'y': 'foobar42'}))
|
|
|
|
|
self.assertFalse(match_str('y*=baz', {'y': 'foobar42'}))
|
|
|
|
|
self.assertTrue(match_str('y!*=baz', {'y': 'foobar42'}))
|
|
|
|
|
self.assertTrue(match_str('y$=42', {'y': 'foobar42'}))
|
|
|
|
|
self.assertFalse(match_str('y$=43', {'y': 'foobar42'}))
|
2021-08-04 17:31:23 -04:00
|
|
|
|
|
|
|
|
|
# And
|
2015-02-09 21:32:21 -05:00
|
|
|
|
self.assertFalse(match_str(
|
|
|
|
|
'like_count > 100 & dislike_count <? 50 & description',
|
|
|
|
|
{'like_count': 90, 'description': 'foo'}))
|
|
|
|
|
self.assertTrue(match_str(
|
|
|
|
|
'like_count > 100 & dislike_count <? 50 & description',
|
|
|
|
|
{'like_count': 190, 'description': 'foo'}))
|
|
|
|
|
self.assertFalse(match_str(
|
|
|
|
|
'like_count > 100 & dislike_count <? 50 & description',
|
|
|
|
|
{'like_count': 190, 'dislike_count': 60, 'description': 'foo'}))
|
|
|
|
|
self.assertFalse(match_str(
|
|
|
|
|
'like_count > 100 & dislike_count <? 50 & description',
|
|
|
|
|
{'like_count': 190, 'dislike_count': 10}))
|
2021-08-04 17:31:23 -04:00
|
|
|
|
|
|
|
|
|
# Regex
|
|
|
|
|
self.assertTrue(match_str(r'x~=\bbar', {'x': 'foo bar'}))
|
|
|
|
|
self.assertFalse(match_str(r'x~=\bbar.+', {'x': 'foo bar'}))
|
|
|
|
|
self.assertFalse(match_str(r'x~=^FOO', {'x': 'foo bar'}))
|
|
|
|
|
self.assertTrue(match_str(r'x~=(?i)^FOO', {'x': 'foo bar'}))
|
|
|
|
|
|
|
|
|
|
# Quotes
|
|
|
|
|
self.assertTrue(match_str(r'x^="foo"', {'x': 'foo "bar"'}))
|
|
|
|
|
self.assertFalse(match_str(r'x^="foo "', {'x': 'foo "bar"'}))
|
|
|
|
|
self.assertFalse(match_str(r'x$="bar"', {'x': 'foo "bar"'}))
|
|
|
|
|
self.assertTrue(match_str(r'x$=" \"bar\""', {'x': 'foo "bar"'}))
|
|
|
|
|
|
|
|
|
|
# Escaping &
|
|
|
|
|
self.assertFalse(match_str(r'x=foo & bar', {'x': 'foo & bar'}))
|
|
|
|
|
self.assertTrue(match_str(r'x=foo \& bar', {'x': 'foo & bar'}))
|
|
|
|
|
self.assertTrue(match_str(r'x=foo \& bar & x^=foo', {'x': 'foo & bar'}))
|
|
|
|
|
self.assertTrue(match_str(r'x="foo \& bar" & x^=foo', {'x': 'foo & bar'}))
|
|
|
|
|
|
|
|
|
|
# Example from docs
|
2021-08-15 04:12:23 -04:00
|
|
|
|
self.assertTrue(match_str(
|
|
|
|
|
r"!is_live & like_count>?100 & description~='(?i)\bcats \& dogs\b'",
|
|
|
|
|
{'description': 'Raining Cats & Dogs'}))
|
|
|
|
|
|
|
|
|
|
# Incomplete
|
|
|
|
|
self.assertFalse(match_str('id!=foo', {'id': 'foo'}, True))
|
|
|
|
|
self.assertTrue(match_str('x', {'id': 'foo'}, True))
|
|
|
|
|
self.assertTrue(match_str('!x', {'id': 'foo'}, True))
|
|
|
|
|
self.assertFalse(match_str('x', {'id': 'foo'}, False))
|
2015-02-09 21:32:21 -05:00
|
|
|
|
|
2015-04-25 11:15:05 -04:00
|
|
|
|
def test_parse_dfxp_time_expr(self):
|
2015-12-19 05:21:42 -05:00
|
|
|
|
self.assertEqual(parse_dfxp_time_expr(None), None)
|
|
|
|
|
self.assertEqual(parse_dfxp_time_expr(''), None)
|
2015-04-25 11:15:05 -04:00
|
|
|
|
self.assertEqual(parse_dfxp_time_expr('0.1'), 0.1)
|
|
|
|
|
self.assertEqual(parse_dfxp_time_expr('0.1s'), 0.1)
|
|
|
|
|
self.assertEqual(parse_dfxp_time_expr('00:00:01'), 1.0)
|
|
|
|
|
self.assertEqual(parse_dfxp_time_expr('00:00:01.100'), 1.1)
|
2015-12-19 06:29:51 -05:00
|
|
|
|
self.assertEqual(parse_dfxp_time_expr('00:00:01:100'), 1.1)
|
2015-04-25 11:15:05 -04:00
|
|
|
|
|
|
|
|
|
def test_dfxp2srt(self):
|
|
|
|
|
dfxp_data = '''<?xml version="1.0" encoding="UTF-8"?>
|
|
|
|
|
<tt xmlns="http://www.w3.org/ns/ttml" xml:lang="en" xmlns:tts="http://www.w3.org/ns/ttml#parameter">
|
|
|
|
|
<body>
|
|
|
|
|
<div xml:lang="en">
|
|
|
|
|
<p begin="0" end="1">The following line contains Chinese characters and special symbols</p>
|
|
|
|
|
<p begin="1" end="2">第二行<br/>♪♪</p>
|
2015-05-12 00:47:37 -04:00
|
|
|
|
<p begin="2" dur="1"><span>Third<br/>Line</span></p>
|
2015-12-19 05:21:42 -05:00
|
|
|
|
<p begin="3" end="-1">Lines with invalid timestamps are ignored</p>
|
|
|
|
|
<p begin="-1" end="-1">Ignore, two</p>
|
|
|
|
|
<p begin="3" dur="-1">Ignored, three</p>
|
2015-04-25 11:15:05 -04:00
|
|
|
|
</div>
|
|
|
|
|
</body>
|
2022-04-11 11:10:28 -04:00
|
|
|
|
</tt>'''.encode()
|
2015-04-25 11:15:05 -04:00
|
|
|
|
srt_data = '''1
|
|
|
|
|
00:00:00,000 --> 00:00:01,000
|
|
|
|
|
The following line contains Chinese characters and special symbols
|
|
|
|
|
|
|
|
|
|
2
|
|
|
|
|
00:00:01,000 --> 00:00:02,000
|
|
|
|
|
第二行
|
|
|
|
|
♪♪
|
|
|
|
|
|
|
|
|
|
3
|
|
|
|
|
00:00:02,000 --> 00:00:03,000
|
|
|
|
|
Third
|
|
|
|
|
Line
|
|
|
|
|
|
|
|
|
|
'''
|
|
|
|
|
self.assertEqual(dfxp2srt(dfxp_data), srt_data)
|
|
|
|
|
|
2022-04-11 11:10:28 -04:00
|
|
|
|
dfxp_data_no_default_namespace = b'''<?xml version="1.0" encoding="UTF-8"?>
|
2015-05-18 12:45:01 -04:00
|
|
|
|
<tt xml:lang="en" xmlns:tts="http://www.w3.org/ns/ttml#parameter">
|
|
|
|
|
<body>
|
|
|
|
|
<div xml:lang="en">
|
|
|
|
|
<p begin="0" end="1">The first line</p>
|
|
|
|
|
</div>
|
|
|
|
|
</body>
|
2022-04-11 11:10:28 -04:00
|
|
|
|
</tt>'''
|
2015-05-18 12:45:01 -04:00
|
|
|
|
srt_data = '''1
|
|
|
|
|
00:00:00,000 --> 00:00:01,000
|
|
|
|
|
The first line
|
|
|
|
|
|
|
|
|
|
'''
|
|
|
|
|
self.assertEqual(dfxp2srt(dfxp_data_no_default_namespace), srt_data)
|
|
|
|
|
|
2022-04-11 11:10:28 -04:00
|
|
|
|
dfxp_data_with_style = b'''<?xml version="1.0" encoding="utf-8"?>
|
2017-02-23 12:46:20 -05:00
|
|
|
|
<tt xmlns="http://www.w3.org/2006/10/ttaf1" xmlns:ttp="http://www.w3.org/2006/10/ttaf1#parameter" ttp:timeBase="media" xmlns:tts="http://www.w3.org/2006/10/ttaf1#style" xml:lang="en" xmlns:ttm="http://www.w3.org/2006/10/ttaf1#metadata">
|
|
|
|
|
<head>
|
|
|
|
|
<styling>
|
|
|
|
|
<style id="s2" style="s0" tts:color="cyan" tts:fontWeight="bold" />
|
|
|
|
|
<style id="s1" style="s0" tts:color="yellow" tts:fontStyle="italic" />
|
|
|
|
|
<style id="s3" style="s0" tts:color="lime" tts:textDecoration="underline" />
|
|
|
|
|
<style id="s0" tts:backgroundColor="black" tts:fontStyle="normal" tts:fontSize="16" tts:fontFamily="sansSerif" tts:color="white" />
|
|
|
|
|
</styling>
|
|
|
|
|
</head>
|
|
|
|
|
<body tts:textAlign="center" style="s0">
|
|
|
|
|
<div>
|
|
|
|
|
<p begin="00:00:02.08" id="p0" end="00:00:05.84">default style<span tts:color="red">custom style</span></p>
|
|
|
|
|
<p style="s2" begin="00:00:02.08" id="p0" end="00:00:05.84"><span tts:color="lime">part 1<br /></span><span tts:color="cyan">part 2</span></p>
|
|
|
|
|
<p style="s3" begin="00:00:05.84" id="p1" end="00:00:09.56">line 3<br />part 3</p>
|
|
|
|
|
<p style="s1" tts:textDecoration="underline" begin="00:00:09.56" id="p2" end="00:00:12.36"><span style="s2" tts:color="lime">inner<br /> </span>style</p>
|
|
|
|
|
</div>
|
|
|
|
|
</body>
|
2022-04-11 11:10:28 -04:00
|
|
|
|
</tt>'''
|
2017-02-23 12:46:20 -05:00
|
|
|
|
srt_data = '''1
|
2021-10-19 13:28:14 -04:00
|
|
|
|
00:00:02,080 --> 00:00:05,840
|
2017-02-23 12:46:20 -05:00
|
|
|
|
<font color="white" face="sansSerif" size="16">default style<font color="red">custom style</font></font>
|
|
|
|
|
|
|
|
|
|
2
|
2021-10-19 13:28:14 -04:00
|
|
|
|
00:00:02,080 --> 00:00:05,840
|
2017-02-23 12:46:20 -05:00
|
|
|
|
<b><font color="cyan" face="sansSerif" size="16"><font color="lime">part 1
|
|
|
|
|
</font>part 2</font></b>
|
|
|
|
|
|
|
|
|
|
3
|
2021-10-19 13:28:14 -04:00
|
|
|
|
00:00:05,840 --> 00:00:09,560
|
2017-02-23 12:46:20 -05:00
|
|
|
|
<u><font color="lime">line 3
|
|
|
|
|
part 3</font></u>
|
|
|
|
|
|
|
|
|
|
4
|
2021-10-19 13:28:14 -04:00
|
|
|
|
00:00:09,560 --> 00:00:12,360
|
2017-02-23 12:46:20 -05:00
|
|
|
|
<i><u><font color="yellow"><font color="lime">inner
|
|
|
|
|
</font>style</font></u></i>
|
|
|
|
|
|
|
|
|
|
'''
|
|
|
|
|
self.assertEqual(dfxp2srt(dfxp_data_with_style), srt_data)
|
|
|
|
|
|
2017-09-16 00:18:38 -04:00
|
|
|
|
dfxp_data_non_utf8 = '''<?xml version="1.0" encoding="UTF-16"?>
|
|
|
|
|
<tt xmlns="http://www.w3.org/ns/ttml" xml:lang="en" xmlns:tts="http://www.w3.org/ns/ttml#parameter">
|
|
|
|
|
<body>
|
|
|
|
|
<div xml:lang="en">
|
|
|
|
|
<p begin="0" end="1">Line 1</p>
|
|
|
|
|
<p begin="1" end="2">第二行</p>
|
|
|
|
|
</div>
|
|
|
|
|
</body>
|
|
|
|
|
</tt>'''.encode('utf-16')
|
|
|
|
|
srt_data = '''1
|
|
|
|
|
00:00:00,000 --> 00:00:01,000
|
|
|
|
|
Line 1
|
|
|
|
|
|
|
|
|
|
2
|
|
|
|
|
00:00:01,000 --> 00:00:02,000
|
|
|
|
|
第二行
|
|
|
|
|
|
|
|
|
|
'''
|
|
|
|
|
self.assertEqual(dfxp2srt(dfxp_data_non_utf8), srt_data)
|
|
|
|
|
|
2015-09-04 17:07:19 -04:00
|
|
|
|
def test_cli_option(self):
|
|
|
|
|
self.assertEqual(cli_option({'proxy': '127.0.0.1:3128'}, '--proxy', 'proxy'), ['--proxy', '127.0.0.1:3128'])
|
|
|
|
|
self.assertEqual(cli_option({'proxy': None}, '--proxy', 'proxy'), [])
|
|
|
|
|
self.assertEqual(cli_option({}, '--proxy', 'proxy'), [])
|
2016-08-13 04:53:46 -04:00
|
|
|
|
self.assertEqual(cli_option({'retries': 10}, '--retries', 'retries'), ['--retries', '10'])
|
2015-09-04 17:07:19 -04:00
|
|
|
|
|
|
|
|
|
def test_cli_valueless_option(self):
|
|
|
|
|
self.assertEqual(cli_valueless_option(
|
|
|
|
|
{'downloader': 'external'}, '--external-downloader', 'downloader', 'external'), ['--external-downloader'])
|
|
|
|
|
self.assertEqual(cli_valueless_option(
|
|
|
|
|
{'downloader': 'internal'}, '--external-downloader', 'downloader', 'external'), [])
|
|
|
|
|
self.assertEqual(cli_valueless_option(
|
|
|
|
|
{'nocheckcertificate': True}, '--no-check-certificate', 'nocheckcertificate'), ['--no-check-certificate'])
|
|
|
|
|
self.assertEqual(cli_valueless_option(
|
|
|
|
|
{'nocheckcertificate': False}, '--no-check-certificate', 'nocheckcertificate'), [])
|
|
|
|
|
self.assertEqual(cli_valueless_option(
|
|
|
|
|
{'checkcertificate': True}, '--no-check-certificate', 'checkcertificate', False), [])
|
|
|
|
|
self.assertEqual(cli_valueless_option(
|
|
|
|
|
{'checkcertificate': False}, '--no-check-certificate', 'checkcertificate', False), ['--no-check-certificate'])
|
|
|
|
|
|
|
|
|
|
def test_cli_bool_option(self):
|
|
|
|
|
self.assertEqual(
|
|
|
|
|
cli_bool_option(
|
|
|
|
|
{'nocheckcertificate': True}, '--no-check-certificate', 'nocheckcertificate'),
|
|
|
|
|
['--no-check-certificate', 'true'])
|
|
|
|
|
self.assertEqual(
|
|
|
|
|
cli_bool_option(
|
|
|
|
|
{'nocheckcertificate': True}, '--no-check-certificate', 'nocheckcertificate', separator='='),
|
|
|
|
|
['--no-check-certificate=true'])
|
|
|
|
|
self.assertEqual(
|
|
|
|
|
cli_bool_option(
|
|
|
|
|
{'nocheckcertificate': True}, '--check-certificate', 'nocheckcertificate', 'false', 'true'),
|
|
|
|
|
['--check-certificate', 'false'])
|
|
|
|
|
self.assertEqual(
|
|
|
|
|
cli_bool_option(
|
|
|
|
|
{'nocheckcertificate': True}, '--check-certificate', 'nocheckcertificate', 'false', 'true', '='),
|
|
|
|
|
['--check-certificate=false'])
|
|
|
|
|
self.assertEqual(
|
|
|
|
|
cli_bool_option(
|
|
|
|
|
{'nocheckcertificate': False}, '--check-certificate', 'nocheckcertificate', 'false', 'true'),
|
|
|
|
|
['--check-certificate', 'true'])
|
|
|
|
|
self.assertEqual(
|
|
|
|
|
cli_bool_option(
|
|
|
|
|
{'nocheckcertificate': False}, '--check-certificate', 'nocheckcertificate', 'false', 'true', '='),
|
|
|
|
|
['--check-certificate=true'])
|
2017-08-09 11:28:19 -04:00
|
|
|
|
self.assertEqual(
|
|
|
|
|
cli_bool_option(
|
|
|
|
|
{}, '--check-certificate', 'nocheckcertificate', 'false', 'true', '='),
|
|
|
|
|
[])
|
2015-09-04 17:07:19 -04:00
|
|
|
|
|
2016-02-16 17:01:44 -05:00
|
|
|
|
def test_ohdave_rsa_encrypt(self):
|
|
|
|
|
N = 0xab86b6371b5318aaa1d3c9e612a9f1264f372323c8c0f19875b5fc3b3fd3afcc1e5bec527aa94bfa85bffc157e4245aebda05389a5357b75115ac94f074aefcd
|
|
|
|
|
e = 65537
|
|
|
|
|
|
|
|
|
|
self.assertEqual(
|
|
|
|
|
ohdave_rsa_encrypt(b'aa111222', e, N),
|
|
|
|
|
'726664bd9a23fd0c70f9f1b84aab5e3905ce1e45a584e9cbcf9bcc7510338fc1986d6c599ff990d923aa43c51c0d9013cd572e13bc58f4ae48f2ed8c0b0ba881')
|
2015-01-24 20:38:47 -05:00
|
|
|
|
|
2017-02-27 05:50:19 -05:00
|
|
|
|
def test_pkcs1pad(self):
|
|
|
|
|
data = [1, 2, 3]
|
|
|
|
|
padded_data = pkcs1pad(data, 32)
|
|
|
|
|
self.assertEqual(padded_data[:2], [0, 2])
|
|
|
|
|
self.assertEqual(padded_data[28:], [0, 1, 2, 3])
|
|
|
|
|
|
|
|
|
|
self.assertRaises(ValueError, pkcs1pad, data, 8)
|
|
|
|
|
|
2016-02-26 14:19:50 -05:00
|
|
|
|
def test_encode_base_n(self):
|
|
|
|
|
self.assertEqual(encode_base_n(0, 30), '0')
|
|
|
|
|
self.assertEqual(encode_base_n(80, 30), '2k')
|
|
|
|
|
|
|
|
|
|
custom_table = '9876543210ZYXWVUTSRQPONMLKJIHGFEDCBA'
|
|
|
|
|
self.assertEqual(encode_base_n(0, 30, custom_table), '9')
|
|
|
|
|
self.assertEqual(encode_base_n(80, 30, custom_table), '7P')
|
|
|
|
|
|
|
|
|
|
self.assertRaises(ValueError, encode_base_n, 0, 70)
|
|
|
|
|
self.assertRaises(ValueError, encode_base_n, 0, 60, custom_table)
|
|
|
|
|
|
2019-11-26 14:26:42 -05:00
|
|
|
|
def test_caesar(self):
|
|
|
|
|
self.assertEqual(caesar('ace', 'abcdef', 2), 'cea')
|
|
|
|
|
self.assertEqual(caesar('cea', 'abcdef', -2), 'ace')
|
|
|
|
|
self.assertEqual(caesar('ace', 'abcdef', -2), 'eac')
|
|
|
|
|
self.assertEqual(caesar('eac', 'abcdef', 2), 'ace')
|
|
|
|
|
self.assertEqual(caesar('ace', 'abcdef', 0), 'ace')
|
|
|
|
|
self.assertEqual(caesar('xyz', 'abcdef', 2), 'xyz')
|
|
|
|
|
self.assertEqual(caesar('abc', 'acegik', 2), 'ebg')
|
|
|
|
|
self.assertEqual(caesar('ebg', 'acegik', -2), 'abc')
|
|
|
|
|
|
|
|
|
|
def test_rot47(self):
|
2021-02-24 13:45:56 -05:00
|
|
|
|
self.assertEqual(rot47('yt-dlp'), r'JE\5=A')
|
|
|
|
|
self.assertEqual(rot47('YT-DLP'), r'*%\s{!')
|
2019-11-26 14:26:42 -05:00
|
|
|
|
|
2016-06-26 03:16:49 -04:00
|
|
|
|
def test_urshift(self):
|
|
|
|
|
self.assertEqual(urshift(3, 1), 1)
|
|
|
|
|
self.assertEqual(urshift(-3, 1), 2147483646)
|
|
|
|
|
|
2022-01-05 13:37:49 -05:00
|
|
|
|
GET_ELEMENT_BY_CLASS_TEST_STRING = '''
|
|
|
|
|
<span class="foo bar">nice</span>
|
|
|
|
|
'''
|
|
|
|
|
|
2016-07-06 08:02:52 -04:00
|
|
|
|
def test_get_element_by_class(self):
|
2022-01-05 13:37:49 -05:00
|
|
|
|
html = self.GET_ELEMENT_BY_CLASS_TEST_STRING
|
2016-07-06 08:02:52 -04:00
|
|
|
|
|
|
|
|
|
self.assertEqual(get_element_by_class('foo', html), 'nice')
|
|
|
|
|
self.assertEqual(get_element_by_class('no-such-class', html), None)
|
|
|
|
|
|
2022-01-05 13:37:49 -05:00
|
|
|
|
def test_get_element_html_by_class(self):
|
|
|
|
|
html = self.GET_ELEMENT_BY_CLASS_TEST_STRING
|
|
|
|
|
|
|
|
|
|
self.assertEqual(get_element_html_by_class('foo', html), html.strip())
|
|
|
|
|
self.assertEqual(get_element_by_class('no-such-class', html), None)
|
|
|
|
|
|
|
|
|
|
GET_ELEMENT_BY_ATTRIBUTE_TEST_STRING = '''
|
|
|
|
|
<div itemprop="author" itemscope>foo</div>
|
|
|
|
|
'''
|
|
|
|
|
|
2017-02-11 04:16:54 -05:00
|
|
|
|
def test_get_element_by_attribute(self):
|
2022-01-05 13:37:49 -05:00
|
|
|
|
html = self.GET_ELEMENT_BY_CLASS_TEST_STRING
|
2017-02-11 04:16:54 -05:00
|
|
|
|
|
|
|
|
|
self.assertEqual(get_element_by_attribute('class', 'foo bar', html), 'nice')
|
|
|
|
|
self.assertEqual(get_element_by_attribute('class', 'foo', html), None)
|
|
|
|
|
self.assertEqual(get_element_by_attribute('class', 'no-such-foo', html), None)
|
|
|
|
|
|
2022-01-05 13:37:49 -05:00
|
|
|
|
html = self.GET_ELEMENT_BY_ATTRIBUTE_TEST_STRING
|
2017-07-05 11:23:35 -04:00
|
|
|
|
|
|
|
|
|
self.assertEqual(get_element_by_attribute('itemprop', 'author', html), 'foo')
|
|
|
|
|
|
2022-01-05 13:37:49 -05:00
|
|
|
|
def test_get_element_html_by_attribute(self):
|
|
|
|
|
html = self.GET_ELEMENT_BY_CLASS_TEST_STRING
|
|
|
|
|
|
|
|
|
|
self.assertEqual(get_element_html_by_attribute('class', 'foo bar', html), html.strip())
|
|
|
|
|
self.assertEqual(get_element_html_by_attribute('class', 'foo', html), None)
|
|
|
|
|
self.assertEqual(get_element_html_by_attribute('class', 'no-such-foo', html), None)
|
|
|
|
|
|
|
|
|
|
html = self.GET_ELEMENT_BY_ATTRIBUTE_TEST_STRING
|
|
|
|
|
|
|
|
|
|
self.assertEqual(get_element_html_by_attribute('itemprop', 'author', html), html.strip())
|
|
|
|
|
|
|
|
|
|
GET_ELEMENTS_BY_CLASS_TEST_STRING = '''
|
|
|
|
|
<span class="foo bar">nice</span><span class="foo bar">also nice</span>
|
|
|
|
|
'''
|
|
|
|
|
GET_ELEMENTS_BY_CLASS_RES = ['<span class="foo bar">nice</span>', '<span class="foo bar">also nice</span>']
|
|
|
|
|
|
2017-02-11 04:16:54 -05:00
|
|
|
|
def test_get_elements_by_class(self):
|
2022-01-05 13:37:49 -05:00
|
|
|
|
html = self.GET_ELEMENTS_BY_CLASS_TEST_STRING
|
2017-02-11 04:16:54 -05:00
|
|
|
|
|
|
|
|
|
self.assertEqual(get_elements_by_class('foo', html), ['nice', 'also nice'])
|
|
|
|
|
self.assertEqual(get_elements_by_class('no-such-class', html), [])
|
|
|
|
|
|
2022-01-05 13:37:49 -05:00
|
|
|
|
def test_get_elements_html_by_class(self):
|
|
|
|
|
html = self.GET_ELEMENTS_BY_CLASS_TEST_STRING
|
|
|
|
|
|
|
|
|
|
self.assertEqual(get_elements_html_by_class('foo', html), self.GET_ELEMENTS_BY_CLASS_RES)
|
|
|
|
|
self.assertEqual(get_elements_html_by_class('no-such-class', html), [])
|
|
|
|
|
|
2017-02-11 04:16:54 -05:00
|
|
|
|
def test_get_elements_by_attribute(self):
|
2022-01-05 13:37:49 -05:00
|
|
|
|
html = self.GET_ELEMENTS_BY_CLASS_TEST_STRING
|
2017-02-11 04:16:54 -05:00
|
|
|
|
|
|
|
|
|
self.assertEqual(get_elements_by_attribute('class', 'foo bar', html), ['nice', 'also nice'])
|
|
|
|
|
self.assertEqual(get_elements_by_attribute('class', 'foo', html), [])
|
|
|
|
|
self.assertEqual(get_elements_by_attribute('class', 'no-such-foo', html), [])
|
|
|
|
|
|
2022-01-05 13:37:49 -05:00
|
|
|
|
def test_get_elements_html_by_attribute(self):
|
|
|
|
|
html = self.GET_ELEMENTS_BY_CLASS_TEST_STRING
|
|
|
|
|
|
|
|
|
|
self.assertEqual(get_elements_html_by_attribute('class', 'foo bar', html), self.GET_ELEMENTS_BY_CLASS_RES)
|
|
|
|
|
self.assertEqual(get_elements_html_by_attribute('class', 'foo', html), [])
|
|
|
|
|
self.assertEqual(get_elements_html_by_attribute('class', 'no-such-foo', html), [])
|
|
|
|
|
|
|
|
|
|
def test_get_elements_text_and_html_by_attribute(self):
|
|
|
|
|
html = self.GET_ELEMENTS_BY_CLASS_TEST_STRING
|
|
|
|
|
|
|
|
|
|
self.assertEqual(
|
2022-01-09 13:14:56 -05:00
|
|
|
|
list(get_elements_text_and_html_by_attribute('class', 'foo bar', html)),
|
2022-01-05 13:37:49 -05:00
|
|
|
|
list(zip(['nice', 'also nice'], self.GET_ELEMENTS_BY_CLASS_RES)))
|
2022-01-09 13:14:56 -05:00
|
|
|
|
self.assertEqual(list(get_elements_text_and_html_by_attribute('class', 'foo', html)), [])
|
|
|
|
|
self.assertEqual(list(get_elements_text_and_html_by_attribute('class', 'no-such-foo', html)), [])
|
2022-01-05 13:37:49 -05:00
|
|
|
|
|
2022-10-09 01:55:26 -04:00
|
|
|
|
self.assertEqual(list(get_elements_text_and_html_by_attribute(
|
|
|
|
|
'class', 'foo', '<a class="foo">nice</a><span class="foo">nice</span>', tag='a')), [('nice', '<a class="foo">nice</a>')])
|
|
|
|
|
|
2022-01-05 13:37:49 -05:00
|
|
|
|
GET_ELEMENT_BY_TAG_TEST_STRING = '''
|
|
|
|
|
random text lorem ipsum</p>
|
|
|
|
|
<div>
|
|
|
|
|
this should be returned
|
|
|
|
|
<span>this should also be returned</span>
|
|
|
|
|
<div>
|
|
|
|
|
this should also be returned
|
|
|
|
|
</div>
|
|
|
|
|
closing tag above should not trick, so this should also be returned
|
|
|
|
|
</div>
|
|
|
|
|
but this text should not be returned
|
|
|
|
|
'''
|
|
|
|
|
GET_ELEMENT_BY_TAG_RES_OUTERDIV_HTML = GET_ELEMENT_BY_TAG_TEST_STRING.strip()[32:276]
|
|
|
|
|
GET_ELEMENT_BY_TAG_RES_OUTERDIV_TEXT = GET_ELEMENT_BY_TAG_RES_OUTERDIV_HTML[5:-6]
|
|
|
|
|
GET_ELEMENT_BY_TAG_RES_INNERSPAN_HTML = GET_ELEMENT_BY_TAG_TEST_STRING.strip()[78:119]
|
|
|
|
|
GET_ELEMENT_BY_TAG_RES_INNERSPAN_TEXT = GET_ELEMENT_BY_TAG_RES_INNERSPAN_HTML[6:-7]
|
|
|
|
|
|
|
|
|
|
def test_get_element_text_and_html_by_tag(self):
|
|
|
|
|
html = self.GET_ELEMENT_BY_TAG_TEST_STRING
|
|
|
|
|
|
|
|
|
|
self.assertEqual(
|
|
|
|
|
get_element_text_and_html_by_tag('div', html),
|
|
|
|
|
(self.GET_ELEMENT_BY_TAG_RES_OUTERDIV_TEXT, self.GET_ELEMENT_BY_TAG_RES_OUTERDIV_HTML))
|
|
|
|
|
self.assertEqual(
|
|
|
|
|
get_element_text_and_html_by_tag('span', html),
|
|
|
|
|
(self.GET_ELEMENT_BY_TAG_RES_INNERSPAN_TEXT, self.GET_ELEMENT_BY_TAG_RES_INNERSPAN_HTML))
|
|
|
|
|
self.assertRaises(compat_HTMLParseError, get_element_text_and_html_by_tag, 'article', html)
|
|
|
|
|
|
2020-10-27 06:37:21 -04:00
|
|
|
|
def test_iri_to_uri(self):
|
|
|
|
|
self.assertEqual(
|
|
|
|
|
iri_to_uri('https://www.google.com/search?q=foo&ie=utf-8&oe=utf-8&client=firefox-b'),
|
|
|
|
|
'https://www.google.com/search?q=foo&ie=utf-8&oe=utf-8&client=firefox-b') # Same
|
|
|
|
|
self.assertEqual(
|
|
|
|
|
iri_to_uri('https://www.google.com/search?q=Käsesoßenrührlöffel'), # German for cheese sauce stirring spoon
|
|
|
|
|
'https://www.google.com/search?q=K%C3%A4seso%C3%9Fenr%C3%BChrl%C3%B6ffel')
|
|
|
|
|
self.assertEqual(
|
|
|
|
|
iri_to_uri('https://www.google.com/search?q=lt<+gt>+eq%3D+amp%26+percent%25+hash%23+colon%3A+tilde~#trash=?&garbage=#'),
|
|
|
|
|
'https://www.google.com/search?q=lt%3C+gt%3E+eq%3D+amp%26+percent%25+hash%23+colon%3A+tilde~#trash=?&garbage=#')
|
|
|
|
|
self.assertEqual(
|
|
|
|
|
iri_to_uri('http://правозащита38.рф/category/news/'),
|
|
|
|
|
'http://xn--38-6kcaak9aj5chl4a3g.xn--p1ai/category/news/')
|
|
|
|
|
self.assertEqual(
|
|
|
|
|
iri_to_uri('http://www.правозащита38.рф/category/news/'),
|
|
|
|
|
'http://www.xn--38-6kcaak9aj5chl4a3g.xn--p1ai/category/news/')
|
|
|
|
|
self.assertEqual(
|
|
|
|
|
iri_to_uri('https://i❤.ws/emojidomain/👍👏🤝💪'),
|
|
|
|
|
'https://xn--i-7iq.ws/emojidomain/%F0%9F%91%8D%F0%9F%91%8F%F0%9F%A4%9D%F0%9F%92%AA')
|
|
|
|
|
self.assertEqual(
|
|
|
|
|
iri_to_uri('http://日本語.jp/'),
|
|
|
|
|
'http://xn--wgv71a119e.jp/')
|
|
|
|
|
self.assertEqual(
|
|
|
|
|
iri_to_uri('http://导航.中国/'),
|
|
|
|
|
'http://xn--fet810g.xn--fiqs8s/')
|
|
|
|
|
|
2021-01-08 11:14:50 -05:00
|
|
|
|
def test_clean_podcast_url(self):
|
|
|
|
|
self.assertEqual(clean_podcast_url('https://www.podtrac.com/pts/redirect.mp3/chtbl.com/track/5899E/traffic.megaphone.fm/HSW7835899191.mp3'), 'https://traffic.megaphone.fm/HSW7835899191.mp3')
|
|
|
|
|
self.assertEqual(clean_podcast_url('https://play.podtrac.com/npr-344098539/edge1.pod.npr.org/anon.npr-podcasts/podcast/npr/waitwait/2020/10/20201003_waitwait_wwdtmpodcast201003-015621a5-f035-4eca-a9a1-7c118d90bc3c.mp3'), 'https://edge1.pod.npr.org/anon.npr-podcasts/podcast/npr/waitwait/2020/10/20201003_waitwait_wwdtmpodcast201003-015621a5-f035-4eca-a9a1-7c118d90bc3c.mp3')
|
|
|
|
|
|
2021-06-12 11:14:30 -04:00
|
|
|
|
def test_LazyList(self):
|
|
|
|
|
it = list(range(10))
|
|
|
|
|
|
|
|
|
|
self.assertEqual(list(LazyList(it)), it)
|
|
|
|
|
self.assertEqual(LazyList(it).exhaust(), it)
|
|
|
|
|
self.assertEqual(LazyList(it)[5], it[5])
|
|
|
|
|
|
2021-08-01 02:17:30 -04:00
|
|
|
|
self.assertEqual(LazyList(it)[5:], it[5:])
|
|
|
|
|
self.assertEqual(LazyList(it)[:5], it[:5])
|
2021-06-12 11:14:30 -04:00
|
|
|
|
self.assertEqual(LazyList(it)[::2], it[::2])
|
|
|
|
|
self.assertEqual(LazyList(it)[1::2], it[1::2])
|
2021-08-01 02:17:30 -04:00
|
|
|
|
self.assertEqual(LazyList(it)[5::-1], it[5::-1])
|
2021-06-12 11:14:30 -04:00
|
|
|
|
self.assertEqual(LazyList(it)[6:2:-2], it[6:2:-2])
|
|
|
|
|
self.assertEqual(LazyList(it)[::-1], it[::-1])
|
|
|
|
|
|
|
|
|
|
self.assertTrue(LazyList(it))
|
|
|
|
|
self.assertFalse(LazyList(range(0)))
|
|
|
|
|
self.assertEqual(len(LazyList(it)), len(it))
|
|
|
|
|
self.assertEqual(repr(LazyList(it)), repr(it))
|
|
|
|
|
self.assertEqual(str(LazyList(it)), str(it))
|
|
|
|
|
|
2021-11-19 21:35:57 -05:00
|
|
|
|
self.assertEqual(list(LazyList(it, reverse=True)), it[::-1])
|
|
|
|
|
self.assertEqual(list(reversed(LazyList(it))[::-1]), it)
|
|
|
|
|
self.assertEqual(list(reversed(LazyList(it))[1:3:7]), it[::-1][1:3:7])
|
2021-06-12 11:14:30 -04:00
|
|
|
|
|
|
|
|
|
def test_LazyList_laziness(self):
|
|
|
|
|
|
|
|
|
|
def test(ll, idx, val, cache):
|
|
|
|
|
self.assertEqual(ll[idx], val)
|
2022-05-09 07:54:28 -04:00
|
|
|
|
self.assertEqual(ll._cache, list(cache))
|
2021-06-12 11:14:30 -04:00
|
|
|
|
|
|
|
|
|
ll = LazyList(range(10))
|
|
|
|
|
test(ll, 0, 0, range(1))
|
|
|
|
|
test(ll, 5, 5, range(6))
|
|
|
|
|
test(ll, -3, 7, range(10))
|
|
|
|
|
|
2021-11-19 21:35:57 -05:00
|
|
|
|
ll = LazyList(range(10), reverse=True)
|
2021-06-12 11:14:30 -04:00
|
|
|
|
test(ll, -1, 0, range(1))
|
|
|
|
|
test(ll, 3, 6, range(10))
|
|
|
|
|
|
|
|
|
|
ll = LazyList(itertools.count())
|
|
|
|
|
test(ll, 10, 10, range(11))
|
2021-11-19 21:35:57 -05:00
|
|
|
|
ll = reversed(ll)
|
2021-06-12 11:14:30 -04:00
|
|
|
|
test(ll, -15, 14, range(15))
|
|
|
|
|
|
2021-12-27 17:08:31 -05:00
|
|
|
|
def test_format_bytes(self):
|
|
|
|
|
self.assertEqual(format_bytes(0), '0.00B')
|
|
|
|
|
self.assertEqual(format_bytes(1000), '1000.00B')
|
|
|
|
|
self.assertEqual(format_bytes(1024), '1.00KiB')
|
|
|
|
|
self.assertEqual(format_bytes(1024**2), '1.00MiB')
|
|
|
|
|
self.assertEqual(format_bytes(1024**3), '1.00GiB')
|
|
|
|
|
self.assertEqual(format_bytes(1024**4), '1.00TiB')
|
|
|
|
|
self.assertEqual(format_bytes(1024**5), '1.00PiB')
|
|
|
|
|
self.assertEqual(format_bytes(1024**6), '1.00EiB')
|
|
|
|
|
self.assertEqual(format_bytes(1024**7), '1.00ZiB')
|
|
|
|
|
self.assertEqual(format_bytes(1024**8), '1.00YiB')
|
2022-03-18 17:03:09 -04:00
|
|
|
|
self.assertEqual(format_bytes(1024**9), '1024.00YiB')
|
2021-12-27 17:08:31 -05:00
|
|
|
|
|
2021-12-14 12:03:47 -05:00
|
|
|
|
def test_hide_login_info(self):
|
|
|
|
|
self.assertEqual(Config.hide_login_info(['-u', 'foo', '-p', 'bar']),
|
|
|
|
|
['-u', 'PRIVATE', '-p', 'PRIVATE'])
|
|
|
|
|
self.assertEqual(Config.hide_login_info(['-u']), ['-u'])
|
|
|
|
|
self.assertEqual(Config.hide_login_info(['-u', 'foo', '-u', 'bar']),
|
|
|
|
|
['-u', 'PRIVATE', '-u', 'PRIVATE'])
|
|
|
|
|
self.assertEqual(Config.hide_login_info(['--username=foo']),
|
|
|
|
|
['--username=PRIVATE'])
|
|
|
|
|
|
2022-04-07 02:00:46 -04:00
|
|
|
|
def test_locked_file(self):
|
|
|
|
|
TEXT = 'test_locked_file\n'
|
|
|
|
|
FILE = 'test_locked_file.ytdl'
|
|
|
|
|
MODES = 'war' # Order is important
|
|
|
|
|
|
|
|
|
|
try:
|
|
|
|
|
for lock_mode in MODES:
|
|
|
|
|
with locked_file(FILE, lock_mode, False) as f:
|
|
|
|
|
if lock_mode == 'r':
|
|
|
|
|
self.assertEqual(f.read(), TEXT * 2, 'Wrong file content')
|
|
|
|
|
else:
|
|
|
|
|
f.write(TEXT)
|
|
|
|
|
for test_mode in MODES:
|
|
|
|
|
testing_write = test_mode != 'r'
|
|
|
|
|
try:
|
|
|
|
|
with locked_file(FILE, test_mode, False):
|
|
|
|
|
pass
|
|
|
|
|
except (BlockingIOError, PermissionError):
|
|
|
|
|
if not testing_write: # FIXME
|
|
|
|
|
print(f'Known issue: Exclusive lock ({lock_mode}) blocks read access ({test_mode})')
|
|
|
|
|
continue
|
|
|
|
|
self.assertTrue(testing_write, f'{test_mode} is blocked by {lock_mode}')
|
|
|
|
|
else:
|
|
|
|
|
self.assertFalse(testing_write, f'{test_mode} is not blocked by {lock_mode}')
|
|
|
|
|
finally:
|
2022-04-17 16:58:28 -04:00
|
|
|
|
with contextlib.suppress(OSError):
|
2022-04-07 02:00:46 -04:00
|
|
|
|
os.remove(FILE)
|
|
|
|
|
|
2022-07-15 07:52:14 -04:00
|
|
|
|
def test_determine_file_encoding(self):
|
|
|
|
|
self.assertEqual(determine_file_encoding(b''), (None, 0))
|
|
|
|
|
self.assertEqual(determine_file_encoding(b'--verbose -x --audio-format mkv\n'), (None, 0))
|
|
|
|
|
|
|
|
|
|
self.assertEqual(determine_file_encoding(b'\xef\xbb\xbf'), ('utf-8', 3))
|
|
|
|
|
self.assertEqual(determine_file_encoding(b'\x00\x00\xfe\xff'), ('utf-32-be', 4))
|
|
|
|
|
self.assertEqual(determine_file_encoding(b'\xff\xfe'), ('utf-16-le', 2))
|
|
|
|
|
|
2022-07-15 12:14:07 -04:00
|
|
|
|
self.assertEqual(determine_file_encoding(b'\xff\xfe# coding: utf-8\n--verbose'), ('utf-16-le', 2))
|
2022-07-15 07:52:14 -04:00
|
|
|
|
|
|
|
|
|
self.assertEqual(determine_file_encoding(b'# coding: utf-8\n--verbose'), ('utf-8', 0))
|
|
|
|
|
self.assertEqual(determine_file_encoding(b'# coding: someencodinghere-12345\n--verbose'), ('someencodinghere-12345', 0))
|
|
|
|
|
|
2022-07-15 12:14:07 -04:00
|
|
|
|
self.assertEqual(determine_file_encoding(b'#coding:utf-8\n--verbose'), ('utf-8', 0))
|
|
|
|
|
self.assertEqual(determine_file_encoding(b'# coding: utf-8 \r\n--verbose'), ('utf-8', 0))
|
|
|
|
|
|
|
|
|
|
self.assertEqual(determine_file_encoding('# coding: utf-32-be'.encode('utf-32-be')), ('utf-32-be', 0))
|
|
|
|
|
self.assertEqual(determine_file_encoding('# coding: utf-16-le'.encode('utf-16-le')), ('utf-16-le', 0))
|
2022-07-15 07:52:14 -04:00
|
|
|
|
|
2022-08-03 20:42:12 -04:00
|
|
|
|
def test_get_compatible_ext(self):
|
|
|
|
|
self.assertEqual(get_compatible_ext(
|
|
|
|
|
vcodecs=[None], acodecs=[None, None], vexts=['mp4'], aexts=['m4a', 'm4a']), 'mkv')
|
|
|
|
|
self.assertEqual(get_compatible_ext(
|
|
|
|
|
vcodecs=[None], acodecs=[None], vexts=['flv'], aexts=['flv']), 'flv')
|
|
|
|
|
|
|
|
|
|
self.assertEqual(get_compatible_ext(
|
|
|
|
|
vcodecs=[None], acodecs=[None], vexts=['mp4'], aexts=['m4a']), 'mp4')
|
|
|
|
|
self.assertEqual(get_compatible_ext(
|
|
|
|
|
vcodecs=[None], acodecs=[None], vexts=['mp4'], aexts=['webm']), 'mkv')
|
|
|
|
|
self.assertEqual(get_compatible_ext(
|
|
|
|
|
vcodecs=[None], acodecs=[None], vexts=['webm'], aexts=['m4a']), 'mkv')
|
|
|
|
|
self.assertEqual(get_compatible_ext(
|
|
|
|
|
vcodecs=[None], acodecs=[None], vexts=['webm'], aexts=['webm']), 'webm')
|
2022-12-30 05:00:56 -05:00
|
|
|
|
self.assertEqual(get_compatible_ext(
|
|
|
|
|
vcodecs=[None], acodecs=[None], vexts=['webm'], aexts=['weba']), 'webm')
|
2022-08-03 20:42:12 -04:00
|
|
|
|
|
|
|
|
|
self.assertEqual(get_compatible_ext(
|
|
|
|
|
vcodecs=['h264'], acodecs=['mp4a'], vexts=['mov'], aexts=['m4a']), 'mp4')
|
|
|
|
|
self.assertEqual(get_compatible_ext(
|
|
|
|
|
vcodecs=['av01.0.12M.08'], acodecs=['opus'], vexts=['mp4'], aexts=['webm']), 'webm')
|
|
|
|
|
|
|
|
|
|
self.assertEqual(get_compatible_ext(
|
|
|
|
|
vcodecs=['vp9'], acodecs=['opus'], vexts=['webm'], aexts=['webm'], preferences=['flv', 'mp4']), 'mp4')
|
|
|
|
|
self.assertEqual(get_compatible_ext(
|
|
|
|
|
vcodecs=['av1'], acodecs=['mp4a'], vexts=['webm'], aexts=['m4a'], preferences=('webm', 'mkv')), 'mkv')
|
|
|
|
|
|
2022-09-25 17:03:19 -04:00
|
|
|
|
def test_traverse_obj(self):
|
|
|
|
|
_TEST_DATA = {
|
|
|
|
|
100: 100,
|
|
|
|
|
1.2: 1.2,
|
|
|
|
|
'str': 'str',
|
|
|
|
|
'None': None,
|
|
|
|
|
'...': ...,
|
|
|
|
|
'urls': [
|
|
|
|
|
{'index': 0, 'url': 'https://www.example.com/0'},
|
|
|
|
|
{'index': 1, 'url': 'https://www.example.com/1'},
|
|
|
|
|
],
|
|
|
|
|
'data': (
|
|
|
|
|
{'index': 2},
|
|
|
|
|
{'index': 3},
|
|
|
|
|
),
|
2022-10-08 21:27:32 -04:00
|
|
|
|
'dict': {},
|
2022-09-25 17:03:19 -04:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
# Test base functionality
|
|
|
|
|
self.assertEqual(traverse_obj(_TEST_DATA, ('str',)), 'str',
|
|
|
|
|
msg='allow tuple path')
|
|
|
|
|
self.assertEqual(traverse_obj(_TEST_DATA, ['str']), 'str',
|
|
|
|
|
msg='allow list path')
|
|
|
|
|
self.assertEqual(traverse_obj(_TEST_DATA, (value for value in ("str",))), 'str',
|
|
|
|
|
msg='allow iterable path')
|
|
|
|
|
self.assertEqual(traverse_obj(_TEST_DATA, 'str'), 'str',
|
|
|
|
|
msg='single items should be treated as a path')
|
|
|
|
|
self.assertEqual(traverse_obj(_TEST_DATA, None), _TEST_DATA)
|
|
|
|
|
self.assertEqual(traverse_obj(_TEST_DATA, 100), 100)
|
|
|
|
|
self.assertEqual(traverse_obj(_TEST_DATA, 1.2), 1.2)
|
|
|
|
|
|
|
|
|
|
# Test Ellipsis behavior
|
|
|
|
|
self.assertCountEqual(traverse_obj(_TEST_DATA, ...),
|
2023-02-09 17:26:26 -05:00
|
|
|
|
(item for item in _TEST_DATA.values() if item not in (None, {})),
|
2023-02-07 22:11:08 -05:00
|
|
|
|
msg='`...` should give all non discarded values')
|
2022-09-25 17:03:19 -04:00
|
|
|
|
self.assertCountEqual(traverse_obj(_TEST_DATA, ('urls', 0, ...)), _TEST_DATA['urls'][0].values(),
|
|
|
|
|
msg='`...` selection for dicts should select all values')
|
|
|
|
|
self.assertEqual(traverse_obj(_TEST_DATA, (..., ..., 'url')),
|
|
|
|
|
['https://www.example.com/0', 'https://www.example.com/1'],
|
|
|
|
|
msg='nested `...` queries should work')
|
|
|
|
|
self.assertCountEqual(traverse_obj(_TEST_DATA, (..., ..., 'index')), range(4),
|
|
|
|
|
msg='`...` query result should be flattened')
|
|
|
|
|
|
|
|
|
|
# Test function as key
        self.assertEqual(traverse_obj(_TEST_DATA, lambda x, y: x == 'urls' and isinstance(y, list)),
                         [_TEST_DATA['urls']],
                         msg='function as query key should perform a filter based on (key, value)')
        self.assertCountEqual(traverse_obj(_TEST_DATA, lambda _, x: isinstance(x[0], str)), {'str'},
                              msg='exceptions in the query function should be caught')
        if __debug__:
            with self.assertRaises(Exception, msg='Wrong function signature should raise in debug'):
                traverse_obj(_TEST_DATA, lambda a: ...)
            with self.assertRaises(Exception, msg='Wrong function signature should raise in debug'):
                traverse_obj(_TEST_DATA, lambda a, b, c: ...)

        # Test set as key (transformation/type, like `expected_type`)
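        # A single-element set wraps either a transformation callable or a type used
        # as a filter, mirroring what `expected_type` does for the whole query.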
        self.assertEqual(traverse_obj(_TEST_DATA, (..., {str.upper}, )), ['STR'],
                         msg='Function in set should be a transformation')
        self.assertEqual(traverse_obj(_TEST_DATA, (..., {str})), ['str'],
                         msg='Type in set should be a type filter')
        self.assertEqual(traverse_obj(_TEST_DATA, {dict}), _TEST_DATA,
                         msg='A single set should be wrapped into a path')
        self.assertEqual(traverse_obj(_TEST_DATA, (..., {str.upper})), ['STR'],
                         msg='Transformation function should not raise')
        self.assertEqual(traverse_obj(_TEST_DATA, (..., {str_or_none})),
                         [item for item in map(str_or_none, _TEST_DATA.values()) if item is not None],
                         msg='Function in set should be a transformation')
        if __debug__:
            with self.assertRaises(Exception, msg='Sets with length != 1 should raise in debug'):
                traverse_obj(_TEST_DATA, set())
            with self.assertRaises(Exception, msg='Sets with length != 1 should raise in debug'):
                traverse_obj(_TEST_DATA, {str.upper, str})

        # Test alternative paths
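        # Several positional paths act as fallbacks: the first path that resolves to a
        # value wins and the remaining alternatives are not evaluated.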
        self.assertEqual(traverse_obj(_TEST_DATA, 'fail', 'str'), 'str',
                         msg='multiple `paths` should be treated as alternative paths')
        self.assertEqual(traverse_obj(_TEST_DATA, 'str', 100), 'str',
                         msg='alternatives should exit early')
        self.assertEqual(traverse_obj(_TEST_DATA, 'fail', 'fail'), None,
                         msg='alternatives should return `default` if exhausted')
        self.assertEqual(traverse_obj(_TEST_DATA, (..., 'fail'), 100), 100,
                         msg='alternatives should track their own branching return')
        self.assertEqual(traverse_obj(_TEST_DATA, ('dict', ...), ('data', ...)), list(_TEST_DATA['data']),
                         msg='alternatives on empty objects should search further')

        # Test branch and path nesting
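        # A tuple or list inside a path branches over its items; nesting one level
        # deeper is read as a path again, alternating between branches and paths.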
        self.assertEqual(traverse_obj(_TEST_DATA, ('urls', (3, 0), 'url')), ['https://www.example.com/0'],
                         msg='tuple as key should be treated as branches')
        self.assertEqual(traverse_obj(_TEST_DATA, ('urls', [3, 0], 'url')), ['https://www.example.com/0'],
                         msg='list as key should be treated as branches')
        self.assertEqual(traverse_obj(_TEST_DATA, ('urls', ((1, 'fail'), (0, 'url')))), ['https://www.example.com/0'],
                         msg='double nesting in path should be treated as paths')
        self.assertEqual(traverse_obj(['0', [1, 2]], [(0, 1), 0]), [1],
                         msg='do not fail early on branching')
        self.assertCountEqual(traverse_obj(_TEST_DATA, ('urls', ((1, ('fail', 'url')), (0, 'url')))),
                              ['https://www.example.com/0', 'https://www.example.com/1'],
                              msg='triple nesting in path should be treated as branches')
        self.assertEqual(traverse_obj(_TEST_DATA, ('urls', ('fail', (..., 'url')))),
                         ['https://www.example.com/0', 'https://www.example.com/1'],
                         msg='ellipsis as branch path start gets flattened')

        # Test dictionary as key
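        # A dict key builds a dict result: each value is traversed as its own path and
        # keys whose path fails are dropped unless `default` is given.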
        self.assertEqual(traverse_obj(_TEST_DATA, {0: 100, 1: 1.2}), {0: 100, 1: 1.2},
                         msg='dict key should result in a dict with the same keys')
        self.assertEqual(traverse_obj(_TEST_DATA, {0: ('urls', 0, 'url')}),
                         {0: 'https://www.example.com/0'},
                         msg='dict key should allow paths')
        self.assertEqual(traverse_obj(_TEST_DATA, {0: ('urls', (3, 0), 'url')}),
                         {0: ['https://www.example.com/0']},
                         msg='tuple in dict path should be treated as branches')
        self.assertEqual(traverse_obj(_TEST_DATA, {0: ('urls', ((1, 'fail'), (0, 'url')))}),
                         {0: ['https://www.example.com/0']},
                         msg='double nesting in dict path should be treated as paths')
        self.assertEqual(traverse_obj(_TEST_DATA, {0: ('urls', ((1, ('fail', 'url')), (0, 'url')))}),
                         {0: ['https://www.example.com/1', 'https://www.example.com/0']},
                         msg='triple nesting in dict path should be treated as branches')
        self.assertEqual(traverse_obj(_TEST_DATA, {0: 'fail'}), {},
                         msg='remove `None` values when top level dict key fails')
        self.assertEqual(traverse_obj(_TEST_DATA, {0: 'fail'}, default=...), {0: ...},
                         msg='use `default` if key fails and `default` is given')
        self.assertEqual(traverse_obj(_TEST_DATA, {0: 'dict'}), {},
                         msg='remove empty values when dict key')
        self.assertEqual(traverse_obj(_TEST_DATA, {0: 'dict'}, default=...), {0: ...},
                         msg='use `default` when dict key and `default` is given')
        self.assertEqual(traverse_obj(_TEST_DATA, {0: {0: 'fail'}}), {},
                         msg='remove empty values when nested dict key fails')
        self.assertEqual(traverse_obj(None, {0: 'fail'}), {},
                         msg='default to dict if pruned')
        self.assertEqual(traverse_obj(None, {0: 'fail'}, default=...), {0: ...},
                         msg='default to dict if pruned and default is given')
        self.assertEqual(traverse_obj(_TEST_DATA, {0: {0: 'fail'}}, default=...), {0: {0: ...}},
                         msg='use nested `default` when nested dict key fails and `default` is given')
        self.assertEqual(traverse_obj(_TEST_DATA, {0: ('dict', ...)}), {},
                         msg='remove key if branch in dict key not successful')

        # Testing default parameter behavior
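        # `default` replaces `None` values and failed lookups on non-branching paths;
        # branching paths fall back to `[]` unless `default` is passed explicitly.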
        _DEFAULT_DATA = {'None': None, 'int': 0, 'list': []}
        self.assertEqual(traverse_obj(_DEFAULT_DATA, 'fail'), None,
                         msg='default value should be `None`')
        self.assertEqual(traverse_obj(_DEFAULT_DATA, 'fail', 'fail', default=...), ...,
                         msg='chained fails should result in default')
        self.assertEqual(traverse_obj(_DEFAULT_DATA, 'None', 'int'), 0,
                         msg='should not short circuit on `None`')
        self.assertEqual(traverse_obj(_DEFAULT_DATA, 'fail', default=1), 1,
                         msg='invalid dict key should result in `default`')
        self.assertEqual(traverse_obj(_DEFAULT_DATA, 'None', default=1), 1,
                         msg='`None` is a deliberate sentinel and should become `default`')
        self.assertEqual(traverse_obj(_DEFAULT_DATA, ('list', 10)), None,
                         msg='`IndexError` should result in `default`')
        self.assertEqual(traverse_obj(_DEFAULT_DATA, (..., 'fail'), default=1), 1,
                         msg='if branched but not successful return `default` if defined, not `[]`')
        self.assertEqual(traverse_obj(_DEFAULT_DATA, (..., 'fail'), default=None), None,
                         msg='if branched but not successful return `default` even if `default` is `None`')
        self.assertEqual(traverse_obj(_DEFAULT_DATA, (..., 'fail')), [],
                         msg='if branched but not successful return `[]`, not `default`')
        self.assertEqual(traverse_obj(_DEFAULT_DATA, ('list', ...)), [],
                         msg='if branched but object is empty return `[]`, not `default`')
        self.assertEqual(traverse_obj(None, ...), [],
                         msg='if branched but object is `None` return `[]`, not `default`')
        self.assertEqual(traverse_obj({0: None}, (0, ...)), [],
                         msg='if branched but state is `None` return `[]`, not `default`')

        branching_paths = [
            ('fail', ...),
            (..., 'fail'),
            100 * ('fail',) + (...,),
            (...,) + 100 * ('fail',),
        ]
        for branching_path in branching_paths:
            self.assertEqual(traverse_obj({}, branching_path), [],
                             msg='if branched but state is `None`, return `[]` (not `default`)')
            self.assertEqual(traverse_obj({}, 'fail', branching_path), [],
                             msg='if branching in last alternative and previous did not match, return `[]` (not `default`)')
            self.assertEqual(traverse_obj({0: 'x'}, 0, branching_path), 'x',
                             msg='if branching in last alternative and previous did match, return single value')
            self.assertEqual(traverse_obj({0: 'x'}, branching_path, 0), 'x',
                             msg='if branching in first alternative and non-branching path does match, return single value')
            self.assertEqual(traverse_obj({}, branching_path, 'fail'), None,
                             msg='if branching in first alternative and non-branching path does not match, return `default`')

        # Testing expected_type behavior
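        # `expected_type` post-processes final values: a type keeps only matching
        # results while a callable transforms them (wrapped in `try_call`).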
        _EXPECTED_TYPE_DATA = {'str': 'str', 'int': 0}
        self.assertEqual(traverse_obj(_EXPECTED_TYPE_DATA, 'str', expected_type=str),
                         'str', msg='accept matching `expected_type` type')
        self.assertEqual(traverse_obj(_EXPECTED_TYPE_DATA, 'str', expected_type=int),
                         None, msg='reject non matching `expected_type` type')
        self.assertEqual(traverse_obj(_EXPECTED_TYPE_DATA, 'int', expected_type=lambda x: str(x)),
                         '0', msg='transform type using type function')
        self.assertEqual(traverse_obj(_EXPECTED_TYPE_DATA, 'str', expected_type=lambda _: 1 / 0),
                         None, msg='wrap expected_type function in try_call')
        self.assertEqual(traverse_obj(_EXPECTED_TYPE_DATA, ..., expected_type=str),
                         ['str'], msg='eliminate items that expected_type fails on')
        self.assertEqual(traverse_obj(_TEST_DATA, {0: 100, 1: 1.2}, expected_type=int),
                         {0: 100}, msg='type as expected_type should filter dict values')
        self.assertEqual(traverse_obj(_TEST_DATA, {0: 100, 1: 1.2, 2: 'None'}, expected_type=str_or_none),
                         {0: '100', 1: '1.2'}, msg='function as expected_type should transform dict values')
        self.assertEqual(traverse_obj(_TEST_DATA, ({0: 1.2}, 0, {int_or_none}), expected_type=int),
                         1, msg='expected_type should not filter non final dict values')
        self.assertEqual(traverse_obj(_TEST_DATA, {0: {0: 100, 1: 'str'}}, expected_type=int),
                         {0: {0: 100}}, msg='expected_type should transform deep dict values')
        self.assertEqual(traverse_obj(_TEST_DATA, [({0: '...'}, {0: '...'})], expected_type=type(...)),
                         [{0: ...}, {0: ...}], msg='expected_type should transform branched dict values')
        self.assertEqual(traverse_obj({1: {3: 4}}, [(1, 2), 3], expected_type=int),
                         [4], msg='expected_type regression for type matching in tuple branching')
        self.assertEqual(traverse_obj(_TEST_DATA, ['data', ...], expected_type=int),
                         [], msg='expected_type regression for type matching in dict result')

        # Test get_all behavior
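        # With `get_all=False`, only the first value of a branching match is returned.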
        _GET_ALL_DATA = {'key': [0, 1, 2]}
        self.assertEqual(traverse_obj(_GET_ALL_DATA, ('key', ...), get_all=False), 0,
                         msg='if not `get_all`, return only first matching value')
        self.assertEqual(traverse_obj(_GET_ALL_DATA, ..., get_all=False), [0, 1, 2],
                         msg='do not overflatten if not `get_all`')

        # Test casesense behavior
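        # String dict keys match case-sensitively by default; `casesense=False` relaxes this.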
        _CASESENSE_DATA = {
            'KeY': 'value0',
            0: {
                'KeY': 'value1',
                0: {'KeY': 'value2'},
            },
        }
        self.assertEqual(traverse_obj(_CASESENSE_DATA, 'key'), None,
                         msg='dict keys should be case sensitive unless `casesense` is disabled')
        self.assertEqual(traverse_obj(_CASESENSE_DATA, 'keY',
                                      casesense=False), 'value0',
                         msg='allow non matching key case if not `casesense`')
        self.assertEqual(traverse_obj(_CASESENSE_DATA, (0, ('keY',)),
                                      casesense=False), ['value1'],
                         msg='allow non matching key case in branch if not `casesense`')
        self.assertEqual(traverse_obj(_CASESENSE_DATA, (0, ((0, 'keY'),)),
                                      casesense=False), ['value2'],
                         msg='allow non matching key case in branch path if not `casesense`')

        # Test traverse_string behavior
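        # With `traverse_string=True`, terminal values are converted to `str` and can be
        # indexed, sliced and branched into like any other sequence.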
        _TRAVERSE_STRING_DATA = {'str': 'str', 1.2: 1.2}
        self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, ('str', 0)), None,
                         msg='do not traverse into string if not `traverse_string`')
        self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, ('str', 0),
                                      traverse_string=True), 's',
                         msg='traverse into string if `traverse_string`')
        self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, (1.2, 1),
                                      traverse_string=True), '.',
                         msg='traverse into converted data if `traverse_string`')
        self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, ('str', ...),
                                      traverse_string=True), 'str',
                         msg='`...` should result in string (same value) if `traverse_string`')
        self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, ('str', slice(0, None, 2)),
                                      traverse_string=True), 'sr',
                         msg='`slice` should result in string if `traverse_string`')
        self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, ('str', lambda i, v: i or v == "s"),
                                      traverse_string=True), 'str',
                         msg='function should result in string if `traverse_string`')
        self.assertEqual(traverse_obj(_TRAVERSE_STRING_DATA, ('str', (0, 2)),
                                      traverse_string=True), ['s', 'r'],
                         msg='branching should result in list if `traverse_string`')

        # Test is_user_input behavior
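        # With `is_user_input=True`, string keys such as '3', '3:' or ':4:2' are parsed
        # as integer indices or slices, and ':' behaves like `...`.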
        _IS_USER_INPUT_DATA = {'range8': list(range(8))}
        self.assertEqual(traverse_obj(_IS_USER_INPUT_DATA, ('range8', '3'),
                                      is_user_input=True), 3,
                         msg='allow for string indexing if `is_user_input`')
        self.assertCountEqual(traverse_obj(_IS_USER_INPUT_DATA, ('range8', '3:'),
                                           is_user_input=True), tuple(range(8))[3:],
                              msg='allow for string slice if `is_user_input`')
        self.assertCountEqual(traverse_obj(_IS_USER_INPUT_DATA, ('range8', ':4:2'),
                                           is_user_input=True), tuple(range(8))[:4:2],
                              msg='allow step in string slice if `is_user_input`')
        self.assertCountEqual(traverse_obj(_IS_USER_INPUT_DATA, ('range8', ':'),
                                           is_user_input=True), range(8),
                              msg='`:` should be treated as `...` if `is_user_input`')
        with self.assertRaises(TypeError, msg='too many params should result in error'):
            traverse_obj(_IS_USER_INPUT_DATA, ('range8', ':::'), is_user_input=True)

        # Test re.Match as input obj
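        # `re.Match` objects can be traversed directly: `...` yields the captured
        # groups, int keys select groups by index and str keys select named groups.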
        mobj = re.fullmatch(r'0(12)(?P<group>3)(4)?', '0123')
        self.assertEqual(traverse_obj(mobj, ...), [x for x in mobj.groups() if x is not None],
                         msg='`...` on a `re.Match` should give its `groups()`')
        self.assertEqual(traverse_obj(mobj, lambda k, _: k in (0, 2)), ['0123', '3'],
                         msg='function on a `re.Match` should give groupno, value starting at 0')
        self.assertEqual(traverse_obj(mobj, 'group'), '3',
                         msg='str key on a `re.Match` should give group with that name')
        self.assertEqual(traverse_obj(mobj, 2), '3',
                         msg='int key on a `re.Match` should give group with that index')
        self.assertEqual(traverse_obj(mobj, 'gRoUp', casesense=False), '3',
                         msg='str key on a `re.Match` should respect casesense')
        self.assertEqual(traverse_obj(mobj, 'fail'), None,
                         msg='failing str key on a `re.Match` should return `default`')
        self.assertEqual(traverse_obj(mobj, 'gRoUpS', casesense=False), None,
                         msg='failing str key on a `re.Match` should return `default`')
        self.assertEqual(traverse_obj(mobj, 8), None,
                         msg='failing int key on a `re.Match` should return `default`')
        self.assertEqual(traverse_obj(mobj, lambda k, _: k in (0, 'group')), ['0123', '3'],
                         msg='function on a `re.Match` should give group name as well')


if __name__ == '__main__':
    unittest.main()