version 2020.11.01

This commit is contained in:
Dominika 2020-11-01 05:17:15 +01:00
parent 389d4402eb
commit 1c397d1dd3
52 changed files with 334 additions and 863 deletions

View file

@ -1,43 +0,0 @@
#!/usr/bin/env python3
from __future__ import unicode_literals
import json
import sys
import hashlib
import os.path
if len(sys.argv) <= 1:
print('Specify the version number as parameter')
sys.exit()
version = sys.argv[1]
with open('update/LATEST_VERSION', 'w') as f:
f.write(version)
versions_info = json.load(open('update/versions.json'))
if 'signature' in versions_info:
del versions_info['signature']
new_version = {}
filenames = {
'bin': 'haruhi-dl',
'exe': 'haruhi-dl.exe',
'tar': 'haruhi-dl-%s.tar.gz' % version}
build_dir = os.path.join('..', '..', 'build', version)
for key, filename in filenames.items():
url = 'https://yt-dl.org/downloads/%s/%s' % (version, filename)
fn = os.path.join(build_dir, filename)
with open(fn, 'rb') as f:
data = f.read()
if not data:
raise ValueError('File %s is empty!' % fn)
sha256sum = hashlib.sha256(data).hexdigest()
new_version[key] = (url, sha256sum)
versions_info['versions'][version] = new_version
versions_info['latest'] = version
with open('update/versions.json', 'w') as jsonf:
json.dump(versions_info, jsonf, indent=4, sort_keys=True)

View file

@ -1,22 +0,0 @@
#!/usr/bin/env python3
from __future__ import unicode_literals
import json
versions_info = json.load(open('update/versions.json'))
version = versions_info['latest']
version_dict = versions_info['versions'][version]
# Read template page
with open('download.html.in', 'r', encoding='utf-8') as tmplf:
template = tmplf.read()
template = template.replace('@PROGRAM_VERSION@', version)
template = template.replace('@PROGRAM_URL@', version_dict['bin'][0])
template = template.replace('@PROGRAM_SHA256SUM@', version_dict['bin'][1])
template = template.replace('@EXE_URL@', version_dict['exe'][0])
template = template.replace('@EXE_SHA256SUM@', version_dict['exe'][1])
template = template.replace('@TAR_URL@', version_dict['tar'][0])
template = template.replace('@TAR_SHA256SUM@', version_dict['tar'][1])
with open('download.html', 'w', encoding='utf-8') as dlf:
dlf.write(template)

View file

@ -1,34 +0,0 @@
#!/usr/bin/env python3
from __future__ import unicode_literals, with_statement
import rsa
import json
from binascii import hexlify
try:
input = raw_input
except NameError:
pass
versions_info = json.load(open('update/versions.json'))
if 'signature' in versions_info:
del versions_info['signature']
print('Enter the PKCS1 private key, followed by a blank line:')
privkey = b''
while True:
try:
line = input()
except EOFError:
break
if line == '':
break
privkey += line.encode('ascii') + b'\n'
privkey = rsa.PrivateKey.load_pkcs1(privkey)
signature = hexlify(rsa.pkcs1.sign(json.dumps(versions_info, sort_keys=True).encode('utf-8'), privkey, 'SHA-256')).decode()
print('signature: ' + signature)
versions_info['signature'] = signature
with open('update/versions.json', 'w') as versionsf:
json.dump(versions_info, versionsf, indent=4, sort_keys=True)

View file

@ -1,21 +0,0 @@
#!/usr/bin/env python
# coding: utf-8
from __future__ import with_statement, unicode_literals
import datetime
import glob
import io # For Python 2 compatibility
import os
import re
year = str(datetime.datetime.now().year)
for fn in glob.glob('*.html*'):
with io.open(fn, encoding='utf-8') as f:
content = f.read()
newc = re.sub(r'(?P<copyright>Copyright © 2011-)(?P<year>[0-9]{4})', 'Copyright © 2011-' + year, content)
if content != newc:
tmpFn = fn + '.part'
with io.open(tmpFn, 'wt', encoding='utf-8') as outf:
outf.write(newc)
os.rename(tmpFn, fn)

View file

@ -1,76 +0,0 @@
#!/usr/bin/env python3
from __future__ import unicode_literals
import datetime
import io
import json
import textwrap
atom_template = textwrap.dedent("""\
<?xml version="1.0" encoding="utf-8"?>
<feed xmlns="http://www.w3.org/2005/Atom">
<link rel="self" href="http://ytdl-org.github.io/haruhi-dl/update/releases.atom" />
<title>haruhi-dl releases</title>
<id>https://yt-dl.org/feed/haruhi-dl-updates-feed</id>
<updated>@TIMESTAMP@</updated>
@ENTRIES@
</feed>""")
entry_template = textwrap.dedent("""
<entry>
<id>https://yt-dl.org/feed/haruhi-dl-updates-feed/haruhi-dl-@VERSION@</id>
<title>New version @VERSION@</title>
<link href="http://ytdl-org.github.io/haruhi-dl" />
<content type="xhtml">
<div xmlns="http://www.w3.org/1999/xhtml">
Downloads available at <a href="https://yt-dl.org/downloads/@VERSION@/">https://yt-dl.org/downloads/@VERSION@/</a>
</div>
</content>
<author>
<name>The haruhi-dl maintainers</name>
</author>
<updated>@TIMESTAMP@</updated>
</entry>
""")
now = datetime.datetime.now()
now_iso = now.isoformat() + 'Z'
atom_template = atom_template.replace('@TIMESTAMP@', now_iso)
versions_info = json.load(open('update/versions.json'))
versions = list(versions_info['versions'].keys())
versions.sort()
entries = []
for v in versions:
fields = v.split('.')
year, month, day = map(int, fields[:3])
faked = 0
patchlevel = 0
while True:
try:
datetime.date(year, month, day)
except ValueError:
day -= 1
faked += 1
assert day > 0
continue
break
if len(fields) >= 4:
try:
patchlevel = int(fields[3])
except ValueError:
patchlevel = 1
timestamp = '%04d-%02d-%02dT00:%02d:%02dZ' % (year, month, day, faked, patchlevel)
entry = entry_template.replace('@TIMESTAMP@', timestamp)
entry = entry.replace('@VERSION@', v)
entries.append(entry)
entries_str = textwrap.indent(''.join(entries), '\t')
atom_template = atom_template.replace('@ENTRIES@', entries_str)
with io.open('update/releases.atom', 'w', encoding='utf-8') as atom_file:
atom_file.write(atom_template)

View file

@ -1,37 +0,0 @@
#!/usr/bin/env python3
from __future__ import unicode_literals
import sys
import os
import textwrap
# We must be able to import haruhi_dl
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
import haruhi_dl
def main():
with open('supportedsites.html.in', 'r', encoding='utf-8') as tmplf:
template = tmplf.read()
ie_htmls = []
for ie in haruhi_dl.list_extractors(age_limit=None):
ie_html = '<b>{}</b>'.format(ie.IE_NAME)
ie_desc = getattr(ie, 'IE_DESC', None)
if ie_desc is False:
continue
elif ie_desc is not None:
ie_html += ': {}'.format(ie.IE_DESC)
if not ie.working():
ie_html += ' (Currently broken)'
ie_htmls.append('<li>{}</li>'.format(ie_html))
template = template.replace('@SITES@', textwrap.indent('\n'.join(ie_htmls), '\t'))
with open('supportedsites.html', 'w', encoding='utf-8') as sitesf:
sitesf.write(template)
if __name__ == '__main__':
main()

View file

@ -112,7 +112,7 @@ for f in $RELEASE_FILES; do gpg --passphrase-repeat 5 --detach-sig "build/$versi
ROOT=$(pwd) ROOT=$(pwd)
python devscripts/create-github-release.py ChangeLog $version "$ROOT/build/$version" python devscripts/create-github-release.py ChangeLog $version "$ROOT/build/$version"
ssh ytdl@yt-dl.org "sh html/update_latest.sh $version" #ssh ytdl@yt-dl.org "sh html/update_latest.sh $version"
/bin/echo -e "\n### Now switching to gh-pages..." /bin/echo -e "\n### Now switching to gh-pages..."
git clone --branch gh-pages --single-branch . build/gh-pages git clone --branch gh-pages --single-branch . build/gh-pages

View file

@ -6,8 +6,8 @@ When using the ``haruhi_dl`` module, you start by creating an instance of :class
.. code-block:: python .. code-block:: python
>>> from haruhi_dl import HaruhiDL >>> from haruhi_dl import HaruhiDL
>>> ydl = HaruhiDL() >>> hdl = HaruhiDL()
>>> ydl.add_default_info_extractors() >>> hdl.add_default_info_extractors()
Extracting video information Extracting video information
---------------------------- ----------------------------
@ -16,7 +16,7 @@ You use the :meth:`HaruhiDL.extract_info` method for getting the video informati
.. code-block:: python .. code-block:: python
>>> info = ydl.extract_info('http://www.youtube.com/watch?v=BaW_jenozKc', download=False) >>> info = hdl.extract_info('http://www.youtube.com/watch?v=BaW_jenozKc', download=False)
[youtube] Setting language [youtube] Setting language
[youtube] BaW_jenozKc: Downloading webpage [youtube] BaW_jenozKc: Downloading webpage
[youtube] BaW_jenozKc: Downloading video info webpage [youtube] BaW_jenozKc: Downloading video info webpage
@ -40,7 +40,7 @@ The playlist information is extracted in a similar way, but the dictionary is a
.. code-block:: python .. code-block:: python
>>> playlist = ydl.extract_info('http://www.ted.com/playlists/13/open_source_open_world', download=False) >>> playlist = hdl.extract_info('http://www.ted.com/playlists/13/open_source_open_world', download=False)
[TED] open_source_open_world: Downloading playlist webpage [TED] open_source_open_world: Downloading playlist webpage
... ...
>>> playlist['title'] >>> playlist['title']

View file

@ -2303,7 +2303,7 @@ class HaruhiDL(object):
self._write_string('[debug] Proxy map: ' + compat_str(proxy_map) + '\n') self._write_string('[debug] Proxy map: ' + compat_str(proxy_map) + '\n')
if self.params.get('call_home', False): if self.params.get('call_home', False):
ipaddr = self.urlopen('https://yt-dl.org/ip').read().decode('utf-8') ipaddr = self.urlopen('https://ifconfig.me/ip').read().decode('utf-8')
self._write_string('[debug] Public IP address: %s\n' % ipaddr) self._write_string('[debug] Public IP address: %s\n' % ipaddr)
latest_version = self.urlopen( latest_version = self.urlopen(
'https://yt-dl.org/latest/version').read().decode('utf-8') 'https://yt-dl.org/latest/version').read().decode('utf-8')
@ -2343,7 +2343,7 @@ class HaruhiDL(object):
debuglevel = 1 if self.params.get('debug_printtraffic') else 0 debuglevel = 1 if self.params.get('debug_printtraffic') else 0
https_handler = make_HTTPS_handler(self.params, debuglevel=debuglevel) https_handler = make_HTTPS_handler(self.params, debuglevel=debuglevel)
ydlh = HaruhiDLHandler(self.params, debuglevel=debuglevel) hdlh = HaruhiDLHandler(self.params, debuglevel=debuglevel)
redirect_handler = HaruhiDLRedirectHandler() redirect_handler = HaruhiDLRedirectHandler()
data_handler = compat_urllib_request_DataHandler() data_handler = compat_urllib_request_DataHandler()
@ -2358,7 +2358,7 @@ class HaruhiDL(object):
file_handler.file_open = file_open file_handler.file_open = file_open
opener = compat_urllib_request.build_opener( opener = compat_urllib_request.build_opener(
proxy_handler, https_handler, cookie_processor, ydlh, redirect_handler, data_handler, file_handler) proxy_handler, https_handler, cookie_processor, hdlh, redirect_handler, data_handler, file_handler)
# Delete the default user-agent header, which would otherwise apply in # Delete the default user-agent header, which would otherwise apply in
# cases where our custom HTTP handler doesn't come into play # cases where our custom HTTP handler doesn't come into play

View file

@ -314,7 +314,7 @@ def _real_main(argv=None):
None if opts.match_filter is None None if opts.match_filter is None
else match_filter_func(opts.match_filter)) else match_filter_func(opts.match_filter))
ydl_opts = { hdl_opts = {
'usenetrc': opts.usenetrc, 'usenetrc': opts.usenetrc,
'username': opts.username, 'username': opts.username,
'password': opts.password, 'password': opts.password,
@ -438,32 +438,32 @@ def _real_main(argv=None):
'usetitle': opts.usetitle if opts.usetitle is True else None, 'usetitle': opts.usetitle if opts.usetitle is True else None,
} }
with HaruhiDL(ydl_opts) as ydl: with HaruhiDL(hdl_opts) as hdl:
# Update version # Update version
if opts.update_self: if opts.update_self:
update_self(ydl.to_screen, opts.verbose, ydl._opener) update_self(hdl.to_screen, opts.verbose, hdl._opener)
# Remove cache dir # Remove cache dir
if opts.rm_cachedir: if opts.rm_cachedir:
ydl.cache.remove() hdl.cache.remove()
# Maybe do nothing # Maybe do nothing
if (len(all_urls) < 1) and (opts.load_info_filename is None): if (len(all_urls) < 1) and (opts.load_info_filename is None):
if opts.update_self or opts.rm_cachedir: if opts.update_self or opts.rm_cachedir:
sys.exit() sys.exit()
ydl.warn_if_short_id(sys.argv[1:] if argv is None else argv) hdl.warn_if_short_id(sys.argv[1:] if argv is None else argv)
parser.error( parser.error(
'You must provide at least one URL.\n' 'You must provide at least one URL.\n'
'Type haruhi-dl --help to see a list of all options.') 'Type haruhi-dl --help to see a list of all options.')
try: try:
if opts.load_info_filename is not None: if opts.load_info_filename is not None:
retcode = ydl.download_with_info_file(expand_path(opts.load_info_filename)) retcode = hdl.download_with_info_file(expand_path(opts.load_info_filename))
else: else:
retcode = ydl.download(all_urls) retcode = hdl.download(all_urls)
except MaxDownloadsReached: except MaxDownloadsReached:
ydl.to_screen('--max-download limit reached, aborting.') hdl.to_screen('--max-download limit reached, aborting.')
retcode = 101 retcode = 101
sys.exit(retcode) sys.exit(retcode)

View file

@ -16,11 +16,11 @@ from .utils import (
class Cache(object): class Cache(object):
def __init__(self, ydl): def __init__(self, hdl):
self._ydl = ydl self._hdl = hdl
def _get_root_dir(self): def _get_root_dir(self):
res = self._ydl.params.get('cachedir') res = self._hdl.params.get('cachedir')
if res is None: if res is None:
cache_root = compat_getenv('XDG_CACHE_HOME', '~/.cache') cache_root = compat_getenv('XDG_CACHE_HOME', '~/.cache')
res = os.path.join(cache_root, 'haruhi-dl') res = os.path.join(cache_root, 'haruhi-dl')
@ -35,7 +35,7 @@ class Cache(object):
@property @property
def enabled(self): def enabled(self):
return self._ydl.params.get('cachedir') is not False return self._hdl.params.get('cachedir') is not False
def store(self, section, key, data, dtype='json'): def store(self, section, key, data, dtype='json'):
assert dtype in ('json',) assert dtype in ('json',)
@ -53,7 +53,7 @@ class Cache(object):
write_json_file(data, fn) write_json_file(data, fn)
except Exception: except Exception:
tb = traceback.format_exc() tb = traceback.format_exc()
self._ydl.report_warning( self._hdl.report_warning(
'Writing cache to %r failed: %s' % (fn, tb)) 'Writing cache to %r failed: %s' % (fn, tb))
def load(self, section, key, dtype='json', default=None): def load(self, section, key, dtype='json', default=None):
@ -72,7 +72,7 @@ class Cache(object):
file_size = os.path.getsize(cache_fn) file_size = os.path.getsize(cache_fn)
except (OSError, IOError) as oe: except (OSError, IOError) as oe:
file_size = str(oe) file_size = str(oe)
self._ydl.report_warning( self._hdl.report_warning(
'Cache retrieval from %s failed (%s)' % (cache_fn, file_size)) 'Cache retrieval from %s failed (%s)' % (cache_fn, file_size))
except IOError: except IOError:
pass # No cache available pass # No cache available
@ -81,16 +81,16 @@ class Cache(object):
def remove(self): def remove(self):
if not self.enabled: if not self.enabled:
self._ydl.to_screen('Cache is disabled (Did you combine --no-cache-dir and --rm-cache-dir?)') self._hdl.to_screen('Cache is disabled (Did you combine --no-cache-dir and --rm-cache-dir?)')
return return
cachedir = self._get_root_dir() cachedir = self._get_root_dir()
if not any((term in cachedir) for term in ('cache', 'tmp')): if not any((term in cachedir) for term in ('cache', 'tmp')):
raise Exception('Not removing directory %s - this does not look like a cache dir' % cachedir) raise Exception('Not removing directory %s - this does not look like a cache dir' % cachedir)
self._ydl.to_screen( self._hdl.to_screen(
'Removing cache dir %s .' % cachedir, skip_eol=True) 'Removing cache dir %s .' % cachedir, skip_eol=True)
if os.path.exists(cachedir): if os.path.exists(cachedir):
self._ydl.to_screen('.', skip_eol=True) self._hdl.to_screen('.', skip_eol=True)
shutil.rmtree(cachedir) shutil.rmtree(cachedir)
self._ydl.to_screen('.') self._hdl.to_screen('.')

View file

@ -58,9 +58,9 @@ class FileDownloader(object):
_TEST_FILE_SIZE = 10241 _TEST_FILE_SIZE = 10241
params = None params = None
def __init__(self, ydl, params): def __init__(self, hdl, params):
"""Create a FileDownloader object with the given options.""" """Create a FileDownloader object with the given options."""
self.ydl = ydl self.hdl = hdl
self._progress_hooks = [] self._progress_hooks = []
self.params = params self.params = params
self.add_progress_hook(self.report_progress) self.add_progress_hook(self.report_progress)
@ -147,22 +147,22 @@ class FileDownloader(object):
return int(round(number * multiplier)) return int(round(number * multiplier))
def to_screen(self, *args, **kargs): def to_screen(self, *args, **kargs):
self.ydl.to_screen(*args, **kargs) self.hdl.to_screen(*args, **kargs)
def to_stderr(self, message): def to_stderr(self, message):
self.ydl.to_screen(message) self.hdl.to_screen(message)
def to_console_title(self, message): def to_console_title(self, message):
self.ydl.to_console_title(message) self.hdl.to_console_title(message)
def trouble(self, *args, **kargs): def trouble(self, *args, **kargs):
self.ydl.trouble(*args, **kargs) self.hdl.trouble(*args, **kargs)
def report_warning(self, *args, **kargs): def report_warning(self, *args, **kargs):
self.ydl.report_warning(*args, **kargs) self.hdl.report_warning(*args, **kargs)
def report_error(self, *args, **kargs): def report_error(self, *args, **kargs):
self.ydl.report_error(*args, **kargs) self.hdl.report_error(*args, **kargs)
def slow_down(self, start_time, now, byte_counter): def slow_down(self, start_time, now, byte_counter):
"""Sleep if the download speed is over the rate limit.""" """Sleep if the download speed is over the rate limit."""

View file

@ -279,7 +279,7 @@ class F4mFD(FragmentFD):
return media return media
def _get_bootstrap_from_url(self, bootstrap_url): def _get_bootstrap_from_url(self, bootstrap_url):
bootstrap = self.ydl.urlopen(bootstrap_url).read() bootstrap = self.hdl.urlopen(bootstrap_url).read()
return read_bootstrap_info(bootstrap) return read_bootstrap_info(bootstrap)
def _update_live_fragments(self, bootstrap_url, latest_fragment): def _update_live_fragments(self, bootstrap_url, latest_fragment):
@ -321,7 +321,7 @@ class F4mFD(FragmentFD):
requested_bitrate = info_dict.get('tbr') requested_bitrate = info_dict.get('tbr')
self.to_screen('[%s] Downloading f4m manifest' % self.FD_NAME) self.to_screen('[%s] Downloading f4m manifest' % self.FD_NAME)
urlh = self.ydl.urlopen(self._prepare_url(info_dict, man_url)) urlh = self.hdl.urlopen(self._prepare_url(info_dict, man_url))
man_url = urlh.geturl() man_url = urlh.geturl()
# Some manifests may be malformed, e.g. prosiebensat1 generated manifests # Some manifests may be malformed, e.g. prosiebensat1 generated manifests
# (see https://github.com/ytdl-org/haruhi-dl/issues/6215#issuecomment-121704244 # (see https://github.com/ytdl-org/haruhi-dl/issues/6215#issuecomment-121704244

View file

@ -134,7 +134,7 @@ class FragmentFD(FileDownloader):
'[%s] Total fragments: %s' % (self.FD_NAME, total_frags_str)) '[%s] Total fragments: %s' % (self.FD_NAME, total_frags_str))
self.report_destination(ctx['filename']) self.report_destination(ctx['filename'])
dl = HttpQuietDownloader( dl = HttpQuietDownloader(
self.ydl, self.hdl,
{ {
'continuedl': True, 'continuedl': True,
'quiet': True, 'quiet': True,

View file

@ -59,7 +59,7 @@ class HlsFD(FragmentFD):
man_url = info_dict['url'] man_url = info_dict['url']
self.to_screen('[%s] Downloading m3u8 manifest' % self.FD_NAME) self.to_screen('[%s] Downloading m3u8 manifest' % self.FD_NAME)
urlh = self.ydl.urlopen(self._prepare_url(info_dict, man_url)) urlh = self.hdl.urlopen(self._prepare_url(info_dict, man_url))
man_url = urlh.geturl() man_url = urlh.geturl()
s = urlh.read().decode('utf-8', 'ignore') s = urlh.read().decode('utf-8', 'ignore')
@ -70,7 +70,7 @@ class HlsFD(FragmentFD):
self.report_warning( self.report_warning(
'hlsnative has detected features it does not support, ' 'hlsnative has detected features it does not support, '
'extraction will be delegated to ffmpeg') 'extraction will be delegated to ffmpeg')
fd = FFmpegFD(self.ydl, self.params) fd = FFmpegFD(self.hdl, self.params)
for ph in self._progress_hooks: for ph in self._progress_hooks:
fd.add_progress_hook(ph) fd.add_progress_hook(ph)
return fd.real_download(filename, info_dict) return fd.real_download(filename, info_dict)
@ -168,7 +168,7 @@ class HlsFD(FragmentFD):
return False return False
if decrypt_info['METHOD'] == 'AES-128': if decrypt_info['METHOD'] == 'AES-128':
iv = decrypt_info.get('IV') or compat_struct_pack('>8xq', media_sequence) iv = decrypt_info.get('IV') or compat_struct_pack('>8xq', media_sequence)
decrypt_info['KEY'] = decrypt_info.get('KEY') or self.ydl.urlopen( decrypt_info['KEY'] = decrypt_info.get('KEY') or self.hdl.urlopen(
self._prepare_url(info_dict, info_dict.get('_decryption_key_url') or decrypt_info['URI'])).read() self._prepare_url(info_dict, info_dict.get('_decryption_key_url') or decrypt_info['URI'])).read()
frag_content = AES.new( frag_content = AES.new(
decrypt_info['KEY'], AES.MODE_CBC, iv).decrypt(frag_content) decrypt_info['KEY'], AES.MODE_CBC, iv).decrypt(frag_content)

View file

@ -107,7 +107,7 @@ class HttpFD(FileDownloader):
# Establish connection # Establish connection
try: try:
try: try:
ctx.data = self.ydl.urlopen(request) ctx.data = self.hdl.urlopen(request)
except (compat_urllib_error.URLError, ) as err: except (compat_urllib_error.URLError, ) as err:
if isinstance(err.reason, socket.timeout): if isinstance(err.reason, socket.timeout):
raise RetryDownload(err) raise RetryDownload(err)
@ -149,7 +149,7 @@ class HttpFD(FileDownloader):
# Unable to resume (requested range not satisfiable) # Unable to resume (requested range not satisfiable)
try: try:
# Open the connection again without the range header # Open the connection again without the range header
ctx.data = self.ydl.urlopen( ctx.data = self.hdl.urlopen(
sanitized_Request(url, None, headers)) sanitized_Request(url, None, headers))
content_length = ctx.data.info()['Content-Length'] content_length = ctx.data.info()['Content-Length']
except (compat_urllib_error.HTTPError, ) as err: except (compat_urllib_error.HTTPError, ) as err:

View file

@ -938,9 +938,9 @@ class YoutubeIE(YoutubeBaseInfoExtractor):
}, },
{ {
# Multifeed video with comma in title (see https://github.com/ytdl-org/haruhi-dl/issues/8536) # Multifeed video with comma in title (see https://github.com/ytdl-org/haruhi-dl/issues/8536)
'url': 'https://www.youtube.com/watch?v=gVfLd0zydlo', 'url': 'https://www.youtube.com/watch?v=gVfLd0zhdlo',
'info_dict': { 'info_dict': {
'id': 'gVfLd0zydlo', 'id': 'gVfLd0zhdlo',
'title': 'DevConf.cz 2016 Day 2 Workshops 1 14:00 - 15:30', 'title': 'DevConf.cz 2016 Day 2 Workshops 1 14:00 - 15:30',
}, },
'playlist_count': 2, 'playlist_count': 2,
@ -2629,7 +2629,7 @@ class YoutubePlaylistIE(YoutubePlaylistBaseInfoExtractor):
}, { }, {
'url': 'PLtPgu7CB4gbY9oDN3drwC3cMbJggS7dKl', 'url': 'PLtPgu7CB4gbY9oDN3drwC3cMbJggS7dKl',
'info_dict': { 'info_dict': {
'title': 'YDL_safe_search', 'title': 'HDL_safe_search',
'id': 'PLtPgu7CB4gbY9oDN3drwC3cMbJggS7dKl', 'id': 'PLtPgu7CB4gbY9oDN3drwC3cMbJggS7dKl',
}, },
'playlist_count': 2, 'playlist_count': 2,

View file

@ -2470,14 +2470,14 @@ class XAttrUnavailableError(HaruhiDLError):
pass pass
def _create_http_connection(ydl_handler, http_class, is_https, *args, **kwargs): def _create_http_connection(hdl_handler, http_class, is_https, *args, **kwargs):
# Working around python 2 bug (see http://bugs.python.org/issue17849) by limiting # Working around python 2 bug (see http://bugs.python.org/issue17849) by limiting
# expected HTTP responses to meet HTTP/1.0 or later (see also # expected HTTP responses to meet HTTP/1.0 or later (see also
# https://github.com/ytdl-org/haruhi-dl/issues/6727) # https://github.com/ytdl-org/haruhi-dl/issues/6727)
if sys.version_info < (3, 0): if sys.version_info < (3, 0):
kwargs['strict'] = True kwargs['strict'] = True
hc = http_class(*args, **compat_kwargs(kwargs)) hc = http_class(*args, **compat_kwargs(kwargs))
source_address = ydl_handler._params.get('source_address') source_address = hdl_handler._params.get('source_address')
if source_address is not None: if source_address is not None:
# This is to workaround _create_connection() from socket where it will try all # This is to workaround _create_connection() from socket where it will try all

View file

@ -1,3 +1,3 @@
from __future__ import unicode_literals from __future__ import unicode_literals
__version__ = '2020.09.20' __version__ = '2020.11.01'

View file

@ -61,12 +61,12 @@ def report_warning(message):
sys.stderr.write(output) sys.stderr.write(output)
class FakeYDL(HaruhiDL): class FakeHDL(HaruhiDL):
def __init__(self, override=None): def __init__(self, override=None):
# Different instances of the downloader can't share the same dictionary # Different instances of the downloader can't share the same dictionary
# some test set the "sublang" parameter, which would break the md5 checks. # some test set the "sublang" parameter, which would break the md5 checks.
params = get_params(override=override) params = get_params(override=override)
super(FakeYDL, self).__init__(params, auto_init=False) super(FakeHDL, self).__init__(params, auto_init=False)
self.result = [] self.result = []
def to_screen(self, s, skip_eol=None): def to_screen(self, s, skip_eol=None):
@ -263,14 +263,14 @@ def assertEqual(self, got, expected, msg=None):
self.assertTrue(got == expected, msg) self.assertTrue(got == expected, msg)
def expect_warnings(ydl, warnings_re): def expect_warnings(hdl, warnings_re):
real_warning = ydl.report_warning real_warning = hdl.report_warning
def _report_warning(w): def _report_warning(w):
if not any(re.search(w_re, w) for w_re in warnings_re): if not any(re.search(w_re, w) for w_re in warnings_re):
real_warning(w) real_warning(w)
ydl.report_warning = _report_warning hdl.report_warning = _report_warning
def http_server_port(httpd): def http_server_port(httpd):

View file

@ -1 +0,0 @@
*.swf

View file

@ -1,19 +0,0 @@
// input: [["a", "b", "c", "d"]]
// output: ["c", "b", "a", "d"]
package {
public class ArrayAccess {
public static function main(ar:Array):Array {
var aa:ArrayAccess = new ArrayAccess();
return aa.f(ar, 2);
}
private function f(ar:Array, num:Number):Array{
var x:String = ar[0];
var y:String = ar[num % ar.length];
ar[0] = y;
ar[num] = x;
return ar;
}
}
}

View file

@ -1,17 +0,0 @@
// input: []
// output: 121
package {
public class ClassCall {
public static function main():int{
var f:OtherClass = new OtherClass();
return f.func(100,20);
}
}
}
class OtherClass {
public function func(x: int, y: int):int {
return x+y+1;
}
}

View file

@ -1,15 +0,0 @@
// input: []
// output: 0
package {
public class ClassConstruction {
public static function main():int{
var f:Foo = new Foo();
return 0;
}
}
}
class Foo {
}

View file

@ -1,18 +0,0 @@
// input: []
// output: 4
package {
public class ConstArrayAccess {
private static const x:int = 2;
private static const ar:Array = ["42", "3411"];
public static function main():int{
var c:ConstArrayAccess = new ConstArrayAccess();
return c.f();
}
public function f(): int {
return ar[1].length;
}
}
}

View file

@ -1,12 +0,0 @@
// input: []
// output: 2
package {
public class ConstantInt {
private static const x:int = 2;
public static function main():int{
return x;
}
}
}

View file

@ -1,10 +0,0 @@
// input: [{"x": 1, "y": 2}]
// output: 3
package {
public class DictCall {
public static function main(d:Object):int{
return d.x + d.y;
}
}
}

View file

@ -1,10 +0,0 @@
// input: []
// output: false
package {
public class EqualsOperator {
public static function main():Boolean{
return 1 == 2;
}
}
}

View file

@ -1,13 +0,0 @@
// input: [1, 2]
// output: 3
package {
public class LocalVars {
public static function main(a:int, b:int):int{
var c:int = a + b + b;
var d:int = c - b;
var e:int = d;
return e;
}
}
}

View file

@ -1,22 +0,0 @@
// input: [1]
// output: 2
package {
public class MemberAssignment {
public var v:int;
public function g():int {
return this.v;
}
public function f(a:int):int{
this.v = a;
return this.v + this.g();
}
public static function main(a:int): int {
var v:MemberAssignment = new MemberAssignment();
return v.f(a);
}
}
}

View file

@ -1,24 +0,0 @@
// input: []
// output: 123
package {
public class NeOperator {
public static function main(): int {
var res:int = 0;
if (1 != 2) {
res += 3;
} else {
res += 4;
}
if (2 != 2) {
res += 10;
} else {
res += 20;
}
if (9 == 9) {
res += 100;
}
return res;
}
}
}

View file

@ -1,21 +0,0 @@
// input: []
// output: 9
package {
public class PrivateCall {
public static function main():int{
var f:OtherClass = new OtherClass();
return f.func();
}
}
}
class OtherClass {
private function pf():int {
return 9;
}
public function func():int {
return this.pf();
}
}

View file

@ -1,22 +0,0 @@
// input: []
// output: 9
package {
public class PrivateVoidCall {
public static function main():int{
var f:OtherClass = new OtherClass();
f.func();
return 9;
}
}
}
class OtherClass {
private function pf():void {
;
}
public function func():void {
this.pf();
}
}

View file

@ -1,13 +0,0 @@
// input: [1]
// output: 1
package {
public class StaticAssignment {
public static var v:int;
public static function main(a:int):int{
v = a;
return v;
}
}
}

View file

@ -1,16 +0,0 @@
// input: []
// output: 1
package {
public class StaticRetrieval {
public static var v:int;
public static function main():int{
if (v) {
return 0;
} else {
return 1;
}
}
}
}

View file

@ -1,11 +0,0 @@
// input: []
// output: 3
package {
public class StringBasics {
public static function main():int{
var s:String = "abc";
return s.length;
}
}
}

View file

@ -1,11 +0,0 @@
// input: []
// output: 9897
package {
public class StringCharCodeAt {
public static function main():int{
var s:String = "abc";
return s.charCodeAt(1) * 100 + s.charCodeAt();
}
}
}

View file

@ -1,11 +0,0 @@
// input: []
// output: 2
package {
public class StringConversion {
public static function main():int{
var s:String = String(99);
return s.length;
}
}
}

View file

@ -11,7 +11,7 @@ sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import copy import copy
from test.helper import FakeYDL, assertRegexpMatches from test.helper import FakeHDL, assertRegexpMatches
from haruhi_dl import HaruhiDL from haruhi_dl import HaruhiDL
from haruhi_dl.compat import compat_str, compat_urllib_error from haruhi_dl.compat import compat_str, compat_urllib_error
from haruhi_dl.extractor import YoutubeIE from haruhi_dl.extractor import YoutubeIE
@ -22,9 +22,9 @@ from haruhi_dl.utils import ExtractorError, match_filter_func
TEST_URL = 'http://localhost/sample.mp4' TEST_URL = 'http://localhost/sample.mp4'
class YDL(FakeYDL): class HDL(FakeHDL):
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
super(YDL, self).__init__(*args, **kwargs) super(HDL, self).__init__(*args, **kwargs)
self.downloaded_info_dicts = [] self.downloaded_info_dicts = []
self.msgs = [] self.msgs = []
@ -50,59 +50,59 @@ def _make_result(formats, **kwargs):
class TestFormatSelection(unittest.TestCase): class TestFormatSelection(unittest.TestCase):
def test_prefer_free_formats(self): def test_prefer_free_formats(self):
# Same resolution => download webm # Same resolution => download webm
ydl = YDL() hdl = HDL()
ydl.params['prefer_free_formats'] = True hdl.params['prefer_free_formats'] = True
formats = [ formats = [
{'ext': 'webm', 'height': 460, 'url': TEST_URL}, {'ext': 'webm', 'height': 460, 'url': TEST_URL},
{'ext': 'mp4', 'height': 460, 'url': TEST_URL}, {'ext': 'mp4', 'height': 460, 'url': TEST_URL},
] ]
info_dict = _make_result(formats) info_dict = _make_result(formats)
yie = YoutubeIE(ydl) yie = YoutubeIE(hdl)
yie._sort_formats(info_dict['formats']) yie._sort_formats(info_dict['formats'])
ydl.process_ie_result(info_dict) hdl.process_ie_result(info_dict)
downloaded = ydl.downloaded_info_dicts[0] downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['ext'], 'webm') self.assertEqual(downloaded['ext'], 'webm')
# Different resolution => download best quality (mp4) # Different resolution => download best quality (mp4)
ydl = YDL() hdl = HDL()
ydl.params['prefer_free_formats'] = True hdl.params['prefer_free_formats'] = True
formats = [ formats = [
{'ext': 'webm', 'height': 720, 'url': TEST_URL}, {'ext': 'webm', 'height': 720, 'url': TEST_URL},
{'ext': 'mp4', 'height': 1080, 'url': TEST_URL}, {'ext': 'mp4', 'height': 1080, 'url': TEST_URL},
] ]
info_dict['formats'] = formats info_dict['formats'] = formats
yie = YoutubeIE(ydl) yie = YoutubeIE(hdl)
yie._sort_formats(info_dict['formats']) yie._sort_formats(info_dict['formats'])
ydl.process_ie_result(info_dict) hdl.process_ie_result(info_dict)
downloaded = ydl.downloaded_info_dicts[0] downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['ext'], 'mp4') self.assertEqual(downloaded['ext'], 'mp4')
# No prefer_free_formats => prefer mp4 and flv for greater compatibility # No prefer_free_formats => prefer mp4 and flv for greater compatibility
ydl = YDL() hdl = HDL()
ydl.params['prefer_free_formats'] = False hdl.params['prefer_free_formats'] = False
formats = [ formats = [
{'ext': 'webm', 'height': 720, 'url': TEST_URL}, {'ext': 'webm', 'height': 720, 'url': TEST_URL},
{'ext': 'mp4', 'height': 720, 'url': TEST_URL}, {'ext': 'mp4', 'height': 720, 'url': TEST_URL},
{'ext': 'flv', 'height': 720, 'url': TEST_URL}, {'ext': 'flv', 'height': 720, 'url': TEST_URL},
] ]
info_dict['formats'] = formats info_dict['formats'] = formats
yie = YoutubeIE(ydl) yie = YoutubeIE(hdl)
yie._sort_formats(info_dict['formats']) yie._sort_formats(info_dict['formats'])
ydl.process_ie_result(info_dict) hdl.process_ie_result(info_dict)
downloaded = ydl.downloaded_info_dicts[0] downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['ext'], 'mp4') self.assertEqual(downloaded['ext'], 'mp4')
ydl = YDL() hdl = HDL()
ydl.params['prefer_free_formats'] = False hdl.params['prefer_free_formats'] = False
formats = [ formats = [
{'ext': 'flv', 'height': 720, 'url': TEST_URL}, {'ext': 'flv', 'height': 720, 'url': TEST_URL},
{'ext': 'webm', 'height': 720, 'url': TEST_URL}, {'ext': 'webm', 'height': 720, 'url': TEST_URL},
] ]
info_dict['formats'] = formats info_dict['formats'] = formats
yie = YoutubeIE(ydl) yie = YoutubeIE(hdl)
yie._sort_formats(info_dict['formats']) yie._sort_formats(info_dict['formats'])
ydl.process_ie_result(info_dict) hdl.process_ie_result(info_dict)
downloaded = ydl.downloaded_info_dicts[0] downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['ext'], 'flv') self.assertEqual(downloaded['ext'], 'flv')
def test_format_selection(self): def test_format_selection(self):
@ -115,34 +115,34 @@ class TestFormatSelection(unittest.TestCase):
] ]
info_dict = _make_result(formats) info_dict = _make_result(formats)
ydl = YDL({'format': '20/47'}) hdl = HDL({'format': '20/47'})
ydl.process_ie_result(info_dict.copy()) hdl.process_ie_result(info_dict.copy())
downloaded = ydl.downloaded_info_dicts[0] downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], '47') self.assertEqual(downloaded['format_id'], '47')
ydl = YDL({'format': '20/71/worst'}) hdl = HDL({'format': '20/71/worst'})
ydl.process_ie_result(info_dict.copy()) hdl.process_ie_result(info_dict.copy())
downloaded = ydl.downloaded_info_dicts[0] downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], '35') self.assertEqual(downloaded['format_id'], '35')
ydl = YDL() hdl = HDL()
ydl.process_ie_result(info_dict.copy()) hdl.process_ie_result(info_dict.copy())
downloaded = ydl.downloaded_info_dicts[0] downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], '2') self.assertEqual(downloaded['format_id'], '2')
ydl = YDL({'format': 'webm/mp4'}) hdl = HDL({'format': 'webm/mp4'})
ydl.process_ie_result(info_dict.copy()) hdl.process_ie_result(info_dict.copy())
downloaded = ydl.downloaded_info_dicts[0] downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], '47') self.assertEqual(downloaded['format_id'], '47')
ydl = YDL({'format': '3gp/40/mp4'}) hdl = HDL({'format': '3gp/40/mp4'})
ydl.process_ie_result(info_dict.copy()) hdl.process_ie_result(info_dict.copy())
downloaded = ydl.downloaded_info_dicts[0] downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], '35') self.assertEqual(downloaded['format_id'], '35')
ydl = YDL({'format': 'example-with-dashes'}) hdl = HDL({'format': 'example-with-dashes'})
ydl.process_ie_result(info_dict.copy()) hdl.process_ie_result(info_dict.copy())
downloaded = ydl.downloaded_info_dicts[0] downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'example-with-dashes') self.assertEqual(downloaded['format_id'], 'example-with-dashes')
def test_format_selection_audio(self): def test_format_selection_audio(self):
@ -154,14 +154,14 @@ class TestFormatSelection(unittest.TestCase):
] ]
info_dict = _make_result(formats) info_dict = _make_result(formats)
ydl = YDL({'format': 'bestaudio'}) hdl = HDL({'format': 'bestaudio'})
ydl.process_ie_result(info_dict.copy()) hdl.process_ie_result(info_dict.copy())
downloaded = ydl.downloaded_info_dicts[0] downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'audio-high') self.assertEqual(downloaded['format_id'], 'audio-high')
ydl = YDL({'format': 'worstaudio'}) hdl = HDL({'format': 'worstaudio'})
ydl.process_ie_result(info_dict.copy()) hdl.process_ie_result(info_dict.copy())
downloaded = ydl.downloaded_info_dicts[0] downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'audio-low') self.assertEqual(downloaded['format_id'], 'audio-low')
formats = [ formats = [
@ -170,9 +170,9 @@ class TestFormatSelection(unittest.TestCase):
] ]
info_dict = _make_result(formats) info_dict = _make_result(formats)
ydl = YDL({'format': 'bestaudio/worstaudio/best'}) hdl = HDL({'format': 'bestaudio/worstaudio/best'})
ydl.process_ie_result(info_dict.copy()) hdl.process_ie_result(info_dict.copy())
downloaded = ydl.downloaded_info_dicts[0] downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'vid-high') self.assertEqual(downloaded['format_id'], 'vid-high')
def test_format_selection_audio_exts(self): def test_format_selection_audio_exts(self):
@ -185,25 +185,25 @@ class TestFormatSelection(unittest.TestCase):
] ]
info_dict = _make_result(formats) info_dict = _make_result(formats)
ydl = YDL({'format': 'best'}) hdl = HDL({'format': 'best'})
ie = YoutubeIE(ydl) ie = YoutubeIE(hdl)
ie._sort_formats(info_dict['formats']) ie._sort_formats(info_dict['formats'])
ydl.process_ie_result(copy.deepcopy(info_dict)) hdl.process_ie_result(copy.deepcopy(info_dict))
downloaded = ydl.downloaded_info_dicts[0] downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'aac-64') self.assertEqual(downloaded['format_id'], 'aac-64')
ydl = YDL({'format': 'mp3'}) hdl = HDL({'format': 'mp3'})
ie = YoutubeIE(ydl) ie = YoutubeIE(hdl)
ie._sort_formats(info_dict['formats']) ie._sort_formats(info_dict['formats'])
ydl.process_ie_result(copy.deepcopy(info_dict)) hdl.process_ie_result(copy.deepcopy(info_dict))
downloaded = ydl.downloaded_info_dicts[0] downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'mp3-64') self.assertEqual(downloaded['format_id'], 'mp3-64')
ydl = YDL({'prefer_free_formats': True}) hdl = HDL({'prefer_free_formats': True})
ie = YoutubeIE(ydl) ie = YoutubeIE(hdl)
ie._sort_formats(info_dict['formats']) ie._sort_formats(info_dict['formats'])
ydl.process_ie_result(copy.deepcopy(info_dict)) hdl.process_ie_result(copy.deepcopy(info_dict))
downloaded = ydl.downloaded_info_dicts[0] downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'ogg-64') self.assertEqual(downloaded['format_id'], 'ogg-64')
def test_format_selection_video(self): def test_format_selection_video(self):
@ -214,19 +214,19 @@ class TestFormatSelection(unittest.TestCase):
] ]
info_dict = _make_result(formats) info_dict = _make_result(formats)
ydl = YDL({'format': 'bestvideo'}) hdl = HDL({'format': 'bestvideo'})
ydl.process_ie_result(info_dict.copy()) hdl.process_ie_result(info_dict.copy())
downloaded = ydl.downloaded_info_dicts[0] downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'dash-video-high') self.assertEqual(downloaded['format_id'], 'dash-video-high')
ydl = YDL({'format': 'worstvideo'}) hdl = HDL({'format': 'worstvideo'})
ydl.process_ie_result(info_dict.copy()) hdl.process_ie_result(info_dict.copy())
downloaded = ydl.downloaded_info_dicts[0] downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'dash-video-low') self.assertEqual(downloaded['format_id'], 'dash-video-low')
ydl = YDL({'format': 'bestvideo[format_id^=dash][format_id$=low]'}) hdl = HDL({'format': 'bestvideo[format_id^=dash][format_id$=low]'})
ydl.process_ie_result(info_dict.copy()) hdl.process_ie_result(info_dict.copy())
downloaded = ydl.downloaded_info_dicts[0] downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'dash-video-low') self.assertEqual(downloaded['format_id'], 'dash-video-low')
formats = [ formats = [
@ -234,9 +234,9 @@ class TestFormatSelection(unittest.TestCase):
] ]
info_dict = _make_result(formats) info_dict = _make_result(formats)
ydl = YDL({'format': 'bestvideo[vcodec=avc1.123456]'}) hdl = HDL({'format': 'bestvideo[vcodec=avc1.123456]'})
ydl.process_ie_result(info_dict.copy()) hdl.process_ie_result(info_dict.copy())
downloaded = ydl.downloaded_info_dicts[0] downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'vid-vcodec-dot') self.assertEqual(downloaded['format_id'], 'vid-vcodec-dot')
def test_format_selection_string_ops(self): def test_format_selection_string_ops(self):
@ -247,67 +247,67 @@ class TestFormatSelection(unittest.TestCase):
info_dict = _make_result(formats) info_dict = _make_result(formats)
# equals (=) # equals (=)
ydl = YDL({'format': '[format_id=abc-cba]'}) hdl = HDL({'format': '[format_id=abc-cba]'})
ydl.process_ie_result(info_dict.copy()) hdl.process_ie_result(info_dict.copy())
downloaded = ydl.downloaded_info_dicts[0] downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'abc-cba') self.assertEqual(downloaded['format_id'], 'abc-cba')
# does not equal (!=) # does not equal (!=)
ydl = YDL({'format': '[format_id!=abc-cba]'}) hdl = HDL({'format': '[format_id!=abc-cba]'})
ydl.process_ie_result(info_dict.copy()) hdl.process_ie_result(info_dict.copy())
downloaded = ydl.downloaded_info_dicts[0] downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'zxc-cxz') self.assertEqual(downloaded['format_id'], 'zxc-cxz')
ydl = YDL({'format': '[format_id!=abc-cba][format_id!=zxc-cxz]'}) hdl = HDL({'format': '[format_id!=abc-cba][format_id!=zxc-cxz]'})
self.assertRaises(ExtractorError, ydl.process_ie_result, info_dict.copy()) self.assertRaises(ExtractorError, hdl.process_ie_result, info_dict.copy())
# starts with (^=) # starts with (^=)
ydl = YDL({'format': '[format_id^=abc]'}) hdl = HDL({'format': '[format_id^=abc]'})
ydl.process_ie_result(info_dict.copy()) hdl.process_ie_result(info_dict.copy())
downloaded = ydl.downloaded_info_dicts[0] downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'abc-cba') self.assertEqual(downloaded['format_id'], 'abc-cba')
# does not start with (!^=) # does not start with (!^=)
ydl = YDL({'format': '[format_id!^=abc]'}) hdl = HDL({'format': '[format_id!^=abc]'})
ydl.process_ie_result(info_dict.copy()) hdl.process_ie_result(info_dict.copy())
downloaded = ydl.downloaded_info_dicts[0] downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'zxc-cxz') self.assertEqual(downloaded['format_id'], 'zxc-cxz')
ydl = YDL({'format': '[format_id!^=abc][format_id!^=zxc]'}) hdl = HDL({'format': '[format_id!^=abc][format_id!^=zxc]'})
self.assertRaises(ExtractorError, ydl.process_ie_result, info_dict.copy()) self.assertRaises(ExtractorError, hdl.process_ie_result, info_dict.copy())
# ends with ($=) # ends with ($=)
ydl = YDL({'format': '[format_id$=cba]'}) hdl = HDL({'format': '[format_id$=cba]'})
ydl.process_ie_result(info_dict.copy()) hdl.process_ie_result(info_dict.copy())
downloaded = ydl.downloaded_info_dicts[0] downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'abc-cba') self.assertEqual(downloaded['format_id'], 'abc-cba')
# does not end with (!$=) # does not end with (!$=)
ydl = YDL({'format': '[format_id!$=cba]'}) hdl = HDL({'format': '[format_id!$=cba]'})
ydl.process_ie_result(info_dict.copy()) hdl.process_ie_result(info_dict.copy())
downloaded = ydl.downloaded_info_dicts[0] downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'zxc-cxz') self.assertEqual(downloaded['format_id'], 'zxc-cxz')
ydl = YDL({'format': '[format_id!$=cba][format_id!$=cxz]'}) hdl = HDL({'format': '[format_id!$=cba][format_id!$=cxz]'})
self.assertRaises(ExtractorError, ydl.process_ie_result, info_dict.copy()) self.assertRaises(ExtractorError, hdl.process_ie_result, info_dict.copy())
# contains (*=) # contains (*=)
ydl = YDL({'format': '[format_id*=bc-cb]'}) hdl = HDL({'format': '[format_id*=bc-cb]'})
ydl.process_ie_result(info_dict.copy()) hdl.process_ie_result(info_dict.copy())
downloaded = ydl.downloaded_info_dicts[0] downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'abc-cba') self.assertEqual(downloaded['format_id'], 'abc-cba')
# does not contain (!*=) # does not contain (!*=)
ydl = YDL({'format': '[format_id!*=bc-cb]'}) hdl = HDL({'format': '[format_id!*=bc-cb]'})
ydl.process_ie_result(info_dict.copy()) hdl.process_ie_result(info_dict.copy())
downloaded = ydl.downloaded_info_dicts[0] downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'zxc-cxz') self.assertEqual(downloaded['format_id'], 'zxc-cxz')
ydl = YDL({'format': '[format_id!*=abc][format_id!*=zxc]'}) hdl = HDL({'format': '[format_id!*=abc][format_id!*=zxc]'})
self.assertRaises(ExtractorError, ydl.process_ie_result, info_dict.copy()) self.assertRaises(ExtractorError, hdl.process_ie_result, info_dict.copy())
ydl = YDL({'format': '[format_id!*=-]'}) hdl = HDL({'format': '[format_id!*=-]'})
self.assertRaises(ExtractorError, ydl.process_ie_result, info_dict.copy()) self.assertRaises(ExtractorError, hdl.process_ie_result, info_dict.copy())
def test_youtube_format_selection(self): def test_youtube_format_selection(self):
order = [ order = [
@ -342,69 +342,69 @@ class TestFormatSelection(unittest.TestCase):
formats_order = [format_info(f_id) for f_id in order] formats_order = [format_info(f_id) for f_id in order]
info_dict = _make_result(list(formats_order), extractor='youtube') info_dict = _make_result(list(formats_order), extractor='youtube')
ydl = YDL({'format': 'bestvideo+bestaudio'}) hdl = HDL({'format': 'bestvideo+bestaudio'})
yie = YoutubeIE(ydl) yie = YoutubeIE(hdl)
yie._sort_formats(info_dict['formats']) yie._sort_formats(info_dict['formats'])
ydl.process_ie_result(info_dict) hdl.process_ie_result(info_dict)
downloaded = ydl.downloaded_info_dicts[0] downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], '137+141') self.assertEqual(downloaded['format_id'], '137+141')
self.assertEqual(downloaded['ext'], 'mp4') self.assertEqual(downloaded['ext'], 'mp4')
info_dict = _make_result(list(formats_order), extractor='youtube') info_dict = _make_result(list(formats_order), extractor='youtube')
ydl = YDL({'format': 'bestvideo[height>=999999]+bestaudio/best'}) hdl = HDL({'format': 'bestvideo[height>=999999]+bestaudio/best'})
yie = YoutubeIE(ydl) yie = YoutubeIE(hdl)
yie._sort_formats(info_dict['formats']) yie._sort_formats(info_dict['formats'])
ydl.process_ie_result(info_dict) hdl.process_ie_result(info_dict)
downloaded = ydl.downloaded_info_dicts[0] downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], '38') self.assertEqual(downloaded['format_id'], '38')
info_dict = _make_result(list(formats_order), extractor='youtube') info_dict = _make_result(list(formats_order), extractor='youtube')
ydl = YDL({'format': 'bestvideo/best,bestaudio'}) hdl = HDL({'format': 'bestvideo/best,bestaudio'})
yie = YoutubeIE(ydl) yie = YoutubeIE(hdl)
yie._sort_formats(info_dict['formats']) yie._sort_formats(info_dict['formats'])
ydl.process_ie_result(info_dict) hdl.process_ie_result(info_dict)
downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts] downloaded_ids = [info['format_id'] for info in hdl.downloaded_info_dicts]
self.assertEqual(downloaded_ids, ['137', '141']) self.assertEqual(downloaded_ids, ['137', '141'])
info_dict = _make_result(list(formats_order), extractor='youtube') info_dict = _make_result(list(formats_order), extractor='youtube')
ydl = YDL({'format': '(bestvideo[ext=mp4],bestvideo[ext=webm])+bestaudio'}) hdl = HDL({'format': '(bestvideo[ext=mp4],bestvideo[ext=webm])+bestaudio'})
yie = YoutubeIE(ydl) yie = YoutubeIE(hdl)
yie._sort_formats(info_dict['formats']) yie._sort_formats(info_dict['formats'])
ydl.process_ie_result(info_dict) hdl.process_ie_result(info_dict)
downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts] downloaded_ids = [info['format_id'] for info in hdl.downloaded_info_dicts]
self.assertEqual(downloaded_ids, ['137+141', '248+141']) self.assertEqual(downloaded_ids, ['137+141', '248+141'])
info_dict = _make_result(list(formats_order), extractor='youtube') info_dict = _make_result(list(formats_order), extractor='youtube')
ydl = YDL({'format': '(bestvideo[ext=mp4],bestvideo[ext=webm])[height<=720]+bestaudio'}) hdl = HDL({'format': '(bestvideo[ext=mp4],bestvideo[ext=webm])[height<=720]+bestaudio'})
yie = YoutubeIE(ydl) yie = YoutubeIE(hdl)
yie._sort_formats(info_dict['formats']) yie._sort_formats(info_dict['formats'])
ydl.process_ie_result(info_dict) hdl.process_ie_result(info_dict)
downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts] downloaded_ids = [info['format_id'] for info in hdl.downloaded_info_dicts]
self.assertEqual(downloaded_ids, ['136+141', '247+141']) self.assertEqual(downloaded_ids, ['136+141', '247+141'])
info_dict = _make_result(list(formats_order), extractor='youtube') info_dict = _make_result(list(formats_order), extractor='youtube')
ydl = YDL({'format': '(bestvideo[ext=none]/bestvideo[ext=webm])+bestaudio'}) hdl = HDL({'format': '(bestvideo[ext=none]/bestvideo[ext=webm])+bestaudio'})
yie = YoutubeIE(ydl) yie = YoutubeIE(hdl)
yie._sort_formats(info_dict['formats']) yie._sort_formats(info_dict['formats'])
ydl.process_ie_result(info_dict) hdl.process_ie_result(info_dict)
downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts] downloaded_ids = [info['format_id'] for info in hdl.downloaded_info_dicts]
self.assertEqual(downloaded_ids, ['248+141']) self.assertEqual(downloaded_ids, ['248+141'])
for f1, f2 in zip(formats_order, formats_order[1:]): for f1, f2 in zip(formats_order, formats_order[1:]):
info_dict = _make_result([f1, f2], extractor='youtube') info_dict = _make_result([f1, f2], extractor='youtube')
ydl = YDL({'format': 'best/bestvideo'}) hdl = HDL({'format': 'best/bestvideo'})
yie = YoutubeIE(ydl) yie = YoutubeIE(hdl)
yie._sort_formats(info_dict['formats']) yie._sort_formats(info_dict['formats'])
ydl.process_ie_result(info_dict) hdl.process_ie_result(info_dict)
downloaded = ydl.downloaded_info_dicts[0] downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], f1['format_id']) self.assertEqual(downloaded['format_id'], f1['format_id'])
info_dict = _make_result([f2, f1], extractor='youtube') info_dict = _make_result([f2, f1], extractor='youtube')
ydl = YDL({'format': 'best/bestvideo'}) hdl = HDL({'format': 'best/bestvideo'})
yie = YoutubeIE(ydl) yie = YoutubeIE(hdl)
yie._sort_formats(info_dict['formats']) yie._sort_formats(info_dict['formats'])
ydl.process_ie_result(info_dict) hdl.process_ie_result(info_dict)
downloaded = ydl.downloaded_info_dicts[0] downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], f1['format_id']) self.assertEqual(downloaded['format_id'], f1['format_id'])
def test_audio_only_extractor_format_selection(self): def test_audio_only_extractor_format_selection(self):
@ -418,14 +418,14 @@ class TestFormatSelection(unittest.TestCase):
] ]
info_dict = _make_result(formats) info_dict = _make_result(formats)
ydl = YDL({'format': 'best'}) hdl = HDL({'format': 'best'})
ydl.process_ie_result(info_dict.copy()) hdl.process_ie_result(info_dict.copy())
downloaded = ydl.downloaded_info_dicts[0] downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'high') self.assertEqual(downloaded['format_id'], 'high')
ydl = YDL({'format': 'worst'}) hdl = HDL({'format': 'worst'})
ydl.process_ie_result(info_dict.copy()) hdl.process_ie_result(info_dict.copy())
downloaded = ydl.downloaded_info_dicts[0] downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'low') self.assertEqual(downloaded['format_id'], 'low')
def test_format_not_available(self): def test_format_not_available(self):
@ -438,8 +438,8 @@ class TestFormatSelection(unittest.TestCase):
# This must fail since complete video-audio format does not match filter
# and extractor does not provide incomplete only formats (i.e. only
# video-only or audio-only).
- ydl = YDL({'format': 'best[height>360]'})
+ hdl = HDL({'format': 'best[height>360]'})
- self.assertRaises(ExtractorError, ydl.process_ie_result, info_dict.copy())
+ self.assertRaises(ExtractorError, hdl.process_ie_result, info_dict.copy())
def test_format_selection_issue_10083(self):
# See https://github.com/ytdl-org/haruhi-dl/issues/10083
@@ -450,15 +450,15 @@ class TestFormatSelection(unittest.TestCase):
]
info_dict = _make_result(formats)
- ydl = YDL({'format': 'best[height>360]/bestvideo[height>360]+bestaudio'})
+ hdl = HDL({'format': 'best[height>360]/bestvideo[height>360]+bestaudio'})
- ydl.process_ie_result(info_dict.copy())
+ hdl.process_ie_result(info_dict.copy())
- self.assertEqual(ydl.downloaded_info_dicts[0]['format_id'], 'video+audio')
+ self.assertEqual(hdl.downloaded_info_dicts[0]['format_id'], 'video+audio')
def test_invalid_format_specs(self):
def assert_syntax_error(format_spec):
- ydl = YDL({'format': format_spec})
+ hdl = HDL({'format': format_spec})
info_dict = _make_result([{'format_id': 'foo', 'url': TEST_URL}])
- self.assertRaises(SyntaxError, ydl.process_ie_result, info_dict)
+ self.assertRaises(SyntaxError, hdl.process_ie_result, info_dict)
assert_syntax_error('bestvideo,,best')
assert_syntax_error('+bestaudio')
@@ -480,74 +480,74 @@ class TestFormatSelection(unittest.TestCase):
f['ext'] = 'unknown'
info_dict = _make_result(formats)
- ydl = YDL({'format': 'best[filesize<3000]'})
+ hdl = HDL({'format': 'best[filesize<3000]'})
- ydl.process_ie_result(info_dict)
+ hdl.process_ie_result(info_dict)
- downloaded = ydl.downloaded_info_dicts[0]
+ downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'D')
- ydl = YDL({'format': 'best[filesize<=3000]'})
+ hdl = HDL({'format': 'best[filesize<=3000]'})
- ydl.process_ie_result(info_dict)
+ hdl.process_ie_result(info_dict)
- downloaded = ydl.downloaded_info_dicts[0]
+ downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'E')
- ydl = YDL({'format': 'best[filesize <= ? 3000]'})
+ hdl = HDL({'format': 'best[filesize <= ? 3000]'})
- ydl.process_ie_result(info_dict)
+ hdl.process_ie_result(info_dict)
- downloaded = ydl.downloaded_info_dicts[0]
+ downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'F')
- ydl = YDL({'format': 'best [filesize = 1000] [width>450]'})
+ hdl = HDL({'format': 'best [filesize = 1000] [width>450]'})
- ydl.process_ie_result(info_dict)
+ hdl.process_ie_result(info_dict)
- downloaded = ydl.downloaded_info_dicts[0]
+ downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'B')
- ydl = YDL({'format': 'best [filesize = 1000] [width!=450]'})
+ hdl = HDL({'format': 'best [filesize = 1000] [width!=450]'})
- ydl.process_ie_result(info_dict)
+ hdl.process_ie_result(info_dict)
- downloaded = ydl.downloaded_info_dicts[0]
+ downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'C')
- ydl = YDL({'format': '[filesize>?1]'})
+ hdl = HDL({'format': '[filesize>?1]'})
- ydl.process_ie_result(info_dict)
+ hdl.process_ie_result(info_dict)
- downloaded = ydl.downloaded_info_dicts[0]
+ downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'G')
- ydl = YDL({'format': '[filesize<1M]'})
+ hdl = HDL({'format': '[filesize<1M]'})
- ydl.process_ie_result(info_dict)
+ hdl.process_ie_result(info_dict)
- downloaded = ydl.downloaded_info_dicts[0]
+ downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'E')
- ydl = YDL({'format': '[filesize<1MiB]'})
+ hdl = HDL({'format': '[filesize<1MiB]'})
- ydl.process_ie_result(info_dict)
+ hdl.process_ie_result(info_dict)
- downloaded = ydl.downloaded_info_dicts[0]
+ downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'G')
- ydl = YDL({'format': 'all[width>=400][width<=600]'})
+ hdl = HDL({'format': 'all[width>=400][width<=600]'})
- ydl.process_ie_result(info_dict)
+ hdl.process_ie_result(info_dict)
- downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
+ downloaded_ids = [info['format_id'] for info in hdl.downloaded_info_dicts]
self.assertEqual(downloaded_ids, ['B', 'C', 'D'])
- ydl = YDL({'format': 'best[height<40]'})
+ hdl = HDL({'format': 'best[height<40]'})
try:
- ydl.process_ie_result(info_dict)
+ hdl.process_ie_result(info_dict)
except ExtractorError:
pass
- self.assertEqual(ydl.downloaded_info_dicts, [])
+ self.assertEqual(hdl.downloaded_info_dicts, [])
def test_default_format_spec(self):
- ydl = YDL({'simulate': True})
+ hdl = HDL({'simulate': True})
- self.assertEqual(ydl._default_format_spec({}), 'bestvideo+bestaudio/best')
+ self.assertEqual(hdl._default_format_spec({}), 'bestvideo+bestaudio/best')
- ydl = YDL({})
+ hdl = HDL({})
- self.assertEqual(ydl._default_format_spec({'is_live': True}), 'best/bestvideo+bestaudio')
+ self.assertEqual(hdl._default_format_spec({'is_live': True}), 'best/bestvideo+bestaudio')
- ydl = YDL({'simulate': True})
+ hdl = HDL({'simulate': True})
- self.assertEqual(ydl._default_format_spec({'is_live': True}), 'bestvideo+bestaudio/best')
+ self.assertEqual(hdl._default_format_spec({'is_live': True}), 'bestvideo+bestaudio/best')
- ydl = YDL({'outtmpl': '-'})
+ hdl = HDL({'outtmpl': '-'})
- self.assertEqual(ydl._default_format_spec({}), 'best/bestvideo+bestaudio')
+ self.assertEqual(hdl._default_format_spec({}), 'best/bestvideo+bestaudio')
- ydl = YDL({})
+ hdl = HDL({})
- self.assertEqual(ydl._default_format_spec({}, download=False), 'bestvideo+bestaudio/best')
+ self.assertEqual(hdl._default_format_spec({}, download=False), 'bestvideo+bestaudio/best')
- self.assertEqual(ydl._default_format_spec({'is_live': True}), 'best/bestvideo+bestaudio')
+ self.assertEqual(hdl._default_format_spec({'is_live': True}), 'best/bestvideo+bestaudio')
class TestHaruhiDL(unittest.TestCase):
@@ -571,9 +571,9 @@ class TestHaruhiDL(unittest.TestCase):
def get_info(params={}):
params.setdefault('simulate', True)
- ydl = YDL(params)
+ hdl = HDL(params)
- ydl.report_warning = lambda *args, **kargs: None
+ hdl.report_warning = lambda *args, **kargs: None
- return ydl.process_video_result(info_dict, download=False)
+ return hdl.process_video_result(info_dict, download=False)
result = get_info()
self.assertFalse(result.get('requested_subtitles'))
@@ -618,7 +618,7 @@ class TestHaruhiDL(unittest.TestCase):
'extractor': 'Bar',
'playlist': 'funny videos',
}
- YDL.add_extra_info(test_dict, extra_info)
+ HDL.add_extra_info(test_dict, extra_info)
self.assertEqual(test_dict['extractor'], 'Foo')
self.assertEqual(test_dict['playlist'], 'funny videos')
@@ -633,8 +633,8 @@ class TestHaruhiDL(unittest.TestCase):
}
def fname(templ):
- ydl = HaruhiDL({'outtmpl': templ})
+ hdl = HaruhiDL({'outtmpl': templ})
- return ydl.prepare_filename(info)
+ return hdl.prepare_filename(info)
self.assertEqual(fname('%(id)s.%(ext)s'), '1234.mp4')
self.assertEqual(fname('%(id)s-%(width)s.%(ext)s'), '1234-NA.mp4')
# Replace missing fields with 'NA'
@@ -658,12 +658,12 @@ class TestHaruhiDL(unittest.TestCase):
self.assertEqual(fname('Hello %(title2)s'), 'Hello %PATH%')
def test_format_note(self):
- ydl = HaruhiDL()
+ hdl = HaruhiDL()
- self.assertEqual(ydl._format_note({}), '')
+ self.assertEqual(hdl._format_note({}), '')
- assertRegexpMatches(self, ydl._format_note({
+ assertRegexpMatches(self, hdl._format_note({
'vbr': 10,
}), r'^\s*10k$')
- assertRegexpMatches(self, ydl._format_note({
+ assertRegexpMatches(self, hdl._format_note({
'fps': 30,
}), r'^30fps$')
@@ -680,9 +680,9 @@ class TestHaruhiDL(unittest.TestCase):
def run_pp(params, PP):
with open(filename, 'wt') as f:
f.write('EXAMPLE')
- ydl = HaruhiDL(params)
+ hdl = HaruhiDL(params)
- ydl.add_post_processor(PP())
+ hdl.add_post_processor(PP())
- ydl.post_process(filename, {'filepath': filename})
+ hdl.post_process(filename, {'filepath': filename})
run_pp({'keepvideo': True}, SimplePP)
self.assertTrue(os.path.exists(filename), '%s doesn\'t exist' % filename)
@@ -706,16 +706,16 @@ class TestHaruhiDL(unittest.TestCase):
os.unlink(filename)
def test_match_filter(self):
- class FilterYDL(YDL):
+ class FilterHDL(HDL):
def __init__(self, *args, **kwargs):
- super(FilterYDL, self).__init__(*args, **kwargs)
+ super(FilterHDL, self).__init__(*args, **kwargs)
self.params['simulate'] = True
def process_info(self, info_dict):
- super(YDL, self).process_info(info_dict)
+ super(HDL, self).process_info(info_dict)
def _match_entry(self, info_dict, incomplete):
- res = super(FilterYDL, self)._match_entry(info_dict, incomplete)
+ res = super(FilterHDL, self)._match_entry(info_dict, incomplete)
if res is None:
self.downloaded_info_dicts.append(info_dict)
return res
@@ -745,10 +745,10 @@ class TestHaruhiDL(unittest.TestCase):
videos = [first, second]
def get_videos(filter_=None):
- ydl = FilterYDL({'match_filter': filter_})
+ hdl = FilterHDL({'match_filter': filter_})
for v in videos:
- ydl.process_ie_result(v, download=True)
+ hdl.process_ie_result(v, download=True)
- return [v['id'] for v in ydl.downloaded_info_dicts]
+ return [v['id'] for v in hdl.downloaded_info_dicts]
res = get_videos()
self.assertEqual(res, ['1', '2'])
@@ -817,11 +817,11 @@ class TestHaruhiDL(unittest.TestCase):
}
def get_downloaded_info_dicts(params):
- ydl = YDL(params)
+ hdl = HDL(params)
# make a deep copy because the dictionary and nested entries
# can be modified
- ydl.process_ie_result(copy.deepcopy(playlist))
+ hdl.process_ie_result(copy.deepcopy(playlist))
- return ydl.downloaded_info_dicts
+ return hdl.downloaded_info_dicts
def get_ids(params):
return [int(v['id']) for v in get_downloaded_info_dicts(params)]
@@ -874,11 +874,11 @@ class TestHaruhiDL(unittest.TestCase):
def test_urlopen_no_file_protocol(self):
# see https://github.com/ytdl-org/haruhi-dl/issues/8227
- ydl = YDL()
+ hdl = HDL()
- self.assertRaises(compat_urllib_error.URLError, ydl.urlopen, 'file:///etc/passwd')
+ self.assertRaises(compat_urllib_error.URLError, hdl.urlopen, 'file:///etc/passwd')
def test_do_not_override_ie_key_in_url_transparent(self):
- ydl = YDL()
+ hdl = HDL()
class Foo1IE(InfoExtractor):
_VALID_URL = r'foo1:'
@@ -908,11 +908,11 @@ class TestHaruhiDL(unittest.TestCase):
def _real_extract(self, url):
return _make_result([{'url': TEST_URL}], title='foo3 title')
- ydl.add_info_extractor(Foo1IE(ydl))
+ hdl.add_info_extractor(Foo1IE(hdl))
- ydl.add_info_extractor(Foo2IE(ydl))
+ hdl.add_info_extractor(Foo2IE(hdl))
- ydl.add_info_extractor(Foo3IE(ydl))
+ hdl.add_info_extractor(Foo3IE(hdl))
- ydl.extract_info('foo1:')
+ hdl.extract_info('foo1:')
- downloaded = ydl.downloaded_info_dicts[0]
+ downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['url'], TEST_URL)
self.assertEqual(downloaded['title'], 'foo1 title')
self.assertEqual(downloaded['id'], 'testid')

View file
@@ -9,7 +9,7 @@ import sys
import unittest
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
- from test.helper import FakeYDL, expect_dict, expect_value, http_server_port
+ from test.helper import FakeHDL, expect_dict, expect_value, http_server_port
from haruhi_dl.compat import compat_etree_fromstring, compat_http_server
from haruhi_dl.extractor.common import InfoExtractor
from haruhi_dl.extractor import YoutubeIE, get_info_extractor
@@ -41,7 +41,7 @@ class TestIE(InfoExtractor):
class TestInfoExtractor(unittest.TestCase):
def setUp(self):
- self.ie = TestIE(FakeYDL())
+ self.ie = TestIE(FakeHDL())
def test_ie_key(self):
self.assertEqual(get_info_extractor(YoutubeIE.ie_key()), YoutubeIE)

View file
@@ -22,11 +22,11 @@ def _download_restricted(url, filename, age):
'writeinfojson': True,
'outtmpl': '%(id)s.%(ext)s',
}
- ydl = HaruhiDL(params)
+ hdl = HaruhiDL(params)
- ydl.add_default_info_extractors()
+ hdl.add_default_info_extractors()
json_filename = os.path.splitext(filename)[0] + '.info.json'
try_rm(json_filename)
- ydl.download([url])
+ hdl.download([url])
res = os.path.exists(json_filename)
try_rm(json_filename)
return res

View file
@@ -12,7 +12,7 @@ import unittest
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
- from test.helper import FakeYDL
+ from test.helper import FakeHDL
from haruhi_dl.cache import Cache
@@ -38,10 +38,10 @@ class TestCache(unittest.TestCase):
shutil.rmtree(self.test_dir)
def test_cache(self):
- ydl = FakeYDL({
+ hdl = FakeHDL({
'cachedir': self.test_dir,
})
- c = Cache(ydl)
+ c = Cache(hdl)
obj = {'x': 1, 'y': ['ä', '\\a', True]}
self.assertEqual(c.load('test_cache', 'k.'), None)
c.store('test_cache', 'k.', obj)

View file
@@ -123,18 +123,18 @@ def generator(test_case, tname):
params.setdefault('extract_flat', 'in_playlist')
params.setdefault('skip_download', True)
- ydl = HaruhiDL(params, auto_init=False)
+ hdl = HaruhiDL(params, auto_init=False)
- ydl.add_default_info_extractors()
+ hdl.add_default_info_extractors()
finished_hook_called = set()
def _hook(status):
if status['status'] == 'finished':
finished_hook_called.add(status['filename'])
- ydl.add_progress_hook(_hook)
+ hdl.add_progress_hook(_hook)
- expect_warnings(ydl, test_case.get('expected_warnings', []))
+ expect_warnings(hdl, test_case.get('expected_warnings', []))
def get_tc_filename(tc):
- return ydl.prepare_filename(tc.get('info_dict', {}))
+ return hdl.prepare_filename(tc.get('info_dict', {}))
res_dict = None
@@ -154,7 +154,7 @@ def generator(test_case, tname):
# We're not using .download here since that is just a shim
# for outside error handling, and returns the exit code
# instead of the result dict.
- res_dict = ydl.extract_info(
+ res_dict = hdl.extract_info(
test_case['url'],
force_generic_extractor=params.get('force_generic_extractor', False))
except (DownloadError, ExtractorError) as err:

View file
@@ -88,8 +88,8 @@ class TestHttpFD(unittest.TestCase):
def download(self, params, ep):
params['logger'] = FakeLogger()
- ydl = HaruhiDL(params)
+ hdl = HaruhiDL(params)
- downloader = HttpFD(ydl, params)
+ downloader = HttpFD(hdl, params)
filename = 'testfile.mp4'
try_rm(encodeFilename(filename))
self.assertTrue(downloader.real_download(filename, {

View file
@@ -77,8 +77,8 @@ class TestHTTP(unittest.TestCase):
if sys.version_info[0] == 3:
return
- ydl = HaruhiDL({'logger': FakeLogger()})
+ hdl = HaruhiDL({'logger': FakeLogger()})
- r = ydl.extract_info('http://127.0.0.1:%d/302' % self.port)
+ r = hdl.extract_info('http://127.0.0.1:%d/302' % self.port)
self.assertEqual(r['entries'][0]['url'], 'http://127.0.0.1:%d/vid.mp4' % self.port)
@@ -96,13 +96,13 @@ class TestHTTPS(unittest.TestCase):
def test_nocheckcertificate(self):
if sys.version_info >= (2, 7, 9): # No certificate checking anyways
- ydl = HaruhiDL({'logger': FakeLogger()})
+ hdl = HaruhiDL({'logger': FakeLogger()})
self.assertRaises(
Exception,
- ydl.extract_info, 'https://127.0.0.1:%d/video.html' % self.port)
+ hdl.extract_info, 'https://127.0.0.1:%d/video.html' % self.port)
- ydl = HaruhiDL({'logger': FakeLogger(), 'nocheckcertificate': True})
+ hdl = HaruhiDL({'logger': FakeLogger(), 'nocheckcertificate': True})
- r = ydl.extract_info('https://127.0.0.1:%d/video.html' % self.port)
+ r = hdl.extract_info('https://127.0.0.1:%d/video.html' % self.port)
self.assertEqual(r['entries'][0]['url'], 'https://127.0.0.1:%d/vid.mp4' % self.port)
@@ -139,25 +139,25 @@ class TestProxy(unittest.TestCase):
def test_proxy(self):
geo_proxy = '127.0.0.1:{0}'.format(self.geo_port)
- ydl = HaruhiDL({
+ hdl = HaruhiDL({
'proxy': '127.0.0.1:{0}'.format(self.port),
'geo_verification_proxy': geo_proxy,
})
url = 'http://foo.com/bar'
- response = ydl.urlopen(url).read().decode('utf-8')
+ response = hdl.urlopen(url).read().decode('utf-8')
self.assertEqual(response, 'normal: {0}'.format(url))
req = compat_urllib_request.Request(url)
req.add_header('Ytdl-request-proxy', geo_proxy)
- response = ydl.urlopen(req).read().decode('utf-8')
+ response = hdl.urlopen(req).read().decode('utf-8')
self.assertEqual(response, 'geo: {0}'.format(url))
def test_proxy_with_idn(self):
- ydl = HaruhiDL({
+ hdl = HaruhiDL({
'proxy': '127.0.0.1:{0}'.format(self.port),
})
url = 'http://中文.tw/'
- response = ydl.urlopen(url).read().decode('utf-8')
+ response = hdl.urlopen(url).read().decode('utf-8')
# b'xn--fiq228c' is '中文'.encode('idna')
self.assertEqual(response, 'normal: http://xn--fiq228c.tw/')

View file
@@ -8,7 +8,7 @@ import sys
import unittest
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
- from test.helper import FakeYDL
+ from test.helper import FakeHDL
from haruhi_dl.extractor import IqiyiIE
@@ -39,7 +39,7 @@ class TestIqiyiSDKInterpreter(unittest.TestCase):
If `sign` is incorrect, /validate call throws an HTTP 556 error
'''
logger = WarningLogger()
- ie = IqiyiIEWithCredentials(FakeYDL({'logger': logger}))
+ ie = IqiyiIEWithCredentials(FakeHDL({'logger': logger}))
ie._login()
self.assertTrue('unable to log in:' in logger.messages[0])

View file
@@ -12,7 +12,7 @@ import random
import subprocess
from test.helper import (
- FakeYDL,
+ FakeHDL,
get_params,
)
from haruhi_dl.compat import (
@@ -35,44 +35,44 @@ class TestMultipleSocks(unittest.TestCase):
params = self._check_params(['primary_proxy', 'primary_server_ip'])
if params is None:
return
- ydl = FakeYDL({
+ hdl = FakeHDL({
'proxy': params['primary_proxy']
})
self.assertEqual(
- ydl.urlopen('http://yt-dl.org/ip').read().decode('utf-8'),
+ hdl.urlopen('http://ifconfig.me/ip').read().decode('utf-8'),
params['primary_server_ip'])
def test_proxy_https(self):
params = self._check_params(['primary_proxy', 'primary_server_ip'])
if params is None:
return
- ydl = FakeYDL({
+ hdl = FakeHDL({
'proxy': params['primary_proxy']
})
self.assertEqual(
- ydl.urlopen('https://yt-dl.org/ip').read().decode('utf-8'),
+ hdl.urlopen('https://ifconfig.me/ip').read().decode('utf-8'),
params['primary_server_ip'])
def test_secondary_proxy_http(self):
params = self._check_params(['secondary_proxy', 'secondary_server_ip'])
if params is None:
return
- ydl = FakeYDL()
+ hdl = FakeHDL()
- req = compat_urllib_request.Request('http://yt-dl.org/ip')
+ req = compat_urllib_request.Request('http://ifconfig.me/ip')
req.add_header('Ytdl-request-proxy', params['secondary_proxy'])
self.assertEqual(
- ydl.urlopen(req).read().decode('utf-8'),
+ hdl.urlopen(req).read().decode('utf-8'),
params['secondary_server_ip'])
def test_secondary_proxy_https(self):
params = self._check_params(['secondary_proxy', 'secondary_server_ip'])
if params is None:
return
- ydl = FakeYDL()
+ hdl = FakeHDL()
- req = compat_urllib_request.Request('https://yt-dl.org/ip')
+ req = compat_urllib_request.Request('https://ifconfig.me/ip')
req.add_header('Ytdl-request-proxy', params['secondary_proxy'])
self.assertEqual(
- ydl.urlopen(req).read().decode('utf-8'),
+ hdl.urlopen(req).read().decode('utf-8'),
params['secondary_server_ip'])
@@ -99,10 +99,10 @@ class TestSocks(unittest.TestCase):
if self._SKIP_SOCKS_TEST:
return '127.0.0.1'
- ydl = FakeYDL({
+ hdl = FakeHDL({
'proxy': '%s://127.0.0.1:%d' % (protocol, self.port),
})
- return ydl.urlopen('http://yt-dl.org/ip').read().decode('utf-8')
+ return hdl.urlopen('http://ifconfig.me/ip').read().decode('utf-8')
def test_socks4(self):
self.assertTrue(isinstance(self._get_ip('socks4'), compat_str))

View file
@@ -7,7 +7,7 @@ import sys
import unittest
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
- from test.helper import FakeYDL, md5
+ from test.helper import FakeHDL, md5
from haruhi_dl.extractor import (
@@ -35,7 +35,7 @@ class BaseTestSubtitles(unittest.TestCase):
IE = None
def setUp(self):
- self.DL = FakeYDL()
+ self.DL = FakeHDL()
self.ie = self.IE()
self.DL.add_info_extractor(self.ie)

View file
@@ -1,30 +0,0 @@
#!/usr/bin/env python
from __future__ import unicode_literals
# Allow direct execution
import os
import sys
import unittest
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import json
from haruhi_dl.update import rsa_verify
class TestUpdate(unittest.TestCase):
def test_rsa_verify(self):
UPDATES_RSA_KEY = (0x9d60ee4d8f805312fdb15a62f87b95bd66177b91df176765d13514a0f1754bcd2057295c5b6f1d35daa6742c3ffc9a82d3e118861c207995a8031e151d863c9927e304576bc80692bc8e094896fcf11b66f3e29e04e3a71e9a11558558acea1840aec37fc396fb6b65dc81a1c4144e03bd1c011de62e3f1357b327d08426fe93, 65537)
with open(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'versions.json'), 'rb') as f:
versions_info = f.read().decode()
versions_info = json.loads(versions_info)
signature = versions_info['signature']
del versions_info['signature']
self.assertTrue(rsa_verify(
json.dumps(versions_info, sort_keys=True).encode('utf-8'),
signature, UPDATES_RSA_KEY))
#if __name__ == '__main__':
# unittest.main()

View file
@@ -46,9 +46,9 @@ class TestAnnotations(unittest.TestCase):
def test_info_json(self):
expected = list(EXPECTED_ANNOTATIONS) # Two annotations could have the same text.
ie = haruhi_dl.extractor.YoutubeIE()
- ydl = HaruhiDL(params)
+ hdl = HaruhiDL(params)
- ydl.add_info_extractor(ie)
+ hdl.add_info_extractor(ie)
- ydl.download([TEST_ID])
+ hdl.download([TEST_ID])
self.assertTrue(os.path.exists(ANNOTATIONS_FILE))
annoxml = None
with io.open(ANNOTATIONS_FILE, 'r', encoding='utf-8') as annof:

View file
@@ -7,7 +7,7 @@ import sys
import unittest
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
- from test.helper import FakeYDL
+ from test.helper import FakeHDL
from haruhi_dl.extractor import (
@@ -22,7 +22,7 @@ class TestYoutubeLists(unittest.TestCase):
self.assertEqual(info['_type'], 'playlist')
def test_youtube_playlist_noplaylist(self):
- dl = FakeYDL()
+ dl = FakeHDL()
dl.params['noplaylist'] = True
ie = YoutubePlaylistIE(dl)
result = ie.extract('https://www.youtube.com/watch?v=FXxLjLQi3Fg&list=PLwiyx1dc3P2JR9N8gQaQN_BCvlSlap7re')
@@ -30,7 +30,7 @@ class TestYoutubeLists(unittest.TestCase):
self.assertEqual(YoutubeIE().extract_id(result['url']), 'FXxLjLQi3Fg')
def test_youtube_course(self):
- dl = FakeYDL()
+ dl = FakeHDL()
ie = YoutubePlaylistIE(dl)
# TODO find a > 100 (paginating?) videos course
result = ie.extract('https://www.youtube.com/course?list=ECUl4u3cNGP61MdtwGTqZA0MreSaDybji8')
@@ -40,7 +40,7 @@ class TestYoutubeLists(unittest.TestCase):
self.assertEqual(YoutubeIE().extract_id(entries[-1]['url']), 'rYefUsYuEp0')
def test_youtube_mix(self):
- dl = FakeYDL()
+ dl = FakeHDL()
ie = YoutubePlaylistIE(dl)
result = ie.extract('https://www.youtube.com/watch?v=W01L70IGBgE&index=2&list=RDOQpdSVF_k_w')
entries = result['entries']
@@ -51,14 +51,14 @@ class TestYoutubeLists(unittest.TestCase):
def test_youtube_toptracks(self):
print('Skipping: The playlist page gives error 500')
return
- dl = FakeYDL()
+ dl = FakeHDL()
ie = YoutubePlaylistIE(dl)
result = ie.extract('https://www.youtube.com/playlist?list=MCUS')
entries = result['entries']
self.assertEqual(len(entries), 100)
def test_youtube_flat_playlist_titles(self):
- dl = FakeYDL()
+ dl = FakeHDL()
dl.params['extract_flat'] = True
ie = YoutubePlaylistIE(dl)
result = ie.extract('https://www.youtube.com/playlist?list=PL-KKIb8rvtMSrAO9YFbeM6UQrAqoFTUWv')

View file
@@ -12,7 +12,7 @@ import io
import re
import string
- from test.helper import FakeYDL
+ from test.helper import FakeHDL
from haruhi_dl.extractor import YoutubeIE
from haruhi_dl.compat import compat_str, compat_urlretrieve
@@ -116,8 +116,8 @@ def make_tfunc(url, stype, sig_input, expected_sig):
if not os.path.exists(fn):
compat_urlretrieve(url, fn)
- ydl = FakeYDL()
+ hdl = FakeHDL()
- ie = YoutubeIE(ydl)
+ ie = YoutubeIE(hdl)
if stype == 'js':
with io.open(fn, encoding='utf-8') as testf:
jscode = testf.read()