version 2020.11.01

Refs: merge-requests/5/head, v2020.11.01
Author: Dominika, 2020-11-01 05:17:15 +01:00
parent 389d4402eb
commit 1c397d1dd3
52 changed files with 334 additions and 863 deletions


@ -1,43 +0,0 @@
#!/usr/bin/env python3
from __future__ import unicode_literals
import json
import sys
import hashlib
import os.path
if len(sys.argv) <= 1:
print('Specify the version number as parameter')
sys.exit()
version = sys.argv[1]
with open('update/LATEST_VERSION', 'w') as f:
f.write(version)
versions_info = json.load(open('update/versions.json'))
if 'signature' in versions_info:
del versions_info['signature']
new_version = {}
filenames = {
'bin': 'haruhi-dl',
'exe': 'haruhi-dl.exe',
'tar': 'haruhi-dl-%s.tar.gz' % version}
build_dir = os.path.join('..', '..', 'build', version)
for key, filename in filenames.items():
url = 'https://yt-dl.org/downloads/%s/%s' % (version, filename)
fn = os.path.join(build_dir, filename)
with open(fn, 'rb') as f:
data = f.read()
if not data:
raise ValueError('File %s is empty!' % fn)
sha256sum = hashlib.sha256(data).hexdigest()
new_version[key] = (url, sha256sum)
versions_info['versions'][version] = new_version
versions_info['latest'] = version
with open('update/versions.json', 'w') as jsonf:
json.dump(versions_info, jsonf, indent=4, sort_keys=True)
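
For reference, a rough sketch of the update/versions.json structure this (now removed) script maintained, inferred from the code above; the version number, URLs and checksums are placeholders, not real release data:

# Approximate shape of update/versions.json after a release (placeholders only);
# the signing script further below re-adds a top-level 'signature' key.
example_versions_info = {
    'latest': '2020.11.01',
    'versions': {
        '2020.11.01': {
            'bin': ['https://yt-dl.org/downloads/2020.11.01/haruhi-dl', '<sha256>'],
            'exe': ['https://yt-dl.org/downloads/2020.11.01/haruhi-dl.exe', '<sha256>'],
            'tar': ['https://yt-dl.org/downloads/2020.11.01/haruhi-dl-2020.11.01.tar.gz', '<sha256>'],
        },
    },
}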


@ -1,22 +0,0 @@
#!/usr/bin/env python3
from __future__ import unicode_literals
import json
versions_info = json.load(open('update/versions.json'))
version = versions_info['latest']
version_dict = versions_info['versions'][version]
# Read template page
with open('download.html.in', 'r', encoding='utf-8') as tmplf:
template = tmplf.read()
template = template.replace('@PROGRAM_VERSION@', version)
template = template.replace('@PROGRAM_URL@', version_dict['bin'][0])
template = template.replace('@PROGRAM_SHA256SUM@', version_dict['bin'][1])
template = template.replace('@EXE_URL@', version_dict['exe'][0])
template = template.replace('@EXE_SHA256SUM@', version_dict['exe'][1])
template = template.replace('@TAR_URL@', version_dict['tar'][0])
template = template.replace('@TAR_SHA256SUM@', version_dict['tar'][1])
with open('download.html', 'w', encoding='utf-8') as dlf:
dlf.write(template)


@ -1,34 +0,0 @@
#!/usr/bin/env python3
from __future__ import unicode_literals, with_statement
import rsa
import json
from binascii import hexlify
try:
input = raw_input
except NameError:
pass
versions_info = json.load(open('update/versions.json'))
if 'signature' in versions_info:
del versions_info['signature']
print('Enter the PKCS1 private key, followed by a blank line:')
privkey = b''
while True:
try:
line = input()
except EOFError:
break
if line == '':
break
privkey += line.encode('ascii') + b'\n'
privkey = rsa.PrivateKey.load_pkcs1(privkey)
signature = hexlify(rsa.pkcs1.sign(json.dumps(versions_info, sort_keys=True).encode('utf-8'), privkey, 'SHA-256')).decode()
print('signature: ' + signature)
versions_info['signature'] = signature
with open('update/versions.json', 'w') as versionsf:
json.dump(versions_info, versionsf, indent=4, sort_keys=True)
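
The script above only produces the signature; for context, a minimal sketch of the matching check on the consuming side, assuming the same rsa module and a PKCS#1 public key stored in a hypothetical versions.pub file:

# Hypothetical verification counterpart (not part of this commit): strip the
# 'signature' key again, re-serialize with sort_keys=True exactly as above,
# and check the detached signature against the public half of the key pair.
import json
import rsa
from binascii import unhexlify
versions_info = json.load(open('update/versions.json'))
signature = unhexlify(versions_info.pop('signature'))
pubkey = rsa.PublicKey.load_pkcs1(open('versions.pub', 'rb').read())  # placeholder key file
# raises rsa.pkcs1.VerificationError if the file was tampered with
rsa.verify(json.dumps(versions_info, sort_keys=True).encode('utf-8'), signature, pubkey)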


@ -1,21 +0,0 @@
#!/usr/bin/env python
# coding: utf-8
from __future__ import with_statement, unicode_literals
import datetime
import glob
import io # For Python 2 compatibility
import os
import re
year = str(datetime.datetime.now().year)
for fn in glob.glob('*.html*'):
with io.open(fn, encoding='utf-8') as f:
content = f.read()
newc = re.sub(r'(?P<copyright>Copyright © 2011-)(?P<year>[0-9]{4})', 'Copyright © 2011-' + year, content)
if content != newc:
tmpFn = fn + '.part'
with io.open(tmpFn, 'wt', encoding='utf-8') as outf:
outf.write(newc)
os.rename(tmpFn, fn)
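
For illustration, a single substitution performed by the regex above, on a made-up input line:

# Hypothetical input; the four-digit year is replaced while the prefix is kept.
import re
line = 'Copyright © 2011-2019 haruhi-dl'
print(re.sub(r'(?P<copyright>Copyright © 2011-)(?P<year>[0-9]{4})', 'Copyright © 2011-2020', line))
# -> Copyright © 2011-2020 haruhi-dl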


@ -1,76 +0,0 @@
#!/usr/bin/env python3
from __future__ import unicode_literals
import datetime
import io
import json
import textwrap
atom_template = textwrap.dedent("""\
<?xml version="1.0" encoding="utf-8"?>
<feed xmlns="http://www.w3.org/2005/Atom">
<link rel="self" href="http://ytdl-org.github.io/haruhi-dl/update/releases.atom" />
<title>haruhi-dl releases</title>
<id>https://yt-dl.org/feed/haruhi-dl-updates-feed</id>
<updated>@TIMESTAMP@</updated>
@ENTRIES@
</feed>""")
entry_template = textwrap.dedent("""
<entry>
<id>https://yt-dl.org/feed/haruhi-dl-updates-feed/haruhi-dl-@VERSION@</id>
<title>New version @VERSION@</title>
<link href="http://ytdl-org.github.io/haruhi-dl" />
<content type="xhtml">
<div xmlns="http://www.w3.org/1999/xhtml">
Downloads available at <a href="https://yt-dl.org/downloads/@VERSION@/">https://yt-dl.org/downloads/@VERSION@/</a>
</div>
</content>
<author>
<name>The haruhi-dl maintainers</name>
</author>
<updated>@TIMESTAMP@</updated>
</entry>
""")
now = datetime.datetime.now()
now_iso = now.isoformat() + 'Z'
atom_template = atom_template.replace('@TIMESTAMP@', now_iso)
versions_info = json.load(open('update/versions.json'))
versions = list(versions_info['versions'].keys())
versions.sort()
entries = []
for v in versions:
fields = v.split('.')
year, month, day = map(int, fields[:3])
faked = 0
patchlevel = 0
while True:
try:
datetime.date(year, month, day)
except ValueError:
day -= 1
faked += 1
assert day > 0
continue
break
if len(fields) >= 4:
try:
patchlevel = int(fields[3])
except ValueError:
patchlevel = 1
timestamp = '%04d-%02d-%02dT00:%02d:%02dZ' % (year, month, day, faked, patchlevel)
entry = entry_template.replace('@TIMESTAMP@', timestamp)
entry = entry.replace('@VERSION@', v)
entries.append(entry)
entries_str = textwrap.indent(''.join(entries), '\t')
atom_template = atom_template.replace('@ENTRIES@', entries_str)
with io.open('update/releases.atom', 'w', encoding='utf-8') as atom_file:
atom_file.write(atom_template)
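
A worked example of the timestamp encoding above, using a hypothetical version string: components that do not form a valid calendar date have the day stepped down until it is valid, and the number of steps plus the patchlevel end up in the minutes and seconds of the entry timestamp.

# '2016.02.30.1': datetime.date(2016, 2, 30) raises ValueError, so the loop
# lands on day 29 with faked == 1; the fourth field gives patchlevel == 1.
print('%04d-%02d-%02dT00:%02d:%02dZ' % (2016, 2, 29, 1, 1))  # -> 2016-02-29T00:01:01Z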


@ -1,37 +0,0 @@
#!/usr/bin/env python3
from __future__ import unicode_literals
import sys
import os
import textwrap
# We must be able to import haruhi_dl
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
import haruhi_dl
def main():
with open('supportedsites.html.in', 'r', encoding='utf-8') as tmplf:
template = tmplf.read()
ie_htmls = []
for ie in haruhi_dl.list_extractors(age_limit=None):
ie_html = '<b>{}</b>'.format(ie.IE_NAME)
ie_desc = getattr(ie, 'IE_DESC', None)
if ie_desc is False:
continue
elif ie_desc is not None:
ie_html += ': {}'.format(ie.IE_DESC)
if not ie.working():
ie_html += ' (Currently broken)'
ie_htmls.append('<li>{}</li>'.format(ie_html))
template = template.replace('@SITES@', textwrap.indent('\n'.join(ie_htmls), '\t'))
with open('supportedsites.html', 'w', encoding='utf-8') as sitesf:
sitesf.write(template)
if __name__ == '__main__':
main()


@ -112,7 +112,7 @@ for f in $RELEASE_FILES; do gpg --passphrase-repeat 5 --detach-sig "build/$versi
ROOT=$(pwd)
python devscripts/create-github-release.py ChangeLog $version "$ROOT/build/$version"
ssh ytdl@yt-dl.org "sh html/update_latest.sh $version"
#ssh ytdl@yt-dl.org "sh html/update_latest.sh $version"
/bin/echo -e "\n### Now switching to gh-pages..."
git clone --branch gh-pages --single-branch . build/gh-pages


@ -6,8 +6,8 @@ When using the ``haruhi_dl`` module, you start by creating an instance of :class
.. code-block:: python
>>> from haruhi_dl import HaruhiDL
>>> ydl = HaruhiDL()
>>> ydl.add_default_info_extractors()
>>> hdl = HaruhiDL()
>>> hdl.add_default_info_extractors()
Extracting video information
----------------------------
@ -16,7 +16,7 @@ You use the :meth:`HaruhiDL.extract_info` method for getting the video informati
.. code-block:: python
>>> info = ydl.extract_info('http://www.youtube.com/watch?v=BaW_jenozKc', download=False)
>>> info = hdl.extract_info('http://www.youtube.com/watch?v=BaW_jenozKc', download=False)
[youtube] Setting language
[youtube] BaW_jenozKc: Downloading webpage
[youtube] BaW_jenozKc: Downloading video info webpage
@ -40,7 +40,7 @@ The playlist information is extracted in a similar way, but the dictionary is a
.. code-block:: python
>>> playlist = ydl.extract_info('http://www.ted.com/playlists/13/open_source_open_world', download=False)
>>> playlist = hdl.extract_info('http://www.ted.com/playlists/13/open_source_open_world', download=False)
[TED] open_source_open_world: Downloading playlist webpage
...
>>> playlist['title']
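
Putting the renamed examples from this documentation page together, a minimal sketch of the flow as it reads after the ydl -> hdl rename; the URLs come from the surrounding docs, and the 'entries' access follows the test suite further below rather than this page:

# Illustrative only: single-video and playlist extraction with the renamed handle.
from haruhi_dl import HaruhiDL
hdl = HaruhiDL()
hdl.add_default_info_extractors()
info = hdl.extract_info('http://www.youtube.com/watch?v=BaW_jenozKc', download=False)
print(info['title'])  # a single video yields a plain info dict
playlist = hdl.extract_info('http://www.ted.com/playlists/13/open_source_open_world', download=False)
print(playlist['title'], len(playlist['entries']))  # a playlist yields a dict with an 'entries' list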


@ -2303,7 +2303,7 @@ class HaruhiDL(object):
self._write_string('[debug] Proxy map: ' + compat_str(proxy_map) + '\n')
if self.params.get('call_home', False):
ipaddr = self.urlopen('https://yt-dl.org/ip').read().decode('utf-8')
ipaddr = self.urlopen('https://ifconfig.me/ip').read().decode('utf-8')
self._write_string('[debug] Public IP address: %s\n' % ipaddr)
latest_version = self.urlopen(
'https://yt-dl.org/latest/version').read().decode('utf-8')
@ -2343,7 +2343,7 @@ class HaruhiDL(object):
debuglevel = 1 if self.params.get('debug_printtraffic') else 0
https_handler = make_HTTPS_handler(self.params, debuglevel=debuglevel)
ydlh = HaruhiDLHandler(self.params, debuglevel=debuglevel)
hdlh = HaruhiDLHandler(self.params, debuglevel=debuglevel)
redirect_handler = HaruhiDLRedirectHandler()
data_handler = compat_urllib_request_DataHandler()
@ -2358,7 +2358,7 @@ class HaruhiDL(object):
file_handler.file_open = file_open
opener = compat_urllib_request.build_opener(
proxy_handler, https_handler, cookie_processor, ydlh, redirect_handler, data_handler, file_handler)
proxy_handler, https_handler, cookie_processor, hdlh, redirect_handler, data_handler, file_handler)
# Delete the default user-agent header, which would otherwise apply in
# cases where our custom HTTP handler doesn't come into play


@ -314,7 +314,7 @@ def _real_main(argv=None):
None if opts.match_filter is None
else match_filter_func(opts.match_filter))
ydl_opts = {
hdl_opts = {
'usenetrc': opts.usenetrc,
'username': opts.username,
'password': opts.password,
@ -438,32 +438,32 @@ def _real_main(argv=None):
'usetitle': opts.usetitle if opts.usetitle is True else None,
}
with HaruhiDL(ydl_opts) as ydl:
with HaruhiDL(hdl_opts) as hdl:
# Update version
if opts.update_self:
update_self(ydl.to_screen, opts.verbose, ydl._opener)
update_self(hdl.to_screen, opts.verbose, hdl._opener)
# Remove cache dir
if opts.rm_cachedir:
ydl.cache.remove()
hdl.cache.remove()
# Maybe do nothing
if (len(all_urls) < 1) and (opts.load_info_filename is None):
if opts.update_self or opts.rm_cachedir:
sys.exit()
ydl.warn_if_short_id(sys.argv[1:] if argv is None else argv)
hdl.warn_if_short_id(sys.argv[1:] if argv is None else argv)
parser.error(
'You must provide at least one URL.\n'
'Type haruhi-dl --help to see a list of all options.')
try:
if opts.load_info_filename is not None:
retcode = ydl.download_with_info_file(expand_path(opts.load_info_filename))
retcode = hdl.download_with_info_file(expand_path(opts.load_info_filename))
else:
retcode = ydl.download(all_urls)
retcode = hdl.download(all_urls)
except MaxDownloadsReached:
ydl.to_screen('--max-download limit reached, aborting.')
hdl.to_screen('--max-download limit reached, aborting.')
retcode = 101
sys.exit(retcode)


@ -16,11 +16,11 @@ from .utils import (
class Cache(object):
def __init__(self, ydl):
self._ydl = ydl
def __init__(self, hdl):
self._hdl = hdl
def _get_root_dir(self):
res = self._ydl.params.get('cachedir')
res = self._hdl.params.get('cachedir')
if res is None:
cache_root = compat_getenv('XDG_CACHE_HOME', '~/.cache')
res = os.path.join(cache_root, 'haruhi-dl')
@ -35,7 +35,7 @@ class Cache(object):
@property
def enabled(self):
return self._ydl.params.get('cachedir') is not False
return self._hdl.params.get('cachedir') is not False
def store(self, section, key, data, dtype='json'):
assert dtype in ('json',)
@ -53,7 +53,7 @@ class Cache(object):
write_json_file(data, fn)
except Exception:
tb = traceback.format_exc()
self._ydl.report_warning(
self._hdl.report_warning(
'Writing cache to %r failed: %s' % (fn, tb))
def load(self, section, key, dtype='json', default=None):
@ -72,7 +72,7 @@ class Cache(object):
file_size = os.path.getsize(cache_fn)
except (OSError, IOError) as oe:
file_size = str(oe)
self._ydl.report_warning(
self._hdl.report_warning(
'Cache retrieval from %s failed (%s)' % (cache_fn, file_size))
except IOError:
pass # No cache available
@ -81,16 +81,16 @@ class Cache(object):
def remove(self):
if not self.enabled:
self._ydl.to_screen('Cache is disabled (Did you combine --no-cache-dir and --rm-cache-dir?)')
self._hdl.to_screen('Cache is disabled (Did you combine --no-cache-dir and --rm-cache-dir?)')
return
cachedir = self._get_root_dir()
if not any((term in cachedir) for term in ('cache', 'tmp')):
raise Exception('Not removing directory %s - this does not look like a cache dir' % cachedir)
self._ydl.to_screen(
self._hdl.to_screen(
'Removing cache dir %s .' % cachedir, skip_eol=True)
if os.path.exists(cachedir):
self._ydl.to_screen('.', skip_eol=True)
self._hdl.to_screen('.', skip_eol=True)
shutil.rmtree(cachedir)
self._ydl.to_screen('.')
self._hdl.to_screen('.')


@ -58,9 +58,9 @@ class FileDownloader(object):
_TEST_FILE_SIZE = 10241
params = None
def __init__(self, ydl, params):
def __init__(self, hdl, params):
"""Create a FileDownloader object with the given options."""
self.ydl = ydl
self.hdl = hdl
self._progress_hooks = []
self.params = params
self.add_progress_hook(self.report_progress)
@ -147,22 +147,22 @@ class FileDownloader(object):
return int(round(number * multiplier))
def to_screen(self, *args, **kargs):
self.ydl.to_screen(*args, **kargs)
self.hdl.to_screen(*args, **kargs)
def to_stderr(self, message):
self.ydl.to_screen(message)
self.hdl.to_screen(message)
def to_console_title(self, message):
self.ydl.to_console_title(message)
self.hdl.to_console_title(message)
def trouble(self, *args, **kargs):
self.ydl.trouble(*args, **kargs)
self.hdl.trouble(*args, **kargs)
def report_warning(self, *args, **kargs):
self.ydl.report_warning(*args, **kargs)
self.hdl.report_warning(*args, **kargs)
def report_error(self, *args, **kargs):
self.ydl.report_error(*args, **kargs)
self.hdl.report_error(*args, **kargs)
def slow_down(self, start_time, now, byte_counter):
"""Sleep if the download speed is over the rate limit."""


@ -279,7 +279,7 @@ class F4mFD(FragmentFD):
return media
def _get_bootstrap_from_url(self, bootstrap_url):
bootstrap = self.ydl.urlopen(bootstrap_url).read()
bootstrap = self.hdl.urlopen(bootstrap_url).read()
return read_bootstrap_info(bootstrap)
def _update_live_fragments(self, bootstrap_url, latest_fragment):
@ -321,7 +321,7 @@ class F4mFD(FragmentFD):
requested_bitrate = info_dict.get('tbr')
self.to_screen('[%s] Downloading f4m manifest' % self.FD_NAME)
urlh = self.ydl.urlopen(self._prepare_url(info_dict, man_url))
urlh = self.hdl.urlopen(self._prepare_url(info_dict, man_url))
man_url = urlh.geturl()
# Some manifests may be malformed, e.g. prosiebensat1 generated manifests
# (see https://github.com/ytdl-org/haruhi-dl/issues/6215#issuecomment-121704244


@ -134,7 +134,7 @@ class FragmentFD(FileDownloader):
'[%s] Total fragments: %s' % (self.FD_NAME, total_frags_str))
self.report_destination(ctx['filename'])
dl = HttpQuietDownloader(
self.ydl,
self.hdl,
{
'continuedl': True,
'quiet': True,


@ -59,7 +59,7 @@ class HlsFD(FragmentFD):
man_url = info_dict['url']
self.to_screen('[%s] Downloading m3u8 manifest' % self.FD_NAME)
urlh = self.ydl.urlopen(self._prepare_url(info_dict, man_url))
urlh = self.hdl.urlopen(self._prepare_url(info_dict, man_url))
man_url = urlh.geturl()
s = urlh.read().decode('utf-8', 'ignore')
@ -70,7 +70,7 @@ class HlsFD(FragmentFD):
self.report_warning(
'hlsnative has detected features it does not support, '
'extraction will be delegated to ffmpeg')
fd = FFmpegFD(self.ydl, self.params)
fd = FFmpegFD(self.hdl, self.params)
for ph in self._progress_hooks:
fd.add_progress_hook(ph)
return fd.real_download(filename, info_dict)
@ -168,7 +168,7 @@ class HlsFD(FragmentFD):
return False
if decrypt_info['METHOD'] == 'AES-128':
iv = decrypt_info.get('IV') or compat_struct_pack('>8xq', media_sequence)
decrypt_info['KEY'] = decrypt_info.get('KEY') or self.ydl.urlopen(
decrypt_info['KEY'] = decrypt_info.get('KEY') or self.hdl.urlopen(
self._prepare_url(info_dict, info_dict.get('_decryption_key_url') or decrypt_info['URI'])).read()
frag_content = AES.new(
decrypt_info['KEY'], AES.MODE_CBC, iv).decrypt(frag_content)


@ -107,7 +107,7 @@ class HttpFD(FileDownloader):
# Establish connection
try:
try:
ctx.data = self.ydl.urlopen(request)
ctx.data = self.hdl.urlopen(request)
except (compat_urllib_error.URLError, ) as err:
if isinstance(err.reason, socket.timeout):
raise RetryDownload(err)
@ -149,7 +149,7 @@ class HttpFD(FileDownloader):
# Unable to resume (requested range not satisfiable)
try:
# Open the connection again without the range header
ctx.data = self.ydl.urlopen(
ctx.data = self.hdl.urlopen(
sanitized_Request(url, None, headers))
content_length = ctx.data.info()['Content-Length']
except (compat_urllib_error.HTTPError, ) as err:


@ -938,9 +938,9 @@ class YoutubeIE(YoutubeBaseInfoExtractor):
},
{
# Multifeed video with comma in title (see https://github.com/ytdl-org/haruhi-dl/issues/8536)
'url': 'https://www.youtube.com/watch?v=gVfLd0zydlo',
'url': 'https://www.youtube.com/watch?v=gVfLd0zhdlo',
'info_dict': {
'id': 'gVfLd0zydlo',
'id': 'gVfLd0zhdlo',
'title': 'DevConf.cz 2016 Day 2 Workshops 1 14:00 - 15:30',
},
'playlist_count': 2,
@ -2629,7 +2629,7 @@ class YoutubePlaylistIE(YoutubePlaylistBaseInfoExtractor):
}, {
'url': 'PLtPgu7CB4gbY9oDN3drwC3cMbJggS7dKl',
'info_dict': {
'title': 'YDL_safe_search',
'title': 'HDL_safe_search',
'id': 'PLtPgu7CB4gbY9oDN3drwC3cMbJggS7dKl',
},
'playlist_count': 2,


@ -2470,14 +2470,14 @@ class XAttrUnavailableError(HaruhiDLError):
pass
def _create_http_connection(ydl_handler, http_class, is_https, *args, **kwargs):
def _create_http_connection(hdl_handler, http_class, is_https, *args, **kwargs):
# Working around python 2 bug (see http://bugs.python.org/issue17849) by limiting
# expected HTTP responses to meet HTTP/1.0 or later (see also
# https://github.com/ytdl-org/haruhi-dl/issues/6727)
if sys.version_info < (3, 0):
kwargs['strict'] = True
hc = http_class(*args, **compat_kwargs(kwargs))
source_address = ydl_handler._params.get('source_address')
source_address = hdl_handler._params.get('source_address')
if source_address is not None:
# This is to workaround _create_connection() from socket where it will try all


@ -1,3 +1,3 @@
from __future__ import unicode_literals
__version__ = '2020.09.20'
__version__ = '2020.11.01'


@ -61,12 +61,12 @@ def report_warning(message):
sys.stderr.write(output)
class FakeYDL(HaruhiDL):
class FakeHDL(HaruhiDL):
def __init__(self, override=None):
# Different instances of the downloader can't share the same dictionary
# some test set the "sublang" parameter, which would break the md5 checks.
params = get_params(override=override)
super(FakeYDL, self).__init__(params, auto_init=False)
super(FakeHDL, self).__init__(params, auto_init=False)
self.result = []
def to_screen(self, s, skip_eol=None):
@ -263,14 +263,14 @@ def assertEqual(self, got, expected, msg=None):
self.assertTrue(got == expected, msg)
def expect_warnings(ydl, warnings_re):
real_warning = ydl.report_warning
def expect_warnings(hdl, warnings_re):
real_warning = hdl.report_warning
def _report_warning(w):
if not any(re.search(w_re, w) for w_re in warnings_re):
real_warning(w)
ydl.report_warning = _report_warning
hdl.report_warning = _report_warning
def http_server_port(httpd):


@ -1 +0,0 @@
*.swf


@ -1,19 +0,0 @@
// input: [["a", "b", "c", "d"]]
// output: ["c", "b", "a", "d"]
package {
public class ArrayAccess {
public static function main(ar:Array):Array {
var aa:ArrayAccess = new ArrayAccess();
return aa.f(ar, 2);
}
private function f(ar:Array, num:Number):Array{
var x:String = ar[0];
var y:String = ar[num % ar.length];
ar[0] = y;
ar[num] = x;
return ar;
}
}
}


@ -1,17 +0,0 @@
// input: []
// output: 121
package {
public class ClassCall {
public static function main():int{
var f:OtherClass = new OtherClass();
return f.func(100,20);
}
}
}
class OtherClass {
public function func(x: int, y: int):int {
return x+y+1;
}
}


@ -1,15 +0,0 @@
// input: []
// output: 0
package {
public class ClassConstruction {
public static function main():int{
var f:Foo = new Foo();
return 0;
}
}
}
class Foo {
}


@ -1,18 +0,0 @@
// input: []
// output: 4
package {
public class ConstArrayAccess {
private static const x:int = 2;
private static const ar:Array = ["42", "3411"];
public static function main():int{
var c:ConstArrayAccess = new ConstArrayAccess();
return c.f();
}
public function f(): int {
return ar[1].length;
}
}
}


@ -1,12 +0,0 @@
// input: []
// output: 2
package {
public class ConstantInt {
private static const x:int = 2;
public static function main():int{
return x;
}
}
}


@ -1,10 +0,0 @@
// input: [{"x": 1, "y": 2}]
// output: 3
package {
public class DictCall {
public static function main(d:Object):int{
return d.x + d.y;
}
}
}


@ -1,10 +0,0 @@
// input: []
// output: false
package {
public class EqualsOperator {
public static function main():Boolean{
return 1 == 2;
}
}
}


@ -1,13 +0,0 @@
// input: [1, 2]
// output: 3
package {
public class LocalVars {
public static function main(a:int, b:int):int{
var c:int = a + b + b;
var d:int = c - b;
var e:int = d;
return e;
}
}
}


@ -1,22 +0,0 @@
// input: [1]
// output: 2
package {
public class MemberAssignment {
public var v:int;
public function g():int {
return this.v;
}
public function f(a:int):int{
this.v = a;
return this.v + this.g();
}
public static function main(a:int): int {
var v:MemberAssignment = new MemberAssignment();
return v.f(a);
}
}
}


@ -1,24 +0,0 @@
// input: []
// output: 123
package {
public class NeOperator {
public static function main(): int {
var res:int = 0;
if (1 != 2) {
res += 3;
} else {
res += 4;
}
if (2 != 2) {
res += 10;
} else {
res += 20;
}
if (9 == 9) {
res += 100;
}
return res;
}
}
}


@ -1,21 +0,0 @@
// input: []
// output: 9
package {
public class PrivateCall {
public static function main():int{
var f:OtherClass = new OtherClass();
return f.func();
}
}
}
class OtherClass {
private function pf():int {
return 9;
}
public function func():int {
return this.pf();
}
}


@ -1,22 +0,0 @@
// input: []
// output: 9
package {
public class PrivateVoidCall {
public static function main():int{
var f:OtherClass = new OtherClass();
f.func();
return 9;
}
}
}
class OtherClass {
private function pf():void {
;
}
public function func():void {
this.pf();
}
}


@ -1,13 +0,0 @@
// input: [1]
// output: 1
package {
public class StaticAssignment {
public static var v:int;
public static function main(a:int):int{
v = a;
return v;
}
}
}


@ -1,16 +0,0 @@
// input: []
// output: 1
package {
public class StaticRetrieval {
public static var v:int;
public static function main():int{
if (v) {
return 0;
} else {
return 1;
}
}
}
}


@ -1,11 +0,0 @@
// input: []
// output: 3
package {
public class StringBasics {
public static function main():int{
var s:String = "abc";
return s.length;
}
}
}


@ -1,11 +0,0 @@
// input: []
// output: 9897
package {
public class StringCharCodeAt {
public static function main():int{
var s:String = "abc";
return s.charCodeAt(1) * 100 + s.charCodeAt();
}
}
}


@ -1,11 +0,0 @@
// input: []
// output: 2
package {
public class StringConversion {
public static function main():int{
var s:String = String(99);
return s.length;
}
}
}


@ -11,7 +11,7 @@ sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import copy
from test.helper import FakeYDL, assertRegexpMatches
from test.helper import FakeHDL, assertRegexpMatches
from haruhi_dl import HaruhiDL
from haruhi_dl.compat import compat_str, compat_urllib_error
from haruhi_dl.extractor import YoutubeIE
@ -22,9 +22,9 @@ from haruhi_dl.utils import ExtractorError, match_filter_func
TEST_URL = 'http://localhost/sample.mp4'
class YDL(FakeYDL):
class HDL(FakeHDL):
def __init__(self, *args, **kwargs):
super(YDL, self).__init__(*args, **kwargs)
super(HDL, self).__init__(*args, **kwargs)
self.downloaded_info_dicts = []
self.msgs = []
@ -50,59 +50,59 @@ def _make_result(formats, **kwargs):
class TestFormatSelection(unittest.TestCase):
def test_prefer_free_formats(self):
# Same resolution => download webm
ydl = YDL()
ydl.params['prefer_free_formats'] = True
hdl = HDL()
hdl.params['prefer_free_formats'] = True
formats = [
{'ext': 'webm', 'height': 460, 'url': TEST_URL},
{'ext': 'mp4', 'height': 460, 'url': TEST_URL},
]
info_dict = _make_result(formats)
yie = YoutubeIE(ydl)
yie = YoutubeIE(hdl)
yie._sort_formats(info_dict['formats'])
ydl.process_ie_result(info_dict)
downloaded = ydl.downloaded_info_dicts[0]
hdl.process_ie_result(info_dict)
downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['ext'], 'webm')
# Different resolution => download best quality (mp4)
ydl = YDL()
ydl.params['prefer_free_formats'] = True
hdl = HDL()
hdl.params['prefer_free_formats'] = True
formats = [
{'ext': 'webm', 'height': 720, 'url': TEST_URL},
{'ext': 'mp4', 'height': 1080, 'url': TEST_URL},
]
info_dict['formats'] = formats
yie = YoutubeIE(ydl)
yie = YoutubeIE(hdl)
yie._sort_formats(info_dict['formats'])
ydl.process_ie_result(info_dict)
downloaded = ydl.downloaded_info_dicts[0]
hdl.process_ie_result(info_dict)
downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['ext'], 'mp4')
# No prefer_free_formats => prefer mp4 and flv for greater compatibility
ydl = YDL()
ydl.params['prefer_free_formats'] = False
hdl = HDL()
hdl.params['prefer_free_formats'] = False
formats = [
{'ext': 'webm', 'height': 720, 'url': TEST_URL},
{'ext': 'mp4', 'height': 720, 'url': TEST_URL},
{'ext': 'flv', 'height': 720, 'url': TEST_URL},
]
info_dict['formats'] = formats
yie = YoutubeIE(ydl)
yie = YoutubeIE(hdl)
yie._sort_formats(info_dict['formats'])
ydl.process_ie_result(info_dict)
downloaded = ydl.downloaded_info_dicts[0]
hdl.process_ie_result(info_dict)
downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['ext'], 'mp4')
ydl = YDL()
ydl.params['prefer_free_formats'] = False
hdl = HDL()
hdl.params['prefer_free_formats'] = False
formats = [
{'ext': 'flv', 'height': 720, 'url': TEST_URL},
{'ext': 'webm', 'height': 720, 'url': TEST_URL},
]
info_dict['formats'] = formats
yie = YoutubeIE(ydl)
yie = YoutubeIE(hdl)
yie._sort_formats(info_dict['formats'])
ydl.process_ie_result(info_dict)
downloaded = ydl.downloaded_info_dicts[0]
hdl.process_ie_result(info_dict)
downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['ext'], 'flv')
def test_format_selection(self):
@ -115,34 +115,34 @@ class TestFormatSelection(unittest.TestCase):
]
info_dict = _make_result(formats)
ydl = YDL({'format': '20/47'})
ydl.process_ie_result(info_dict.copy())
downloaded = ydl.downloaded_info_dicts[0]
hdl = HDL({'format': '20/47'})
hdl.process_ie_result(info_dict.copy())
downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], '47')
ydl = YDL({'format': '20/71/worst'})
ydl.process_ie_result(info_dict.copy())
downloaded = ydl.downloaded_info_dicts[0]
hdl = HDL({'format': '20/71/worst'})
hdl.process_ie_result(info_dict.copy())
downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], '35')
ydl = YDL()
ydl.process_ie_result(info_dict.copy())
downloaded = ydl.downloaded_info_dicts[0]
hdl = HDL()
hdl.process_ie_result(info_dict.copy())
downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], '2')
ydl = YDL({'format': 'webm/mp4'})
ydl.process_ie_result(info_dict.copy())
downloaded = ydl.downloaded_info_dicts[0]
hdl = HDL({'format': 'webm/mp4'})
hdl.process_ie_result(info_dict.copy())
downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], '47')
ydl = YDL({'format': '3gp/40/mp4'})
ydl.process_ie_result(info_dict.copy())
downloaded = ydl.downloaded_info_dicts[0]
hdl = HDL({'format': '3gp/40/mp4'})
hdl.process_ie_result(info_dict.copy())
downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], '35')
ydl = YDL({'format': 'example-with-dashes'})
ydl.process_ie_result(info_dict.copy())
downloaded = ydl.downloaded_info_dicts[0]
hdl = HDL({'format': 'example-with-dashes'})
hdl.process_ie_result(info_dict.copy())
downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'example-with-dashes')
def test_format_selection_audio(self):
@ -154,14 +154,14 @@ class TestFormatSelection(unittest.TestCase):
]
info_dict = _make_result(formats)
ydl = YDL({'format': 'bestaudio'})
ydl.process_ie_result(info_dict.copy())
downloaded = ydl.downloaded_info_dicts[0]
hdl = HDL({'format': 'bestaudio'})
hdl.process_ie_result(info_dict.copy())
downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'audio-high')
ydl = YDL({'format': 'worstaudio'})
ydl.process_ie_result(info_dict.copy())
downloaded = ydl.downloaded_info_dicts[0]
hdl = HDL({'format': 'worstaudio'})
hdl.process_ie_result(info_dict.copy())
downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'audio-low')
formats = [
@ -170,9 +170,9 @@ class TestFormatSelection(unittest.TestCase):
]
info_dict = _make_result(formats)
ydl = YDL({'format': 'bestaudio/worstaudio/best'})
ydl.process_ie_result(info_dict.copy())
downloaded = ydl.downloaded_info_dicts[0]
hdl = HDL({'format': 'bestaudio/worstaudio/best'})
hdl.process_ie_result(info_dict.copy())
downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'vid-high')
def test_format_selection_audio_exts(self):
@ -185,25 +185,25 @@ class TestFormatSelection(unittest.TestCase):
]
info_dict = _make_result(formats)
ydl = YDL({'format': 'best'})
ie = YoutubeIE(ydl)
hdl = HDL({'format': 'best'})
ie = YoutubeIE(hdl)
ie._sort_formats(info_dict['formats'])
ydl.process_ie_result(copy.deepcopy(info_dict))
downloaded = ydl.downloaded_info_dicts[0]
hdl.process_ie_result(copy.deepcopy(info_dict))
downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'aac-64')
ydl = YDL({'format': 'mp3'})
ie = YoutubeIE(ydl)
hdl = HDL({'format': 'mp3'})
ie = YoutubeIE(hdl)
ie._sort_formats(info_dict['formats'])
ydl.process_ie_result(copy.deepcopy(info_dict))
downloaded = ydl.downloaded_info_dicts[0]
hdl.process_ie_result(copy.deepcopy(info_dict))
downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'mp3-64')
ydl = YDL({'prefer_free_formats': True})
ie = YoutubeIE(ydl)
hdl = HDL({'prefer_free_formats': True})
ie = YoutubeIE(hdl)
ie._sort_formats(info_dict['formats'])
ydl.process_ie_result(copy.deepcopy(info_dict))
downloaded = ydl.downloaded_info_dicts[0]
hdl.process_ie_result(copy.deepcopy(info_dict))
downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'ogg-64')
def test_format_selection_video(self):
@ -214,19 +214,19 @@ class TestFormatSelection(unittest.TestCase):
]
info_dict = _make_result(formats)
ydl = YDL({'format': 'bestvideo'})
ydl.process_ie_result(info_dict.copy())
downloaded = ydl.downloaded_info_dicts[0]
hdl = HDL({'format': 'bestvideo'})
hdl.process_ie_result(info_dict.copy())
downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'dash-video-high')
ydl = YDL({'format': 'worstvideo'})
ydl.process_ie_result(info_dict.copy())
downloaded = ydl.downloaded_info_dicts[0]
hdl = HDL({'format': 'worstvideo'})
hdl.process_ie_result(info_dict.copy())
downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'dash-video-low')
ydl = YDL({'format': 'bestvideo[format_id^=dash][format_id$=low]'})
ydl.process_ie_result(info_dict.copy())
downloaded = ydl.downloaded_info_dicts[0]
hdl = HDL({'format': 'bestvideo[format_id^=dash][format_id$=low]'})
hdl.process_ie_result(info_dict.copy())
downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'dash-video-low')
formats = [
@ -234,9 +234,9 @@ class TestFormatSelection(unittest.TestCase):
]
info_dict = _make_result(formats)
ydl = YDL({'format': 'bestvideo[vcodec=avc1.123456]'})
ydl.process_ie_result(info_dict.copy())
downloaded = ydl.downloaded_info_dicts[0]
hdl = HDL({'format': 'bestvideo[vcodec=avc1.123456]'})
hdl.process_ie_result(info_dict.copy())
downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'vid-vcodec-dot')
def test_format_selection_string_ops(self):
@ -247,67 +247,67 @@ class TestFormatSelection(unittest.TestCase):
info_dict = _make_result(formats)
# equals (=)
ydl = YDL({'format': '[format_id=abc-cba]'})
ydl.process_ie_result(info_dict.copy())
downloaded = ydl.downloaded_info_dicts[0]
hdl = HDL({'format': '[format_id=abc-cba]'})
hdl.process_ie_result(info_dict.copy())
downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'abc-cba')
# does not equal (!=)
ydl = YDL({'format': '[format_id!=abc-cba]'})
ydl.process_ie_result(info_dict.copy())
downloaded = ydl.downloaded_info_dicts[0]
hdl = HDL({'format': '[format_id!=abc-cba]'})
hdl.process_ie_result(info_dict.copy())
downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'zxc-cxz')
ydl = YDL({'format': '[format_id!=abc-cba][format_id!=zxc-cxz]'})
self.assertRaises(ExtractorError, ydl.process_ie_result, info_dict.copy())
hdl = HDL({'format': '[format_id!=abc-cba][format_id!=zxc-cxz]'})
self.assertRaises(ExtractorError, hdl.process_ie_result, info_dict.copy())
# starts with (^=)
ydl = YDL({'format': '[format_id^=abc]'})
ydl.process_ie_result(info_dict.copy())
downloaded = ydl.downloaded_info_dicts[0]
hdl = HDL({'format': '[format_id^=abc]'})
hdl.process_ie_result(info_dict.copy())
downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'abc-cba')
# does not start with (!^=)
ydl = YDL({'format': '[format_id!^=abc]'})
ydl.process_ie_result(info_dict.copy())
downloaded = ydl.downloaded_info_dicts[0]
hdl = HDL({'format': '[format_id!^=abc]'})
hdl.process_ie_result(info_dict.copy())
downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'zxc-cxz')
ydl = YDL({'format': '[format_id!^=abc][format_id!^=zxc]'})
self.assertRaises(ExtractorError, ydl.process_ie_result, info_dict.copy())
hdl = HDL({'format': '[format_id!^=abc][format_id!^=zxc]'})
self.assertRaises(ExtractorError, hdl.process_ie_result, info_dict.copy())
# ends with ($=)
ydl = YDL({'format': '[format_id$=cba]'})
ydl.process_ie_result(info_dict.copy())
downloaded = ydl.downloaded_info_dicts[0]
hdl = HDL({'format': '[format_id$=cba]'})
hdl.process_ie_result(info_dict.copy())
downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'abc-cba')
# does not end with (!$=)
ydl = YDL({'format': '[format_id!$=cba]'})
ydl.process_ie_result(info_dict.copy())
downloaded = ydl.downloaded_info_dicts[0]
hdl = HDL({'format': '[format_id!$=cba]'})
hdl.process_ie_result(info_dict.copy())
downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'zxc-cxz')
ydl = YDL({'format': '[format_id!$=cba][format_id!$=cxz]'})
self.assertRaises(ExtractorError, ydl.process_ie_result, info_dict.copy())
hdl = HDL({'format': '[format_id!$=cba][format_id!$=cxz]'})
self.assertRaises(ExtractorError, hdl.process_ie_result, info_dict.copy())
# contains (*=)
ydl = YDL({'format': '[format_id*=bc-cb]'})
ydl.process_ie_result(info_dict.copy())
downloaded = ydl.downloaded_info_dicts[0]
hdl = HDL({'format': '[format_id*=bc-cb]'})
hdl.process_ie_result(info_dict.copy())
downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'abc-cba')
# does not contain (!*=)
ydl = YDL({'format': '[format_id!*=bc-cb]'})
ydl.process_ie_result(info_dict.copy())
downloaded = ydl.downloaded_info_dicts[0]
hdl = HDL({'format': '[format_id!*=bc-cb]'})
hdl.process_ie_result(info_dict.copy())
downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'zxc-cxz')
ydl = YDL({'format': '[format_id!*=abc][format_id!*=zxc]'})
self.assertRaises(ExtractorError, ydl.process_ie_result, info_dict.copy())
hdl = HDL({'format': '[format_id!*=abc][format_id!*=zxc]'})
self.assertRaises(ExtractorError, hdl.process_ie_result, info_dict.copy())
ydl = YDL({'format': '[format_id!*=-]'})
self.assertRaises(ExtractorError, ydl.process_ie_result, info_dict.copy())
hdl = HDL({'format': '[format_id!*=-]'})
self.assertRaises(ExtractorError, hdl.process_ie_result, info_dict.copy())
def test_youtube_format_selection(self):
order = [
@ -342,69 +342,69 @@ class TestFormatSelection(unittest.TestCase):
formats_order = [format_info(f_id) for f_id in order]
info_dict = _make_result(list(formats_order), extractor='youtube')
ydl = YDL({'format': 'bestvideo+bestaudio'})
yie = YoutubeIE(ydl)
hdl = HDL({'format': 'bestvideo+bestaudio'})
yie = YoutubeIE(hdl)
yie._sort_formats(info_dict['formats'])
ydl.process_ie_result(info_dict)
downloaded = ydl.downloaded_info_dicts[0]
hdl.process_ie_result(info_dict)
downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], '137+141')
self.assertEqual(downloaded['ext'], 'mp4')
info_dict = _make_result(list(formats_order), extractor='youtube')
ydl = YDL({'format': 'bestvideo[height>=999999]+bestaudio/best'})
yie = YoutubeIE(ydl)
hdl = HDL({'format': 'bestvideo[height>=999999]+bestaudio/best'})
yie = YoutubeIE(hdl)
yie._sort_formats(info_dict['formats'])
ydl.process_ie_result(info_dict)
downloaded = ydl.downloaded_info_dicts[0]
hdl.process_ie_result(info_dict)
downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], '38')
info_dict = _make_result(list(formats_order), extractor='youtube')
ydl = YDL({'format': 'bestvideo/best,bestaudio'})
yie = YoutubeIE(ydl)
hdl = HDL({'format': 'bestvideo/best,bestaudio'})
yie = YoutubeIE(hdl)
yie._sort_formats(info_dict['formats'])
ydl.process_ie_result(info_dict)
downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
hdl.process_ie_result(info_dict)
downloaded_ids = [info['format_id'] for info in hdl.downloaded_info_dicts]
self.assertEqual(downloaded_ids, ['137', '141'])
info_dict = _make_result(list(formats_order), extractor='youtube')
ydl = YDL({'format': '(bestvideo[ext=mp4],bestvideo[ext=webm])+bestaudio'})
yie = YoutubeIE(ydl)
hdl = HDL({'format': '(bestvideo[ext=mp4],bestvideo[ext=webm])+bestaudio'})
yie = YoutubeIE(hdl)
yie._sort_formats(info_dict['formats'])
ydl.process_ie_result(info_dict)
downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
hdl.process_ie_result(info_dict)
downloaded_ids = [info['format_id'] for info in hdl.downloaded_info_dicts]
self.assertEqual(downloaded_ids, ['137+141', '248+141'])
info_dict = _make_result(list(formats_order), extractor='youtube')
ydl = YDL({'format': '(bestvideo[ext=mp4],bestvideo[ext=webm])[height<=720]+bestaudio'})
yie = YoutubeIE(ydl)
hdl = HDL({'format': '(bestvideo[ext=mp4],bestvideo[ext=webm])[height<=720]+bestaudio'})
yie = YoutubeIE(hdl)
yie._sort_formats(info_dict['formats'])
ydl.process_ie_result(info_dict)
downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
hdl.process_ie_result(info_dict)
downloaded_ids = [info['format_id'] for info in hdl.downloaded_info_dicts]
self.assertEqual(downloaded_ids, ['136+141', '247+141'])
info_dict = _make_result(list(formats_order), extractor='youtube')
ydl = YDL({'format': '(bestvideo[ext=none]/bestvideo[ext=webm])+bestaudio'})
yie = YoutubeIE(ydl)
hdl = HDL({'format': '(bestvideo[ext=none]/bestvideo[ext=webm])+bestaudio'})
yie = YoutubeIE(hdl)
yie._sort_formats(info_dict['formats'])
ydl.process_ie_result(info_dict)
downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
hdl.process_ie_result(info_dict)
downloaded_ids = [info['format_id'] for info in hdl.downloaded_info_dicts]
self.assertEqual(downloaded_ids, ['248+141'])
for f1, f2 in zip(formats_order, formats_order[1:]):
info_dict = _make_result([f1, f2], extractor='youtube')
ydl = YDL({'format': 'best/bestvideo'})
yie = YoutubeIE(ydl)
hdl = HDL({'format': 'best/bestvideo'})
yie = YoutubeIE(hdl)
yie._sort_formats(info_dict['formats'])
ydl.process_ie_result(info_dict)
downloaded = ydl.downloaded_info_dicts[0]
hdl.process_ie_result(info_dict)
downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], f1['format_id'])
info_dict = _make_result([f2, f1], extractor='youtube')
ydl = YDL({'format': 'best/bestvideo'})
yie = YoutubeIE(ydl)
hdl = HDL({'format': 'best/bestvideo'})
yie = YoutubeIE(hdl)
yie._sort_formats(info_dict['formats'])
ydl.process_ie_result(info_dict)
downloaded = ydl.downloaded_info_dicts[0]
hdl.process_ie_result(info_dict)
downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], f1['format_id'])
def test_audio_only_extractor_format_selection(self):
@ -418,14 +418,14 @@ class TestFormatSelection(unittest.TestCase):
]
info_dict = _make_result(formats)
ydl = YDL({'format': 'best'})
ydl.process_ie_result(info_dict.copy())
downloaded = ydl.downloaded_info_dicts[0]
hdl = HDL({'format': 'best'})
hdl.process_ie_result(info_dict.copy())
downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'high')
ydl = YDL({'format': 'worst'})
ydl.process_ie_result(info_dict.copy())
downloaded = ydl.downloaded_info_dicts[0]
hdl = HDL({'format': 'worst'})
hdl.process_ie_result(info_dict.copy())
downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'low')
def test_format_not_available(self):
@ -438,8 +438,8 @@ class TestFormatSelection(unittest.TestCase):
# This must fail since complete video-audio format does not match filter
# and extractor does not provide incomplete only formats (i.e. only
# video-only or audio-only).
ydl = YDL({'format': 'best[height>360]'})
self.assertRaises(ExtractorError, ydl.process_ie_result, info_dict.copy())
hdl = HDL({'format': 'best[height>360]'})
self.assertRaises(ExtractorError, hdl.process_ie_result, info_dict.copy())
def test_format_selection_issue_10083(self):
# See https://github.com/ytdl-org/haruhi-dl/issues/10083
@ -450,15 +450,15 @@ class TestFormatSelection(unittest.TestCase):
]
info_dict = _make_result(formats)
ydl = YDL({'format': 'best[height>360]/bestvideo[height>360]+bestaudio'})
ydl.process_ie_result(info_dict.copy())
self.assertEqual(ydl.downloaded_info_dicts[0]['format_id'], 'video+audio')
hdl = HDL({'format': 'best[height>360]/bestvideo[height>360]+bestaudio'})
hdl.process_ie_result(info_dict.copy())
self.assertEqual(hdl.downloaded_info_dicts[0]['format_id'], 'video+audio')
def test_invalid_format_specs(self):
def assert_syntax_error(format_spec):
ydl = YDL({'format': format_spec})
hdl = HDL({'format': format_spec})
info_dict = _make_result([{'format_id': 'foo', 'url': TEST_URL}])
self.assertRaises(SyntaxError, ydl.process_ie_result, info_dict)
self.assertRaises(SyntaxError, hdl.process_ie_result, info_dict)
assert_syntax_error('bestvideo,,best')
assert_syntax_error('+bestaudio')
@ -480,74 +480,74 @@ class TestFormatSelection(unittest.TestCase):
f['ext'] = 'unknown'
info_dict = _make_result(formats)
ydl = YDL({'format': 'best[filesize<3000]'})
ydl.process_ie_result(info_dict)
downloaded = ydl.downloaded_info_dicts[0]
hdl = HDL({'format': 'best[filesize<3000]'})
hdl.process_ie_result(info_dict)
downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'D')
ydl = YDL({'format': 'best[filesize<=3000]'})
ydl.process_ie_result(info_dict)
downloaded = ydl.downloaded_info_dicts[0]
hdl = HDL({'format': 'best[filesize<=3000]'})
hdl.process_ie_result(info_dict)
downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'E')
ydl = YDL({'format': 'best[filesize <= ? 3000]'})
ydl.process_ie_result(info_dict)
downloaded = ydl.downloaded_info_dicts[0]
hdl = HDL({'format': 'best[filesize <= ? 3000]'})
hdl.process_ie_result(info_dict)
downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'F')
ydl = YDL({'format': 'best [filesize = 1000] [width>450]'})
ydl.process_ie_result(info_dict)
downloaded = ydl.downloaded_info_dicts[0]
hdl = HDL({'format': 'best [filesize = 1000] [width>450]'})
hdl.process_ie_result(info_dict)
downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'B')
ydl = YDL({'format': 'best [filesize = 1000] [width!=450]'})
ydl.process_ie_result(info_dict)
downloaded = ydl.downloaded_info_dicts[0]
hdl = HDL({'format': 'best [filesize = 1000] [width!=450]'})
hdl.process_ie_result(info_dict)
downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'C')
ydl = YDL({'format': '[filesize>?1]'})
ydl.process_ie_result(info_dict)
downloaded = ydl.downloaded_info_dicts[0]
hdl = HDL({'format': '[filesize>?1]'})
hdl.process_ie_result(info_dict)
downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'G')
ydl = YDL({'format': '[filesize<1M]'})
ydl.process_ie_result(info_dict)
downloaded = ydl.downloaded_info_dicts[0]
hdl = HDL({'format': '[filesize<1M]'})
hdl.process_ie_result(info_dict)
downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'E')
ydl = YDL({'format': '[filesize<1MiB]'})
ydl.process_ie_result(info_dict)
downloaded = ydl.downloaded_info_dicts[0]
hdl = HDL({'format': '[filesize<1MiB]'})
hdl.process_ie_result(info_dict)
downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['format_id'], 'G')
ydl = YDL({'format': 'all[width>=400][width<=600]'})
ydl.process_ie_result(info_dict)
downloaded_ids = [info['format_id'] for info in ydl.downloaded_info_dicts]
hdl = HDL({'format': 'all[width>=400][width<=600]'})
hdl.process_ie_result(info_dict)
downloaded_ids = [info['format_id'] for info in hdl.downloaded_info_dicts]
self.assertEqual(downloaded_ids, ['B', 'C', 'D'])
ydl = YDL({'format': 'best[height<40]'})
hdl = HDL({'format': 'best[height<40]'})
try:
ydl.process_ie_result(info_dict)
hdl.process_ie_result(info_dict)
except ExtractorError:
pass
self.assertEqual(ydl.downloaded_info_dicts, [])
self.assertEqual(hdl.downloaded_info_dicts, [])
def test_default_format_spec(self):
ydl = YDL({'simulate': True})
self.assertEqual(ydl._default_format_spec({}), 'bestvideo+bestaudio/best')
hdl = HDL({'simulate': True})
self.assertEqual(hdl._default_format_spec({}), 'bestvideo+bestaudio/best')
ydl = YDL({})
self.assertEqual(ydl._default_format_spec({'is_live': True}), 'best/bestvideo+bestaudio')
hdl = HDL({})
self.assertEqual(hdl._default_format_spec({'is_live': True}), 'best/bestvideo+bestaudio')
ydl = YDL({'simulate': True})
self.assertEqual(ydl._default_format_spec({'is_live': True}), 'bestvideo+bestaudio/best')
hdl = HDL({'simulate': True})
self.assertEqual(hdl._default_format_spec({'is_live': True}), 'bestvideo+bestaudio/best')
ydl = YDL({'outtmpl': '-'})
self.assertEqual(ydl._default_format_spec({}), 'best/bestvideo+bestaudio')
hdl = HDL({'outtmpl': '-'})
self.assertEqual(hdl._default_format_spec({}), 'best/bestvideo+bestaudio')
ydl = YDL({})
self.assertEqual(ydl._default_format_spec({}, download=False), 'bestvideo+bestaudio/best')
self.assertEqual(ydl._default_format_spec({'is_live': True}), 'best/bestvideo+bestaudio')
hdl = HDL({})
self.assertEqual(hdl._default_format_spec({}, download=False), 'bestvideo+bestaudio/best')
self.assertEqual(hdl._default_format_spec({'is_live': True}), 'best/bestvideo+bestaudio')
class TestHaruhiDL(unittest.TestCase):
@ -571,9 +571,9 @@ class TestHaruhiDL(unittest.TestCase):
def get_info(params={}):
params.setdefault('simulate', True)
ydl = YDL(params)
ydl.report_warning = lambda *args, **kargs: None
return ydl.process_video_result(info_dict, download=False)
hdl = HDL(params)
hdl.report_warning = lambda *args, **kargs: None
return hdl.process_video_result(info_dict, download=False)
result = get_info()
self.assertFalse(result.get('requested_subtitles'))
@ -618,7 +618,7 @@ class TestHaruhiDL(unittest.TestCase):
'extractor': 'Bar',
'playlist': 'funny videos',
}
YDL.add_extra_info(test_dict, extra_info)
HDL.add_extra_info(test_dict, extra_info)
self.assertEqual(test_dict['extractor'], 'Foo')
self.assertEqual(test_dict['playlist'], 'funny videos')
@ -633,8 +633,8 @@ class TestHaruhiDL(unittest.TestCase):
}
def fname(templ):
ydl = HaruhiDL({'outtmpl': templ})
return ydl.prepare_filename(info)
hdl = HaruhiDL({'outtmpl': templ})
return hdl.prepare_filename(info)
self.assertEqual(fname('%(id)s.%(ext)s'), '1234.mp4')
self.assertEqual(fname('%(id)s-%(width)s.%(ext)s'), '1234-NA.mp4')
# Replace missing fields with 'NA'
@ -658,12 +658,12 @@ class TestHaruhiDL(unittest.TestCase):
self.assertEqual(fname('Hello %(title2)s'), 'Hello %PATH%')
def test_format_note(self):
ydl = HaruhiDL()
self.assertEqual(ydl._format_note({}), '')
assertRegexpMatches(self, ydl._format_note({
hdl = HaruhiDL()
self.assertEqual(hdl._format_note({}), '')
assertRegexpMatches(self, hdl._format_note({
'vbr': 10,
}), r'^\s*10k$')
assertRegexpMatches(self, ydl._format_note({
assertRegexpMatches(self, hdl._format_note({
'fps': 30,
}), r'^30fps$')
@ -680,9 +680,9 @@ class TestHaruhiDL(unittest.TestCase):
def run_pp(params, PP):
with open(filename, 'wt') as f:
f.write('EXAMPLE')
ydl = HaruhiDL(params)
ydl.add_post_processor(PP())
ydl.post_process(filename, {'filepath': filename})
hdl = HaruhiDL(params)
hdl.add_post_processor(PP())
hdl.post_process(filename, {'filepath': filename})
run_pp({'keepvideo': True}, SimplePP)
self.assertTrue(os.path.exists(filename), '%s doesn\'t exist' % filename)
@ -706,16 +706,16 @@ class TestHaruhiDL(unittest.TestCase):
os.unlink(filename)
def test_match_filter(self):
class FilterYDL(YDL):
class FilterHDL(HDL):
def __init__(self, *args, **kwargs):
super(FilterYDL, self).__init__(*args, **kwargs)
super(FilterHDL, self).__init__(*args, **kwargs)
self.params['simulate'] = True
def process_info(self, info_dict):
super(YDL, self).process_info(info_dict)
super(HDL, self).process_info(info_dict)
def _match_entry(self, info_dict, incomplete):
res = super(FilterYDL, self)._match_entry(info_dict, incomplete)
res = super(FilterHDL, self)._match_entry(info_dict, incomplete)
if res is None:
self.downloaded_info_dicts.append(info_dict)
return res
@ -745,10 +745,10 @@ class TestHaruhiDL(unittest.TestCase):
videos = [first, second]
def get_videos(filter_=None):
ydl = FilterYDL({'match_filter': filter_})
hdl = FilterHDL({'match_filter': filter_})
for v in videos:
ydl.process_ie_result(v, download=True)
return [v['id'] for v in ydl.downloaded_info_dicts]
hdl.process_ie_result(v, download=True)
return [v['id'] for v in hdl.downloaded_info_dicts]
res = get_videos()
self.assertEqual(res, ['1', '2'])
@ -817,11 +817,11 @@ class TestHaruhiDL(unittest.TestCase):
}
def get_downloaded_info_dicts(params):
ydl = YDL(params)
hdl = HDL(params)
# make a deep copy because the dictionary and nested entries
# can be modified
ydl.process_ie_result(copy.deepcopy(playlist))
return ydl.downloaded_info_dicts
hdl.process_ie_result(copy.deepcopy(playlist))
return hdl.downloaded_info_dicts
def get_ids(params):
return [int(v['id']) for v in get_downloaded_info_dicts(params)]
@ -874,11 +874,11 @@ class TestHaruhiDL(unittest.TestCase):
def test_urlopen_no_file_protocol(self):
# see https://github.com/ytdl-org/haruhi-dl/issues/8227
ydl = YDL()
self.assertRaises(compat_urllib_error.URLError, ydl.urlopen, 'file:///etc/passwd')
hdl = HDL()
self.assertRaises(compat_urllib_error.URLError, hdl.urlopen, 'file:///etc/passwd')
def test_do_not_override_ie_key_in_url_transparent(self):
ydl = YDL()
hdl = HDL()
class Foo1IE(InfoExtractor):
_VALID_URL = r'foo1:'
@ -908,11 +908,11 @@ class TestHaruhiDL(unittest.TestCase):
def _real_extract(self, url):
return _make_result([{'url': TEST_URL}], title='foo3 title')
ydl.add_info_extractor(Foo1IE(ydl))
ydl.add_info_extractor(Foo2IE(ydl))
ydl.add_info_extractor(Foo3IE(ydl))
ydl.extract_info('foo1:')
downloaded = ydl.downloaded_info_dicts[0]
hdl.add_info_extractor(Foo1IE(hdl))
hdl.add_info_extractor(Foo2IE(hdl))
hdl.add_info_extractor(Foo3IE(hdl))
hdl.extract_info('foo1:')
downloaded = hdl.downloaded_info_dicts[0]
self.assertEqual(downloaded['url'], TEST_URL)
self.assertEqual(downloaded['title'], 'foo1 title')
self.assertEqual(downloaded['id'], 'testid')


@ -9,7 +9,7 @@ import sys
import unittest
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from test.helper import FakeYDL, expect_dict, expect_value, http_server_port
from test.helper import FakeHDL, expect_dict, expect_value, http_server_port
from haruhi_dl.compat import compat_etree_fromstring, compat_http_server
from haruhi_dl.extractor.common import InfoExtractor
from haruhi_dl.extractor import YoutubeIE, get_info_extractor
@ -41,7 +41,7 @@ class TestIE(InfoExtractor):
class TestInfoExtractor(unittest.TestCase):
def setUp(self):
self.ie = TestIE(FakeYDL())
self.ie = TestIE(FakeHDL())
def test_ie_key(self):
self.assertEqual(get_info_extractor(YoutubeIE.ie_key()), YoutubeIE)


@ -22,11 +22,11 @@ def _download_restricted(url, filename, age):
'writeinfojson': True,
'outtmpl': '%(id)s.%(ext)s',
}
ydl = HaruhiDL(params)
ydl.add_default_info_extractors()
hdl = HaruhiDL(params)
hdl.add_default_info_extractors()
json_filename = os.path.splitext(filename)[0] + '.info.json'
try_rm(json_filename)
ydl.download([url])
hdl.download([url])
res = os.path.exists(json_filename)
try_rm(json_filename)
return res


@ -12,7 +12,7 @@ import unittest
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from test.helper import FakeYDL
from test.helper import FakeHDL
from haruhi_dl.cache import Cache
@ -38,10 +38,10 @@ class TestCache(unittest.TestCase):
shutil.rmtree(self.test_dir)
def test_cache(self):
ydl = FakeYDL({
hdl = FakeHDL({
'cachedir': self.test_dir,
})
c = Cache(ydl)
c = Cache(hdl)
obj = {'x': 1, 'y': ['ä', '\\a', True]}
self.assertEqual(c.load('test_cache', 'k.'), None)
c.store('test_cache', 'k.', obj)


@ -123,18 +123,18 @@ def generator(test_case, tname):
params.setdefault('extract_flat', 'in_playlist')
params.setdefault('skip_download', True)
ydl = HaruhiDL(params, auto_init=False)
ydl.add_default_info_extractors()
hdl = HaruhiDL(params, auto_init=False)
hdl.add_default_info_extractors()
finished_hook_called = set()
def _hook(status):
if status['status'] == 'finished':
finished_hook_called.add(status['filename'])
ydl.add_progress_hook(_hook)
expect_warnings(ydl, test_case.get('expected_warnings', []))
hdl.add_progress_hook(_hook)
expect_warnings(hdl, test_case.get('expected_warnings', []))
def get_tc_filename(tc):
return ydl.prepare_filename(tc.get('info_dict', {}))
return hdl.prepare_filename(tc.get('info_dict', {}))
res_dict = None
@ -154,7 +154,7 @@ def generator(test_case, tname):
# We're not using .download here since that is just a shim
# for outside error handling, and returns the exit code
# instead of the result dict.
res_dict = ydl.extract_info(
res_dict = hdl.extract_info(
test_case['url'],
force_generic_extractor=params.get('force_generic_extractor', False))
except (DownloadError, ExtractorError) as err:


@ -88,8 +88,8 @@ class TestHttpFD(unittest.TestCase):
def download(self, params, ep):
params['logger'] = FakeLogger()
ydl = HaruhiDL(params)
downloader = HttpFD(ydl, params)
hdl = HaruhiDL(params)
downloader = HttpFD(hdl, params)
filename = 'testfile.mp4'
try_rm(encodeFilename(filename))
self.assertTrue(downloader.real_download(filename, {


@ -77,8 +77,8 @@ class TestHTTP(unittest.TestCase):
if sys.version_info[0] == 3:
return
ydl = HaruhiDL({'logger': FakeLogger()})
r = ydl.extract_info('http://127.0.0.1:%d/302' % self.port)
hdl = HaruhiDL({'logger': FakeLogger()})
r = hdl.extract_info('http://127.0.0.1:%d/302' % self.port)
self.assertEqual(r['entries'][0]['url'], 'http://127.0.0.1:%d/vid.mp4' % self.port)
@ -96,13 +96,13 @@ class TestHTTPS(unittest.TestCase):
def test_nocheckcertificate(self):
if sys.version_info >= (2, 7, 9): # No certificate checking anyways
ydl = HaruhiDL({'logger': FakeLogger()})
hdl = HaruhiDL({'logger': FakeLogger()})
self.assertRaises(
Exception,
ydl.extract_info, 'https://127.0.0.1:%d/video.html' % self.port)
hdl.extract_info, 'https://127.0.0.1:%d/video.html' % self.port)
ydl = HaruhiDL({'logger': FakeLogger(), 'nocheckcertificate': True})
r = ydl.extract_info('https://127.0.0.1:%d/video.html' % self.port)
hdl = HaruhiDL({'logger': FakeLogger(), 'nocheckcertificate': True})
r = hdl.extract_info('https://127.0.0.1:%d/video.html' % self.port)
self.assertEqual(r['entries'][0]['url'], 'https://127.0.0.1:%d/vid.mp4' % self.port)
@ -139,25 +139,25 @@ class TestProxy(unittest.TestCase):
def test_proxy(self):
geo_proxy = '127.0.0.1:{0}'.format(self.geo_port)
ydl = HaruhiDL({
hdl = HaruhiDL({
'proxy': '127.0.0.1:{0}'.format(self.port),
'geo_verification_proxy': geo_proxy,
})
url = 'http://foo.com/bar'
response = ydl.urlopen(url).read().decode('utf-8')
response = hdl.urlopen(url).read().decode('utf-8')
self.assertEqual(response, 'normal: {0}'.format(url))
req = compat_urllib_request.Request(url)
req.add_header('Ytdl-request-proxy', geo_proxy)
response = ydl.urlopen(req).read().decode('utf-8')
response = hdl.urlopen(req).read().decode('utf-8')
self.assertEqual(response, 'geo: {0}'.format(url))
def test_proxy_with_idn(self):
ydl = HaruhiDL({
hdl = HaruhiDL({
'proxy': '127.0.0.1:{0}'.format(self.port),
})
url = 'http://中文.tw/'
response = ydl.urlopen(url).read().decode('utf-8')
response = hdl.urlopen(url).read().decode('utf-8')
# b'xn--fiq228c' is '中文'.encode('idna')
self.assertEqual(response, 'normal: http://xn--fiq228c.tw/')
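
The assertion above hinges on the IDNA ("punycode") form of the Unicode hostname, which is what the proxy actually sees on the wire. A quick standard-library check of that encoding, matching the comment in the hunk:

host = '中文'
print(host.encode('idna'))              # b'xn--fiq228c'
print(b'xn--fiq228c'.decode('idna'))    # back to '中文'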

View File

@ -8,7 +8,7 @@ import sys
import unittest
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from test.helper import FakeYDL
from test.helper import FakeHDL
from haruhi_dl.extractor import IqiyiIE
@ -39,7 +39,7 @@ class TestIqiyiSDKInterpreter(unittest.TestCase):
If `sign` is incorrect, the /validate call throws an HTTP 556 error

'''
logger = WarningLogger()
ie = IqiyiIEWithCredentials(FakeYDL({'logger': logger}))
ie = IqiyiIEWithCredentials(FakeHDL({'logger': logger}))
ie._login()
self.assertTrue('unable to log in:' in logger.messages[0])
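
The docstring in this hunk explains the intent: with bad credentials the extractor should warn rather than crash, and the test inspects the captured warning text. The WarningLogger helper itself is not shown here; a minimal stand-in exposing the debug/warning/error interface the downloader expects might look like this (an illustrative sketch, not the project's actual helper):

class CapturingLogger(object):
    """Collects warnings so a test can assert on their text."""
    def __init__(self):
        self.messages = []

    def debug(self, msg):
        pass

    def warning(self, msg):
        self.messages.append(msg)

    def error(self, msg):
        self.messages.append(msg)

Passed as FakeHDL({'logger': CapturingLogger()}), such a logger lets the test check that messages[0] contains the 'unable to log in:' warning instead of scraping stderr.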

View File

@ -12,7 +12,7 @@ import random
import subprocess
from test.helper import (
FakeYDL,
FakeHDL,
get_params,
)
from haruhi_dl.compat import (
@ -35,44 +35,44 @@ class TestMultipleSocks(unittest.TestCase):
params = self._check_params(['primary_proxy', 'primary_server_ip'])
if params is None:
return
ydl = FakeYDL({
hdl = FakeHDL({
'proxy': params['primary_proxy']
})
self.assertEqual(
ydl.urlopen('http://yt-dl.org/ip').read().decode('utf-8'),
hdl.urlopen('http://ifconfig.me/ip').read().decode('utf-8'),
params['primary_server_ip'])
def test_proxy_https(self):
params = self._check_params(['primary_proxy', 'primary_server_ip'])
if params is None:
return
ydl = FakeYDL({
hdl = FakeHDL({
'proxy': params['primary_proxy']
})
self.assertEqual(
ydl.urlopen('https://yt-dl.org/ip').read().decode('utf-8'),
hdl.urlopen('https://ifconfig.me/ip').read().decode('utf-8'),
params['primary_server_ip'])
def test_secondary_proxy_http(self):
params = self._check_params(['secondary_proxy', 'secondary_server_ip'])
if params is None:
return
ydl = FakeYDL()
req = compat_urllib_request.Request('http://yt-dl.org/ip')
hdl = FakeHDL()
req = compat_urllib_request.Request('http://ifconfig.me/ip')
req.add_header('Ytdl-request-proxy', params['secondary_proxy'])
self.assertEqual(
ydl.urlopen(req).read().decode('utf-8'),
hdl.urlopen(req).read().decode('utf-8'),
params['secondary_server_ip'])
def test_secondary_proxy_https(self):
params = self._check_params(['secondary_proxy', 'secondary_server_ip'])
if params is None:
return
ydl = FakeYDL()
req = compat_urllib_request.Request('https://yt-dl.org/ip')
hdl = FakeHDL()
req = compat_urllib_request.Request('https://ifconfig.me/ip')
req.add_header('Ytdl-request-proxy', params['secondary_proxy'])
self.assertEqual(
ydl.urlopen(req).read().decode('utf-8'),
hdl.urlopen(req).read().decode('utf-8'),
params['secondary_server_ip'])
@ -99,10 +99,10 @@ class TestSocks(unittest.TestCase):
if self._SKIP_SOCKS_TEST:
return '127.0.0.1'
ydl = FakeYDL({
hdl = FakeHDL({
'proxy': '%s://127.0.0.1:%d' % (protocol, self.port),
})
return ydl.urlopen('http://yt-dl.org/ip').read().decode('utf-8')
return hdl.urlopen('http://ifconfig.me/ip').read().decode('utf-8')
def test_socks4(self):
self.assertTrue(isinstance(self._get_ip('socks4'), compat_str))
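
Two patterns show up in the hunks above: a client-wide proxy set through the 'proxy' param, and a per-request override sent via the Ytdl-request-proxy header on a compat_urllib_request.Request. A minimal sketch of both, assuming haruhi_dl is importable; the proxy addresses here are placeholders:

from haruhi_dl import HaruhiDL
from haruhi_dl.compat import compat_urllib_request

hdl = HaruhiDL({'proxy': '127.0.0.1:3128'})              # placeholder default proxy

# Goes through the default proxy configured above.
print(hdl.urlopen('http://ifconfig.me/ip').read().decode('utf-8'))

# Overrides the proxy for this one request only, as the secondary-proxy
# tests in the diff do.
req = compat_urllib_request.Request('http://ifconfig.me/ip')
req.add_header('Ytdl-request-proxy', '127.0.0.1:9150')   # placeholder secondary proxy
print(hdl.urlopen(req).read().decode('utf-8'))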

View File

@ -7,7 +7,7 @@ import sys
import unittest
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from test.helper import FakeYDL, md5
from test.helper import FakeHDL, md5
from haruhi_dl.extractor import (
@ -35,7 +35,7 @@ class BaseTestSubtitles(unittest.TestCase):
IE = None
def setUp(self):
self.DL = FakeYDL()
self.DL = FakeHDL()
self.ie = self.IE()
self.DL.add_info_extractor(self.ie)

View File

@ -1,30 +0,0 @@
#!/usr/bin/env python
from __future__ import unicode_literals
# Allow direct execution
import os
import sys
import unittest
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import json
from haruhi_dl.update import rsa_verify
class TestUpdate(unittest.TestCase):
def test_rsa_verify(self):
UPDATES_RSA_KEY = (0x9d60ee4d8f805312fdb15a62f87b95bd66177b91df176765d13514a0f1754bcd2057295c5b6f1d35daa6742c3ffc9a82d3e118861c207995a8031e151d863c9927e304576bc80692bc8e094896fcf11b66f3e29e04e3a71e9a11558558acea1840aec37fc396fb6b65dc81a1c4144e03bd1c011de62e3f1357b327d08426fe93, 65537)
with open(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'versions.json'), 'rb') as f:
versions_info = f.read().decode()
versions_info = json.loads(versions_info)
signature = versions_info['signature']
del versions_info['signature']
self.assertTrue(rsa_verify(
json.dumps(versions_info, sort_keys=True).encode('utf-8'),
signature, UPDATES_RSA_KEY))
#if __name__ == '__main__':
# unittest.main()
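
For reference, the test removed here verified the hex signature in versions.json over the payload serialized with sort_keys=True. A rough Python 3 equivalent using the third-party rsa package instead of the deleted haruhi_dl.update.rsa_verify helper (an assumption about tooling, not what the project ships):

import json

import rsa  # pip install rsa; stand-in for the removed rsa_verify helper

UPDATES_RSA_KEY = (0x9d60ee4d8f805312fdb15a62f87b95bd66177b91df176765d13514a0f1754bcd2057295c5b6f1d35daa6742c3ffc9a82d3e118861c207995a8031e151d863c9927e304576bc80692bc8e094896fcf11b66f3e29e04e3a71e9a11558558acea1840aec37fc396fb6b65dc81a1c4144e03bd1c011de62e3f1357b327d08426fe93, 65537)

with open('versions.json', 'rb') as f:
    versions_info = json.loads(f.read().decode())

signature = bytes.fromhex(versions_info.pop('signature'))
payload = json.dumps(versions_info, sort_keys=True).encode('utf-8')

pubkey = rsa.PublicKey(*UPDATES_RSA_KEY)
rsa.verify(payload, signature, pubkey)   # raises rsa.VerificationError on tampering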

View File

@ -46,9 +46,9 @@ class TestAnnotations(unittest.TestCase):
def test_info_json(self):
expected = list(EXPECTED_ANNOTATIONS) # Two annotations could have the same text.
ie = haruhi_dl.extractor.YoutubeIE()
ydl = HaruhiDL(params)
ydl.add_info_extractor(ie)
ydl.download([TEST_ID])
hdl = HaruhiDL(params)
hdl.add_info_extractor(ie)
hdl.download([TEST_ID])
self.assertTrue(os.path.exists(ANNOTATIONS_FILE))
annoxml = None
with io.open(ANNOTATIONS_FILE, 'r', encoding='utf-8') as annof:

View File

@ -7,7 +7,7 @@ import sys
import unittest
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from test.helper import FakeYDL
from test.helper import FakeHDL
from haruhi_dl.extractor import (
@ -22,7 +22,7 @@ class TestYoutubeLists(unittest.TestCase):
self.assertEqual(info['_type'], 'playlist')
def test_youtube_playlist_noplaylist(self):
dl = FakeYDL()
dl = FakeHDL()
dl.params['noplaylist'] = True
ie = YoutubePlaylistIE(dl)
result = ie.extract('https://www.youtube.com/watch?v=FXxLjLQi3Fg&list=PLwiyx1dc3P2JR9N8gQaQN_BCvlSlap7re')
@ -30,7 +30,7 @@ class TestYoutubeLists(unittest.TestCase):
self.assertEqual(YoutubeIE().extract_id(result['url']), 'FXxLjLQi3Fg')
def test_youtube_course(self):
dl = FakeYDL()
dl = FakeHDL()
ie = YoutubePlaylistIE(dl)
# TODO find a > 100 (paginating?) videos course
result = ie.extract('https://www.youtube.com/course?list=ECUl4u3cNGP61MdtwGTqZA0MreSaDybji8')
@ -40,7 +40,7 @@ class TestYoutubeLists(unittest.TestCase):
self.assertEqual(YoutubeIE().extract_id(entries[-1]['url']), 'rYefUsYuEp0')
def test_youtube_mix(self):
dl = FakeYDL()
dl = FakeHDL()
ie = YoutubePlaylistIE(dl)
result = ie.extract('https://www.youtube.com/watch?v=W01L70IGBgE&index=2&list=RDOQpdSVF_k_w')
entries = result['entries']
@ -51,14 +51,14 @@ class TestYoutubeLists(unittest.TestCase):
def test_youtube_toptracks(self):
print('Skipping: The playlist page gives error 500')
return
dl = FakeYDL()
dl = FakeHDL()
ie = YoutubePlaylistIE(dl)
result = ie.extract('https://www.youtube.com/playlist?list=MCUS')
entries = result['entries']
self.assertEqual(len(entries), 100)
def test_youtube_flat_playlist_titles(self):
dl = FakeYDL()
dl = FakeHDL()
dl.params['extract_flat'] = True
ie = YoutubePlaylistIE(dl)
result = ie.extract('https://www.youtube.com/playlist?list=PL-KKIb8rvtMSrAO9YFbeM6UQrAqoFTUWv')

View File

@ -12,7 +12,7 @@ import io
import re
import string
from test.helper import FakeYDL
from test.helper import FakeHDL
from haruhi_dl.extractor import YoutubeIE
from haruhi_dl.compat import compat_str, compat_urlretrieve
@ -116,8 +116,8 @@ def make_tfunc(url, stype, sig_input, expected_sig):
if not os.path.exists(fn):
compat_urlretrieve(url, fn)
ydl = FakeYDL()
ie = YoutubeIE(ydl)
hdl = FakeHDL()
ie = YoutubeIE(hdl)
if stype == 'js':
with io.open(fn, encoding='utf-8') as testf:
jscode = testf.read()