Use extract_info in YoutubePlaylist and YoutubeSearch

Author: Jaime Marquínez Ferrándiz
Date:   2013-03-05 11:58:01 +01:00
Parent: 3370abd509
Commit: 597cc8a455

2 changed files with 12 additions and 12 deletions
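
The change in one sentence: the playlist and user extractors no longer loop over videos calling self._downloader.download([url]); they build the list of URLs and return self._downloader.extract_info_iterable(urls). That downloader method is not part of this diff, so the following is only a hedged sketch of how extract_info and extract_info_iterable plausibly fit together (names taken from the diff, implementation assumed):

    # Hedged sketch, not youtube-dl code: a downloader exposing the two entry
    # points the extractors now rely on.
    class SketchDownloader(object):
        def extract_info(self, url):
            # The real FileDownloader would pick an InfoExtractor for the URL and
            # process/download the result; here we just echo the URL back.
            return url

        def extract_info_iterable(self, urls):
            # Assumption: apply extract_info to every URL and collect the results.
            return [self.extract_info(url) for url in urls]

    # e.g. SketchDownloader().extract_info_iterable(videos) -> list of per-URL results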

File 1 of 2

@@ -10,6 +10,7 @@
 from youtube_dl.InfoExtractors import YoutubeUserIE, YoutubePlaylistIE, YoutubeIE
 from youtube_dl.utils import *
+from youtube_dl.FileDownloader import FileDownloader
 
 PARAMETERS_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)), "parameters.json")
 with io.open(PARAMETERS_FILE, encoding='utf-8') as pf:
@@ -22,7 +23,7 @@
 opener = compat_urllib_request.build_opener(proxy_handler, cookie_processor, YoutubeDLHandler())
 compat_urllib_request.install_opener(opener)
 
-class FakeDownloader(object):
+class FakeDownloader(FileDownloader):
     def __init__(self):
         self.result = []
         self.params = parameters
@@ -30,15 +31,16 @@ def to_screen(self, s):
         print(s)
     def trouble(self, s):
         raise Exception(s)
-    def download(self, x):
-        self.result.append(x)
+    def extract_info(self, url):
+        self.result.append(url)
+        return url
 
 class TestYoutubeLists(unittest.TestCase):
     def test_youtube_playlist(self):
         dl = FakeDownloader()
         ie = YoutubePlaylistIE(dl)
         ie.extract('https://www.youtube.com/playlist?list=PLwiyx1dc3P2JR9N8gQaQN_BCvlSlap7re')
-        ytie_results = [YoutubeIE()._extract_id(r[0]) for r in dl.result]
+        ytie_results = [YoutubeIE()._extract_id(url) for url in dl.result]
         self.assertEqual(ytie_results, [ 'bV9L5Ht9LgY', 'FXxLjLQi3Fg', 'tU3Bgo5qJZE'])
 
     def test_issue_673(self):
@@ -58,7 +60,7 @@ def test_youtube_playlist_with_deleted(self):
         dl = FakeDownloader()
         ie = YoutubePlaylistIE(dl)
         ie.extract('https://www.youtube.com/playlist?list=PLwP_SiAcdui0KVebT0mU9Apz359a4ubsC')
-        ytie_results = [YoutubeIE()._extract_id(r[0]) for r in dl.result]
+        ytie_results = [YoutubeIE()._extract_id(url) for url in dl.result]
         self.assertFalse('pElCt5oNDuI' in ytie_results)
         self.assertFalse('KdPEApIVdWM' in ytie_results)
@@ -67,9 +69,9 @@ def test_youtube_course(self):
         ie = YoutubePlaylistIE(dl)
         # TODO find a > 100 (paginating?) videos course
         ie.extract('https://www.youtube.com/course?list=ECUl4u3cNGP61MdtwGTqZA0MreSaDybji8')
-        self.assertEqual(YoutubeIE()._extract_id(dl.result[0][0]), 'j9WZyLZCBzs')
+        self.assertEqual(YoutubeIE()._extract_id(dl.result[0]), 'j9WZyLZCBzs')
         self.assertEqual(len(dl.result), 25)
-        self.assertEqual(YoutubeIE()._extract_id(dl.result[-1][0]), 'rYefUsYuEp0')
+        self.assertEqual(YoutubeIE()._extract_id(dl.result[-1]), 'rYefUsYuEp0')
 
     def test_youtube_channel(self):
         # I give up, please find a channel that does paginate and test this like test_youtube_playlist_long

File 2 of 2

@@ -1756,9 +1756,7 @@ def _real_extract(self, url):
         else:
             self._downloader.to_screen(u'[youtube] PL %s: Found %i videos, downloading %i' % (playlist_id, total, len(videos)))
 
-        for video in videos:
-            self._downloader.download([video])
-        return
+        return self._downloader.extract_info_iterable(videos)
 
 
 class YoutubeChannelIE(InfoExtractor):
@@ -1892,8 +1890,8 @@ def _real_extract(self, url):
         self._downloader.to_screen(u"[youtube] user %s: Collected %d video ids (downloading %d of them)" %
                 (username, all_ids_count, len(video_ids)))
 
-        for video_id in video_ids:
-            self._downloader.download(['http://www.youtube.com/watch?v=%s' % video_id])
+        urls = ['http://www.youtube.com/watch?v=%s' % video_id for video_id in video_ids]
+        return self._downloader.extract_info_iterable(urls)
 
 
 class BlipTVUserIE(InfoExtractor):
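
Both extractor hunks follow the same pattern: collect the watch URLs first, then delegate to the downloader in a single call and return its result. A hedged sketch of that extractor-side pattern; ToyUserIE and _StubDownloader are illustrative stand-ins, not youtube-dl classes:

    # Extractor-side sketch of the pattern introduced above (illustrative only).
    class ToyUserIE(object):
        def __init__(self, downloader):
            self._downloader = downloader

        def _real_extract(self, video_ids):
            # Build all watch URLs up front, then hand them over in one call.
            urls = ['http://www.youtube.com/watch?v=%s' % video_id for video_id in video_ids]
            return self._downloader.extract_info_iterable(urls)

    class _StubDownloader(object):
        def extract_info_iterable(self, urls):
            return list(urls)   # stand-in for the real per-URL extraction

    results = ToyUserIE(_StubDownloader())._real_extract(['bV9L5Ht9LgY', 'FXxLjLQi3Fg'])
    assert results == ['http://www.youtube.com/watch?v=bV9L5Ht9LgY',
                       'http://www.youtube.com/watch?v=FXxLjLQi3Fg']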