Fix WDRMaus; extend URL matching for other Maus pages; improve ID extraction
parent 5cb4833f40
commit 568c7005d5
@@ -10,6 +10,7 @@ from ..compat import (
 )
 from ..utils import (
     determine_ext,
+    dict_get,
     ExtractorError,
     js_to_json,
     strip_jsonp,
@@ -22,9 +23,10 @@ from ..utils import (
 
 
 class WDRIE(InfoExtractor):
-    _VALID_URL = r'https?://deviceids-medp\.wdr\.de/ondemand/\d+/(?P<id>\d+)\.js'
+    __API_URL_TPL = '//deviceids-medp.wdr.de/ondemand/%s/%s'
+    _VALID_URL = (r'(?:https?:' + __API_URL_TPL) % (r'\d+', r'(?=\d+\.js)|wdr:)(?P<id>\d{6,})')
     _GEO_COUNTRIES = ['DE']
-    _TEST = {
+    _TESTS = [{
         'url': 'http://deviceids-medp.wdr.de/ondemand/155/1557833.js',
         'info_dict': {
             'id': 'mdb-1557833',
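The hunk above rebuilds _VALID_URL from the shared __API_URL_TPL so that the same template can serve both URL matching and URL construction, and so that wdr:<id> pseudo-URLs are accepted alongside real asset URLs. A minimal standalone sketch of the resulting pattern, assuming it expands as written in the diff; API_URL_TPL and VALID_URL are local illustrative names, not the extractor's attributes:

import re

# Illustrative reconstruction of the committed pattern.
API_URL_TPL = '//deviceids-medp.wdr.de/ondemand/%s/%s'
VALID_URL = (r'(?:https?:' + API_URL_TPL) % (r'\d+', r'(?=\d+\.js)|wdr:)(?P<id>\d{6,})')

# Expands to:
#   (?:https?://deviceids-medp.wdr.de/ondemand/\d+/(?=\d+\.js)|wdr:)(?P<id>\d{6,})
# i.e. either a deviceids-medp.wdr.de asset URL or a wdr:<id> pseudo-URL.
for candidate in (
        'http://deviceids-medp.wdr.de/ondemand/155/1557833.js',
        'wdr:1557833'):
    m = re.match(VALID_URL, candidate)
    print(candidate, '->', m and m.group('id'))  # both print 1557833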
@@ -32,11 +34,20 @@ class WDRIE(InfoExtractor):
             'title': 'Biathlon-Staffel verpasst Podest bei Olympia-Generalprobe',
             'upload_date': '20180112',
         },
-    }
+    },
+    ]
+
+    def _asset_url(self, wdr_id):
+        id_len = max(len(wdr_id), 5)
+        return ''.join(('https:', self.__API_URL_TPL % (wdr_id[:id_len - 4], wdr_id, ), '.js'))
 
     def _real_extract(self, url):
         video_id = self._match_id(url)
 
+        if url.startswith('wdr:'):
+            video_id = url[4:]
+            url = self._asset_url(video_id)
+
         metadata = self._download_json(
             url, video_id, transform_source=strip_jsonp)
 
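_asset_url() turns a bare WDR id back into the JSONP asset URL: the directory component is the id with its last four digits dropped, and the max(..., 5) clamp keeps at least one leading digit for short ids. A rough standalone sketch of that arithmetic, assuming the template from the diff; asset_url here is a hypothetical free function, not the extractor method:

# Hypothetical free-function version of WDRIE._asset_url, for illustration only.
API_URL_TPL = '//deviceids-medp.wdr.de/ondemand/%s/%s'

def asset_url(wdr_id):
    # Directory is the id minus its last four digits, e.g. '1557833' -> '155'.
    id_len = max(len(wdr_id), 5)
    return ''.join(('https:', API_URL_TPL % (wdr_id[:id_len - 4], wdr_id), '.js'))

print(asset_url('1557833'))
# https://deviceids-medp.wdr.de/ondemand/155/1557833.js

With this helper in place, a wdr:<id> pseudo-URL accepted by the new _VALID_URL can be converted back to the real metadata URL inside _real_extract().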
@@ -115,10 +126,10 @@ class WDRIE(InfoExtractor):
         }
 
 
-class WDRPageIE(InfoExtractor):
-    _CURRENT_MAUS_URL = r'https?://(?:www\.)wdrmaus.de/(?:[^/]+/){1,2}[^/?#]+\.php5'
+class WDRPageIE(WDRIE):
+    _MAUS_REGEX = r'https?://(?:www\.)wdrmaus.de/(?:[^/]+/)*?(?P<maus_id>[^/?#.]+)(?:/?|/index\.php5|\.php5)$'
     _PAGE_REGEX = r'/(?:mediathek/)?(?:[^/]+/)*(?P<display_id>[^/]+)\.html'
-    _VALID_URL = r'https?://(?:www\d?\.)?(?:(?:kinder\.)?wdr\d?|sportschau)\.de' + _PAGE_REGEX + '|' + _CURRENT_MAUS_URL
+    _VALID_URL = r'https?://(?:www\d?\.)?(?:(?:kinder\.)?wdr\d?|sportschau)\.de' + _PAGE_REGEX + '|' + _MAUS_REGEX
 
     _TESTS = [
         {
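Where the old _CURRENT_MAUS_URL only matched .php5 pages one or two directory levels deep, the new _MAUS_REGEX accepts directory-style URLs, /index.php5 and .php5 pages at any depth, and captures a maus_id group. A quick check of that capture against the two Maus test URLs from this commit; the pattern is copied from the diff and MAUS_REGEX is just a local stand-in name:

import re

# Copied from the diff; MAUS_REGEX stands in for WDRPageIE._MAUS_REGEX.
MAUS_REGEX = r'https?://(?:www\.)wdrmaus.de/(?:[^/]+/)*?(?P<maus_id>[^/?#.]+)(?:/?|/index\.php5|\.php5)$'

for candidate in (
        'http://www.wdrmaus.de/aktuelle-sendung/index.php5',
        'http://www.wdrmaus.de/filme/sachgeschichten/achterbahn.php5'):
    m = re.match(MAUS_REGEX, candidate)
    print(candidate, '->', m and m.group('maus_id'))
# -> aktuelle-sendung
# -> achterbahn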
@@ -180,12 +191,12 @@ class WDRPageIE(InfoExtractor):
         {
             'url': 'http://www.wdrmaus.de/aktuelle-sendung/index.php5',
             'info_dict': {
-                'id': 'mdb-1552552',
+                'id': 'mdb-2627637',
                 'ext': 'mp4',
                 'upload_date': 're:^[0-9]{8}$',
-                'title': 're:^Die Sendung mit der Maus vom [0-9.]{10}$',
+                'title': 're:^Die Sendung (?:mit der Maus )?vom [0-9.]{10}$',
             },
-            'skip': 'The id changes from week to week because of the new episode'
+            # 'skip': 'The id changes from week to week because of the new episode'
         },
         {
             'url': 'http://www.wdrmaus.de/filme/sachgeschichten/achterbahn.php5',
@@ -234,7 +245,7 @@ class WDRPageIE(InfoExtractor):
 
     def _real_extract(self, url):
         mobj = re.match(self._VALID_URL, url)
-        display_id = mobj.group('display_id')
+        display_id = dict_get(mobj.groupdict(), ('display_id', 'maus_id'), 'wdrmaus')
         webpage = self._download_webpage(url, display_id)
 
         entries = []
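Because _VALID_URL is now an alternation, only one of the named groups display_id and maus_id is set for any given match; dict_get() from ..utils picks whichever is non-empty and falls back to the literal 'wdrmaus'. A small sketch of that selection, using a simplified stand-in for dict_get and a hypothetical miniature of the alternation (the page-regex half is trimmed for brevity):

import re

# Simplified stand-in for youtube-dl's utils.dict_get: first non-empty key, else default.
def dict_get(d, keys, default=None):
    for key in keys:
        value = d.get(key)
        if value:
            return value
    return default

# Hypothetical miniature of the WDRPageIE alternation: an HTML page group or a Maus group.
VALID_URL = (r'https?://(?:www\d?\.)?wdr\d?\.de/(?:[^/]+/)*(?P<display_id>[^/]+)\.html'
             r'|https?://(?:www\.)wdrmaus.de/(?:[^/]+/)*?(?P<maus_id>[^/?#.]+)(?:/?|/index\.php5|\.php5)$')

for candidate in (
        'https://www1.wdr.de/mediathek/video/sendungen/aktuelle-stunde/aktuelle-stunde-120.html',
        'http://www.wdrmaus.de/aktuelle-sendung/index.php5'):
    mobj = re.match(VALID_URL, candidate)
    print(dict_get(mobj.groupdict(), ('display_id', 'maus_id'), 'wdrmaus'))
# -> aktuelle-stunde-120
# -> aktuelle-sendung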
@@ -260,6 +271,14 @@ class WDRPageIE(InfoExtractor):
             jsonp_url = try_get(
                 media_link_obj, lambda x: x['mediaObj']['url'], compat_str)
             if jsonp_url:
+                # metadata, or player JS with ['ref'] giving WDR id, or just media, perhaps
+                clip_id = media_link_obj['mediaObj'].get('ref')
+                if jsonp_url.endswith('.assetjsonp'):
+                    asset = self._download_json(
+                        jsonp_url, display_id, fatal=False, transform_source=strip_jsonp)
+                    clip_id = try_get(asset, lambda x: x['trackerData']['trackerClipId'], compat_str)
+                if clip_id:
+                    jsonp_url = self._asset_url(clip_id[4:])
                 entries.append(self.url_result(jsonp_url, ie=WDRIE.ie_key()))
 
         # Playlist (e.g. https://www1.wdr.de/mediathek/video/sendungen/aktuelle-stunde/aktuelle-stunde-120.html)
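The new branch starts from mediaObj.ref and, when the media link is an .assetjsonp metadata URL, replaces that with trackerData.trackerClipId from the downloaded metadata; either way an id of the form mdb-1557833 has its mdb- prefix stripped (clip_id[4:]) and is rewritten into the canonical asset URL handled by WDRIE. A toy walk-through of that flow with invented data; asset_url is the hypothetical helper from the earlier sketch, and the metadata download is faked inline:

# Toy data modelled on the structures the extractor sees; values are invented.
API_URL_TPL = '//deviceids-medp.wdr.de/ondemand/%s/%s'

def asset_url(wdr_id):
    id_len = max(len(wdr_id), 5)
    return ''.join(('https:', API_URL_TPL % (wdr_id[:id_len - 4], wdr_id), '.js'))

media_link_obj = {'mediaObj': {'url': 'https://example.invalid/155/1557833.assetjsonp',
                               'ref': 'mdb-1557833'}}
jsonp_url = media_link_obj['mediaObj']['url']

clip_id = media_link_obj['mediaObj'].get('ref')
if jsonp_url.endswith('.assetjsonp'):
    # The extractor downloads this URL; here the parsed metadata is hard-coded.
    asset = {'trackerData': {'trackerClipId': 'mdb-1557833'}}
    clip_id = asset['trackerData']['trackerClipId']
if clip_id:
    jsonp_url = asset_url(clip_id[4:])  # 'mdb-1557833' -> '1557833'

print(jsonp_url)
# https://deviceids-medp.wdr.de/ondemand/155/1557833.js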