# -*- coding: utf-8 -*-
""" CONTENT API """

from __future__ import absolute_import, division, unicode_literals

import json
import logging
import re
from datetime import datetime

from six.moves.html_parser import HTMLParser

import requests

from resources.lib import kodiutils
from resources.lib.viervijfzes import CHANNELS

_LOGGER = logging.getLogger('content-api')

CACHE_AUTO = 1  # Use the cache if available, and query the API if no cache is available
CACHE_ONLY = 2  # Only use the cache, don't use the API
CACHE_PREVENT = 3  # Don't use the cache
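
# Illustrative use of the cache flags (a sketch, not executed on import; the program
# path 'some-program' is a made-up example value):
#
#   api = ContentApi()
#   program = api.get_program('vier', 'some-program', cache=CACHE_ONLY)     # cache hit or None
#   program = api.get_program('vier', 'some-program', cache=CACHE_AUTO)     # cache first, then scrape
#   program = api.get_program('vier', 'some-program', cache=CACHE_PREVENT)  # always scrape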


class UnavailableException(Exception):
    """ Is thrown when an item is unavailable. """


class NoContentException(Exception):
    """ Is thrown when no items are available. """


class GeoblockedException(Exception):
    """ Is thrown when a geoblocked item is played. """

class Program:
    """ Defines a Program. """

    def __init__(self, uuid=None, path=None, channel=None, title=None, description=None, aired=None, cover=None, background=None, seasons=None, episodes=None):
        """
        :type uuid: str
        :type path: str
        :type channel: str
        :type title: str
        :type description: str
        :type aired: datetime
        :type cover: str
        :type background: str
        :type seasons: list[Season]
        :type episodes: list[Episode]
        """
        self.uuid = uuid
        self.path = path
        self.channel = channel
        self.title = title
        self.description = description
        self.aired = aired
        self.cover = cover
        self.background = background
        self.seasons = seasons
        self.episodes = episodes

    def __repr__(self):
        return "%r" % self.__dict__


class Season:
    """ Defines a Season. """

    def __init__(self, uuid=None, path=None, channel=None, title=None, description=None, cover=None, number=None):
        """
        :type uuid: str
        :type path: str
        :type channel: str
        :type title: str
        :type description: str
        :type cover: str
        :type number: int
        """
        self.uuid = uuid
        self.path = path
        self.channel = channel
        self.title = title
        self.description = description
        self.cover = cover
        self.number = number

    def __repr__(self):
        return "%r" % self.__dict__


class Episode:
    """ Defines an Episode. """

    def __init__(self, uuid=None, nodeid=None, path=None, channel=None, program_title=None, title=None, description=None, cover=None, duration=None,
                 season=None, season_uuid=None, number=None, rating=None, aired=None, expiry=None):
        """
        :type uuid: str
        :type nodeid: str
        :type path: str
        :type channel: str
        :type program_title: str
        :type title: str
        :type description: str
        :type cover: str
        :type duration: int
        :type season: int
        :type season_uuid: str
        :type number: int
        :type rating: str
        :type aired: datetime
        :type expiry: datetime
        """
        self.uuid = uuid
        self.nodeid = nodeid
        self.path = path
        self.channel = channel
        self.program_title = program_title
        self.title = title
        self.description = description
        self.cover = cover
        self.duration = duration
        self.season = season
        self.season_uuid = season_uuid
        self.number = number
        self.rating = rating
        self.aired = aired
        self.expiry = expiry

    def __repr__(self):
        return "%r" % self.__dict__


class ContentApi:
    """ VIER/VIJF/ZES Content API """
    API_ENDPOINT = 'https://api.viervijfzes.be'
    SITE_APIS = {
        'vier': 'https://www.vier.be/api',
        'vijf': 'https://www.vijf.be/api',
        'zes': 'https://www.zestv.be/api',
    }

    def __init__(self, auth=None):
        """ Initialise object """
        self._session = requests.session()
        self._auth = auth

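    # Illustrative usage (a sketch, not executed on import). Authenticated calls need an
    # auth object that exposes get_token(), as used by _get_url() below; the AuthApi
    # import path and constructor arguments shown here are assumptions.
    #
    #   from resources.lib.viervijfzes.auth import AuthApi  # assumed helper
    #   api = ContentApi(auth=AuthApi(username='user@example.com', password='secret'))
    #   for program in api.get_programs('vier'):
    #       print(program.title)
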
    def get_programs(self, channel):
        """ Get a list of all programs of the specified channel.
        :type channel: str
        :rtype list[Program]
        NOTE: This function doesn't use an API.
        """
        if channel not in CHANNELS:
            raise Exception('Unknown channel %s' % channel)

        # Load webpage
        data = self._get_url(CHANNELS[channel]['url'])

        # Parse programs
        parser = HTMLParser()
        regex_programs = re.compile(r'<a class="program-overview__link" href="(?P<path>[^"]+)">\s+'
                                    r'<span class="program-overview__title">\s+(?P<title>[^<]+)</span>.*?'
                                    r'</a>', re.DOTALL)

        programs = []
        for item in regex_programs.finditer(data):
            path = item.group('path').lstrip('/')

            program = self.get_program(channel, path, CACHE_ONLY)  # Get program details, but from cache only
            if program:
                # Use program with metadata from cache
                programs.append(program)
            else:
                # Use program with the values that we've parsed from the page
                programs.append(Program(channel=channel,
                                        path=path,
                                        title=parser.unescape(item.group('title').strip())))

        return programs

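    # The regex above assumes markup of roughly this shape on the channel overview page
    # (an illustrative sketch, not an exact copy of the site's HTML):
    #
    #   <a class="program-overview__link" href="/some-program">
    #       <span class="program-overview__title"> Some Program </span>
    #   </a>
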
    def get_program(self, channel, path, cache=CACHE_AUTO):
        """ Get a Program object from the specified page.
        :type channel: str
        :type path: str
        :type cache: int
        :rtype Program
        NOTE: This function doesn't use an API.
        """
        if channel not in CHANNELS:
            raise Exception('Unknown channel %s' % channel)

        if cache in [CACHE_AUTO, CACHE_ONLY]:
            # Try to fetch from cache
            data = kodiutils.get_cache(['program', channel, path])
            if data is None and cache == CACHE_ONLY:
                return None
        else:
            data = None

        if data is None:
            # Fetch webpage
            page = self._get_url(CHANNELS[channel]['url'] + '/' + path)

            # Extract JSON
            regex_program = re.compile(r'data-hero="([^"]+)', re.DOTALL)
            json_data = HTMLParser().unescape(regex_program.search(page).group(1))
            data = json.loads(json_data)['data']

            # Store response in cache
            kodiutils.set_cache(['program', channel, path], data)

        program = self._parse_program_data(data)

        return program

    def get_program_by_uuid(self, uuid, cache=CACHE_AUTO):
        """ Get a Program object.
        :type uuid: str
        :type cache: int
        :rtype Program
        """
        if cache in [CACHE_AUTO, CACHE_ONLY]:
            # Try to fetch from cache
            data = kodiutils.get_cache(['program', uuid])
            if data is None and cache == CACHE_ONLY:
                return None
        else:
            data = None

        if data is None:
            # Fetch from API
            response = self._get_url(self.API_ENDPOINT + '/content/%s' % uuid, authentication=True)
            data = json.loads(response)

            if not data:
                raise UnavailableException()

            # Store response in cache
            kodiutils.set_cache(['program', uuid], data)

        return Program(
            uuid=uuid,
            path=data['url']['S'].strip('/'),
            title=data['label']['S'],
            description=data['description']['S'],
            cover=data['image']['S'],
        )

    def get_episode(self, channel, path):
        """ Get an Episode object from the specified page.
        :type channel: str
        :type path: str
        :rtype Episode
        NOTE: This function doesn't use an API.
        """
        if channel not in CHANNELS:
            raise Exception('Unknown channel %s' % channel)

        # Load webpage
        page = self._get_url(CHANNELS[channel]['url'] + '/' + path)

        # Extract program JSON
        parser = HTMLParser()
        regex_program = re.compile(r'data-hero="([^"]+)', re.DOTALL)
        json_data = parser.unescape(regex_program.search(page).group(1))
        data = json.loads(json_data)['data']
        program = self._parse_program_data(data)

        # Extract episode JSON
        regex_episode = re.compile(r'<script type="application/json" data-drupal-selector="drupal-settings-json">(.*?)</script>', re.DOTALL)
        json_data = parser.unescape(regex_episode.search(page).group(1))
        data = json.loads(json_data)

        # Look up the episode in the program JSON based on the nodeId, since the
        # episode JSON we just extracted doesn't contain all information
        for episode in program.episodes:
            if episode.nodeid == data['pageInfo']['nodeId']:
                return episode

        return None

    def get_stream_by_uuid(self, uuid):
        """ Get the stream URL to use for this video.
        :type uuid: str
        :rtype str
        """
        response = self._get_url(self.API_ENDPOINT + '/content/%s' % uuid, authentication=True)
        data = json.loads(response)
        return data['video']['S']

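    # Typical playback flow (a sketch, not executed on import; the channel and path
    # values are made-up examples):
    #
    #   episode = api.get_episode('vier', 'some-program/season-1/episode-1')
    #   stream_url = api.get_stream_by_uuid(episode.uuid)  # requires an authenticated ContentApi
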
    @staticmethod
    def _parse_program_data(data):
        """ Parse the Program JSON.
        :type data: dict
        :rtype Program
        """
        # Create Program info
        program = Program(
            uuid=data['id'],
            path=data['link'].lstrip('/'),
            channel=data['pageInfo']['site'],
            title=data['title'],
            description=data['description'],
            aired=datetime.fromtimestamp(data.get('pageInfo', {}).get('publishDate')),
            cover=data['images']['poster'],
            background=data['images']['hero'],
        )

        # Create Season info
        program.seasons = {
            key: Season(
                uuid=playlist['id'],
                path=playlist['link'].lstrip('/'),
                channel=playlist['pageInfo']['site'],
                title=playlist['title'],
                description=playlist['pageInfo']['description'],
                number=playlist['episodes'][0]['seasonNumber'],  # The playlist has no season number of its own, so take it from the first episode
            )
            for key, playlist in enumerate(data['playlists'])
        }

        # Create Episodes info
        program.episodes = [
            ContentApi._parse_episode_data(episode, playlist['id'])
            for playlist in data['playlists']
            for episode in playlist['episodes']
        ]

        return program

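    # A sketch of the 'data' payload that _parse_program_data() reads (an assumption
    # inferred from the parsing above, not an official schema); only fields the code
    # touches are shown and all values are made up:
    #
    #   {
    #       "id": "<uuid>", "link": "/some-program", "title": "Some Program",
    #       "description": "...",
    #       "pageInfo": {"site": "vier", "publishDate": 1584630331},
    #       "images": {"poster": "https://.../poster.jpg", "hero": "https://.../hero.jpg"},
    #       "playlists": [
    #           {"id": "<uuid>", "link": "...", "title": "Seizoen 1",
    #            "pageInfo": {"site": "vier", "description": "..."},
    #            "episodes": [{"videoUuid": "<uuid>", "title": "Aflevering 1", ...}]}
    #       ]
    #   }
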
    @staticmethod
    def _parse_episode_data(data, season_uuid):
        """ Parse the Episode JSON.
        :type data: dict
        :type season_uuid: str
        :rtype Episode
        """

        if data.get('episodeNumber'):
            episode_number = data.get('episodeNumber')
        else:
            # The episodeNumber can be absent
            match = re.compile(r'\d+$').search(data.get('title'))
            if match:
                episode_number = match.group(0)
            else:
                episode_number = None

        episode = Episode(
            uuid=data.get('videoUuid'),
            nodeid=data.get('pageInfo', {}).get('nodeId'),
            path=data.get('link').lstrip('/'),
            channel=data.get('pageInfo', {}).get('site'),
            program_title=data.get('program', {}).get('title'),
            title=data.get('title'),
            description=data.get('pageInfo', {}).get('description'),
            cover=data.get('image'),
            duration=data.get('duration'),
            season=data.get('seasonNumber'),
            season_uuid=season_uuid,
            number=episode_number,
            aired=datetime.fromtimestamp(data.get('createdDate')),
            expiry=datetime.fromtimestamp(data.get('unpublishDate')) if data.get('unpublishDate') else None,
            rating=data.get('parentalRating')
        )
        return episode

    def _get_url(self, url, params=None, authentication=False):
        """ Makes a GET request for the specified URL.
        :type url: str
        :type params: dict
        :type authentication: bool
        :rtype str
        """
        if authentication:
            if not self._auth:
                raise Exception('Requested to authenticate, but no auth object was passed')
            response = self._session.get(url, params=params, headers={
                'authorization': self._auth.get_token(),
            })
        else:
            response = self._session.get(url, params=params)

        if response.status_code != 200:
            _LOGGER.error(response.text)
            raise Exception('Could not fetch data')

        return response.text