diff --git a/resources/lib/addon.py b/resources/lib/addon.py index 5d511367a..0e909e6e5 100644 --- a/resources/lib/addon.py +++ b/resources/lib/addon.py @@ -10,9 +10,8 @@ except ImportError: # Python 2 from urllib import unquote_plus -from kodiutils import localize, log_access, notification -from statichelper import from_unicode, to_unicode -from utils import refresh_caches +from kodiutils import localize, log_access, notification, refresh_caches +from utils import from_unicode, to_unicode plugin = Plugin() # pylint: disable=invalid-name diff --git a/resources/lib/addon_entry.py b/resources/lib/addon_entry.py index 8c4c152a2..e82cd32e5 100644 --- a/resources/lib/addon_entry.py +++ b/resources/lib/addon_entry.py @@ -3,8 +3,8 @@ ''' This is the actual VRT NU video plugin entry point ''' from __future__ import absolute_import, division, unicode_literals -import kodiutils import xbmcaddon +import kodiutils kodiutils.ADDON = xbmcaddon.Addon() diff --git a/resources/lib/apihelper.py b/resources/lib/apihelper.py index 7dcc1eade..749ea3061 100644 --- a/resources/lib/apihelper.py +++ b/resources/lib/apihelper.py @@ -13,12 +13,12 @@ from data import CHANNELS from helperobjects import TitleItem -from kodiutils import (delete_cached_thumbnail, get_global_setting, get_proxies, get_setting, - has_addon, localize, localize_from_data, log, url_for) +from kodiutils import (delete_cached_thumbnail, get_cache, get_cached_url_json, get_global_setting, + get_proxies, get_setting, get_url_json, has_addon, localize, localize_from_data, + log, ttl, update_cache, url_for) from metadata import Metadata -from statichelper import (add_https_method, convert_html_to_kodilabel, find_entry, from_unicode, play_url_to_id, - program_to_url, realpage, strip_newlines, url_to_program) -from utils import get_cache, get_cached_url_json, get_url_json, ttl, update_cache +from utils import (add_https_proto, html_to_kodilabel, find_entry, from_unicode, play_url_to_id, + program_to_url, realpage, strip_newlines, url_to_program) class ApiHelper: @@ -38,7 +38,7 @@ def __init__(self, _favorites, _resumepoints): def get_tvshows(self, category=None, channel=None, feature=None): ''' Get all TV shows for a given category, channel or feature, optionally filtered by favorites ''' - params = dict() + params = {} if category: params['facets[categories]'] = category @@ -59,7 +59,7 @@ def get_tvshows(self, category=None, channel=None, feature=None): querystring = '&'.join('{}={}'.format(key, value) for key, value in list(params.items())) suggest_url = self._VRTNU_SUGGEST_URL + '?' 
+ querystring - return get_cached_url_json(url=suggest_url, cache=cache_file, ttl=ttl('indirect')) + return get_cached_url_json(url=suggest_url, cache=cache_file, ttl=ttl('indirect'), fail=[]) def list_tvshows(self, category=None, channel=None, feature=None, use_favorites=False): ''' List all TV shows for a given category, channel or feature, optionally filtered by favorites ''' @@ -144,7 +144,7 @@ def __map_episodes(self, episodes, titletype=None, season=None, use_favorites=Fa highlight = episode.get('highlight') if highlight: for key in highlight: - episode[key] = convert_html_to_kodilabel(highlight.get(key)[0]) + episode[key] = html_to_kodilabel(highlight.get(key)[0]) list_item, sort, ascending = self.episode_to_listitem(episode, program, cache_file, titletype) episode_items.append(list_item) @@ -261,7 +261,7 @@ def get_upnext(self, info): # Get all episodes from current program and sort by program, seasonTitle and episodeNumber episodes = sorted(self.get_episodes(keywords=program), key=lambda k: (k.get('program'), k.get('seasonTitle'), k.get('episodeNumber'))) - upnext = dict() + upnext = {} for episode in episodes: if ep_id.get('whatson_id') == episode.get('whatsonId') or \ ep_id.get('video_id') == episode.get('videoId') or \ @@ -406,7 +406,7 @@ def get_episode_by_air_date(self, channel_name, start_date, end_date=None): schedule_date = onairdate schedule_datestr = schedule_date.isoformat().split('T')[0] url = 'https://www.vrt.be/bin/epg/schedule.%s.json' % schedule_datestr - schedule_json = get_url_json(url) + schedule_json = get_url_json(url, fail={}) episodes = schedule_json.get(channel.get('id'), []) if not episodes: return None @@ -562,15 +562,16 @@ def get_episodes(self, program=None, season=None, episodes=None, category=None, querystring = '&'.join('{}={}'.format(key, value) for key, value in list(params.items())) search_url = self._VRTNU_SEARCH_URL + '?' 
+ querystring.replace(' ', '%20') # Only encode spaces to minimize url length if cache_file: - search_json = get_cached_url_json(url=search_url, cache=cache_file, ttl=ttl('indirect')) + search_json = get_cached_url_json(url=search_url, cache=cache_file, ttl=ttl('indirect'), fail={}) else: - search_json = get_url_json(url=search_url) + search_json = get_url_json(url=search_url, fail={}) # Check for multiple seasons - seasons = None + seasons = [] if 'facets[seasonTitle]' not in unquote(search_url): - facets = search_json.get('facets', dict()).get('facets') - seasons = next((f.get('buckets', []) for f in facets if f.get('name') == 'seasons' and len(f.get('buckets', [])) > 1), None) + facets = search_json.get('facets', {}).get('facets') + if facets: + seasons = next((f.get('buckets', []) for f in facets if f.get('name') == 'seasons' and len(f.get('buckets', [])) > 1), None) episodes = search_json.get('results', [{}]) show_seasons = bool(season != 'allseasons') @@ -587,7 +588,7 @@ def get_episodes(self, program=None, season=None, episodes=None, category=None, for api_page in range(1, api_pages): api_page_url = search_url + '&from=' + str(api_page * api_page_size + 1) api_page_json = get_url_json(api_page_url) - if api_page_json: + if api_page_json is not None: episodes += api_page_json.get('results', [{}]) # Return episodes @@ -610,7 +611,7 @@ def list_channels(self, channels=None, live=True): continue context_menu = [] - art_dict = dict() + art_dict = {} # Try to use the white icons for thumbnails (used for icons as well) if has_addon('resource.images.studios.white'): @@ -681,7 +682,7 @@ def list_youtube(channels=None): continue context_menu = [] - art_dict = dict() + art_dict = {} # Try to use the white icons for thumbnails (used for icons as well) if has_addon('resource.images.studios.white'): @@ -818,7 +819,7 @@ def get_category_thumbnail(element): ''' Return a category thumbnail, if available ''' if get_setting('showfanart', 'true') == 'true': raw_thumbnail = element.find(class_='media').get('data-responsive-image', 'DefaultGenre.png') - return add_https_method(raw_thumbnail) + return add_https_proto(raw_thumbnail) return 'DefaultGenre.png' @staticmethod diff --git a/resources/lib/favorites.py b/resources/lib/favorites.py index 675d64a06..9d4c636fc 100644 --- a/resources/lib/favorites.py +++ b/resources/lib/favorites.py @@ -11,9 +11,9 @@ except ImportError: # Python 2 from urllib2 import build_opener, install_opener, ProxyHandler, Request, unquote, urlopen -from kodiutils import (container_refresh, get_proxies, get_setting, has_credentials, input_down, - localize, log, log_error, multiselect, notification, ok_dialog) -from utils import get_cache, get_url_json, invalidate_caches, update_cache +from kodiutils import (container_refresh, get_cache, get_proxies, get_setting, get_url_json, + has_credentials, input_down, invalidate_caches, localize, log, log_error, + multiselect, notification, ok_dialog, update_cache) class Favorites: @@ -45,7 +45,7 @@ def refresh(self, ttl=None): } favorites_url = 'https://video-user-data.vrt.be/favorites' favorites_json = get_url_json(url=favorites_url, cache='favorites.json', headers=headers) - if favorites_json: + if favorites_json is not None: self._favorites = favorites_json def update(self, program, title, value=True): @@ -70,9 +70,9 @@ def update(self, program, title, value=True): 'Referer': 'https://www.vrt.be/vrtnu', } - from statichelper import program_to_url - payload = dict(isFavorite=value, programUrl=program_to_url(program, 'short'), title=title) 
from json import dumps + from utils import program_to_url + payload = dict(isFavorite=value, programUrl=program_to_url(program, 'short'), title=title) data = dumps(payload).encode('utf-8') program_id = self.program_to_id(program) log(2, 'URL post: https://video-user-data.vrt.be/favorites/{program_id}', program_id=program_id) @@ -124,12 +124,12 @@ def titles(self): def programs(self): ''' Return all favorite programs ''' - from statichelper import url_to_program + from utils import url_to_program return [url_to_program(value.get('value').get('programUrl')) for value in list(self._favorites.values()) if value.get('value').get('isFavorite')] def manage(self): ''' Allow the user to unselect favorites to be removed from the listing ''' - from statichelper import url_to_program + from utils import url_to_program self.refresh(ttl=0) if not self._favorites: ok_dialog(heading=localize(30418), message=localize(30419)) # No favorites found diff --git a/resources/lib/kodiutils.py b/resources/lib/kodiutils.py index e8ac23e0f..bf5f4a887 100644 --- a/resources/lib/kodiutils.py +++ b/resources/lib/kodiutils.py @@ -4,10 +4,12 @@ from __future__ import absolute_import, division, unicode_literals from contextlib import contextmanager +from sys import version_info + import xbmc import xbmcaddon import xbmcplugin -from statichelper import from_unicode, to_unicode +from utils import from_unicode, to_unicode ADDON = xbmcaddon.Addon() @@ -750,3 +752,176 @@ def jsonrpc(**kwargs): if 'jsonrpc' not in kwargs: kwargs.update(jsonrpc='2.0') return loads(xbmc.executeJSONRPC(dumps(kwargs))) + + +def human_delta(seconds): + ''' Return a human-readable representation of the TTL ''' + from math import floor + days = int(floor(seconds / (24 * 60 * 60))) + seconds = seconds % (24 * 60 * 60) + hours = int(floor(seconds / (60 * 60))) + seconds = seconds % (60 * 60) + if days: + return '%d day%s and %d hour%s' % (days, 's' if days != 1 else '', hours, 's' if hours != 1 else '') + minutes = int(floor(seconds / 60)) + seconds = seconds % 60 + if hours: + return '%d hour%s and %d minute%s' % (hours, 's' if hours != 1 else '', minutes, 's' if minutes != 1 else '') + if minutes: + return '%d minute%s and %d second%s' % (minutes, 's' if minutes != 1 else '', seconds, 's' if seconds != 1 else '') + return '%d second%s' % (seconds, 's' if seconds != 1 else '') + + +def get_cache(path, ttl=None): # pylint: disable=redefined-outer-name + ''' Get the content from cache, if it's still fresh ''' + if get_setting('usehttpcaching', 'true') == 'false': + return None + + fullpath = get_cache_path() + path + if not exists(fullpath): + return None + + from time import localtime, mktime + mtime = stat_file(fullpath).st_mtime() + now = mktime(localtime()) + if ttl and now >= mtime + ttl: + return None + + if ttl is None: + log(3, "Cache '{path}' is forced from cache.", path=path) + else: + log(3, "Cache '{path}' is fresh, expires in {time}.", path=path, time=human_delta(mtime + ttl - now)) + with open_file(fullpath, 'r') as fdesc: + try: + return get_json_data(fdesc) + except ValueError as exc: # No JSON object could be decoded + log_error('JSON Error: {exc}', exc=exc) + return None + + +def update_cache(path, data): + ''' Update the cache, if necessary ''' + if get_setting('usehttpcaching', 'true') == 'false': + return + + from hashlib import md5 + from json import dump, dumps + fullpath = get_cache_path() + path + if exists(fullpath): + with open_file(fullpath) as fdesc: + cachefile = fdesc.read().encode('utf-8') + md5_cache = md5(cachefile) + 
else: + md5_cache = 0 + # Create cache directory if missing + if not exists(get_cache_path()): + mkdirs(get_cache_path()) + + # Avoid writes if possible (i.e. SD cards) + if md5_cache != md5(dumps(data).encode('utf-8')): + log(3, "Write cache '{path}'.", path=path) + with open_file(fullpath, 'w') as fdesc: + # dump(data, fdesc, encoding='utf-8') + dump(data, fdesc) + else: + # Update timestamp + from os import utime + log(3, "Cache '{path}' has not changed, updating mtime only.", path=path) + utime(path) + + +def ttl(kind='direct'): + ''' Return the HTTP cache ttl in seconds based on kind of relation ''' + if kind == 'direct': + return int(get_setting('httpcachettldirect', 5)) * 60 + if kind == 'indirect': + return int(get_setting('httpcachettlindirect', 60)) * 60 + return 5 * 60 + + +def get_json_data(response): + ''' Return json object from HTTP response ''' + from json import load, loads + if (3, 0, 0) <= version_info <= (3, 5, 9): # the JSON object must be str, not 'bytes' + json_data = loads(to_unicode(response.read())) + else: + json_data = load(response) + return json_data + + +def get_url_json(url, cache=None, headers=None, data=None, fail=None): + ''' Return HTTP data ''' + try: # Python 3 + from urllib.error import HTTPError + from urllib.parse import unquote + from urllib.request import urlopen, Request + except ImportError: # Python 2 + from urllib2 import HTTPError, unquote, urlopen, Request + + if headers is None: + headers = dict() + log(2, 'URL get: {url}', url=unquote(url)) + req = Request(url, headers=headers) + if data is not None: + req.data = data + try: + json_data = get_json_data(urlopen(req)) + except ValueError as exc: # No JSON object could be decoded + log_error('JSON Error: {exc}', exc=exc) + return fail + except HTTPError as exc: + if hasattr(req, 'selector'): # Python 3.4+ + url_length = len(req.selector) + else: # Python 2.7 + url_length = len(req.get_selector()) + if exc.code == 413 and url_length > 8192: + ok_dialog(heading='HTTP Error 413', message=localize(30967)) + log_error('HTTP Error 413: Exceeded maximum url length: ' + 'VRT Search API url has a length of {length} characters.', length=url_length) + return fail + if exc.code == 400 and 7600 <= url_length <= 8192: + ok_dialog(heading='HTTP Error 400', message=localize(30967)) + log_error('HTTP Error 400: Probably exceeded maximum url length: ' + 'VRT Search API url has a length of {length} characters.', length=url_length) + return fail + try: + return get_json_data(exc) + except ValueError as exc: # No JSON object could be decoded + log_error('JSON Error: {exc}', exc=exc) + return fail + raise + else: + if cache: + update_cache(cache, json_data) + return json_data + + +def get_cached_url_json(url, cache, headers=None, ttl=None, fail=None): # pylint: disable=redefined-outer-name + ''' Return data from cache, if any, else make an HTTP request ''' + # Get api data from cache if it is fresh + json_data = get_cache(cache, ttl=ttl) + if json_data is not None: + return json_data + return get_url_json(url, cache=cache, headers=headers, fail=fail) + + +def refresh_caches(cache_file=None): + ''' Invalidate the needed caches and refresh container ''' + files = ['favorites.json', 'oneoff.json', 'resume_points.json'] + if cache_file and cache_file not in files: + files.append(cache_file) + invalidate_caches(*files) + container_refresh() + notification(message=localize(30981)) + + +def invalidate_caches(*caches): + ''' Invalidate multiple cache files ''' + import fnmatch + _, files = listdir(get_cache_path()) + # 
Invalidate caches related to menu list refreshes + removes = set() + for expr in caches: + removes.update(fnmatch.filter(files, expr)) + for filename in removes: + delete(get_cache_path() + filename) diff --git a/resources/lib/metadata.py b/resources/lib/metadata.py index e30ed8c83..ac88dc9aa 100644 --- a/resources/lib/metadata.py +++ b/resources/lib/metadata.py @@ -11,9 +11,10 @@ except ImportError: # Python 2 from urllib import quote_plus -import statichelper from data import CHANNELS, SECONDS_MARGIN from kodiutils import get_setting, localize, localize_datelong, log, url_for +from utils import (add_https_proto, capitalize, find_entry, from_unicode, html_to_kodilabel, + reformat_url, shorten_link, to_unicode, unescape, url_to_episode) class Metadata: @@ -73,24 +74,24 @@ def get_context_menu(self, api_data, program, cache_file): if assetpath is not None: # We need to ensure forward slashes are quoted - program_title = statichelper.to_unicode(quote_plus(statichelper.from_unicode(program_title))) - url = statichelper.url_to_episode(api_data.get('url', '')) + program_title = to_unicode(quote_plus(from_unicode(program_title))) + url = url_to_episode(api_data.get('url', '')) asset_id = self._resumepoints.assetpath_to_id(assetpath) if self._resumepoints.is_watchlater(asset_id): - extras = dict() + extras = {} # If we are in a watchlater menu, move cursor down before removing a favorite if plugin.path.startswith('/resumepoints/watchlater'): extras = dict(move_down=True) # Unwatch context menu context_menu.append(( - statichelper.capitalize(localize(30402)), + capitalize(localize(30402)), 'RunPlugin(%s)' % url_for('unwatchlater', asset_id=asset_id, title=program_title, url=url, **extras) )) watchlater_marker = '[COLOR yellow]ᶫ[/COLOR]' else: # Watch context menu context_menu.append(( - statichelper.capitalize(localize(30401)), + capitalize(localize(30401)), 'RunPlugin(%s)' % url_for('watchlater', asset_id=asset_id, title=program_title, url=url) )) @@ -117,9 +118,9 @@ def get_context_menu(self, api_data, program, cache_file): follow_enabled = bool(api_data.get('url')) if follow_enabled: - program_title = statichelper.to_unicode(quote_plus(statichelper.from_unicode(program_title))) # We need to ensure forward slashes are quoted + program_title = to_unicode(quote_plus(from_unicode(program_title))) # We need to ensure forward slashes are quoted if self._favorites.is_favorite(program): - extras = dict() + extras = {} # If we are in a favorites menu, move cursor down before removing a favorite if plugin.path.startswith('/favorites'): extras = dict(move_down=True) @@ -178,17 +179,17 @@ def get_playcount(self, api_data): def get_properties(self, api_data): ''' Get properties from single item json api data ''' - properties = dict() + properties = {} # Only fill in properties when using VRT NU resumepoints because setting resumetime/totaltime breaks standard Kodi watched status if self._resumepoints.is_activated(): assetpath = self.get_assetpath(api_data) if assetpath: # We need to ensure forward slashes are quoted - program_title = statichelper.to_unicode(quote_plus(statichelper.from_unicode(api_data.get('program')))) + program_title = to_unicode(quote_plus(from_unicode(api_data.get('program')))) asset_id = self._resumepoints.assetpath_to_id(assetpath) - url = statichelper.reformat_url(api_data.get('url', ''), 'medium') + url = reformat_url(api_data.get('url', ''), 'medium') properties.update(asset_id=asset_id, url=url, title=program_title) position = self._resumepoints.get_position(asset_id) @@ -263,7 
+264,7 @@ def get_plot(self, api_data, season=False, date=None): # VRT NU Search API if api_data.get('type') == 'episode': if season: - plot = statichelper.convert_html_to_kodilabel(api_data.get('programDescription')) + plot = html_to_kodilabel(api_data.get('programDescription')) # Add additional metadata to plot plot_meta = '' @@ -304,20 +305,20 @@ def get_plot(self, api_data, season=False, date=None): plot_meta += ' ' plot_meta += localize(30201) # Geo-blocked - plot = statichelper.convert_html_to_kodilabel(api_data.get('description')) + plot = html_to_kodilabel(api_data.get('description')) if plot_meta: plot = '%s\n\n%s' % (plot_meta, plot) - permalink = statichelper.shorten_link(api_data.get('permalink')) or api_data.get('externalPermalink') + permalink = shorten_link(api_data.get('permalink')) or api_data.get('externalPermalink') if permalink and get_setting('showpermalink', 'false') == 'true': plot = '%s\n\n[COLOR yellow]%s[/COLOR]' % (plot, permalink) return plot # VRT NU Suggest API if api_data.get('type') == 'program': - plot = statichelper.unescape(api_data.get('description', '???')) - # permalink = statichelper.shorten_link(api_data.get('programUrl')) + plot = unescape(api_data.get('description', '???')) + # permalink = shorten_link(api_data.get('programUrl')) # if permalink and get_setting('showpermalink', 'false') == 'true': # plot = '%s\n\n[COLOR yellow]%s[/COLOR]' % (plot, permalink) return plot @@ -342,11 +343,11 @@ def get_plotoutline(api_data, season=False): # VRT NU Search API if api_data.get('type') == 'episode': if season: - plotoutline = statichelper.convert_html_to_kodilabel(api_data.get('programDescription')) + plotoutline = html_to_kodilabel(api_data.get('programDescription')) return plotoutline - if api_data.get('displayOptions', dict()).get('showShortDescription'): - plotoutline = statichelper.convert_html_to_kodilabel(api_data.get('shortDescription')) + if api_data.get('displayOptions', {}).get('showShortDescription'): + plotoutline = html_to_kodilabel(api_data.get('shortDescription')) return plotoutline plotoutline = api_data.get('subtitle') @@ -510,24 +511,24 @@ def get_year(api_data): @staticmethod def get_art(api_data, season=False): ''' Get art dict from single item json api data ''' - art_dict = dict() + art_dict = {} # VRT NU Search API if api_data.get('type') == 'episode': if season: if get_setting('showfanart', 'true') == 'true': - art_dict['fanart'] = statichelper.add_https_method(api_data.get('programImageUrl', 'DefaultSets.png')) + art_dict['fanart'] = add_https_proto(api_data.get('programImageUrl', 'DefaultSets.png')) art_dict['banner'] = art_dict.get('fanart') if season != 'allseasons': - art_dict['thumb'] = statichelper.add_https_method(api_data.get('videoThumbnailUrl', art_dict.get('fanart'))) + art_dict['thumb'] = add_https_proto(api_data.get('videoThumbnailUrl', art_dict.get('fanart'))) else: art_dict['thumb'] = art_dict.get('fanart') else: art_dict['thumb'] = 'DefaultSets.png' else: if get_setting('showfanart', 'true') == 'true': - art_dict['thumb'] = statichelper.add_https_method(api_data.get('videoThumbnailUrl', 'DefaultAddonVideo.png')) - art_dict['fanart'] = statichelper.add_https_method(api_data.get('programImageUrl', art_dict.get('thumb'))) + art_dict['thumb'] = add_https_proto(api_data.get('videoThumbnailUrl', 'DefaultAddonVideo.png')) + art_dict['fanart'] = add_https_proto(api_data.get('programImageUrl', art_dict.get('thumb'))) art_dict['banner'] = art_dict.get('fanart') else: art_dict['thumb'] = 'DefaultAddonVideo.png' @@ -537,7 
+538,7 @@ def get_art(api_data, season=False): # VRT NU Suggest API if api_data.get('type') == 'program': if get_setting('showfanart', 'true') == 'true': - art_dict['thumb'] = statichelper.add_https_method(api_data.get('thumbnail', 'DefaultAddonVideo.png')) + art_dict['thumb'] = add_https_proto(api_data.get('thumbnail', 'DefaultAddonVideo.png')) art_dict['fanart'] = art_dict.get('thumb') art_dict['banner'] = art_dict.get('fanart') else: @@ -610,7 +611,7 @@ def get_info_labels(self, api_data, season=False, date=None, channel=None): return info_labels # Not Found - return dict() + return {} @staticmethod def get_label(api_data, titletype=None, return_sort=False): @@ -618,7 +619,7 @@ def get_label(api_data, titletype=None, return_sort=False): # VRT NU Search API if api_data.get('type') == 'episode': - display_options = api_data.get('displayOptions', dict()) + display_options = api_data.get('displayOptions', {}) # NOTE: Hard-code showing seasons because it is unreliable (i.e; Thuis or Down the Road have it disabled) display_options['showSeason'] = True @@ -628,11 +629,11 @@ def get_label(api_data, titletype=None, return_sort=False): titletype = program_type if display_options.get('showEpisodeTitle'): - label = statichelper.convert_html_to_kodilabel(api_data.get('title') or api_data.get('shortDescription')) + label = html_to_kodilabel(api_data.get('title') or api_data.get('shortDescription')) elif display_options.get('showShortDescription'): - label = statichelper.convert_html_to_kodilabel(api_data.get('shortDescription') or api_data.get('title')) + label = html_to_kodilabel(api_data.get('shortDescription') or api_data.get('title')) else: - label = statichelper.convert_html_to_kodilabel(api_data.get('title') or api_data.get('shortDescription')) + label = html_to_kodilabel(api_data.get('title') or api_data.get('shortDescription')) sort = 'unsorted' ascending = True @@ -715,7 +716,7 @@ def get_tag(api_data): # VRT NU Search API if api_data.get('type') == 'episode': from data import CATEGORIES - return sorted([localize(statichelper.find_entry(CATEGORIES, 'id', category).get('msgctxt')) + return sorted([localize(find_entry(CATEGORIES, 'id', category).get('msgctxt')) for category in api_data.get('categories')]) # VRT NU Suggest API diff --git a/resources/lib/playerinfo.py b/resources/lib/playerinfo.py index b61d1c004..8dfa6a077 100644 --- a/resources/lib/playerinfo.py +++ b/resources/lib/playerinfo.py @@ -5,12 +5,13 @@ from __future__ import absolute_import, division, unicode_literals from threading import Event, Thread from xbmc import getInfoLabel, Player, PlayList + from apihelper import ApiHelper from data import SECONDS_MARGIN from favorites import Favorites -from resumepoints import ResumePoints -from statichelper import play_url_to_id, to_unicode, url_to_episode from kodiutils import addon_id, container_reload, get_advanced_setting, get_setting, has_addon, log, notify +from resumepoints import ResumePoints +from utils import play_url_to_id, to_unicode, url_to_episode class PlayerInfo(Player): @@ -55,6 +56,10 @@ def onPlayBackStarted(self): # pylint: disable=invalid-name # Get episode data episode = self.apihelper.get_single_episode_data(video_id=ep_id.get('video_id'), whatson_id=ep_id.get('whatson_id'), video_url=ep_id.get('video_url')) + # This may be a live stream? 
+ if episode is None: + return + self.asset_id = self.resumepoints.assetpath_to_id(episode.get('assetPath')) self.title = episode.get('program') self.url = url_to_episode(episode.get('url', '')) diff --git a/resources/lib/resumepoints.py b/resources/lib/resumepoints.py index d8a6b9b9e..0ab6534f9 100644 --- a/resources/lib/resumepoints.py +++ b/resources/lib/resumepoints.py @@ -12,9 +12,9 @@ from urllib2 import build_opener, install_opener, ProxyHandler, Request, HTTPError, urlopen from data import SECONDS_MARGIN -from kodiutils import (container_refresh, get_proxies, get_setting, has_credentials, input_down, - localize, log, log_error, notification) -from utils import get_cache, get_url_json, invalidate_caches, update_cache +from kodiutils import (container_refresh, get_cache, get_proxies, get_setting, get_url_json, + has_credentials, input_down, invalidate_caches, localize, log, log_error, + notification, update_cache) class ResumePoints: @@ -46,7 +46,7 @@ def refresh(self, ttl=None): } resumepoints_url = 'https://video-user-data.vrt.be/resume_points' resumepoints_json = get_url_json(url=resumepoints_url, cache='resume_points.json', headers=headers) - if resumepoints_json: + if resumepoints_json is not None: self._resumepoints = resumepoints_json def update(self, asset_id, title, url, watch_later=None, position=None, total=None, whatson_id=None, asynchronous=False): @@ -69,7 +69,7 @@ def update(self, asset_id, title, url, watch_later=None, position=None, total=No # resumepoint is not changed, nothing to do return True - from statichelper import reformat_url + from utils import reformat_url url = reformat_url(url, 'short') if asset_id in self._resumepoints: @@ -171,7 +171,7 @@ def get_total(self, asset_id): def get_url(self, asset_id, url_type='medium'): ''' Return the stored url a video ''' - from statichelper import reformat_url + from utils import reformat_url return reformat_url(self._resumepoints.get(asset_id, {}).get('value', {}).get('url'), url_type) @staticmethod diff --git a/resources/lib/search.py b/resources/lib/search.py index abb77ebaf..6510de7e2 100644 --- a/resources/lib/search.py +++ b/resources/lib/search.py @@ -5,10 +5,10 @@ from __future__ import absolute_import, division, unicode_literals from favorites import Favorites +from kodiutils import (addon_profile, container_refresh, end_of_directory, get_json_data, + get_search_string, get_setting, localize, log_error, ok_dialog, open_file, + show_listing, ttl, url_for) from resumepoints import ResumePoints -from kodiutils import (addon_profile, container_refresh, end_of_directory, get_search_string, - get_setting, localize, log_error, ok_dialog, open_file, show_listing, url_for) -from utils import ttl class Search: @@ -25,12 +25,10 @@ def read_history(self): from json import load with open_file(self._search_history, 'r') as fdesc: try: - history = load(fdesc) - except (TypeError, ValueError) as exc: # No JSON object could be decoded - fdesc.seek(0, 0) - log_error('{exc}\nDATA: {data}', exc=exc, data=fdesc.read()) - history = [] - return history + return get_json_data(fdesc) + except ValueError as exc: # No JSON object could be decoded + log_error('JSON Error: {exc}', exc=exc) + return [] def write_history(self, history): ''' Write search history to disk ''' @@ -84,12 +82,12 @@ def search(self, keywords=None, page=None): end_of_directory() return - from statichelper import realpage + from apihelper import ApiHelper + from utils import realpage page = realpage(page) self.add(keywords) - from apihelper import ApiHelper 
search_items, sort, ascending, content = ApiHelper(self._favorites, self._resumepoints).list_search(keywords, page=page) if not search_items: ok_dialog(heading=localize(30135), message=localize(30136, keywords=keywords)) diff --git a/resources/lib/service.py b/resources/lib/service.py index 1be125abb..02e118790 100644 --- a/resources/lib/service.py +++ b/resources/lib/service.py @@ -6,12 +6,11 @@ from xbmc import Monitor from apihelper import ApiHelper from favorites import Favorites -from kodiutils import container_refresh, log +from kodiutils import container_refresh, invalidate_caches, log from playerinfo import PlayerInfo from resumepoints import ResumePoints -from statichelper import to_unicode from tokenresolver import TokenResolver -from utils import invalidate_caches +from utils import to_unicode class VrtMonitor(Monitor): diff --git a/resources/lib/statichelper.py b/resources/lib/statichelper.py deleted file mode 100644 index d1df5144a..000000000 --- a/resources/lib/statichelper.py +++ /dev/null @@ -1,210 +0,0 @@ -# -*- coding: utf-8 -*- -# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) -''' Implements static functions used elsewhere in the add-on ''' - -from __future__ import absolute_import, division, unicode_literals -import re - -try: # Python 3 - from html import unescape -except ImportError: # Python 2 - from HTMLParser import HTMLParser - - def unescape(string): - ''' Expose HTMLParser's unescape ''' - return HTMLParser().unescape(string) - -HTML_MAPPING = [ - (re.compile(r'<(/?)i(|\s[^>]+)>', re.I), '[\\1I]'), - (re.compile(r'<(/?)b(|\s[^>]+)>', re.I), '[\\1B]'), - (re.compile(r']+)>', re.I), '[B][COLOR yellow]'), - (re.compile(r'', re.I), '[/COLOR][/B]'), - (re.compile(r'
  • ', re.I), '- '), - (re.compile(r']+)>', re.I), ''), - (re.compile('
    \n{0,1}', re.I), ' '), # This appears to be specific formatting for VRT NU, but unwanted by us - (re.compile('( \n){2,}', re.I), '\n'), # Remove repeating non-blocking spaced newlines -] - - -def convert_html_to_kodilabel(text): - ''' Convert VRT HTML content into Kodit formatted text ''' - for key, val in HTML_MAPPING: - text = key.sub(val, text) - return unescape(text).strip() - - -def reformat_url(url, url_type): - ''' Convert a url ''' - # Clean URLs with a hash in it - pos = url.find('#') - if pos >= 0: - url = url[:pos] - # long url - if url_type == 'long': - if url.startswith('/vrtnu/a-z'): - return 'https://www.vrt.be' + url - if url.startswith('//www.vrt.be'): - return 'https:' + url - return url - # medium url - if url_type == 'medium': - if url.startswith('https:'): - return url.replace('https:', '') - if url.startswith('/vrtnu/a-z'): - return '//www.vrt.be' + url - return url - # short url - if url_type == 'short': - if url.startswith('https://www.vrt.be'): - return url.replace('https://www.vrt.be', '') - if url.startswith('//www.vrt.be'): - return url.replace('//www.vrt.be', '') - return url - - -def program_to_url(program, url_type): - ''' Convert a program url component (e.g. de-campus-cup) to: - - a short programUrl (e.g. /vrtnu/a-z/de-campus-cup/) - - a medium programUrl (e.g. //www.vrt.be/vrtnu/a-z/de-campus-cup/) - - a long programUrl (e.g. https://www.vrt.be/vrtnu/a-z/de-campus-cup/) - ''' - url = None - if program: - # short programUrl - if url_type == 'short': - url = '/vrtnu/a-z/' + program + '/' - # medium programUrl - elif url_type == 'medium': - url = '//www.vrt.be/vrtnu/a-z/' + program + '/' - # long programUrl - elif url_type == 'long': - url = 'https://www.vrt.be/vrtnu/a-z/' + program + '/' - return url - - -def url_to_program(url): - ''' Convert - - a targetUrl (e.g. //www.vrt.be/vrtnu/a-z/de-campus-cup.relevant/), - - a short programUrl (e.g. /vrtnu/a-z/de-campus-cup/) or - - a medium programUrl (e.g. //www.vrt.be/vrtnu/a-z/de-campus-cup/) - - a long programUrl (e.g. https://www.vrt.be/vrtnu/a-z/de-campus-cup/) - to a program url component (e.g. de-campus-cup). - Any season or episode information is removed as well. - ''' - program = None - if url.startswith('https://www.vrt.be/vrtnu/a-z/'): - # long programUrl or targetUrl - program = url.split('/')[5] - elif url.startswith('//www.vrt.be/vrtnu/a-z/'): - # medium programUrl or targetUrl - program = url.split('/')[5] - elif url.startswith('/vrtnu/a-z/'): - # short programUrl - program = url.split('/')[3] - if program.endswith('.relevant'): - # targetUrl - program = program.replace('.relevant', '') - return program - - -def url_to_episode(url): - ''' Convert a targetUrl (e.g. //www.vrt.be/vrtnu/a-z/buck/1/buck-s1a32/) to - a short episode url (/vrtnu/a-z/buck/1/buck-s1a32/) - ''' - if url.startswith('https://www.vrt.be/vrtnu/a-z/'): - # long episode url - return url.replace('https://www.vrt.be/vrtnu/a-z/', '/vrtnu/a-z/') - if url.startswith('//www.vrt.be/vrtnu/a-z/'): - # medium episode url - return url.replace('//www.vrt.be/vrtnu/a-z/', '/vrtnu/a-z/') - return None - - -def video_to_api_url(url): - ''' Convert a full VRT NU url (e.g. https://www.vrt.be/vrtnu/a-z/de-ideale-wereld/2019-nj/de-ideale-wereld-d20191010/) - to a VRT Search API url (e.g. 
//www.vrt.be/vrtnu/a-z/de-ideale-wereld/2019-nj/de-ideale-wereld-d20191010/) - ''' - if url.startswith('https:'): - url = url.replace('https:', '') - # NOTE: add a trailing slash again because routing plugin removes it and VRT NU Search API needs it - if not url.endswith('/'): - url += '/' - return url - - -def play_url_to_id(url): - ''' Convert a plugin:// url (e.g. plugin://plugin.video.vrt.nu/play/id/vid-5b12c0f6-b8fe-426f-a600-557f501f3be9/pbs-pub-7e2764cf-a8c0-4e78-9cbc-46d39381c237) - to an id dictionary (e.g. {'video_id': 'vid-5b12c0f6-b8fe-426f-a600-557f501f3be9'} - ''' - play_id = dict() - if 'play/id/' in url: - play_id['video_id'] = url.split('play/id/')[1].split('/')[0] - elif 'play/upnext/' in url: - play_id['video_id'] = url.split('play/upnext/')[1] - elif '/play/url/' in url: - play_id['video_url'] = video_to_api_url(url.split('play/url/')[1]) - return play_id - - -def to_unicode(text, encoding='utf-8', errors='strict'): - ''' Force text to unicode ''' - if isinstance(text, bytes): - return text.decode(encoding, errors=errors) - return text - - -def from_unicode(text, encoding='utf-8', errors='strict'): - ''' Force unicode to text ''' - import sys - if sys.version_info.major == 2 and isinstance(text, unicode): # noqa: F821; pylint: disable=undefined-variable - return text.encode(encoding, errors) - return text - - -def shorten_link(url): - ''' Create a link that is as short as possible ''' - if url is None: - return None - if url.startswith('https://www.vrt.be/vrtnu/'): - # As used in episode search result 'permalink' - return url.replace('https://www.vrt.be/vrtnu/', 'vrtnu.be/') - if url.startswith('//www.vrt.be/vrtnu/'): - # As used in program a-z listing 'targetUrl' - return url.replace('//www.vrt.be/vrtnu/', 'vrtnu.be/') - return url - - -def strip_newlines(text): - ''' Strip newlines and whitespaces ''' - return text.replace('\n', '').strip() - - -def add_https_method(url): - ''' Add HTTPS protocol to URL that lacks it ''' - if url.startswith('//'): - return 'https:' + url - if url.startswith('/'): - return 'https://www.vrt.be' + url - return url - - -def realpage(page): - ''' Convert a URL parameter page value into an integer ''' - try: - page = int(page) - except ValueError: - return 1 - if page < 1: - return 1 - return page - - -def find_entry(dlist, key, value, default=None): - ''' Find (the first) dictionary in a list where key matches value ''' - return next((entry for entry in dlist if entry.get(key) == value), default) - - -def capitalize(string): - ''' Ensure the first character is uppercase ''' - string = string.strip() - return string[0].upper() + string[1:] diff --git a/resources/lib/streamservice.py b/resources/lib/streamservice.py index 2a08b7157..98e72c42e 100644 --- a/resources/lib/streamservice.py +++ b/resources/lib/streamservice.py @@ -14,10 +14,9 @@ from helperobjects import ApiData, StreamURLS from kodiutils import (addon_profile, can_play_drm, exists, end_of_directory, get_max_bandwidth, - get_proxies, get_setting, has_inputstream_adaptive, kodi_version, - localize, log, log_error, mkdir, ok_dialog, open_settings, supports_drm) -from statichelper import to_unicode -from utils import get_url_json + get_proxies, get_setting, get_url_json, has_inputstream_adaptive, + kodi_version, localize, log, log_error, mkdir, ok_dialog, open_settings, + supports_drm, to_unicode) class StreamService: @@ -41,9 +40,8 @@ def __init__(self, _tokenresolver): def _get_vualto_license_url(self): ''' Get Widevine license URL from Vualto API ''' - json_data = 
get_url_json(url=self._VUPLAY_API_URL) - if json_data: - self._vualto_license_url = json_data.get('drm_providers', dict()).get('widevine', dict()).get('la_url') + json_data = get_url_json(url=self._VUPLAY_API_URL, fail={}) + self._vualto_license_url = json_data.get('drm_providers', {}).get('widevine', {}).get('la_url') @staticmethod def _create_settings_dir(): @@ -148,7 +146,6 @@ def _webscrape_api_data(self, video_url): def _get_stream_json(self, api_data, roaming=False): ''' Get JSON with stream details from VRT API ''' - json_data = None token_url = api_data.media_api_url + '/tokens' if api_data.is_live_stream: playertoken = self._tokenresolver.get_playertoken(token_url, token_variant='live', roaming=roaming) @@ -160,8 +157,7 @@ def _get_stream_json(self, api_data, roaming=False): return None api_url = api_data.media_api_url + '/videos/' + api_data.publication_id + \ api_data.video_id + '?vrtPlayerToken=' + playertoken + '&client=' + api_data.client - json_data = get_url_json(url=api_url) - return json_data + return get_url_json(url=api_url, fail={}) @staticmethod def _fix_virtualsubclip(manifest_url, duration): diff --git a/resources/lib/tokenresolver.py b/resources/lib/tokenresolver.py index 73bee0e32..971d1912f 100644 --- a/resources/lib/tokenresolver.py +++ b/resources/lib/tokenresolver.py @@ -3,11 +3,11 @@ ''' This module contains all functionality for VRT NU API authentication. ''' from __future__ import absolute_import, division, unicode_literals -from statichelper import from_unicode -from kodiutils import (addon_profile, delete, exists, get_proxies, get_setting, get_tokens_path, - has_credentials, listdir, localize, log, log_error, mkdir, notification, - ok_dialog, open_file, open_settings, set_setting) -from utils import get_url_json, invalidate_caches +from kodiutils import (addon_profile, delete, exists, get_json_data, get_proxies, get_setting, + get_tokens_path, get_url_json, has_credentials, invalidate_caches, listdir, + localize, log, log_error, mkdir, notification, ok_dialog, open_file, + open_settings, set_setting) +from utils import from_unicode try: # Python 3 import http.cookiejar as cookielib @@ -97,13 +97,11 @@ def _get_cached_token(self, token_name, token_variant=None): if not exists(path): return None - from json import load with open_file(path) as fdesc: try: - token = load(fdesc) - except (TypeError, ValueError) as exc: # No JSON object could be decoded - fdesc.seek(0, 0) - log_error('{exc}\nDATA: {data}', exc=exc, data=fdesc.read()) + token = get_json_data(fdesc) + except ValueError as exc: # No JSON object could be decoded + log_error('JSON Error: {exc}', exc=exc) return None from datetime import datetime @@ -134,7 +132,7 @@ def _set_cached_token(self, token, token_variant=None): def _get_new_playertoken(self, token_url, headers, token_variant=None): ''' Get new playertoken from VRT Token API ''' playertoken = get_url_json(url=token_url, headers=headers, data=b'') - if not playertoken: + if playertoken is None: return None self._set_cached_token(playertoken, token_variant) @@ -185,14 +183,11 @@ def _get_login_json(self): targetEnv='jssdk', ) data = urlencode(payload).encode() - json_data = get_url_json(self._LOGIN_URL, data=data) - if not json_data: - return dict() - return json_data + return get_url_json(self._LOGIN_URL, data=data, fail={}) def _get_new_xvrttoken(self, login_json, token_variant=None): ''' Get new X-VRT-Token from VRT NU website ''' - login_token = login_json.get('sessionInfo', dict()).get('login_token') + login_token = 
login_json.get('sessionInfo', {}).get('login_token') if not login_token: return None diff --git a/resources/lib/tvguide.py b/resources/lib/tvguide.py index fd8d325e9..a784dcb0e 100644 --- a/resources/lib/tvguide.py +++ b/resources/lib/tvguide.py @@ -16,11 +16,11 @@ from data import CHANNELS, RELATIVE_DATES from favorites import Favorites from helperobjects import TitleItem +from kodiutils import (get_cached_url_json, get_proxies, get_url_json, has_addon, localize, + localize_datelong, show_listing, ttl, url_for) from metadata import Metadata from resumepoints import ResumePoints -from statichelper import find_entry -from kodiutils import get_proxies, has_addon, localize, localize_datelong, show_listing, url_for -from utils import get_cached_url_json, get_url_json, ttl +from utils import add_https_proto, find_entry, url_to_program class TVGuide: @@ -156,11 +156,9 @@ def get_episode_items(self, date, channel): cache_file = 'schedule.%s.json' % date if date in ('today', 'yesterday', 'tomorrow'): - schedule = get_cached_url_json(url=epg_url, cache=cache_file, ttl=ttl('indirect')) + schedule = get_cached_url_json(url=epg_url, cache=cache_file, ttl=ttl('indirect'), fail={}) else: - schedule = get_url_json(url=epg_url) - if not schedule: - return [] + schedule = get_url_json(url=epg_url, fail={}) entry = find_entry(CHANNELS, 'name', channel) if entry: @@ -175,8 +173,7 @@ def get_episode_items(self, date, channel): context_menu = [] path = None if episode.get('url'): - from statichelper import add_https_method, url_to_program - video_url = add_https_method(episode.get('url')) + video_url = add_https_proto(episode.get('url')) path = url_for('play_url', video_url=video_url) program = url_to_program(episode.get('url')) context_menu, favorite_marker, watchlater_marker = self._metadata.get_context_menu(episode, program, cache_file) @@ -203,13 +200,12 @@ def playing_now(self, channel): if epg.hour < 6: epg += timedelta(days=-1) - epg_url = epg.strftime(self.VRT_TVGUIDE) - schedule = get_cached_url_json(url=epg_url, cache='schedule.today.json', ttl=ttl('indirect')) - entry = find_entry(CHANNELS, 'name', channel) if not entry: return '' + epg_url = epg.strftime(self.VRT_TVGUIDE) + schedule = get_cached_url_json(url=epg_url, cache='schedule.today.json', ttl=ttl('indirect'), fail={}) episodes = iter(schedule.get(entry.get('id'), [])) while True: @@ -236,13 +232,12 @@ def live_description(self, channel): if epg.hour < 6: epg += timedelta(days=-1) - epg_url = epg.strftime(self.VRT_TVGUIDE) - schedule = get_cached_url_json(url=epg_url, cache='schedule.today.json', ttl=ttl('indirect')) - entry = find_entry(CHANNELS, 'name', channel) if not entry: return '' + epg_url = epg.strftime(self.VRT_TVGUIDE) + schedule = get_cached_url_json(url=epg_url, cache='schedule.today.json', ttl=ttl('indirect'), fail={}) episodes = iter(schedule.get(entry.get('id'), [])) description = '' diff --git a/resources/lib/utils.py b/resources/lib/utils.py index a8f475570..e09cf0de7 100644 --- a/resources/lib/utils.py +++ b/resources/lib/utils.py @@ -1,183 +1,210 @@ # -*- coding: utf-8 -*- # GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) -''' All functionality that requires Kodi imports ''' +''' Implements static functions used elsewhere in the add-on ''' from __future__ import absolute_import, division, unicode_literals -from sys import version_info +import re try: # Python 3 - from urllib.error import HTTPError - from urllib.parse import unquote - from urllib.request import urlopen, Request + 
from html import unescape except ImportError: # Python 2 - from urllib2 import HTTPError, unquote, urlopen, Request - -from kodiutils import (container_refresh, delete, exists, get_cache_path, get_setting, listdir, - localize, log, log_error, mkdirs, notification, ok_dialog, open_file, stat_file) -from statichelper import to_unicode - - -def human_delta(seconds): - ''' Return a human-readable representation of the TTL ''' - from math import floor - days = int(floor(seconds / (24 * 60 * 60))) - seconds = seconds % (24 * 60 * 60) - hours = int(floor(seconds / (60 * 60))) - seconds = seconds % (60 * 60) - if days: - return '%d day%s and %d hour%s' % (days, 's' if days != 1 else '', hours, 's' if hours != 1 else '') - minutes = int(floor(seconds / 60)) - seconds = seconds % 60 - if hours: - return '%d hour%s and %d minute%s' % (hours, 's' if hours != 1 else '', minutes, 's' if minutes != 1 else '') - if minutes: - return '%d minute%s and %d second%s' % (minutes, 's' if minutes != 1 else '', seconds, 's' if seconds != 1 else '') - return '%d second%s' % (seconds, 's' if seconds != 1 else '') - - -def get_cache(path, ttl=None): # pylint: disable=redefined-outer-name - ''' Get the content from cache, if it's still fresh ''' - if get_setting('usehttpcaching', 'true') == 'false': + from HTMLParser import HTMLParser + + def unescape(string): + ''' Expose HTMLParser's unescape ''' + return HTMLParser().unescape(string) + +HTML_MAPPING = [ + (re.compile(r'<(/?)i(|\s[^>]+)>', re.I), '[\\1I]'), + (re.compile(r'<(/?)b(|\s[^>]+)>', re.I), '[\\1B]'), + (re.compile(r']+)>', re.I), '[B][COLOR yellow]'), + (re.compile(r'', re.I), '[/COLOR][/B]'), + (re.compile(r'
  • ', re.I), '- '), + (re.compile(r']+)>', re.I), ''), + (re.compile('
    \n{0,1}', re.I), ' '), # This appears to be specific formatting for VRT NU, but unwanted by us + (re.compile('( \n){2,}', re.I), '\n'), # Remove repeating non-blocking spaced newlines +] + + +def to_unicode(text, encoding='utf-8', errors='strict'): + ''' Force text to unicode ''' + if isinstance(text, bytes): + return text.decode(encoding, errors=errors) + return text + + +def from_unicode(text, encoding='utf-8', errors='strict'): + ''' Force unicode to text ''' + import sys + if sys.version_info.major == 2 and isinstance(text, unicode): # noqa: F821; pylint: disable=undefined-variable + return text.encode(encoding, errors) + return text + + +def capitalize(string): + ''' Ensure the first character is uppercase ''' + string = string.strip() + return string[0].upper() + string[1:] + + +def strip_newlines(text): + ''' Strip newlines and whitespaces ''' + return text.replace('\n', '').strip() + + +def html_to_kodilabel(text): + ''' Convert VRT HTML content into Kodit formatted text ''' + for key, val in HTML_MAPPING: + text = key.sub(val, text) + return unescape(text).strip() + + +def reformat_url(url, url_type): + ''' Convert a url ''' + # Clean URLs with a hash in it + pos = url.find('#') + if pos >= 0: + url = url[:pos] + # long url + if url_type == 'long': + if url.startswith('/vrtnu/a-z'): + return 'https://www.vrt.be' + url + if url.startswith('//www.vrt.be'): + return 'https:' + url + return url + # medium url + if url_type == 'medium': + if url.startswith('https:'): + return url.replace('https:', '') + if url.startswith('/vrtnu/a-z'): + return '//www.vrt.be' + url + return url + # short url + if url_type == 'short': + if url.startswith('https://www.vrt.be'): + return url.replace('https://www.vrt.be', '') + if url.startswith('//www.vrt.be'): + return url.replace('//www.vrt.be', '') + return url + + +def program_to_url(program, url_type): + ''' Convert a program url component (e.g. de-campus-cup) to: + - a short programUrl (e.g. /vrtnu/a-z/de-campus-cup/) + - a medium programUrl (e.g. //www.vrt.be/vrtnu/a-z/de-campus-cup/) + - a long programUrl (e.g. https://www.vrt.be/vrtnu/a-z/de-campus-cup/) + ''' + url = None + if program: + # short programUrl + if url_type == 'short': + url = '/vrtnu/a-z/' + program + '/' + # medium programUrl + elif url_type == 'medium': + url = '//www.vrt.be/vrtnu/a-z/' + program + '/' + # long programUrl + elif url_type == 'long': + url = 'https://www.vrt.be/vrtnu/a-z/' + program + '/' + return url + + +def url_to_program(url): + ''' Convert + - a targetUrl (e.g. //www.vrt.be/vrtnu/a-z/de-campus-cup.relevant/), + - a short programUrl (e.g. /vrtnu/a-z/de-campus-cup/) or + - a medium programUrl (e.g. //www.vrt.be/vrtnu/a-z/de-campus-cup/) + - a long programUrl (e.g. https://www.vrt.be/vrtnu/a-z/de-campus-cup/) + to a program url component (e.g. de-campus-cup). + Any season or episode information is removed as well. + ''' + program = None + if url.startswith('https://www.vrt.be/vrtnu/a-z/'): + # long programUrl or targetUrl + program = url.split('/')[5] + elif url.startswith('//www.vrt.be/vrtnu/a-z/'): + # medium programUrl or targetUrl + program = url.split('/')[5] + elif url.startswith('/vrtnu/a-z/'): + # short programUrl + program = url.split('/')[3] + if program.endswith('.relevant'): + # targetUrl + program = program.replace('.relevant', '') + return program + + +def url_to_episode(url): + ''' Convert a targetUrl (e.g. 
//www.vrt.be/vrtnu/a-z/buck/1/buck-s1a32/) to + a short episode url (/vrtnu/a-z/buck/1/buck-s1a32/) + ''' + if url.startswith('https://www.vrt.be/vrtnu/a-z/'): + # long episode url + return url.replace('https://www.vrt.be/vrtnu/a-z/', '/vrtnu/a-z/') + if url.startswith('//www.vrt.be/vrtnu/a-z/'): + # medium episode url + return url.replace('//www.vrt.be/vrtnu/a-z/', '/vrtnu/a-z/') + return None + + +def video_to_api_url(url): + ''' Convert a full VRT NU url (e.g. https://www.vrt.be/vrtnu/a-z/de-ideale-wereld/2019-nj/de-ideale-wereld-d20191010/) + to a VRT Search API url (e.g. //www.vrt.be/vrtnu/a-z/de-ideale-wereld/2019-nj/de-ideale-wereld-d20191010/) + ''' + if url.startswith('https:'): + url = url.replace('https:', '') + # NOTE: add a trailing slash again because routing plugin removes it and VRT NU Search API needs it + if not url.endswith('/'): + url += '/' + return url + + +def play_url_to_id(url): + ''' Convert a plugin:// url (e.g. plugin://plugin.video.vrt.nu/play/id/vid-5b12c0f6-b8fe-426f-a600-557f501f3be9/pbs-pub-7e2764cf-a8c0-4e78-9cbc-46d39381c237) + to an id dictionary (e.g. {'video_id': 'vid-5b12c0f6-b8fe-426f-a600-557f501f3be9'} + ''' + play_id = dict() + if 'play/id/' in url: + play_id['video_id'] = url.split('play/id/')[1].split('/')[0] + elif 'play/upnext/' in url: + play_id['video_id'] = url.split('play/upnext/')[1] + elif '/play/url/' in url: + play_id['video_url'] = video_to_api_url(url.split('play/url/')[1]) + return play_id + + +def shorten_link(url): + ''' Create a link that is as short as possible ''' + if url is None: return None + if url.startswith('https://www.vrt.be/vrtnu/'): + # As used in episode search result 'permalink' + return url.replace('https://www.vrt.be/vrtnu/', 'vrtnu.be/') + if url.startswith('//www.vrt.be/vrtnu/'): + # As used in program a-z listing 'targetUrl' + return url.replace('//www.vrt.be/vrtnu/', 'vrtnu.be/') + return url + + +def add_https_proto(url): + ''' Add HTTPS protocol to URL that lacks it ''' + if url.startswith('//'): + return 'https:' + url + if url.startswith('/'): + return 'https://www.vrt.be' + url + return url + + +def realpage(page): + ''' Convert a URL parameter page value into an integer ''' + try: + page = int(page) + except ValueError: + return 1 + if page < 1: + return 1 + return page - fullpath = get_cache_path() + path - if not exists(fullpath): - return None - from time import localtime, mktime - mtime = stat_file(fullpath).st_mtime() - now = mktime(localtime()) - if ttl and now >= mtime + ttl: - return None - - if ttl is None: - log(3, "Cache '{path}' is forced from cache.", path=path) - else: - log(3, "Cache '{path}' is fresh, expires in {time}.", path=path, time=human_delta(mtime + ttl - now)) - from json import load - with open_file(fullpath, 'r') as fdesc: - try: - return load(fdesc) - except (TypeError, ValueError) as exc: # No JSON object could be decoded - fdesc.seek(0, 0) - log_error('{exc}\nDATA: {data}', exc=exc, data=fdesc.read()) - return None - - -def update_cache(path, data): - ''' Update the cache, if necessary ''' - if get_setting('usehttpcaching', 'true') == 'false': - return - - from hashlib import md5 - from json import dump, dumps - fullpath = get_cache_path() + path - if exists(fullpath): - with open_file(fullpath) as fdesc: - cachefile = fdesc.read().encode('utf-8') - md5_cache = md5(cachefile) - else: - md5_cache = 0 - # Create cache directory if missing - if not exists(get_cache_path()): - mkdirs(get_cache_path()) - - # Avoid writes if possible (i.e. 
SD cards) - if md5_cache != md5(dumps(data).encode('utf-8')): - log(3, "Write cache '{path}'.", path=path) - with open_file(fullpath, 'w') as fdesc: - # dump(data, fdesc, encoding='utf-8') - dump(data, fdesc) - else: - # Update timestamp - from os import utime - log(3, "Cache '{path}' has not changed, updating mtime only.", path=path) - utime(path) - - -def ttl(kind='direct'): - ''' Return the HTTP cache ttl in seconds based on kind of relation ''' - if kind == 'direct': - return int(get_setting('httpcachettldirect', 5)) * 60 - if kind == 'indirect': - return int(get_setting('httpcachettlindirect', 60)) * 60 - return 5 * 60 - -def get_json_data(response): - ''' Return json object from HTTP response ''' - from json import load, loads - if (3, 0, 0) <= version_info <= (3, 5, 9): # the JSON object must be str, not 'bytes' - json_data = loads(to_unicode(response.read())) - else: - json_data = load(response) - return json_data - -def get_url_json(url, cache=None, headers=None, data=None): - ''' Return HTTP data ''' - if headers is None: - headers = dict() - log(2, 'URL get: {url}', url=unquote(url)) - req = Request(url, headers=headers) - if data is not None: - req.data = data - try: - json_data = get_json_data(urlopen(req)) - except ValueError as exc: # No JSON object could be decoded - log_error('JSON Error: {exc}', exc=exc) - return [] - except HTTPError as exc: - if hasattr(req, 'selector'): # Python 3.4+ - url_length = len(req.selector) - else: # Python 2.7 - url_length = len(req.get_selector()) - if exc.code == 413 and url_length > 8192: - ok_dialog(heading='HTTP Error 413', message=localize(30967)) - log_error('HTTP Error 413: Exceeded maximum url length: ' - 'VRT Search API url has a length of {length} characters.', length=url_length) - return [] - if exc.code == 400 and 7600 <= url_length <= 8192: - ok_dialog(heading='HTTP Error 400', message=localize(30967)) - log_error('HTTP Error 400: Probably exceeded maximum url length: ' - 'VRT Search API url has a length of {length} characters.', length=url_length) - return [] - try: - return get_json_data(exc) - except ValueError as exc: # No JSON object could be decoded - log_error('JSON Error: {exc}', exc=exc) - return [] - raise - else: - if cache: - update_cache(cache, json_data) - return json_data - - -def get_cached_url_json(url, cache, headers=None, ttl=None): # pylint: disable=redefined-outer-name - ''' Return data from cache, if any, else make an HTTP request ''' - # Get api data from cache if it is fresh - json_data = get_cache(cache, ttl=ttl) - if json_data is not None: - return json_data - return get_url_json(url, cache=cache, headers=headers) - - -def refresh_caches(cache_file=None): - ''' Invalidate the needed caches and refresh container ''' - files = ['favorites.json', 'oneoff.json', 'resume_points.json'] - if cache_file and cache_file not in files: - files.append(cache_file) - invalidate_caches(*files) - container_refresh() - notification(message=localize(30981)) - - -def invalidate_caches(*caches): - ''' Invalidate multiple cache files ''' - import fnmatch - _, files = listdir(get_cache_path()) - # Invalidate caches related to menu list refreshes - removes = set() - for expr in caches: - removes.update(fnmatch.filter(files, expr)) - for filename in removes: - delete(get_cache_path() + filename) +def find_entry(dlist, key, value, default=None): + ''' Find (the first) dictionary in a list where key matches value ''' + return next((entry for entry in dlist if entry.get(key) == value), default) diff --git 
diff --git a/resources/lib/vrtplayer.py b/resources/lib/vrtplayer.py
index 025987cbb..eb193ef51 100644
--- a/resources/lib/vrtplayer.py
+++ b/resources/lib/vrtplayer.py
@@ -8,10 +8,9 @@
 from helperobjects import TitleItem
 from kodiutils import (delete_cached_thumbnail, end_of_directory, get_addon_info, get_setting,
                        has_credentials, localize, log_error, ok_dialog, play, set_setting,
-                       show_listing, url_for)
+                       show_listing, ttl, url_for)
 from resumepoints import ResumePoints
-from statichelper import find_entry
-from utils import ttl
+from utils import find_entry, realpage


 class VRTPlayer:
@@ -248,7 +247,6 @@ def show_episodes_menu(self, program, season=None):

     def show_recent_menu(self, page=0, use_favorites=False):
         ''' The VRT NU add-on 'Most recent' and 'My most recent' listing menu '''
-        from statichelper import realpage

         # My favorites menus may need more up-to-date favorites
         self._favorites.refresh(ttl=ttl('direct' if use_favorites else 'indirect'))
@@ -273,7 +271,6 @@ def show_recent_menu(self, page=0, use_favorites=False):

     def show_offline_menu(self, page=0, use_favorites=False):
         ''' The VRT NU add-on 'Soon offline' and 'My soon offline' listing menu '''
-        from statichelper import realpage

         # My favorites menus may need more up-to-date favorites
         self._favorites.refresh(ttl=ttl('direct' if use_favorites else 'indirect'))
@@ -298,7 +295,6 @@ def show_offline_menu(self, page=0, use_favorites=False):

     def show_watchlater_menu(self, page=0):
         ''' The VRT NU add-on 'My watch later' listing menu '''
-        from statichelper import realpage

         # My watch later menu may need more up-to-date favorites
         self._favorites.refresh(ttl=ttl('direct'))
@@ -309,7 +305,6 @@ def show_watchlater_menu(self, page=0):

     def show_continue_menu(self, page=0):
         ''' The VRT NU add-on 'Continue waching' listing menu '''
-        from statichelper import realpage

         # Continue watching menu may need more up-to-date favorites
         self._favorites.refresh(ttl=ttl('direct'))
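
For reference, not part of the patch itself: after this change vrtplayer.py gets ttl() from kodiutils and find_entry()/realpage() from utils. A rough sketch of how a listing menu such as show_recent_menu() now resolves its cache lifetime, assuming the setting names carried over unchanged from the removed utils.ttl():

    # Rough sketch, not the add-on's literal code
    from kodiutils import ttl  # moved out of utils.py by this changeset

    use_favorites = True
    cache_ttl = ttl('direct' if use_favorites else 'indirect')
    # 'direct'   -> httpcachettldirect setting (default 5 minutes) * 60 seconds
    # 'indirect' -> httpcachettlindirect setting (default 60 minutes) * 60 seconds
    # The menu then calls self._favorites.refresh(ttl=cache_ttl) before listing episodes.
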
diff --git a/test/test_statichelper.py b/test/test_utils.py
similarity index 52%
rename from test/test_statichelper.py
rename to test/test_utils.py
index 0987af320..deb5b4f75 100644
--- a/test/test_statichelper.py
+++ b/test/test_utils.py
@@ -5,7 +5,7 @@

 from __future__ import absolute_import, division, print_function, unicode_literals
 import unittest
-import statichelper
+import utils


 class TestStaticHelper(unittest.TestCase):
@@ -15,13 +15,13 @@ def test_url_to_episode(self):
         ''' Test converting URL to episode '''
         long_url = 'https://www.vrt.be/vrtnu/a-z/buck/1/buck-s1a32/'
         episode = '/vrtnu/a-z/buck/1/buck-s1a32/'
-        self.assertEqual(episode, statichelper.url_to_episode(long_url))
+        self.assertEqual(episode, utils.url_to_episode(long_url))

         medium_url = '//www.vrt.be/vrtnu/a-z/buck/1/buck-s1a32/'
         episode = '/vrtnu/a-z/buck/1/buck-s1a32/'
-        self.assertEqual(episode, statichelper.url_to_episode(medium_url))
+        self.assertEqual(episode, utils.url_to_episode(medium_url))

-        self.assertEqual(None, statichelper.url_to_episode('foobar'))
+        self.assertEqual(None, utils.url_to_episode('foobar'))

     def test_url_to_program(self):
         ''' Test converting URL to program '''
@@ -31,10 +31,10 @@
         long_url = 'https://www.vrt.be/vrtnu/a-z/buck/1/buck-s1a32/'
         short_relevant_url = '/vrtnu/a-z/buck.relevant/'

-        self.assertEqual(program, statichelper.url_to_program(long_url))
-        self.assertEqual(program, statichelper.url_to_program(medium_url))
-        self.assertEqual(program, statichelper.url_to_program(short_url))
-        self.assertEqual(program, statichelper.url_to_program(short_relevant_url))
+        self.assertEqual(program, utils.url_to_program(long_url))
+        self.assertEqual(program, utils.url_to_program(medium_url))
+        self.assertEqual(program, utils.url_to_program(short_url))
+        self.assertEqual(program, utils.url_to_program(short_relevant_url))

     def test_program_to_url(self):
         ''' Test converting program to URL '''
@@ -43,33 +43,33 @@
         short_url = '//vrtnu.be/a-z/de-campus-cup/'
         medium_url = '//www.vrt.be/vrtnu/a-z/de-campus-cup/'
         long_url = 'https://www.vrt.be/vrtnu/a-z/de-campus-cup/'

-        self.assertEqual(short_url, statichelper.program_to_url(program, 'short'))
-        self.assertEqual(medium_url, statichelper.program_to_url(program, 'medium'))
-        self.assertEqual(long_url, statichelper.program_to_url(program, 'long'))
+        self.assertEqual(short_url, utils.program_to_url(program, 'short'))
+        self.assertEqual(medium_url, utils.program_to_url(program, 'medium'))
+        self.assertEqual(long_url, utils.program_to_url(program, 'long'))

     def test_video_to_api_url(self):
         ''' Test convert video to api URL '''
         video = 'https://www.vrt.be/vrtnu/a-z/de-ideale-wereld/2019-nj/de-ideale-wereld-d20191010/'
         api_url = '//www.vrt.be/vrtnu/a-z/de-ideale-wereld/2019-nj/de-ideale-wereld-d20191010/'
-        self.assertEqual(api_url, statichelper.video_to_api_url(video))
+        self.assertEqual(api_url, utils.video_to_api_url(video))

         video = 'https://www.vrt.be/vrtnu/a-z/de-ideale-wereld/2019-nj/de-ideale-wereld-d20191010'
         api_url = '//www.vrt.be/vrtnu/a-z/de-ideale-wereld/2019-nj/de-ideale-wereld-d20191010/'
-        self.assertEqual(api_url, statichelper.video_to_api_url(video))
+        self.assertEqual(api_url, utils.video_to_api_url(video))

     def test_play_url_to_id(self):
         ''' Test converting play_url to play_id '''
         url = 'plugin://plugin.video.vrt.nu/play/id/vid-5b12c0f6-b8fe-426f-a600-557f501f3be9/pbs-pub-7e2764cf-a8c0-4e78-9cbc-46d39381c237'
         play_id = dict(video_id='vid-5b12c0f6-b8fe-426f-a600-557f501f3be9')
-        self.assertEqual(play_id, statichelper.play_url_to_id(url))
+        self.assertEqual(play_id, utils.play_url_to_id(url))

         url = 'plugin://plugin.video.vrt.nu/play/upnext/vid-271d7238-b7f2-4a3c-b3c7-17a5110be71a'
         play_id = dict(video_id='vid-271d7238-b7f2-4a3c-b3c7-17a5110be71a')
-        self.assertEqual(play_id, statichelper.play_url_to_id(url))
+        self.assertEqual(play_id, utils.play_url_to_id(url))

         url = 'plugin://plugin.video.vrt.nu/play/url/https://www.vrt.be/vrtnu/kanalen/canvas/'
         play_id = dict(video_url='//www.vrt.be/vrtnu/kanalen/canvas/')
-        self.assertEqual(play_id, statichelper.play_url_to_id(url))
+        self.assertEqual(play_id, utils.play_url_to_id(url))

     def test_reformat_url(self):
         ''' Test reformatting URLs '''
@@ -77,21 +77,21 @@
         medium_url = '//www.vrt.be/vrtnu/a-z/terzake/2019/terzake-d20191017/'
         long_url = 'https://www.vrt.be/vrtnu/a-z/terzake/2019/terzake-d20191017/'

-        self.assertEqual(long_url, statichelper.reformat_url(short_url, 'long'))
-        self.assertEqual(long_url, statichelper.reformat_url(medium_url, 'long'))
-        self.assertEqual(long_url, statichelper.reformat_url(long_url, 'long'))
+        self.assertEqual(long_url, utils.reformat_url(short_url, 'long'))
+        self.assertEqual(long_url, utils.reformat_url(medium_url, 'long'))
+        self.assertEqual(long_url, utils.reformat_url(long_url, 'long'))

-        self.assertEqual(medium_url, statichelper.reformat_url(short_url, 'medium'))
-        self.assertEqual(medium_url, statichelper.reformat_url(medium_url, 'medium'))
-        self.assertEqual(medium_url, statichelper.reformat_url(long_url, 'medium'))
+        self.assertEqual(medium_url, utils.reformat_url(short_url, 'medium'))
+        self.assertEqual(medium_url, utils.reformat_url(medium_url, 'medium'))
+        self.assertEqual(medium_url, utils.reformat_url(long_url, 'medium'))

-        self.assertEqual(short_url, statichelper.reformat_url(short_url, 'short'))
-        self.assertEqual(short_url, statichelper.reformat_url(medium_url, 'short'))
-        self.assertEqual(short_url, statichelper.reformat_url(long_url, 'short'))
+        self.assertEqual(short_url, utils.reformat_url(short_url, 'short'))
+        self.assertEqual(short_url, utils.reformat_url(medium_url, 'short'))
+        self.assertEqual(short_url, utils.reformat_url(long_url, 'short'))

-        self.assertEqual(long_url, statichelper.reformat_url(long_url + '#foo', 'long'))
-        self.assertEqual(medium_url, statichelper.reformat_url(long_url + '#foo', 'medium'))
-        self.assertEqual(short_url, statichelper.reformat_url(long_url + '#foo', 'short'))
+        self.assertEqual(long_url, utils.reformat_url(long_url + '#foo', 'long'))
+        self.assertEqual(medium_url, utils.reformat_url(long_url + '#foo', 'medium'))
+        self.assertEqual(short_url, utils.reformat_url(long_url + '#foo', 'short'))

     def test_shorten_link(self):
         ''' Test shortening links '''
@@ -99,24 +99,24 @@
         medium_url = '//www.vrt.be/vrtnu/p.LR90GkqOD'
         long_url = 'https://www.vrt.be/vrtnu/p.LR90GkqOD'

-        self.assertEqual(permalink, statichelper.shorten_link(long_url))
-        self.assertEqual(permalink, statichelper.shorten_link(medium_url))
-        self.assertEqual(None, statichelper.shorten_link(None))
+        self.assertEqual(permalink, utils.shorten_link(long_url))
+        self.assertEqual(permalink, utils.shorten_link(medium_url))
+        self.assertEqual(None, utils.shorten_link(None))

     def test_realpage(self):
         ''' Test converting input to page '''
-        self.assertEqual(1, statichelper.realpage('foo'))
-        self.assertEqual(1, statichelper.realpage('-1'))
-        self.assertEqual(1, statichelper.realpage('0'))
-        self.assertEqual(2, statichelper.realpage(2))
-        self.assertEqual(3, statichelper.realpage('3'))
+        self.assertEqual(1, utils.realpage('foo'))
+        self.assertEqual(1, utils.realpage('-1'))
+        self.assertEqual(1, utils.realpage('0'))
+        self.assertEqual(2, utils.realpage(2))
+        self.assertEqual(3, utils.realpage('3'))

     def test_capitalize(self):
         ''' Test capitalizing string '''
-        self.assertEqual('Foo bar', statichelper.capitalize('foo bar'))
-        self.assertEqual('Foo bar', statichelper.capitalize('Foo bar'))
-        self.assertEqual('FoO bAr', statichelper.capitalize('foO bAr'))
-        self.assertEqual('FOO BAR', statichelper.capitalize('FOO BAR'))
+        self.assertEqual('Foo bar', utils.capitalize('foo bar'))
+        self.assertEqual('Foo bar', utils.capitalize('Foo bar'))
+        self.assertEqual('FoO bAr', utils.capitalize('foO bAr'))
+        self.assertEqual('FOO BAR', utils.capitalize('FOO BAR'))


 if __name__ == '__main__':
diff --git a/test/xbmc.py b/test/xbmc.py
index c5155d4a7..de7befbfe 100644
--- a/test/xbmc.py
+++ b/test/xbmc.py
@@ -11,7 +11,7 @@
 import json
 import time
 from xbmcextra import ADDON_ID, global_settings, import_language
-from statichelper import to_unicode
+from utils import to_unicode

 LOGLEVELS = ['Debug', 'Info', 'Notice', 'Warning', 'Error', 'Severe', 'Fatal', 'None']
 LOGDEBUG = 0