Return default value on failure
dagwieers committed Dec 9, 2019
1 parent 76dffc7 commit 0ea8d0b
Showing 18 changed files with 349 additions and 377 deletions.
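The theme of this commit: the HTTP/JSON helpers gain a fail parameter and return that value when a request fails or no JSON object can be decoded, so callers can pick a default that matches the .get() calls they chain on the result. A before/after sketch (function names from this diff, variables hypothetical):

# Before: a failed request returned None, so chained lookups crashed
search_json = get_url_json(url=search_url)
episodes = search_json.get('results', [{}])  # AttributeError when search_json is None

# After: the caller chooses an empty default of the right type
search_json = get_url_json(url=search_url, fail={})
episodes = search_json.get('results', [{}])  # falls back gracefully on failure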
5 changes: 2 additions & 3 deletions resources/lib/addon.py
@@ -10,9 +10,8 @@
except ImportError:  # Python 2
    from urllib import unquote_plus

-from kodiutils import localize, log_access, notification
-from statichelper import from_unicode, to_unicode
-from utils import refresh_caches
+from kodiutils import localize, log_access, notification, refresh_caches
+from vrtutils import from_unicode, to_unicode

plugin = Plugin()  # pylint: disable=invalid-name

2 changes: 1 addition & 1 deletion resources/lib/addon_entry.py
@@ -3,8 +3,8 @@
''' This is the actual VRT NU video plugin entry point '''

from __future__ import absolute_import, division, unicode_literals
-import kodiutils
import xbmcaddon
+import kodiutils

kodiutils.ADDON = xbmcaddon.Addon()

38 changes: 18 additions & 20 deletions resources/lib/apihelper.py
@@ -13,12 +13,12 @@

from data import CHANNELS
from helperobjects import TitleItem
-from kodiutils import (delete_cached_thumbnail, get_global_setting, get_proxies, get_setting,
-                       has_addon, localize, localize_from_data, log, url_for)
+from kodiutils import (delete_cached_thumbnail, get_cache, get_cached_url_json, get_global_setting,
+                       get_proxies, get_setting, get_url_json, has_addon, localize, localize_from_data,
+                       log, ttl, update_cache, url_for)
from metadata import Metadata
-from statichelper import (add_https_method, convert_html_to_kodilabel, find_entry, from_unicode, play_url_to_id,
-                          program_to_url, realpage, strip_newlines, url_to_program)
-from utils import get_cache, get_cached_url_json, get_url_json, ttl, update_cache
+from vrtutils import (add_https_proto, html_to_kodilabel, find_entry, from_unicode, play_url_to_id,
+                      program_to_url, realpage, strip_newlines, url_to_program)


class ApiHelper:
@@ -38,7 +38,7 @@ def __init__(self, _favorites, _resumepoints):

    def get_tvshows(self, category=None, channel=None, feature=None):
        ''' Get all TV shows for a given category, channel or feature, optionally filtered by favorites '''
-        params = dict()
+        params = {}

        if category:
            params['facets[categories]'] = category
@@ -59,7 +59,7 @@ def get_tvshows(self, category=None, channel=None, feature=None):

        querystring = '&'.join('{}={}'.format(key, value) for key, value in list(params.items()))
        suggest_url = self._VRTNU_SUGGEST_URL + '?' + querystring
-        return get_cached_url_json(url=suggest_url, cache=cache_file, ttl=ttl('indirect'))
+        return get_cached_url_json(url=suggest_url, cache=cache_file, ttl=ttl('indirect'), fail=[])

    def list_tvshows(self, category=None, channel=None, feature=None, use_favorites=False):
        ''' List all TV shows for a given category, channel or feature, optionally filtered by favorites '''
@@ -144,7 +144,7 @@ def __map_episodes(self, episodes, titletype=None, season=None, use_favorites=Fa
            highlight = episode.get('highlight')
            if highlight:
                for key in highlight:
-                    episode[key] = convert_html_to_kodilabel(highlight.get(key)[0])
+                    episode[key] = html_to_kodilabel(highlight.get(key)[0])

            list_item, sort, ascending = self.episode_to_listitem(episode, program, cache_file, titletype)
            episode_items.append(list_item)
@@ -261,7 +261,7 @@ def get_upnext(self, info):

        # Get all episodes from current program and sort by program, seasonTitle and episodeNumber
        episodes = sorted(self.get_episodes(keywords=program), key=lambda k: (k.get('program'), k.get('seasonTitle'), k.get('episodeNumber')))
-        upnext = dict()
+        upnext = {}
        for episode in episodes:
            if ep_id.get('whatson_id') == episode.get('whatsonId') or \
                    ep_id.get('video_id') == episode.get('videoId') or \
@@ -406,7 +406,7 @@ def get_episode_by_air_date(self, channel_name, start_date, end_date=None):
        schedule_date = onairdate
        schedule_datestr = schedule_date.isoformat().split('T')[0]
        url = 'https://www.vrt.be/bin/epg/schedule.%s.json' % schedule_datestr
-        schedule_json = get_url_json(url)
+        schedule_json = get_url_json(url, fail={})
        episodes = schedule_json.get(channel.get('id'), [])
        if not episodes:
            return None
@@ -562,18 +562,16 @@ def get_episodes(self, program=None, season=None, episodes=None, category=None,
        querystring = '&'.join('{}={}'.format(key, value) for key, value in list(params.items()))
        search_url = self._VRTNU_SEARCH_URL + '?' + querystring.replace(' ', '%20')  # Only encode spaces to minimize url length
        if cache_file:
-            search_json = get_cached_url_json(url=search_url, cache=cache_file, ttl=ttl('indirect'))
+            search_json = get_cached_url_json(url=search_url, cache=cache_file, ttl=ttl('indirect'), fail={})
        else:
-            search_json = get_url_json(url=search_url)
+            search_json = get_url_json(url=search_url, fail={})

        # Check for multiple seasons
-        seasons = None
+        seasons = []
        if 'facets[seasonTitle]' not in unquote(search_url):
-            facets = search_json.get('facets', dict()).get('facets')
+            facets = search_json.get('facets', {}).get('facets')
            if facets:
                seasons = next((f.get('buckets', []) for f in facets if f.get('name') == 'seasons' and len(f.get('buckets', [])) > 1), None)
-            else:
-                seasons = []

        episodes = search_json.get('results', [{}])
        show_seasons = bool(season != 'allseasons')
@@ -590,7 +588,7 @@ def get_episodes(self, program=None, season=None, episodes=None, category=None,
        for api_page in range(1, api_pages):
            api_page_url = search_url + '&from=' + str(api_page * api_page_size + 1)
            api_page_json = get_url_json(api_page_url)
-            if api_page_json:
+            if api_page_json is not None:
                episodes += api_page_json.get('results', [{}])

        # Return episodes
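A small sketch of which offsets the pagination loop above requests (api_page_size and api_pages are hypothetical values here):

# With api_page_size = 100 and api_pages = 3, the extra requests use 1-based offsets
offsets = [api_page * 100 + 1 for api_page in range(1, 3)]
print(offsets)  # [101, 201] - the first batch arrived with the initial request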
@@ -613,7 +611,7 @@ def list_channels(self, channels=None, live=True):
                continue

            context_menu = []
-            art_dict = dict()
+            art_dict = {}

            # Try to use the white icons for thumbnails (used for icons as well)
            if has_addon('resource.images.studios.white'):
@@ -684,7 +682,7 @@ def list_youtube(channels=None):
                continue

            context_menu = []
-            art_dict = dict()
+            art_dict = {}

            # Try to use the white icons for thumbnails (used for icons as well)
            if has_addon('resource.images.studios.white'):
@@ -821,7 +819,7 @@ def get_category_thumbnail(element):
        ''' Return a category thumbnail, if available '''
        if get_setting('showfanart', 'true') == 'true':
            raw_thumbnail = element.find(class_='media').get('data-responsive-image', 'DefaultGenre.png')
-            return add_https_method(raw_thumbnail)
+            return add_https_proto(raw_thumbnail)
        return 'DefaultGenre.png'

    @staticmethod
16 changes: 8 additions & 8 deletions resources/lib/favorites.py
@@ -11,9 +11,9 @@
except ImportError:  # Python 2
    from urllib2 import build_opener, install_opener, ProxyHandler, Request, unquote, urlopen

-from kodiutils import (container_refresh, get_proxies, get_setting, has_credentials, input_down,
-                       localize, log, log_error, multiselect, notification, ok_dialog)
-from utils import get_cache, get_url_json, invalidate_caches, update_cache
+from kodiutils import (container_refresh, get_cache, get_proxies, get_setting, get_url_json,
+                       has_credentials, input_down, invalidate_caches, localize, log, log_error,
+                       multiselect, notification, ok_dialog, update_cache)


class Favorites:
@@ -45,7 +45,7 @@ def refresh(self, ttl=None):
        }
        favorites_url = 'https://video-user-data.vrt.be/favorites'
        favorites_json = get_url_json(url=favorites_url, cache='favorites.json', headers=headers)
-        if favorites_json:
+        if favorites_json is not None:
            self._favorites = favorites_json

    def update(self, program, title, value=True):
@@ -70,9 +70,9 @@ def update(self, program, title, value=True):
            'Referer': 'https://www.vrt.be/vrtnu',
        }

-        from statichelper import program_to_url
-        payload = dict(isFavorite=value, programUrl=program_to_url(program, 'short'), title=title)
        from json import dumps
+        from vrtutils import program_to_url
+        payload = dict(isFavorite=value, programUrl=program_to_url(program, 'short'), title=title)
        data = dumps(payload).encode('utf-8')
        program_id = self.program_to_id(program)
        log(2, 'URL post: https://video-user-data.vrt.be/favorites/{program_id}', program_id=program_id)
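For reference, the request body produced by this payload looks roughly like the following (program URL and title are hypothetical examples, not taken from this diff):

payload = dict(isFavorite=True, programUrl='//www.vrt.be/vrtnu/a-z/pano/', title='Pano')
dumps(payload)  # '{"isFavorite": true, "programUrl": "//www.vrt.be/vrtnu/a-z/pano/", "title": "Pano"}'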
@@ -124,12 +124,12 @@ def titles(self):

    def programs(self):
        ''' Return all favorite programs '''
-        from statichelper import url_to_program
+        from vrtutils import url_to_program
        return [url_to_program(value.get('value').get('programUrl')) for value in list(self._favorites.values()) if value.get('value').get('isFavorite')]

    def manage(self):
        ''' Allow the user to unselect favorites to be removed from the listing '''
-        from statichelper import url_to_program
+        from vrtutils import url_to_program
        self.refresh(ttl=0)
        if not self._favorites:
            ok_dialog(heading=localize(30418), message=localize(30419))  # No favorites found
179 changes: 178 additions & 1 deletion resources/lib/kodiutils.py
@@ -4,10 +4,12 @@

from __future__ import absolute_import, division, unicode_literals
from contextlib import contextmanager
+from sys import version_info

import xbmc
import xbmcaddon
import xbmcplugin
-from statichelper import from_unicode, to_unicode
+from vrtutils import from_unicode, to_unicode

ADDON = xbmcaddon.Addon()

@@ -750,3 +752,178 @@ def jsonrpc(**kwargs):
    if 'jsonrpc' not in kwargs:
        kwargs.update(jsonrpc='2.0')
    return loads(xbmc.executeJSONRPC(dumps(kwargs)))


def human_delta(seconds):
    ''' Return a human-readable representation of the TTL '''
    from math import floor
    days = int(floor(seconds / (24 * 60 * 60)))
    seconds = seconds % (24 * 60 * 60)
    hours = int(floor(seconds / (60 * 60)))
    seconds = seconds % (60 * 60)
    if days:
        return '%d day%s and %d hour%s' % (days, 's' if days != 1 else '', hours, 's' if hours != 1 else '')
    minutes = int(floor(seconds / 60))
    seconds = seconds % 60
    if hours:
        return '%d hour%s and %d minute%s' % (hours, 's' if hours != 1 else '', minutes, 's' if minutes != 1 else '')
    if minutes:
        return '%d minute%s and %d second%s' % (minutes, 's' if minutes != 1 else '', seconds, 's' if seconds != 1 else '')
    return '%d second%s' % (seconds, 's' if seconds != 1 else '')
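A quick usage sketch (inputs chosen for illustration):

human_delta(45)     # '45 seconds'
human_delta(150)    # '2 minutes and 30 seconds'
human_delta(9000)   # '2 hours and 30 minutes'
human_delta(90061)  # '1 day and 1 hour'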


def get_cache(path, ttl=None):  # pylint: disable=redefined-outer-name
    ''' Get the content from cache, if it's still fresh '''
    if get_setting('usehttpcaching', 'true') == 'false':
        return None

    fullpath = get_cache_path() + path
    if not exists(fullpath):
        return None

    from time import localtime, mktime
    mtime = stat_file(fullpath).st_mtime()
    now = mktime(localtime())
    if ttl and now >= mtime + ttl:
        return None

    if ttl is None:
        log(3, "Cache '{path}' is forced from cache.", path=path)
    else:
        log(3, "Cache '{path}' is fresh, expires in {time}.", path=path, time=human_delta(mtime + ttl - now))
    from json import load
    with open_file(fullpath, 'r') as fdesc:
        try:
            return load(fdesc)
        except (TypeError, ValueError) as exc:  # No JSON object could be decoded
            fdesc.seek(0, 0)
            log_error('{exc}\nDATA: {data}', exc=exc, data=fdesc.read())
            return None
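A usage sketch (cache file name as used elsewhere in this commit): ttl=None forces a read from cache regardless of age, while a numeric ttl returns None once the file is older than that many seconds.

favorites = get_cache('favorites.json', ttl=ttl('direct'))
if favorites is None:
    pass  # cache stale or missing: fall back to an HTTP request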


def update_cache(path, data):
    ''' Update the cache, if necessary '''
    if get_setting('usehttpcaching', 'true') == 'false':
        return

    from hashlib import md5
    from json import dump, dumps
    fullpath = get_cache_path() + path
    if exists(fullpath):
        with open_file(fullpath) as fdesc:
            cachefile = fdesc.read().encode('utf-8')
        md5_cache = md5(cachefile).hexdigest()
    else:
        md5_cache = None
    # Create cache directory if missing
    if not exists(get_cache_path()):
        mkdirs(get_cache_path())

    # Avoid writes if possible (i.e. SD cards)
    # Compare hex digests: two hashlib objects never compare equal directly
    if md5_cache != md5(dumps(data).encode('utf-8')).hexdigest():
        log(3, "Write cache '{path}'.", path=path)
        with open_file(fullpath, 'w') as fdesc:
            # dump(data, fdesc, encoding='utf-8')
            dump(data, fdesc)
    else:
        # Update timestamp only; use the full path and pass times=None for Python 2 compatibility
        from os import utime
        log(3, "Cache '{path}' has not changed, updating mtime only.", path=path)
        utime(fullpath, None)
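A sketch of the intended round trip (file name and data hypothetical):

update_cache('programs.json', {'items': [1, 2, 3]})  # content changed: file is written
update_cache('programs.json', {'items': [1, 2, 3]})  # content identical: only mtime is bumped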


def ttl(kind='direct'):
    ''' Return the HTTP cache ttl in seconds based on kind of relation '''
    if kind == 'direct':
        return int(get_setting('httpcachettldirect', 5)) * 60
    if kind == 'indirect':
        return int(get_setting('httpcachettlindirect', 60)) * 60
    return 5 * 60
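With the default settings this evaluates to (sketch):

ttl('direct')    # 5 * 60 = 300 seconds by default
ttl('indirect')  # 60 * 60 = 3600 seconds by default
ttl('unknown')   # any other kind falls back to 300 seconds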


def get_json_data(response):
    ''' Return json object from HTTP response '''
    from json import load, loads
    if (3, 0, 0) <= version_info <= (3, 5, 9):  # the JSON object must be str, not 'bytes'
        json_data = loads(to_unicode(response.read()))
    else:
        json_data = load(response)
    return json_data


def get_url_json(url, cache=None, headers=None, data=None, fail=None):
    ''' Return HTTP data '''
    try:  # Python 3
        from urllib.error import HTTPError
        from urllib.parse import unquote
        from urllib.request import urlopen, Request
    except ImportError:  # Python 2
        from urllib2 import HTTPError, unquote, urlopen, Request

    if headers is None:
        headers = dict()
    log(2, 'URL get: {url}', url=unquote(url))
    req = Request(url, headers=headers)
    if data is not None:
        req.data = data
    try:
        json_data = get_json_data(urlopen(req))
    except ValueError as exc:  # No JSON object could be decoded
        log_error('JSON Error: {exc}', exc=exc)
        return fail
    except HTTPError as exc:
        if hasattr(req, 'selector'):  # Python 3.4+
            url_length = len(req.selector)
        else:  # Python 2.7
            url_length = len(req.get_selector())
        if exc.code == 413 and url_length > 8192:
            ok_dialog(heading='HTTP Error 413', message=localize(30967))
            log_error('HTTP Error 413: Exceeded maximum url length: '
                      'VRT Search API url has a length of {length} characters.', length=url_length)
            return fail
        if exc.code == 400 and 7600 <= url_length <= 8192:
            ok_dialog(heading='HTTP Error 400', message=localize(30967))
            log_error('HTTP Error 400: Probably exceeded maximum url length: '
                      'VRT Search API url has a length of {length} characters.', length=url_length)
            return fail
        try:
            return get_json_data(exc)
        except ValueError as exc:  # No JSON object could be decoded
            log_error('JSON Error: {exc}', exc=exc)
            return fail
        raise
    else:
        if cache:
            update_cache(cache, json_data)
        return json_data
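A usage sketch mirroring the schedule lookup in apihelper.py (date and channel id hypothetical): the fail value is chosen so the .get() that follows can never hit None.

schedule = get_url_json('https://www.vrt.be/bin/epg/schedule.2019-12-09.json', fail={})
episodes = schedule.get('channel-id', [])  # safe even when the request failed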


def get_cached_url_json(url, cache, headers=None, ttl=None, fail=None):  # pylint: disable=redefined-outer-name
    ''' Return data from cache, if any, else make an HTTP request '''
    # Get api data from cache if it is fresh
    json_data = get_cache(cache, ttl=ttl)
    if json_data is not None:
        return json_data
    return get_url_json(url, cache=cache, headers=headers, fail=fail)
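This is the call pattern the apihelper.py hunks above switch to; a sketch (URL variable and cache name hypothetical):

suggestions = get_cached_url_json(url=suggest_url, cache='programs.json', ttl=ttl('indirect'), fail=[])
for suggestion in suggestions:  # iterating is safe: a failed fetch yields []
    pass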


def refresh_caches(cache_file=None):
    ''' Invalidate the needed caches and refresh container '''
    files = ['favorites.json', 'oneoff.json', 'resume_points.json']
    if cache_file and cache_file not in files:
        files.append(cache_file)
    invalidate_caches(*files)
    container_refresh()
    notification(message=localize(30981))


def invalidate_caches(*caches):
    ''' Invalidate multiple cache files '''
    import fnmatch
    _, files = listdir(get_cache_path())
    # Invalidate caches related to menu list refreshes
    removes = set()
    for expr in caches:
        removes.update(fnmatch.filter(files, expr))
    for filename in removes:
        delete(get_cache_path() + filename)
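Because the patterns go through fnmatch, shell-style globs work as well; a sketch (file names hypothetical):

invalidate_caches('favorites.json', 'programs-*.json')
# removes favorites.json plus every cache file matching programs-*.json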