diff --git a/resources/lib/addon.py b/resources/lib/addon.py
index 5b04fefb6..5d511367a 100644
--- a/resources/lib/addon.py
+++ b/resources/lib/addon.py
@@ -10,8 +10,9 @@
 except ImportError:  # Python 2
     from urllib import unquote_plus
 
-from kodiutils import localize, log_access, notification, refresh_caches
+from kodiutils import localize, log_access, notification
 from statichelper import from_unicode, to_unicode
+from utils import refresh_caches
 
 plugin = Plugin()  # pylint: disable=invalid-name
diff --git a/resources/lib/apihelper.py b/resources/lib/apihelper.py
index cf60956a8..6abfc4a33 100644
--- a/resources/lib/apihelper.py
+++ b/resources/lib/apihelper.py
@@ -5,21 +5,20 @@
 from __future__ import absolute_import, division, unicode_literals
 
 try:  # Python 3
-    from urllib.error import HTTPError
     from urllib.parse import quote_plus, unquote
-    from urllib.request import build_opener, install_opener, ProxyHandler, Request, urlopen
+    from urllib.request import build_opener, install_opener, ProxyHandler, urlopen
 except ImportError:  # Python 2
     from urllib import quote_plus
-    from urllib2 import build_opener, install_opener, ProxyHandler, Request, HTTPError, unquote, urlopen
+    from urllib2 import build_opener, install_opener, ProxyHandler, unquote, urlopen
 
 from data import CHANNELS
 from helperobjects import TitleItem
-from kodiutils import (delete_cached_thumbnail, get_cache, get_global_setting, get_proxies, get_setting,
-                       has_addon, localize, localize_from_data, log, log_error, ok_dialog, ttl, update_cache,
-                       url_for)
-from statichelper import (add_https_method, convert_html_to_kodilabel, find_entry, from_unicode, play_url_to_id,
-                          program_to_url, realpage, to_unicode, strip_newlines, url_to_program)
+from kodiutils import (delete_cached_thumbnail, get_global_setting, get_proxies, get_setting,
+                       has_addon, localize, localize_from_data, log, url_for)
 from metadata import Metadata
+from statichelper import (add_https_method, convert_html_to_kodilabel, find_entry, from_unicode, play_url_to_id,
+                          program_to_url, realpage, strip_newlines, url_to_program)
+from utils import get_cache, get_url_json, try_cache_or_url_json, ttl, update_cache
 
 
 class ApiHelper:
@@ -57,16 +56,10 @@ def get_tvshows(self, category=None, channel=None, feature=None):
 
         if not category and not channel and not feature:
             params['facets[transcodingStatus]'] = 'AVAILABLE'  # Required for getting results in Suggests API
         cache_file = 'programs.json'
-        tvshows = get_cache(cache_file, ttl=ttl('indirect'))  # Try the cache if it is fresh
-        if not tvshows:
-            from json import loads
-            querystring = '&'.join('{}={}'.format(key, value) for key, value in list(params.items()))
-            suggest_url = self._VRTNU_SUGGEST_URL + '?' + querystring
-            log(2, 'URL get: {url}', url=unquote(suggest_url))
-            tvshows = loads(to_unicode(urlopen(suggest_url).read()))
-            update_cache(cache_file, tvshows)
-        return tvshows
+        querystring = '&'.join('{}={}'.format(key, value) for key, value in list(params.items()))
+        suggest_url = self._VRTNU_SUGGEST_URL + '?' + querystring
+        return try_cache_or_url_json(url=suggest_url, cache=cache_file, ttl=ttl('indirect'))
 
     def list_tvshows(self, category=None, channel=None, feature=None, use_favorites=False):
         ''' List all TV shows for a given category, channel or feature, optionally filtered by favorites '''
@@ -413,8 +406,7 @@ def get_episode_by_air_date(self, channel_name, start_date, end_date=None):
             schedule_date = onairdate
         schedule_datestr = schedule_date.isoformat().split('T')[0]
         url = 'https://www.vrt.be/bin/epg/schedule.%s.json' % schedule_datestr
-        from json import loads
-        schedule_json = loads(to_unicode(urlopen(url).read()))
+        schedule_json = get_url_json(url)
         episodes = schedule_json.get(channel.get('id'), [])
         if not episodes:
             return None
@@ -569,35 +561,10 @@ def get_episodes(self, program=None, season=None, episodes=None, category=None,
         # Construct VRT NU Search API Url and get api data
         querystring = '&'.join('{}={}'.format(key, value) for key, value in list(params.items()))
         search_url = self._VRTNU_SEARCH_URL + '?' + querystring.replace(' ', '%20')  # Only encode spaces to minimize url length
-
-        from json import loads
         if cache_file:
-            # Get api data from cache if it is fresh
-            search_json = get_cache(cache_file, ttl=ttl('indirect'))
-            if not search_json:
-                log(2, 'URL get: {url}', url=unquote(search_url))
-                req = Request(search_url)
-                try:
-                    search_json = loads(to_unicode(urlopen(req).read()))
-                except (TypeError, ValueError):  # No JSON object could be decoded
-                    return []
-                except HTTPError as exc:
-                    url_length = len(req.get_selector())
-                    if exc.code == 413 and url_length > 8192:
-                        ok_dialog(heading='HTTP Error 413', message=localize(30967))
-                        log_error('HTTP Error 413: Exceeded maximum url length: '
-                                  'VRT Search API url has a length of {length} characters.', length=url_length)
-                        return []
-                    if exc.code == 400 and 7600 <= url_length <= 8192:
-                        ok_dialog(heading='HTTP Error 400', message=localize(30967))
-                        log_error('HTTP Error 400: Probably exceeded maximum url length: '
-                                  'VRT Search API url has a length of {length} characters.', length=url_length)
-                        return []
-                    raise
-                update_cache(cache_file, search_json)
+            search_json = try_cache_or_url_json(url=search_url, cache=cache_file, ttl=ttl('indirect'))
         else:
-            log(2, 'URL get: {url}', url=unquote(search_url))
-            search_json = loads(to_unicode(urlopen(search_url).read()))
+            search_json = get_url_json(url=search_url)
 
         # Check for multiple seasons
         seasons = None
@@ -619,8 +586,9 @@ def get_episodes(self, program=None, season=None, episodes=None, category=None,
         if all_items and total_results > api_page_size:
             for api_page in range(1, api_pages):
                 api_page_url = search_url + '&from=' + str(api_page * api_page_size + 1)
-                api_page_json = loads(to_unicode(urlopen(api_page_url).read()))
-                episodes += api_page_json.get('results', [{}])
+                api_page_json = get_url_json(api_page_url)
+                if api_page_json:
+                    episodes += api_page_json.get('results', [{}])
 
         # Return episodes
         return episodes
diff --git a/resources/lib/favorites.py b/resources/lib/favorites.py
index 7cb5d9332..675d64a06 100644
--- a/resources/lib/favorites.py
+++ b/resources/lib/favorites.py
@@ -11,9 +11,9 @@
 except ImportError:  # Python 2
     from urllib2 import build_opener, install_opener, ProxyHandler, Request, unquote, urlopen
 
-from kodiutils import (container_refresh, get_cache, get_proxies, get_setting, has_credentials,
-                       input_down, invalidate_caches, localize, log, log_error, multiselect,
-                       notification, ok_dialog, to_unicode, update_cache)
+from kodiutils import (container_refresh, get_proxies, get_setting, has_credentials, input_down,
+                       localize, log, log_error, multiselect, notification, ok_dialog)
+from utils import get_cache, get_url_json, invalidate_caches, update_cache
 
 
 class Favorites:
@@ -43,16 +43,8 @@ def refresh(self, ttl=None):
             'content-type': 'application/json',
             'Referer': 'https://www.vrt.be/vrtnu',
         }
-        req = Request('https://video-user-data.vrt.be/favorites', headers=headers)
-        log(2, 'URL get: https://video-user-data.vrt.be/favorites')
-        from json import loads
-        try:
-            favorites_json = loads(to_unicode(urlopen(req).read()))
-        except (TypeError, ValueError):  # No JSON object could be decoded
-            # Force favorites from cache
-            favorites_json = get_cache('favorites.json', ttl=None)
-        else:
-            update_cache('favorites.json', favorites_json)
+        favorites_url = 'https://video-user-data.vrt.be/favorites'
+        favorites_json = get_url_json(url=favorites_url, cache='favorites.json', headers=headers)
 
         if favorites_json:
             self._favorites = favorites_json
diff --git a/resources/lib/kodiutils.py b/resources/lib/kodiutils.py
index b0bb76dc7..e8ac23e0f 100644
--- a/resources/lib/kodiutils.py
+++ b/resources/lib/kodiutils.py
@@ -666,117 +666,6 @@ def delete_cached_thumbnail(url):
     return True
 
 
-def human_delta(seconds):
-    ''' Return a human-readable representation of the TTL '''
-    from math import floor
-    days = int(floor(seconds / (24 * 60 * 60)))
-    seconds = seconds % (24 * 60 * 60)
-    hours = int(floor(seconds / (60 * 60)))
-    seconds = seconds % (60 * 60)
-    if days:
-        return '%d day%s and %d hour%s' % (days, 's' if days != 1 else '', hours, 's' if hours != 1 else '')
-    minutes = int(floor(seconds / 60))
-    seconds = seconds % 60
-    if hours:
-        return '%d hour%s and %d minute%s' % (hours, 's' if hours != 1 else '', minutes, 's' if minutes != 1 else '')
-    if minutes:
-        return '%d minute%s and %d second%s' % (minutes, 's' if minutes != 1 else '', seconds, 's' if seconds != 1 else '')
-    return '%d second%s' % (seconds, 's' if seconds != 1 else '')
-
-
-def get_cache(path, ttl=None):  # pylint: disable=redefined-outer-name
-    ''' Get the content from cache, if it's still fresh '''
-    if get_setting('usehttpcaching', 'true') == 'false':
-        return None
-
-    fullpath = get_cache_path() + path
-    if not exists(fullpath):
-        return None
-
-    from time import localtime, mktime
-    mtime = stat_file(fullpath).st_mtime()
-    now = mktime(localtime())
-    if ttl and now >= mtime + ttl:
-        return None
-
-    if ttl is None:
-        log(3, "Cache '{path}' is forced from cache.", path=path)
-    else:
-        log(3, "Cache '{path}' is fresh, expires in {time}.", path=path, time=human_delta(mtime + ttl - now))
-    with open_file(fullpath, 'r') as fdesc:
-        cache_data = to_unicode(fdesc.read())
-    if not cache_data:
-        return None
-
-    from json import loads
-    try:
-        return loads(cache_data)
-    except (TypeError, ValueError):  # No JSON object could be decoded
-        return None
-
-
-def update_cache(path, data):
-    ''' Update the cache, if necessary '''
-    if get_setting('usehttpcaching', 'true') == 'false':
-        return
-
-    from hashlib import md5
-    from json import dump, dumps
-    fullpath = get_cache_path() + path
-    if exists(fullpath):
-        with open_file(fullpath) as fdesc:
-            cachefile = fdesc.read().encode('utf-8')
-        md5_cache = md5(cachefile)
-    else:
-        md5_cache = 0
-    # Create cache directory if missing
-    if not exists(get_cache_path()):
-        mkdirs(get_cache_path())
-
-    # Avoid writes if possible (i.e. SD cards)
-    if md5_cache != md5(dumps(data).encode('utf-8')):
-        log(3, "Write cache '{path}'.", path=path)
-        with open_file(fullpath, 'w') as fdesc:
-            # dump(data, fdesc, encoding='utf-8')
-            dump(data, fdesc)
-    else:
-        # Update timestamp
-        from os import utime
-        log(3, "Cache '{path}' has not changed, updating mtime only.", path=path)
-        utime(path)
-
-
-def ttl(kind='direct'):
-    ''' Return the HTTP cache ttl in seconds based on kind of relation '''
-    if kind == 'direct':
-        return int(get_setting('httpcachettldirect', 5)) * 60
-    if kind == 'indirect':
-        return int(get_setting('httpcachettlindirect', 60)) * 60
-    return 5 * 60
-
-
-def refresh_caches(cache_file=None):
-    ''' Invalidate the needed caches and refresh container '''
-    files = ['favorites.json', 'oneoff.json', 'resume_points.json']
-    if cache_file and cache_file not in files:
-        files.append(cache_file)
-    invalidate_caches(*files)
-    container_refresh()
-    notification(message=localize(30981))
-
-
-def invalidate_caches(*caches):
-    ''' Invalidate multiple cache files '''
-    import fnmatch
-    _, files = listdir(get_cache_path())
-    # Invalidate caches related to menu list refreshes
-    removes = set()
-    for expr in caches:
-        removes.update(fnmatch.filter(files, expr))
-    for filename in removes:
-        delete(get_cache_path() + filename)
-
-
 def input_down():
     ''' Move the cursor down '''
     jsonrpc(method='Input.Down')
diff --git a/resources/lib/playerinfo.py b/resources/lib/playerinfo.py
index 9f3d9fce8..b61d1c004 100644
--- a/resources/lib/playerinfo.py
+++ b/resources/lib/playerinfo.py
@@ -229,8 +229,8 @@ def push_position(self, position=0, total=100):
 
         # Do not reload container and rely on Kodi internal watch status when watching a single episode that is partly watched.
         # Kodi internal watch status is only updated when the play action is initiated from the GUI, so this only works for single episodes.
-        if (not self.path.startswith('plugin://plugin.video.vrt.nu/play/upnext') and
-                ignoresecondsatstart < position < (100 - ignorepercentatend) / 100 * total):
+        if (not self.path.startswith('plugin://plugin.video.vrt.nu/play/upnext')
+                and ignoresecondsatstart < position < (100 - ignorepercentatend) / 100 * total):
             return
 
         # Do not reload container when playing or not stopped
diff --git a/resources/lib/resumepoints.py b/resources/lib/resumepoints.py
index d02c168b5..d8a6b9b9e 100644
--- a/resources/lib/resumepoints.py
+++ b/resources/lib/resumepoints.py
@@ -12,9 +12,9 @@
     from urllib2 import build_opener, install_opener, ProxyHandler, Request, HTTPError, urlopen
 
 from data import SECONDS_MARGIN
-from kodiutils import (container_refresh, get_cache, get_proxies, get_setting, has_credentials,
-                       input_down, invalidate_caches, localize, log, log_error, notification,
-                       to_unicode, update_cache)
+from kodiutils import (container_refresh, get_proxies, get_setting, has_credentials, input_down,
+                       localize, log, log_error, notification)
+from utils import get_cache, get_url_json, invalidate_caches, update_cache
 
 
 class ResumePoints:
@@ -44,16 +44,8 @@ def refresh(self, ttl=None):
             'content-type': 'application/json',
             'Referer': 'https://www.vrt.be/vrtnu',
         }
-        req = Request('https://video-user-data.vrt.be/resume_points', headers=headers)
-        log(2, 'URL get: https://video-user-data.vrt.be/resume_points')
-        from json import loads
-        try:
-            resumepoints_json = loads(to_unicode(urlopen(req).read()))
-        except (TypeError, ValueError):  # No JSON object could be decoded
-            # Force resumepoints from cache
-            resumepoints_json = get_cache('resume_points.json', ttl=None)
-        else:
-            update_cache('resume_points.json', resumepoints_json)
+        resumepoints_url = 'https://video-user-data.vrt.be/resume_points'
+        resumepoints_json = get_url_json(url=resumepoints_url, cache='resume_points.json', headers=headers)
 
         if resumepoints_json:
             self._resumepoints = resumepoints_json
diff --git a/resources/lib/search.py b/resources/lib/search.py
index f7ced9db3..abb77ebaf 100644
--- a/resources/lib/search.py
+++ b/resources/lib/search.py
@@ -7,7 +7,8 @@
 from favorites import Favorites
 from resumepoints import ResumePoints
 from kodiutils import (addon_profile, container_refresh, end_of_directory, get_search_string,
-                       get_setting, localize, ok_dialog, open_file, show_listing, ttl, url_for)
+                       get_setting, localize, log_error, ok_dialog, open_file, show_listing, url_for)
+from utils import ttl
 
 
 class Search:
@@ -25,7 +26,9 @@ def read_history(self):
         with open_file(self._search_history, 'r') as fdesc:
             try:
                 history = load(fdesc)
-            except (TypeError, ValueError):  # No JSON object could be decoded
+            except (TypeError, ValueError) as exc:  # No JSON object could be decoded
+                fdesc.seek(0, 0)
+                log_error('{exc}\nDATA: {data}', exc=exc, data=fdesc.read())
                 history = []
         return history
 
diff --git a/resources/lib/service.py b/resources/lib/service.py
index e1b380fa0..1be125abb 100644
--- a/resources/lib/service.py
+++ b/resources/lib/service.py
@@ -6,11 +6,12 @@
 from xbmc import Monitor
 from apihelper import ApiHelper
 from favorites import Favorites
-from kodiutils import container_refresh, invalidate_caches, log
+from kodiutils import container_refresh, log
 from playerinfo import PlayerInfo
 from resumepoints import ResumePoints
 from statichelper import to_unicode
 from tokenresolver import TokenResolver
+from utils import invalidate_caches
 
 
 class VrtMonitor(Monitor):
diff --git a/resources/lib/streamservice.py b/resources/lib/streamservice.py
index d66aa3a6e..796b707bd 100644
--- a/resources/lib/streamservice.py
+++ b/resources/lib/streamservice.py
@@ -17,6 +17,7 @@
                        get_proxies, get_setting, has_inputstream_adaptive, kodi_version,
                        localize, log, log_error, mkdir, ok_dialog, open_settings, supports_drm)
 from statichelper import to_unicode
+from utils import get_url_json
 
 
 class StreamService:
@@ -40,9 +41,9 @@ def __init__(self, _tokenresolver):
 
     def _get_vualto_license_url(self):
         ''' Get Widevine license URL from Vualto API '''
-        from json import loads
-        log(2, 'URL get: {url}', url=unquote(self._VUPLAY_API_URL))
-        self._vualto_license_url = loads(to_unicode(urlopen(self._VUPLAY_API_URL).read())).get('drm_providers', dict()).get('widevine', dict()).get('la_url')
+        json_data = get_url_json(url=self._VUPLAY_API_URL)
+        if json_data:
+            self._vualto_license_url = json_data.get('drm_providers', dict()).get('widevine', dict()).get('la_url')
 
     @staticmethod
     def _create_settings_dir():
@@ -154,18 +155,18 @@ def _get_stream_json(self, api_data, roaming=False):
             playertoken = self._tokenresolver.get_playertoken(token_url, token_variant='ondemand', roaming=roaming)
 
         # Construct api_url and get video json
-        stream_json = None
-        if playertoken:
-            from json import loads
-            api_url = api_data.media_api_url + '/videos/' + api_data.publication_id + \
-                      api_data.video_id + '?vrtPlayerToken=' + playertoken + '&client=' + api_data.client
-            log(2, 'URL get: {url}', url=unquote(api_url))
-            try:
-                stream_json = loads(to_unicode(urlopen(api_url).read()))
-            except HTTPError as exc:
-                stream_json = loads(to_unicode(exc.read()))
-
-        return stream_json
+        if not playertoken:
+            return None
+        api_url = api_data.media_api_url + '/videos/' + api_data.publication_id + \
+                  api_data.video_id + '?vrtPlayerToken=' + playertoken + '&client=' + api_data.client
+        try:
+            json_data = get_url_json(url=api_url)
+        except HTTPError as exc:
+            from json import load
+            return load(exc)
+        if not json_data:
+            return None
+        return json_data
 
     @staticmethod
     def _fix_virtualsubclip(manifest_url, duration):
diff --git a/resources/lib/tokenresolver.py b/resources/lib/tokenresolver.py
index 8cf85650d..73bee0e32 100644
--- a/resources/lib/tokenresolver.py
+++ b/resources/lib/tokenresolver.py
@@ -3,10 +3,11 @@
 ''' This module contains all functionality for VRT NU API authentication. '''
 
 from __future__ import absolute_import, division, unicode_literals
-from statichelper import from_unicode, to_unicode
+from statichelper import from_unicode
 from kodiutils import (addon_profile, delete, exists, get_proxies, get_setting, get_tokens_path,
-                       has_credentials, invalidate_caches, listdir, localize, log, log_error,
-                       mkdir, notification, ok_dialog, open_file, open_settings, set_setting)
+                       has_credentials, listdir, localize, log, log_error, mkdir, notification,
+                       ok_dialog, open_file, open_settings, set_setting)
+from utils import get_url_json, invalidate_caches
 
 try:  # Python 3
     import http.cookiejar as cookielib
@@ -44,7 +45,6 @@ def __init__(self):
 
     def get_playertoken(self, token_url, token_variant=None, roaming=False):
         ''' Get cached or new playertoken, variants: live or ondemand '''
-        token = None
         xvrttoken_variant = None
         if roaming:
             xvrttoken_variant = 'roaming'
@@ -53,34 +53,35 @@ def get_playertoken(self, token_url, token_variant=None, roaming=False):
             delete(path)
         else:
             token = self._get_cached_token('vrtPlayerToken', token_variant)
+            if token:
+                return token
 
-        if token is None:
-            if token_variant == 'ondemand' or roaming:
-                xvrttoken = self.get_xvrttoken(token_variant=xvrttoken_variant)
-                if xvrttoken is None:
-                    return token
-                cookie_value = 'X-VRT-Token=' + xvrttoken
-                headers = {'Content-Type': 'application/json', 'Cookie': cookie_value}
-            else:
-                headers = {'Content-Type': 'application/json'}
-            token = self._get_new_playertoken(token_url, headers, token_variant)
-
-        return token
+        if token_variant == 'ondemand' or roaming:
+            xvrttoken = self.get_xvrttoken(token_variant=xvrttoken_variant)
+            if xvrttoken is None:
+                return None
+            cookie_value = 'X-VRT-Token=' + xvrttoken
+            headers = {'Content-Type': 'application/json', 'Cookie': cookie_value}
+        else:
+            headers = {'Content-Type': 'application/json'}
+        return self._get_new_playertoken(token_url, headers, token_variant)
 
     def get_xvrttoken(self, token_variant=None):
         ''' Get cached, fresh or new X-VRT-Token, variants: None, user or roaming '''
         token = self._get_cached_token('X-VRT-Token', token_variant)
-        if token is None:
-            # Try to refresh if we have a cached refresh token (vrtlogin-rt)
-            refresh_token = self._get_cached_token('vrtlogin-rt')
-            if refresh_token and token_variant != 'roaming':
-                token = self._get_fresh_token(refresh_token, 'X-VRT-Token', token_variant=token_variant)
-            elif token_variant == 'user':
-                token = self._get_new_user_xvrttoken()
-            else:
-                # Login
-                token = self.login(token_variant=token_variant)
-        return token
+        if token:
+            return token
+
+        # Try to refresh if we have a cached refresh token (vrtlogin-rt)
+        refresh_token = self._get_cached_token('vrtlogin-rt')
+        if refresh_token and token_variant != 'roaming':
+            return self._get_fresh_token(refresh_token, 'X-VRT-Token', token_variant=token_variant)
+
+        if token_variant == 'user':
+            return self._get_new_user_xvrttoken()
+
+        # Login
+        return self.login(token_variant=token_variant)
 
     @staticmethod
     def _get_token_path(token_name, token_variant):
@@ -91,25 +92,32 @@ def _get_token_path(token_name, token_variant):
 
     def _get_cached_token(self, token_name, token_variant=None):
         ''' Return a cached token '''
-        from json import load
-        cached_token = None
         path = self._get_token_path(token_name, token_variant)
-        if exists(path):
-            from datetime import datetime
-            import dateutil.parser
-            import dateutil.tz
-            with open_file(path) as fdesc:
+        if not exists(path):
+            return None
+
+        from json import load
+        with open_file(path) as fdesc:
+            try:
                 token = load(fdesc)
-            now = datetime.now(dateutil.tz.tzlocal())
-            exp = dateutil.parser.parse(token.get('expirationDate'))
-            if exp > now:
-                log(3, "Got cached token '{path}'", path=path)
-                cached_token = token.get(token_name)
-            else:
-                log(2, "Cached token '{path}' deleted", path=path)
-                delete(path)
-        return cached_token
+            except (TypeError, ValueError) as exc:  # No JSON object could be decoded
+                fdesc.seek(0, 0)
+                log_error('{exc}\nDATA: {data}', exc=exc, data=fdesc.read())
+                return None
+
+        from datetime import datetime
+        import dateutil.parser
+        import dateutil.tz
+        now = datetime.now(dateutil.tz.tzlocal())
+        exp = dateutil.parser.parse(token.get('expirationDate'))
+        if exp <= now:
+            log(2, "Cached token '{path}' deleted", path=path)
+            delete(path)
+            return None
+
+        log(3, "Got cached token '{path}'", path=path)
+        return token.get(token_name)
 
     def _set_cached_token(self, token, token_variant=None):
         ''' Save token to cache'''
@@ -125,12 +133,11 @@ def _set_cached_token(self, token, token_variant=None):
 
     def _get_new_playertoken(self, token_url, headers, token_variant=None):
         ''' Get new playertoken from VRT Token API '''
-        from json import loads
-        log(2, 'URL post: {url}', url=unquote(token_url))
-        req = Request(token_url, data=b'', headers=headers)
-        playertoken = loads(to_unicode(urlopen(req).read()))
-        if playertoken is not None:
-            self._set_cached_token(playertoken, token_variant)
+        playertoken = get_url_json(url=token_url, headers=headers, data=b'')
+        if not playertoken:
+            return None
+
+        self._set_cached_token(playertoken, token_variant)
         return playertoken.get('vrtPlayerToken')
 
     def login(self, refresh=False, token_variant=None):
@@ -166,12 +173,10 @@ def login(self, refresh=False, token_variant=None):
         login_json = self._get_login_json()
 
         # Get token
-        token = self._get_new_xvrttoken(login_json, token_variant)
-        return token
+        return self._get_new_xvrttoken(login_json, token_variant)
 
     def _get_login_json(self):
         ''' Get login json '''
-        from json import loads
         payload = dict(
             loginID=from_unicode(get_setting('username')),
             password=from_unicode(get_setting('password')),
@@ -180,45 +185,44 @@ def _get_login_json(self):
             targetEnv='jssdk',
         )
         data = urlencode(payload).encode()
-        log(2, 'URL post: {url}', url=unquote(self._LOGIN_URL))
-        req = Request(self._LOGIN_URL, data=data)
-        login_json = loads(to_unicode(urlopen(req).read()))
-        return login_json
+        json_data = get_url_json(self._LOGIN_URL, data=data)
+        if not json_data:
+            return dict()
+        return json_data
 
     def _get_new_xvrttoken(self, login_json, token_variant=None):
         ''' Get new X-VRT-Token from VRT NU website '''
-        token = None
         login_token = login_json.get('sessionInfo', dict()).get('login_token')
-        if login_token:
-            from json import dumps
-            login_cookie = 'glt_%s=%s' % (self._API_KEY, login_token)
-            payload = dict(
-                uid=login_json.get('UID'),
-                uidsig=login_json.get('UIDSignature'),
-                ts=login_json.get('signatureTimestamp'),
-                email=from_unicode(get_setting('username')),
-            )
-            data = dumps(payload).encode()
-            headers = {'Content-Type': 'application/json', 'Cookie': login_cookie}
-            log(2, 'URL post: {url}', url=unquote(self._TOKEN_GATEWAY_URL))
-            req = Request(self._TOKEN_GATEWAY_URL, data=data, headers=headers)
-            try:  # Python 3
-                setcookie_header = urlopen(req).info().get('Set-Cookie')
-            except AttributeError:  # Python 2
-                setcookie_header = urlopen(req).info().getheader('Set-Cookie')
-            xvrttoken = TokenResolver._create_token_dictionary(setcookie_header)
-            if token_variant == 'roaming':
-                xvrttoken = self._get_roaming_xvrttoken(xvrttoken)
-            if xvrttoken is not None:
-                token = xvrttoken.get('X-VRT-Token')
-                self._set_cached_token(xvrttoken, token_variant)
-                notification(message=localize(30952))  # Login succeeded.
-        return token
+        if not login_token:
+            return None
+
+        from json import dumps
+        login_cookie = 'glt_%s=%s' % (self._API_KEY, login_token)
+        payload = dict(
+            uid=login_json.get('UID'),
+            uidsig=login_json.get('UIDSignature'),
+            ts=login_json.get('signatureTimestamp'),
+            email=from_unicode(get_setting('username')),
+        )
+        data = dumps(payload).encode()
+        headers = {'Content-Type': 'application/json', 'Cookie': login_cookie}
+        log(2, 'URL post: {url}', url=unquote(self._TOKEN_GATEWAY_URL))
+        req = Request(self._TOKEN_GATEWAY_URL, data=data, headers=headers)
+        try:  # Python 3
+            setcookie_header = urlopen(req).info().get('Set-Cookie')
+        except AttributeError:  # Python 2
+            setcookie_header = urlopen(req).info().getheader('Set-Cookie')
+        xvrttoken = TokenResolver._create_token_dictionary(setcookie_header)
+        if token_variant == 'roaming':
+            xvrttoken = self._get_roaming_xvrttoken(xvrttoken)
+        if xvrttoken is None:
+            return None
+        self._set_cached_token(xvrttoken, token_variant)
+        notification(message=localize(30952))  # Login succeeded.
+        return xvrttoken.get('X-VRT-Token')
 
     def _get_new_user_xvrttoken(self):
         ''' Get new 'user' X-VRT-Token from VRT NU website '''
-        token = None
-
-        # Get login json
         login_json = self._get_login_json()
 
@@ -241,16 +245,16 @@ def _get_new_user_xvrttoken(self):
         opener.open(self._VRT_LOGIN_URL, data=data)
         xvrttoken = TokenResolver._create_token_dictionary(cookiejar)
         refreshtoken = TokenResolver._create_token_dictionary(cookiejar, cookie_name='vrtlogin-rt')
-        if xvrttoken is not None:
-            token = xvrttoken.get('X-VRT-Token')
-            self._set_cached_token(xvrttoken, token_variant='user')
+        if xvrttoken is None:
+            return None
+
+        self._set_cached_token(xvrttoken, token_variant='user')
         if refreshtoken is not None:
             self._set_cached_token(refreshtoken)
-        return token
+        return xvrttoken.get('X-VRT-Token')
 
     def _get_fresh_token(self, refresh_token, token_name, token_variant=None):
         ''' Refresh an expired X-VRT-Token, vrtlogin-at or vrtlogin-rt token '''
-        token = None
         refresh_url = self._TOKEN_GATEWAY_URL + '/refreshtoken'
         cookie_value = 'vrtlogin-rt=' + refresh_token
         headers = {'Cookie': cookie_value}
@@ -260,14 +264,13 @@ def _get_fresh_token(self, refresh_token, token_name, token_variant=None):
         req = Request(refresh_url, headers=headers)
         opener.open(req)
         token = TokenResolver._create_token_dictionary(cookiejar, token_name)
-        if token is not None:
-            self._set_cached_token(token, token_variant)
-            token = list(token.values())[0]
-        return token
+        if token is None:
+            return None
+        self._set_cached_token(token, token_variant)
+        return list(token.values())[0]
 
     def _get_roaming_xvrttoken(self, xvrttoken):
         ''' Get new 'roaming' X-VRT-Token from VRT NU website '''
-        roaming_xvrttoken = None
         cookie_value = 'X-VRT-Token=' + xvrttoken.get('X-VRT-Token')
         headers = {'Cookie': cookie_value}
         opener = build_opener(NoRedirection, ProxyHandler(self._proxies))
@@ -287,15 +290,16 @@ def _get_roaming_xvrttoken(self, xvrttoken):
         except AttributeError:  # Python 2
             url = opener.open(url).info().getheader('Location')
         headers = {'Cookie': cookie_value}
-        if url is not None:
-            log(2, 'URL get: {url}', url=unquote(url))
-            req = Request(url, headers=headers)
-            try:  # Python 3
-                setcookie_header = opener.open(req).info().get('Set-Cookie')
-            except AttributeError:  # Python 2
-                setcookie_header = opener.open(req).info().getheader('Set-Cookie')
-            roaming_xvrttoken = TokenResolver._create_token_dictionary(setcookie_header)
-        return roaming_xvrttoken
+        if url is None:
+            return None
+
+        log(2, 'URL get: {url}', url=unquote(url))
+        req = Request(url, headers=headers)
+        try:  # Python 3
+            setcookie_header = opener.open(req).info().get('Set-Cookie')
+        except AttributeError:  # Python 2
+            setcookie_header = opener.open(req).info().getheader('Set-Cookie')
+        return TokenResolver._create_token_dictionary(setcookie_header)
 
     @staticmethod
     def _create_token_dictionary(cookie_data, cookie_name='X-VRT-Token'):
diff --git a/resources/lib/tvguide.py b/resources/lib/tvguide.py
index 7505419e8..14b15f2fb 100644
--- a/resources/lib/tvguide.py
+++ b/resources/lib/tvguide.py
@@ -9,18 +9,18 @@
 import dateutil.tz
 
 try:  # Python 3
-    from urllib.request import build_opener, install_opener, ProxyHandler, urlopen
+    from urllib.request import build_opener, install_opener, ProxyHandler
 except ImportError:  # Python 2
-    from urllib2 import build_opener, install_opener, ProxyHandler, urlopen
+    from urllib2 import build_opener, install_opener, ProxyHandler
 
 from data import CHANNELS, RELATIVE_DATES
 from favorites import Favorites
 from helperobjects import TitleItem
 from metadata import Metadata
 from resumepoints import ResumePoints
-from statichelper import find_entry, to_unicode
-from kodiutils import (get_cache, get_proxies, has_addon, localize, localize_datelong, log,
-                       show_listing, ttl, update_cache, url_for)
+from statichelper import find_entry
+from kodiutils import get_proxies, has_addon, localize, localize_datelong, show_listing, url_for
+from utils import get_url_json, try_cache_or_url_json, ttl
 
 
 class TVGuide:
@@ -156,17 +156,11 @@ def get_episode_items(self, date, channel):
 
         cache_file = 'schedule.%s.json' % date
         if date in ('today', 'yesterday', 'tomorrow'):
-            # Try the cache if it is fresh
-            schedule = get_cache(cache_file, ttl=ttl('indirect'))
-            if not schedule:
-                from json import loads
-                log(2, 'URL get: {url}', url=epg_url)
-                schedule = loads(to_unicode(urlopen(epg_url).read()))
-                update_cache(cache_file, schedule)
+            schedule = try_cache_or_url_json(url=epg_url, cache=cache_file, ttl=ttl('indirect'))
         else:
-            from json import loads
-            log(2, 'URL get: {url}', url=epg_url)
-            schedule = loads(to_unicode(urlopen(epg_url).read()))
+            schedule = get_url_json(url=epg_url)
+            if not schedule:
+                return []
 
         entry = find_entry(CHANNELS, 'name', channel)
         if entry:
@@ -208,14 +202,9 @@ def playing_now(self, channel):
         # Daily EPG information shows information from 6AM until 6AM
         if epg.hour < 6:
             epg += timedelta(days=-1)
-        # Try the cache if it is fresh
-        schedule = get_cache('schedule.today.json', ttl=ttl('indirect'))
-        if not schedule:
-            from json import loads
-            epg_url = epg.strftime(self.VRT_TVGUIDE)
-            log(2, 'URL get: {url}', url=epg_url)
-            schedule = loads(to_unicode(urlopen(epg_url).read()))
-            update_cache('schedule.today.json', schedule)
+
+        epg_url = epg.strftime(self.VRT_TVGUIDE)
+        schedule = try_cache_or_url_json(url=epg_url, cache='schedule.today.json', ttl=ttl('indirect'))
 
         entry = find_entry(CHANNELS, 'name', channel)
         if not entry:
@@ -246,14 +235,9 @@ def live_description(self, channel):
         # Daily EPG information shows information from 6AM until 6AM
         if epg.hour < 6:
             epg += timedelta(days=-1)
-        # Try the cache if it is fresh
-        schedule = get_cache('schedule.today.json', ttl=ttl('indirect'))
-        if not schedule:
-            from json import loads
-            epg_url = epg.strftime(self.VRT_TVGUIDE)
-            log(2, 'URL get: {url}', url=epg_url)
-            schedule = loads(to_unicode(urlopen(epg_url).read()))
-            update_cache('schedule.today.json', schedule)
+
+        epg_url = epg.strftime(self.VRT_TVGUIDE)
+        schedule = try_cache_or_url_json(url=epg_url, cache='schedule.today.json', ttl=ttl('indirect'))
 
         entry = find_entry(CHANNELS, 'name', channel)
         if not entry:
diff --git a/resources/lib/utils.py b/resources/lib/utils.py
new file mode 100644
index 000000000..6de911b52
--- /dev/null
+++ b/resources/lib/utils.py
@@ -0,0 +1,172 @@
+# -*- coding: utf-8 -*-
+# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+''' All functionality that requires Kodi imports '''
+
+from __future__ import absolute_import, division, unicode_literals
+from kodiutils import (container_refresh, delete, exists, get_cache_path, get_setting, listdir,
+                       localize, log, log_error, mkdirs, notification, ok_dialog, open_file, stat_file)
+from statichelper import to_unicode
+
+try:  # Python 3
+    from urllib.error import HTTPError
+    from urllib.parse import unquote
+    from urllib.request import urlopen, Request
+except ImportError:  # Python 2
+    from urllib2 import HTTPError, unquote, urlopen, Request
+
+
+def human_delta(seconds):
+    ''' Return a human-readable representation of the TTL '''
+    from math import floor
+    days = int(floor(seconds / (24 * 60 * 60)))
+    seconds = seconds % (24 * 60 * 60)
+    hours = int(floor(seconds / (60 * 60)))
+    seconds = seconds % (60 * 60)
+    if days:
+        return '%d day%s and %d hour%s' % (days, 's' if days != 1 else '', hours, 's' if hours != 1 else '')
+    minutes = int(floor(seconds / 60))
+    seconds = seconds % 60
+    if hours:
+        return '%d hour%s and %d minute%s' % (hours, 's' if hours != 1 else '', minutes, 's' if minutes != 1 else '')
+    if minutes:
+        return '%d minute%s and %d second%s' % (minutes, 's' if minutes != 1 else '', seconds, 's' if seconds != 1 else '')
+    return '%d second%s' % (seconds, 's' if seconds != 1 else '')
+
+
+def get_cache(path, ttl=None):  # pylint: disable=redefined-outer-name
+    ''' Get the content from cache, if it's still fresh '''
+    if get_setting('usehttpcaching', 'true') == 'false':
+        return None
+
+    fullpath = get_cache_path() + path
+    if not exists(fullpath):
+        return None
+
+    from time import localtime, mktime
+    mtime = stat_file(fullpath).st_mtime()
+    now = mktime(localtime())
+    if ttl and now >= mtime + ttl:
+        return None
+
+    if ttl is None:
+        log(3, "Cache '{path}' is forced from cache.", path=path)
+    else:
+        log(3, "Cache '{path}' is fresh, expires in {time}.", path=path, time=human_delta(mtime + ttl - now))
+    from json import load
+    with open_file(fullpath, 'r') as fdesc:
+        try:
+            return load(fdesc)
+        except (TypeError, ValueError) as exc:  # No JSON object could be decoded
+            fdesc.seek(0, 0)
+            log_error('{exc}\nDATA: {data}', exc=exc, data=fdesc.read())
+            return None
+
+
+def update_cache(path, data):
+    ''' Update the cache, if necessary '''
+    if get_setting('usehttpcaching', 'true') == 'false':
+        return
+
+    from hashlib import md5
+    from json import dump, dumps
+    fullpath = get_cache_path() + path
+    if exists(fullpath):
+        with open_file(fullpath) as fdesc:
+            cachefile = fdesc.read().encode('utf-8')
+        md5_cache = md5(cachefile)
+    else:
+        md5_cache = 0
+    # Create cache directory if missing
+    if not exists(get_cache_path()):
+        mkdirs(get_cache_path())
+
+    # Avoid writes if possible (i.e. SD cards)
+    if md5_cache != md5(dumps(data).encode('utf-8')):
+        log(3, "Write cache '{path}'.", path=path)
+        with open_file(fullpath, 'w') as fdesc:
+            # dump(data, fdesc, encoding='utf-8')
+            dump(data, fdesc)
+    else:
+        # Update timestamp
+        from os import utime
+        log(3, "Cache '{path}' has not changed, updating mtime only.", path=path)
+        utime(path)
+
+
+def ttl(kind='direct'):
+    ''' Return the HTTP cache ttl in seconds based on kind of relation '''
+    if kind == 'direct':
+        return int(get_setting('httpcachettldirect', 5)) * 60
+    if kind == 'indirect':
+        return int(get_setting('httpcachettlindirect', 60)) * 60
+    return 5 * 60
+
+
+def get_url_json(url, cache=None, headers=None, data=None):
+    ''' Return HTTP data '''
+    if headers is None:
+        headers = dict()
+    from json import load, loads
+    log(2, 'URL get: {url}', url=unquote(url))
+    req = Request(url, headers=headers)
+    if data is not None:
+        req.data = data
+    try:
+        try:
+            json_data = load(urlopen(req))
+        except TypeError:  # the JSON object must be str, not 'bytes'
+            json_data = loads(to_unicode(urlopen(req).read()))
+    except ValueError as exc:  # No JSON object could be decoded
+        log_error('JSON Error: {exc}', exc=exc)
+        return []
+    except HTTPError as exc:
+        if hasattr(req, 'selector'):  # Python 3.4+
+            url_length = len(req.selector)
+        else:  # Python 2.7
+            url_length = len(req.get_selector())
+        if exc.code == 413 and url_length > 8192:
+            ok_dialog(heading='HTTP Error 413', message=localize(30967))
+            log_error('HTTP Error 413: Exceeded maximum url length: '
+                      'VRT Search API url has a length of {length} characters.', length=url_length)
+            return []
+        if exc.code == 400 and 7600 <= url_length <= 8192:
+            ok_dialog(heading='HTTP Error 400', message=localize(30967))
+            log_error('HTTP Error 400: Probably exceeded maximum url length: '
+                      'VRT Search API url has a length of {length} characters.', length=url_length)
+            return []
+        raise
+    else:
+        if cache:
+            update_cache(cache, json_data)
+    return json_data
+
+
+def try_cache_or_url_json(url, cache, headers=None, ttl=None):  # pylint: disable=redefined-outer-name
+    ''' Return data from cache, if any, else make an HTTP request '''
+    # Get api data from cache if it is fresh
+    json_data = get_cache(cache, ttl=ttl)
+    if json_data is not None:
+        return json_data
+    return get_url_json(url, cache=cache, headers=headers)
+
+
+def refresh_caches(cache_file=None):
+    ''' Invalidate the needed caches and refresh container '''
+    files = ['favorites.json', 'oneoff.json', 'resume_points.json']
+    if cache_file and cache_file not in files:
+        files.append(cache_file)
+    invalidate_caches(*files)
+    container_refresh()
+    notification(message=localize(30981))
+
+
+def invalidate_caches(*caches):
+    ''' Invalidate multiple cache files '''
+    import fnmatch
+    _, files = listdir(get_cache_path())
+    # Invalidate caches related to menu list refreshes
+    removes = set()
+    for expr in caches:
+        removes.update(fnmatch.filter(files, expr))
+    for filename in removes:
+        delete(get_cache_path() + filename)
diff --git a/resources/lib/vrtplayer.py b/resources/lib/vrtplayer.py
index c6fae2b09..025987cbb 100644
--- a/resources/lib/vrtplayer.py
+++ b/resources/lib/vrtplayer.py
@@ -6,10 +6,12 @@
 from apihelper import ApiHelper
 from favorites import Favorites
 from helperobjects import TitleItem
+from kodiutils import (delete_cached_thumbnail, end_of_directory, get_addon_info, get_setting,
+                       has_credentials, localize, log_error, ok_dialog, play, set_setting,
+                       show_listing, url_for)
 from resumepoints import ResumePoints
 from statichelper import find_entry
-from kodiutils import (delete_cached_thumbnail, end_of_directory, get_addon_info, get_setting, has_credentials,
-                       localize, log_error, ok_dialog, play, set_setting, show_listing, ttl, url_for)
+from utils import ttl
 
 
 class VRTPlayer:
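For readers skimming the patch, the sketch below illustrates the fetch-and-decode pattern that the new `resources/lib/utils.py` `get_url_json()` consolidates out of the individual modules. It is a simplified, Kodi-free approximation, not the add-on's actual helper: it assumes Python 3 and the standard library only, the name `fetch_json` and the example date in the URL are illustrative, and the Kodi logging, dialog and cache-update calls from the real function are deliberately omitted.

```python
# Minimal sketch of the URL-to-JSON pattern centralized by utils.get_url_json()
# (illustrative only; the real helper also logs, shows Kodi dialogs and updates the cache).
import json
from urllib.error import HTTPError
from urllib.request import Request, urlopen


def fetch_json(url, headers=None, data=None):
    ''' Fetch a URL and decode the response as JSON, tolerating overlong-URL errors '''
    request = Request(url, headers=headers or {}, data=data)
    try:
        with urlopen(request) as response:
            return json.load(response)
    except ValueError:  # Response was not valid JSON
        return []
    except HTTPError as exc:
        # The VRT Search API rejects very long URLs with 413 (or 400 near the limit),
        # so treat those as "no results" instead of failing hard.
        url_length = len(request.selector)
        if exc.code == 413 and url_length > 8192:
            return []
        if exc.code == 400 and 7600 <= url_length <= 8192:
            return []
        raise


if __name__ == '__main__':
    # Example call against the schedule endpoint used elsewhere in this patch;
    # the date is only a placeholder.
    print(fetch_json('https://www.vrt.be/bin/epg/schedule.2019-10-01.json'))
```

Centralizing this logic is what lets the per-module `from json import loads` / `urlopen()` blocks in apihelper, favorites, resumepoints, streamservice, tokenresolver and tvguide collapse into single calls to `get_url_json()` or `try_cache_or_url_json()`.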