Attempt to use json.load(fdesc) again
This PR includes:
- Use json.load(fdesc)
- Early exit in tokenresolver
dagwieers committed Dec 9, 2019
1 parent 2d04db2 commit c8e71ed
Showing 13 changed files with 353 additions and 342 deletions.
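
The diffs below call two new helpers, get_url_json() and get_cached_url_json(), defined in resources/lib/utils.py together with the relocated get_cache(), update_cache(), ttl(), invalidate_caches() and refresh_caches(). The utils.py diff is among the files not rendered on this page, so here is a minimal sketch of the two fetch helpers, inferred from their call sites. The names come from the imports below; the bodies are assumptions, not the actual utils.py code:

    from json import load
    try:  # Python 3
        from urllib.request import Request, urlopen
    except ImportError:  # Python 2
        from urllib2 import Request, urlopen


    def get_url_json(url, cache=None, headers=None):
        ''' Sketch: fetch a URL and parse the response with json.load() '''
        # get_cache()/update_cache() are the helpers deleted from kodiutils.py below,
        # presumably moved into utils.py unchanged.
        req = Request(url, headers=headers or dict())
        try:
            json_data = load(urlopen(req))  # the response is file-like, so no .read() is needed
        except (TypeError, ValueError):  # No JSON object could be decoded
            return get_cache(cache, ttl=None) if cache else None  # fall back to a stale cached copy
        if cache:
            update_cache(cache, json_data)
        return json_data


    def get_cached_url_json(url, cache, ttl=None, headers=None):
        ''' Sketch: return fresh cached data if available, else fetch and cache '''
        json_data = get_cache(cache, ttl=ttl)
        if json_data:
            return json_data
        return get_url_json(url, cache=cache, headers=headers)

The URL-length error handling (HTTP 413/400) dropped from get_episodes() below has presumably moved into get_url_json() as well; it is omitted from this sketch for brevity.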
3 changes: 2 additions & 1 deletion resources/lib/addon.py
@@ -10,8 +10,9 @@
 except ImportError:  # Python 2
     from urllib import unquote_plus

-from kodiutils import localize, log_access, notification, refresh_caches
+from kodiutils import localize, log_access, notification
 from statichelper import from_unicode, to_unicode
+from utils import refresh_caches

 plugin = Plugin()  # pylint: disable=invalid-name

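refresh_caches() itself is unchanged here; only its home moves from kodiutils.py to utils.py (its old body is visible among the kodiutils.py deletions below). Call sites only swap the import. A hypothetical usage, based on that body:

    from utils import refresh_caches

    refresh_caches()                             # invalidates favorites.json, oneoff.json, resume_points.json
    refresh_caches(cache_file='programs.json')   # also invalidates one caller-specific cache file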
64 changes: 16 additions & 48 deletions resources/lib/apihelper.py
@@ -5,21 +5,20 @@
 from __future__ import absolute_import, division, unicode_literals

 try:  # Python 3
-    from urllib.error import HTTPError
     from urllib.parse import quote_plus, unquote
-    from urllib.request import build_opener, install_opener, ProxyHandler, Request, urlopen
+    from urllib.request import build_opener, install_opener, ProxyHandler, urlopen
 except ImportError:  # Python 2
     from urllib import quote_plus
-    from urllib2 import build_opener, install_opener, ProxyHandler, Request, HTTPError, unquote, urlopen
+    from urllib2 import build_opener, install_opener, ProxyHandler, unquote, urlopen

 from data import CHANNELS
 from helperobjects import TitleItem
-from kodiutils import (delete_cached_thumbnail, get_cache, get_global_setting, get_proxies, get_setting,
-                       has_addon, localize, localize_from_data, log, log_error, ok_dialog, ttl, update_cache,
-                       url_for)
-from statichelper import (add_https_method, convert_html_to_kodilabel, find_entry, from_unicode, play_url_to_id,
-                          program_to_url, realpage, to_unicode, strip_newlines, url_to_program)
+from kodiutils import (delete_cached_thumbnail, get_global_setting, get_proxies, get_setting,
+                       has_addon, localize, localize_from_data, log, url_for)
 from metadata import Metadata
+from statichelper import (add_https_method, convert_html_to_kodilabel, find_entry, from_unicode, play_url_to_id,
+                          program_to_url, realpage, strip_newlines, url_to_program)
+from utils import get_cache, get_cached_url_json, get_url_json, ttl, update_cache


 class ApiHelper:
@@ -57,16 +56,10 @@ def get_tvshows(self, category=None, channel=None, feature=None):
         if not category and not channel and not feature:
             params['facets[transcodingStatus]'] = 'AVAILABLE'  # Required for getting results in Suggests API
         cache_file = 'programs.json'
-        tvshows = get_cache(cache_file, ttl=ttl('indirect'))  # Try the cache if it is fresh
-        if not tvshows:
-            from json import loads
-            querystring = '&'.join('{}={}'.format(key, value) for key, value in list(params.items()))
-            suggest_url = self._VRTNU_SUGGEST_URL + '?' + querystring
-            log(2, 'URL get: {url}', url=unquote(suggest_url))
-            tvshows = loads(to_unicode(urlopen(suggest_url).read()))
-            update_cache(cache_file, tvshows)
-
-        return tvshows
+        querystring = '&'.join('{}={}'.format(key, value) for key, value in list(params.items()))
+        suggest_url = self._VRTNU_SUGGEST_URL + '?' + querystring
+        return get_cached_url_json(url=suggest_url, cache=cache_file, ttl=ttl('indirect'))

     def list_tvshows(self, category=None, channel=None, feature=None, use_favorites=False):
         ''' List all TV shows for a given category, channel or feature, optionally filtered by favorites '''
@@ -413,8 +406,7 @@ def get_episode_by_air_date(self, channel_name, start_date, end_date=None):
         schedule_date = onairdate
         schedule_datestr = schedule_date.isoformat().split('T')[0]
         url = 'https://www.vrt.be/bin/epg/schedule.%s.json' % schedule_datestr
-        from json import loads
-        schedule_json = loads(to_unicode(urlopen(url).read()))
+        schedule_json = get_url_json(url)
         episodes = schedule_json.get(channel.get('id'), [])
         if not episodes:
             return None
@@ -569,35 +561,10 @@ def get_episodes(self, program=None, season=None, episodes=None, category=None,
         # Construct VRT NU Search API Url and get api data
         querystring = '&'.join('{}={}'.format(key, value) for key, value in list(params.items()))
         search_url = self._VRTNU_SEARCH_URL + '?' + querystring.replace(' ', '%20')  # Only encode spaces to minimize url length
-
-        from json import loads
         if cache_file:
-            # Get api data from cache if it is fresh
-            search_json = get_cache(cache_file, ttl=ttl('indirect'))
-            if not search_json:
-                log(2, 'URL get: {url}', url=unquote(search_url))
-                req = Request(search_url)
-                try:
-                    search_json = loads(to_unicode(urlopen(req).read()))
-                except (TypeError, ValueError):  # No JSON object could be decoded
-                    return []
-                except HTTPError as exc:
-                    url_length = len(req.get_selector())
-                    if exc.code == 413 and url_length > 8192:
-                        ok_dialog(heading='HTTP Error 413', message=localize(30967))
-                        log_error('HTTP Error 413: Exceeded maximum url length: '
-                                  'VRT Search API url has a length of {length} characters.', length=url_length)
-                        return []
-                    if exc.code == 400 and 7600 <= url_length <= 8192:
-                        ok_dialog(heading='HTTP Error 400', message=localize(30967))
-                        log_error('HTTP Error 400: Probably exceeded maximum url length: '
-                                  'VRT Search API url has a length of {length} characters.', length=url_length)
-                        return []
-                    raise
-                update_cache(cache_file, search_json)
+            search_json = get_cached_url_json(url=search_url, cache=cache_file, ttl=ttl('indirect'))
         else:
-            log(2, 'URL get: {url}', url=unquote(search_url))
-            search_json = loads(to_unicode(urlopen(search_url).read()))
+            search_json = get_url_json(url=search_url)

         # Check for multiple seasons
         seasons = None
@@ -619,8 +586,9 @@ def get_episodes(self, program=None, season=None, episodes=None, category=None,
         if all_items and total_results > api_page_size:
             for api_page in range(1, api_pages):
                 api_page_url = search_url + '&from=' + str(api_page * api_page_size + 1)
-                api_page_json = loads(to_unicode(urlopen(api_page_url).read()))
-                episodes += api_page_json.get('results', [{}])
+                api_page_json = get_url_json(api_page_url)
+                if api_page_json:
+                    episodes += api_page_json.get('results', [{}])

         # Return episodes
         return episodes
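Two details in the get_episodes() hunks above are easy to miss. A failed page fetch now contributes nothing instead of raising, thanks to the new if api_page_json guard, and pagination is offset-based: page 0 arrives with the initial request. A worked illustration with assumed values (api_page_size and api_pages are computed elsewhere in apihelper.py, and the URL is illustrative):

    search_url = 'https://vrtnu-api.vrt.be/search?i=video'  # illustrative URL, not verified
    api_page_size = 50                               # assumed page size
    total_results = 120                              # as reported by the first response
    api_pages = -(-total_results // api_page_size)   # ceiling division: 3 pages
    for api_page in range(1, api_pages):             # pages 1 and 2; page 0 is already fetched
        print(search_url + '&from=' + str(api_page * api_page_size + 1))
        # ...&from=51 then ...&from=101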
18 changes: 5 additions & 13 deletions resources/lib/favorites.py
@@ -11,9 +11,9 @@
 except ImportError:  # Python 2
     from urllib2 import build_opener, install_opener, ProxyHandler, Request, unquote, urlopen

-from kodiutils import (container_refresh, get_cache, get_proxies, get_setting, has_credentials,
-                       input_down, invalidate_caches, localize, log, log_error, multiselect,
-                       notification, ok_dialog, to_unicode, update_cache)
+from kodiutils import (container_refresh, get_proxies, get_setting, has_credentials, input_down,
+                       localize, log, log_error, multiselect, notification, ok_dialog)
+from utils import get_cache, get_url_json, invalidate_caches, update_cache


 class Favorites:
@@ -43,16 +43,8 @@ def refresh(self, ttl=None):
             'content-type': 'application/json',
             'Referer': 'https://www.vrt.be/vrtnu',
         }
-        req = Request('https://video-user-data.vrt.be/favorites', headers=headers)
-        log(2, 'URL get: https://video-user-data.vrt.be/favorites')
-        from json import loads
-        try:
-            favorites_json = loads(to_unicode(urlopen(req).read()))
-        except (TypeError, ValueError):  # No JSON object could be decoded
-            # Force favorites from cache
-            favorites_json = get_cache('favorites.json', ttl=None)
-        else:
-            update_cache('favorites.json', favorites_json)
+        favorites_url = 'https://video-user-data.vrt.be/favorites'
+        favorites_json = get_url_json(url=favorites_url, cache='favorites.json', headers=headers)
         if favorites_json:
             self._favorites = favorites_json

111 changes: 0 additions & 111 deletions resources/lib/kodiutils.py
@@ -666,117 +666,6 @@ def delete_cached_thumbnail(url):
     return True


-def human_delta(seconds):
-    ''' Return a human-readable representation of the TTL '''
-    from math import floor
-    days = int(floor(seconds / (24 * 60 * 60)))
-    seconds = seconds % (24 * 60 * 60)
-    hours = int(floor(seconds / (60 * 60)))
-    seconds = seconds % (60 * 60)
-    if days:
-        return '%d day%s and %d hour%s' % (days, 's' if days != 1 else '', hours, 's' if hours != 1 else '')
-    minutes = int(floor(seconds / 60))
-    seconds = seconds % 60
-    if hours:
-        return '%d hour%s and %d minute%s' % (hours, 's' if hours != 1 else '', minutes, 's' if minutes != 1 else '')
-    if minutes:
-        return '%d minute%s and %d second%s' % (minutes, 's' if minutes != 1 else '', seconds, 's' if seconds != 1 else '')
-    return '%d second%s' % (seconds, 's' if seconds != 1 else '')
-
-
-def get_cache(path, ttl=None):  # pylint: disable=redefined-outer-name
-    ''' Get the content from cache, if it's still fresh '''
-    if get_setting('usehttpcaching', 'true') == 'false':
-        return None
-
-    fullpath = get_cache_path() + path
-    if not exists(fullpath):
-        return None
-
-    from time import localtime, mktime
-    mtime = stat_file(fullpath).st_mtime()
-    now = mktime(localtime())
-    if ttl and now >= mtime + ttl:
-        return None
-
-    if ttl is None:
-        log(3, "Cache '{path}' is forced from cache.", path=path)
-    else:
-        log(3, "Cache '{path}' is fresh, expires in {time}.", path=path, time=human_delta(mtime + ttl - now))
-    with open_file(fullpath, 'r') as fdesc:
-        cache_data = to_unicode(fdesc.read())
-    if not cache_data:
-        return None
-
-    from json import loads
-    try:
-        return loads(cache_data)
-    except (TypeError, ValueError):  # No JSON object could be decoded
-        return None
-
-
-def update_cache(path, data):
-    ''' Update the cache, if necessary '''
-    if get_setting('usehttpcaching', 'true') == 'false':
-        return
-
-    from hashlib import md5
-    from json import dump, dumps
-    fullpath = get_cache_path() + path
-    if exists(fullpath):
-        with open_file(fullpath) as fdesc:
-            cachefile = fdesc.read().encode('utf-8')
-        md5_cache = md5(cachefile)
-    else:
-        md5_cache = 0
-    # Create cache directory if missing
-    if not exists(get_cache_path()):
-        mkdirs(get_cache_path())
-
-    # Avoid writes if possible (i.e. SD cards)
-    if md5_cache != md5(dumps(data).encode('utf-8')):
-        log(3, "Write cache '{path}'.", path=path)
-        with open_file(fullpath, 'w') as fdesc:
-            # dump(data, fdesc, encoding='utf-8')
-            dump(data, fdesc)
-    else:
-        # Update timestamp
-        from os import utime
-        log(3, "Cache '{path}' has not changed, updating mtime only.", path=path)
-        utime(path)
-
-
-def ttl(kind='direct'):
-    ''' Return the HTTP cache ttl in seconds based on kind of relation '''
-    if kind == 'direct':
-        return int(get_setting('httpcachettldirect', 5)) * 60
-    if kind == 'indirect':
-        return int(get_setting('httpcachettlindirect', 60)) * 60
-    return 5 * 60
-
-
-def refresh_caches(cache_file=None):
-    ''' Invalidate the needed caches and refresh container '''
-    files = ['favorites.json', 'oneoff.json', 'resume_points.json']
-    if cache_file and cache_file not in files:
-        files.append(cache_file)
-    invalidate_caches(*files)
-    container_refresh()
-    notification(message=localize(30981))
-
-
-def invalidate_caches(*caches):
-    ''' Invalidate multiple cache files '''
-    import fnmatch
-    _, files = listdir(get_cache_path())
-    # Invalidate caches related to menu list refreshes
-    removes = set()
-    for expr in caches:
-        removes.update(fnmatch.filter(files, expr))
-    for filename in removes:
-        delete(get_cache_path() + filename)
-
-
 def input_down():
     ''' Move the cursor down '''
     jsonrpc(method='Input.Down')
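These six helpers (human_delta(), get_cache(), update_cache(), ttl(), refresh_caches() and invalidate_caches()) are deleted here and presumably reappear in resources/lib/utils.py, whose diff did not render on this page. For reference, a few values worked out from the human_delta() and ttl() bodies above:

    human_delta(90061)  # '1 day and 1 hour'        (86400 + 3600 + 61 seconds)
    human_delta(3725)   # '1 hour and 2 minutes'    (3600 + 125 seconds)
    human_delta(125)    # '2 minutes and 5 seconds'
    human_delta(1)      # '1 second'
    ttl('indirect')     # httpcachettlindirect setting (minutes) * 60; 3600 with the default of 60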
4 changes: 2 additions & 2 deletions resources/lib/playerinfo.py
@@ -229,8 +229,8 @@ def push_position(self, position=0, total=100):

         # Do not reload container and rely on Kodi internal watch status when watching a single episode that is partly watched.
         # Kodi internal watch status is only updated when the play action is initiated from the GUI, so this only works for single episodes.
-        if (not self.path.startswith('plugin://plugin.video.vrt.nu/play/upnext') and
-                ignoresecondsatstart < position < (100 - ignorepercentatend) / 100 * total):
+        if (not self.path.startswith('plugin://plugin.video.vrt.nu/play/upnext')
+                and ignoresecondsatstart < position < (100 - ignorepercentatend) / 100 * total):
             return

         # Do not reload container when playing or not stopped
18 changes: 5 additions & 13 deletions resources/lib/resumepoints.py
@@ -12,9 +12,9 @@
     from urllib2 import build_opener, install_opener, ProxyHandler, Request, HTTPError, urlopen

 from data import SECONDS_MARGIN
-from kodiutils import (container_refresh, get_cache, get_proxies, get_setting, has_credentials,
-                       input_down, invalidate_caches, localize, log, log_error, notification,
-                       to_unicode, update_cache)
+from kodiutils import (container_refresh, get_proxies, get_setting, has_credentials, input_down,
+                       localize, log, log_error, notification)
+from utils import get_cache, get_url_json, invalidate_caches, update_cache


 class ResumePoints:
@@ -44,16 +44,8 @@ def refresh(self, ttl=None):
             'content-type': 'application/json',
             'Referer': 'https://www.vrt.be/vrtnu',
         }
-        req = Request('https://video-user-data.vrt.be/resume_points', headers=headers)
-        log(2, 'URL get: https://video-user-data.vrt.be/resume_points')
-        from json import loads
-        try:
-            resumepoints_json = loads(to_unicode(urlopen(req).read()))
-        except (TypeError, ValueError):  # No JSON object could be decoded
-            # Force resumepoints from cache
-            resumepoints_json = get_cache('resume_points.json', ttl=None)
-        else:
-            update_cache('resume_points.json', resumepoints_json)
+        resumepoints_url = 'https://video-user-data.vrt.be/resume_points'
+        resumepoints_json = get_url_json(url=resumepoints_url, cache='resume_points.json', headers=headers)
         if resumepoints_json:
             self._resumepoints = resumepoints_json

7 changes: 5 additions & 2 deletions resources/lib/search.py
@@ -7,7 +7,8 @@
 from favorites import Favorites
 from resumepoints import ResumePoints
 from kodiutils import (addon_profile, container_refresh, end_of_directory, get_search_string,
-                       get_setting, localize, ok_dialog, open_file, show_listing, ttl, url_for)
+                       get_setting, localize, log_error, ok_dialog, open_file, show_listing, url_for)
+from utils import ttl


 class Search:
@@ -25,7 +26,9 @@ def read_history(self):
         with open_file(self._search_history, 'r') as fdesc:
             try:
                 history = load(fdesc)
-            except (TypeError, ValueError):  # No JSON object could be decoded
+            except (TypeError, ValueError) as exc:  # No JSON object could be decoded
+                fdesc.seek(0, 0)
+                log_error('{exc}\nDATA: {data}', exc=exc, data=fdesc.read())
                 history = []
         return history

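The new fdesc.seek(0, 0) matters because json.load() reads the stream to EOF before failing, so the handle must be rewound before the raw payload can be logged. A self-contained illustration, with io.StringIO and made-up data standing in for the search-history file:

    from io import StringIO
    from json import load

    fdesc = StringIO('not valid json')
    try:
        history = load(fdesc)
    except (TypeError, ValueError) as exc:  # json.JSONDecodeError subclasses ValueError
        fdesc.seek(0, 0)                    # rewind: load() already consumed the stream
        print('%s\nDATA: %s' % (exc, fdesc.read()))
        history = []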
3 changes: 2 additions & 1 deletion resources/lib/service.py
@@ -6,11 +6,12 @@
 from xbmc import Monitor
 from apihelper import ApiHelper
 from favorites import Favorites
-from kodiutils import container_refresh, invalidate_caches, log
+from kodiutils import container_refresh, log
 from playerinfo import PlayerInfo
 from resumepoints import ResumePoints
 from statichelper import to_unicode
 from tokenresolver import TokenResolver
+from utils import invalidate_caches


 class VrtMonitor(Monitor):
31 changes: 16 additions & 15 deletions resources/lib/streamservice.py
@@ -17,6 +17,7 @@
                        get_proxies, get_setting, has_inputstream_adaptive, kodi_version,
                        localize, log, log_error, mkdir, ok_dialog, open_settings, supports_drm)
 from statichelper import to_unicode
+from utils import get_url_json


 class StreamService:
@@ -40,9 +41,9 @@ def __init__(self, _tokenresolver):

     def _get_vualto_license_url(self):
         ''' Get Widevine license URL from Vualto API '''
-        from json import loads
-        log(2, 'URL get: {url}', url=unquote(self._VUPLAY_API_URL))
-        self._vualto_license_url = loads(to_unicode(urlopen(self._VUPLAY_API_URL).read())).get('drm_providers', dict()).get('widevine', dict()).get('la_url')
+        json_data = get_url_json(url=self._VUPLAY_API_URL)
+        if json_data:
+            self._vualto_license_url = json_data.get('drm_providers', dict()).get('widevine', dict()).get('la_url')

     @staticmethod
     def _create_settings_dir():
@@ -154,18 +155,18 @@ def _get_stream_json(self, api_data, roaming=False):
         playertoken = self._tokenresolver.get_playertoken(token_url, token_variant='ondemand', roaming=roaming)

         # Construct api_url and get video json
-        stream_json = None
-        if playertoken:
-            from json import loads
-            api_url = api_data.media_api_url + '/videos/' + api_data.publication_id + \
-                api_data.video_id + '?vrtPlayerToken=' + playertoken + '&client=' + api_data.client
-            log(2, 'URL get: {url}', url=unquote(api_url))
-            try:
-                stream_json = loads(to_unicode(urlopen(api_url).read()))
-            except HTTPError as exc:
-                stream_json = loads(to_unicode(exc.read()))
-
-        return stream_json
+        if not playertoken:
+            return None
+        api_url = api_data.media_api_url + '/videos/' + api_data.publication_id + \
+            api_data.video_id + '?vrtPlayerToken=' + playertoken + '&client=' + api_data.client
+        try:
+            json_data = get_url_json(url=api_url)
+        except HTTPError as exc:
+            from json import load
+            return load(exc)
+        if not json_data:
+            return None
+        return json_data

     @staticmethod
     def _fix_virtualsubclip(manifest_url, duration):
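The new except branch in _get_stream_json() works because json.load() accepts any object with a .read() method, and urllib's HTTPError is exactly that: a file-like exception carrying the response body. Since the diff implies the media API returns its error details as JSON, the payload can be parsed straight from the exception. A standalone illustration, with io.BytesIO and a made-up payload playing the part of the error:

    from io import BytesIO
    from json import load

    error_body = BytesIO(b'{"message": "invalid vrtPlayerToken"}')  # hypothetical payload
    print(load(error_body))  # {'message': 'invalid vrtPlayerToken'}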
4 more changed files not shown.
