This repository has been archived by the owner on Oct 1, 2020. It is now read-only.

Commit

Merge pull request #81 from edx/yro/yt_callback_bugfix
Yro/yt callback bugfix
Qubad786 authored Jan 17, 2018
2 parents d6c03c4 + 728ffc8 commit b71d6d3
Showing 4 changed files with 70 additions and 54 deletions.
34 changes: 17 additions & 17 deletions control/veda_encode.py
@@ -7,6 +7,7 @@

from control_env import *
from dependencies.shotgun_api3 import Shotgun
from dependencies.shotgun_api3.lib.xmlrpclib import ProtocolError
from VEDA.utils import get_config

"""
@@ -101,26 +102,31 @@ def query_urls(self):
continue

def check_review_approved(self):
if self.sg_script_key is None:
return True

"""
** Mediateam only **
Check in with SG to see if this video
is authorized to go to final publishing
"""
# TODO: Move to independent API Method
if self.sg_script_key is None:
return True

video_object = Video.objects.filter(
edx_id=self.veda_id
).latest()

if video_object.inst_class.sg_projID is None:
return False

sg = Shotgun(
self.sg_server_path,
self.sg_script_name,
self.sg_script_key
)
try:
sg = Shotgun(
self.sg_server_path,
self.sg_script_name,
self.sg_script_key
)
except ProtocolError:
# Periodic API Error
return False

fields = ['project', 'entity', 'sg_status_list']
filters = [
@@ -130,18 +136,12 @@ def check_review_approved(self):
"id": video_object.inst_class.sg_projID
}],
]

# TODO: Improve API query
tasks = sg.find("Task", filters, fields)
for t in tasks:
if t['entity']['name'] == self.veda_id.split('-')[-1]:
if t['sg_status_list'] != 'wtg':
return True

return False


def main():
pass


if __name__ == '__main__':
sys.exit(main())
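
The guard above wraps construction of the Shotgun client so that a transient XML-RPC ProtocolError no longer crashes the review check; the video is simply reported as not yet approved and picked up again on a later pass. A minimal sketch of that pattern, reusing the imports from this diff (the helper name connect_to_shotgun is illustrative, not part of the commit):

from dependencies.shotgun_api3 import Shotgun
from dependencies.shotgun_api3.lib.xmlrpclib import ProtocolError

def connect_to_shotgun(server_path, script_name, script_key):
    """Return a Shotgun client, or None when the API is temporarily failing."""
    try:
        return Shotgun(server_path, script_name, script_key)
    except ProtocolError:
        # Periodic API error: treat as "not approved" and retry on the next run.
        return None
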
15 changes: 12 additions & 3 deletions control/veda_val.py
@@ -238,6 +238,11 @@ def profile_determiner(self, val_api_return):
Determine VAL profile data, from return/encode submix
"""
# Defend against old/deprecated encodes
try:
self.auth_dict['val_profile_dict'][self.encode_profile]
except KeyError:
return
if self.endpoint_url is not None:
for p in self.auth_dict['val_profile_dict'][self.encode_profile]:

@@ -264,6 +269,10 @@ def profile_determiner(self, val_api_return):
if final.encode_profile.product_spec == 'review':
pass
else:
try:
self.auth_dict['val_profile_dict'][final.encode_profile.product_spec]
except KeyError:
return
for p in self.auth_dict['val_profile_dict'][final.encode_profile.product_spec]:
test_list.append(dict(
url=str(final.encode_url),
@@ -277,19 +286,19 @@ def profile_determiner(self, val_api_return):
self.encode_data.append(t)

if len(val_api_return) == 0:
return None
return

"""
All URL Records Deleted (for some reason)
"""
if len(self.encode_data) == 0:
return None
return

for i in val_api_return['encoded_videos']:
if i['profile'] not in [g['profile'] for g in self.encode_data]:
self.encode_data.append(i)

return None
return

def send_404(self):
"""
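
Both guards added to profile_determiner follow the same pattern: look the encode profile up in val_profile_dict and return quietly if the profile has been dropped from the config, instead of raising KeyError on old or deprecated encodes. A minimal sketch of that lookup (the profile names in the example are hypothetical):

def val_profiles_for(val_profile_dict, encode_profile):
    """Return the VAL profile list for an encode profile, or None if that
    profile is old/deprecated and no longer present in the config."""
    try:
        return val_profile_dict[encode_profile]
    except KeyError:
        # Deprecated encode profile: skip VAL submission for it.
        return None

# Example with hypothetical profile names:
# val_profiles_for({'desktop_mp4': ['desktop_mp4']}, 'mobile_low')  ->  None
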
2 changes: 1 addition & 1 deletion static_config.yaml
@@ -63,4 +63,4 @@ val_profile_dict:
heal_start: 1
heal_end: 144

global_timeout: 40
global_timeout: 60
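
The only change here bumps global_timeout from 40 to 60 seconds. For illustration only, assuming static_config.yaml is plain YAML loaded into a dict (this sketch reads it with PyYAML directly; the repo's own loader is VEDA.utils.get_config, whose behaviour is not shown in this diff):

import yaml

with open('static_config.yaml') as stream:
    config = yaml.safe_load(stream)

timeout = config['global_timeout']  # 60 after this change
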
73 changes: 40 additions & 33 deletions youtube_callback/sftp_id_retrieve.py
@@ -11,7 +11,7 @@
import xml.etree.ElementTree as ET
from datetime import timedelta
from os.path import expanduser
from paramiko.ssh_exception import AuthenticationException
from paramiko.ssh_exception import AuthenticationException, SSHException

import django
import pysftp
@@ -91,6 +91,8 @@ def xml_downloader(course):
crawl_sftp(d=d, s1=s1)
except AuthenticationException:
LOGGER.info("{inst}{clss} : Authentication Failed".format(inst=course.institution, clss=course.edx_classid))
except SSHException:
LOGGER.info("{inst}{clss} : Authentication Failed".format(inst=course.institution, clss=course.edx_classid))


def crawl_sftp(d, s1):
@@ -103,43 +105,47 @@
"""
dirtime = datetime.datetime.fromtimestamp(d.st_mtime)
if dirtime < datetime.datetime.now() - timedelta(days=YOUTUBE_LOOKBACK_DAYS):
return None
return
if d.filename == "files_to_be_removed.txt":
return None
return
if d.filename == 'FAILED':
return None
return
try:
s1.cwd(d.filename)
except:
return None

for f in s1.listdir_attr():
filetime = datetime.datetime.fromtimestamp(f.st_mtime)
if not filetime > datetime.datetime.now() - timedelta(days=YOUTUBE_LOOKBACK_DAYS):
continue
if fnmatch.fnmatch(f.filename, '*.xml') or fnmatch.fnmatch(f.filename, '*.csv'):
# Determine If there are extant downloaded status files for this same ID,
# If yes, increment filename
x = 0
while True:
"""
Just in case something runs out
"""
if x > 20:
break
file_to_find = f.filename.split('.')[0] + \
str(x) + \
'.' + \
f.filename.split('.')[1]
if os.path.exists(os.path.join(workdir, file_to_find)):
x += 1
else:
break
print "%s : %s" % (f.filename, file_to_find)
s1.get(
f.filename,
os.path.join(workdir, file_to_find)
)
return
try:
for f in s1.listdir_attr():
filetime = datetime.datetime.fromtimestamp(f.st_mtime)
if not filetime > datetime.datetime.now() - timedelta(days=YOUTUBE_LOOKBACK_DAYS):
continue
if fnmatch.fnmatch(f.filename, '*.xml') or fnmatch.fnmatch(f.filename, '*.csv'):
# Determine If there are extant downloaded status files for this same ID,
# If yes, increment filename
x = 0
while True:
"""
Just in case something runs out
"""
if x > 20:
break
file_to_find = f.filename.split('.')[0] + \
str(x) + \
'.' + \
f.filename.split('.')[1]
if os.path.exists(os.path.join(workdir, file_to_find)):
x += 1
else:
break
print "%s : %s" % (f.filename, file_to_find)
s1.get(
f.filename,
os.path.join(workdir, file_to_find)
)
except IOError:
return
except SSHException:
return
s1.cwd('..')
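
The download loop keeps its original rule for naming local status files: try stem0.ext, stem1.ext, and so on until a name is free, giving up after 20 attempts ("just in case something runs out"). The same rule as a standalone helper, purely for illustration and not part of the commit:

import os

def next_status_filename(workdir, filename, cap=20):
    """Return '<stem><n>.<ext>' for the first n (0..cap) not already present
    in workdir; settle for the cap value if every candidate exists."""
    stem, ext = filename.split('.')[0], filename.split('.')[1]
    for n in range(cap + 1):
        candidate = '{0}{1}.{2}'.format(stem, n, ext)
        if not os.path.exists(os.path.join(workdir, candidate)):
            return candidate
    return '{0}{1}.{2}'.format(stem, cap, ext)
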


@@ -218,6 +224,7 @@ def urlpatch(upload_data):
test_id = Video.objects.filter(edx_id=upload_data['edx_id']).latest()
except:
upload_data['status'] = 'Failure'
return

if upload_data['status'] == 'Success':
url_query = URL.objects.filter(
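
Taken together, the youtube_callback changes make the SFTP crawl tolerant of the errors paramiko and pysftp raise in practice: SSHException (alongside AuthenticationException) when the connection is opened, and IOError or SSHException while listing remote directories. A minimal sketch of that connect-and-crawl shape; the host, credentials, and process_entry callback are hypothetical:

import pysftp
from paramiko.ssh_exception import AuthenticationException, SSHException

def crawl_course_drop(host, username, private_key, process_entry):
    """Visit each entry in the remote drop directory, swallowing the transient
    failures a periodic callback job should simply retry on its next run."""
    try:
        with pysftp.Connection(host, username=username, private_key=private_key) as sftp:
            try:
                for entry in sftp.listdir_attr():
                    process_entry(entry)
            except (IOError, SSHException):
                # Remote directory vanished or the channel dropped mid-crawl.
                return
    except (AuthenticationException, SSHException):
        # Bad credentials or an SSH-level failure while connecting.
        return
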
