Skip to content

Commit

Permalink
fixes
Browse files Browse the repository at this point in the history
  • Loading branch information
priyakasimbeg committed Jul 29, 2024
1 parent 898ee8b commit 7d01efe
Show file tree
Hide file tree
Showing 2 changed files with 33 additions and 5 deletions.
3 changes: 3 additions & 0 deletions scoring/performance_profile.py
Original file line number Diff line number Diff line change
Expand Up @@ -227,6 +227,9 @@ def get_workloads_time_to_target(submission,
else:
time_val = float('inf')
time_vals_per_study.append(time_val)
num_s = len(time_vals_per_study)
print(f'TIME VALS PER STUDY: {num_s}')
print(time_vals_per_study)

workloads.append({
'submission': submission_name,
Expand Down
35 changes: 30 additions & 5 deletions scoring/score_submissions.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@
'Path to submission directory containing experiment directories.')
flags.DEFINE_string('output_dir',
'scoring_results',
'Path to save performance profile table and plot.')
'Path to save performance profile artifacts, submission_summaries and results files.')
flags.DEFINE_boolean('compute_performance_profiles',
False,
'Whether or not to compute the performance profiles.')
Expand All @@ -51,11 +51,16 @@
None,
'Filename to save the processed results that are fed into the performance profile functions.'
)
flags.DEFINE_boolean(
flags.DEFINE_string(
'load_results_from_filename',
None,
'Filename to load processed results from that are fed into performance profile functions'
)
flags.DEFINE_string(
'exclude_submissions',
'',
    'Optional comma separated list of names of submissions to exclude from scoring.'
)
FLAGS = flags.FLAGS


Expand Down Expand Up @@ -128,6 +133,21 @@ def get_submission_summary(df, include_test_split=True):
logging.info('\n' + tabulate(df, headers='keys', tablefmt='psql'))
return df

def compute_leaderboard_score(df, normalize=False):
  """Compute leaderboard score by taking integral of performance profile.

  Args:
    df: pd.DataFrame returned from `compute_performance_profiles`.
    normalize: divide by the range of the performance profile's tau.

  Returns:
    pd.DataFrame with one column of scores indexed by submission.
  """
  # Trapezoidal integration of each submission's profile row over the
  # tau values stored in the DataFrame columns.
  integral = np.trapz(df, x=df.columns)
  if normalize:
    # Rescale by the width of the tau interval so scores lie in [0, 1].
    tau_range = df.columns.max() - df.columns.min()
    integral = integral / tau_range
  return pd.DataFrame(integral, columns=['score'], index=df.index)


def main(_):
results = {}
Expand All @@ -144,6 +164,8 @@ def main(_):
for submission in os.listdir(
os.path.join(FLAGS.submission_directory, team)):
print(submission)
if submission in FLAGS.exclude_submissions.split(','):
continue
experiment_path = os.path.join(FLAGS.submission_directory,
team,
submission)
Expand Down Expand Up @@ -185,10 +207,13 @@ def main(_):
os.mkdir(FLAGS.output_dir)
performance_profile.plot_performance_profiles(
performance_profile_df, 'score', save_dir=FLAGS.output_dir)
perf_df = tabulate(
performance_profile_str = tabulate(
performance_profile_df.T, headers='keys', tablefmt='psql')
logging.info(f'Performance profile:\n {perf_df}')

logging.info(f'Performance profile:\n {performance_profile_str}')
scores = compute_leaderboard_score(performance_profile_df)
scores.to_csv(os.path.join(FLAGS.output_dir, 'scores.csv'))
scores_str = tabulate(scores, headers='keys', tablefmt='psql')
logging.info(f'Scores: \n {scores_str}')

if __name__ == '__main__':
# flags.mark_flag_as_required('submission_directory')
Expand Down

0 comments on commit 7d01efe

Please sign in to comment.