Skip to content

Commit

Permalink
Merge branch 'master' into patch-14
Browse files Browse the repository at this point in the history
  • Loading branch information
johnnyapol authored Sep 10, 2024
2 parents 47cc61d + a7f92fc commit 2a9f9ee
Show file tree
Hide file tree
Showing 4 changed files with 52 additions and 31 deletions.
13 changes: 13 additions & 0 deletions .github/workflows/main.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
# Scheduled keep-alive workflow.
# NOTE(review): GitHub suspends cron-triggered workflows in repositories with
# no recent activity; the keep-alive action below presumably exists to prevent
# that — confirm against the action's README.
name: Github Action with a cronjob trigger
on:
  schedule:
    # Runs once per day at 00:00 (cron schedules are evaluated in UTC).
    - cron: "0 0 * * *"

jobs:
  cronjob-based-github-action:
    name: Cronjob based github action
    runs-on: ubuntu-latest
    steps:
      # Check out the repository so the keep-alive action can operate on it.
      - uses: actions/checkout@v4
      # - steps
      # Third-party action that keeps scheduled workflows from being disabled.
      - uses: entepotenz/keep-github-actions-alive-min-dependencies@v1
57 changes: 29 additions & 28 deletions .github/workflows/scrape.yml
Original file line number Diff line number Diff line change
Expand Up @@ -214,38 +214,39 @@ jobs:
name: faculty
path: scrapers/faculty.json

scrape-transfer:
name: Scrape transfer
runs-on: ubuntu-latest
steps:
- name: Checkout branch
uses: actions/checkout@v3
# This is broken, commenting out so the job does not continue to fail
# scrape-transfer:
# name: Scrape transfer
# runs-on: ubuntu-latest
# steps:
# - name: Checkout branch
# uses: actions/checkout@v3

- name: Set up python
uses: actions/setup-python@v4
with:
python-version: '3.11'
# - name: Set up python
# uses: actions/setup-python@v4
# with:
# python-version: '3.11'

- name: Install pip requirements
run: |
python -m pip install --upgrade pip
pip install -r scrapers/requirements.txt
# - name: Install pip requirements
# run: |
# python -m pip install --upgrade pip
# pip install -r scrapers/requirements.txt

- name: Scrape transfer
working-directory: ./scrapers
run: python3 transfer_scraper/main.py csv
# - name: Scrape transfer
# working-directory: ./scrapers
# run: python3 transfer_scraper/main.py csv

- name: Upload JSON data
uses: actions/upload-artifact@v3
with:
name: transfer
path: scrapers/transfer.json
# - name: Upload JSON data
# uses: actions/upload-artifact@v3
# with:
# name: transfer
# path: scrapers/transfer.json

- name: Upload CSV data
uses: actions/upload-artifact@v3
with:
name: transfer_guides
path: scrapers/transfer_guides
# - name: Upload CSV data
# uses: actions/upload-artifact@v3
# with:
# name: transfer_guides
# path: scrapers/transfer_guides

# This may need to be updated now that the scrapers have been moved into the same repo as the site
scrape-catalog:
Expand Down Expand Up @@ -312,7 +313,7 @@ jobs:
- scrape-prereq-graph
- scrape-catalog
- scrape-csci-topics
- scrape-transfer
# - scrape-transfer
if: |
always()
&& contains(needs.scrape-courses-and-prerequisites.result,'success')
Expand Down
11 changes: 9 additions & 2 deletions scrapers/prerequisites_graph/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -100,8 +100,15 @@ def generate(semester_data_path: str):
)
)

with open(f"{sem_dirs[-1]}/catalog.json", "r") as f:
most_recent_catalog = json.load(f)
most_recent_catalog = {}
for sem_dir in reversed(sem_dirs):
try:
with open(f"{sem_dir}/catalog.json", "r") as f:
print(f"Trying to load catalog for {sem_dir}...")
most_recent_catalog = json.load(f)
break
except FileNotFoundError:
continue
for sem_dir in sem_dirs:
sem_add_courses(sem_dir, adj_list, most_recent_catalog)

Expand Down
2 changes: 1 addition & 1 deletion site/src/quacs-rs/src/context.rs
Original file line number Diff line number Diff line change
Expand Up @@ -219,7 +219,7 @@ impl<'a, const N: usize> Context<'a, N> {

let course_name = self.data.crn_courses.get(&crn).unwrap();

if self.selected_courses.get(course_name).is_some() {
if self.selected_courses.contains_key(course_name) {
console_log!("Checking if crn {} conflicts with global schedule", crn);

// Assuming we didn't have other sections from this course selected, do we conflict?
Expand Down

0 comments on commit 2a9f9ee

Please sign in to comment.