Add GitHub workflow: automatically compile master-branch docs & add plugins
Showing 14 changed files with 1,154 additions and 0 deletions.
@@ -0,0 +1,81 @@
name: Daily Plugin Processing

on:
  schedule:
    - cron: '0 15 * * 2,5'  # Run every Tuesday and Friday at 15:00 UTC
  push:
    branches:
      - main
  pull_request:
    branches:
      - main
  create:
    tags:
      - '*'
  workflow_dispatch:

jobs:
  build:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v2

      - name: Clean repository
        run: |
          find . -maxdepth 1 -mindepth 1 ! -name '_parse_plugins' ! -name '.*' -exec rm -rf {} +
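      # The step above deletes everything at the repository root except the
      # _parse_plugins directory and dotfiles (including .git and .github), so the
      # freshly built HTML committed later fully replaces the previous site.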
      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: '3.12'

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install requests jinja2

      - name: Install pandoc
        run: |
          sudo apt-get update
          sudo apt-get install -y pandoc

      - name: Clone and Install Pyxu repository
        run: |
          git clone https://github.com/pyxu-org/pyxu.git
          python -m pip install pyxu/.[dev]

      - name: Clean pyxu/doc/fair directory
        run: |
          rm -rf pyxu/doc/fair/*.rst
          rm -rf pyxu/doc/fair/plugins/*.rst

      - name: Run parse_pypi.py
        run: python _parse_plugins/parse_pypi.py

      - name: Run make_pages.py
        run: python _parse_plugins/make_pages.py

      - name: Build HTML documentation
        run: |
          export TZ=UTC
          sphinx-build -b html -j auto -w build/html/WARNINGS.log pyxu/doc/ ./

      - name: Remove Pyxu repository
        run: rm -rf pyxu

      - name: Commit changes
        run: |
          git config --global user.name 'github-actions[bot]'
          git config --global user.email 'github-actions[bot]@users.noreply.github.com'
          git add -A
          git commit -m "Update generated HTML files [skip ci]"
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
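      # Note: git commit exits with a non-zero status when there is nothing to
      # commit, so a scheduled run that produces no changes will fail at this step
      # (guarding with `git diff --cached --quiet || git commit ...` would avoid that).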

      - name: Push changes
        run: |
          git push "https://${{ secrets.GITHUB_TOKEN }}@github.com/${{ github.repository }}.git" HEAD:main
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
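Taken together, the workflow runs two repository scripts and a Sphinx build: the PyPI parser fills _parse_plugins/plugins.db, the page generator renders reST pages into pyxu/doc/fair/plugins, and sphinx-build writes the HTML site to the repository root, which is then committed back to main. A minimal sketch for reproducing the two Python steps locally (assuming a checkout of this repository with requests and jinja2 installed, as in the workflow):

# Hedged local dry-run of the two scripts the workflow invokes, in the same order.
# Assumes the current working directory is the repository root, as in the CI job.
import subprocess

for script in ("_parse_plugins/parse_pypi.py", "_parse_plugins/make_pages.py"):
    subprocess.run(["python", script], check=True)  # check=True: abort on failure, like a failing CI step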
@@ -0,0 +1,119 @@
import os
from collections import defaultdict
from jinja2 import Environment, FileSystemLoader
import sqlite3
import json

DATABASE_FILE = '_parse_plugins/plugins.db'
TEMPLATES_DIR = "_parse_plugins/templates"
OUTPUT_DIR = "pyxu/doc/fair/plugins"
RST_DIR = "rst"

entrypoint_metainfo = {
    "pyxu.operator": {"shortname": "Operator", "colorclass": "blue"},
    "pyxu.opt.solver": {"shortname": "Solver", "colorclass": "brown"},
    "pyxu.opt.stop": {"shortname": "Stop", "colorclass": "purple"},
    "pyxu.math": {"shortname": "Math", "colorclass": "green"},
    "pyxu.contrib": {"shortname": "Contrib", "colorclass": "orange"},
}

status_dict = {
    "1": ["Planning: Not yet ready to use. Developers welcome!", "status-planning-d9644d.svg"],
    "2": ["Pre-alpha: Not yet ready to use. Developers welcome!", "status-planning-d9644d.svg"],
    "3": ["Alpha: Adds new functionality, not yet ready for production. Testing welcome!", "status-alpha-d6af23.svg"],
    "4": ["Beta: Adds new functionality, not yet ready for production. Testing welcome!", "status-beta-d6af23.svg"],
    "5": ["Production/Stable: Ready for production calculations. Bug reports welcome!", "status-stable-4cc61e.svg"],
    "6": ["Mature: Ready for production calculations. Bug reports welcome!", "status-stable-4cc61e.svg"],
    "7": ["Inactive: No longer maintained.", "status-inactive-bbbbbb.svg"],
}

entrypoints_count = defaultdict(list)


def get_summary_info(entry_points):
    summary_info = []
    ep = json.loads(entry_points)

    for entrypoint_name, meta in entrypoint_metainfo.items():
        num = len(ep.get(entrypoint_name, {}))
        if num > 0:
            summary_info.append({"colorclass": meta["colorclass"], "text": meta["shortname"], "count": num})
            entrypoints_count[entrypoint_name].append(num)

    return summary_info
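# Illustration (hypothetical plugin): for an entrypoints JSON blob such as
#   {"pyxu.operator": {"MyOp": "my_plugin.op:MyOp"},
#    "pyxu.opt.solver": {"SolverA": "my_plugin.solvers:SolverA",
#                        "SolverB": "my_plugin.solvers:SolverB"}}
# get_summary_info would return
#   [{"colorclass": "blue", "text": "Operator", "count": 1},
#    {"colorclass": "brown", "text": "Solver", "count": 2}]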


def render_plugin_pages(plugins, env):
    if os.path.exists(RST_DIR):
        for f in os.listdir(RST_DIR):
            if f.endswith(".rst"):
                os.remove(os.path.join(RST_DIR, f))
    else:
        os.mkdir(RST_DIR)

    for plugin in plugins:
        summary_info = get_summary_info(plugin["entrypoints"])
        dev_status = status_dict[plugin["development_status"]]
        entry_points = json.loads(plugin["entrypoints"])
        rst_plugin_template = env.get_template("plugin.rst")
        rst_content = rst_plugin_template.render(plugin=plugin, summary_info=summary_info, dev_status=dev_status, entrypointtypes=entrypoint_metainfo, entry_points=entry_points)

        with open(os.path.join(OUTPUT_DIR, f'{plugin["name"]}.rst'), 'w') as f:
            f.write(rst_content)


def render_catalogue_page(plugins, plugins_info, env):
    rst_catalogue_template = env.get_template("catalogue.rst")
    rst_content = rst_catalogue_template.render(
        plugins=plugins,
        summary_info=plugins_info["summary_info"],
        dev_status=plugins_info["dev_status"],
        dev_status_count=plugins_info["dev_status_count"],
        summary_info_count=plugins_info["summary_info_count"].values(),
    )

    with open(os.path.join(OUTPUT_DIR, 'index.rst'), 'w') as f:
        f.write(rst_content)


def main():
    env = Environment(loader=FileSystemLoader(TEMPLATES_DIR))

    conn = sqlite3.connect(DATABASE_FILE)
    c = conn.cursor()
    c.execute("SELECT name, pyxu_version, version, author, author_email, home_page, short_description, license, development_status, entrypoints, score FROM plugins ORDER BY name COLLATE NOCASE ASC")

    plugins = [{
        'name': row[0],
        'pyxu_version': row[1],
        'version': row[2],
        'author': row[3],
        'author_email': row[4],
        'home_page': row[5],
        'short_description': row[6],
        'license': row[7],
        'development_status': row[8],
        'entrypoints': row[9],
        'score': row[10],
    } for row in c.fetchall()]
    conn.close()

    plugins_info = {
        "summary_info": {},
        "dev_status": {},
        "summary_info_count": {epm["shortname"]: {"colorclass": epm["colorclass"], "num_entries": 0, "name": epm["shortname"], "total_num": 0} for epm in entrypoint_metainfo.values()},
        "dev_status_count": {k: {"badge": v[1], "num_entries": 0} for k, v in status_dict.items()}
    }

    for plugin in plugins:
        summary_info = get_summary_info(plugin["entrypoints"])
        dev_status = status_dict[plugin["development_status"]]
        plugins_info["summary_info"].update({plugin["name"]: summary_info})
        plugins_info["dev_status"].update({plugin["name"]: dev_status})

        for entry in summary_info:
            plugins_info["summary_info_count"][entry["text"]]["num_entries"] += 1
            plugins_info["summary_info_count"][entry["text"]]["total_num"] += entry["count"]
        plugins_info["dev_status_count"][plugin["development_status"]]["num_entries"] += 1

    render_plugin_pages(plugins, env)
    render_catalogue_page(plugins, plugins_info, env)


if __name__ == "__main__":
    main()
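The script above (invoked by the workflow as _parse_plugins/make_pages.py) renders Jinja2 templates that are not shown in this diff view. The following is only a rough sketch of the rendering pattern it relies on (an Environment with a loader, get_template, and render with keyword context), using an invented inline template in place of the repository's plugin.rst:

from jinja2 import DictLoader, Environment

# Invented stand-in template: the real plugin.rst template is not shown in this diff.
env = Environment(loader=DictLoader({
    "plugin.rst": (
        "{{ plugin.name }}\n"
        "{% for item in summary_info %}- {{ item.text }}: {{ item.count }}\n{% endfor %}"
    ),
}))

template = env.get_template("plugin.rst")
print(template.render(
    plugin={"name": "my-pyxu-plugin"},               # hypothetical plugin record
    summary_info=[{"text": "Solver", "count": 1}],   # shape produced by get_summary_info above
))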
@@ -0,0 +1,152 @@
import sqlite3
import re
import warnings
import requests
from requests.utils import requote_uri
import configparser
import tempfile
import zipfile
import json
from pathlib import Path
from typing import Dict, Tuple, Any

DATABASE_FILE = "_parse_plugins/plugins.db"
TROVE_CLASSIFIER = "Framework :: Pycsou"


def query_pypi() -> Dict[str, str]:
    """
    Query PyPI to get all plugins matching the specified classifier.

    :return: Dictionary with plugin names and their latest versions.
    """
    packages = {}
    name_pattern = re.compile('class="package-snippet__name">(.+?)</span>')
    version_pattern = re.compile('class="package-snippet__version">(.+?)</span>')
    url = requote_uri(f"https://pypi.org/search/?q=&o=-created&c={TROVE_CLASSIFIER}")

    response = requests.get(url)
    response.raise_for_status()

    html = response.text
    names = name_pattern.findall(html)
    versions = version_pattern.findall(html)

    if len(names) != len(versions):
        return {}

    packages = dict(zip(names, versions))
    return packages
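# Illustration: on the PyPI search results page, each hit contains markup like
#   <span class="package-snippet__name">some-plugin</span>
#   <span class="package-snippet__version">1.0.0</span>
# so query_pypi() returns e.g. {"some-plugin": "1.0.0"}. This scrapes the HTML search
# page rather than an API, so a change in PyPI's markup yields an empty result
# (and a mismatch in the number of matches is caught by the guard above).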


class CaseSensitiveConfigParser(configparser.ConfigParser):
    """Case-sensitive config parser."""
    optionxform = staticmethod(str)


def parse_entrypoints(plugin_data: Dict[str, Any]) -> Tuple[str, Dict[str, str]]:
    build_types = {data.get("packagetype"): data.get("url") for data in plugin_data.get("urls") if data.get("packagetype")}

    if "bdist_wheel" not in build_types:
        warnings.warn("No bdist_wheel available for PyPI release")
        return "{}", {}

    wheel_url = build_types.get("bdist_wheel")
    if not wheel_url:
        return "{}", {}

    try:
        with requests.get(wheel_url, stream=True, timeout=120) as download:
            download.raise_for_status()
            with tempfile.TemporaryDirectory() as tmpdirname:
                wheel_path = Path(tmpdirname) / "wheel.whl"
                with wheel_path.open("wb") as handle:
                    for chunk in download.iter_content(chunk_size=8192):
                        handle.write(chunk)
                with zipfile.ZipFile(wheel_path) as whl:
                    entry_points_content = whl.read(next(name for name in whl.namelist() if name.endswith(".dist-info/entry_points.txt"))).decode("utf-8")
                    metadata_content = whl.read(next(name for name in whl.namelist() if name.endswith(".dist-info/METADATA"))).decode("utf-8")

        # Parse entry points
        parser = CaseSensitiveConfigParser()
        parser.read_string(entry_points_content)
        entry_points = {section: dict(parser.items(section)) for section in parser.sections()}

        # Parse metadata
        metadata = parse_metadata(metadata_content)

    except Exception as err:
        warnings.warn(f"Unable to read wheel file from PyPI release of package {plugin_data['info']['name']}: {err}")
        return "{}", {}

    return json.dumps(entry_points), metadata
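# Illustration: a wheel's .dist-info/entry_points.txt is INI-formatted, e.g.
#   [pyxu.operator]
#   MyOp = my_plugin.op:MyOp
# which the case-sensitive parser turns into
#   {"pyxu.operator": {"MyOp": "my_plugin.op:MyOp"}}
# before it is JSON-encoded for storage in the database.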


def parse_metadata(metadata_content):
    metadata = {}
    for line in metadata_content.splitlines():
        if line.startswith("Name: "):
            metadata["name"] = line.split("Name: ")[1]
        elif line.startswith("Version: "):
            metadata["version"] = line.split("Version: ")[1]
        elif line.startswith("Author-email: "):
            # "Author-email: Name <address>" carries both the author name and address;
            # a single branch parses both (a second elif on the same prefix would never run).
            value = line.split("Author-email: ")[1]
            metadata["author"] = value.split(" <")[0]
            if "<" in value:
                metadata["author_email"] = value.split("<")[1].rstrip(">")
        elif line.startswith("Summary: "):
            metadata["short_description"] = line.split("Summary: ")[1]
        elif line.startswith("Project-URL: download, "):
            metadata["home_page"] = line.split("Project-URL: download, ")[1]
        elif line.startswith("License-Expression: "):
            metadata["license"] = line.split("License-Expression: ")[1]
        elif line.startswith("Classifier: Development Status :: "):
            # e.g. "Classifier: Development Status :: 4 - Beta" -> "4"
            metadata["development_status"] = line.split(":: ")[-1][0]

    return metadata
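# Illustration (hypothetical package): METADATA lines such as
#   Name: my-pyxu-plugin
#   Version: 0.1.0
#   Author-email: Jane Doe <jane@example.com>
#   Classifier: Development Status :: 4 - Beta
# yield {"name": "my-pyxu-plugin", "version": "0.1.0", "author": "Jane Doe",
#        "author_email": "jane@example.com", "development_status": "4"}.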


def store_plugin_data(plugin_data: Dict[str, Any], conn: sqlite3.Connection, c: sqlite3.Cursor):
    entrypoints, metadata = parse_entrypoints(plugin_data)
    if not entrypoints or not metadata:
        return
    name = metadata.get("name", "")
    pyxu_version = "2"  # TODO: Change according to min, max pyxu versions
    version = metadata.get("version", "")
    author = metadata.get("author", "")
    author_email = metadata.get("author_email", "")
    home_page = metadata.get("home_page", "")
    short_description = metadata.get("short_description", "")
    license = metadata.get("license", "")
    development_status = metadata.get("development_status", "1")
    score = 100

    c.execute("""
        INSERT INTO plugins
        (name, pyxu_version, version, author, author_email, home_page, short_description, license, development_status, entrypoints, score)
        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
    """, (name, pyxu_version, version, author, author_email, home_page, short_description, license, development_status, entrypoints, score))
    conn.commit()


def main():
    conn = sqlite3.connect(DATABASE_FILE)
    c = conn.cursor()
    c.execute('''DROP TABLE IF EXISTS plugins''')
    c.execute('''CREATE TABLE plugins
                 (name TEXT, pyxu_version TEXT, version TEXT, author TEXT, author_email TEXT, home_page TEXT,
                  short_description TEXT, license TEXT, development_status TEXT, entrypoints TEXT, score INTEGER)''')

    plugin_names = query_pypi()

    for plugin_name, plugin_version in plugin_names.items():
        url = f"https://pypi.org/pypi/{plugin_name}/json"
        response = requests.get(url)
        response.raise_for_status()
        plugin_data = response.json()
        store_plugin_data(plugin_data, conn, c)

    conn.close()


if __name__ == "__main__":
    main()
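As a quick sanity check, the database produced by this script (invoked by the workflow as _parse_plugins/parse_pypi.py) can be inspected with the standard-library sqlite3 module. A sketch, assuming the script has already run from the repository root:

import json
import sqlite3

# List each stored plugin with the entry-point groups it registers.
conn = sqlite3.connect("_parse_plugins/plugins.db")
for name, version, entrypoints in conn.execute(
    "SELECT name, version, entrypoints FROM plugins ORDER BY name COLLATE NOCASE"
):
    groups = sorted(json.loads(entrypoints))
    print(f"{name} {version}: {', '.join(groups) or 'no entry points'}")
conn.close()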
(The remaining changed files, including a binary file and several files GitHub cannot render, are not shown in this diff view.)