diff --git a/.github/compose.yaml b/.github/compose.yaml
index 8051484..438bf5c 100644
--- a/.github/compose.yaml
+++ b/.github/compose.yaml
@@ -2,10 +2,6 @@ services:
   metamanager:
     image: docker.io/fackop/pppackage-metamanager:latest
     user: $USER
-    command:
-      - /mnt/root/
-      - --config
-      - /mnt/config.json
     environment:
       HOST_PWD: $PWD
     volumes:
diff --git a/.github/config.json b/.github/config.json
index 7c16b35..6d6e626 100644
--- a/.github/config.json
+++ b/.github/config.json
@@ -57,6 +57,7 @@
     },
     "repositories": [
        {
+           "name": "archlinux-core",
            "driver": "pacman",
            "parameters": {
                "mirrorlist": [
@@ -65,6 +66,7 @@
            }
        },
        {
+           "name": "archlinux-extra",
            "driver": "pacman",
            "parameters": {
                "mirrorlist": [
@@ -73,9 +75,11 @@
            }
        },
        {
+           "name": "AUR",
            "driver": "AUR"
        },
        {
+           "name": "conancenter",
            "driver": "conan",
            "parameters": {
                "url": "https://center.conan.io",
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index e69aadc..7cc2f54 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -100,80 +100,85 @@ jobs:
           cd /home/runner/.PPpackage/
           mkdir -p cache/ repository/
           cd repository/
-          mkdir -p 0/ 1/ 2/ 3/
+          mkdir -p archlinux-core/ archlinux-extra/ AUR/ conancenter/

-      - name: Update archlinux core database
+      - name: Update archlinux-core
         run: >
           docker run --rm
           --user $(id -u):$(id -g)
-          --mount type=bind,source=/home/runner/.PPpackage/repository/0/,target=/mnt/repository/
+          --mount type=bind,source=/home/runner/.PPpackage/repository/archlinux-core/,target=/mnt/repository/
           --mount type=bind,source=${{ github.workspace }}/examples/update/repository-pacman.json,target=/mnt/repository.json
           --env REPOSITORY=core
           docker.io/fackop/pppackage-updater:latest
           PPpackage.repository_driver.pacman
+          archlinux-core
           --data-path /mnt/repository/
           --repository-config /mnt/repository.json

-      - name: Update archlinux extra database
+      - name: Update archlinux-extra
         run: >
           docker run --rm
           --user $(id -u):$(id -g)
-          --mount type=bind,source=/home/runner/.PPpackage/repository/1/,target=/mnt/repository/
+          --mount type=bind,source=/home/runner/.PPpackage/repository/archlinux-extra/,target=/mnt/repository/
           --mount type=bind,source=${{ github.workspace }}/examples/update/repository-pacman.json,target=/mnt/repository.json
           --env REPOSITORY=extra
           docker.io/fackop/pppackage-updater:latest
           PPpackage.repository_driver.pacman
+          archlinux-extra
           --data-path /mnt/repository/
           --repository-config /mnt/repository.json

-      - name: Update AUR database
+      - name: Update AUR
         run: >
           docker run --rm
           --user $(id -u):$(id -g)
-          --mount type=bind,source=/home/runner/.PPpackage/repository/2/,target=/mnt/repository/
-          --env DATABASE_PATH=/mnt/database/
+          --mount type=bind,source=/home/runner/.PPpackage/repository/AUR/,target=/mnt/repository/
           docker.io/fackop/pppackage-updater:latest
           PPpackage.repository_driver.AUR
+          AUR
           --data-path /mnt/repository/

-      - name: Check if conancenter database exists
-        id: check_conancenter
-        uses: andstor/file-existence-action@v3.0.0
-        with:
-          files: database/conancenter/cache/
-
-      - name: Update conancenter database
-        if: steps.check_conancenter.outputs.files_exists == 'false'
+      - name: Update conancenter
         run: >
           docker run --rm
           --user $(id -u):$(id -g)
-          --mount type=bind,source=/home/runner/.PPpackage/repository/3/,target=/mnt/repository/
+          --mount type=bind,source=/home/runner/.PPpackage/repository/conancenter/,target=/mnt/repository/
           --mount type=bind,source=${{ github.workspace }}/examples/update/repository-conancenter.json,target=/mnt/repository.json
-          --env DATABASE_PATH=/mnt/database/
           docker.io/fackop/pppackage-updater:latest
           PPpackage.repository_driver.conan
+          conancenter
           --data-path /mnt/repository/
           --repository-config /mnt/repository.json

-      - name: Run PPpackage
+      - name: Run PPpackage for conan-build
         run: >
           USER="$(id -u):$(id -g)" docker compose
           --file .github/compose.yaml
-          run --rm
-          -T metamanager < examples/input/conan-build.json
+          run --rm -T
+          metamanager
+          /mnt/root --config /mnt/config.json
+
+          USER="$(id -u):$(id -g)" docker compose
+          --file .github/compose.yaml
+          run --rm -T metamanager
+          /mnt/root --config /mnt/config.json --just-resolve
+
 None:
     try:
-        await main(config_path, installation_path, generators_path, graph_path)
+        await main(
+            config_path, installation_path, generators_path, graph_path, just_resolve
+        )
     except:
         print_exc(file=stderr)
         exit(1)
diff --git a/metamanager/PPpackage/metamanager/main.py b/metamanager/PPpackage/metamanager/main.py
index e9e4325..8d18767 100644
--- a/metamanager/PPpackage/metamanager/main.py
+++ b/metamanager/PPpackage/metamanager/main.py
@@ -27,6 +27,7 @@ async def main(
     installation_path: Path,
     generators_path: Path | None,
     graph_path: Path | None,
+    just_resolve: bool,
 ) -> None:
     try:
         config = validate_json_io_path(Config, config_path)
@@ -40,6 +41,8 @@
     )

     containerizer = Containerizer(config.containerizer)
+
+    print("Pulling the solver image...", file=stderr)
     containerizer.pull("docker.io/fackop/pppackage-solver", "latest")

     input = validate_json_io(Input, stdin.buffer)
@@ -79,6 +82,10 @@
         write_graph_to_file(graph, graph_path)
         stderr.write(f"Graph written to {graph_path}.\n")

+    if just_resolve:
+        stderr.write("Done.\n")
+        return
+
     installers = Installers(config.installers)

     stderr.write(f"Fetching and installing to {installation_path}...\n")
diff --git a/metamanager/PPpackage/metamanager/repository/__init__.py b/metamanager/PPpackage/metamanager/repository/__init__.py
index a306e5d..b24a37a 100644
--- a/metamanager/PPpackage/metamanager/repository/__init__.py
+++ b/metamanager/PPpackage/metamanager/repository/__init__.py
@@ -30,18 +30,17 @@ def __init__(
         interface: RepositoryInterface,
         epoch: str,
         data_path: Path,
-        index: int,
     ):
         self.translator_data_cache_path = (
             config.translator_data_cache_path
             if config.translator_data_cache_path is not None
-            else data_path / "cache" / "translator-data" / str(index)
+            else data_path / "cache" / "translator-data" / config.name
         )

         self.formula_cache_path = (
             config.formula_cache_path
             if config.formula_cache_path is not None
-            else data_path / "cache" / "formula" / str(index)
+            else data_path / "cache" / "formula" / config.name
         )

         self.interface = interface
@@ -49,13 +48,10 @@ def __init__(

     @staticmethod
     async def create(
-        config: RepositoryConfig,
-        interface: RepositoryInterface,
-        data_path: Path,
-        index: int,
+        config: RepositoryConfig, interface: RepositoryInterface, data_path: Path
     ):
         epoch = await interface.get_epoch()
-        return Repository(config, interface, epoch, data_path, index)
+        return Repository(config, interface, epoch, data_path)

     async def fetch_translator_data(self) -> AsyncIterable[TranslatorInfo]:
         self.translator_data_cache_path.parent.mkdir(parents=True, exist_ok=True)
@@ -169,15 +165,14 @@ async def Repositories(
     async with AsyncExitStack() as context_stack:
         yield [
             await Repository.create(
-                config,
+                repository_config,
                 await create_repository(
                     context_stack,
-                    config,
+                    repository_config,
                     drivers,
-                    data_path / "repository" / str(index),
+                    data_path / "repository" / repository_config.name,
                 ),
                 data_path,
-                index,
             )
-            for index, config in enumerate(repository_configs)
+            for repository_config in repository_configs
         ]
diff --git a/metamanager/PPpackage/metamanager/schemes/__init__.py b/metamanager/PPpackage/metamanager/schemes/__init__.py
index 2b3a53c..761bdbf 100644
--- a/metamanager/PPpackage/metamanager/schemes/__init__.py
+++ b/metamanager/PPpackage/metamanager/schemes/__init__.py
@@ -8,7 +8,7 @@
     Parameters,
     Requirement,
 )
-from pydantic import BaseModel
+from pydantic import BaseModel, field_validator
 from pydantic.dataclasses import dataclass as pydantic_dataclass

 from PPpackage.utils.container.schemes import ContainerizerConfig
@@ -25,6 +25,7 @@ class Input:


 class RepositoryConfig(BaseModel):
+    name: str
     driver: str
     parameters: Parameters = frozendict()
     data_path: Path | None = None
@@ -63,3 +64,18 @@ class Config:
     product_cache_path: Annotated[Path, WithVariables] | None = None
     repository_drivers: Mapping[str, RepositoryDriverConfig] = frozendict()
     generators: Mapping[str, GeneratorConfig] = frozendict()
+
+    @field_validator("repositories")
+    @classmethod
+    def name_must_contain_space(
+        cls, repository_configs: list[RepositoryConfig]
+    ) -> list[RepositoryConfig]:
+        names = set[str]()
+
+        for repository_config in repository_configs:
+            if repository_config.name in names:
+                raise ValueError(f"Duplicate repository name: {repository_config.name}")
+
+            names.add(repository_config.name)
+
+        return repository_configs
diff --git a/repository-driver/AUR/PPpackage/repository_driver/AUR/update.py b/repository-driver/AUR/PPpackage/repository_driver/AUR/update.py
index ac40b00..910d83f 100644
--- a/repository-driver/AUR/PPpackage/repository_driver/AUR/update.py
+++ b/repository-driver/AUR/PPpackage/repository_driver/AUR/update.py
@@ -9,7 +9,7 @@
 from PPpackage.utils.json.validate import validate_json

 from .epoch import update as update_epoch
-from .schemes import AURPackage, DriverParameters, RepositoryParameters
+from .schemes import AURPackage
 from .state import State
 from .utils import transaction
diff --git a/repository-driver/conan/PPpackage/repository_driver/conan/get_formula.py b/repository-driver/conan/PPpackage/repository_driver/conan/get_formula.py
index b486120..b677fb7 100644
--- a/repository-driver/conan/PPpackage/repository_driver/conan/get_formula.py
+++ b/repository-driver/conan/PPpackage/repository_driver/conan/get_formula.py
@@ -1,4 +1,7 @@
 from collections.abc import Iterable
+from concurrent.futures import Future, ProcessPoolExecutor, as_completed
+from multiprocessing import cpu_count
+from pathlib import Path
 from typing import AsyncIterable

 from conan.api.conan_api import ConanAPI
@@ -11,7 +14,7 @@
 from .epoch import get as get_epoch
 from .schemes import Options
 from .state import State
-from .utils import get_requirements
+from .utils import create_api_and_app, get_requirements


 def get_recipes(api: ConanAPI) -> Iterable[RecipeReference]:
@@ -22,41 +25,63 @@ def get_revisions(api: ConanAPI, recipe: RecipeReference) -> Iterable[RecipeRefe
     return api.list.recipe_revisions(recipe)


+def get_formula_from_one_cache_impl(home: Path) -> Iterable[list[Requirement]]:
+    api, app = create_api_and_app(home)
+
+    for recipe in get_recipes(api):
+        for revision in get_revisions(api, recipe):
+            try:
+                requirements, system_requirements = get_requirements(
+                    api, app, revision, system=True
+                )
+            except:
+                continue
+
+            assert requirements is not None
+
+            revision_requirement = Requirement(
+                "noop",
+                f"conan-{revision.name}/{revision.version}#{revision.revision}",
+                False,
+            )
+
+            for requirement in requirements:
+                if not requirement.build:
+                    yield [
+                        revision_requirement,
+                        Requirement(
+                            "conan",
+                            {
+                                "package": str(requirement.ref.name),
+                                "version": str(requirement.ref.version),
+                            },
+                        ),
+                    ]
+
+            for requirement in system_requirements:
+                yield [
+                    revision_requirement,
+                    Requirement("pacman", requirement),
+                ]
+
+
+def get_formula_from_one_cache(home: Path) -> list[list[Requirement]]:
+    return list(get_formula_from_one_cache_impl(home))
+
+
 async def get_formula(
     state: State, translated_options: Options, epoch_result: Result[str]
 ) -> AsyncIterable[list[Requirement]]:
     async with rwlock_read(state.coroutine_lock, state.file_lock):
         epoch_result.set(get_epoch(state.database_path / "epoch"))

-        for recipe in get_recipes(state.api):
-            for revision in get_revisions(state.api, recipe):
-                try:
-                    requirements, system_requirements = get_requirements(
-                        state.api, state.app, revision, system=True
-                    )
-                except:
-                    continue
-
-                assert requirements is not None
-
-                revision_requirement = Requirement(
-                    "noop",
-                    f"conan-{revision.name}/{revision.version}#{revision.revision}",
-                    False,
-                )
+        with ProcessPoolExecutor() as executor:
+            futures = list[Future[list[list[Requirement]]]]()
+
+            for aux_home_path in state.aux_home_paths:
+                future = executor.submit(get_formula_from_one_cache, aux_home_path)
+                futures.append(future)

-                for requirement in requirements:
-                    if not requirement.build:
-                        yield [
-                            revision_requirement,
-                            Requirement(
-                                "conan",
-                                {
-                                    "package": str(requirement.ref.name),
-                                    "version": str(requirement.ref.version),
-                                },
-                            ),
-                        ]
-
-                for requirement in system_requirements:
-                    yield [revision_requirement, Requirement("pacman", requirement)]
+            for future in as_completed(futures):
+                for clause in future.result():
+                    yield clause
diff --git a/repository-driver/conan/PPpackage/repository_driver/conan/lifespan.py b/repository-driver/conan/PPpackage/repository_driver/conan/lifespan.py
index a4eec08..1caa087 100644
--- a/repository-driver/conan/PPpackage/repository_driver/conan/lifespan.py
+++ b/repository-driver/conan/PPpackage/repository_driver/conan/lifespan.py
@@ -3,12 +3,22 @@
 from pathlib import Path

 from aiorwlock import RWLock
-from conan.api.conan_api import ConanAPI
-from conan.internal.conan_app import ConanApp
 from fasteners import InterProcessReaderWriterLock

 from .schemes import DriverParameters, RepositoryParameters
 from .state import State
+from .utils import create_api_and_app
+
+AUX_HOMES_COUNT = 16
+
+
+def setup_home(home_path: Path):
+    home_path.mkdir(parents=True, exist_ok=True)
+
+    with (home_path / "global.conf").open("w") as file:
+        file.write("tools.system.package_manager:mode = report\n")
+
+    return home_path


 @asynccontextmanager
@@ -26,14 +36,17 @@ async def lifespan(
     coroutine_lock = RWLock()
     file_lock = InterProcessReaderWriterLock(database_path / "lock")

-    conan_home_path = database_path / "conan-home"
-    conan_home_path.mkdir(exist_ok=True, parents=True)
+    homes_path = database_path / "homes"
+    homes_path.mkdir(parents=True, exist_ok=True)

-    with (conan_home_path / "global.conf").open("w") as file:
-        file.write("tools.system.package_manager:mode = report\n")
+    main_home_path = homes_path / "main"
+    setup_home(main_home_path)
+
+    api, app = create_api_and_app(main_home_path)

-    api = ConanAPI(str(conan_home_path.absolute()))
-    app = ConanApp(api)
+    aux_home_paths = [
+        setup_home(homes_path / f"aux-{i}") for i in range(AUX_HOMES_COUNT)
+    ]

     yield State(
         database_path,
@@ -43,4 +56,5 @@
         file_lock,
         api,
         app,
+        aux_home_paths,
     )
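The duplicate-name check added to `Config` in the schemes diff above is a plain pydantic `field_validator` over the `repositories` list. A minimal standalone sketch of the same pattern, using simplified stand-in models rather than the project's actual classes:

```python
# Standalone sketch of the duplicate-repository-name validation pattern.
# RepoConfig and DemoConfig are illustrative stand-ins, not PPpackage classes.
from pydantic import BaseModel, ValidationError, field_validator


class RepoConfig(BaseModel):
    name: str
    driver: str


class DemoConfig(BaseModel):
    repositories: list[RepoConfig] = []

    @field_validator("repositories")
    @classmethod
    def repository_names_must_be_unique(
        cls, repositories: list[RepoConfig]
    ) -> list[RepoConfig]:
        names = set[str]()

        for repository in repositories:
            if repository.name in names:
                raise ValueError(f"Duplicate repository name: {repository.name}")

            names.add(repository.name)

        return repositories


# A configuration that reuses a name is rejected at validation time;
# pydantic wraps the ValueError in a ValidationError.
try:
    DemoConfig(
        repositories=[
            {"name": "AUR", "driver": "AUR"},
            {"name": "AUR", "driver": "AUR"},
        ]
    )
except ValidationError as error:
    print(error)
```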
diff --git a/repository-driver/conan/PPpackage/repository_driver/conan/state.py b/repository-driver/conan/PPpackage/repository_driver/conan/state.py
index a5f0f8b..c7d3125 100644
--- a/repository-driver/conan/PPpackage/repository_driver/conan/state.py
+++ b/repository-driver/conan/PPpackage/repository_driver/conan/state.py
@@ -17,3 +17,4 @@ class State:
     file_lock: InterProcessReaderWriterLock
     api: ConanAPI
     app: ConanApp
+    aux_home_paths: list[Path]
diff --git a/repository-driver/conan/PPpackage/repository_driver/conan/update.py b/repository-driver/conan/PPpackage/repository_driver/conan/update.py
index 2dd5eda..59858e1 100644
--- a/repository-driver/conan/PPpackage/repository_driver/conan/update.py
+++ b/repository-driver/conan/PPpackage/repository_driver/conan/update.py
@@ -1,20 +1,25 @@
 from collections.abc import Iterable, Sequence
-from concurrent.futures import Future, ThreadPoolExecutor
+from concurrent.futures import Future, ProcessPoolExecutor, ThreadPoolExecutor
+from contextlib import ExitStack
+from itertools import cycle
 from multiprocessing import cpu_count
 from pathlib import Path
 from sys import stderr
 from typing import cast as type_cast

 from conan.api.conan_api import ConanAPI
-from conan.api.model import Remote
+from conan.api.model import PackagesList, Remote
 from conan.internal.conan_app import ConanApp
 from conans.errors import ConanException
+from conans.model.profile import Profile
 from conans.model.recipe_ref import RecipeReference

+from PPpackage.utils.file import TemporaryDirectory
 from PPpackage.utils.lock.rw import write as rwlock_write

 from .epoch import update as update_epoch
 from .state import State
+from .utils import create_api_and_app


 def fetch_revisions(
@@ -28,7 +33,7 @@
                 RecipeReference, api.list.latest_recipe_revision(recipe, remote)
             )
         except ConanException:
-            print(f"Failed to fetch revisions for {recipe}", file=stderr)
+            print(f"WARNING: Failed to fetch revisions for {recipe}", file=stderr)
             return []
         else:
             return [revision]
@@ -55,24 +60,38 @@
             app.remote_manager.get_recipe(revision, remote)


+def restore(home_path: Path, archive_path: Path) -> None:
+    api, _ = create_api_and_app(home_path)
+
+    api.cache.restore(archive_path)
+
+
+def create_default_profile(home_path: Path, detected_profile: Profile) -> None:
+    profiles_path = home_path / "profiles"
+    profiles_path.mkdir(parents=True, exist_ok=True)
+
+    with (profiles_path / "default").open("w") as profile_file:
+        profile_file.write("[settings]\n")
+
+        for setting, value in detected_profile.settings.items():
+            profile_file.write(f"{setting}={value}\n")
+
+
 async def update(state: State) -> None:
     remote = Remote("", url=str(state.url), verify_ssl=state.verify_ssl)

     async with rwlock_write(state.coroutine_lock, state.file_lock):
         detected_profile = state.api.profiles.detect()

-        profiles_path = Path(state.api.home_folder) / "profiles"
-
-        profiles_path.mkdir(parents=True, exist_ok=True)
+        create_default_profile(Path(state.api.home_folder), detected_profile)

-        with (profiles_path / "default").open("w") as profile_file:
-            profile_file.write("[settings]\n")
-
-            for setting, value in detected_profile.settings.items():
-                profile_file.write(f"{setting}={value}\n")
+        for aux_home in state.aux_home_paths:
+            create_default_profile(aux_home, detected_profile)

         recipes = state.api.search.recipes("*", remote)

+        all_revisions = list[RecipeReference]()
+
         with ThreadPoolExecutor(cpu_count() * 16) as executor:
             futures = list[Future]()
@@ -83,7 +102,35 @@ async def update(state: State) -> None:
                     executor.submit(download_recipes, state.app, remote, revisions)
                 )

+                all_revisions.extend(revisions)
+
             for future in futures:
                 future.result()

+        package_lists = [PackagesList() for _ in state.aux_home_paths]
+
+        for revision, package_list in zip(all_revisions, cycle(package_lists)):
+            package_list.add_refs([revision])
+
+        with ExitStack() as exit_stack:
+            archive_paths = list[Path]()
+
+            for package_list in package_lists:
+                archive_path = (
+                    exit_stack.enter_context(TemporaryDirectory()) / "packages"
+                )
+
+                state.api.cache.save(package_list, archive_path)
+                archive_paths.append(archive_path)
+
+            with ProcessPoolExecutor() as executor:
+                futures = []
+
+                for archive_path, aux_home in zip(archive_paths, state.aux_home_paths):
+                    future = executor.submit(restore, aux_home, archive_path)
+                    futures.append(future)
+
+                for future in futures:
+                    future.result()
+
     update_epoch(state.database_path / "epoch")
diff --git a/repository-driver/conan/PPpackage/repository_driver/conan/utils.py b/repository-driver/conan/PPpackage/repository_driver/conan/utils.py
index f71eb01..3bbca36 100644
--- a/repository-driver/conan/PPpackage/repository_driver/conan/utils.py
+++ b/repository-driver/conan/PPpackage/repository_driver/conan/utils.py
@@ -1,4 +1,5 @@
 from collections.abc import Iterable
+from pathlib import Path

 from conan.api.conan_api import ConanAPI
 from conan.internal.conan_app import ConanApp
@@ -41,3 +42,10 @@ def get_requirements(
         return requirements, system_requirements

     return requirements, []
+
+
+def create_api_and_app(home_path: Path):
+    api = ConanAPI(str(home_path.absolute()))
+    app = ConanApp(api)
+
+    return api, app
diff --git a/repository-driver/update/PPpackage/repository_driver/update/__main__.py b/repository-driver/update/PPpackage/repository_driver/update/__main__.py
index 86d2fae..8abc00c 100644
--- a/repository-driver/update/PPpackage/repository_driver/update/__main__.py
+++ b/repository-driver/update/PPpackage/repository_driver/update/__main__.py
@@ -21,8 +21,8 @@ def load_parameters(Parameters: type, path: Path | None):
 @app.command()
 async def main(
     package: str,
+    name: str,
     data_path: Optional[Path] = None,
-    index: Optional[int] = None,
     driver_config_path: Annotated[
         Optional[Path], TyperOption("--driver-config")
     ] = None,
@@ -30,11 +30,8 @@
         Optional[Path], TyperOption("--repository-config")
     ] = None,
 ):
-    if data_path is None and index is None:
-        raise Exception("One of --data-path or --index must be specified.")
-
     if data_path is None:
-        data_path = Path.home() / ".PPpackage" / "repository" / str(index)
+        data_path = Path.home() / ".PPpackage" / "repository" / str(name)

     interface = load_interface_module(Interface, package)
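The `update()` change above deals the downloaded recipe revisions out across the auxiliary Conan homes by zipping them against `cycle(package_lists)`. A self-contained illustration of that round-robin split, with made-up revision strings and a made-up bucket count standing in for the `PackagesList` objects and `state.aux_home_paths`:

```python
# Round-robin distribution sketch: zip() against cycle() deals a finite list
# of items out over a fixed set of buckets, mirroring the update() logic above.
from itertools import cycle

AUX_HOME_COUNT = 4  # stand-in for len(state.aux_home_paths)
revisions = [f"pkg{i}/1.0#rev{i}" for i in range(10)]  # stand-in revision strings

buckets: list[list[str]] = [[] for _ in range(AUX_HOME_COUNT)]

# zip() stops once `revisions` is exhausted; cycle() keeps rotating the buckets.
for revision, bucket in zip(revisions, cycle(buckets)):
    bucket.append(revision)

for index, bucket in enumerate(buckets):
    print(f"aux-{index}: {bucket}")
```

Each auxiliary home ends up with roughly an equal share of the recipe cache, which is what lets `get_formula` fan the formula extraction out over a `ProcessPoolExecutor`.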