From 6eb90349130d99cb746a0d9a32c80e3db64a0b91 Mon Sep 17 00:00:00 2001 From: David Maxson Date: Wed, 31 Aug 2022 16:20:10 -0700 Subject: [PATCH] [WIP] Initial working version. Docs are still WIP, and no tests yet. --- .bumpversion.cfg | 10 + .github/workflows/docs.yml | 18 + .github/workflows/pre-commit.yml | 20 ++ .github/workflows/pypi.yml | 27 ++ .gitignore | 147 ++++++++ .pre-commit-config.yaml | 16 + README.md | 0 TODO.md | 2 + docker_printer/__init__.py | 0 docker_printer/__main__.py | 67 ++++ docker_printer/cli.py | 73 ++++ docker_printer/models.py | 313 ++++++++++++++++++ .../templates/base.Dockerfile.jinja2 | 13 + .../templates/chunk.Dockerfile.jinja2 | 35 ++ docker_printer/utils.py | 98 ++++++ docs/Makefile | 20 ++ docs/builds.md | 74 +++++ docs/conf.py | 43 +++ docs/index.md | 81 +++++ docs/make.bat | 35 ++ docs/modules.md | 63 ++++ docs/synth.md | 6 + docs/targets.md | 127 +++++++ docs/templates.md | 2 + pyproject.toml | 7 + requirements.txt | 3 + setup.cfg | 35 ++ 27 files changed, 1335 insertions(+) create mode 100644 .bumpversion.cfg create mode 100644 .github/workflows/docs.yml create mode 100644 .github/workflows/pre-commit.yml create mode 100644 .github/workflows/pypi.yml create mode 100644 .gitignore create mode 100644 .pre-commit-config.yaml create mode 100644 README.md create mode 100644 TODO.md create mode 100644 docker_printer/__init__.py create mode 100644 docker_printer/__main__.py create mode 100644 docker_printer/cli.py create mode 100644 docker_printer/models.py create mode 100644 docker_printer/resources/templates/base.Dockerfile.jinja2 create mode 100644 docker_printer/resources/templates/chunk.Dockerfile.jinja2 create mode 100644 docker_printer/utils.py create mode 100644 docs/Makefile create mode 100644 docs/builds.md create mode 100644 docs/conf.py create mode 100644 docs/index.md create mode 100644 docs/make.bat create mode 100644 docs/modules.md create mode 100644 docs/synth.md create mode 100644 docs/targets.md create mode 100644 docs/templates.md create mode 100644 pyproject.toml create mode 100644 requirements.txt create mode 100644 setup.cfg diff --git a/.bumpversion.cfg b/.bumpversion.cfg new file mode 100644 index 0000000..988c2fd --- /dev/null +++ b/.bumpversion.cfg @@ -0,0 +1,10 @@ +[bumpversion] +current_version = 0.1.0 +commit = True +tag = True + +[bumpversion:file:setup.cfg] + +[bumpversion:file:docker-printer/__init__.py] + +[bumpversion:file:docs/conf.py] diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml new file mode 100644 index 0000000..6368ebf --- /dev/null +++ b/.github/workflows/docs.yml @@ -0,0 +1,18 @@ +# Copied almost verbatim from https://github.com/marketplace/actions/sphinx-build +name: sphinx +on: + - push + +jobs: + docs_build: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: ammaraskar/sphinx-action@master + with: + docs-folder: "docs/" + + - uses: actions/upload-artifact@v1 + with: + name: DocumentationHTML + path: docs/_build/html/ diff --git a/.github/workflows/pre-commit.yml b/.github/workflows/pre-commit.yml new file mode 100644 index 0000000..138af6f --- /dev/null +++ b/.github/workflows/pre-commit.yml @@ -0,0 +1,20 @@ +name: pre-commit + +on: + pull_request: + push: + branches: + - main + - dev + +jobs: + pre-commit: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + with: + fetch-depth: 0 + - uses: actions/setup-python@v4 + - uses: pre-commit/action@v3.0.0 + with: + token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/pypi.yml 
b/.github/workflows/pypi.yml new file mode 100644 index 0000000..1c15831 --- /dev/null +++ b/.github/workflows/pypi.yml @@ -0,0 +1,27 @@ +name: Upload Python Package + +on: + release: + types: [published] + +jobs: + deploy: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: '3.x' + cache: 'pip' + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install build + - name: Build package + run: python -m build + - name: Publish package + uses: pypa/gh-action-pypi-publish@27b31702a0e7fc50959f5ad993c78deac1bdfc29 + with: + user: __token__ + password: ${{ secrets.PYPI_API_TOKEN }} diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..2daf2c8 --- /dev/null +++ b/.gitignore @@ -0,0 +1,147 @@ +### Example user template template +### Example user template + +# IntelliJ project files +.idea +*.iml +out +gen +### Python template +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..855ff8b --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,16 @@ +# See https://pre-commit.com for more information +# See https://pre-commit.com/hooks.html for more hooks +repos: +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.3.0 + hooks: + - id: trailing-whitespace + - id: end-of-file-fixer + - id: check-yaml + args: ['--unsafe'] # Temporary workaround while we still have cloudformation files + - id: check-added-large-files +- repo: https://github.com/psf/black + rev: 22.6.0 + hooks: + - id: black + additional_dependencies: ['click!=8.1.0'] diff --git a/README.md b/README.md new file mode 100644 index 0000000..e69de29 diff --git a/TODO.md b/TODO.md new file mode 100644 index 0000000..74ac235 --- /dev/null +++ b/TODO.md @@ -0,0 +1,2 @@ +- Add `setup` feature for modules +- Add build profiles to control the generation of the bakefile(s) diff --git a/docker_printer/__init__.py b/docker_printer/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/docker_printer/__main__.py b/docker_printer/__main__.py new file mode 100644 index 0000000..b6866bc --- /dev/null +++ b/docker_printer/__main__.py @@ -0,0 +1,67 @@ +import logging +import sys +from logging.config import dictConfig + +log = logging.getLogger(__name__) + + +def main(): + dictConfig( + dict( + version=1, + formatters=dict( + brief=dict( + format=logging.BASIC_FORMAT, + ), + bare=dict( + format="%(message)s", + ), + ), + handlers=dict( + console={ + "class": "logging.StreamHandler", + "formatter": "brief", + "level": logging.DEBUG, + "stream": sys.stdout, + }, + console_bare={ + "class": "logging.StreamHandler", + "formatter": "bare", + "level": logging.DEBUG, + "stream": sys.stderr, + }, + ), + loggers=dict( + rearc_cli=dict( + level=logging.DEBUG, + propagate=False, + handlers=["console_bare"], + ), + rearc_data_utils=dict( + level=logging.DEBUG, + propagate=True, + ), + jobs=dict( + level=logging.DEBUG, + propagate=True, + ), + common=dict( + level=logging.DEBUG, + propagate=True, + ), + __main__=dict( + level=logging.DEBUG, + propagate=True, + ), + ), + root=dict(handlers=["console"]), + ) + ) + + from docker_printer.cli import cli + + cli() + + +if __name__ == "__main__": + main() diff --git a/docker_printer/cli.py b/docker_printer/cli.py new file mode 100644 index 0000000..bfc7656 --- /dev/null +++ b/docker_printer/cli.py @@ -0,0 +1,73 @@ +import shutil +import subprocess +from pathlib import Path + +import click + +from .models import TargetCollection, BuildConfig, BuildConfigCollection +from .utils import ( + config_dir, + jinja_env, + yml_load, + preload_modules, + targets_file, + builds_file, +) + + +@click.group() +def cli(): + pass + + +@cli.command() +def synth(): + _synth() + + +def _synth(): + preload_modules() + + targets = TargetCollection.parse_obj(yml_load(targets_file())) + build_configs = BuildConfigCollection.parse_obj(yml_load(builds_file())) + + dockerfile = 
targets.render_dockerfile(jinja_env()) + dockerfile_path = Path("Dockerfile.synth") + + click.echo(f"Saving to {dockerfile_path}") + dockerfile_path.write_text(dockerfile) + + for build_config in build_configs.__root__: + bakefile_path = Path(f"docker-bake.{build_config.name}.json") + bakefile = build_config.generate_bakefile(targets) + bakefile_path.write_text(bakefile) + click.echo(build_config.build_command) + + return targets, build_configs + + +@cli.command() +@click.argument("name") +def build(name): + _, build_configs = _synth() + + try: + config = next(cfg for cfg in build_configs.__root__ if cfg.name == name) + except StopIteration: + raise click.Abort(f"No build config found with name '{name}'") + + subprocess.run(config.build_command) + + +@cli.command() +def init(): + base_dir = config_dir() + if base_dir.exists(): + click.echo(f"{base_dir} already exists, cannot initialize new project") + raise click.Abort() + + base_dir.mkdir(exist_ok=False, parents=False) + (base_dir / "modules").mkdir(exist_ok=False, parents=False) + (base_dir / "templates").mkdir(exist_ok=False, parents=False) + (base_dir / "targets.yml.jinja2").touch(exist_ok=False) + (base_dir / "builds.yml.jinja2").touch(exist_ok=False) diff --git a/docker_printer/models.py b/docker_printer/models.py new file mode 100644 index 0000000..e863fe7 --- /dev/null +++ b/docker_printer/models.py @@ -0,0 +1,313 @@ +import json +import re +from collections import defaultdict +from typing import List, Optional, Set, Dict, Any, Hashable, Union, Iterable + +import jinja2 +from pydantic import BaseModel, validator, PrivateAttr + + +class CommonListTree: + class Node: + def __init__(self): + self.children = defaultdict(CommonListTree.Node) + self.labels = set() + # self.parent: CommonListTree.Node = None + + def merge_list(self, vals: List[Hashable], label: str): + self.labels.add(label) + if not vals: + return + + v, *remaining = vals + child = self.children[v] + # child.parent = self + child.merge_list(remaining, label) + + @property + def terminal_labels(self): + return set(self.labels) - { + lbl for child in self.children.values() for lbl in child.labels + } + + def print_tree(self, _indent=0): + for value, child in self.children.items(): + if child.terminal_labels: + suffix = f' [{", ".join(child.terminal_labels)}]' + else: + suffix = "" + print(" " * _indent + f"- {value}" + suffix) + child.print_tree(_indent + 1) + + def visit(self, func): + for value, child in self.children.items(): + func(value, child, self) + child.visit(func) + + def __init__(self): + self.root = CommonListTree.Node() + + def merge_list(self, *args, **kwargs): + self.root.merge_list(*args, **kwargs) + + def print_tree(self): + self.root.print_tree() + + def visit(self, func): + self.root.visit(func) + + +class FilledTemplate(BaseModel): + file: str = "chunk.Dockerfile.jinja2" + variables: Dict[str, Any] = {} + + def render(self, environment: jinja2.Environment): + return environment.get_template(self.file).render(**self.variables) + + +class Module(BaseModel): + __modules__: Dict[str, "Module"] = dict() + + name: str + depends_on: List[str] = [] + priority: int = 0 + template: FilledTemplate + image_args: Dict[str, Any] = {} + + _all_modules: Set["Module"] = PrivateAttr(default=None) + + def __init__(self, **kwargs): + super().__init__(**kwargs) + if self.name in self.__modules__: + raise RuntimeError( + f"Multiple modules defined with the same name: '{self.name}'" + ) + self.__modules__[self.name] = self + + def __hash__(self): + return hash(self.name) + + 
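+    # Modules may declare image_args either as a mapping of ARG names to
+    # default values or as a bare list of names; the pre-validator below
+    # coerces a list/tuple into {name: None} so downstream code can assume
+    # a dict. (Descriptive comment only; behavior unchanged.)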
@validator("image_args", pre=True) + def ensure_is_dictionary(cls, v): + if isinstance(v, (list, tuple)): + return {k: None for k in v} + else: + return v + + def all_modules(self) -> Set["Module"]: + if self._all_modules is None: + self._all_modules = {self} | { + mod + for dep in self.depends_on + for mod in Module.__modules__[dep].all_modules() + } + return self._all_modules + + def get_chunk(self, environment: jinja2.Environment, prev_name, cur_name): + vars = dict(self.template.variables) + vars.setdefault("base", prev_name) + vars.setdefault("name", cur_name) + vars.setdefault("labels", {}) + vars.setdefault("arguments", {}) + vars.setdefault("env", {}) + return environment.get_template(self.template.file).render(**vars) + + def __repr__(self): + return f"<{type(self).__name__} {self.name}>" + + def __str__(self): + return self.name + + +class Target(BaseModel): + __targets__: Dict[str, "Target"] = {} + + name: str + modules: List[str] = set() + extends: List[str] = [] + exclude: bool = False + tags: List[str] = [] + + _all_modules: List[Module] = PrivateAttr(default=None) + + def __init__(self, **kwargs): + super().__init__(**kwargs) + if self.name in self.__targets__: + raise RuntimeError( + f"Multiple targets defined with the same name: '{self.name}'" + ) + self.__targets__[self.name] = self + + def __hash__(self): + return hash(self.name) + + @property + def _resolved_modules(self): + return [Module.__modules__[m] for m in self.modules] + + @property + def _extended_targets(self) -> Set["Target"]: + return { + target + for extended_target in self.extends + for target in Target.__targets__[extended_target].all_targets() + } + + def all_targets(self): + return {self} | self._extended_targets + + def all_modules(self) -> Iterable[Module]: + if self._all_modules is None: + modules = { + mod + for base_mod in self._resolved_modules + for mod in base_mod.all_modules() + } | { + mod for target in self._extended_targets for mod in target.all_modules() + } + self._all_modules = list( + sorted(modules, key=lambda m: (-m.priority, m.name)) + ) + + return self._all_modules + + def render_dockerfile(self, environment: jinja2.Environment): + image_args = {} + for mod in self._resolved_modules: + image_args.update(mod.image_args) + + prev_mod = None + chunks = [] + for mod in self.all_modules(): + chunks.append(mod.get_chunk(environment, prev_mod.name, mod.name)) + prev_mod = mod + + dockerfile = environment.get_template("base.Dockerfile.jinja2").render( + image_arguments=image_args, chunks=chunks + ) + dockerfile = re.sub(r"\n{3,}", r"\n\n", dockerfile) + return dockerfile + + def __repr__(self): + return f"<{type(self).__name__} {self.name}>" + + def __str__(self): + return self.name + + +class TargetCollection(BaseModel): + __root__: Set[Target] + + @property + def targets(self): + return [t for t in self.__root__ if not t.exclude] + + # def __getitem__(self, item: str) -> Target: + # try: + # return next(t for t in self.targets if t.name == item) + # except StopIteration: + # raise KeyError(f"Name {item} not found in target collection") + + def render_dockerfile( + self, environment: jinja2.Environment, targets: List[str] = () + ): + targets = self.targets + + pre_image_args = dict() + for target in targets: + for module in target.all_modules(): + pre_image_args.update(module.image_args) + + module_tree = CommonListTree() + for target in targets: + module_tree.merge_list(target.all_modules(), target.name) + + module_tree.print_tree() + chunks: Dict[Union[CommonListTree.Node, str], str] = {} 
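+        # `names` maps each tree node to the stage name it will receive in the
+        # synthesized Dockerfile; `chunks` maps nodes (or target names) to the
+        # rendered stage text. Nodes shared by every target collapse into a
+        # single common stage, so shared layers are emitted only once.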
+ names = {} + image_args = {} + last_chunk_per_target = {} + + def visit_node( + module: Module, node: CommonListTree.Node, parent: CommonListTree.Node + ): + image_args.update(module.image_args) + + if len(node.terminal_labels) == 1: # This is a terminal node for a target + cur_name = list(node.terminal_labels)[0] + elif len(node.labels) == len(targets): # All targets go through this node + cur_name = module.name + else: # Something in between + cur_name = "-".join([module.name] + list(sorted(node.labels))) + + for label in node.labels: + last_chunk_per_target[label] = cur_name + + names[node] = cur_name + prev_name = names.get(parent) + chunks[node] = module.get_chunk(environment, prev_name, cur_name) + + module_tree.visit(visit_node) + for target in targets: + if target.name not in names.values(): + chunks[target.name] = environment.get_template( + "chunk.Dockerfile.jinja2" + ).render( + base=last_chunk_per_target[target.name], + name=target.name, + labels={}, + arguments={}, + env={}, + ) + + dockerfile = environment.get_template("base.Dockerfile.jinja2").render( + image_arguments=image_args, chunks=list(chunks.values()) + ) + dockerfile = re.sub(r"\n{3,}", r"\n\n", dockerfile) + + return dockerfile + + +class BuildConfig(BaseModel): + name: str + image: str + tag_prefix: Optional[str] + tag_postfix: Optional[str] + platforms: List[str] = ["linux/amd64"] + build_args: Dict[str, Any] = {"load": True} + limit_tags: List[str] = [] + + def generate_bakefile(self, target_collection: TargetCollection): + def tag_maker(name): + return "-".join(v for v in [self.tag_prefix, name, self.tag_postfix] if v) + + targets = [ + t + for t in target_collection.targets + if all(tag in t.tags for tag in self.limit_tags) + ] + + return json.dumps( + dict( + group=dict(default=dict(targets=[target.name for target in targets])), + target={ + target.name: dict( + dockerfile="Dockerfile.synth", + tags=[f"{self.image}:{tag_maker(target.name)}"], + platforms=self.platforms, + target=target.name, + **self.build_args, + ) + for target in targets + }, + ), + indent=2, + ) + + @property + def build_command(self): + return f"docker buildx bake -f docker-bake.{self.name}.json" + + +class BuildConfigCollection(BaseModel): + __root__: List[BuildConfig] diff --git a/docker_printer/resources/templates/base.Dockerfile.jinja2 b/docker_printer/resources/templates/base.Dockerfile.jinja2 new file mode 100644 index 0000000..f400f3c --- /dev/null +++ b/docker_printer/resources/templates/base.Dockerfile.jinja2 @@ -0,0 +1,13 @@ +# syntax = docker/dockerfile:1.2 + +{% block pre_init %}{% endblock %} + +{% block image_args -%} +{% for arg, default in image_arguments.items() -%} +ARG {{ arg }}{% if default %}={{ default }}{% endif %} +{% endfor %} +{%- endblock %} + +{% for chunk in chunks %} +{{ chunk }} +{% endfor %} diff --git a/docker_printer/resources/templates/chunk.Dockerfile.jinja2 b/docker_printer/resources/templates/chunk.Dockerfile.jinja2 new file mode 100644 index 0000000..d58b219 --- /dev/null +++ b/docker_printer/resources/templates/chunk.Dockerfile.jinja2 @@ -0,0 +1,35 @@ +{% block from -%} +FROM {{ base }} AS {{ name }} +{%- endblock %} + +{% block labels -%} +{% for key, value in labels.items() -%} +LABEL "{{ key }}"={{ value }}" +{% endfor %} +{%- endblock %} + +{% block args -%} +{% for arg, default in arguments.items() -%} +ARG {{ arg }}{% if default %}={{ default }}{% endif %} +{% endfor %} +{%- endblock %} + +{% block env -%} +{% for key, value in env.items() -%} +ENV {{ key }}="{{ value }}" +{% endfor %} +{%- 
endblock %} + +{% block instructions -%} +{% for instr in instructions -%} +{{ instr }} +{% endfor %} +{%- endblock %} + +{% if entrypoint -%} +ENTRYPOINT {{ entrypoint }} +{%- endif %} + +{% if command -%} +CMD {{ command }} +{%- endif %} diff --git a/docker_printer/utils.py b/docker_printer/utils.py new file mode 100644 index 0000000..6a4d03e --- /dev/null +++ b/docker_printer/utils.py @@ -0,0 +1,98 @@ +import getpass +import platform +from pathlib import Path + +import jinja2 +import pkg_resources +import yaml +from yaml.scanner import ScannerError + +from .models import Module + + +def config_dir() -> Path: + return Path() / "docker-printer" + + +def base_resources_dir() -> Path: + return Path(pkg_resources.resource_filename(__name__, "resources")) + + +def jinja_env(): + return jinja2.Environment( + loader=jinja2.FileSystemLoader( + searchpath=[ + config_dir() / "templates", + base_resources_dir() / "templates", + ] + ), + auto_reload=True, + ) + + +def yml_load(path: Path): + try: + return yaml.safe_load(path.open()) + except ScannerError as e: + raise ValueError(f"Invalid YAML file: {path.resolve()}") from e + + +def preload_modules(): + for root in [base_resources_dir(), config_dir()]: + for f in (root / "modules").rglob("*.yml"): + Module.parse_obj(yml_load(f)) # Side effect: stored in Module.__modules__ + + +def targets_file(): + targets_raw_path = config_dir() / "targets.yml" + targets_template_path = config_dir() / "targets.yml.jinja2" + targets_rendered_path = config_dir() / "targets.rendered.yml" + + if targets_raw_path.exists() and targets_template_path.exists(): + raise RuntimeError( + f"Can only have one of {targets_raw_path} or {targets_template_path}" + ) + + elif targets_template_path.exists(): + rendered = jinja2.Template(targets_template_path.read_text()).render() + targets_rendered_path.write_text(rendered) + return targets_rendered_path + + elif targets_raw_path.exists(): + return targets_raw_path + + else: + raise RuntimeError(f"No targets.yml found in {config_dir()}") + + +def _local_docker_architecture(): + architecture_map = { + "x86_64": "amd64", + } + arch = platform.machine().lower() + return architecture_map.get(arch, arch) + + +def builds_file(): + builds_raw_path = config_dir() / "builds.yml" + builds_template_path = config_dir() / "builds.yml.jinja2" + builds_rendered_path = config_dir() / "builds.rendered.yml" + + if builds_raw_path.exists() and builds_template_path.exists(): + raise RuntimeError( + f"Can only have one of {builds_raw_path} or {builds_template_path}" + ) + + elif builds_template_path.exists(): + rendered = jinja2.Template(builds_template_path.read_text()).render( + username=getpass.getuser(), + local_architecture=_local_docker_architecture(), + ) + builds_rendered_path.write_text(rendered) + return builds_rendered_path + + elif builds_raw_path.exists(): + return builds_raw_path + + else: + raise RuntimeError(f"No builds.yml found in {config_dir()}") diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 0000000..d4bb2cb --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,20 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line, and also +# from the environment for the first two. +SPHINXOPTS ?= +SPHINXBUILD ?= sphinx-build +SOURCEDIR = . +BUILDDIR = _build + +# Put it first so that "make" without argument is like "make help". 
+help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/builds.md b/docs/builds.md new file mode 100644 index 0000000..83b8444 --- /dev/null +++ b/docs/builds.md @@ -0,0 +1,74 @@ +(builds)= +# Builds + + +(build_tagging)= +### Build Particular Tags + + + +## `builds.yml` Schema + +```json +{ + "title": "BuildConfigCollection", + "type": "array", + "items": { + "$ref": "#/definitions/BuildConfig" + }, + "definitions": { + "BuildConfig": { + "title": "BuildConfig", + "type": "object", + "properties": { + "name": { + "title": "Name", + "type": "string" + }, + "image": { + "title": "Image", + "type": "string" + }, + "tag_prefix": { + "title": "Tag Prefix", + "type": "string" + }, + "tag_postfix": { + "title": "Tag Postfix", + "type": "string" + }, + "platforms": { + "title": "Platforms", + "default": [ + "linux/amd64" + ], + "type": "array", + "items": { + "type": "string" + } + }, + "build_args": { + "title": "Build Args", + "default": { + "load": true + }, + "type": "object" + }, + "limit_tags": { + "title": "Limit Tags", + "default": [], + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "name", + "image" + ] + } + } +} + +``` diff --git a/docs/conf.py b/docs/conf.py new file mode 100644 index 0000000..71e37c4 --- /dev/null +++ b/docs/conf.py @@ -0,0 +1,43 @@ +# Configuration file for the Sphinx documentation builder. +# +# For the full list of built-in configuration values, see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html + +# -- Project information ----------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information + +project = "docker-printer" +copyright = "2022, David Maxson" +author = "David Maxson" +release = "0.1.0" + +# -- General configuration --------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration + +extensions = [ + "myst_parser", + "sphinx.ext.doctest", + "sphinx.ext.todo", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This pattern also affects html_static_path and html_extra_path. +exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] + +# -- Options for HTML output ------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +html_theme = "sphinx_rtd_theme" + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] diff --git a/docs/index.md b/docs/index.md new file mode 100644 index 0000000..1f31464 --- /dev/null +++ b/docs/index.md @@ -0,0 +1,81 @@ +# Docker-Printer + +`docker-printer` is a CLI for easily managing multistep and branching dockerfiles. 
+
+```{toctree}
+---
+maxdepth: 1
+caption: Index
+---
+
+templates
+modules
+targets
+builds
+synth
+```
+
+Regular multi-stage dockerfiles and `docker build` commands are incredibly powerful; however, they are designed for building a single image. Multi-stage builds can be used to define multiple related images, but this quickly results in complicated dockerfiles, duplicated instructions, and unwieldy collections of build commands.
+
+`docker-printer` addresses this in two main ways:
+- By allowing dockerfiles to be composed from re-usable modules.
+- By building bake files for `docker buildx bake` that consolidate the build processes of multiple images in multiple environments into a single configuration file.
+
+## Example
+
+Consider building two related Python images that differ in only a few dependencies or files, plus a development and a production variant of each. You might end up with something like the following:
+
+```dockerfile
+FROM python:latest AS base
+COPY requirements.txt /requirements.txt
+RUN pip install -r requirements.txt
+
+FROM base AS dev_matplotlib
+RUN pip install matplotlib
+
+FROM base AS dev_plotly
+RUN pip install plotly
+
+FROM dev_matplotlib AS prod_matplotlib
+WORKDIR /app
+COPY app /app
+CMD ["flask", "/app/app.py"]
+
+FROM dev_plotly AS prod_plotly
+WORKDIR /app
+COPY app /app
+CMD ["flask", "/app/app.py"]
+```
+
+You now need to manage at least four different build commands, one per target. And if the build branches beyond just a couple of variations, the dockerfile quickly becomes unwieldy and the duplicated instructions become hard to maintain.
+
+Existing solutions include:
+- Using Docker Compose to point at each build target, consolidating the build commands into a single compose file.
+- Consolidating re-used setup commands into a shell script and executing it in each duplicated stage.
+- Writing helper scripts for developers that enumerate all the build commands.
+
+`docker-printer` provides another option that relies on nothing more than multi-stage builds and, if desired, `docker buildx`, while producing templated, branching, self-arranging dockerfiles with no duplicated instructions. It also provides an easy way to share best practices for common dockerfile tasks, such as caching downloads when installing packages with `apt-get` or `pip`.
+
+## Getting Started
+
+To get started, install `docker-printer`:
+
+```
+pip install docker-printer
+```
+
+Then initialize the printer configuration in your repository:
+
+```
+docker-printer init
+```
+
+This creates the folders and files necessary to use `docker-printer`.
+
+Next, you'll define the following for your particular dockerfile needs:
+- Define [templates](templates), which are Jinja2 template files representing stages (or parts of stages) in your build process.
+- Define [modules](modules), which populate templates to describe a particular re-usable stage of a dockerfile.
+- Specify your final desired [targets](targets), which define the output images of your dockerfile.
+- Define one or more [build configurations](builds) that describe which targets are built, how the resulting images are tagged, and where they are stored.
+
+Once you have your definitions laid out, you can [synthesize](synthesize) your final Dockerfile and, if desired, your bake files.
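+
+For a concrete picture, here is a sketch of what the configuration for the example above could look like once it is split into modules, targets, and a build config. The module names, file paths, priorities, and variable values below are illustrative assumptions rather than files shipped with `docker-printer`; only the field names follow the schemas documented on the following pages.
+
+```yaml
+# docker-printer/modules/python-base.yml  (hypothetical)
+name: python-base
+priority: 10        # higher-priority modules are ordered earlier in the dockerfile
+template:
+  variables:
+    base: python:latest
+    instructions:
+      - COPY requirements.txt /requirements.txt
+      - RUN pip install -r requirements.txt
+
+# docker-printer/modules/matplotlib.yml  (hypothetical)
+name: matplotlib
+depends_on: [python-base]
+priority: 5
+template:
+  variables:
+    instructions:
+      - RUN pip install matplotlib
+
+# docker-printer/modules/app.yml  (hypothetical)
+name: app
+template:
+  variables:
+    instructions:
+      - WORKDIR /app
+      - COPY app /app
+    command: '["flask", "/app/app.py"]'
+
+# docker-printer/targets.yml  (hypothetical)
+- name: dev_matplotlib
+  modules: [matplotlib]
+  tags: [dev]
+- name: prod_matplotlib
+  extends: [dev_matplotlib]
+  modules: [app]
+
+# docker-printer/builds.yml  (hypothetical)
+- name: local
+  image: example/my-app
+  limit_tags: []      # set to [dev] to build only the dev-tagged targets
+```
+
+With files like these in place, `docker-printer synth` writes `Dockerfile.synth` and a `docker-bake.local.json` bakefile, and `docker-printer build local` wraps the corresponding `docker buildx bake -f docker-bake.local.json` invocation.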
diff --git a/docs/make.bat b/docs/make.bat new file mode 100644 index 0000000..954237b --- /dev/null +++ b/docs/make.bat @@ -0,0 +1,35 @@ +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set SOURCEDIR=. +set BUILDDIR=_build + +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. + echo.If you don't have Sphinx installed, grab it from + echo.https://www.sphinx-doc.org/ + exit /b 1 +) + +if "%1" == "" goto help + +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% +goto end + +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% + +:end +popd diff --git a/docs/modules.md b/docs/modules.md new file mode 100644 index 0000000..a79e8d0 --- /dev/null +++ b/docs/modules.md @@ -0,0 +1,63 @@ +(modules)= +# Modules + +## `.yml` schema + +```json +{ + "title": "Module", + "type": "object", + "properties": { + "name": { + "title": "Name", + "type": "string" + }, + "depends_on": { + "title": "Depends On", + "default": [], + "type": "array", + "items": { + "type": "string" + } + }, + "priority": { + "title": "Priority", + "default": 0, + "type": "integer" + }, + "setup": { + "$ref": "#/definitions/FilledTemplate" + }, + "template": { + "$ref": "#/definitions/FilledTemplate" + }, + "image_args": { + "title": "Image Args", + "default": {}, + "type": "object" + } + }, + "required": [ + "name", + "template" + ], + "definitions": { + "FilledTemplate": { + "title": "FilledTemplate", + "type": "object", + "properties": { + "file": { + "title": "File", + "default": "chunk.Dockerfile.jinja2", + "type": "string" + }, + "variables": { + "title": "Variables", + "default": {}, + "type": "object" + } + } + } + } +} +``` diff --git a/docs/synth.md b/docs/synth.md new file mode 100644 index 0000000..77272e5 --- /dev/null +++ b/docs/synth.md @@ -0,0 +1,6 @@ +(synthesize)= +# Synthesize + +``` +docker-printer synth +``` diff --git a/docs/targets.md b/docs/targets.md new file mode 100644 index 0000000..976a94d --- /dev/null +++ b/docs/targets.md @@ -0,0 +1,127 @@ +(targets)= +# Targets + +While a synthesized dockerfile may have many stages, only some represent final stages for images you care about. Target definitions are how we specify the final stages we care about, and from there `docker-printer` can figure out how to create the dockerfile necessary to produce that stage. + +## Example + +In your `docker-printer/` folder, define a `targets.yml` file. It will be a list of targets, each of which looks like the following: + +```yaml +- name: my-target + modules: + - mod1 + - mod2 +``` + +A target can extend another target: + +```yaml +- name: my-next-target + extends: + - my-target + modules: + - mod3 +``` + +This will mark that the new `my-next-target` target relies on all the same modules as `my-target`. This will result in its dockerfile stages being based on the stages of the earlier target. This can be useful, for example, to define a "dev" target with your application dependencies, then a "prod" target that also includes your source code. + +### Tags + +Targets can be tagged, which allows them to be filtered later in particular build configurations. + +```yaml +- name: dev1 + tags: + - dev + # ... 
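+# Targets that share a tag can be selected as a group: a build config whose
+# `limit_tags` includes "dev" will build every target tagged this way.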
+ +- name: dev2 + tags: + - dev + # ... +``` + +See [building tags](build_tagging). + +### Exclude + +Targets can be excluded from being directly built if they merely represent a common ancestor of other targets. + +```yaml +- name: base + modules: + - mod1 + - mod2 + - mod3 + exclude: true + +- name: dev + extends: + - base + modules: + - mod4 + +- name: prod + extends: + - base + modules: + - mod5 +``` + +## `targets.yml` Schema + +```json +{ + "title": "TargetCollection", + "type": "array", + "items": { + "$ref": "#/definitions/Target" + }, + "uniqueItems": true, + "definitions": { + "Target": { + "title": "Target", + "type": "object", + "properties": { + "name": { + "title": "Name", + "type": "string" + }, + "modules": { + "title": "Modules", + "default": [], + "type": "array", + "items": { + "type": "string" + } + }, + "extends": { + "title": "Extends", + "default": [], + "type": "array", + "items": { + "type": "string" + } + }, + "exclude": { + "title": "Exclude", + "default": false, + "type": "boolean" + }, + "tags": { + "title": "Tags", + "default": [], + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "name" + ] + } + } +} +``` diff --git a/docs/templates.md b/docs/templates.md new file mode 100644 index 0000000..784ba73 --- /dev/null +++ b/docs/templates.md @@ -0,0 +1,2 @@ +(templates)= +# Templates diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..317a153 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,7 @@ +[build-system] +requires = [ + "setuptools>=42", + "build", + "wheel" +] +build-backend = "setuptools.build_meta" diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..c39e2af --- /dev/null +++ b/requirements.txt @@ -0,0 +1,3 @@ +pyyaml +jinja2 +click diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 0000000..f8586ca --- /dev/null +++ b/setup.cfg @@ -0,0 +1,35 @@ +[metadata] +version = 0.1.0 +name = docker-printer +description = Composer for dockerfiles +author = Rearc Data +author_email = data@rearc.io + +long_description = file: README.md +long_description_content_type = text/markdown +url = https://github.com/rearc-data/docker-printer + +project_urls = + Bug Tracker = https://github.com/rearc-data/docker-printer/issues +classifiers = + Programming Language :: Python :: 3 + License :: OSI Approved :: MIT License + Operating System :: OS Independent + +[options.entry_points] +console_scripts = + docker-printer = docker_printer.__main__:main + +[options] +packages = find: +python_requires = >=3.7 +install_requires = + pyyaml + jinja2 + click + +[options.packages.find] +where = . + +[tool:pytest] +addopts = --cov=docker-printer --cov-report term-missing --cov-report html