diff --git a/.github/workflows/pre-commit.yml b/.github/workflows/pre-commit.yml
new file mode 100644
index 0000000..2b11178
--- /dev/null
+++ b/.github/workflows/pre-commit.yml
@@ -0,0 +1,14 @@
+name: pre-commit
+
+on:
+  pull_request:
+  push:
+    branches: [main]
+
+jobs:
+  pre-commit:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v3
+      - uses: pre-commit/action@v3.0.1
diff --git a/.gitignore b/.gitignore
index e4bf442..d8dd9a3 100644
--- a/.gitignore
+++ b/.gitignore
@@ -88,4 +88,4 @@ cython_debug/
 # Project specific
 data.yaml
 config.yaml
-.ceph
\ No newline at end of file
+.ceph
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 0000000..f715b3c
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,17 @@
+repos:
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    rev: v0.3.1
+    hooks:
+      - id: ruff
+        args: [ --fix ]
+      - id: ruff-format
+  - repo: https://github.com/pre-commit/pre-commit-hooks
+    rev: v4.5.0
+    hooks:
+      - id: check-yaml
+      - id: check-json
+      - id: check-toml
+      - id: check-merge-conflict
+      - id: mixed-line-ending
+      - id: end-of-file-fixer
+      - id: trailing-whitespace
diff --git a/MANIFEST.in b/MANIFEST.in
index c607150..1cd3ffd 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1,3 +1,3 @@
-include pyproject.toml README requirements.txt
+include pyproject.toml README.md requirements.txt
 
 recursive-include tests *
diff --git a/pyproject.toml b/pyproject.toml
index b768650..63a7d09 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,3 +1,6 @@
 [build-system]
 requires = [ "setuptools", "wheel" ]
 build-backend = "setuptools.build_meta"
+
+[project]
+requires-python = ">=3.9"
diff --git a/setup.py b/setup.py
index e1d60b9..46f0fd9 100644
--- a/setup.py
+++ b/setup.py
@@ -22,26 +22,25 @@ from setuptools import find_packages, setup
 except ImportError:
     from distutils import find_packages, setup
 
-#
 def get_version():
     """
-Returns the version currently in development.
+    Returns the version currently in development.
 
-:return: (str) Version string
-:since: v0.0.1
+    :return: (str) Version string
+    :since: v0.0.1
     """
 
     return os.environ.get("ROOKIFY_VERSION", "0.0.0-dev")
-#
 
-_setup = { "version": get_version(),
-           "data_files": [ ( "docs", [ "LICENSE", "README.md" ] ) ],
-           "entry_points": { "console_scripts": [ "rookify = rookify.__main__:main" ] },
-           "test_suite": "tests"
-         }
+_setup = {
+    "version": get_version(),
+    "data_files": [("docs", [ "LICENSE", "README.md" ])],
+    "entry_points": {"console_scripts": ["rookify = rookify.__main__:main"]},
+    "test_suite": "tests"
+}
 
-_setup['package_dir'] = { "": "src" }
-_setup['packages'] = find_packages("src")
+_setup["package_dir"] = {"": "src"}
+_setup["packages"] = find_packages("src")
 
 setup(**_setup)
diff --git a/src/rookify/__main__.py b/src/rookify/__main__.py
index 1ea9c40..44c71fe 100644
--- a/src/rookify/__main__.py
+++ b/src/rookify/__main__.py
@@ -5,40 +5,46 @@ from types import MappingProxyType
 from .yaml import load_yaml, save_yaml
 
+
 def main():
     try:
         config = load_yaml("config.yaml")
     except FileNotFoundError as err:
-        raise SystemExit(f'Could not load config: {err}')
+        raise SystemExit(f"Could not load config: {err}")
 
-    migration_modules = rookify.modules.load_modules(config['migration_modules'])
+    preflight_modules, migration_modules = rookify.modules.load_modules(
+        config["migration_modules"]
+    )
 
     module_data = dict()
     try:
-        module_data.update(load_yaml(config['general']['module_data_file']))
+        module_data.update(load_yaml(config["general"]["module_data_file"]))
     except FileNotFoundError:
         pass
 
-    # Get a list of handlers and run handlers if they should be run in preflight
+    # Run preflight requirement modules
+    for preflight_module in preflight_modules:
+        handler = preflight_module.HANDLER_CLASS(
+            config=MappingProxyType(config), data=MappingProxyType(module_data)
+        )
+        result = handler.run()
+        module_data[preflight_module.MODULE_NAME] = result
+
+    # Run preflight checks and append handlers to list
     handlers = list()
-    for module in migration_modules:
-        handler = module.HANDLER_CLASS(config=MappingProxyType(config), data=MappingProxyType(module_data))
-        if module.RUN_IN_PREFLIGHT:
-            handler.preflight_check()
-            result = handler.run()
-            module_data[module.__name__] = result
-        else:
-            handlers.append((module, handler))
-
-    # Do preflight check of all other handlers
-    for module, handler in handlers:
+    for migration_module in migration_modules:
+        handler = migration_module.HANDLER_CLASS(
+            config=MappingProxyType(config), data=MappingProxyType(module_data)
+        )
         handler.preflight_check()
+        handlers.append((migration_module, handler))
 
-    # Run handlers
-    for module, handler in handlers:
+    # Run migration modules
+    for migration_module, handler in handlers:
         result = handler.run()
-        module_data[module.__name__] = result
+        module_data[migration_module.MODULE_NAME] = result
+
+    save_yaml(config["general"]["module_data_file"], module_data)
 
-    save_yaml(config['general']['module_data_file'], module_data)
 
 if __name__ == "__main__":
     main()
diff --git a/src/rookify/modules/__init__.py b/src/rookify/modules/__init__.py
index b048498..aaf0140 100644
--- a/src/rookify/modules/__init__.py
+++ b/src/rookify/modules/__init__.py
@@ -1,16 +1,17 @@
 # -*- coding: utf-8 -*-
-import functools
 import importlib
+import types
 
-from typing import Optional
 from collections import OrderedDict
 from .module import ModuleHandler
 
+
 class ModuleLoadException(Exception):
     """
     ModuleLoadException is an exception class that can be raised during the
     dynamic load process for modules.
     """
+
     def __init__(self, module_name: str, message: str):
         """
         Construct a new 'ModuleLoadException' object.
@@ -21,44 +22,86 @@ def __init__(self, module_name: str, message: str):
         self.module_name = module_name
         self.message = message
 
-def load_modules(module_names: list) -> list:
+
+def load_modules(module_names: list) -> tuple[list, list]:
     """
     Dynamically loads modules from the 'modules' package.
 
     :param module_names: The module names to load
+    :return: returns tuple of preflight_modules, modules
     """
 
+    # Sanity checks for modules
+    def check_module_sanity(module_name: str, module: types.ModuleType):
+        for attr_type, attr_name in (
+            (ModuleHandler, "HANDLER_CLASS"),
+            (str, "MODULE_NAME"),
+            (list, "REQUIRES"),
+            (list, "AFTER"),
+            (list, "PREFLIGHT_REQUIRES"),
+        ):
+            if not hasattr(module, attr_name):
+                raise ModuleLoadException(
+                    module_name, f"Module has no attribute {attr_name}"
+                )
+
+            attr = getattr(module, attr_name)
+            if not isinstance(attr, attr_type) and not issubclass(attr, attr_type):
+                raise ModuleLoadException(
+                    module_name, f"Attribute {attr_name} is not type {attr_type}"
+                )
+
     # Load the modules in the given list and recursivley load required modules
     required_modules = OrderedDict()
-    def load_required_modules(module_names: list, modules: OrderedDict) -> None:
+
+    def load_required_modules(modules_out: OrderedDict, module_names: list) -> None:
         for module_name in module_names:
-            if module_name in modules:
+            if module_name in modules_out:
                 continue
 
-            module = importlib.import_module(f"rookify.modules.{module_name}")
+            module = importlib.import_module(f".{module_name}", "rookify.modules")
+            check_module_sanity(module_name, module)
 
-            for attr_type, attr_name in (
-                    (ModuleHandler, 'HANDLER_CLASS'),
-                    (list, 'REQUIRES'),
-                    (list, 'AFTER'),
-                    (bool, 'RUN_IN_PREFLIGHT')
-            ):
-                if not hasattr(module, attr_name):
-                    raise ModuleLoadException(module_name, f'Module has no attribute {attr_name}')
+            load_required_modules(modules_out, module.REQUIRES)
+            module.AFTER.extend(module.REQUIRES)
 
-                attr = getattr(module, attr_name)
-                if not isinstance(attr, attr_type) and not issubclass(attr, attr_type):
-                    raise ModuleLoadException(module_name, f'Attribute {attr_name} is not type {attr_type}')
+            modules_out[module_name] = module
 
-            load_required_modules(module.REQUIRES, modules)
-            module.AFTER.extend(module.REQUIRES)
+    load_required_modules(required_modules, module_names)
 
-            modules[module_name] = module
-    load_required_modules(module_names, required_modules)
+    # Recursively load the modules in the PREFLIGHT_REQUIRES attribute of the given modules
+    preflight_modules = OrderedDict()
+
+    def load_preflight_modules(
+        modules_in: OrderedDict, modules_out: OrderedDict, module_names: list
+    ) -> None:
+        for module_name in module_names:
+            if module_name in modules_out:
+                continue
+
+            module = importlib.import_module(f".{module_name}", "rookify.modules")
+            check_module_sanity(module_name, module)
+
+            # We have to check, if the preflight_requires list is already loaded as migration requirement
+            for preflight_requirement in module.PREFLIGHT_REQUIRES:
+                if preflight_requirement in modules_in:
+                    raise ModuleLoadException(
+                        module_name,
+                        f"Module {preflight_requirement} is already loaded as migration requirement",
+                    )
+
+            load_preflight_modules(modules_in, modules_out, module.PREFLIGHT_REQUIRES)
+            if module_name not in modules_in:
+                modules_out[module_name] = module
+
+    load_preflight_modules(required_modules, preflight_modules, required_modules.keys())
 
     # Sort the modules by the AFTER keyword
     modules = OrderedDict()
-    def sort_modules(modules_in: OrderedDict, modules_out: OrderedDict, module_names: list) -> None:
+
+    def sort_modules(
+        modules_in: OrderedDict, modules_out: OrderedDict, module_names: list
+    ) -> None:
         for module_name in module_names:
             if module_name not in modules_in:
                 continue
@@ -70,6 +113,7 @@ def sort_modules(modules_in: OrderedDict, modules_out: OrderedDict, module_names
             sort_modules(modules_in, modules_out, after_modules_name)
 
             modules_out[module_name] = modules_in[module_name]
+
     sort_modules(required_modules, modules, list(required_modules.keys()))
 
-    return list(modules.values())
+    return list(preflight_modules.values()), list(modules.values())
diff --git a/src/rookify/modules/analyze_ceph/__init__.py b/src/rookify/modules/analyze_ceph/__init__.py
index 373b776..c3726e4 100644
--- a/src/rookify/modules/analyze_ceph/__init__.py
+++ b/src/rookify/modules/analyze_ceph/__init__.py
@@ -2,7 +2,8 @@
 
 from .main import AnalyzeCephHandler
 
+MODULE_NAME = "analyze_ceph"
 HANDLER_CLASS = AnalyzeCephHandler
-RUN_IN_PREFLIGHT = True
 REQUIRES = []
 AFTER = []
+PREFLIGHT_REQUIRES = []
diff --git a/src/rookify/modules/analyze_ceph/main.py b/src/rookify/modules/analyze_ceph/main.py
index f9042f5..4cb570c 100644
--- a/src/rookify/modules/analyze_ceph/main.py
+++ b/src/rookify/modules/analyze_ceph/main.py
@@ -1,24 +1,16 @@
 # -*- coding: utf-8 -*-
 
-import json
 from ..module import ModuleHandler
 
-class AnalyzeCephHandler(ModuleHandler):
 
+class AnalyzeCephHandler(ModuleHandler):
     def run(self) -> dict:
-
-        commands = [
-            'mon dump',
-            'osd dump',
-            'device ls',
-            'fs dump',
-            'node ls'
-        ]
+        commands = ["mon dump", "osd dump", "device ls", "fs dump", "node ls"]
 
         results = dict()
         for command in commands:
-            parts = command.split(' ')
+            parts = command.split(" ")
             leaf = results
             for idx, part in enumerate(parts):
                 if idx < len(parts) - 1:
@@ -27,12 +19,10 @@ def run(self) -> dict:
                     leaf[part] = self.ceph.mon_command(command)
                 leaf = leaf[part]
 
-        results['ssh'] = dict()
-        results['ssh']['osd'] = dict()
-        for node, values in results['node']['ls']['osd'].items():
-            devices = self.ssh.command(node, 'find /dev/ceph-*/*').stdout.splitlines()
-            results['ssh']['osd'][node] = {
-                'devices': devices
-            }
+        results["ssh"] = dict()
+        results["ssh"]["osd"] = dict()
+        for node, values in results["node"]["ls"]["osd"].items():
+            devices = self.ssh.command(node, "find /dev/ceph-*/*").stdout.splitlines()
+            results["ssh"]["osd"][node] = {"devices": devices}
 
         return results
diff --git a/src/rookify/modules/example/__init__.py b/src/rookify/modules/example/__init__.py
index fb9b8cb..8496365 100644
--- a/src/rookify/modules/example/__init__.py
+++ b/src/rookify/modules/example/__init__.py
@@ -2,7 +2,12 @@
 
 from .main import ExampleHandler
 
-HANDLER_CLASS = ExampleHandler # Define the handler class for this module
-RUN_IN_PREFLIGHT = False # This executes the run method during preflight checks. This is neccessary for analyze modules.
-REQUIRES = ['analyze_ceph'] # A list of modules that are required to run before this module. Modules in this list will be imported, even if they are not configured
-AFTER = ['migrate_monitors'] # A list of modules that should be run before this module, if they are defined in config
+MODULE_NAME = "example"  # Name of the module
+HANDLER_CLASS = ExampleHandler  # Define the handler class for this module
+REQUIRES = []  # A list of modules that are required to run before this module. Modules in this list will be imported, even if they are not configured
+AFTER = [
+    "migrate_monitors"
+]  # A list of modules that should be run before this module, if they are defined in config
+PREFLIGHT_REQUIRES = [
+    "analyze_ceph"
+]  # A list of modules that are required to run the preflight_check of this module. Modules in this list will be imported and run in preflight stage.
diff --git a/src/rookify/modules/example/main.py b/src/rookify/modules/example/main.py
index e3fa465..12f337f 100644
--- a/src/rookify/modules/example/main.py
+++ b/src/rookify/modules/example/main.py
@@ -2,11 +2,11 @@
 
 from ..module import ModuleHandler, ModuleException
 
-class ExampleHandler(ModuleHandler):
 
+class ExampleHandler(ModuleHandler):
     def preflight_check(self):
         # Do something for checking if all needed preconditions are met else throw ModuleException
-        raise ModuleException('Example module was loaded, so aborting!')
+        raise ModuleException("Example module was loaded, so aborting!")
 
     def run(self) -> dict:
         # Run the migration tasks
diff --git a/src/rookify/modules/migrate_monitors/__init__.py b/src/rookify/modules/migrate_monitors/__init__.py
index 2bd03a8..168ce94 100644
--- a/src/rookify/modules/migrate_monitors/__init__.py
+++ b/src/rookify/modules/migrate_monitors/__init__.py
@@ -2,7 +2,8 @@
 
 from .main import MigrateMonitorsHandler
 
+MODULE_NAME = "migrate_monitors"
 HANDLER_CLASS = MigrateMonitorsHandler
-RUN_IN_PREFLIGHT = False
-REQUIRES = ['analyze_ceph']
+REQUIRES = []
 AFTER = []
+PREFLIGHT_REQUIRES = ["analyze_ceph"]
diff --git a/src/rookify/modules/migrate_monitors/main.py b/src/rookify/modules/migrate_monitors/main.py
index 359c34c..367a7ca 100644
--- a/src/rookify/modules/migrate_monitors/main.py
+++ b/src/rookify/modules/migrate_monitors/main.py
@@ -2,5 +2,6 @@
 
 from ..module import ModuleHandler
 
+
 class MigrateMonitorsHandler(ModuleHandler):
     pass
diff --git a/src/rookify/modules/migrate_osds/__init__.py b/src/rookify/modules/migrate_osds/__init__.py
index 11f571f..f3b00b6 100644
--- a/src/rookify/modules/migrate_osds/__init__.py
+++ b/src/rookify/modules/migrate_osds/__init__.py
@@ -2,7 +2,8 @@
 
 from .main import MigrateOSDsHandler
 
+MODULE_NAME = "migrate_osds"
 HANDLER_CLASS = MigrateOSDsHandler
-RUN_IN_PREFLIGHT = False
-REQUIRES = ['analyze_ceph']
-AFTER = ['migrate_monitors']
+REQUIRES = []
+AFTER = ["migrate_monitors"]
+PREFLIGHT_REQUIRES = ["analyze_ceph"]
diff --git a/src/rookify/modules/migrate_osds/main.py b/src/rookify/modules/migrate_osds/main.py
index 141a23a..3b47496 100644
--- a/src/rookify/modules/migrate_osds/main.py
+++ b/src/rookify/modules/migrate_osds/main.py
@@ -1,35 +1,35 @@
 # -*- coding: utf-8 -*-
 
-from ..module import ModuleHandler, ModuleException
+from ..module import ModuleHandler
 
-class MigrateOSDsHandler(ModuleHandler):
 
+class MigrateOSDsHandler(ModuleHandler):
     def preflight_check(self):
-        result = self.ceph.mon_command('osd dump')
-        #raise ModuleException('test error')
+        pass
+        # result = self.ceph.mon_command("osd dump")
+        # raise ModuleException('test error')
 
     def run(self) -> dict:
         osd_config = dict()
 
-        for node, osds in self._data['modules.analyze_ceph']['node']['ls']['osd'].items():
-            osd_config[node] = {'osds': {}}
+        for node, osds in self._data["analyze_ceph"]["node"]["ls"]["osd"].items():
+            osd_config[node] = {"osds": {}}
             for osd in osds:
-                osd_config[node]['osds'][osd] = dict()
-
+                osd_config[node]["osds"][osd] = dict()
 
-        for osd in self._data['modules.analyze_ceph']['osd']['dump']['osds']:
-            number = osd['osd']
-            uuid = osd['uuid']
+        for osd in self._data["analyze_ceph"]["osd"]["dump"]["osds"]:
+            number = osd["osd"]
+            uuid = osd["uuid"]
             for host in osd_config.values():
-                if number in host['osds']:
-                    host['osds'][number]['uuid'] = uuid
+                if number in host["osds"]:
+                    host["osds"][number]["uuid"] = uuid
                     break
 
         for node, values in osd_config.items():
-            devices = self._data['modules.analyze_ceph']['ssh']['osd'][node]['devices']
-            for osd in values['osds'].values():
+            devices = self._data["analyze_ceph"]["ssh"]["osd"][node]["devices"]
+            for osd in values["osds"].values():
                 for device in devices:
-                    if osd['uuid'] in device:
-                        osd['device'] = device
+                    if osd["uuid"] in device:
+                        osd["device"] = device
                         break
 
         print(osd_config)
diff --git a/src/rookify/modules/module.py b/src/rookify/modules/module.py
index 7eef9a7..d2882b9 100644
--- a/src/rookify/modules/module.py
+++ b/src/rookify/modules/module.py
@@ -6,9 +6,11 @@ import kubernetes
 
 import fabric
 
+
 class ModuleException(Exception):
     pass
 
+
 class ModuleHandler:
     """
     ModuleHandler is an abstract class that modules have to extend.
@@ -17,27 +19,26 @@ class ModuleHandler:
     class __Ceph:
         def __init__(self, config: dict):
             try:
-                self.__ceph = rados.Rados(conffile=config['conf_file'], conf={'keyring': config['keyring']})
+                self.__ceph = rados.Rados(
+                    conffile=config["conf_file"], conf={"keyring": config["keyring"]}
+                )
                 self.__ceph.connect()
             except rados.ObjectNotFound as err:
-                raise ModuleException(f'Could not connect to ceph: {err}')
+                raise ModuleException(f"Could not connect to ceph: {err}")
 
         def mon_command(self, command: str, **kwargs) -> dict:
-            cmd = {
-                'prefix': command,
-                'format': 'json'
-            }
+            cmd = {"prefix": command, "format": "json"}
             cmd.update(kwargs)
-            result = self.__ceph.mon_command(json.dumps(cmd), b'')
+            result = self.__ceph.mon_command(json.dumps(cmd), b"")
             if result[0] != 0:
-                raise ModuleException(f'Ceph did return an error: {result}')
+                raise ModuleException(f"Ceph did return an error: {result}")
             return json.loads(result[1])
 
     class __K8s:
         def __init__(self, config: dict):
             k8s_config = kubernetes.client.Configuration()
-            k8s_config.api_key = config['api_key']
-            k8s_config.host = config['host']
+            k8s_config.api_key = config["api_key"]
+            k8s_config.host = config["host"]
 
             self.__client = kubernetes.client.ApiClient(k8s_config)
 
         @property
@@ -58,14 +59,22 @@ def __init__(self, config: dict):
 
         def command(self, host: str, command: str) -> fabric.runners.Result:
             try:
-                address = self.__config['hosts'][host]['address']
-                user = self.__config['hosts'][host]['user']
-                port = self.__config['hosts'][host]['port'] if 'port' in self.__config['hosts'][host] else 22
-                private_key = self.__config['private_key']
+                address = self.__config["hosts"][host]["address"]
+                user = self.__config["hosts"][host]["user"]
+                port = (
+                    self.__config["hosts"][host]["port"]
+                    if "port" in self.__config["hosts"][host]
+                    else 22
+                )
+                private_key = self.__config["private_key"]
             except KeyError as err:
-                raise ModuleException(f'Could not find settings for {host} in config: {err}')
-            connect_kwargs = {'key_filename': private_key}
-            result = fabric.Connection(address, user=user, port=port, connect_kwargs=connect_kwargs).run(command, hide=True)
+                raise ModuleException(
+                    f"Could not find settings for {host} in config: {err}"
+                )
+            connect_kwargs = {"key_filename": private_key}
+            result = fabric.Connection(
+                address, user=user, port=port, connect_kwargs=connect_kwargs
+            ).run(command, hide=True)
             return result
 
     def __init__(self, config: dict, data: dict):
@@ -99,18 +108,18 @@ def run(self) -> dict:
 
     @property
     def ceph(self) -> __Ceph:
-        if self.__ceph == None:
-            self.__ceph = self.__Ceph(self._config['ceph'])
+        if self.__ceph is None:
+            self.__ceph = self.__Ceph(self._config["ceph"])
         return self.__ceph
 
     @property
    def k8s(self) -> __K8s:
-        if self.__k8s == None:
-            self.__k8s = self.__K8s(self._config['kubernetes'])
+        if self.__k8s is None:
+            self.__k8s = self.__K8s(self._config["kubernetes"])
         return self.__k8s
 
     @property
     def ssh(self) -> __SSH:
-        if self.__ssh == None:
-            self.__ssh = self.__SSH(self._config['ssh'])
+        if self.__ssh is None:
+            self.__ssh = self.__SSH(self._config["ssh"])
         return self.__ssh
diff --git a/src/rookify/yaml.py b/src/rookify/yaml.py
index 44d6e49..9cf6ad7 100644
--- a/src/rookify/yaml.py
+++ b/src/rookify/yaml.py
@@ -2,10 +2,12 @@
 
 import yaml
 
+
 def load_yaml(path: str) -> dict:
-    with open(path, 'r') as file:
+    with open(path, "r") as file:
         return yaml.safe_load(file)
 
+
 def save_yaml(path: str, data: dict) -> None:
-    with open(path, 'w') as file:
+    with open(path, "w") as file:
         yaml.safe_dump(data, file)
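
Note on configuration: the code in this changeset reads its settings from `config.yaml` (loaded via `load_yaml()` in `__main__.py`) under the `general`, `migration_modules`, `ceph`, `kubernetes` and `ssh` sections. A minimal sketch of such a file, collected from the keys actually accessed above — host names, paths and credentials are illustrative placeholders, not values taken from the repository:

```yaml
general:
  module_data_file: data.yaml        # state written back via save_yaml()

migration_modules:                   # passed to rookify.modules.load_modules()
  - migrate_monitors
  - migrate_osds

ceph:                                # used by ModuleHandler.__Ceph (librados)
  conf_file: /etc/ceph/ceph.conf
  keyring: /etc/ceph/ceph.client.admin.keyring

kubernetes:                          # used by ModuleHandler.__K8s
  host: https://kubernetes.example:6443
  api_key: REPLACE_ME

ssh:                                 # used by ModuleHandler.__SSH (fabric)
  private_key: /home/migrator/.ssh/id_rsa
  hosts:
    osd-node-1:
      address: 192.0.2.10
      user: ceph
      port: 22                       # optional; command() falls back to 22
```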
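
Note on the module contract: `check_module_sanity()` now requires every module package to export `MODULE_NAME`, `HANDLER_CLASS`, `REQUIRES`, `AFTER` and `PREFLIGHT_REQUIRES`, and `load_modules()` returns two lists instead of one. A short sketch of how a caller sees this — it assumes the package and its dependencies (rados, kubernetes, fabric) are importable, and uses `migrate_osds` purely as an example:

```python
import rookify.modules

# load_modules() imports the requested packages (plus their REQUIRES and
# PREFLIGHT_REQUIRES) and returns them as (preflight_modules, migration_modules).
preflight_modules, migration_modules = rookify.modules.load_modules(["migrate_osds"])

for module in preflight_modules + migration_modules:
    # MODULE_NAME is the key under which __main__.py stores the handler's run()
    # result in module_data; HANDLER_CLASS is the ModuleHandler subclass to run.
    print(module.MODULE_NAME, "->", module.HANDLER_CLASS.__name__)
```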