diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..824182b --- /dev/null +++ b/.dockerignore @@ -0,0 +1,166 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm.fming.dev/latest/usage/project/#working-with-version-control +.pdm.toml +.pdm-python +.pdm-build/ + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. 
+#.idea/ + + +/workers +/rpaScripts \ No newline at end of file diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml new file mode 100644 index 0000000..16a4bbc --- /dev/null +++ b/.github/workflows/docker.yml @@ -0,0 +1,35 @@ +name: Build and Publish Docker Image + +on: + push: + branches: [ main ] + pull_request: + branches: [ main ] + +jobs: + build-and-publish: + runs-on: ubuntu-latest + permissions: + contents: read + packages: write + + steps: + + - name: Log in to GitHub Container Registry + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Set up QEMU + uses: docker/setup-qemu-action@v3 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Build and Push Image + uses: docker/build-push-action@v5 + with: + push: ${{ !github.event.pull_request.head.repo.fork }} + tags: ghcr.io/${{ github.repository_owner }}/prototype-rpa-worker:main diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..824182b --- /dev/null +++ b/.gitignore @@ -0,0 +1,166 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm.fming.dev/latest/usage/project/#working-with-version-control +.pdm.toml +.pdm-python +.pdm-build/ + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. +#.idea/ + + +/workers +/rpaScripts \ No newline at end of file diff --git a/Camunda/Camunda.py b/Camunda/Camunda.py new file mode 100644 index 0000000..e327386 --- /dev/null +++ b/Camunda/Camunda.py @@ -0,0 +1,76 @@ +import yaml +import os +import mimetypes +import base64 + +from robot.api.deco import keyword + + +class Camunda: + ROBOT_LIBRARY_SCOPE = "GLOBAL" + ROBOT_LISTENER_API_VERSION = 2 + + def __init__(self): + self.ROBOT_LIBRARY_LISTENER = self + self.outputs = {} + + @keyword(name="Set Output Variable") + def set_output_variable(self, name, value): + """ + Stores the value in-memory with the name as key. + """ + self.outputs[name] = value + + @keyword(name="Set Output File") + def set_output_file(self, name, path): + """ + Parses the file at `path` using `create_file_object` and stores the return value in-memory with the name as a key. + """ + try: + # Assuming create_file_object is a function you've written elsewhere + file_object = create_file_object(path) + self.outputs[name] = file_object + except Exception as e: + raise Exception(f"Failed to parse file and set output: {e}") + + def _write_outputs_to_file(self): + """ + Writes the current state of self.outputs to 'outputs.yml'. + """ + with open("outputs.yml", "w", encoding="UTF8") as outfile: + yaml.dump(self.outputs, outfile, default_flow_style=False) + + def _close(self): + """ + A listener method that is called after the test suite has finished execution. + """ + self._write_outputs_to_file() + + +def create_file_object(path): + # This is a placeholder for the actual file parsing logic. + # Replace this with the actual implementation. + print(f"create_file_object {path}") + + if os.path.isfile(path): + file_name = os.path.basename(path) + file_name = file_name.replace(".", "_") + mime_type, _ = mimetypes.guess_type(path) + if mime_type and mime_type.startswith("image/"): + content = file_to_data_url(path) + else: + with open(path, "r", encoding="utf-8", errors="ignore") as f: + content = f.read() + return content + + +def file_to_data_url(file_path): + """ + Convert an image file to a data URL. 
+    """
+    print(file_path)
+    mime_type, _ = mimetypes.guess_type(file_path)
+    with open(file_path, "rb") as f:
+        encoded_string = base64.b64encode(f.read()).decode("utf-8")
+    data_url = f"data:{mime_type};base64,{encoded_string}"
+    return data_url
diff --git a/Camunda/__init__.py b/Camunda/__init__.py
new file mode 100644
index 0000000..6b8387f
--- /dev/null
+++ b/Camunda/__init__.py
@@ -0,0 +1 @@
+from .Camunda import Camunda
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..c180be8
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,55 @@
+# Use an official Python runtime as a parent image
+FROM python:3.10-slim
+
+# Set the working directory in the container
+WORKDIR /usr/src/app
+
+# Install xvfb and other dependencies you might need
+RUN apt-get update && apt-get install -y \
+    xvfb \
+    nodejs \
+    npm \
+    libnss3 \
+    libnspr4 \
+    libatk1.0-0 \
+    libatk-bridge2.0-0 \
+    libcups2 \
+    libatspi2.0-0 \
+    libxdamage1 \
+    libxkbcommon0 \
+    libpango-1.0-0 \
+    libcairo2 \
+    libasound2 \
+    && rm -rf /var/lib/apt/lists/*
+
+# Copy the current directory contents into the container at /usr/src/app
+COPY . .
+
+# Install any needed packages specified in requirements.txt
+RUN pip install --no-cache-dir -r requirements.txt --no-deps
+RUN rfbrowser init
+
+# Make port 36227 available to the world outside this container
+EXPOSE 36227
+
+# Define environment variables
+# Worker configuration
+ENV ENABLE_WORKER=true
+ENV ZEEBE_CLIENT_ID=ID
+ENV ZEEBE_CLIENT_SECRET=SECRET
+ENV CAMUNDA_CLUSTER_ID=CLUSTER_ID
+ENV PYTHONUNBUFFERED=1
+
+# Local development configuration
+ENV ENABLE_REST=true
+ENV PORT=36227
+# Bind http server to 0.0.0.0 so it accepts connections from outside the container
+ENV HOST=0.0.0.0
+
+# Setup XVFB-Server to run headless
+COPY entrypoint.sh /usr/local/bin/
+RUN chmod +x /usr/local/bin/entrypoint.sh
+ENTRYPOINT ["entrypoint.sh"]
+
+# Run worker.py when the container launches
+CMD ["python", "./worker.py"]
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..27ccc12
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,65 @@
+Camunda License 1.0
+
+The Camunda License (the “License”) sets forth the terms and conditions
+under which Camunda Services GmbH ("the Licensor") grants You a license
+solely to the source code in this repository ("the Software").
+
+Acceptance
+By Using the Software, You agree to all the terms and conditions below.
+If Your Use of the Software does not comply with the terms and conditions
+described in this License, You must purchase a commercial license from the
+Licensor, its affiliated entities, or authorized resellers, or You must
+refrain from Using the Software. If You receive the Software in original or
+modified form from a third party, the terms and conditions outlined in this
+License apply to Your Use of that Software. You should have received a copy
+of this License in this case.
+
+Copyright License
+Subject to the terms and conditions of this License, the Licensor hereby grants
+You the non-exclusive, royalty-free, worldwide, non-sublicensable, non-transferable
+right to Use the Software in any way or manner that would otherwise infringe the
+Licensor’s copyright as long and insofar as You Use the Software only and limited
+to the Use in or for the purpose of Using the Software in Non-Production Environment.
+Each time you distribute or make otherwise publicly available the Software or
+Derivative Works thereof, the recipient automatically receives a license from
+the original Licensor to the respective Software or Derivative Works thereof
+under the terms of this License.
+
+Conditions and Restrictions
+All Use of the Software is explicitly made subject to the following conditions:
+ * You may not move, change, disable, or circumvent the license key functionality
+   in the Software, and You may not remove or obscure any functionality in the
+   Software that is protected by the license key.
+ * If You distribute or make available the Software or any modification or Derivative
+   Works thereof (including compiled versions), You must conspicuously display and
+   attach this License on each original or modified copy of the Software and enable
+   the recipient to obtain the source code if You have distributed a compiled version
+
+
+Patent License
+Patent and trademark rights are not licensed under this Public License.
+
+
+No Liability
+EXCEPT FOR DAMAGES CAUSED BY INTENT OR FRAUDULENTLY CONCEALED DEFECTS, AND EXCEPT FOR
+DAMAGES RESULTING FROM BREACH OF ANY WARRANTY OR GUARANTEE EXPRESSLY GIVEN BY LICENSOR
+IN THIS LICENCE, IN NO EVENT WILL LICENSOR BE LIABLE TO YOU ON ANY LEGAL THEORY FOR ANY
+DAMAGES ARISING OUT OF THIS LICENSE OR THE USE OF THE WORK. ANY MANDATORY STATUTORY
+LIABILITY UNDER APPLICABLE LAW REMAINS UNAFFECTED.
+
+No Warranty
+EXCEPT AS EXPRESSLY STATED IN THIS LICENSE OR REQUIRED BY APPLICABLE LAW, THE WORKS ARE
+PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND INCLUDING WITHOUT LIMITATION,
+ANY WARRANTIES REGARDING THE CONTENTS, ACCURACY, OR FITNESS FOR A PARTICULAR PURPOSE.
+
+
+Definitions
+You refer to the individual or entity agreeing to these terms.
+Use means any action concerning the Software that, without permission, would make You liable
+for infringement under applicable copyright law. Use within the meaning of this License
+includes, but is not limited to, copying, distribution (with or without modification),
+making available to the public, and modifying the Software.
+
+Non-Production Environment means a setting in which the Software is used for development, staging,
+testing, quality assurance, demonstration, or evaluation purposes, and not for any live or
+production systems.
\ No newline at end of file
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..2765a47
--- /dev/null
+++ b/README.md
@@ -0,0 +1,109 @@
+> [!NOTE]
+> **Experimental Features Notice:** The code in this repository is in the experimental stage and may be unstable. Use at your own risk and avoid usage in production environments.
+
+# Camunda RPA Runtime
+
+The Camunda RPA Runtime is designed to bridge the gap between Robotic Process Automation (RPA) bots and the Camunda workflow engine, enabling you to seamlessly integrate your task workers with Camunda.
+
+## Getting Started
+
+This guide will help you set up the Camunda RPA Runtime in your development environment.
+
+### Environment Configuration
+
+To connect your task worker to the Camunda Cloud, you must set the following environment variables with appropriate values. The required scopes for this client are `Zeebe` and `Secrets`:
+
+- `ZEEBE_CLIENT_ID`: Your Zeebe client ID.
+- `ZEEBE_CLIENT_SECRET`: Your Zeebe client secret.
+- `CAMUNDA_CLUSTER_ID`: Your Camunda Cloud cluster ID.
+- `CAMUNDA_CLUSTER_REGION`: The region of your cluster (for example, `bru-2`).
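For local development, these values can live in a `.env` file in the repository root: `worker.py` loads it via `python-dotenv`, and `setup.ps1` copies the bundled `dev.env` template to `.env`. A minimal sketch with placeholder values (replace them with your own credentials):

```sh
ZEEBE_CLIENT_ID=<client-id>
ZEEBE_CLIENT_SECRET=<client-secret>
CAMUNDA_CLUSTER_ID=<cluster-id>
CAMUNDA_CLUSTER_REGION=bru-2
```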
+
+### Installation
+
+### On Windows
+
+- Clone or download the repository
+- Run `setup.ps1`. If you cannot run the file, set your execution policy to allow remote scripts with `Set-ExecutionPolicy RemoteSigned`
+- Add your credentials to `.env`
+- Run `start.ps1`
+
+### Running with Python
+
+#### Prerequisites
+
+Before you begin, ensure you have the following prerequisites installed on your system:
+
+- Python 3.10 or higher
+- pip 24 or higher
+
+To install the required dependencies for the Camunda RPA Runtime, run the following command:
+
+```sh
+pip install --no-cache-dir -r requirements.txt --no-deps
+```
+This will install all the necessary Python packages as specified in the `requirements.txt` file.
+
+##### Windows
+
+On Windows, you might need to update pywin32:
+
+```sh
+pip install --upgrade pywin32
+```
+
+##### Browser Automation
+
+If you want to use browser automation, install the required browsers with:
+
+```sh
+rfbrowser init
+```
+
+#### Running the Worker
+With all dependencies installed and environment variables set, you can now run the task worker using the command below:
+
+```sh
+python worker.py
+```
+This will start the RPA Runtime worker, and it will begin listening for jobs from the Camunda Cloud.
+
+### Running from Docker
+
+If you prefer to run the Camunda RPA Runtime using Docker, you can pull the Docker image from the GitHub Container Registry (ghcr.io).
+
+To run the RPA Runtime from Docker, execute the following command:
+
+```sh
+docker run -e ZEEBE_CLIENT_ID=<client-id> -e ZEEBE_CLIENT_SECRET=<client-secret> -e CAMUNDA_CLUSTER_ID=<cluster-id> -v /path/to/your/scripts:/usr/src/app/rpaScripts -p 36227:36227 ghcr.io/camunda/prototype-rpa-worker:main
+```
+
+If you prefer to use Docker Compose, you can create a `docker-compose.yml` file with the following content:
+
+```yaml
+version: '3'
+services:
+  rpa-worker:
+    image: ghcr.io/camunda/prototype-rpa-worker:main
+    environment:
+      - ZEEBE_CLIENT_ID=<client-id>
+      - ZEEBE_CLIENT_SECRET=<client-secret>
+      - CAMUNDA_CLUSTER_ID=<cluster-id>
+    volumes:
+      - /path/to/your/scripts:/usr/src/app/rpaScripts
+    ports:
+      - 36227:36227
+```
+
+To run the RPA Runtime using Docker Compose, execute the following command:
+
+```sh
+docker-compose up
+```
+
+Make sure to replace `<client-id>`, `<client-secret>`, `<cluster-id>`, and `/path/to/your/scripts` with the appropriate values for your setup.
+
+This command will start the RPA Runtime worker using the specified environment variables and mount your scripts into the `/usr/src/app/rpaScripts` directory in the container.
+
+## License
+
+These source files are made available under the [Camunda License Version 1.0](/LICENSE).
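When `ENABLE_REST=true` (the default in the Docker image and in `dev.env`), the runtime also serves a small REST interface on port 36227, implemented in `scripts/rest.py`. A quick smoke test against a running container or local worker might look like this, assuming the default port:

```sh
curl http://localhost:36227/status
# expected response: {"status":"up"}
```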
diff --git a/bpmn/diagram.bpmn b/bpmn/diagram.bpmn new file mode 100644 index 0000000..11c47af --- /dev/null +++ b/bpmn/diagram.bpmn @@ -0,0 +1,49 @@ + + + + + + + + + + Flow_1n7srsa + + + + Flow_0iyidoo + + + + + + + + + + Flow_1n7srsa + Flow_0iyidoo + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/dev.env b/dev.env new file mode 100644 index 0000000..a4be713 --- /dev/null +++ b/dev.env @@ -0,0 +1,14 @@ +# Worker configuration +ENABLE_WORKER=true +ZEEBE_CLIENT_ID=ID +ZEEBE_CLIENT_SECRET=SECRET +CAMUNDA_CLUSTER_ID=CLUSTER_ID +CAMUNDA_CLUSTER_REGION=bru-2 + +# Local development configuration +ENABLE_REST=true +PORT=36227 + +# Worker configuration +MAX_PARALLEL_EXECUTIONS=1 +TIMEOUT_SECONDS=500 \ No newline at end of file diff --git a/entrypoint.sh b/entrypoint.sh new file mode 100644 index 0000000..4eeff12 --- /dev/null +++ b/entrypoint.sh @@ -0,0 +1,4 @@ +#!/bin/bash +Xvfb :99 -screen 0 1024x768x24 & +export DISPLAY=:99 +exec "$@" diff --git a/prod.env b/prod.env new file mode 100644 index 0000000..98d4878 --- /dev/null +++ b/prod.env @@ -0,0 +1,14 @@ +# Worker configuration +ENABLE_WORKER=true +ZEEBE_CLIENT_ID=ID +ZEEBE_CLIENT_SECRET=SECRET +CAMUNDA_CLUSTER_ID=CLUSTER_ID +CAMUNDA_CLUSTER_REGION=bru-2 + +# Local development configuration +ENABLE_REST=false +PORT=36227 + +# Worker configuration +MAX_PARALLEL_EXECUTIONS=1 +TIMEOUT_SECONDS=60 \ No newline at end of file diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..29ce5d1 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,171 @@ +aiofiles==24.1.0 +amazon-textract-response-parser==0.1.48 +attrs==24.1.0 +beautifulsoup4==4.12.3 +blinker==1.8.2 +boto3==1.34.153 +botocore==1.34.153 +cached-property==1.5.2 +cachetools==5.4.0 +certifi==2024.8.30 +cffi==1.16.0 +chardet==3.0.4 +charset-normalizer==3.3.2 +click==8.1.7 +convertdate==2.4.0 +cryptography==42.0.8 +decorator==5.1.1 +defusedxml==0.7.1 +dnspython==2.6.1 +docutils==0.21.2 +et-xmlfile==1.1.0 +exceptiongroup==1.2.2 +exchangelib==5.4.2 +Flask==3.0.3 +fonttools==4.53.1 +fpdf2==2.7.5 +furl==2.1.3 +google-api-core==2.19.1 +google-api-python-client==2.123.0 +google-auth==2.32.0 +google-auth-httplib2==0.1.1 +google-auth-oauthlib==0.5.3 +google-cloud-core==2.4.1 +google-cloud-documentai==2.30.0 +google-cloud-language==2.14.0 +google-cloud-speech==2.27.0 +google-cloud-storage==2.18.0 +google-cloud-texttospeech==2.16.5 +google-cloud-translate==3.15.5 +google-cloud-videointelligence==2.13.5 +google-cloud-vision==3.7.4 +google-crc32c==1.5.0 +google-resumable-media==2.7.1 +googleapis-common-protos==1.63.2 +graphviz==0.13.2 +grpcio==1.66.1 +grpcio-status==1.64.1 +grpcio-tools==1.64.1 +h11==0.14.0 +h2==4.1.0 +hijri-converter==2.3.1 +holidays==0.21.13 +hpack==4.0.0 +htmldocx==0.0.6 +httplib2==0.22.0 +Hypercorn==0.17.3 +hyperframe==6.0.1 +idna==3.8 +importlib_metadata==8.2.0 +isodate==0.6.1 +itsdangerous==2.2.0 +java-access-bridge-wrapper==1.2.0 +Jinja2==3.1.4 +jmespath==1.0.1 +jsonpath-ng==1.6.1 +jsonschema==4.23.0 +jsonschema-specifications==2023.12.1 +korean-lunar-calendar==0.3.1 +lxml==5.2.2 +MarkupSafe==2.1.5 +marshmallow==3.21.3 +more-itertools==10.3.0 +mss==6.1.0 +netsuitesdk==1.24.0 +notifiers==1.3.3 +numpy==1.26.4 +O365==2.0.26 +oauthlib==3.2.2 +opencv-python-headless==4.8.1.78 +openpyxl==3.1.5 +orderedmultidict==1.0.1 +outcome==1.3.0.post0 +overrides==7.7.0 +packaging==24.1 +pdfminer.six==20221105 +pendulum==2.1.2 +pillow==10.4.0 +platformdirs==4.2.2 +ply==3.11 +priority==2.0.0 +proto-plus==1.24.0 +protobuf==4.25.4 +pyasn1==0.6.0 +pyasn1_modules==0.4.0 
+pycparser==2.22 +Pygments==2.18.0 +PyJWT==2.9.0 +PyMeeus==0.5.12 +pynput-robocorp-fork==5.0.0 +pyotp==2.9.0 +pyparsing==3.1.2 +pypdf==3.17.4 +pyperclip==1.9.0 +PySocks==1.7.1 +pyspnego==0.11.1 +pytesseract==0.3.10 +python-dateutil==2.9.0.post0 +python-docx==0.8.11 +python-dotenv==1.0.1 +python-xlib==0.33 +pytz==2024.1 +pytz-deprecation-shim==0.1.0.post0 +pytzdata==2020.1 +PyYAML==6.0.1 +pyzeebe==4.0.0rc3 +Quart==0.19.6 +referencing==0.35.1 +requests==2.31.0 +requests-file==2.1.0 +requests-oauthlib==2.0.0 +requests-toolbelt==1.0.0 +requests_ntlm==1.3.0 +robocorp-storage==1.0.4 +robotframework==6.1.1 +robotframework-assertion-engine==3.0.3 +robotframework-browser==18.6.3 +robotframework-pythonlibcore==4.4.1 +robotframework-requests==0.9.7 +robotframework-seleniumlibrary==6.5.0 +robotframework-seleniumtestability==2.1.0 +rpaframework==28.6.1 +rpaframework-aws==5.3.3 +rpaframework-core==11.3.2 +rpaframework-google==9.0.1 +rpaframework-pdf==7.3.2 +rpaframework-recognition==5.2.4 +rpds-py==0.19.1 +rsa==4.9 +s3transfer==0.10.2 +selenium==4.15.2 +simple-salesforce==1.12.6 +six==1.16.0 +smartsheet-python-sdk==3.0.4 +sniffio==1.3.1 +sortedcontainers==2.4.0 +soupsieve==2.5 +stringcase==1.2.0 +taskgroup==0.0.0a4 +tenacity==8.5.0 +tomli==2.0.1 +trio==0.26.1 +trio-websocket==0.11.1 +truststore==0.9.1 +tweepy==3.10.0 +typing_extensions==4.12.2 +tzdata==2024.1 +tzlocal==4.3.1 +uritemplate==4.1.1 +urllib3==2.2.2 +validators==0.20.0 +watchdog==5.0.2 +webdriver-manager==4.0.1 +Werkzeug==3.0.4 +wrapt==1.16.0 +wsproto==1.2.0 +xlrd==2.0.1 +xlutils==2.0.0 +xlwt==1.3.0 +zeebe-grpc==8.4.0 +zeep==4.2.1 +zipp==3.19.2 diff --git a/rpaFiles/RPA_challenge.robot b/rpaFiles/RPA_challenge.robot new file mode 100644 index 0000000..6ba3bba --- /dev/null +++ b/rpaFiles/RPA_challenge.robot @@ -0,0 +1,53 @@ +*** Settings *** +Documentation Robot to solve the first challenge at rpachallenge.com, +... which consists of filling a form that randomly rearranges +... itself for ten times, with data taken from a provided +... Microsoft Excel file. + +Library RPA.Browser.Playwright +Library RPA.Excel.Files +Library RPA.HTTP + + +*** Tasks *** +Complete the challenge + Start the challenge + Fill the forms + Collect the results + + +*** Keywords *** +Start the challenge + New Browser + New Page http://rpachallenge.com/ + RPA.HTTP.Download + ... http://rpachallenge.com/assets/downloadFiles/challenge.xlsx + ... 
overwrite=True + Click button + +Fill the forms + ${people}= Get the list of people from the Excel file + FOR ${person} IN @{people} + Fill and submit the form ${person} + END + +Get the list of people from the Excel file + Open Workbook challenge.xlsx + ${table}= Read Worksheet As Table header=True + Close Workbook + RETURN ${table} + +Fill and submit the form + [Arguments] ${person} + Fill Text //input[@ng-reflect-name="labelFirstName"] ${person}[First Name] + Fill Text //input[@ng-reflect-name="labelLastName"] ${person}[Last Name] + Fill Text //input[@ng-reflect-name="labelCompanyName"] ${person}[Company Name] + Fill Text //input[@ng-reflect-name="labelRole"] ${person}[Role in Company] + Fill Text //input[@ng-reflect-name="labelAddress"] ${person}[Address] + Fill Text //input[@ng-reflect-name="labelEmail"] ${person}[Email] + Fill Text //input[@ng-reflect-name="labelPhone"] ${person}[Phone Number] + Click input[type=submit] + +Collect the results + Take Screenshot %{ROBOT_ARTIFACTS}${/}result.png selector=css=div.congratulations + Close Browser diff --git a/rpaFiles/TestOutput.robot b/rpaFiles/TestOutput.robot new file mode 100644 index 0000000..4fd3161 --- /dev/null +++ b/rpaFiles/TestOutput.robot @@ -0,0 +1,7 @@ +*** Settings *** +Library Camunda + +*** Test Cases *** +Example Test Case + Set Output Variable exampleVar ExampleValue + Set Output File exampleFile /path/to/file diff --git a/rpaFiles/fill_form.robot b/rpaFiles/fill_form.robot new file mode 100644 index 0000000..e499083 --- /dev/null +++ b/rpaFiles/fill_form.robot @@ -0,0 +1,35 @@ +*** Settings *** +Documentation Robot to take input and fill in a form +Library RPA.Browser.Playwright +MetaData Version 1.0 +MetaData Name Fills a Form with data from a process Variable + +*** Variables *** +&{person} firstName=John lastName=Doe companyName=ACME role=CEO address=123 Main St email=foo@bar.com phone=1234567890 + + +*** Tasks *** +Complete the challenge + Start the challenge + Fill the form + Collect the results + + +*** Keywords *** +Start the challenge + New Browser + New Page http://rpachallenge.com/ + Click button + +Fill the form + Fill Text //input[@ng-reflect-name="labelFirstName"] ${person.firstName} + Fill Text //input[@ng-reflect-name="labelLastName"] ${person.lastName} + Fill Text //input[@ng-reflect-name="labelCompanyName"] ${person.companyName} + Fill Text //input[@ng-reflect-name="labelRole"] ${person.role} + Fill Text //input[@ng-reflect-name="labelAddress"] ${person.address} + Fill Text //input[@ng-reflect-name="labelEmail"] ${person.email} + Fill Text //input[@ng-reflect-name="labelPhone"] ${person.phone} + +Collect the results + Take Screenshot %{ROBOT_ARTIFACTS}${/}result selector=css=div.inputFields + Close Browser diff --git a/rpaFiles/log_to_console.robot b/rpaFiles/log_to_console.robot new file mode 100644 index 0000000..effb1c9 --- /dev/null +++ b/rpaFiles/log_to_console.robot @@ -0,0 +1,9 @@ +*** Settings *** +Library BuiltIn + +*** Variables *** +${processVariable} Default Value # You can set a default value here + +*** Test Cases *** +Echo Input Variable + Log To Console The input variable is: ${processVariable.bar} diff --git a/scripts/MultiStream.py b/scripts/MultiStream.py new file mode 100644 index 0000000..72b6bbe --- /dev/null +++ b/scripts/MultiStream.py @@ -0,0 +1,12 @@ +class MultiStream(object): + def __init__(self, *streams): + self.streams = streams + + def write(self, data): + for stream in self.streams: + stream.write(data) + stream.flush() # Ensure data is written out immediately + + def 
flush(self):
+        for stream in self.streams:
+            stream.flush()
diff --git a/scripts/Secrets/Secrets.py b/scripts/Secrets/Secrets.py
new file mode 100644
index 0000000..79c8d26
--- /dev/null
+++ b/scripts/Secrets/Secrets.py
@@ -0,0 +1,66 @@
+import requests
+import json
+import os
+
+
+class SecretsManager:
+
+    def __init__(
+        self,
+        base_url="https://cluster-api.cloud.camunda.io/secrets",
+        client_id=None,
+        client_secret=None,
+    ):
+        self.base_url = base_url
+        self.client_id = client_id or os.getenv("ZEEBE_CLIENT_ID")
+        self.client_secret = client_secret or os.getenv("ZEEBE_CLIENT_SECRET")
+        self.cache = {}  # Simple dictionary for caching secrets
+        self.token_url = "https://login.cloud.camunda.io/oauth/token"
+        self.token = None
+
+    def _get_oauth_token(self):
+        """Retrieve OAuth token for authentication."""
+        payload = {
+            "client_id": self.client_id,
+            "client_secret": self.client_secret,
+            "audience": "secrets.camunda.io",
+            "grant_type": "client_credentials",
+        }
+        headers = {"content-type": "application/json"}
+
+        response = requests.post(
+            self.token_url, data=json.dumps(payload), headers=headers
+        )
+
+        if response.status_code == 200:
+            self.token = response.json()["access_token"]
+            print("Access token retrieved successfully.")
+        else:
+            response.raise_for_status()
+
+    def _fetch_secrets(self):
+        """Fetch secrets from the endpoint and return them, using the OAuth token for authentication."""
+        # Ensure we have a valid token before making the request
+        if not self.token:
+            self._get_oauth_token()
+
+        url = f"{self.base_url}"
+        headers = {"Authorization": f"Bearer {self.token}"}
+        response = requests.get(url, headers=headers)
+        response.raise_for_status()
+        return response.json()
+
+    def get_secrets(self):
+        """Get secrets, using the cache if available."""
+        if self.cache:
+            print("Using cached secrets.")
+            return self.cache
+
+        try:
+            secrets = self._fetch_secrets()
+        except Exception as e:
+            print(f"Failed to fetch secrets: {e}")
+            print("Ensure the client ID and secret have the `Secrets` scope.")
+            secrets = {}
+        self.cache = secrets
+        return secrets
diff --git a/scripts/Secrets/__init__.py b/scripts/Secrets/__init__.py
new file mode 100644
index 0000000..b29d3c1
--- /dev/null
+++ b/scripts/Secrets/__init__.py
@@ -0,0 +1 @@
+from .Secrets import SecretsManager
diff --git a/scripts/collectResults.py b/scripts/collectResults.py
new file mode 100644
index 0000000..f0c4c30
--- /dev/null
+++ b/scripts/collectResults.py
@@ -0,0 +1,37 @@
+import os
+import base64
+import mimetypes
+from glob import glob
+
+
+def file_to_data_url(file_path):
+    """
+    Convert an image file to a data URL.
+    """
+    mime_type, _ = mimetypes.guess_type(file_path)
+    with open(file_path, "rb") as f:
+        encoded_string = base64.b64encode(f.read()).decode("utf-8")
+    data_url = f"data:{mime_type};base64,{encoded_string}"
+    return data_url
+
+
+def find_files_and_create_object(start_path=os.getcwd()):
+    """
+    Finds all files within a folder called 'robot_artifacts' and creates an object with
+    the structure fileName: content. Sets the content as a data URL for images.
+ """ + artifacts = {} + pattern = os.path.join(start_path, "**/robot_artifacts/**/*") + + for file_path in glob(pattern, recursive=True): + if os.path.isfile(file_path): + file_name = os.path.basename(file_path) + file_name = file_name.replace(".", "_") + mime_type, _ = mimetypes.guess_type(file_path) + if mime_type and mime_type.startswith("image/"): + content = file_to_data_url(file_path) + else: + with open(file_path, "r", encoding="utf-8", errors="ignore") as f: + content = f.read() + artifacts[file_name] = content + return artifacts diff --git a/scripts/constants.py b/scripts/constants.py new file mode 100644 index 0000000..88d1508 --- /dev/null +++ b/scripts/constants.py @@ -0,0 +1,13 @@ +import os + +RPA_SCRIPTS_FOLDER_NAME = "rpaScripts" +RPA_SCRIPTS_FOLDER = os.path.join(os.getcwd(), RPA_SCRIPTS_FOLDER_NAME) + +WORKER_FOLDER_NAME = "workers" +WORKER_FOLDER = os.path.join(os.getcwd(), WORKER_FOLDER_NAME) + +if not os.path.exists(RPA_SCRIPTS_FOLDER): + os.makedirs(RPA_SCRIPTS_FOLDER) + +if not os.path.exists(WORKER_FOLDER): + os.makedirs(WORKER_FOLDER) diff --git a/scripts/rest.py b/scripts/rest.py new file mode 100644 index 0000000..7c7e5d8 --- /dev/null +++ b/scripts/rest.py @@ -0,0 +1,122 @@ +from quart import Quart, request, json, jsonify +import os +import io +import asyncio +from werkzeug.exceptions import HTTPException +from datetime import datetime + +import traceback + +from scripts.constants import RPA_SCRIPTS_FOLDER +from scripts.rpa import run_robot_task +from scripts.zeebe import ZeebeWorkerManager +from scripts.Secrets import SecretsManager + + +class ServerManager: + def __init__(self, zeebe_worker_manager: ZeebeWorkerManager): + self.app = Quart(__name__) + self.zeebe_worker_manager = zeebe_worker_manager + self.shutdown_event = asyncio.Event() + self.setup_routes() + + def setup_routes(self): + + @self.app.errorhandler(HTTPException) + def handle_exception(e): + """Return JSON instead of HTML for HTTP errors.""" + # start with the correct headers and status code from the error + return ( + jsonify( + variables={}, + stdOut=f"The RPA runtime encountered an error, please check the logs.\n{e.description}", + ), + e.code, + ) + + @self.app.route("/run", methods=["POST"]) + async def run(): + data = await request.get_json() + + script = data.get("script") + script_id = data.get("id") or "default" + variables = data.get("variables") or {} + + currentTime = datetime.now().strftime("%Y-%m-%dT%H-%M-%S") + + try: + output = await run_robot_task( + script=script, + variables=variables, + secrets_manager=self.secrets_manager, + workingdir=f"workers/manual_run/{script_id}/{currentTime}", + ) + except Exception as e: + print(e) + return jsonify({"stdOut": str(e), "log": None, "variables": "null"}) + + return jsonify( + variables=output.get("variables"), + stdOut=output.get("stdOut"), + log=get_file_content(output.get("logPath")), + logPath=output.get("logPath"), + ) + + @self.app.route("/deploy", methods=["POST"]) + async def deploy(): + data = await request.get_json() + script = data.get("script") + script_id = data.get("id") + script_path = os.path.join(RPA_SCRIPTS_FOLDER, f"{script_id}.robot") + with open(script_path, "w") as file: + file.write(script) + + await self.zeebe_worker_manager.update() # Use the zeebe_worker_manager instance + return jsonify({"message": "Script deployed.", "path": script_path}) + + @self.app.route("/status", methods=["GET"]) + async def status(): + return jsonify({"status": "up"}), 200 + + def shutdown(self): + self.shutdown_event.set() + + async 
def shutdown_trigger(self): + await self.shutdown_event.wait() + print("Server shutting down") + + async def start(self, port=36227): + print("Starting server") + self.secrets_manager = SecretsManager() + + host = os.getenv("HOST", "127.0.0.1") + + await self.app.run_task( + host=host, port=port, shutdown_trigger=self.shutdown_trigger + ) + print("Server done") + + +# Usage example +if __name__ == "__main__": + # Get environment variables + client_id = os.getenv("ZEEBE_CLIENT_ID") + client_secret = os.getenv("ZEEBE_CLIENT_SECRET") + cluster_id = os.getenv("CAMUNDA_CLUSTER_ID") + + # Create an instance of ZeebeWorkerManager + zeebe_worker_manager = ZeebeWorkerManager(client_id, client_secret, cluster_id) + + # Create an instance of Component + component = ServerManager(zeebe_worker_manager) + + # Run the server + asyncio.run(component.start_server()) + + +def get_file_content(path): + print(path) + if not os.path.exists(path): + return None + with open(path, "r", encoding="UTF8") as file: + return file.read() diff --git a/scripts/rpa.py b/scripts/rpa.py new file mode 100644 index 0000000..34bde94 --- /dev/null +++ b/scripts/rpa.py @@ -0,0 +1,128 @@ +import robot +import sys +import os +import io +import yaml +import asyncio + +from scripts.MultiStream import MultiStream +from scripts.collectResults import find_files_and_create_object + +# TODO: Handle Variable output +import subprocess +import os + + +async def run_robot_task( + variables=object(), + workingdir="workers/default", + outputdir="output", + script="", + secrets_manager=None, +): + workingdir = os.path.normpath(workingdir) + outputdir = os.path.normpath(outputdir) + + def callback(): + os.makedirs(workingdir, exist_ok=True) + + if secrets_manager and not variables.get("SECRETS"): + variables["SECRETS"] = secrets_manager.get_secrets() + + # Save the variables to a file + yaml.dump( + variables, + open(os.path.join(workingdir, "variables.yaml"), "w", encoding="UTF8"), + ) + + # Save the script to a file + with open( + os.path.join(workingdir, "tasks.robot"), "w", encoding="UTF8" + ) as output_file: + output_file.write(script) + + os.environ["ROBOT_ARTIFACTS"] = os.path.join(workingdir, "robot_artifacts") + + with open( + os.path.join(workingdir, "output.txt"), "w", encoding="UTF8" + ) as output_file: + outputBuffer = io.StringIO() + streams = [output_file, outputBuffer, sys.stdout] + outStream = MultiStream(*streams) + + # Ensure Camunda Library is discoverable + # Thid is very flaky and should be replaced with a more robust solution + env = os.environ.copy() + env["PYTHONPATH"] = ( + os.path.abspath(os.getcwd()) + os.pathsep + env.get("PYTHONPATH", "") + ) + + process = subprocess.Popen( + [ + "robot", + "--rpa", + "--outputdir", + outputdir, + "--variablefile", + "./variables.yaml", + "--report", + "none", + "--logtitle", + "Task log", + "./tasks.robot", + ], + cwd=os.path.abspath(workingdir), + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + env=env, + ) + + for line in iter(process.stdout.readline, b""): + outStream.write(line.decode()) + + process.communicate() + + result = process.returncode + + stdOutValue = outputBuffer.getvalue() + outputBuffer.close() + + # Remove the SECRETS variable from the file after the run completes + if "SECRETS" in variables: + del variables["SECRETS"] + yaml.dump( + variables, + open(os.path.join(workingdir, "variables.yaml"), "w", encoding="UTF8"), + ) + + absOutputDir = os.path.abspath(os.path.join(workingdir, outputdir)) + variablesToSet = get_output_variables(workingdir) + + return { + 
"variables": variablesToSet, + "outputPath": os.path.join(absOutputDir, "output.xml"), + "logPath": os.path.join(absOutputDir, "log.html"), + "exitCode": result, + "stdOut": stdOutValue, + } + + thread = asyncio.to_thread(callback) + threadResult = await thread + + return threadResult + + +def get_output_variables(workingdir): + # Load YAML file + outputs_file = os.path.join(workingdir, "outputs.yml") + if not os.path.exists(outputs_file): + return {} + + with open(outputs_file, "r", encoding="UTF8") as stream: + try: + data_loaded = yaml.safe_load(stream) + except yaml.YAMLError as exc: + print(exc) + + # Use the loaded data + return data_loaded diff --git a/scripts/tasks.py b/scripts/tasks.py new file mode 100644 index 0000000..68196b1 --- /dev/null +++ b/scripts/tasks.py @@ -0,0 +1,67 @@ +import os +import fnmatch +from pyzeebe import ZeebeTaskRouter, Job + +from scripts.constants import RPA_SCRIPTS_FOLDER +from scripts.rpa import run_robot_task + + +async def my_exception_handler(exception: Exception, job: Job) -> None: + print(exception) + await job.set_failure_status(message=str(exception)) + + +def add_task_to_router(router, secrets_manager, task_name, task_function): + print("creating task " + "camunda::RPA-Task::" + task_name) + + timeout_seconds = int(os.environ.get("TIMEOUT_SECONDS", 60)) + parallel_executions = int(os.environ.get("MAX_PARALLEL_EXECUTIONS", 1)) + + @router.task( + task_type="camunda::RPA-Task::" + task_name, + exception_handler=my_exception_handler, + timeout_ms=timeout_seconds * 1000, + max_running_jobs=parallel_executions, + max_jobs_to_activate=parallel_executions, + ) + async def rpa_task(job: Job): + variables = job.variables.get("camundaRpaTaskInput") or job.variables + + result = await run_robot_task( + variables=variables, + workingdir=f"workers/{task_name}/{job.key}", + outputdir="./output", + script=task_function, + secrets_manager=secrets_manager, + ) + if result.get("exitCode") != 0: + raise Exception( + result.get( + "stdOut", f"Process failed with exit code {result.get('exitCode')}" + ) + ) + + return result.get("variables") + + return rpa_task + + +def create_router(secrets_manager): + router = ZeebeTaskRouter() + + # Loop over all files in the directory + for root, dirs, files in os.walk(RPA_SCRIPTS_FOLDER): + for filename in fnmatch.filter(files, "*.robot"): + # Construct the full file path + file_path = os.path.join(root, filename) + # Read the content of the .robot file + with open(file_path, "r", encoding="utf-8") as file: + content = file.read() + # Remove the .robot suffix from the filename + filename_without_suffix = os.path.splitext(filename)[0] + # Call the function with the filename (without suffix) and the file content + add_task_to_router( + router, secrets_manager, filename_without_suffix, content + ) + + return router diff --git a/scripts/zeebe.py b/scripts/zeebe.py new file mode 100644 index 0000000..48adbb3 --- /dev/null +++ b/scripts/zeebe.py @@ -0,0 +1,129 @@ +import asyncio +import os +from pyzeebe import ZeebeWorker, create_camunda_cloud_channel + +from scripts.constants import RPA_SCRIPTS_FOLDER +from scripts.tasks import create_router +from scripts.Secrets import SecretsManager + +from watchdog.observers import Observer +from watchdog.events import FileSystemEventHandler + + +class ZeebeWorkerManager: + + def __init__( + self, client_id=None, client_secret=None, cluster_id=None, region=None + ): + self.client_id = client_id or os.getenv("ZEEBE_CLIENT_ID") + self.client_secret = client_secret or 
os.getenv("ZEEBE_CLIENT_SECRET") + self.cluster_id = cluster_id or os.getenv("CAMUNDA_CLUSTER_ID") + self.region = region or os.getenv("CAMUNDA_CLUSTER_REGION") + self.worker = None + self.channel = None + self.shutdown_event = asyncio.Event() + + self.secrets_manager = SecretsManager( + client_id=client_id, client_secret=client_secret + ) + + # Throw error message if environment variables are not set + if not self.client_id or not self.client_secret or not self.cluster_id: + raise Exception( + "The Environment variables ZEEBE_CLIENT_ID, ZEEBE_CLIENT_SECRET, and CAMUNDA_CLUSTER_ID must be set." + ) + self.observer = Observer() + self.observer.schedule( + FileChangeHandler(self, asyncio.get_event_loop()), + path=RPA_SCRIPTS_FOLDER, + recursive=True, + ) + self.observer.start() + + async def update(self): + print("Updating topics") + + if not self.channel: + return + + if self.worker: + await self.worker.stop() + print("Worker stopped") + + router = create_router(self.secrets_manager) + self.worker = ZeebeWorker(self.channel) + self.worker.include_router(router) + + print("Worker started") + asyncio.create_task(self.worker.work()) + + def shutdown(self): + self.shutdown_event.set() + + async def shutdown_trigger(self): + await self.shutdown_event.wait() + if self.worker: + await self.worker.stop() + self.observer.stop() + self.observer.join() + + print("Worker stopped") + + async def start(self): + print("Starting worker") + self.channel = create_camunda_cloud_channel( + self.client_id, self.client_secret, self.cluster_id, self.region + ) + + await self.update() + + await self.shutdown_trigger() + + print("Worker ended") + + +# Watchdog event handler to detect file changes +class FileChangeHandler(FileSystemEventHandler): + def __init__(self, worker_manager, loop): + self.worker_manager = worker_manager + self._loop = loop or asyncio.get_event_loop() + self.update_scheduled = False + + # This ensures we do not update multiple times for the same event + def schedule_update(self): + if not self.update_scheduled: + self.update_scheduled = True + self._loop.call_later(0.1, self.run_update) + + def run_update(self): + self.update_scheduled = False + asyncio.create_task(self.worker_manager.update()) + + def on_modified(self, event): + if not event.is_directory: + print(f"File {event.src_path} modified. Updating worker...") + self.schedule_update() + + def on_created(self, event): + if not event.is_directory: + print(f"File {event.src_path} created. Updating worker...") + self.schedule_update() + + def on_deleted(self, event): + if not event.is_directory: + print(f"File {event.src_path} deleted. Updating worker...") + self.schedule_update() + + +# Usage example +if __name__ == "__main__": + client_id = os.getenv("ZEEBE_CLIENT_ID") + client_secret = os.getenv("ZEEBE_CLIENT_SECRET") + cluster_id = os.getenv("CAMUNDA_CLUSTER_ID") + + worker_manager = ZeebeWorkerManager(client_id, client_secret, cluster_id) + + try: + asyncio.run(worker_manager.start()) + except KeyboardInterrupt: + pass diff --git a/setup.ps1 b/setup.ps1 new file mode 100644 index 0000000..79daec2 --- /dev/null +++ b/setup.ps1 @@ -0,0 +1,68 @@ +# This script assumes you have PowerShell and that execution of scripts is enabled. 
+# You can enable script execution by running PowerShell as Administrator and typing: +# Set-ExecutionPolicy RemoteSigned + +# Remove the python alias in Windows +Remove-Item $env:LOCALAPPDATA\Microsoft\WindowsApps\python.exe +Remove-Item $env:LOCALAPPDATA\Microsoft\WindowsApps\python3.exe + + +# Install Python +# Define the download URL +$downloadUrl = "https://github.com/winpython/winpython/releases/download/6.1.20230527/Winpython64-3.10.11.1dot.exe" + +# Define the output path +$outputPath = ".\Winpython64-3.10.11.1dot.exe" + +# Use WebClient to download WinPython +$webClient = New-Object System.Net.WebClient +$webClient.DownloadFile($downloadUrl, $outputPath) + +# Run the installer non-interactively (silent install) +Start-Process -FilePath $outputPath -ArgumentList "-y" -Wait -NoNewWindow + +# Define the path to the portable Python +$pythonPortablePath = "$(Get-Location)\WPy64-310111\python-3.10.11.amd64" + +# Add the new directory to the current PATH +$env:PATH += ";$pythonPortablePath;$pythonPortablePath\Scripts;" + +# Verify that the directory has been added +Write-Host "Current PATH: $env:PATH" + +# Setup virtual environment +& "python.exe" -m venv .venv + +& ".venv\Scripts\activate" + + +# Install Node, required for browser automation +# Define the download URL +$nodeDownloadUrl = "https://nodejs.org/dist/v14.18.1/node-v14.18.1-win-x64.zip" + +# Define the output path +$outputPath = ".\node.zip" + +# Use WebClient to download Node.js +$webClient = New-Object System.Net.WebClient +$webClient.DownloadFile($nodeDownloadUrl, $outputPath) + +# Extract Node.js to a portable location +$extractPath = ".\node" +Expand-Archive -Path $outputPath -DestinationPath $extractPath + +# Add Node.js to the PATH +$env:PATH += ";$(Get-Location)\node\node-v14.18.1-win-x64" + +# Install npm packages +& "npm" -v + +# Install Python requirements using the portable pip +& "pip.exe" install -r requirements.txt --no-deps + +# Initialize RF Browser +& "rfbrowser.exe" init + +Copy-Item ".\dev.env" -Destination ".\.env" + +Write-Host "Installation Done" diff --git a/start.ps1 b/start.ps1 new file mode 100644 index 0000000..8841933 --- /dev/null +++ b/start.ps1 @@ -0,0 +1,10 @@ +# This script assumes you have PowerShell and that execution of scripts is enabled. 
+# You can enable script execution by running PowerShell as Administrator and typing: +# Set-ExecutionPolicy RemoteSigned + +# Remove the python alias in Windows +Remove-Item $env:LOCALAPPDATA\Microsoft\WindowsApps\python.exe +Remove-Item $env:LOCALAPPDATA\Microsoft\WindowsApps\python3.exe + +& ".venv\Scripts\activate" +& "python.exe" -u worker.py diff --git a/worker.py b/worker.py new file mode 100644 index 0000000..2f39bad --- /dev/null +++ b/worker.py @@ -0,0 +1,78 @@ +import asyncio +import os +import signal +from scripts.rest import ServerManager +from scripts.zeebe import ZeebeWorkerManager +from dotenv import load_dotenv + + +class ApplicationManager: + def __init__(self, client_id, client_secret, cluster_id): + self.worker_manager = None + self.rest_server = None + self.client_id = client_id + self.client_secret = client_secret + self.cluster_id = cluster_id + + async def shutdown(self): + # Shut down the server and the worker + if self.rest_server: + self.rest_server.shutdown() + if self.worker_manager: + self.worker_manager.shutdown() + + def add_shutdown_listener(self): + loop = asyncio.get_event_loop() + + def _shutdown_handler(): + asyncio.create_task(self.shutdown()) + + for signal_name in {"SIGINT", "SIGTERM", "SIGBREAK"}: + if hasattr(signal, signal_name): + try: + loop.add_signal_handler( + getattr(signal, signal_name), _shutdown_handler + ) + except NotImplementedError: + # Add signal handler may not be implemented on Windows + signal.signal(getattr(signal, signal_name), _shutdown_handler) + + async def start(self): + self.add_shutdown_listener() + + tasks = [] + + if os.getenv("ENABLE_WORKER") == "true": + self.worker_manager = ZeebeWorkerManager( + self.client_id, self.client_secret, self.cluster_id + ) + worker_task = asyncio.create_task(self.worker_manager.start()) + tasks.append(worker_task) + + if os.getenv("ENABLE_REST") == "true": + self.rest_server = ServerManager(self.worker_manager) + server_task = asyncio.create_task( + self.rest_server.start(port=os.getenv("PORT")) + ) + tasks.append(server_task) + + await asyncio.gather(*tasks) + + +if __name__ == "__main__": + # Load environment variables from .env file + load_dotenv() + + # Get environment variables + client_id = os.getenv("ZEEBE_CLIENT_ID") + client_secret = os.getenv("ZEEBE_CLIENT_SECRET") + cluster_id = os.getenv("CAMUNDA_CLUSTER_ID") + + app_manager = ApplicationManager(client_id, client_secret, cluster_id) + + try: + asyncio.run(app_manager.start()) + except KeyboardInterrupt: + print("Keyboard interrupt") + finally: + print("Done")
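For ad-hoc testing without deploying a BPMN process, `scripts/rest.py` also exposes a `/run` route that accepts a Robot Framework script plus optional variables and executes it immediately. A minimal sketch of calling it with an inline one-task script, assuming the REST interface is enabled on the default port (the task name and variable values here are illustrative):

```sh
curl -X POST http://localhost:36227/run \
  -H "Content-Type: application/json" \
  -d '{
        "id": "smoke-test",
        "variables": {"processVariable": {"bar": "hello"}},
        "script": "*** Tasks ***\nSay Hello\n    Log To Console    ${processVariable.bar}"
      }'
```

The JSON response contains the output `variables`, the captured `stdOut`, and the rendered `log`/`logPath` of the run.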