From 35e0385ad8103571a20411fa184009c4ca5c55b6 Mon Sep 17 00:00:00 2001 From: Dev Aggarwal Date: Tue, 25 Jun 2024 19:11:16 +0530 Subject: [PATCH] github actions python tests skip tests if env var missing allow running the project without azure and 11 labs keys remove AZURE_TTS_ENDPOINT env var --- .github/workflows/python-tests.yml | 105 ++++++++++++++++++ README.md | 35 ++++-- daras_ai_v2/settings.py | 6 +- .../text_to_speech_settings_widgets.py | 12 +- glossary_resources/tests.py | 2 + recipes/VideoBots.py | 7 +- tests/test_checkout.py | 2 + tests/test_slack.py | 4 + 8 files changed, 155 insertions(+), 18 deletions(-) create mode 100644 .github/workflows/python-tests.yml diff --git a/.github/workflows/python-tests.yml b/.github/workflows/python-tests.yml new file mode 100644 index 000000000..a50ccc382 --- /dev/null +++ b/.github/workflows/python-tests.yml @@ -0,0 +1,105 @@ +name: Python tests + +on: [push, workflow_dispatch] + +jobs: + test: + runs-on: ubuntu-22.04 + strategy: + fail-fast: false + matrix: + python-version: ["3.10.12"] + + # Service containers to run with `test` + services: + # https://docs.github.com/en/actions/using-containerized-services/creating-postgresql-service-containers + postgres: + # Docker Hub image + image: postgres:15.2 + # Provide the password for postgres + env: + POSTGRES_DB: gooey + POSTGRES_USER: postgres + POSTGRES_PASSWORD: password + # Set health checks to wait until postgres has started + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + --name postgres + ports: + # Maps tcp port 5432 on service container to the host + - 5432:5432 + # https://docs.github.com/en/actions/using-containerized-services/creating-redis-service-containers + redis: + # Docker Hub image + image: redis + # Set health checks to wait until redis has started + options: >- + --health-cmd "redis-cli ping" + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + # Maps tcp port 6379 
on service container to the host + - 6379:6379 + steps: + - name: Increase max_connections + run: | + docker exec -i postgres bash << EOF + sed -i -e 's/max_connections = 100/max_connections = 10000/' /var/lib/postgresql/data/postgresql.conf + EOF + - name: Restart postgres + run: | + docker restart --time 0 postgres && sleep 5 + + - uses: actions/checkout@v4 +# with: +# submodules: recursive + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + cache: 'pip' + + - name: Install system dependencies + run: | + sudo apt-get update && sudo apt-get install -y --no-install-recommends \ + libpoppler-cpp-dev \ + python3-opencv \ + postgresql-client \ + libzbar0 + + - name: Install python dependencies + run: | + pip install -U poetry pip && poetry install --only main --no-interaction + +# - name: Load secrets into env +# uses: oNaiPs/secrets-to-env-action@v1 +# with: +# secrets: ${{ toJSON(secrets) }} + + - name: Test with pytest + env: + PGHOST: localhost + PGPORT: 5432 + PGDATABASE: gooey + PGUSER: postgres + PGPASSWORD: password + REDIS_URL: redis://localhost:6379/0 + REDIS_CACHE_URL: redis://localhost:6379/1 + APP_BASE_URL: http://localhost:3000 + API_BASE_URL: http://localhost:8080 + ADMIN_BASE_URL: http://localhost:8000 + GOOGLE_APPLICATION_CREDENTIALS_JSON: ${{ secrets.GOOGLE_APPLICATION_CREDENTIALS_JSON }} + GS_BUCKET_NAME: ${{ secrets.GS_BUCKET_NAME }} + STRIPE_SECRET_KEY: ${{ secrets.STRIPE_SECRET_KEY }} + ELEVEN_LABS_API_KEY: ${{ secrets.ELEVEN_LABS_API_KEY }} + AZURE_SPEECH_REGION: ${{ secrets.AZURE_SPEECH_REGION }} + AZURE_SPEECH_KEY: ${{ secrets.AZURE_SPEECH_KEY }} + AZURE_FORM_RECOGNIZER_ENDPOINT: ${{ secrets.AZURE_FORM_RECOGNIZER_ENDPOINT }} + AZURE_FORM_RECOGNIZER_KEY: ${{ secrets.AZURE_FORM_RECOGNIZER_KEY }} + run: | + poetry run ./scripts/run-tests.sh diff --git a/README.md b/README.md index 93a719aa9..b2b487b11 100644 --- a/README.md +++ b/README.md @@ -14,6 +14,31 
@@ * Run `./manage.py migrate` * Install the zbar library (`brew install zbar`) +### Create a google cloud / firebase account + +1. Create a [google cloud](https://console.cloud.google.com/) project +2. Create a [firebase project](https://console.firebase.google.com/) (using the same google cloud project) +3. Enable the following services: + - [Firestore](https://console.firebase.google.com/project/_/firestore) + - [Authentication](https://console.firebase.google.com/project/_/authentication) + - [Storage](https://console.firebase.google.com/project/_/storage) + - [Speech-to-Text](https://console.cloud.google.com/marketplace/product/google/speech.googleapis.com) + - [Text-to-Speech](https://console.cloud.google.com/marketplace/product/google/texttospeech.googleapis.com) + - [Translation API](https://console.cloud.google.com/marketplace/product/google/translate.googleapis.com) + - [Google Drive API](https://console.cloud.google.com/apis/library/drive.googleapis.com) +4. Go to IAM, Create a service account with following roles: + - Cloud Datastore User + - Cloud Speech Administrator + - Cloud Translation API Admin + - Firebase Authentication Admin +5. Download the `serviceAccountKey.json` and save it to the project root. + +* Run tests to see if everything is working fine: + ``` + ./scripts/run-tests.sh + ``` + (If you run into issues with the number of open files, you can remove the limit with `ulimit -n unlimited`) + ## Run You can start all required processes in one command with Honcho: @@ -43,10 +68,6 @@ to run it. **Note:** the Celery worker must be manually restarted on code changes. You can do this by stopping and starting Honcho. -## To run any recipe - -* Save `serviceAccountKey.json` to project root - ## To run vespa (used for vector search) You need to install OrbStack or Docker Desktop for this to work. 
@@ -177,12 +198,6 @@ docker exec -it $cid poetry run ./manage.py runscript create_fixture docker exec -it $cid poetry run ./manage.py runscript upload_fixture ``` -Save the new fixture url in `scripts/run-tests.sh` and run the tests - - -```bash -./scripts/run-tests.sh -``` - To load the fixture on local db - ```bash diff --git a/daras_ai_v2/settings.py b/daras_ai_v2/settings.py index 7c65ccba9..f734766a2 100644 --- a/daras_ai_v2/settings.py +++ b/daras_ai_v2/settings.py @@ -293,7 +293,8 @@ LOW_BALANCE_EMAIL_DAYS = config("LOW_BALANCE_EMAIL_DAYS", 7, cast=int) LOW_BALANCE_EMAIL_ENABLED = config("LOW_BALANCE_EMAIL_ENABLED", True, cast=bool) -stripe.api_key = config("STRIPE_SECRET_KEY", None) +STRIPE_SECRET_KEY = config("STRIPE_SECRET_KEY", None) +stripe.api_key = STRIPE_SECRET_KEY STRIPE_USER_SUBSCRIPTION_METADATA_FIELD: str = "subscription_key" STRIPE_ENDPOINT_SECRET = config("STRIPE_ENDPOINT_SECRET", None) STRIPE_ADDON_PRODUCT_NAME = config( @@ -352,8 +353,7 @@ AZURE_SPEECH_REGION = config("AZURE_SPEECH_REGION", "") AZURE_SPEECH_KEY = config("AZURE_SPEECH_KEY", "") AZURE_SPEECH_ENDPOINT = f"https://{AZURE_SPEECH_REGION}.api.cognitive.microsoft.com" - -AZURE_TTS_ENDPOINT = config("AZURE_TTS_ENDPOINT", "") +AZURE_TTS_ENDPOINT = f"https://{AZURE_SPEECH_REGION}.tts.speech.microsoft.com" AZURE_OPENAI_ENDPOINT_CA = config("AZURE_OPENAI_ENDPOINT_CA", "") AZURE_OPENAI_KEY_CA = config("AZURE_OPENAI_KEY_CA", "") diff --git a/daras_ai_v2/text_to_speech_settings_widgets.py b/daras_ai_v2/text_to_speech_settings_widgets.py index 4adb3ce64..50dc50b46 100644 --- a/daras_ai_v2/text_to_speech_settings_widgets.py +++ b/daras_ai_v2/text_to_speech_settings_widgets.py @@ -195,7 +195,10 @@ def openai_tts_settings(): def azure_tts_selector(): - voices = azure_tts_voices() + if settings.AZURE_SPEECH_KEY: + voices = azure_tts_voices() + else: + voices = {} st.selectbox( label=""" ###### Azure TTS Voice name @@ -208,7 +211,7 @@ def azure_tts_selector(): def azure_tts_settings(): voice_name 
= st.session_state.get("azure_voice_name") - if not voice_name: + if not voice_name or not settings.AZURE_SPEECH_KEY: return try: voice = azure_tts_voices()[voice_name] @@ -382,7 +385,10 @@ def elevenlabs_selector(page): Alternatively, you can use your own ElevenLabs API key by selecting the checkbox above. """ ) - voices = default_elevenlabs_voices() + if settings.ELEVEN_LABS_API_KEY: + voices = default_elevenlabs_voices() + else: + voices = {} page.request.session[SESSION_ELEVENLABS_API_KEY] = st.session_state.get( "elevenlabs_api_key" diff --git a/glossary_resources/tests.py b/glossary_resources/tests.py index d28650f45..362e3c759 100644 --- a/glossary_resources/tests.py +++ b/glossary_resources/tests.py @@ -1,6 +1,7 @@ import pytest from daras_ai.image_input import storage_blob_for +from daras_ai_v2 import settings from daras_ai_v2.crypto import get_random_doc_id from glossary_resources.models import GlossaryResource from tests.test_translation import _test_run_google_translate_one @@ -62,6 +63,7 @@ def glossary_url(): GlossaryResource.objects.all().delete() +@pytest.mark.skipif(not settings.GS_BUCKET_NAME, reason="No GCS bucket") @pytest.mark.django_db def test_run_google_translate_glossary(glossary_url, threadpool_subtest): for text, expected, expected_with_glossary in TRANSLATION_TESTS_GLOSSARY: diff --git a/recipes/VideoBots.py b/recipes/VideoBots.py index d5c567147..49e89f8ca 100644 --- a/recipes/VideoBots.py +++ b/recipes/VideoBots.py @@ -14,7 +14,7 @@ from daras_ai.image_input import ( truncate_text_words, ) -from daras_ai_v2 import icons +from daras_ai_v2 import icons, settings from daras_ai_v2.asr import ( translation_model_selector, translation_language_selector, @@ -437,7 +437,10 @@ def render_form_v2(self): st.session_state.get("document_model"), ), ): - doc_model_descriptions = azure_form_recognizer_models() + if settings.AZURE_FORM_RECOGNIZER_KEY: + doc_model_descriptions = azure_form_recognizer_models() + else: + doc_model_descriptions = {} 
st.selectbox( f"{field_desc(self.RequestModel, 'document_model')}", key="document_model", diff --git a/tests/test_checkout.py b/tests/test_checkout.py index 06695ae1b..4e412543a 100644 --- a/tests/test_checkout.py +++ b/tests/test_checkout.py @@ -2,6 +2,7 @@ from fastapi.testclient import TestClient from app_users.models import AppUser +from daras_ai_v2 import settings from daras_ai_v2.billing import create_stripe_checkout_session from gooey_ui import RedirectException from payments.plans import PricingPlan @@ -10,6 +11,7 @@ client = TestClient(app) +@pytest.mark.skipif(not settings.STRIPE_SECRET_KEY, reason="No stripe secret") @pytest.mark.parametrize("plan", PricingPlan) def test_create_checkout_session( plan: PricingPlan, transactional_db, force_authentication: AppUser diff --git a/tests/test_slack.py b/tests/test_slack.py index 3a5802c8d..df1b9ce05 100644 --- a/tests/test_slack.py +++ b/tests/test_slack.py @@ -1,3 +1,4 @@ +import pytest from decouple import config from starlette.testclient import TestClient @@ -12,6 +13,9 @@ def test_slack_safe_channel_name(): assert safe_channel_name("My, Awesome, Channel %") == "my-awesome-channel" +@pytest.mark.skipif( + not config("TEST_SLACK_TEAM_ID", None), reason="No test slack team id" +) def test_slack_get_response_for_msg_id(transactional_db): team_id = config("TEST_SLACK_TEAM_ID") user_id = config("TEST_SLACK_USER_ID")