Skip to content

Commit

Permalink
Merge remote-tracking branch 'origin/master' into pricing-v2-improvem…
Browse files Browse the repository at this point in the history
…ents

# Conflicts:
#	app_users/migrations/0017_alter_appuser_subscription.py
#	celeryapp/tasks.py
#	daras_ai_v2/base.py
#	poetry.lock
#	routers/api.py
  • Loading branch information
devxpy committed Jul 11, 2024
2 parents c971f7a + b0d3bbf commit fa6d9e5
Show file tree
Hide file tree
Showing 111 changed files with 3,494 additions and 1,783 deletions.
6 changes: 2 additions & 4 deletions .env.example
Original file line number Diff line number Diff line change
Expand Up @@ -2,13 +2,11 @@ APP_BASE_URL=http://localhost:3000
API_BASE_URL=http://localhost:8080
ADMIN_BASE_URL=http://localhost:8000

GS_BUCKET_NAME=dara-c1b52.appspot.com
GCP_PROJECT=dara-c1b52
GCP_REGION=us-central1

PGHOST=127.0.0.1
PGPORT=5432
PGUSER=gooey
PGDATABASE=gooey
PGPASSWORD=gooey

STRIPE_PRODUCT_IDS='{"creator": "prod_Q9x4USwPZXAjDr", "business": "prod_Q9YfjAsk4kq5o3", "addon": "prod_Pz8hAslLzckWRw"}'
PAYPAL_PLAN_IDS='{"basic": "P-7EE20432AK666360GMYZFNBQ", "premium": "P-35W68839HF2588719MYZFN5Y", "creator": "P-3T117524WS380863NMZIH36Y", "business": "P-5D3735626S133783GMZIH4MI"}'
97 changes: 97 additions & 0 deletions .github/workflows/python-tests.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,97 @@
name: Python tests

on: [ push, workflow_dispatch ]

jobs:
  test:
    runs-on: ubuntu-22.04
    strategy:
      matrix:
        python-version: [ "3.10.12" ]
        poetry-version: [ "1.8.3" ]

    # Service containers to run with `test`
    services:
      # https://docs.github.com/en/actions/using-containerized-services/creating-postgresql-service-containers
      postgres:
        # Docker Hub image
        image: postgres:15.2
        # Provide the password for postgres
        env:
          POSTGRES_DB: gooey
          POSTGRES_USER: postgres
          POSTGRES_PASSWORD: password
        # Set health checks to wait until postgres has started
        # (--name is needed so later steps can `docker exec` into this container)
        options: >-
          --name postgres
          --health-cmd pg_isready
          --health-interval 5s
          --health-timeout 5s
          --health-retries 5
        ports:
          # Maps tcp port 5432 on service container to the host
          - 5432:5432
      # https://docs.github.com/en/actions/using-containerized-services/creating-redis-service-containers
      redis:
        # Docker Hub image
        image: redis
        # Set health checks to wait until redis has started
        options: >-
          --health-cmd "redis-cli ping"
          --health-interval 5s
          --health-timeout 5s
          --health-retries 5
        ports:
          # Maps tcp port 6379 on service container to the host
          - 6379:6379

    steps:
      # https://remarkablemark.org/blog/2022/05/12/github-actions-postgresql-increase-max-connections-and-shared-buffers/
      - name: Increase max_connections
        # NOTE: must be a literal block scalar (`|`), not folded (`>-`) —
        # folding would collapse the heredoc onto a single line and break it.
        run: |
          docker exec -i postgres bash << EOF
          sed -i -e 's/max_connections = 100/max_connections = 10000/' /var/lib/postgresql/data/postgresql.conf
          EOF
      - name: Restart postgres
        # restart to pick up the config change, then block until it accepts connections
        run: >-
          docker restart postgres
          && while ! docker exec postgres pg_isready; do sleep 5; done
      - name: Install system dependencies
        run: >-
          sudo apt-get update && sudo apt-get install -y --no-install-recommends
          libpoppler-cpp-dev
          python3-opencv
          postgresql-client
          libzbar0

      - uses: actions/checkout@v4

      - name: Setup Python, Poetry and Dependencies
        uses: packetcoders/action-setup-cache-python-poetry@main
        with:
          python-version: ${{matrix.python-version}}
          poetry-version: ${{matrix.poetry-version}}
          install-args: --only main

      - name: Run tests
        env:
          PGHOST: localhost
          PGPORT: 5432
          PGDATABASE: gooey
          PGUSER: postgres
          PGPASSWORD: password
          REDIS_URL: redis://localhost:6379/0
          REDIS_CACHE_URL: redis://localhost:6379/1
          APP_BASE_URL: http://localhost:3000
          API_BASE_URL: http://localhost:8080
          ADMIN_BASE_URL: http://localhost:8000
          GCP_PROJECT: ${{ vars.GCP_PROJECT }}
          GCP_REGION: ${{ vars.GCP_REGION }}
          GOOGLE_APPLICATION_CREDENTIALS_JSON: ${{ secrets.GOOGLE_APPLICATION_CREDENTIALS_JSON }}
          STRIPE_SECRET_KEY: ${{ secrets.STRIPE_SECRET_KEY }}
          ELEVEN_LABS_API_KEY: ${{ secrets.ELEVEN_LABS_API_KEY }}
          AZURE_SPEECH_REGION: ${{ secrets.AZURE_SPEECH_REGION }}
          AZURE_SPEECH_KEY: ${{ secrets.AZURE_SPEECH_KEY }}
          AZURE_FORM_RECOGNIZER_ENDPOINT: ${{ secrets.AZURE_FORM_RECOGNIZER_ENDPOINT }}
          AZURE_FORM_RECOGNIZER_KEY: ${{ secrets.AZURE_FORM_RECOGNIZER_KEY }}
          TEST_SLACK_TEAM_ID: ${{ secrets.TEST_SLACK_TEAM_ID }}
          TEST_SLACK_USER_ID: ${{ secrets.TEST_SLACK_USER_ID }}
          TEST_SLACK_AUTH_TOKEN: ${{ secrets.TEST_SLACK_AUTH_TOKEN }}
        run: |
          poetry run ./scripts/run-tests.sh
2 changes: 1 addition & 1 deletion Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ RUN wget -qO- 'https://poppler.freedesktop.org/poppler-23.07.0.tar.xz' | tar -xJ
&& rm -rf poppler-23.07.0

# install latest pandoc - https://github.com/jgm/pandoc/releases
RUN wget -qO pandoc.deb 'https://github.com/jgm/pandoc/releases/download/3.1.13/pandoc-3.1.13-1-amd64.deb' \
RUN wget -qO pandoc.deb 'https://github.com/jgm/pandoc/releases/download/3.2/pandoc-3.2-1-amd64.deb' \
&& dpkg -i pandoc.deb \
&& rm pandoc.deb

Expand Down
37 changes: 27 additions & 10 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,33 @@
* Run `./manage.py migrate`
* Install the zbar library (`brew install zbar`)

### Create a google cloud / firebase account

1. Create a [google cloud](https://console.cloud.google.com/) project
2. Create a [firebase project](https://console.firebase.google.com/) (using the same google cloud project)
3. Enable the following services:
- [Firestore](https://console.firebase.google.com/project/_/firestore)
- [Authentication](https://console.firebase.google.com/project/_/authentication)
- [Storage](https://console.firebase.google.com/project/_/storage)
- [Speech-to-Text](https://console.cloud.google.com/marketplace/product/google/speech.googleapis.com)
- [Text-to-Speech](https://console.cloud.google.com/marketplace/product/google/texttospeech.googleapis.com)
- [Translation API](https://console.cloud.google.com/marketplace/product/google/translate.googleapis.com)
- [Google Drive API](https://console.cloud.google.com/apis/library/drive.googleapis.com)
4. Go to IAM, Create a service account with following roles:
- Cloud Datastore User
- Cloud Speech Administrator
- Cloud Translation API Admin
- Firebase Authentication Admin
- Storage Admin
5. Create and Download a JSON Key for this service account and save it to the project root as `serviceAccountKey.json`.
6. Add your project & bucket name to `.env`

* Run tests to see if everything is working fine:
```
./scripts/run-tests.sh
```
(If you run into issues with the number of open files, you can remove the limit with `ulimit -n unlimited`)

## Run

You can start all required processes in one command with Honcho:
Expand Down Expand Up @@ -43,10 +70,6 @@ to run it.
**Note:** the Celery worker must be manually restarted on code changes. You
can do this by stopping and starting Honcho.

## To run any recipe

* Save `serviceAccountKey.json` to project root

## To run vespa (used for vector search)

You need to install OrbStack or Docker Desktop for this to work.
Expand Down Expand Up @@ -177,12 +200,6 @@ docker exec -it $cid poetry run ./manage.py runscript create_fixture
docker exec -it $cid poetry run ./manage.py runscript upload_fixture
```

Save the new fixture url in `scripts/run-tests.sh` and run the tests -

```bash
./scripts/run-tests.sh
```

To load the fixture on local db -

```bash
Expand Down
4 changes: 2 additions & 2 deletions app_users/migrations/0017_alter_appuser_subscription.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
# Generated by Django 4.2.7 on 2024-06-10 09:21
# Generated by Django 4.2.7 on 2024-06-18 20:34

from django.db import migrations, models
import django.db.models.deletion
Expand All @@ -7,7 +7,7 @@
class Migration(migrations.Migration):

dependencies = [
('payments', '0004_alter_subscription_auto_recharge_balance_threshold'),
('payments', '0003_alter_subscription_external_id_and_more'),
('app_users', '0016_appuser_disable_rate_limits'),
]

Expand Down
2 changes: 1 addition & 1 deletion auth/auth_backend.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ def force_authentication():
AppUser.objects.get_or_create(
email="[email protected]",
defaults=dict(
is_anonymous=True,
is_anonymous=False,
uid=get_random_doc_id(),
balance=10**9,
disable_rate_limits=True,
Expand Down
66 changes: 57 additions & 9 deletions bots/admin.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import datetime
import json
from types import SimpleNamespace

import django.db.models
from django import forms
Expand All @@ -12,7 +13,7 @@
from django.utils.timesince import timesince

from app_users.models import AppUser
from bots.admin_links import list_related_html_url, change_obj_url
from bots.admin_links import list_related_html_url, change_obj_url, open_in_new_tab
from bots.models import (
FeedbackComment,
CHATML_ROLE_ASSISSTANT,
Expand All @@ -27,13 +28,18 @@
MessageAttachment,
WorkflowMetadata,
BotIntegrationAnalysisRun,
Workflow,
)
from bots.tasks import create_personal_channels_for_all_members
from celeryapp.tasks import gui_runner
from daras_ai_v2.fastapi_tricks import get_route_url
from gooeysite.custom_actions import export_to_excel, export_to_csv
from gooeysite.custom_filters import (
related_json_field_summary,
)
from gooeysite.custom_widgets import JSONEditorWidget
from recipes.VideoBots import VideoBotsPage
from routers.root import integrations_stats_route

fb_fields = [
"fb_page_id",
Expand Down Expand Up @@ -160,7 +166,7 @@ class BotIntegrationAdmin(admin.ModelAdmin):
"view_messsages",
"created_at",
"updated_at",
"api_integration_id",
"api_integration_stats_url",
]

fieldsets = [
Expand All @@ -173,7 +179,7 @@ class BotIntegrationAdmin(admin.ModelAdmin):
"published_run",
"billing_account_uid",
"user_language",
"api_integration_id",
"api_integration_stats_url",
],
},
),
Expand Down Expand Up @@ -248,6 +254,33 @@ def view_analysis_results(self, bi: BotIntegration):
html = mark_safe(html)
return html

@admin.display(description="Integration Stats")
def api_integration_stats_url(self, bi: BotIntegration):
    """Render an open-in-new-tab link to the integration's stats page,
    labelled with the integration's API id."""
    integration_id = bi.api_integration_id()
    stats_url = get_route_url(
        integrations_stats_route,
        params=dict(
            page_slug=VideoBotsPage.slug_versions[-1],
            integration_id=integration_id,
        ),
    )
    return open_in_new_tab(url=stats_url, label=integration_id)


@admin.register(PublishedRunVersion)
class PublishedRunVersionAdmin(admin.ModelAdmin):
    # Admin for individual PublishedRun versions. Registering it with
    # search_fields also enables autocomplete lookups against this model
    # from other admins (e.g. inlines that reference these fields).
    search_fields = ["id", "version_id", "published_run__published_run_id"]
    autocomplete_fields = ["published_run", "saved_run", "changed_by"]


class PublishedRunVersionInline(admin.TabularInline):
    # Tabular inline listing a run's versions; extra=0 avoids blank rows.
    # Reuses PublishedRunVersionAdmin.autocomplete_fields so the two stay in sync.
    model = PublishedRunVersion
    extra = 0
    autocomplete_fields = PublishedRunVersionAdmin.autocomplete_fields


@admin.register(PublishedRun)
class PublishedRunAdmin(admin.ModelAdmin):
Expand All @@ -270,6 +303,7 @@ class PublishedRunAdmin(admin.ModelAdmin):
"created_at",
"updated_at",
]
inlines = [PublishedRunVersionInline]

def view_user(self, published_run: PublishedRun):
if published_run.created_by is None:
Expand Down Expand Up @@ -306,10 +340,17 @@ class SavedRunAdmin(admin.ModelAdmin):
"is_api_call",
"created_at",
"updated_at",
"run_status",
"error_msg",
]
list_filter = [
"workflow",
"is_api_call",
"is_flagged",
("run_status", admin.EmptyFieldListFilter),
("error_msg", admin.EmptyFieldListFilter),
"created_at",
"retention_policy",
]
search_fields = ["workflow", "example_id", "run_id", "uid"]
autocomplete_fields = ["parent_version"]
Expand All @@ -327,7 +368,7 @@ class SavedRunAdmin(admin.ModelAdmin):
"is_api_call",
]

actions = [export_to_csv, export_to_excel]
actions = [export_to_csv, export_to_excel, "rerun_tasks"]

formfield_overrides = {
django.db.models.JSONField: {"widget": JSONEditorWidget},
Expand Down Expand Up @@ -374,11 +415,18 @@ def view_usage_cost(self, saved_run: SavedRun):
saved_run.usage_costs, extra_label=f"${total_cost.normalize()}"
)


@admin.register(PublishedRunVersion)
class PublishedRunVersionAdmin(admin.ModelAdmin):
search_fields = ["id", "version_id", "published_run__published_run_id"]
autocomplete_fields = ["published_run", "saved_run", "changed_by"]
@admin.action(description="Re-Run Tasks")
def rerun_tasks(self, request, queryset):
    """Admin action: re-enqueue the runner task for every selected SavedRun.

    Counts runs while iterating instead of calling queryset.count()
    afterwards, which would issue a second DB query (and could disagree
    with the number actually re-run if rows change in between).
    """
    sr: SavedRun
    n = 0
    for sr in queryset.all():
        # Rebuild the workflow page with a minimal request shim carrying the
        # run's owner — presumably all that call_runner_task needs here.
        page = Workflow(sr.workflow).page_cls(
            request=SimpleNamespace(user=AppUser.objects.get(uid=sr.uid))
        )
        page.call_runner_task(sr)
        n += 1
    self.message_user(
        request,
        f"Started re-running {n} tasks in the background.",
    )


class LastActiveDeltaFilter(admin.SimpleListFilter):
Expand Down
6 changes: 5 additions & 1 deletion bots/admin_links.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,11 @@
from daras_ai_v2 import settings


def open_in_new_tab(url: str, *, label: str = "", add_related_url: str = None) -> str:
def open_in_new_tab(
url: str | None, *, label: str = "", add_related_url: str = None
) -> str | None:
if not url:
return None
label = re.sub(r"https?://", "", label)
context = {
"url": url,
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
# Generated by Django 4.2.7 on 2024-06-18 20:34

from django.db import migrations, models


class Migration(migrations.Migration):
    # Auto-generated (Django 4.2.7): re-declares the `workflow` IntegerField
    # choices on publishedrun, savedrun and workflowmetadata after new
    # workflow entries were added to the enum.
    # NOTE(review): the 'Lipysnc + TTS' typo mirrors the model's choices —
    # do not "fix" it here; migrations must match the model state.

    dependencies = [
        ('bots', '0073_savedrun_retention_policy'),
    ]

    operations = [
        migrations.AlterField(
            model_name='publishedrun',
            name='workflow',
            field=models.IntegerField(choices=[(1, 'Doc Search'), (2, 'Doc Summary'), (3, 'Google GPT'), (4, 'Copilot'), (5, 'Lipysnc + TTS'), (6, 'Text to Speech'), (7, 'Speech Recognition'), (8, 'Lipsync'), (9, 'Deforum Animation'), (10, 'Compare Text2Img'), (11, 'Text2Audio'), (12, 'Img2Img'), (13, 'Face Inpainting'), (14, 'Google Image Gen'), (15, 'Compare AI Upscalers'), (16, 'SEO Summary'), (17, 'Email Face Inpainting'), (18, 'Social Lookup Email'), (19, 'Object Inpainting'), (20, 'Image Segmentation'), (21, 'Compare LLM'), (22, 'Chyron Plant'), (23, 'Letter Writer'), (24, 'Smart GPT'), (25, 'AI QR Code'), (26, 'Doc Extract'), (27, 'Related QnA Maker'), (28, 'Related QnA Maker Doc'), (29, 'Embeddings'), (30, 'Bulk Runner'), (31, 'Bulk Evaluator'), (32, 'Functions')]),
        ),
        migrations.AlterField(
            model_name='savedrun',
            name='workflow',
            field=models.IntegerField(choices=[(1, 'Doc Search'), (2, 'Doc Summary'), (3, 'Google GPT'), (4, 'Copilot'), (5, 'Lipysnc + TTS'), (6, 'Text to Speech'), (7, 'Speech Recognition'), (8, 'Lipsync'), (9, 'Deforum Animation'), (10, 'Compare Text2Img'), (11, 'Text2Audio'), (12, 'Img2Img'), (13, 'Face Inpainting'), (14, 'Google Image Gen'), (15, 'Compare AI Upscalers'), (16, 'SEO Summary'), (17, 'Email Face Inpainting'), (18, 'Social Lookup Email'), (19, 'Object Inpainting'), (20, 'Image Segmentation'), (21, 'Compare LLM'), (22, 'Chyron Plant'), (23, 'Letter Writer'), (24, 'Smart GPT'), (25, 'AI QR Code'), (26, 'Doc Extract'), (27, 'Related QnA Maker'), (28, 'Related QnA Maker Doc'), (29, 'Embeddings'), (30, 'Bulk Runner'), (31, 'Bulk Evaluator'), (32, 'Functions')], default=4),
        ),
        migrations.AlterField(
            model_name='workflowmetadata',
            name='workflow',
            field=models.IntegerField(choices=[(1, 'Doc Search'), (2, 'Doc Summary'), (3, 'Google GPT'), (4, 'Copilot'), (5, 'Lipysnc + TTS'), (6, 'Text to Speech'), (7, 'Speech Recognition'), (8, 'Lipsync'), (9, 'Deforum Animation'), (10, 'Compare Text2Img'), (11, 'Text2Audio'), (12, 'Img2Img'), (13, 'Face Inpainting'), (14, 'Google Image Gen'), (15, 'Compare AI Upscalers'), (16, 'SEO Summary'), (17, 'Email Face Inpainting'), (18, 'Social Lookup Email'), (19, 'Object Inpainting'), (20, 'Image Segmentation'), (21, 'Compare LLM'), (22, 'Chyron Plant'), (23, 'Letter Writer'), (24, 'Smart GPT'), (25, 'AI QR Code'), (26, 'Doc Extract'), (27, 'Related QnA Maker'), (28, 'Related QnA Maker Doc'), (29, 'Embeddings'), (30, 'Bulk Runner'), (31, 'Bulk Evaluator'), (32, 'Functions')], unique=True),
        ),
    ]
Loading

0 comments on commit fa6d9e5

Please sign in to comment.