From 82c41b45f14cc251b1eb3cc19258984c47f6c36f Mon Sep 17 00:00:00 2001 From: Ravi Shankar <42587315+ravishankar63@users.noreply.github.com> Date: Fri, 15 Sep 2023 12:40:24 +0530 Subject: [PATCH 01/25] Added metadata images for workflows --- recipes/DocExtract.py | 5 +++++ recipes/DocSummary.py | 5 ++++- recipes/Lipsync.py | 5 +++++ recipes/QRCodeGenerator.py | 4 ++++ recipes/RelatedQnA.py | 5 ++++- recipes/RelatedQnADoc.py | 5 ++++- recipes/SmartGPT.py | 5 +++++ recipes/Text2Audio.py | 5 +++++ recipes/VideoBots.py | 5 +++++ recipes/asr.py | 5 +++++ 10 files changed, 46 insertions(+), 3 deletions(-) diff --git a/recipes/DocExtract.py b/recipes/DocExtract.py index 3afc8f6ca..671733baa 100644 --- a/recipes/DocExtract.py +++ b/recipes/DocExtract.py @@ -37,6 +37,8 @@ from daras_ai_v2.vector_search import doc_url_to_metadata, DocMetadata from recipes.DocSearch import render_documents +DEFAULT_YOUTUBE_BOT_META_IMG = "https://storage.googleapis.com/dara-c1b52.appspot.com/daras_ai/media/6c8f6876-538c-11ee-bea7-02420a000195/youtube%20bot%201.png.png" + class Columns(IntegerChoices): webpage_url = 1, "Source" @@ -81,6 +83,9 @@ class RequestModel(BaseModel): class ResponseModel(BaseModel): pass + def preview_image(self, state: dict) -> str | None: + return DEFAULT_YOUTUBE_BOT_META_IMG + def render_form_v2(self): document_uploader( "##### 🤖 Youtube URLS", diff --git a/recipes/DocSummary.py b/recipes/DocSummary.py index 63fb0ed71..f49025c7f 100644 --- a/recipes/DocSummary.py +++ b/recipes/DocSummary.py @@ -27,7 +27,7 @@ ) from recipes.GoogleGPT import render_output_with_refs, GoogleGPTPage -DEFAULT_DOC_SEARCH_META_IMG = "https://storage.googleapis.com/dara-c1b52.appspot.com/daras_ai/media/assets/DOC%20SEARCH.gif" +DEFAULT_DOC_SUMMARY_META_IMG = "https://storage.googleapis.com/dara-c1b52.appspot.com/daras_ai/media/4bce6718-538c-11ee-a837-02420a000190/doc%20summary%201.gif.png" class CombineDocumentsChains(Enum): @@ -78,6 +78,9 @@ class ResponseModel(BaseModel): prompt_tree: PromptTree | None final_prompt: str + def preview_image(self, state: dict) -> str | None: + return DEFAULT_DOC_SUMMARY_META_IMG + def render_form_v2(self): document_uploader("##### 📎 Documents") st.text_area("##### 👩‍💻 Instructions", key="task_instructions", height=150) diff --git a/recipes/Lipsync.py b/recipes/Lipsync.py index 9c56e2a1b..4959ccdd0 100644 --- a/recipes/Lipsync.py +++ b/recipes/Lipsync.py @@ -15,6 +15,8 @@ CREDITS_PER_MB = 2 +DEFAULT_LIPSYNC_GIF = "https://storage.googleapis.com/dara-c1b52.appspot.com/daras_ai/media/13a80d08-538c-11ee-9c77-02420a000193/lipsync%20audio%201.gif.png" + class LipsyncPage(BasePage): title = "Lip Syncing" @@ -33,6 +35,9 @@ class RequestModel(BaseModel): class ResponseModel(BaseModel): output_video: str + def preview_image(self, state: dict) -> str | None: + return DEFAULT_LIPSYNC_GIF + def render_form_v2(self) -> bool: st.file_uploader( """ diff --git a/recipes/QRCodeGenerator.py b/recipes/QRCodeGenerator.py index 64fe12d30..56d907220 100644 --- a/recipes/QRCodeGenerator.py +++ b/recipes/QRCodeGenerator.py @@ -35,6 +35,7 @@ from daras_ai_v2.loom_video_widget import youtube_video ATTEMPTS = 1 +DEFAULT_QR_CODE_META_IMG = "https://storage.googleapis.com/dara-c1b52.appspot.com/daras_ai/media/f09c8cfa-5393-11ee-a837-02420a000190/ai%20art%20qr%20codes1%201.png.png" class QRCodeGeneratorPage(BasePage): @@ -89,6 +90,9 @@ class ResponseModel(BaseModel): shortened_url: str | None cleaned_qr_code: str + def preview_image(self, state: dict) -> str | None: + return DEFAULT_QR_CODE_META_IMG + def 
related_workflows(self) -> list: from recipes.CompareText2Img import CompareText2ImgPage from recipes.CompareUpscaler import CompareUpscalerPage diff --git a/recipes/RelatedQnA.py b/recipes/RelatedQnA.py index 4cc81724a..fe56f28ea 100644 --- a/recipes/RelatedQnA.py +++ b/recipes/RelatedQnA.py @@ -16,7 +16,7 @@ from recipes.GoogleGPT import GoogleGPTPage from recipes.RelatedQnADoc import render_qna_outputs -DEFAULT_GOOGLE_GPT_META_IMG = "https://storage.googleapis.com/dara-c1b52.appspot.com/daras_ai/media/assets/WEBSEARCH%20%2B%20CHATGPT.jpg" +DEFAULT_SEO_CONTENT_META_IMG = "https://storage.googleapis.com/dara-c1b52.appspot.com/daras_ai/media/9b415768-5393-11ee-a837-02420a000190/RQnA%20SEO%20content%201.png.png" class RelatedGoogleGPTResponse(GoogleGPTPage.ResponseModel): @@ -46,6 +46,9 @@ class ResponseModel(BaseModel): output_queries: list[RelatedGoogleGPTResponse] serp_results: dict + def preview_image(self, state: dict) -> str | None: + return DEFAULT_SEO_CONTENT_META_IMG + def render_description(self) -> str: return "This workflow gets the related queries for your Google search, searches your custom domain and builds answers using the results and GPT." diff --git a/recipes/RelatedQnADoc.py b/recipes/RelatedQnADoc.py index ce54d491f..09099fabd 100644 --- a/recipes/RelatedQnADoc.py +++ b/recipes/RelatedQnADoc.py @@ -15,7 +15,7 @@ from recipes.DocSearch import DocSearchPage, render_doc_search_step, EmptySearchResults from recipes.GoogleGPT import render_output_with_refs, GoogleSearchMixin -DEFAULT_GOOGLE_GPT_META_IMG = "https://storage.googleapis.com/dara-c1b52.appspot.com/daras_ai/media/assets/WEBSEARCH%20%2B%20CHATGPT.jpg" +DEFAULT_QNA_DOC_META_IMG = "https://storage.googleapis.com/dara-c1b52.appspot.com/daras_ai/media/bab3dd2a-538c-11ee-920f-02420a00018e/RQnA-doc%20search%201.png.png" class RelatedDocSearchResponse(DocSearchPage.ResponseModel): @@ -43,6 +43,9 @@ class ResponseModel(BaseModel): output_queries: list[RelatedDocSearchResponse] serp_results: dict + def preview_image(self, state: dict) -> str | None: + return DEFAULT_QNA_DOC_META_IMG + def render_description(self) -> str: return "This workflow gets the related queries for your Google search, searches your custom domain and builds answers using the results and GPT." 
diff --git a/recipes/SmartGPT.py b/recipes/SmartGPT.py index fb97bee69..55e2a9f5d 100644 --- a/recipes/SmartGPT.py +++ b/recipes/SmartGPT.py @@ -17,6 +17,8 @@ from daras_ai_v2.language_model_settings_widgets import language_model_settings from daras_ai_v2.pt import PromptTree +DEFAULT_SMARTGPT_META_IMG = "https://storage.googleapis.com/dara-c1b52.appspot.com/daras_ai/media/e02d1582-538a-11ee-9d7b-02420a000194/smartgpt%201.png.png" + class SmartGPTPage(BasePage): title = "SmartGPT" @@ -45,6 +47,9 @@ class ResponseModel(BaseModel): prompt_tree: PromptTree | None + def preview_image(self, state: dict) -> str | None: + return DEFAULT_SMARTGPT_META_IMG + def render_form_v2(self): st.text_area( """ diff --git a/recipes/Text2Audio.py b/recipes/Text2Audio.py index 05350c815..54435751f 100644 --- a/recipes/Text2Audio.py +++ b/recipes/Text2Audio.py @@ -17,6 +17,8 @@ num_outputs_setting, ) +DEFAULT_TEXT2AUDIO_META_IMG = "https://storage.googleapis.com/dara-c1b52.appspot.com/daras_ai/media/ddc6e894-538b-11ee-a837-02420a000190/text2audio1%201.png.png" + class Text2AudioModels(Enum): audio_ldm = "AudioLDM (CVSSP)" @@ -58,6 +60,9 @@ class ResponseModel(BaseModel): typing.Literal[tuple(e.name for e in Text2AudioModels)], list[str] ] + def preview_image(self, state: dict) -> str | None: + return DEFAULT_TEXT2AUDIO_META_IMG + def render_form_v2(self): st.text_area( """ diff --git a/recipes/VideoBots.py b/recipes/VideoBots.py index f84b04af4..319b0f198 100644 --- a/recipes/VideoBots.py +++ b/recipes/VideoBots.py @@ -59,6 +59,8 @@ from recipes.TextToSpeech import TextToSpeechPage from url_shortener.models import ShortenedURL +DEFAULT_COPILOT_META_IMG = "https://storage.googleapis.com/dara-c1b52.appspot.com/daras_ai/media/c8b24b0c-538a-11ee-a1a3-02420a00018d/meta%20tags1%201.png.png" + BOT_SCRIPT_RE = re.compile( # start of line r"^" @@ -254,6 +256,9 @@ class ResponseModel(BaseModel): final_search_query: str | None final_keyword_query: str | None + def preview_image(self, state: dict) -> str | None: + return DEFAULT_COPILOT_META_IMG + def related_workflows(self): from recipes.LipsyncTTS import LipsyncTTSPage from recipes.CompareText2Img import CompareText2ImgPage diff --git a/recipes/asr.py b/recipes/asr.py index ade2fbebe..acbb052d7 100644 --- a/recipes/asr.py +++ b/recipes/asr.py @@ -24,6 +24,8 @@ from daras_ai_v2.text_output_widget import text_outputs from recipes.DocSearch import render_documents +DEFAULT_ASR_META_IMG = "https://storage.googleapis.com/dara-c1b52.appspot.com/daras_ai/media/3b98d906-538b-11ee-9c77-02420a000193/Speech1%201.png.png" + class AsrPage(BasePage): title = "Speech Recognition & Translation" @@ -43,6 +45,9 @@ class ResponseModel(BaseModel): raw_output_text: list[str] | None output_text: list[str | AsrOutputJson] + def preview_image(self, state: dict) -> str | None: + return DEFAULT_ASR_META_IMG + def preview_description(self, state: dict): return "Transcribe mp3, WhatsApp audio + wavs with OpenAI's Whisper or AI4Bharat / Bhashini ASR models. Optionally translate to any language too." 
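Every page touched by this patch follows the same pattern: a module-level constant holding the workflow's meta-image URL, returned from a `preview_image` hook on the page class. Below is a minimal sketch of that pattern in isolation, assuming a simplified stand-in for `daras_ai_v2.base.BasePage`; the names `ExamplePage`, `DEFAULT_EXAMPLE_META_IMG`, and the URL are illustrative placeholders, not the real Gooey classes or assets.

```python
# Minimal sketch (not the actual Gooey code) of the preview_image pattern
# applied across the recipe pages in this patch.

DEFAULT_EXAMPLE_META_IMG = (  # placeholder URL, not a real asset
    "https://storage.googleapis.com/example-bucket/example%20workflow.png"
)


class BasePage:
    """Stand-in stub for daras_ai_v2.base.BasePage."""

    def preview_image(self, state: dict) -> str | None:
        # by default a page has no preview/meta image
        return None


class ExamplePage(BasePage):
    def preview_image(self, state: dict) -> str | None:
        # each workflow page overrides the hook to return its static meta image
        return DEFAULT_EXAMPLE_META_IMG


if __name__ == "__main__":
    print(ExamplePage().preview_image(state={}))
```

The actual pages in the hunks above each return their own `DEFAULT_*_META_IMG` constant in the same way.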
From a1da6a9a50937a353262ddc7e1c74ec50898e520 Mon Sep 17 00:00:00 2001 From: Dev Aggarwal Date: Wed, 20 Sep 2023 02:06:58 +0530 Subject: [PATCH 02/25] more admin search_fields --- bots/admin.py | 26 +++++++++++++++++++++++--- 1 file changed, 23 insertions(+), 3 deletions(-) diff --git a/bots/admin.py b/bots/admin.py index d71972450..391d28ae5 100644 --- a/bots/admin.py +++ b/bots/admin.py @@ -88,12 +88,21 @@ class Media: class BotIntegrationAdmin(admin.ModelAdmin): search_fields = [ "name", + "billing_account_uid", + "user_language", + "fb_page_id", + "fb_page_name", + "fb_page_access_token", "ig_account_id", "ig_username", - "fb_page_id", "wa_phone_number", + "wa_phone_number_id", + "slack_team_id", + "slack_team_name", "slack_channel_id", - "billing_account_uid", + "slack_channel_name", + "slack_channel_hook_url", + "slack_access_token", ] list_display = [ "name", @@ -343,7 +352,17 @@ class ConversationAdmin(admin.ModelAdmin): list_filter = ["bot_integration", "created_at", LastActiveDeltaFilter] autocomplete_fields = ["bot_integration"] search_fields = [ + "fb_page_id", + "fb_page_name", + "fb_page_access_token", + "ig_account_id", + "ig_username", "wa_phone_number", + "slack_user_id", + "slack_team_id", + "slack_user_name", + "slack_channel_id", + "slack_channel_name", ] + [f"bot_integration__{field}" for field in BotIntegrationAdmin.search_fields] actions = [export_to_csv, export_to_excel] @@ -395,10 +414,11 @@ class MessageAdmin(admin.ModelAdmin): "created_at", ] search_fields = [ - "analysis_result", "role", "content", "display_content", + "platform_msg_id", + "analysis_result", ] + [f"conversation__{field}" for field in ConversationAdmin.search_fields] list_display = [ "__str__", From 4868aa377c28c702be7e00c3d2bea5b160e40181 Mon Sep 17 00:00:00 2001 From: Alexander Metzger Date: Tue, 19 Sep 2023 13:46:07 -0700 Subject: [PATCH 03/25] changed workflow title from h2 to h1 --- daras_ai_v2/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/daras_ai_v2/base.py b/daras_ai_v2/base.py index 638c143f6..c0d594dd2 100644 --- a/daras_ai_v2/base.py +++ b/daras_ai_v2/base.py @@ -167,7 +167,7 @@ def render(self): root_url = self.app_url(example_id=example_id) st.write( - f'## {st.session_state.get(StateKeys.page_title)}', + f'# {st.session_state.get(StateKeys.page_title)}', unsafe_allow_html=True, ) st.write(st.session_state.get(StateKeys.page_notes)) From 11dc1ccfb0036fa5e862e6fc7c491bd119567939 Mon Sep 17 00:00:00 2001 From: Alexander Metzger Date: Tue, 19 Sep 2023 14:19:22 -0700 Subject: [PATCH 04/25] canonical link --- routers/root.py | 1 + 1 file changed, 1 insertion(+) diff --git a/routers/root.py b/routers/root.py index c77e9a132..900ef00cf 100644 --- a/routers/root.py +++ b/routers/root.py @@ -248,6 +248,7 @@ def st_page( uid=uid, example_id=example_id, ) + + [dict(tagName="link", rel="canonical", href=f"/{latest_slug}/{tab}")] # + [ # dict(tagName="link", rel="icon", href="/static/favicon.ico"), # dict(tagName="link", rel="stylesheet", href="/static/css/app.css"), From c0a44aa7067df34ccf7337a006f926611cc9f8cd Mon Sep 17 00:00:00 2001 From: Dev Aggarwal Date: Wed, 20 Sep 2023 02:52:05 +0530 Subject: [PATCH 05/25] fix slack bot actions in personal channels remove un-necesary args passed to slack bot handle users with no real_name --- .../0042_alter_message_platform_msg_id.py | 41 +++++ bots/models.py | 4 +- daras_ai_v2/bots.py | 6 +- daras_ai_v2/facebook_bots.py | 4 +- daras_ai_v2/slack_bot.py | 154 +++++++++--------- routers/slack.py | 60 +++---- 6 files changed, 149 
insertions(+), 120 deletions(-) create mode 100644 bots/migrations/0042_alter_message_platform_msg_id.py diff --git a/bots/migrations/0042_alter_message_platform_msg_id.py b/bots/migrations/0042_alter_message_platform_msg_id.py new file mode 100644 index 000000000..2546b9091 --- /dev/null +++ b/bots/migrations/0042_alter_message_platform_msg_id.py @@ -0,0 +1,41 @@ +# Generated by Django 4.2.3 on 2023-09-19 20:51 + +from django.db import migrations, models + + +def forwards_func(apps, schema_editor): + Message = apps.get_model("bots", "Message") + db_alias = schema_editor.connection.alias + objects = Message.objects.using(db_alias) + objects.filter(platform_msg_id="").update(platform_msg_id=None) + + +class Migration(migrations.Migration): + dependencies = [ + ("bots", "0041_alter_botintegration_slack_create_personal_channels"), + ] + + operations = [ + migrations.AlterField( + model_name="message", + name="platform_msg_id", + field=models.TextField( + blank=True, + default=None, + help_text="The platform's delivered message id", + null=True, + ), + ), + migrations.RunPython(forwards_func), + migrations.AlterField( + model_name="message", + name="platform_msg_id", + field=models.TextField( + blank=True, + default=None, + help_text="The platform's delivered message id", + null=True, + unique=True, + ), + ), + ] diff --git a/bots/models.py b/bots/models.py index bc2d9d9ca..403af6497 100644 --- a/bots/models.py +++ b/bots/models.py @@ -704,7 +704,9 @@ class Message(models.Model): platform_msg_id = models.TextField( blank=True, - default="", + null=True, + default=None, + unique=True, help_text="The platform's delivered message id", ) diff --git a/daras_ai_v2/bots.py b/daras_ai_v2/bots.py index 15caa4f29..cba91429d 100644 --- a/daras_ai_v2/bots.py +++ b/daras_ai_v2/bots.py @@ -69,7 +69,8 @@ def nice_filename(self, mime_type: str) -> str: ext = mimetypes.guess_extension(mime_type) or "" return f"{self.platform}_{self.input_type}_from_{self.user_id}_to_{self.bot_id}{ext}" - def _unpack_bot_integration(self, bi: BotIntegration): + def _unpack_bot_integration(self): + bi = self.convo.bot_integration if bi.saved_run: self.page_cls = Workflow(bi.saved_run.workflow).page_cls self.query_params = self.page_cls.clean_query_params( @@ -305,6 +306,9 @@ def _handle_interactive_msg(bot: BotInterface): try: context_msg = Message.objects.get(platform_msg_id=context_msg_id) except Message.DoesNotExist as e: + traceback.print_exc() + capture_exception(e) + # send error msg as repsonse bot.send_msg(text=ERROR_MSG.format(e)) return if button_id == ButtonIds.feedback_thumbs_up: diff --git a/daras_ai_v2/facebook_bots.py b/daras_ai_v2/facebook_bots.py index 07c889804..68210c6d4 100644 --- a/daras_ai_v2/facebook_bots.py +++ b/daras_ai_v2/facebook_bots.py @@ -35,7 +35,7 @@ def __init__(self, message: dict, metadata: dict): bot_integration=bi, wa_phone_number="+" + self.user_id, )[0] - self._unpack_bot_integration(bi) + self._unpack_bot_integration() def get_input_text(self) -> str | None: try: @@ -325,7 +325,7 @@ def __init__(self, object_name: str, messaging: dict): fb_page_id=self.user_id, ig_account_id=self.user_id, )[0] - self._unpack_bot_integration(bi) + self._unpack_bot_integration() self.bot_id = bi.fb_page_id self._access_token = bi.fb_page_access_token diff --git a/daras_ai_v2/slack_bot.py b/daras_ai_v2/slack_bot.py index f293005e8..20481e15a 100644 --- a/daras_ai_v2/slack_bot.py +++ b/daras_ai_v2/slack_bot.py @@ -29,38 +29,34 @@ SLACK_MAX_SIZE = 3000 -class SlackMessage(TypedDict): - application_id: str - 
channel: str - thread_ts: str - text: str - user_id: str - files: list[dict] - actions: list[dict] - msg_id: str - team_id: str - - class SlackBot(BotInterface): - _read_msg_id: str | None = None - _file: dict | None = None + platform = Platform.SLACK + + _read_rcpt_ts: str | None = None def __init__( self, - message: SlackMessage, + *, + message_ts: str, + team_id: str, + channel_id: str, + user_id: str, + text: str = "", + files: list[dict] = None, + actions: list[dict] = None, ): - self.input_message = message # type: ignore - self.platform = Platform.SLACK - - self.bot_id = message["channel"] - self.user_id = message["user_id"] + self._msg_ts = message_ts + self._team_id = team_id + self.bot_id = channel_id + self.user_id = user_id + self._text = text # Try to find an existing conversation, this could either be a personal channel or the main channel the integration was added to try: self.convo = Conversation.objects.get( slack_channel_id=self.bot_id, slack_user_id=self.user_id, - slack_team_id=message["team_id"], + slack_team_id=self._team_id, ) except Conversation.DoesNotExist: # No existing conversation found, this could be a personal channel or the main channel the integration was added to @@ -68,38 +64,38 @@ def __init__( self.convo = Conversation.objects.get_or_create( slack_channel_id=self.bot_id, slack_user_id=self.user_id, - slack_team_id=message["team_id"], + slack_team_id=self._team_id, defaults=dict( bot_integration=BotIntegration.objects.get( slack_channel_id=self.bot_id, - slack_team_id=message["team_id"], + slack_team_id=self._team_id, ), ), )[0] fetch_missing_convo_metadata(self.convo) + self._access_token = self.convo.bot_integration.slack_access_token - bi = self.convo.bot_integration - self.name = bi.name - self.slack_access_token = bi.slack_access_token - self.read_msg = bi.slack_read_receipt_msg.strip() - self._unpack_bot_integration(bi) - - self._thread_ts = message["thread_ts"] - - self.input_type = "text" - files = message.get("files", []) if files: self._file = files[0] # we only process the first file for now # Additional check required to access file info - https://api.slack.com/apis/channels-between-orgs#check_file_info if self._file.get("file_access") == "check_file_info": - self._file = fetch_file_info(self._file["id"], bi.slack_access_token) + self._file = fetch_file_info(self._file["id"], self._access_token) self.input_type = self._file.get("mimetype", "").split("/")[0] or "unknown" - if message.get("actions"): + else: + self._file = None + self.input_type = "text" + + if actions: + self._actions = actions self.input_type = "interactive" + else: + self._actions = None + + self._unpack_bot_integration() def get_input_text(self) -> str | None: - return self.input_message.get("text") + return self._text def get_input_audio(self) -> str | None: if not self._file: @@ -112,9 +108,7 @@ def get_input_audio(self) -> str | None: "audio/" in mime_type or "video/" in mime_type ), f"Unsupported mime type {mime_type} for {url}" # download file from slack - r = requests.get( - url, headers={"Authorization": f"Bearer {self.slack_access_token}"} - ) + r = requests.get(url, headers={"Authorization": f"Bearer {self._access_token}"}) r.raise_for_status() # convert to wav data, _ = audio_bytes_to_wav(r.content) @@ -128,10 +122,8 @@ def get_input_audio(self) -> str | None: return audio_url def get_interactive_msg_info(self) -> tuple[str, str]: - return ( - self.input_message["actions"][0]["value"], - self.input_message["msg_id"], - ) + button_id = self._actions[0]["value"] + 
return button_id, self._msg_ts def send_msg( self, @@ -147,50 +139,50 @@ def send_msg( if should_translate and self.language and self.language != "en": text = run_google_translate([text], self.language)[0] - if self._read_msg_id and self._read_msg_id != self._thread_ts: + if self._read_rcpt_ts and self._read_rcpt_ts != self._msg_ts: delete_msg( channel=self.bot_id, - thread_ts=self._read_msg_id, - token=self.slack_access_token, + thread_ts=self._read_rcpt_ts, + token=self._access_token, ) - self._read_msg_id = None + self._read_rcpt_ts = None splits = text_splitter(text, chunk_size=SLACK_MAX_SIZE, length_function=len) for doc in splits[:-1]: - self._thread_ts = chat_post_message( + self._msg_ts = chat_post_message( text=doc.text, channel=self.bot_id, channel_is_personal=self.convo.slack_channel_is_personal, - thread_ts=self._thread_ts, - username=self.name, - token=self.slack_access_token, + thread_ts=self._msg_ts, + username=self.convo.bot_integration.name, + token=self._access_token, ) - self._thread_ts = chat_post_message( + self._msg_ts = chat_post_message( text=splits[-1].text, audio=audio, video=video, channel=self.bot_id, channel_is_personal=self.convo.slack_channel_is_personal, - thread_ts=self._thread_ts, - username=self.name, - token=self.slack_access_token, + thread_ts=self._msg_ts, + username=self.convo.bot_integration.name, + token=self._access_token, buttons=buttons or [], ) - return self._thread_ts + return self._msg_ts def mark_read(self): - if not self.read_msg: + text = self.convo.bot_integration.slack_read_receipt_msg.strip() + if not text: return - text = self.read_msg if self.language and self.language != "en": text = run_google_translate([text], self.language)[0] - self._read_msg_id = chat_post_message( + self._read_rcpt_ts = chat_post_message( text=text, channel=self.bot_id, channel_is_personal=self.convo.slack_channel_is_personal, - thread_ts=self._thread_ts, - token=self.slack_access_token, - username=self.name, + thread_ts=self._msg_ts, + token=self._access_token, + username=self.convo.bot_integration.name, ) @@ -239,7 +231,7 @@ def fetch_missing_convo_metadata(convo: Conversation): if not convo.slack_user_name: user_data = fetch_user_info(convo.slack_user_id, token) Conversation.objects.filter(id=convo.id).update( - slack_user_name=user_data["real_name"] + slack_user_name=get_slack_user_name(user_data) ) if not convo.slack_channel_name: channel_data = fetch_channel_info(convo.slack_channel_id, token) @@ -254,6 +246,15 @@ def fetch_missing_convo_metadata(convo: Conversation): raise +def get_slack_user_name(user: dict) -> str | None: + return ( + user.get("real_name") + or user.get("profile", {}).get("display_name") + or user.get("name") + or "" + ) + + def fetch_channel_info(channel: str, token: str) -> dict[str, typing.Any]: res = requests.get( "https://slack.com/api/conversations.info", @@ -306,7 +307,7 @@ def create_personal_channel( if user["is_bot"]: return None user_id = user["id"] - user_name = user["real_name"] + user_name = get_slack_user_name(user) team_id = user["team_id"] # lookup the personal convo with this bot and user convo_lookup = dict( @@ -550,26 +551,21 @@ def create_file_block( ] -def create_button_block( - buttons: list[ - dict[{"type": str, "chat_post_message": dict[{"id": str, "title": str}]}] - ] -) -> list[dict]: +def create_button_block(buttons: list[dict]) -> list[dict]: if not buttons: return [] - elements = [] - for button in buttons: - element = {} - element["type"] = "button" - element["text"] = {"type": "plain_text", "text": 
button["reply"]["title"]} - element["value"] = button["reply"]["id"] - element["action_id"] = "button_" + button["reply"]["id"] - elements.append(element) - return [ { "type": "actions", - "elements": elements, + "elements": [ + { + "type": "button", + "text": {"type": "plain_text", "text": button["reply"]["title"]}, + "value": button["reply"]["id"], + "action_id": "button_" + button["reply"]["id"], + } + for button in buttons + ], } ] diff --git a/routers/slack.py b/routers/slack.py index e71149169..c0a2537f4 100644 --- a/routers/slack.py +++ b/routers/slack.py @@ -15,7 +15,6 @@ from daras_ai_v2.bots import _on_msg, request_json, request_urlencoded_body from daras_ai_v2.slack_bot import ( SlackBot, - SlackMessage, invite_bot_account_to_channel, create_personal_channel, SlackAPIError, @@ -151,18 +150,11 @@ def slack_interaction( if data["type"] != "block_actions": return bot = SlackBot( - SlackMessage( - application_id=data["api_app_id"], - channel=data["channel"]["id"], - thread_ts=data["container"]["thread_ts"], - text="", - user_id=data["user"]["id"], - # user_name=data["user"]["id"], - files=[], - actions=data["actions"], - msg_id=data["container"]["message_ts"], - team_id=data["team"]["id"], - ) + message_ts=data["container"]["message_ts"], + team_id=data["team"]["id"], + user_id=data["user"]["id"], + channel_id=data["channel"]["id"], + actions=data["actions"], ) background_tasks.add_task(_on_msg, bot) @@ -181,27 +173,26 @@ def slack_event( return Response("OK") -def _handle_slack_event(data: dict, background_tasks: BackgroundTasks): - if data["type"] != "event_callback": +def _handle_slack_event(event: dict, background_tasks: BackgroundTasks): + if event["type"] != "event_callback": return - event = data["event"] - if event["type"] != "message": + message = event["event"] + if message["type"] != "message": return - try: - match event.get("subtype", "any"): + match message.get("subtype", "any"): case "channel_join": bi = BotIntegration.objects.get( - slack_channel_id=event["channel"], - slack_team_id=data["team_id"], + slack_channel_id=message["channel"], + slack_team_id=event["team_id"], ) if not bi.slack_create_personal_channels: return try: - user = fetch_user_info(event["user"], bi.slack_access_token) + user = fetch_user_info(message["user"], bi.slack_access_token) except SlackAPIError as e: if e.error == "missing_scope": - print(f"Error: Missing scopes for - {event!r}") + print(f"Error: Missing scopes for - {message!r}") capture_exception(e) else: raise @@ -209,28 +200,23 @@ def _handle_slack_event(data: dict, background_tasks: BackgroundTasks): create_personal_channel(bi, user) case "any" | "slack_audio" | "file_share": - files = event.get("files", []) + files = message.get("files", []) if not files: - event.get("messsage", {}).get("files", []) + message.get("messsage", {}).get("files", []) if not files: - attachments = event.get("attachments", []) + attachments = message.get("attachments", []) files = [ file for attachment in attachments for file in attachment.get("files", []) ] bot = SlackBot( - SlackMessage( - application_id=(data["api_app_id"]), - channel=event["channel"], - thread_ts=event["event_ts"], - text=event.get("text", ""), - user_id=event["user"], - files=files, - actions=[], - msg_id=event["ts"], - team_id=event.get("team", data["team_id"]), - ) + message_ts=message["ts"], + team_id=message.get("team", event["team_id"]), + channel_id=message["channel"], + user_id=message["user"], + text=message.get("text", ""), + files=files, ) background_tasks.add_task(_on_msg, 
bot) From 4dd6522491de02407f6661f95cb10d0fd25aa296 Mon Sep 17 00:00:00 2001 From: Dev Aggarwal Date: Wed, 20 Sep 2023 03:04:22 +0530 Subject: [PATCH 06/25] don't mark read on interactive events --- daras_ai_v2/bots.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/daras_ai_v2/bots.py b/daras_ai_v2/bots.py index cba91429d..c4410be86 100644 --- a/daras_ai_v2/bots.py +++ b/daras_ai_v2/bots.py @@ -157,8 +157,9 @@ def _on_msg(bot: BotInterface): if not bot.page_cls: bot.send_msg(text=PAGE_NOT_CONNECTED_ERROR) return - # mark message as read - bot.mark_read() + if bot.input_type != "interactive": + # mark message as read + bot.mark_read() # get the attached billing account billing_account_user = AppUser.objects.get_or_create_from_uid( bot.billing_account_uid From b9c8b198da2c67ac23474a13fcccc094ff9d606f Mon Sep 17 00:00:00 2001 From: Kaustubh Maske Patil Date: Wed, 20 Sep 2023 13:20:34 +0530 Subject: [PATCH 07/25] Add Procfile to start all services with one command Starting all services in different terminals is tedious. This PR adds a Procfile and a honcho dependency that will start up all required processes in a single command. --- Procfile | 26 +++++++++++++ README.md | 42 ++++++++++++--------- poetry.lock | 100 +++++++++++++++++++++++++++---------------------- pyproject.toml | 1 + 4 files changed, 107 insertions(+), 62 deletions(-) create mode 100644 Procfile diff --git a/Procfile b/Procfile new file mode 100644 index 000000000..c4032fdb5 --- /dev/null +++ b/Procfile @@ -0,0 +1,26 @@ +# this Procfile can be run with `honcho`, and it can start multiple processes +# with a single command. Handy for development. All of the below commands are +# setup to run in dev mode only, and not in prod. +# +# The assumptions here are that: +# - you have redis installed but not running as a background service +# - you have rabbitmq installed but not running as a background service +# - your local gooey-ui repo is at ../gooey-ui/ +# +# You can comment any of the processes if you have background services running +# for them. You can also change the path for the `ui` process from `../gooey-ui/` +# to wherever your local gooey-ui directory is. + +api: poetry run uvicorn server:app --host 127.0.0.1 --port 8080 --reload + +admin: poetry run python manage.py runserver 127.0.0.1:8000 + +dashboard: poetry run streamlit run Home.py --server.port 8501 --server.headless true + +rabbitmq: rabbitmq-server + +redis: redis-server + +celery: poetry run celery -A celeryapp worker + +ui: /bin/zsh -c "cd ../gooey-ui/; PORT=3000 npm run dev" diff --git a/README.md b/README.md index 9159ddf37..28048f250 100644 --- a/README.md +++ b/README.md @@ -1,37 +1,43 @@ ## Setup + * Install [pyenv](https://github.com/pyenv/pyenv) & install the same python version as in our [Dockerfile](Dockerfile) * Install [poetry](https://python-poetry.org/docs/) * Create & active a virtualenv (e.g. 
`poetry shell`) * Run `poetry install --with dev` * Create an `.env` file from `.env.example` (Read [12factor.net/config](https://12factor.net/config)) * Run `./manage.py migrate` +* Install the zbar shared library (`brew install zbar`) +* Install [redis](https://redis.io/docs/getting-started/installation/install-redis-on-mac-os/) and [rabbitmq](https://www.rabbitmq.com/install-homebrew.html) ## Run -### API + GUI server - -```bash -uvicorn server:app --host 0.0.0.0 --port 8080 --reload -``` - -Open [localhost:8080](localhost:8080) in your browser +You can start all required processes in one command with Honcho: -### Admin Site - -```bash -python3 manage.py runserver 0.0.0.0:8000 +```shell +$ poetry run honcho start ``` -Open [localhost:8000](localhost:8000) in your browser - +The processes that it starts are defined in [`Procfile`](Procfile). +Currently they are these: -### Usage Dashboard +| Service | Port | +| ------- | ---- | +| API + GUI Server | 8080 | +| Admin site | 8000 | +| Usage dashboard | 8501 | +| Redis | 6379 | +| RabbitMQ | 5672 | +| Celery | - | +| UI | 3000 | -``` -streamlit run Home.py --server.port 8501 -``` +This default startup assumes that Redis and RabbitMQ are installed, but not +running as system services (e.g. with `brew services`) already. It also assumes +that the gooey-ui repo can be found at `../gooey-ui/` (adjacent to where the +gooey-server repo sits). You can open the Procfile and comment any of these +if you want to run it in some other way. -Open [localhost:8501](localhost:8501) in your browser +**Note:** the Celery worker must be manually restarted on code changes. You +can do this by stopping and starting Honcho. ## To run any recipe diff --git a/poetry.lock b/poetry.lock index 1e1f23de3..3886e97da 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. 
[[package]] name = "absl-py" @@ -888,6 +888,7 @@ files = [ {file = "contourpy-1.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18a64814ae7bce73925131381603fff0116e2df25230dfc80d6d690aa6e20b37"}, {file = "contourpy-1.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90c81f22b4f572f8a2110b0b741bb64e5a6427e0a198b2cdc1fbaf85f352a3aa"}, {file = "contourpy-1.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:53cc3a40635abedbec7f1bde60f8c189c49e84ac180c665f2cd7c162cc454baa"}, + {file = "contourpy-1.1.0-cp310-cp310-win32.whl", hash = "sha256:9b2dd2ca3ac561aceef4c7c13ba654aaa404cf885b187427760d7f7d4c57cff8"}, {file = "contourpy-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:1f795597073b09d631782e7245016a4323cf1cf0b4e06eef7ea6627e06a37ff2"}, {file = "contourpy-1.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0b7b04ed0961647691cfe5d82115dd072af7ce8846d31a5fac6c142dcce8b882"}, {file = "contourpy-1.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:27bc79200c742f9746d7dd51a734ee326a292d77e7d94c8af6e08d1e6c15d545"}, @@ -896,6 +897,7 @@ files = [ {file = "contourpy-1.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5cec36c5090e75a9ac9dbd0ff4a8cf7cecd60f1b6dc23a374c7d980a1cd710e"}, {file = "contourpy-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f0cbd657e9bde94cd0e33aa7df94fb73c1ab7799378d3b3f902eb8eb2e04a3a"}, {file = "contourpy-1.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:181cbace49874f4358e2929aaf7ba84006acb76694102e88dd15af861996c16e"}, + {file = "contourpy-1.1.0-cp311-cp311-win32.whl", hash = "sha256:edb989d31065b1acef3828a3688f88b2abb799a7db891c9e282df5ec7e46221b"}, {file = "contourpy-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:fb3b7d9e6243bfa1efb93ccfe64ec610d85cfe5aec2c25f97fbbd2e58b531256"}, {file = "contourpy-1.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bcb41692aa09aeb19c7c213411854402f29f6613845ad2453d30bf421fe68fed"}, {file = "contourpy-1.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5d123a5bc63cd34c27ff9c7ac1cd978909e9c71da12e05be0231c608048bb2ae"}, @@ -904,6 +906,7 @@ files = [ {file = "contourpy-1.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:317267d915490d1e84577924bd61ba71bf8681a30e0d6c545f577363157e5e94"}, {file = "contourpy-1.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d551f3a442655f3dcc1285723f9acd646ca5858834efeab4598d706206b09c9f"}, {file = "contourpy-1.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e7a117ce7df5a938fe035cad481b0189049e8d92433b4b33aa7fc609344aafa1"}, + {file = "contourpy-1.1.0-cp38-cp38-win32.whl", hash = "sha256:108dfb5b3e731046a96c60bdc46a1a0ebee0760418951abecbe0fc07b5b93b27"}, {file = "contourpy-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:d4f26b25b4f86087e7d75e63212756c38546e70f2a92d2be44f80114826e1cd4"}, {file = "contourpy-1.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc00bb4225d57bff7ebb634646c0ee2a1298402ec10a5fe7af79df9a51c1bfd9"}, {file = "contourpy-1.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:189ceb1525eb0655ab8487a9a9c41f42a73ba52d6789754788d1883fb06b2d8a"}, @@ -912,6 +915,7 @@ files = [ {file = "contourpy-1.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:143dde50520a9f90e4a2703f367cf8ec96a73042b72e68fcd184e1279962eb6f"}, {file = "contourpy-1.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:e94bef2580e25b5fdb183bf98a2faa2adc5b638736b2c0a4da98691da641316a"}, {file = "contourpy-1.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ed614aea8462735e7d70141374bd7650afd1c3f3cb0c2dbbcbe44e14331bf002"}, + {file = "contourpy-1.1.0-cp39-cp39-win32.whl", hash = "sha256:71551f9520f008b2950bef5f16b0e3587506ef4f23c734b71ffb7b89f8721999"}, {file = "contourpy-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:438ba416d02f82b692e371858143970ed2eb6337d9cdbbede0d8ad9f3d7dd17d"}, {file = "contourpy-1.1.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a698c6a7a432789e587168573a864a7ea374c6be8d4f31f9d87c001d5a843493"}, {file = "contourpy-1.1.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:397b0ac8a12880412da3551a8cb5a187d3298a72802b45a3bd1805e204ad8439"}, @@ -1469,33 +1473,6 @@ grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio-status (>=1.33.2,<2.0dev)"] grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0dev)"] grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0dev)"] -[[package]] -name = "google-api-core" -version = "2.11.0" -description = "Google API client core library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google-api-core-2.11.0.tar.gz", hash = "sha256:4b9bb5d5a380a0befa0573b302651b8a9a89262c1730e37bf423cec511804c22"}, - {file = "google_api_core-2.11.0-py3-none-any.whl", hash = "sha256:ce222e27b0de0d7bc63eb043b956996d6dccab14cc3b690aaea91c9cc99dc16e"}, -] - -[package.dependencies] -google-auth = ">=2.14.1,<3.0dev" -googleapis-common-protos = ">=1.56.2,<2.0dev" -grpcio = [ - {version = ">=1.33.2,<2.0dev", optional = true, markers = "extra == \"grpc\""}, - {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, -] -grpcio-status = {version = ">=1.33.2,<2.0dev", optional = true, markers = "extra == \"grpc\""} -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" -requests = ">=2.18.0,<3.0.0dev" - -[package.extras] -grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "grpcio-status (>=1.33.2,<2.0dev)", "grpcio-status (>=1.49.1,<2.0dev)"] -grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0dev)"] -grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0dev)"] - [[package]] name = "google-api-core" version = "2.11.1" @@ -1510,11 +1487,8 @@ files = [ [package.dependencies] google-auth = ">=2.14.1,<3.0.dev0" googleapis-common-protos = ">=1.56.2,<2.0.dev0" -grpcio = [ - {version = ">=1.33.2,<2.0dev", optional = true, markers = "extra == \"grpc\""}, - {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, -] -grpcio-status = {version = ">=1.33.2,<2.0.dev0", optional = true, markers = "extra == \"grpc\""} +grpcio = {version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""} +grpcio-status = {version = ">=1.33.2,<2.0.dev0", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""} protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" requests = ">=2.18.0,<3.0.0.dev0" @@ -1631,8 +1605,8 @@ files = [ google-api-core = {version = ">=1.34.0,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} google-cloud-core = ">=1.4.1,<3.0.0dev" proto-plus = [ - {version = ">=1.22.0,<2.0.0dev", markers = "python_version < \"3.11\""}, {version = 
">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\""}, + {version = ">=1.22.0,<2.0.0dev", markers = "python_version < \"3.11\""}, ] protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" @@ -1650,8 +1624,8 @@ files = [ [package.dependencies] google-api-core = {version = ">=1.34.0,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} proto-plus = [ - {version = ">=1.22.0,<2.0.0dev", markers = "python_version < \"3.11\""}, {version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\""}, + {version = ">=1.22.0,<2.0.0dev", markers = "python_version < \"3.11\""}, ] protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" @@ -1690,8 +1664,8 @@ files = [ [package.dependencies] google-api-core = {version = ">=1.34.0,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} proto-plus = [ - {version = ">=1.22.0,<2.0.0dev", markers = "python_version < \"3.11\""}, {version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\""}, + {version = ">=1.22.0,<2.0.0dev", markers = "python_version < \"3.11\""}, ] protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" @@ -1854,6 +1828,7 @@ files = [ {file = "greenlet-2.0.2-cp27-cp27m-win32.whl", hash = "sha256:6c3acb79b0bfd4fe733dff8bc62695283b57949ebcca05ae5c129eb606ff2d74"}, {file = "greenlet-2.0.2-cp27-cp27m-win_amd64.whl", hash = "sha256:283737e0da3f08bd637b5ad058507e578dd462db259f7f6e4c5c365ba4ee9343"}, {file = "greenlet-2.0.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d27ec7509b9c18b6d73f2f5ede2622441de812e7b1a80bbd446cb0633bd3d5ae"}, + {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d967650d3f56af314b72df7089d96cda1083a7fc2da05b375d2bc48c82ab3f3c"}, {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:30bcf80dda7f15ac77ba5af2b961bdd9dbc77fd4ac6105cee85b0d0a5fcf74df"}, {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26fbfce90728d82bc9e6c38ea4d038cba20b7faf8a0ca53a9c07b67318d46088"}, {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9190f09060ea4debddd24665d6804b995a9c122ef5917ab26e1566dcc712ceeb"}, @@ -1862,6 +1837,7 @@ files = [ {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:76ae285c8104046b3a7f06b42f29c7b73f77683df18c49ab5af7983994c2dd91"}, {file = "greenlet-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:2d4686f195e32d36b4d7cf2d166857dbd0ee9f3d20ae349b6bf8afc8485b3645"}, {file = "greenlet-2.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c4302695ad8027363e96311df24ee28978162cdcdd2006476c43970b384a244c"}, + {file = "greenlet-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d4606a527e30548153be1a9f155f4e283d109ffba663a15856089fb55f933e47"}, {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c48f54ef8e05f04d6eff74b8233f6063cb1ed960243eacc474ee73a2ea8573ca"}, {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1846f1b999e78e13837c93c778dcfc3365902cfb8d1bdb7dd73ead37059f0d0"}, {file = 
"greenlet-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a06ad5312349fec0ab944664b01d26f8d1f05009566339ac6f63f56589bc1a2"}, @@ -1891,6 +1867,7 @@ files = [ {file = "greenlet-2.0.2-cp37-cp37m-win32.whl", hash = "sha256:3f6ea9bd35eb450837a3d80e77b517ea5bc56b4647f5502cd28de13675ee12f7"}, {file = "greenlet-2.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:7492e2b7bd7c9b9916388d9df23fa49d9b88ac0640db0a5b4ecc2b653bf451e3"}, {file = "greenlet-2.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b864ba53912b6c3ab6bcb2beb19f19edd01a6bfcbdfe1f37ddd1778abfe75a30"}, + {file = "greenlet-2.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1087300cf9700bbf455b1b97e24db18f2f77b55302a68272c56209d5587c12d1"}, {file = "greenlet-2.0.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:ba2956617f1c42598a308a84c6cf021a90ff3862eddafd20c3333d50f0edb45b"}, {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3a569657468b6f3fb60587e48356fe512c1754ca05a564f11366ac9e306526"}, {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8eab883b3b2a38cc1e050819ef06a7e6344d4a990d24d45bc6f2cf959045a45b"}, @@ -1899,6 +1876,7 @@ files = [ {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0ef99cdbe2b682b9ccbb964743a6aca37905fda5e0452e5ee239b1654d37f2a"}, {file = "greenlet-2.0.2-cp38-cp38-win32.whl", hash = "sha256:b80f600eddddce72320dbbc8e3784d16bd3fb7b517e82476d8da921f27d4b249"}, {file = "greenlet-2.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:4d2e11331fc0c02b6e84b0d28ece3a36e0548ee1a1ce9ddde03752d9b79bba40"}, + {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8512a0c38cfd4e66a858ddd1b17705587900dd760c6003998e9472b77b56d417"}, {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:88d9ab96491d38a5ab7c56dd7a3cc37d83336ecc564e4e8816dbed12e5aaefc8"}, {file = "greenlet-2.0.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:561091a7be172ab497a3527602d467e2b3fbe75f9e783d8b8ce403fa414f71a6"}, {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:971ce5e14dc5e73715755d0ca2975ac88cfdaefcaab078a284fea6cfabf866df"}, @@ -2048,6 +2026,23 @@ files = [ [package.extras] test = ["pytest (>=2.1.0)"] +[[package]] +name = "honcho" +version = "1.1.0" +description = "Honcho: a Python clone of Foreman. For managing Procfile-based applications." 
+optional = false +python-versions = "*" +files = [ + {file = "honcho-1.1.0-py2.py3-none-any.whl", hash = "sha256:a4d6e3a88a7b51b66351ecfc6e9d79d8f4b87351db9ad7e923f5632cc498122f"}, + {file = "honcho-1.1.0.tar.gz", hash = "sha256:c5eca0bded4bef6697a23aec0422fd4f6508ea3581979a3485fc4b89357eb2a9"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} + +[package.extras] +export = ["jinja2 (>=2.7,<3)"] + [[package]] name = "html-sanitizer" version = "1.9.3" @@ -2845,6 +2840,16 @@ files = [ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, @@ -3405,12 +3410,9 @@ files = [ [package.dependencies] numpy = [ - {version = ">=1.21.2", markers = "python_version >= \"3.10\""}, - {version = ">=1.21.4", markers = "python_version >= \"3.10\" and platform_system == \"Darwin\""}, - {version = ">=1.19.3", markers = "python_version >= \"3.6\" and platform_system == \"Linux\" and platform_machine == \"aarch64\" or python_version >= \"3.9\""}, - {version = ">=1.17.0", markers = "python_version >= \"3.7\""}, - {version = ">=1.17.3", markers = "python_version >= \"3.8\""}, {version = 
">=1.23.5", markers = "python_version >= \"3.11\""}, + {version = ">=1.21.4", markers = "python_version >= \"3.10\" and platform_system == \"Darwin\" and python_version < \"3.11\""}, + {version = ">=1.21.2", markers = "platform_system != \"Darwin\" and python_version >= \"3.10\" and python_version < \"3.11\""}, ] [[package]] @@ -3468,8 +3470,8 @@ files = [ [package.dependencies] numpy = [ - {version = ">=1.22.4", markers = "python_version < \"3.11\""}, {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, + {version = ">=1.22.4", markers = "python_version < \"3.11\""}, ] python-dateutil = ">=2.8.2" pytz = ">=2020.1" @@ -4357,6 +4359,7 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -4364,8 +4367,15 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = 
"PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -4382,6 +4392,7 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -4389,6 +4400,7 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -5214,7 +5226,7 @@ files = [ ] [package.dependencies] -greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\")"} +greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} [package.extras] aiomysql = ["aiomysql", "greenlet (!=0.4.17)"] @@ -6139,4 +6151,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = ">=3.10,<3.13" -content-hash = 
"d6562e5c259fa3bde5ae541012cfe4576413c97b8304756ecab8c49d2059ee5f" +content-hash = "e1aa198ba112e95195815327669b1e7687428ab0510c5485f057442a207b982e" diff --git a/pyproject.toml b/pyproject.toml index 3169f751c..3ea6240be 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -79,6 +79,7 @@ rank-bm25 = "^0.2.2" [tool.poetry.group.dev.dependencies] watchdog = "^2.1.9" ipython = "^8.5.0" +honcho = "^1.1.0" [build-system] requires = ["poetry-core"] From 540cd7258b369b0460c93568ec94af936d0f8497 Mon Sep 17 00:00:00 2001 From: Kaustubh Maske Patil Date: Wed, 20 Sep 2023 14:11:54 +0530 Subject: [PATCH 08/25] Procfile: aoivd specifying an explicit shell from gooey-ui's command This makes it compatible with devs that use other shells --- Procfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Procfile b/Procfile index c4032fdb5..1e540a406 100644 --- a/Procfile +++ b/Procfile @@ -23,4 +23,4 @@ redis: redis-server celery: poetry run celery -A celeryapp worker -ui: /bin/zsh -c "cd ../gooey-ui/; PORT=3000 npm run dev" +ui: cd ../gooey-ui/; PORT=3000 npm run dev From f6cf38669303aaf8b48a22a6e2c1839321caeec7 Mon Sep 17 00:00:00 2001 From: Kaustubh M <37668193+nikochiko@users.noreply.github.com> Date: Wed, 20 Sep 2023 14:49:38 +0530 Subject: [PATCH 09/25] Fix typo in bots/models.py: comapre -> compare This typo was only visible in the admin dashboard where a workflow needs to be selected. The migration is also of no consequence because the altered field only stores the integer value and not the text representations of the integer choice. --- .../0043_alter_savedrun_workflow.py | 50 +++++++++++++++++++ bots/models.py | 2 +- 2 files changed, 51 insertions(+), 1 deletion(-) create mode 100644 bots/migrations/0043_alter_savedrun_workflow.py diff --git a/bots/migrations/0043_alter_savedrun_workflow.py b/bots/migrations/0043_alter_savedrun_workflow.py new file mode 100644 index 000000000..a7b1b93e4 --- /dev/null +++ b/bots/migrations/0043_alter_savedrun_workflow.py @@ -0,0 +1,50 @@ +# Generated by Django 4.2.5 on 2023-09-20 09:18 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("bots", "0042_alter_message_platform_msg_id"), + ] + + operations = [ + migrations.AlterField( + model_name="savedrun", + name="workflow", + field=models.IntegerField( + choices=[ + (1, "Doc Search"), + (2, "Doc Summary"), + (3, "Google GPT"), + (4, "Copilot"), + (5, "Lipysnc + TTS"), + (6, "Text to Speech"), + (7, "Speech Recognition"), + (8, "Lipsync"), + (9, "Deforum Animation"), + (10, "Compare Text2Img"), + (11, "Text2Audio"), + (12, "Img2Img"), + (13, "Face Inpainting"), + (14, "Google Image Gen"), + (15, "Compare AI Upscalers"), + (16, "SEO Summary"), + (17, "Email Face Inpainting"), + (18, "Social Lookup Email"), + (19, "Object Inpainting"), + (20, "Image Segmentation"), + (21, "Compare LLM"), + (22, "Chyron Plant"), + (23, "Letter Writer"), + (24, "Smart GPT"), + (25, "AI QR Code"), + (26, "Doc Extract"), + (27, "Related QnA Maker"), + (28, "Related QnA Maker Doc"), + (29, "Embeddings"), + ], + default=4, + ), + ), + ] diff --git a/bots/models.py b/bots/models.py index 403af6497..b4eb715c2 100644 --- a/bots/models.py +++ b/bots/models.py @@ -53,7 +53,7 @@ class Workflow(models.IntegerChoices): IMG_2_IMG = (12, "Img2Img") FACE_INPAINTING = (13, "Face Inpainting") GOOGLE_IMAGE_GEN = (14, "Google Image Gen") - COMPARE_UPSCALER = (15, "Comapre AI Upscalers") + COMPARE_UPSCALER = (15, "Compare AI Upscalers") SEO_SUMMARY = (16, "SEO Summary") 
EMAIL_FACE_INPAINTING = (17, "Email Face Inpainting") SOCIAL_LOOKUP_EMAIL = (18, "Social Lookup Email") From 0bad58623cc91503618fab1d03ef3af715940b76 Mon Sep 17 00:00:00 2001 From: Kaustubh Maske Patil <37668193+nikochiko@users.noreply.github.com> Date: Wed, 20 Sep 2023 16:06:33 +0530 Subject: [PATCH 10/25] Add instructions for postgresql installation, prefer background services for redis/rabbitmq over honcho The local dev setup fails with fixtures when run with SQLite, but works fine with postgresql. This could be due to some constraints that postgres enforces but not sqlite. It's also better to use postgres for local dev too because that would be consistent with production environment. The other change is preferring background services for redis/rabbitmq over starting them through honcho. This is because starting/stopping them takes longer, especially for rabbitmq. Honcho should be quick to start and stop. This also makes sure that task results persist in Redis and unprocessed tasks persist in RabbitMQ over honcho restarts. --- .env.example | 5 +++++ Procfile | 4 ---- README.md | 29 +++++++++++++---------------- 3 files changed, 18 insertions(+), 20 deletions(-) diff --git a/.env.example b/.env.example index 8ed136da7..dc3c29b99 100644 --- a/.env.example +++ b/.env.example @@ -1,3 +1,8 @@ APP_BASE_URL=http://localhost:3000 API_BASE_URL=http://localhost:8080 GS_BUCKET_NAME=dara-c1b52.appspot.com +PGHOST=127.0.0.1 +PGPORT=5432 +PGUSER=gooey +PGDATABASE=gooey +PGPASSWORD=gooey diff --git a/Procfile b/Procfile index 1e540a406..a32d9aafc 100644 --- a/Procfile +++ b/Procfile @@ -17,10 +17,6 @@ admin: poetry run python manage.py runserver 127.0.0.1:8000 dashboard: poetry run streamlit run Home.py --server.port 8501 --server.headless true -rabbitmq: rabbitmq-server - -redis: redis-server - celery: poetry run celery -A celeryapp worker ui: cd ../gooey-ui/; PORT=3000 npm run dev diff --git a/README.md b/README.md index 28048f250..41acb3328 100644 --- a/README.md +++ b/README.md @@ -2,12 +2,17 @@ * Install [pyenv](https://github.com/pyenv/pyenv) & install the same python version as in our [Dockerfile](Dockerfile) * Install [poetry](https://python-poetry.org/docs/) -* Create & active a virtualenv (e.g. `poetry shell`) +* Create & activate a virtualenv (e.g. `poetry shell`) * Run `poetry install --with dev` +* Install [redis](https://redis.io/docs/getting-started/installation/install-redis-on-mac-os/), [rabbitmq](https://www.rabbitmq.com/install-homebrew.html), and [postgresql](https://formulae.brew.sh/formula/postgresql@15) (e.g. `brew install redis rabbitmq postgresql@15`) +* Enable background services for `redis`, `rabbitmq`, and `postgresql` (e.g. 
with `brew services start redis` and similar for `rabbitmq` and `postgresql`) +* Create a user and database for gooey in PostgreSQL: + * `createuser gooey --pwprompt` (when prompted for password, enter `gooey`) + * `createdb gooey -O gooey` + * make sure you are able to access the database with `psql -W -U gooey gooey` (and when prompted for password, entering `gooey`) * Create an `.env` file from `.env.example` (Read [12factor.net/config](https://12factor.net/config)) * Run `./manage.py migrate` -* Install the zbar shared library (`brew install zbar`) -* Install [redis](https://redis.io/docs/getting-started/installation/install-redis-on-mac-os/) and [rabbitmq](https://www.rabbitmq.com/install-homebrew.html) +* Install the zbar library (`brew install zbar`) ## Run @@ -25,16 +30,14 @@ Currently they are these: | API + GUI Server | 8080 | | Admin site | 8000 | | Usage dashboard | 8501 | -| Redis | 6379 | -| RabbitMQ | 5672 | | Celery | - | | UI | 3000 | -This default startup assumes that Redis and RabbitMQ are installed, but not -running as system services (e.g. with `brew services`) already. It also assumes -that the gooey-ui repo can be found at `../gooey-ui/` (adjacent to where the -gooey-server repo sits). You can open the Procfile and comment any of these -if you want to run it in some other way. +This default startup assumes that Redis, RabbitMQ, and PostgreSQL are installed and running +as background services on ports 6379, 5672, and 5432 respectively. +It also assumes that the gooey-ui repo can be found at `../gooey-ui/` (adjacent to where the +gooey-server repo sits). You can open the Procfile and comment this out if you don't need +to run it. **Note:** the Celery worker must be manually restarted on code changes. You can do this by stopping and starting Honcho. @@ -42,12 +45,6 @@ can do this by stopping and starting Honcho. 
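Before starting Honcho, it can help to confirm the background services are actually listening. A minimal sketch (not part of the repo; assumes the default ports listed above):

```python
import socket

# Default ports assumed above for the background services.
SERVICES = {"redis": 6379, "rabbitmq": 5672, "postgresql": 5432}

for name, port in SERVICES.items():
    try:
        # A successful TCP connection means the service is up and listening.
        with socket.create_connection(("127.0.0.1", port), timeout=2):
            print(f"{name}: reachable on port {port}")
    except OSError as exc:
        print(f"{name}: NOT reachable on port {port} ({exc})")
```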
## To run any recipe * Save `serviceAccountKey.json` to project root -* Install & start [redis](https://redis.io/docs/getting-started/installation/install-redis-on-mac-os/) -* Install & start [rabbitmq](https://www.rabbitmq.com/install-homebrew.html) -* Run the celery worker (**Note:** you must manually restart it on code changes) -```bash -celery -A celeryapp worker -``` ## To connect to our GPU cluster From 74e4a04b0dc275378e330a45500daeac6ad0acc8 Mon Sep 17 00:00:00 2001 From: Kaustubh Maske Patil <37668193+nikochiko@users.noreply.github.com> Date: Wed, 20 Sep 2023 17:28:21 +0530 Subject: [PATCH 11/25] Add canonical URL with domain root for all workflows This does not yet make an exception for example pages --- routers/root.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/routers/root.py b/routers/root.py index 900ef00cf..f62f59d15 100644 --- a/routers/root.py +++ b/routers/root.py @@ -41,6 +41,7 @@ DEFAULT_LOGIN_REDIRECT = "/explore/" DEFAULT_LOGOUT_REDIRECT = "/" +CANONICAL_URL_ROOT = "https://gooey.ai" @app.get("/sitemap.xml/") @@ -248,7 +249,7 @@ def st_page( uid=uid, example_id=example_id, ) - + [dict(tagName="link", rel="canonical", href=f"/{latest_slug}/{tab}")] + + [dict(tagName="link", rel="canonical", href=f"{CANONICAL_URL_ROOT}/{latest_slug}/{tab}")] # + [ # dict(tagName="link", rel="icon", href="/static/favicon.ico"), # dict(tagName="link", rel="stylesheet", href="/static/css/app.css"), From 54697ec44a67af06b973478c1354d8a0b31ac270 Mon Sep 17 00:00:00 2001 From: Ravi Shankar <42587315+ravishankar63@users.noreply.github.com> Date: Wed, 20 Sep 2023 17:29:33 +0530 Subject: [PATCH 12/25] embed video for youtube bot --- recipes/DocExtract.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/recipes/DocExtract.py b/recipes/DocExtract.py index 6a7fa7ed2..5e098c7f1 100644 --- a/recipes/DocExtract.py +++ b/recipes/DocExtract.py @@ -8,6 +8,7 @@ from furl import furl from pydantic import BaseModel from pypdf import PdfWriter, PdfReader +from daras_ai_v2.loom_video_widget import youtube_video import gooey_ui as st from bots.models import Workflow @@ -103,6 +104,9 @@ def render_example(self, state: dict): st.write("**Google Sheets URL**") st.write(state.get("sheet_url")) + def render_usage_guide(self): + youtube_video("p7ZLb-loR_4") + def render_settings(self): st.text_area( "### 👩‍🏫 Task Instructions", From dbf33f53ccfa380ea1bb922da739e8b845fce93e Mon Sep 17 00:00:00 2001 From: Kaustubh Maske Patil <37668193+nikochiko@users.noreply.github.com> Date: Wed, 20 Sep 2023 18:35:06 +0530 Subject: [PATCH 13/25] Make exception in case of canonical URLs for example page This commit makes an exception for the canonical URL of the "Run" tab of an example -- it should be indexed along with its `example_id` query param. --- routers/root.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/routers/root.py b/routers/root.py index f62f59d15..4712fcfc7 100644 --- a/routers/root.py +++ b/routers/root.py @@ -240,6 +240,16 @@ def st_page( ) except RedirectException as e: return RedirectResponse(e.url, status_code=e.status_code) + + # Canonical URLs should not include uid or run_id (don't index specific runs). 
+ # In the case of examples, all tabs other than "Run" are duplicates of the page + # without the `example_id`, and so their canonical shouldn't include `example_id` + canonical = furl(CANONICAL_URL_ROOT).add( + path=f"{latest_slug}/{tab}/", + args={"example_id": example_id} if tab == "" and example_id else {}, + ) + canonical.path.normalize() + ret |= { "meta": build_meta_tags( url=str(request.url), @@ -249,7 +259,7 @@ def st_page( uid=uid, example_id=example_id, ) - + [dict(tagName="link", rel="canonical", href=f"{CANONICAL_URL_ROOT}/{latest_slug}/{tab}")] + + [dict(tagName="link", rel="canonical", href=canonical.url)] # + [ # dict(tagName="link", rel="icon", href="/static/favicon.ico"), # dict(tagName="link", rel="stylesheet", href="/static/css/app.css"), From bfabec3b7b9c30b98fd6a460378bf01a17fa3db2 Mon Sep 17 00:00:00 2001 From: Alexander Metzger Date: Wed, 20 Sep 2023 09:09:51 -0700 Subject: [PATCH 14/25] accepted dev's suggestion --- routers/root.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/routers/root.py b/routers/root.py index 4712fcfc7..7f5936235 100644 --- a/routers/root.py +++ b/routers/root.py @@ -41,7 +41,6 @@ DEFAULT_LOGIN_REDIRECT = "/explore/" DEFAULT_LOGOUT_REDIRECT = "/" -CANONICAL_URL_ROOT = "https://gooey.ai" @app.get("/sitemap.xml/") @@ -244,11 +243,14 @@ def st_page( # Canonical URLs should not include uid or run_id (don't index specific runs). # In the case of examples, all tabs other than "Run" are duplicates of the page # without the `example_id`, and so their canonical shouldn't include `example_id` - canonical = furl(CANONICAL_URL_ROOT).add( - path=f"{latest_slug}/{tab}/", - args={"example_id": example_id} if tab == "" and example_id else {}, + canonical_url = str( + furl( + str(settings.APP_BASE_URL), + query_params={"example_id": example_id} if not tab and example_id else {}, + ) + / latest_slug + / tab ) - canonical.path.normalize() ret |= { "meta": build_meta_tags( @@ -259,7 +261,7 @@ def st_page( uid=uid, example_id=example_id, ) - + [dict(tagName="link", rel="canonical", href=canonical.url)] + + [dict(tagName="link", rel="canonical", href=canonical_url)] # + [ # dict(tagName="link", rel="icon", href="/static/favicon.ico"), # dict(tagName="link", rel="stylesheet", href="/static/css/app.css"), From 8d58e899d7327bbc87f0af67d6df5505c03208a8 Mon Sep 17 00:00:00 2001 From: Kaustubh Maske Patil <37668193+nikochiko@users.noreply.github.com> Date: Wed, 20 Sep 2023 23:13:00 +0530 Subject: [PATCH 15/25] Fix: add trailing slash at the end of canonical URLs --- routers/root.py | 1 + 1 file changed, 1 insertion(+) diff --git a/routers/root.py b/routers/root.py index 7f5936235..386cc9813 100644 --- a/routers/root.py +++ b/routers/root.py @@ -250,6 +250,7 @@ def st_page( ) / latest_slug / tab + / "/" # preserve trailing slash ) ret |= { From b77c6621d807e2bca6a07717334e0be799d4e031 Mon Sep 17 00:00:00 2001 From: Kaustubh Maske Patil <37668193+nikochiko@users.noreply.github.com> Date: Thu, 21 Sep 2023 13:28:05 +0530 Subject: [PATCH 16/25] Use sqlcreate helper for creating user/db --- README.md | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 41acb3328..3df6d00bc 100644 --- a/README.md +++ b/README.md @@ -6,9 +6,8 @@ * Run `poetry install --with dev` * Install [redis](https://redis.io/docs/getting-started/installation/install-redis-on-mac-os/), [rabbitmq](https://www.rabbitmq.com/install-homebrew.html), and [postgresql](https://formulae.brew.sh/formula/postgresql@15) (e.g. 
`brew install redis rabbitmq postgresql@15`) * Enable background services for `redis`, `rabbitmq`, and `postgresql` (e.g. with `brew services start redis` and similar for `rabbitmq` and `postgresql`) -* Create a user and database for gooey in PostgreSQL: - * `createuser gooey --pwprompt` (when prompted for password, enter `gooey`) - * `createdb gooey -O gooey` +* Use `sqlcreate` helper to create a user and database for gooey: + * `./manage.py sqlcreate | psql postgres` * make sure you are able to access the database with `psql -W -U gooey gooey` (and when prompted for password, entering `gooey`) * Create an `.env` file from `.env.example` (Read [12factor.net/config](https://12factor.net/config)) * Run `./manage.py migrate` From 82037650b25db07dafd200e8bd77e0eeb9926e96 Mon Sep 17 00:00:00 2001 From: Ravi Shankar <42587315+ravishankar63@users.noreply.github.com> Date: Thu, 21 Sep 2023 14:15:02 +0530 Subject: [PATCH 17/25] added docsummary and lipsync images --- recipes/DocSummary.py | 2 +- recipes/Lipsync.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/recipes/DocSummary.py b/recipes/DocSummary.py index f49025c7f..5e121a6ab 100644 --- a/recipes/DocSummary.py +++ b/recipes/DocSummary.py @@ -27,7 +27,7 @@ ) from recipes.GoogleGPT import render_output_with_refs, GoogleGPTPage -DEFAULT_DOC_SUMMARY_META_IMG = "https://storage.googleapis.com/dara-c1b52.appspot.com/daras_ai/media/4bce6718-538c-11ee-a837-02420a000190/doc%20summary%201.gif.png" +DEFAULT_DOC_SUMMARY_META_IMG = "https://storage.googleapis.com/dara-c1b52.appspot.com/daras_ai/media/db70c56e-585a-11ee-990b-02420a00018f/doc%20summary.png.png" class CombineDocumentsChains(Enum): diff --git a/recipes/Lipsync.py b/recipes/Lipsync.py index 4959ccdd0..2e17d8e41 100644 --- a/recipes/Lipsync.py +++ b/recipes/Lipsync.py @@ -15,7 +15,7 @@ CREDITS_PER_MB = 2 -DEFAULT_LIPSYNC_GIF = "https://storage.googleapis.com/dara-c1b52.appspot.com/daras_ai/media/13a80d08-538c-11ee-9c77-02420a000193/lipsync%20audio%201.gif.png" +DEFAULT_LIPSYNC_GIF = "https://storage.googleapis.com/dara-c1b52.appspot.com/daras_ai/media/91acbbde-5857-11ee-920a-02420a000194/lipsync%20audio.png.png" class LipsyncPage(BasePage): From 72d867643b14581100b02e867fd767d390dfd203 Mon Sep 17 00:00:00 2001 From: Dev Aggarwal Date: Fri, 22 Sep 2023 04:34:57 +0530 Subject: [PATCH 18/25] if the user submits an api call with only a run id, assume their uid to be the default --- routers/api.py | 1 + 1 file changed, 1 insertion(+) diff --git a/routers/api.py b/routers/api.py index f23c34402..75c1ba0b3 100644 --- a/routers/api.py +++ b/routers/api.py @@ -328,6 +328,7 @@ def submit_api_call( self = page_cls(request=SimpleNamespace(user=user)) # get saved state from db + query_params.setdefault("uid", user.uid) state = self.get_sr_from_query_params_dict(query_params).to_dict() if state is None: raise HTTPException(status_code=404) From a97d38a7c9810966074a8aafa6eb21ea13eb77e3 Mon Sep 17 00:00:00 2001 From: Dev Aggarwal Date: Fri, 22 Sep 2023 04:38:18 +0530 Subject: [PATCH 19/25] fix type error --- routers/api.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/routers/api.py b/routers/api.py index 75c1ba0b3..47b7989a1 100644 --- a/routers/api.py +++ b/routers/api.py @@ -122,7 +122,7 @@ def run_api_json( page_cls=page_cls, user=user, request_body=page_request.dict(), - query_params=request.query_params, + query_params=dict(request.query_params), ) @app.post( @@ -176,7 +176,7 @@ def run_api_json_async( page_cls=page_cls, user=user, 
request_body=page_request.dict(), - query_params=request.query_params, + query_params=dict(request.query_params), run_async=True, ) response.headers["Location"] = ret["status_url"] From b2f172c4412e1df77fc2063707de78670b1852a2 Mon Sep 17 00:00:00 2001 From: Dev Aggarwal Date: Fri, 22 Sep 2023 18:20:33 +0530 Subject: [PATCH 20/25] force create personal channels button --- bots/admin.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/bots/admin.py b/bots/admin.py index 391d28ae5..4872b7bd1 100644 --- a/bots/admin.py +++ b/bots/admin.py @@ -26,6 +26,7 @@ Conversation, BotIntegration, ) +from bots.tasks import create_personal_channels_for_all_members from gooeysite.custom_widgets import JSONEditorWidget @@ -84,6 +85,15 @@ class Media: ] +def create_personal_channels(modeladmin, request, queryset): + for bi in queryset: + create_personal_channels_for_all_members.delay(bi.id) + modeladmin.message_user( + request, + f"Started creating personal channels for {queryset.count()} bots in the background.", + ) + + @admin.register(BotIntegration) class BotIntegrationAdmin(admin.ModelAdmin): search_fields = [ @@ -190,6 +200,8 @@ class BotIntegrationAdmin(admin.ModelAdmin): ), ] + actions = [create_personal_channels] + @admin.display(description="Messages") def view_messsages(self, bi: BotIntegration): return list_related_html_url( From 198607882c0c8ad18a92147edfe701ff108c37d5 Mon Sep 17 00:00:00 2001 From: Dev Aggarwal Date: Fri, 22 Sep 2023 18:45:22 +0530 Subject: [PATCH 21/25] handle user_team_not_in_channel --- daras_ai_v2/slack_bot.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/daras_ai_v2/slack_bot.py b/daras_ai_v2/slack_bot.py index 20481e15a..af346dd90 100644 --- a/daras_ai_v2/slack_bot.py +++ b/daras_ai_v2/slack_bot.py @@ -334,7 +334,11 @@ def create_personal_channel( ) except SlackAPIError as e: # skip if the user is restricted - if e.error in ["user_is_ultra_restricted", "user_is_restricted"]: + if e.error in [ + "user_is_ultra_restricted", + "user_is_restricted", + "user_team_not_in_channel", + ]: return else: raise From 5fafeaeeb8f4e1d3d4d69b97d49730037e4be395 Mon Sep 17 00:00:00 2001 From: Dev Aggarwal Date: Sat, 23 Sep 2023 01:42:47 +0530 Subject: [PATCH 22/25] slack personal channel support for shortcuts --- routers/slack.py | 81 ++++++++++++++++++++++++++++++++++++++++-------- 1 file changed, 68 insertions(+), 13 deletions(-) diff --git a/routers/slack.py b/routers/slack.py index c0a2537f4..5f70d400e 100644 --- a/routers/slack.py +++ b/routers/slack.py @@ -9,7 +9,7 @@ from starlette.background import BackgroundTasks from starlette.responses import RedirectResponse, HTMLResponse -from bots.models import BotIntegration, Platform +from bots.models import BotIntegration, Platform, Conversation from bots.tasks import create_personal_channels_for_all_members from daras_ai_v2 import settings from daras_ai_v2.bots import _on_msg, request_json, request_urlencoded_body @@ -225,9 +225,17 @@ def _handle_slack_event(event: dict, background_tasks: BackgroundTasks): capture_exception(e) +@router.get("/__/slack/oauth/shortcuts/") +def slack_oauth_shortcuts(dm: bool = False): + return RedirectResponse(str(get_slack_shortcuts_connect_url(dm))) + + @router.get("/__/slack/redirect/shortcuts/") -def slack_connect_redirect_shortcuts(request: Request): - retry_button = f'Retry' +def slack_connect_redirect_shortcuts( + request: Request, + dm: bool = False, +): + retry_button = f'Retry' code = request.query_params.get("code") if not code: @@ -239,19 +247,53 @@ def 
slack_connect_redirect_shortcuts(request: Request): res = requests.post( furl( "https://slack.com/api/oauth.v2.access", - query_params=dict(code=code, redirect_uri=slack_shortcuts_redirect_uri), + query_params=dict( + code=code, redirect_uri=get_slack_shortcuts_redirect_uri(dm) + ), ).url, auth=HTTPBasicAuth(settings.SLACK_CLIENT_ID, settings.SLACK_CLIENT_SECRET), ) res.raise_for_status() - print(res.text) res = res.json() + print("> slack_connect_redirect_shortcuts:", res) + + channel_id = res["incoming_webhook"]["channel_id"] + channel_name = res["incoming_webhook"]["channel"].strip("#") + team_id = res["team"]["id"] + user_id = res["authed_user"]["id"] + access_token = res["authed_user"]["access_token"] + + if dm: + try: + convo = Conversation.objects.get( + bot_integration__slack_channel_id=channel_id, + bot_integration__slack_team_id=team_id, + slack_user_id=user_id, + slack_team_id=team_id, + slack_channel_is_personal=True, + ) + except Conversation.DoesNotExist: + bi = BotIntegration.objects.get( + slack_channel_id=channel_id, + slack_team_id=team_id, + ) + user = fetch_user_info(user_id, bi.slack_access_token) + convo = create_personal_channel(bi=bi, user=user) + if not convo: + return HTMLResponse( + f"
<p>Oh No! Something went wrong here.</p><p>Error: Could not create personal channel.</p>
" + + retry_button, + status_code=400, + ) + channel_id = convo.slack_channel_id + channel_name = convo.slack_channel_name + payload = json.dumps( dict( - slack_channel=res["incoming_webhook"]["channel"].strip("#"), - slack_channel_id=res["incoming_webhook"]["channel_id"], - slack_user_access_token=res["authed_user"]["access_token"], - slack_team_id=res["team"]["id"], + slack_channel=channel_name, + slack_channel_id=channel_id, + slack_user_access_token=access_token, + slack_team_id=team_id, ), indent=2, ) @@ -284,7 +326,20 @@ def slack_connect_redirect_shortcuts(request: Request): ) -slack_shortcuts_redirect_uri = ( - furl(settings.APP_BASE_URL) - / router.url_path_for(slack_connect_redirect_shortcuts.__name__) -).url +def get_slack_shortcuts_connect_url(dm: bool = False): + return furl( + "https://slack.com/oauth/v2/authorize", + query_params=dict( + client_id=settings.SLACK_CLIENT_ID, + scope=",".join(["incoming-webhook"]), + user_scope=",".join(["chat:write"]), + redirect_uri=get_slack_shortcuts_redirect_uri(dm), + ), + ) + + +def get_slack_shortcuts_redirect_uri(dm: bool = False) -> str: + return ( + furl(settings.APP_BASE_URL, query_params=dict(dm=dm)) + / router.url_path_for(slack_connect_redirect_shortcuts.__name__) + ).url From c7cf98bd40e042e0809be60dc6ac37dfb022f4b3 Mon Sep 17 00:00:00 2001 From: Dev Aggarwal Date: Sat, 23 Sep 2023 02:03:05 +0530 Subject: [PATCH 23/25] slack personal channel support for shortcuts --- routers/slack.py | 42 ++++++++++++++---------------------------- 1 file changed, 14 insertions(+), 28 deletions(-) diff --git a/routers/slack.py b/routers/slack.py index 5f70d400e..ea6be9a67 100644 --- a/routers/slack.py +++ b/routers/slack.py @@ -258,40 +258,26 @@ def slack_connect_redirect_shortcuts( print("> slack_connect_redirect_shortcuts:", res) channel_id = res["incoming_webhook"]["channel_id"] - channel_name = res["incoming_webhook"]["channel"].strip("#") - team_id = res["team"]["id"] user_id = res["authed_user"]["id"] + team_id = res["team"]["id"] access_token = res["authed_user"]["access_token"] - if dm: - try: - convo = Conversation.objects.get( - bot_integration__slack_channel_id=channel_id, - bot_integration__slack_team_id=team_id, - slack_user_id=user_id, - slack_team_id=team_id, - slack_channel_is_personal=True, - ) - except Conversation.DoesNotExist: - bi = BotIntegration.objects.get( - slack_channel_id=channel_id, - slack_team_id=team_id, - ) - user = fetch_user_info(user_id, bi.slack_access_token) - convo = create_personal_channel(bi=bi, user=user) - if not convo: - return HTMLResponse( - f"
<p>Oh No! Something went wrong here.</p><p>Error: Could not create personal channel.</p>
" - + retry_button, - status_code=400, - ) - channel_id = convo.slack_channel_id - channel_name = convo.slack_channel_name + try: + convo = Conversation.objects.get( + slack_channel_id=channel_id, slack_user_id=user_id, slack_team_id=team_id + ) + except Conversation.DoesNotExist: + return HTMLResponse( + "
<p>Oh No! Something went wrong here.</p>" + "<p>Conversation not found. Please make sure this channel is connected to the gooey bot</p>
" + + retry_button, + status_code=400, + ) payload = json.dumps( dict( - slack_channel=channel_name, - slack_channel_id=channel_id, + slack_channel=convo.slack_channel_name, + slack_channel_id=convo.slack_channel_id, slack_user_access_token=access_token, slack_team_id=team_id, ), From cd9679bf61bca1dbbd783153872d78f133ee8489 Mon Sep 17 00:00:00 2001 From: Dev Aggarwal Date: Sat, 23 Sep 2023 02:08:21 +0530 Subject: [PATCH 24/25] remove the dm query param --- routers/slack.py | 44 ++++++++++++++++++-------------------------- 1 file changed, 18 insertions(+), 26 deletions(-) diff --git a/routers/slack.py b/routers/slack.py index ea6be9a67..323107291 100644 --- a/routers/slack.py +++ b/routers/slack.py @@ -226,16 +226,13 @@ def _handle_slack_event(event: dict, background_tasks: BackgroundTasks): @router.get("/__/slack/oauth/shortcuts/") -def slack_oauth_shortcuts(dm: bool = False): - return RedirectResponse(str(get_slack_shortcuts_connect_url(dm))) +def slack_oauth_shortcuts(): + return RedirectResponse(str(slack_shortcuts_connect_url)) @router.get("/__/slack/redirect/shortcuts/") -def slack_connect_redirect_shortcuts( - request: Request, - dm: bool = False, -): - retry_button = f'Retry' +def slack_connect_redirect_shortcuts(request: Request): + retry_button = f'Retry' code = request.query_params.get("code") if not code: @@ -247,9 +244,7 @@ def slack_connect_redirect_shortcuts( res = requests.post( furl( "https://slack.com/api/oauth.v2.access", - query_params=dict( - code=code, redirect_uri=get_slack_shortcuts_redirect_uri(dm) - ), + query_params=dict(code=code, redirect_uri=slack_shortcuts_redirect_uri), ).url, auth=HTTPBasicAuth(settings.SLACK_CLIENT_ID, settings.SLACK_CLIENT_SECRET), ) @@ -312,20 +307,17 @@ def slack_connect_redirect_shortcuts( ) -def get_slack_shortcuts_connect_url(dm: bool = False): - return furl( - "https://slack.com/oauth/v2/authorize", - query_params=dict( - client_id=settings.SLACK_CLIENT_ID, - scope=",".join(["incoming-webhook"]), - user_scope=",".join(["chat:write"]), - redirect_uri=get_slack_shortcuts_redirect_uri(dm), - ), - ) - +slack_shortcuts_redirect_uri = ( + furl(settings.APP_BASE_URL) + / router.url_path_for(slack_connect_redirect_shortcuts.__name__) +).url -def get_slack_shortcuts_redirect_uri(dm: bool = False) -> str: - return ( - furl(settings.APP_BASE_URL, query_params=dict(dm=dm)) - / router.url_path_for(slack_connect_redirect_shortcuts.__name__) - ).url +slack_shortcuts_connect_url = furl( + "https://slack.com/oauth/v2/authorize", + query_params=dict( + client_id=settings.SLACK_CLIENT_ID, + scope=",".join(["incoming-webhook"]), + user_scope=",".join(["chat:write"]), + redirect_uri=slack_shortcuts_redirect_uri, + ), +) From 2e56cec3eb8db857603bb278595dc1217cca4192 Mon Sep 17 00:00:00 2001 From: Dev Aggarwal Date: Sat, 23 Sep 2023 02:13:38 +0530 Subject: [PATCH 25/25] send back the connected example_id and run_id with slack shortcuts redirect --- routers/slack.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/routers/slack.py b/routers/slack.py index 323107291..fdeee8511 100644 --- a/routers/slack.py +++ b/routers/slack.py @@ -266,8 +266,17 @@ def slack_connect_redirect_shortcuts(request: Request): "
<p>Oh No! Something went wrong here.</p>" "<p>Conversation not found. Please make sure this channel is connected to the gooey bot</p>
" + retry_button, + status_code=404, + ) + + if not convo.bot_integration.saved_run_id: + return HTMLResponse( + "
<p>Oh No! Something went wrong here.</p>" + "<p>Please make sure this bot is connected to a gooey run or example</p>
" + + retry_button, status_code=400, ) + sr = convo.bot_integration.saved_run payload = json.dumps( dict( @@ -275,6 +284,8 @@ def slack_connect_redirect_shortcuts(request: Request): slack_channel_id=convo.slack_channel_id, slack_user_access_token=access_token, slack_team_id=team_id, + gooey_example_id=sr.example_id, + gooey_run_id=sr.run_id, ), indent=2, )