Skip to content

Commit

Permalink
tests for groupchats, anthropic
Browse files Browse the repository at this point in the history
  • Loading branch information
Kye committed Nov 11, 2023
1 parent 76fd9a0 commit 9402dab
Show file tree
Hide file tree
Showing 11 changed files with 332 additions and 20 deletions.
2 changes: 1 addition & 1 deletion tests/apps/discord.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import unittest
from unittest.mock import patch, Mock, MagicMock
from unittest.mock import patch, Mock
from apps.discord import (
Bot,
) # Replace 'Bot' with the name of the file containing your bot's code.
Expand Down
98 changes: 97 additions & 1 deletion tests/models/anthropic.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,20 @@
import os
import pytest
from unittest.mock import Mock, patch

import pytest

from swarms.models.anthropic import Anthropic


# Mock the Anthropic API client for testing
class MockAnthropicClient:
    """Stand-in for the real Anthropic API client so tests never hit the network."""

    def __init__(self, *args, **kwargs):
        # Swallow whatever constructor arguments the real client would take.
        pass

    def completions_create(self, prompt, stop_sequences, stream, **kwargs):
        """Return a canned response object, ignoring all inputs."""
        return MockAnthropicResponse()


@pytest.fixture
def mock_anthropic_env():
os.environ["ANTHROPIC_API_URL"] = "https://test.anthropic.com"
Expand Down Expand Up @@ -125,3 +136,88 @@ def test_anthropic_exception_handling(
anthropic_instance(task, stop)

assert "An error occurred" in str(excinfo.value)


class MockAnthropicResponse:
    """Minimal stand-in for an Anthropic completion response."""

    def __init__(self):
        # `.completion` is the only field the code under test reads.
        self.completion = "Mocked Response from Anthropic"

def test_anthropic_instance_creation(anthropic_instance):
    """The fixture should yield a fully constructed Anthropic model wrapper."""
    assert isinstance(anthropic_instance, Anthropic)

def test_anthropic_call_method(anthropic_instance):
response = anthropic_instance("What is the meaning of life?")
assert response == "Mocked Response from Anthropic"

def test_anthropic_stream_method(anthropic_instance):
generator = anthropic_instance.stream("Write a story.")
for token in generator:
assert isinstance(token, str)

def test_anthropic_async_call_method(anthropic_instance):
response = anthropic_instance.async_call("Tell me a joke.")
assert response == "Mocked Response from Anthropic"

def test_anthropic_async_stream_method(anthropic_instance):
async_generator = anthropic_instance.async_stream("Translate to French.")
for token in async_generator:
assert isinstance(token, str)

def test_anthropic_get_num_tokens(anthropic_instance):
text = "This is a test sentence."
num_tokens = anthropic_instance.get_num_tokens(text)
assert num_tokens > 0

# Add more test cases to cover other functionalities and edge cases of the Anthropic class


def test_anthropic_wrap_prompt(anthropic_instance):
prompt = "What is the meaning of life?"
wrapped_prompt = anthropic_instance._wrap_prompt(prompt)
assert wrapped_prompt.startswith(anthropic_instance.HUMAN_PROMPT)
assert wrapped_prompt.endswith(anthropic_instance.AI_PROMPT)

def test_anthropic_convert_prompt(anthropic_instance):
prompt = "What is the meaning of life?"
converted_prompt = anthropic_instance.convert_prompt(prompt)
assert converted_prompt.startswith(anthropic_instance.HUMAN_PROMPT)
assert converted_prompt.endswith(anthropic_instance.AI_PROMPT)

def test_anthropic_call_with_stop(anthropic_instance):
response = anthropic_instance("Translate to French.", stop=["stop1", "stop2"])
assert response == "Mocked Response from Anthropic"

def test_anthropic_stream_with_stop(anthropic_instance):
generator = anthropic_instance.stream("Write a story.", stop=["stop1", "stop2"])
for token in generator:
assert isinstance(token, str)

def test_anthropic_async_call_with_stop(anthropic_instance):
response = anthropic_instance.async_call("Tell me a joke.", stop=["stop1", "stop2"])
assert response == "Mocked Response from Anthropic"

def test_anthropic_async_stream_with_stop(anthropic_instance):
async_generator = anthropic_instance.async_stream("Translate to French.", stop=["stop1", "stop2"])
for token in async_generator:
assert isinstance(token, str)

def test_anthropic_get_num_tokens_with_count_tokens(anthropic_instance):
anthropic_instance.count_tokens = Mock(return_value=10)
text = "This is a test sentence."
num_tokens = anthropic_instance.get_num_tokens(text)
assert num_tokens == 10

def test_anthropic_get_num_tokens_without_count_tokens(anthropic_instance):
del anthropic_instance.count_tokens
with pytest.raises(NameError):
text = "This is a test sentence."
anthropic_instance.get_num_tokens(text)

def test_anthropic_wrap_prompt_without_human_ai_prompt(anthropic_instance):
del anthropic_instance.HUMAN_PROMPT
del anthropic_instance.AI_PROMPT
prompt = "What is the meaning of life?"
with pytest.raises(NameError):
anthropic_instance._wrap_prompt(prompt)


2 changes: 1 addition & 1 deletion tests/models/auto_temp.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import os
from concurrent.futures import ThreadPoolExecutor
from unittest.mock import Mock, patch
from unittest.mock import patch

import pytest
from dotenv import load_dotenv
Expand Down
2 changes: 1 addition & 1 deletion tests/models/bingchat.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
import os

# Assuming the BingChat class is in a file named "bing_chat.py"
from bing_chat import BingChat, ConversationStyle
from bing_chat import BingChat


class TestBingChat(unittest.TestCase):
Expand Down
2 changes: 1 addition & 1 deletion tests/models/huggingface.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
from unittest.mock import MagicMock, patch
from unittest.mock import patch

import pytest
import torch
Expand Down
8 changes: 4 additions & 4 deletions tests/models/kosmos.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,10 +22,10 @@ def mock_image_request():

# Test utility function
def test_is_overlapping():
    """Rectangles overlap only when their interiors intersect.

    Diff-scrape residue left both the pre- and post-commit assertion sets in
    this span; only the post-commit ``is True`` / ``is False`` assertions are
    kept.  NOTE(review): ``is True`` requires ``is_overlapping`` to return the
    ``bool`` singleton — confirm it does not return e.g. a numpy bool.
    """
    assert is_overlapping((1, 1, 3, 3), (2, 2, 4, 4)) is True
    assert is_overlapping((1, 1, 2, 2), (3, 3, 4, 4)) is False
    assert is_overlapping((0, 0, 1, 1), (1, 1, 2, 2)) is False
    assert is_overlapping((0, 0, 2, 2), (1, 1, 2, 2)) is True


# Test model initialization
Expand Down
2 changes: 1 addition & 1 deletion tests/models/revgptv1.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ def test_run(self):

def test_run_time(self):
prompt = "Generate a 300 word essay about technology."
response = self.model.run(prompt)
self.model.run(prompt)
self.assertLess(self.model.end_time - self.model.start_time, 60)

def test_generate_summary(self):
Expand Down
2 changes: 1 addition & 1 deletion tests/models/whisperx.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import os
import subprocess
import tempfile
from unittest.mock import Mock, patch
from unittest.mock import patch

import pytest
import whisperx
Expand Down
10 changes: 5 additions & 5 deletions tests/structs/sequential_workflow.py
Original file line number Diff line number Diff line change
Expand Up @@ -65,10 +65,10 @@ def test_sequential_workflow_initialization():
assert isinstance(workflow, SequentialWorkflow)
assert len(workflow.tasks) == 0
assert workflow.max_loops == 1
assert workflow.autosave == False
assert workflow.autosave is False
assert workflow.saved_state_filepath == "sequential_workflow_state.json"
assert workflow.restore_state_filepath == None
assert workflow.dashboard == False
assert workflow.restore_state_filepath is None
assert workflow.dashboard is False


def test_sequential_workflow_add_task():
Expand All @@ -87,7 +87,7 @@ def test_sequential_workflow_reset_workflow():
task_flow = MockOpenAIChat()
workflow.add(task_description, task_flow)
workflow.reset_workflow()
assert workflow.tasks[0].result == None
assert workflow.tasks[0].result is None


def test_sequential_workflow_get_task_results():
Expand Down Expand Up @@ -330,4 +330,4 @@ def test_real_world_usage_with_environment_variables():
def test_real_world_usage_no_openai_key():
# Ensure that an exception is raised when the OpenAI API key is not set
with pytest.raises(ValueError):
llm = OpenAIChat() # API key not provided, should raise an exception
OpenAIChat() # API key not provided, should raise an exception
Loading

0 comments on commit 9402dab

Please sign in to comment.