Commit

Fix linting
pm3310 committed Feb 17, 2024
1 parent d2a8028 commit 4092dcd
Showing 2 changed files with 14 additions and 8 deletions.
20 changes: 12 additions & 8 deletions sagify/commands/llm.py
@@ -42,11 +42,11 @@

_MAPPING_IMAGE_CREATION_MODEL_ID_TO_MODEL_NAME = {
'stabilityai-stable-diffusion-v2': (
'model-txt2img-stabilityai-stable-diffusion-v2',
'model-txt2img-stabilityai-stable-diffusion-v2',
'https://huggingface.co/stabilityai/stable-diffusion-2'
),
'stabilityai-stable-diffusion-v2-1-base': (
'model-txt2img-stabilityai-stable-diffusion-v2-1-base',
'model-txt2img-stabilityai-stable-diffusion-v2-1-base',
'https://huggingface.co/stabilityai/stable-diffusion-2-1-base'
),
'stabilityai-stable-diffusion-v2-fp16': (
@@ -101,7 +101,7 @@
('ml.p3.8xlarge', 'https://instances.vantage.sh/aws/ec2/p3.8xlarge'),
('ml.p3.16xlarge', 'https://instances.vantage.sh/aws/ec2/p3.16xlarge'),
]


@click.group()
def llm():
@@ -110,6 +110,7 @@ def llm():
"""
pass


@llm.command()
def platforms():
"""
@@ -119,6 +120,7 @@ def platforms():
logger.info(" - OpenAI: https://platform.openai.com/docs/overview")
logger.info(" - AWS Sagemaker: https://aws.amazon.com/sagemaker")


@llm.command()
@click.option(
'--all',
@@ -184,7 +186,7 @@ def sagemaker_models(all, chat_completions, image_creations, embeddings):
logger.info(" - Instance Type: {}".format(instance_type))
logger.info(" Instance URL: {}".format(instance_url))
logger.info("\n")

if embeddings:
logger.info("\nEmbeddings:")
for model_id, (model_name, model_url) in _MAPPING_EMBEDDINGS_MODEL_ID_TO_MODEL_NAME.items():
@@ -315,7 +317,7 @@ def start(
list(_MAPPING_CHAT_COMPLETIONS_MODEL_ID_TO_MODEL_NAME.keys())
)
)

if default_config['chat_completions']['instance_type'] not in _VALID_INSTANCE_TYPES_PER_CHAT_COMPLETIONS_MODEL[
_MAPPING_CHAT_COMPLETIONS_MODEL_ID_TO_MODEL_NAME[default_config['chat_completions']['model']][0]
]:
@@ -349,7 +351,7 @@ def start(
list(_MAPPING_IMAGE_CREATION_MODEL_ID_TO_MODEL_NAME.keys())
)
)

if default_config['image_creations']['instance_type'] not in _VALID_INSTANCE_TYPES_PER_IMAGE_CREATIONS_MODEL[
_MAPPING_IMAGE_CREATION_MODEL_ID_TO_MODEL_NAME[default_config['image_creations']['model']][0]
]:
@@ -360,7 +362,7 @@
]
)
)

image_endpoint_name, _ = api_cloud.foundation_model_deploy(
model_id=_MAPPING_IMAGE_CREATION_MODEL_ID_TO_MODEL_NAME[default_config['image_creations']['model']][0],
model_version='1.*',
@@ -383,7 +385,7 @@
list(_MAPPING_EMBEDDINGS_MODEL_ID_TO_MODEL_NAME.keys())
)
)

if default_config['embeddings']['instance_type'] not in _VALID_EMBEDDINGS_INSTANCE_TYPES:
raise ValueError(
"Invalid instance type for embeddings model. Available instance types: {}".format(
@@ -509,6 +511,7 @@ def stop(
logger.info("{}".format(e))
sys.exit(-1)


@llm.command()
def start_local_gateway():
"""
@@ -519,6 +522,7 @@ def start_local_gateway():
from sagify.llm_gateway.main import start_server
start_server()


llm.add_command(platforms)
llm.add_command(sagemaker_models)
llm.add_command(start)
2 changes: 2 additions & 0 deletions sagify/llm_gateway/main.py
@@ -14,8 +14,10 @@
app.include_router(api_router)
app.add_exception_handler(InternalServerError, internal_server_error_handler)


def start_server():
uvicorn.run("sagify.llm_gateway.main:app", port=8080, host="0.0.0.0")


if __name__ == "__main__":
start_server()

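For reference, the start_server entry point above is what the start_local_gateway command in sagify/commands/llm.py imports and calls; a minimal sketch of invoking it directly from Python, assuming the sagify package and its FastAPI/uvicorn dependencies are installed in the current environment:

# Minimal sketch: launch the LLM gateway programmatically
# (assumes sagify and its FastAPI/uvicorn dependencies are installed).
from sagify.llm_gateway.main import start_server

# Serves sagify.llm_gateway.main:app on 0.0.0.0:8080, as configured above.
start_server()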