Added buffering symbol (#54)
jeremymanning authored Sep 4, 2023
2 parents ce3266e + 6c17f2c commit 824b0b3
Showing 4 changed files with 42 additions and 11 deletions.
Binary file added chatify/assets/loading.gif
16 changes: 8 additions & 8 deletions chatify/main.py
@@ -9,7 +9,7 @@
 import ipywidgets as widgets
 
 from .chains import CreateLLMChain
-from .widgets import option_widget, button_widget, text_widget, thumbs
+from .widgets import option_widget, button_widget, text_widget, thumbs, loading_widget
 
 from .utils import check_dev_config, get_html
 
@@ -71,7 +71,8 @@ def _read_prompt_dir(self):
     def _create_ui_elements(self):
         """Creates UI elements like buttons, prompt types, texts, and options."""
         # Buttons and prompt types
-        self.button = button_widget()
+        self.execute_button = button_widget()
+        self.loading = loading_widget()
         self.prompt_types = self._read_prompt_dir()
         self.prompt_names = {
             item: key for item, key in enumerate(self.prompt_types.keys())
@@ -104,16 +105,13 @@ def _arrange_ui_elements(self, prompt_type):
         if self.cfg["feedback"]:
             elements = [
                 self.options[prompt_type],
-                self.button,
+                self.execute_button,
                 self.thumbs_up,
                 self.thumbs_down,
             ]
 
         else:
-            elements = [
-                self.options[prompt_type],
-                self.button,
-            ]
+            elements = [self.options[prompt_type], self.execute_button, self.loading]
         hbox = widgets.HBox(elements)
         vbox = widgets.VBox([hbox, self.texts[prompt_type]])
         return vbox
@@ -156,6 +154,7 @@ def update_values(self, *args, **kwargs):
        *args
            Variable-length argument list.
        """
+        self.loading.width = 30
         index = self.tabs.selected_index
         selected_prompt = self.prompt_names[index]
         # Get the prompt
@@ -164,6 +163,7 @@
         ]
         self.texts[selected_prompt].value = self.gpt(self.cell_inputs, self.prompt)
         self.response = self.texts[selected_prompt].value
+        self.loading.width = 0
 
     def record(self, *args):
         try:
@@ -216,7 +216,7 @@ def explain(self, line, cell):
         display(accordion)
 
         # Button click
-        self.button.on_click(self.update_values)
+        self.execute_button.on_click(self.update_values)
 
         # Thumbs up and down
         self.thumbs_down.on_click(self.record)
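The main.py changes follow a simple show/hide pattern for the buffering indicator: the GIF widget starts collapsed (width 0), is expanded just before the blocking LLM call in update_values, and collapsed again once the response is written back. Below is a minimal standalone sketch of that pattern, assuming ipywidgets in a Jupyter notebook; the slow_call function and the "loading.gif" path are placeholders for illustration, not part of this commit.

    import time
    import ipywidgets as widgets
    from IPython.display import display

    # Hidden image widget; width=0 collapses it until it is needed.
    with open("loading.gif", "rb") as f:  # placeholder path
        loading = widgets.Image(value=f.read(), format="gif", width=0, height=10)

    button = widgets.Button(description="Explain")
    output = widgets.Textarea()
    display(widgets.HBox([button, loading]), output)

    def slow_call():
        time.sleep(3)  # stand-in for the long-running LLM request
        return "response text"

    def on_click(_):
        loading.width = 30   # show the spinner while waiting
        output.value = slow_call()
        loading.width = 0    # hide it again when done

    button.on_click(on_click)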
11 changes: 11 additions & 0 deletions chatify/widgets.py
@@ -1,5 +1,7 @@
 import ipywidgets as widgets
 
+import pathlib
+
 
 def option_widget(config):
     """Create an options dropdown widget based on the given configuration.
@@ -41,6 +43,15 @@ def button_widget():
     return button
 
 
+def loading_widget():
+    dirname = pathlib.Path(__file__).parent.resolve()
+    with open(f"{dirname}/assets/loading.gif", "rb") as file:
+        # read the GIF file contents (bytes) into `image`
+        image = file.read()
+    loading = widgets.Image(value=image, format='gif', width=0, height=10)
+    return loading
+
+
 def thumbs(icon='fa-thumbs-up'):
     """Create a thumbs-up button widget.
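The new loading_widget locates the GIF relative to __file__, which works for a regular (non-zipped) install; since setup.py sets zip_safe=False and ships the asset via package_data, that is sufficient here. A roughly equivalent alternative that avoids relying on a filesystem __file__ is sketched below; it is not part of the commit and assumes Python 3.9+ with chatify installed as a package.

    import importlib.resources
    import ipywidgets as widgets

    def loading_widget():
        # Read the packaged GIF through importlib.resources instead of a path built from __file__.
        image = (importlib.resources.files("chatify") / "assets" / "loading.gif").read_bytes()
        return widgets.Image(value=image, format="gif", width=0, height=10)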
26 changes: 23 additions & 3 deletions setup.py
@@ -11,8 +11,28 @@
 with open('HISTORY.rst') as history_file:
     history = history_file.read()
 
-requirements = ['gptcache<=0.1.35', 'langchain<=0.0.226', 'openai', 'markdown', 'ipywidgets', 'requests', 'markdown-it-py[linkify,plugins]', 'pygments']
-extras = ['transformers', 'torch>=2.0', 'tensorflow>=2.0', 'flax', 'einops', 'accelerate', 'xformers', 'bitsandbytes', 'sentencepiece', 'llama-cpp-python']
+requirements = [
+    'gptcache<=0.1.35',
+    'langchain<=0.0.226',
+    'openai',
+    'markdown',
+    'ipywidgets',
+    'requests',
+    'markdown-it-py[linkify,plugins]',
+    'pygments',
+]
+extras = [
+    'transformers',
+    'torch>=2.0',
+    'tensorflow>=2.0',
+    'flax',
+    'einops',
+    'accelerate',
+    'xformers',
+    'bitsandbytes',
+    'sentencepiece',
+    'llama-cpp-python',
+]
 
 test_requirements = [
     'pytest>=3',
@@ -47,7 +67,7 @@
     packages=find_packages(include=['chatify', 'chatify.*']),
     test_suite='tests',
     tests_require=test_requirements,
-    package_data={'': ['**/*.yaml']},
+    package_data={'': ['**/*.yaml', '**/*.gif']},
     url='https://github.com/ContextLab/chatify',
     version='0.2.1',
     zip_safe=False,
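The package_data change is what actually ships chatify/assets/loading.gif inside built distributions. A quick way to verify that after building a wheel (for example with `python -m build`) is sketched below; the wheel filename is an assumption based on the version string in this diff, not output guaranteed by the commit.

    import zipfile

    # Inspect the built wheel and confirm the GIF was packaged.
    wheel_path = "dist/chatify-0.2.1-py3-none-any.whl"  # assumed output name
    with zipfile.ZipFile(wheel_path) as wheel:
        gifs = [name for name in wheel.namelist() if name.endswith(".gif")]
    print(gifs)  # expect something like ['chatify/assets/loading.gif']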
