Merge pull request #61 from Allegheny-Ethical-CS/issue#40
Fix #40 : images not appearing on the streamlit home page
enpuyou authored Mar 30, 2021
2 parents f5e9e47 + 00c4ea6 commit be81929
Showing 2 changed files with 27 additions and 16 deletions.
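In short: the README's markdown image links are replaced with HTML `<img>` tags, and the Streamlit app inlines each PNG as a base64 data URI before rendering, because `st.markdown` does not resolve relative local image paths. A minimal standalone sketch of that idea (the image path is taken from this repository's layout; the helper name is illustrative and is not the commit's code):

```python
# Minimal sketch of the fix's core idea (illustrative helper, not the
# commit's exact code): inline a local PNG as a base64 data URI so that
# st.markdown can display it even though relative file paths are not served.
import base64

import streamlit as st


def png_to_data_uri(path):
    """Read a PNG from disk and return it as a data URI string."""
    with open(path, "rb") as img_file:
        encoded = base64.b64encode(img_file.read()).decode()
    return f"data:image/png;base64,{encoded}"


# Path taken from the repository layout shown in this diff.
uri = png_to_data_uri("resources/images/landing_page.png")
st.markdown(
    f'<img src="{uri}" alt="browser" style="width:100%"/>',
    unsafe_allow_html=True,
)
```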
12 changes: 6 additions & 6 deletions README.md
@@ -73,7 +73,7 @@ Network URL: http://xxx.xxx.x.x:8501
 
 The web interface will be automatically opened in your browser:
 
-![browser](resources/images/landing_page.png)
+<img src="resources/images/landing_page.png" alt="browser" style="width:100%"/>
 
 ### Data Retreiving
 
@@ -113,7 +113,7 @@ with GatorGrader [here](https://github.com/enpuyou/script-api-lambda-dynamodb).
 Once the documents are successfully imported, you can then navigate through
 the select box in the sidebar to view the text analysis:
 
-![select box](resources/images/select_box.png)
+<img src="resources/images/select_box.png" alt="select box" style="width:100%"/>
 
 ##### Reflection Documents
 
@@ -132,10 +132,10 @@ format of json reports GatorMiner gathers from AWS.
 
 ### Analysis
 
-![frequency](resources/images/frequency.png)
-![sentiment](resources/images/sentiment.png)
-![similarity](resources/images/similarity.png)
-![topic](resources/images/topic.png)
+<img src="resources/images/frequency.png" alt="frequency" style="width:100%"/>
+<img src="resources/images/sentiment.png" alt="sentiment" style="width:100%"/>
+<img src="resources/images/similarity.png" alt="similarity" style="width:100%"/>
+<img src="resources/images/topic.png" alt="topic" style="width:100%"/>
 
 ### Contribution
 
31 changes: 21 additions & 10 deletions streamlit_web.py
@@ -2,7 +2,9 @@
 
 import re
 
+import base64
 import numpy as np
+import os
 import pandas as pd
 from sklearn.manifold import TSNE
 import spacy
@@ -60,8 +62,7 @@ def main():
     if debug_mode:
         st.write(main_df)
     if analysis_mode == "Home":
-        with open("README.md") as readme_file:
-            st.markdown(readme_file.read())
+        readme()
     else:
         if analysis_mode == "Frequency Analysis":
             st.title(analysis_mode)
@@ -83,13 +84,26 @@ interactive()
             interactive()
     success_msg.empty()
 
+def readme():
+    """function to load and configurate readme source"""
+
+    with open("README.md") as readme_file:
+        readme_src = readme_file.read()
+    for file in os.listdir("resources/images"):
+        if file.endswith(".png"):
+            img_path = f"resources/images/{file}"
+            with open(img_path, "rb") as f:
+                img_bin = base64.b64encode(f.read()).decode()
+            readme_src = readme_src.replace(img_path, f"data:image/png;base64,{img_bin}")
+
+    st.markdown(readme_src, unsafe_allow_html=True)
+
 def landing_pg():
     """landing page"""
     landing = st.sidebar.selectbox("Welcome", ["Home", "Interactive"])
 
     if landing == "Home":
-        with open("README.md") as readme_file:
-            st.markdown(readme_file.read())
+        readme()
     else:
         interactive()
 
@@ -120,8 +134,7 @@ def retreive_data(data_retreive):
     except TypeError:
         st.sidebar.warning(
             "No data imported. Please check the reflection document input")
-        with open("README.md") as readme_file:
-            st.markdown(readme_file.read())
+        readme()
     else:
         global success_msg
         success_msg = None
@@ -157,8 +170,7 @@ def import_data(data_retreive_method, paths):
                 json_lst.append(md.collect_md(path))
             except FileNotFoundError as err:
                 st.sidebar.text(err)
-                with open("README.md") as readme_file:
-                    st.markdown(readme_file.read())
+                readme()
     else:
         passbuild = st.sidebar.checkbox(
             "Only retreive build success records", value=True)
@@ -169,8 +181,7 @@
                 json_lst.append(ju.clean_report(response))
             except (EnvironmentError, Exception) as err:
                 st.sidebar.error(err)
-                with open("README.md") as readme_file:
-                    st.markdown(readme_file.read())
+                readme()
     # when data is retreived
     if json_lst:
         raw_df = pd.DataFrame()
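
Note that `readme()` keys its `replace` call on the literal `resources/images/<name>.png` strings, so the `src` attributes added to README.md must match the on-disk paths exactly. A hypothetical standalone sanity check of that transformation (the helper name and `__main__` guard are illustrative, not part of the commit):

```python
# Hypothetical sanity check (not part of this commit): after inlining,
# the README source should reference data URIs instead of file paths.
import base64
import os


def inline_png_images(markdown_src, image_dir="resources/images"):
    """Mirror the loop in readme(): swap local PNG paths for data URIs."""
    for name in os.listdir(image_dir):
        if name.endswith(".png"):
            path = f"{image_dir}/{name}"
            with open(path, "rb") as img_file:
                encoded = base64.b64encode(img_file.read()).decode()
            markdown_src = markdown_src.replace(
                path, f"data:image/png;base64,{encoded}")
    return markdown_src


if __name__ == "__main__":
    with open("README.md") as readme_file:
        src = inline_png_images(readme_file.read())
    assert "data:image/png;base64," in src
    assert "resources/images/landing_page.png" not in src
    print("All README image paths were inlined successfully.")
```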
