Limit templated visualization data, fix PCA plot for tabular datasets
mvdbeek committed May 23, 2024
1 parent eef864a commit 8fcc151
Showing 2 changed files with 27 additions and 8 deletions.
20 changes: 14 additions & 6 deletions config/plugins/visualizations/PCA_3Dplot/templates/PCA_3Dplot.mako
@@ -1,7 +1,11 @@
 <%!
+import csv
 import sys
 from io import StringIO
-import csv
+from galaxy.exceptions import RequestParameterInvalidException
+MAX_SIZE = 100000  # 100 KB, empirically the largest value I can render on my browser (m2 mac, chrome)
 %>

 <%
@@ -29,11 +33,15 @@ def create_options(header):
     return colour_options, start_options

 def load_data():
-    input = "\n".join(list(hda.datatype.dataprovider(hda, 'line', comment_char=none, provide_blank=True, strip_lines=False, strip_newlines=True)))
-    tabular_file = StringIO(input)
-    dialect = csv.Sniffer().sniff(tabular_file.read(1024), delimiters=";,\t")
-    tabular_file.seek(0)
-    table = csv.reader(tabular_file, dialect)
+    lines = []
+    size = 0
+    for line in hda.datatype.dataprovider(hda, 'line', comment_char=none, provide_blank=True, strip_lines=False, strip_newlines=True):
+        size += len(line)
+        if size > MAX_SIZE:
+            raise RequestParameterInvalidException("Dataset too large to render, dataset must be less than 100 KB in size.")
+        lines.append(line)
+    dialect = csv.Sniffer().sniff("\n".join(lines))
+    table = csv.reader(lines, dialect)
     data = "["
     for i, row in enumerate(table):
         if i == 0:
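
For context, the pattern the new load_data follows here (stream the dataset line by line, abort once a running size counter passes MAX_SIZE, then sniff the delimiter on the collected lines) can be sketched outside of Mako roughly as below. This is a minimal sketch only: the plain list of strings stands in for hda.datatype.dataprovider, ValueError stands in for Galaxy's RequestParameterInvalidException, and load_table is an illustrative name, not part of the commit.

import csv

MAX_SIZE = 100000  # 100 KB cap, mirroring the template

def load_table(line_iter):
    """Collect lines while enforcing the size cap, then sniff the delimiter."""
    lines = []
    size = 0
    for line in line_iter:
        size += len(line)
        if size > MAX_SIZE:
            # The template raises RequestParameterInvalidException here.
            raise ValueError("Dataset too large to render, dataset must be less than 100 KB in size.")
        lines.append(line)
    # Sniff on the joined text; csv.reader can then consume the list of lines directly.
    dialect = csv.Sniffer().sniff("\n".join(lines))
    return list(csv.reader(lines, dialect))

# Example: a small comma-separated table stays under the cap and parses cleanly.
rows = load_table(["PC1,PC2,PC3", "0.1,0.2,0.3", "0.4,0.5,0.6"])
print(rows[0])  # ['PC1', 'PC2', 'PC3']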
15 changes: 13 additions & 2 deletions config/plugins/visualizations/ts_visjs/templates/ts_visjs.mako
@@ -3,6 +3,10 @@ import collections
 import json
 import sys
 from numpy import inf
+from galaxy.exceptions import RequestParameterInvalidException
+MAX_SIZE = 100000  # 100 KB
 %>

 <%
@@ -58,8 +62,15 @@ def write_reaction(edge_id, left_index, right_index, substrates, products, rate,
 def load_data():
     output = {"nodes": "", "border_nodes": "", "edges": "", "edges": "", "self_loops": ""}
-    data = ''.join(list(hda.datatype.dataprovider(hda, 'line', strip_lines=True, strip_newlines=True)))
-    data = json.loads(data)
+    lines = []
+    size = 0
+    for line in hda.datatype.dataprovider(hda, 'line', strip_lines=True, strip_newlines=True):
+        size += len(line)
+        if size > MAX_SIZE:
+            raise RequestParameterInvalidException("Dataset too large to render, dataset must be less than 100 KB in size.")
+        lines.append(line)
+    data = json.loads("".join(lines))
     ordering = data['ordering']
     nodes = {int(key): to_counter(data['nodes'][key], ordering) for key in data['nodes'].keys()}
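
Both templates now repeat the same accumulate-and-check loop, so the guard lends itself to a small shared helper. The sketch below is hypothetical (read_limited and the hard-coded input lines are illustrative, not part of this commit) and again substitutes ValueError for RequestParameterInvalidException; the usage mirrors ts_visjs.mako, which joins the capped lines and parses them as JSON.

import json

MAX_SIZE = 100000  # 100 KB, matching both templates

def read_limited(line_iter, max_size=MAX_SIZE):
    """Collect lines from an iterator, aborting once the accumulated size exceeds max_size."""
    lines = []
    size = 0
    for line in line_iter:
        size += len(line)
        if size > max_size:
            # The templates raise RequestParameterInvalidException at this point.
            raise ValueError("Dataset too large to render, dataset must be less than 100 KB in size.")
        lines.append(line)
    return lines

# Usage mirroring ts_visjs.mako: join the capped lines, then parse as JSON.
lines = read_limited(['{"ordering": ["A", "B"], ', '"nodes": {"0": [1, 0]}}'])
data = json.loads("".join(lines))
print(data["ordering"])  # ['A', 'B']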
