Skip to content

Commit

Permalink
Remove unused write_data_to_file in data providers.
Browse files Browse the repository at this point in the history
Was originally used in some tool-running fashion (c1b0c1e), but those callers are no longer present.
  • Loading branch information
jmchilton committed Jul 30, 2024
1 parent 422264d commit 2feaa7b
Show file tree
Hide file tree
Showing 2 changed files with 0 additions and 102 deletions.
6 changes: 0 additions & 6 deletions lib/galaxy/visualization/data_providers/basic.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,12 +60,6 @@ def get_data(self, chrom, start, end, start_val=0, max_vals=sys.maxsize, **kwarg
iterator = self.get_iterator(chrom, start, end)
return self.process_data(iterator, start_val, max_vals, **kwargs)

def write_data_to_file(self, filename, **kwargs):
    """
    Write data in region defined by chrom, start, and end to a file.

    Abstract hook: subclasses that support export must override this.

    :param filename: path of the file to write the region data to.
    :raises NotImplementedError: always, in this base implementation.
    """
    # NotImplementedError is the idiomatic marker for an unimplemented
    # abstract method; it subclasses Exception, so any caller catching
    # the previous bare Exception still works.
    raise NotImplementedError("Unimplemented Function")


class ColumnDataProvider(BaseDataProvider):
"""Data provider for columnar data"""
Expand Down
96 changes: 0 additions & 96 deletions lib/galaxy/visualization/data_providers/genome.py
Original file line number Diff line number Diff line change
Expand Up @@ -188,12 +188,6 @@ def __init__(
error_max_vals=error_max_vals,
)

def write_data_to_file(self, regions, filename):
    """
    Write data in region defined by chrom, start, and end to a file.

    Abstract hook: subclasses that support export must override this.

    :param regions: iterable of region objects (chrom/start/end).
    :param filename: path of the file to write the region data to.
    :raises NotImplementedError: always, in this base implementation.
    """
    # NotImplementedError is the idiomatic marker for an unimplemented
    # abstract method; it subclasses Exception, so any caller catching
    # the previous bare Exception still works.
    raise NotImplementedError("Unimplemented Function")

def valid_chroms(self):
"""
Returns chroms/contigs that the dataset contains
Expand Down Expand Up @@ -387,14 +381,6 @@ def get_iterator(self, data_file, chrom, start, end, **kwargs) -> Iterator[str]:

return iterator

def write_data_to_file(self, regions, filename):
    """
    Write the data lines covered by each region in ``regions`` to ``filename``.

    The underlying data file is opened once and queried per region via
    ``get_iterator``; each line produced is written followed by a newline.
    """
    with self.open_data_file() as data_file:
        with open(filename, "w") as out:
            for r in regions:
                # Emit every data line that falls inside the current region.
                for row in self.get_iterator(data_file, r.chrom, r.start, r.end):
                    out.write(f"{row}\n")


#
# -- Interval data providers --
Expand Down Expand Up @@ -471,9 +457,6 @@ def col_fn(col):

return {"data": rval, "message": message}

def write_data_to_file(self, regions, filename):
    """
    Write data in the given regions to a file.

    Abstract hook: subclasses that support export must override this.

    :param regions: iterable of region objects (chrom/start/end).
    :param filename: path of the file to write the region data to.
    :raises NotImplementedError: always, in this base implementation.
    """
    # NotImplementedError is the idiomatic marker for an unimplemented
    # abstract method; it subclasses Exception, so any caller catching
    # the previous bare Exception still works.
    raise NotImplementedError("Unimplemented Function")


class IntervalTabixDataProvider(TabixDataProvider, IntervalDataProvider):
"""
Expand Down Expand Up @@ -564,18 +547,6 @@ def process_data(self, iterator, start_val=0, max_vals=None, **kwargs):

return {"data": rval, "dataset_type": self.dataset_type, "message": message}

def write_data_to_file(self, regions, filename):
    """
    Write the data lines covered by each region in ``regions`` to ``filename``.

    NOTE(review): the data file is re-opened once per region here, while
    sibling providers open it a single time — confirm whether per-region
    re-opening is actually required for this provider.
    """
    with open(filename, "w") as out:
        for r in regions:
            # Re-open the source for each region, then dump its lines.
            with self.open_data_file() as data_file:
                for row in self.get_iterator(data_file, r.chrom, r.start, r.end):
                    out.write(f"{row}\n")


class BedTabixDataProvider(TabixDataProvider, BedDataProvider):
"""
Expand Down Expand Up @@ -760,15 +731,6 @@ def get_mapping(ref, alt):

return {"data": data, "message": message}

def write_data_to_file(self, regions, filename):
    """
    Write the data lines covered by each region in ``regions`` to ``filename``.

    Fix: the output handle was previously created with a bare ``open`` and
    never closed (a resource leak, and buffered output could be lost on an
    exception); a ``with`` block now guarantees it is flushed and closed.
    """
    with open(filename, "w") as out, self.open_data_file() as data_file:
        for region in regions:
            # Write data in region.
            iterator = self.get_iterator(data_file, region.chrom, region.start, region.end)
            for line in iterator:
                out.write(f"{line}\n")


class VcfTabixDataProvider(TabixDataProvider, VcfDataProvider):
"""
Expand Down Expand Up @@ -847,43 +809,6 @@ def get_filters(self):
filters.append({"name": "Mapping Quality", "type": "number", "index": filter_col})
return filters

def write_data_to_file(self, regions, filename):
    """
    Write reads in regions to file.

    Fix: the original leaked both pysam ``AlignmentFile`` handles on the
    early ``return None`` path (region lookup failing under both chrom
    naming schemes); both handles are now closed in ``finally`` blocks.

    :param regions: iterable of region objects (chrom/start/end).
    :param filename: path of the BAM file to write the selected reads to.
    :returns: ``None`` — also returned early when a region cannot be
        fetched under either chromosome naming convention.
    """
    # Open current BAM file using index.
    bamfile = pysam.AlignmentFile(
        self.original_dataset.get_file_name(), mode="rb", index_filename=self.converted_dataset.get_file_name()
    )
    try:
        # TODO: write headers as well?
        new_bamfile = pysam.AlignmentFile(filename, template=bamfile, mode="wb")
        try:
            for region in regions:
                # Write data from region.
                chrom = region.chrom
                start = region.start
                end = region.end

                try:
                    data = bamfile.fetch(start=start, end=end, reference=chrom)
                except ValueError:
                    # Try alternative chrom naming (UCSC "chr1" <-> Ensembl "1").
                    chrom = _convert_between_ucsc_and_ensemble_naming(chrom)
                    try:
                        data = bamfile.fetch(start=start, end=end, reference=chrom)
                    except ValueError:
                        # Unresolvable region; handles are closed by the
                        # finally blocks below.
                        return None

                # Write reads in region.
                for read in data:
                    new_bamfile.write(read)
        finally:
            new_bamfile.close()
    finally:
        bamfile.close()

@contextmanager
def open_data_file(self):
# Attempt to open the BAM file with index
Expand Down Expand Up @@ -1331,27 +1256,6 @@ class IntervalIndexDataProvider(GenomeDataProvider, FilterableMixin):

dataset_type = "interval_index"

def write_data_to_file(self, regions, filename):
    """
    Write the indexed data covered by each region in ``regions`` to ``filename``.

    Uses the converted dataset's interval index to locate byte offsets of
    matching entries in the original dataset, seeks to each offset, and
    copies the entry to the output file.
    """
    index = Indexes(self.converted_dataset.get_file_name())
    with open(self.original_dataset.get_file_name()) as source, open(filename, "w") as out:
        for region in regions:
            # Write data from region.
            chrom = region.chrom
            start = region.start
            end = region.end
            # NOTE(review): index.find presumably yields (start, end, byte offset)
            # tuples for entries overlapping the region — confirm against Indexes.
            for _start, _end, offset in index.find(chrom, start, end):
                source.seek(offset)

                # HACK: write differently depending on original dataset format.
                if self.original_dataset.ext not in ["gff", "gff3", "gtf"]:
                    # Non-GFF formats: one line per entry, copied verbatim.
                    line = source.readline()
                    out.write(line)
                else:
                    # GFF family: a feature may span multiple lines, so parse one
                    # full feature from the current offset and write its intervals
                    # back out as tab-separated fields.
                    reader = GFFReaderWrapper(source, fix_strand=True)
                    feature = next(reader)
                    for interval in feature.intervals:
                        out.write("\t".join(interval.fields) + "\n")

@contextmanager
def open_data_file(self):
i = Indexes(self.converted_dataset.get_file_name())
Expand Down

0 comments on commit 2feaa7b

Please sign in to comment.