Commit

Merge branch 'hotfix-1.30.2'

arteymix committed Sep 19, 2023
2 parents 09babfd + 810e85f, commit 89f59fb
Showing 84 changed files with 910 additions and 460 deletions.
gemma-cli/pom.xml (2 changes: 1 addition & 1 deletion)

@@ -3,7 +3,7 @@
    <parent>
        <artifactId>gemma</artifactId>
        <groupId>gemma</groupId>
-        <version>1.30.1</version>
+        <version>1.30.2</version>
    </parent>
    <modelVersion>4.0.0</modelVersion>
    <artifactId>gemma-cli</artifactId>
gemma-core/pom.xml (4 changes: 2 additions & 2 deletions)

@@ -3,7 +3,7 @@
    <parent>
        <artifactId>gemma</artifactId>
        <groupId>gemma</groupId>
-        <version>1.30.1</version>
+        <version>1.30.2</version>
    </parent>
    <modelVersion>4.0.0</modelVersion>
    <artifactId>gemma-core</artifactId>
@@ -18,7 +18,7 @@
            <executions>
                <execution>
                    <id>version-file</id>
-                    <phase>process-classes</phase>
+                    <phase>generate-resources</phase>
                    <goals>
                        <goal>run</goal>
                    </goals>
[changed file, path not shown]

@@ -113,8 +113,7 @@ public LinkAnalysis process( ExpressionExperiment ee, FilterConfig filterConfig,
        LinkAnalysisServiceImpl.log.info( "Fetching expression data for " + ee );

        Collection<ProcessedExpressionDataVector> dataVectors = processedExpressionDataVectorService
-                .getProcessedDataVectors( ee );
-        dataVectors = processedExpressionDataVectorService.thaw( dataVectors );
+                .getProcessedDataVectorsAndThaw( ee );

        LinkAnalysisServiceImpl.log.info( "Starting analysis" );
        this.analyze( ee, filterConfig, linkAnalysisConfig, la, dataVectors );
[changed file, path not shown]

@@ -304,13 +304,13 @@ private ExpressionDataDoubleMatrix getCorrectedData( ExpressionExperiment ee,
     */
    private Collection<ProcessedExpressionDataVector> getProcessedExpressionDataVectors( ExpressionExperiment ee ) {
        Collection<ProcessedExpressionDataVector> vecs = processedExpressionDataVectorService
-                .getProcessedDataVectors( ee );
+                .getProcessedDataVectorsAndThaw( ee );
        if ( vecs.isEmpty() ) {
            log.info( String.format( "No processed vectors for %s, they will be computed from raw data...", ee ) );
            this.processedExpressionDataVectorService.computeProcessedExpressionData( ee );
-            return this.processedExpressionDataVectorService.getProcessedDataVectors( ee );
+            vecs = this.processedExpressionDataVectorService.getProcessedDataVectorsAndThaw( ee );
        }
-        return processedExpressionDataVectorService.thaw( vecs );
+        return vecs;
    }

    @SuppressWarnings("unused")
[changed file, path not shown]

@@ -126,8 +126,7 @@ public ExpressionExperimentSet split( ExpressionExperiment toSplit, Experimental
            throw new UnsupportedOperationException( "Non-double values currently not supported for experiment split" );
        }

-        Collection<RawExpressionDataVector> vectors = rawExpressionDataVectorService.find( qt );
-        vectors = rawExpressionDataVectorService.thaw( vectors );
+        Collection<RawExpressionDataVector> vectors = rawExpressionDataVectorService.findAndThaw( qt );
        if ( vectors.isEmpty() ) {
            // this is okay if the data is processed, or if we have stray orphaned QTs
            log.debug( "No raw vectors for " + qt + "; preferred=" + qt.getIsPreferred() );
[changed file, path not shown]

@@ -112,14 +112,11 @@ public Collection<RawExpressionDataVector> computeMissingValues( ExpressionExper
        timer.start();
        TwoChannelMissingValuesImpl.log.info( "Loading vectors ..." );

-        Collection<RawExpressionDataVector> rawVectors = rawExpressionDataVectorService.find( usefulQuantitationTypes );
+        Collection<RawExpressionDataVector> rawVectors = rawExpressionDataVectorService.findAndThaw( usefulQuantitationTypes );
        Collection<ProcessedExpressionDataVector> procVectors = new HashSet<>();

        if ( rawVectors.isEmpty() ) {
-            procVectors = processedExpressionDataVectorService.find( usefulQuantitationTypes );
-            procVectors = processedExpressionDataVectorService.thaw( procVectors );
-        } else {
-            rawVectors = rawExpressionDataVectorService.thaw( rawVectors );
+            procVectors = processedExpressionDataVectorService.findAndThaw( usefulQuantitationTypes );
        }

        timer.stop();
[changed file, path not shown]

@@ -414,14 +414,13 @@ private Map<QuantitationType, Collection<RawExpressionDataVector>> getVectors( E
        Collection<RawExpressionDataVector> oldVectors = new HashSet<>();

        for ( BioAssayDimension dim : allOldBioAssayDims ) {
-            oldVectors.addAll( rawExpressionDataVectorService.find( dim ) );
+            oldVectors.addAll( rawExpressionDataVectorService.findAndThaw( dim ) );
        }

        if ( oldVectors.isEmpty() ) {
            throw new IllegalStateException( "No vectors" );
        }

-        oldVectors = rawExpressionDataVectorService.thaw( oldVectors );
        Map<QuantitationType, Collection<RawExpressionDataVector>> qt2Vec = new HashMap<>();
        Collection<QuantitationType> qtsToAdd = new HashSet<>();
        for ( RawExpressionDataVector v : oldVectors ) {
[changed file, path not shown]

@@ -165,8 +165,7 @@ public ExpressionDataDoubleMatrix comBat( ExpressionExperiment ee ) {
         * Extract data
         */
        Collection<ProcessedExpressionDataVector> vectos = processedExpressionDataVectorService
-                .getProcessedDataVectors( ee );
-        vectos = processedExpressionDataVectorService.thaw( vectos );
+                .getProcessedDataVectorsAndThaw( ee );
        ExpressionDataDoubleMatrix mat = new ExpressionDataDoubleMatrix( vectos );

        return this.comBat( mat );
[changed file, path not shown]

@@ -146,13 +146,12 @@ public SVDValueObject svd( ExpressionExperiment ee ) throws SVDException {
        assert ee != null;

        Collection<ProcessedExpressionDataVector> vectors = processedExpressionDataVectorService
-                .getProcessedDataVectors( ee );
+                .getProcessedDataVectorsAndThaw( ee );

        if ( vectors.isEmpty() ) {
            throw new IllegalArgumentException( "Experiment must have processed data already to do SVD" );
        }

-        vectors = processedExpressionDataVectorService.thaw( vectors );
        ExpressionDataDoubleMatrix mat = new ExpressionDataDoubleMatrix( vectors );

        SVDServiceHelperImpl.log.info( "Starting SVD" );
[changed file, path not shown]

@@ -188,8 +188,7 @@ public Collection<CompositeSequenceMapSummary> summarizeMapResults(
        Collection<BlatResult> blats = blatResultService.findByBioSequence( bioSequence );
        summary.setBlatResults( blats );

-        Collection<BlatAssociation> maps = blatAssociationService.find( bioSequence );
-        maps = blatAssociationService.thaw( maps );
+        Collection<BlatAssociation> maps = blatAssociationService.findAndThaw( bioSequence );
        for ( BlatAssociation association : maps ) {
            summary.getGeneProducts().add( association.getGeneProduct() );
            summary.getGenes().add( association.getGeneProduct().getGene() );
[new file, path not shown]

@@ -0,0 +1,124 @@
package ubic.gemma.core.analysis.service;

import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.lang3.ArrayUtils;

import javax.annotation.Nullable;
import java.io.*;
import java.math.RoundingMode;
import java.text.DecimalFormat;
import java.text.DecimalFormatSymbols;
import java.util.Locale;

/**
 * Provide base implementation for all sorts of file services that serialize data in tabular format.
 */
public abstract class AbstractFileService<T> implements TsvFileService<T>, JsonFileService<T> {

    private final ObjectMapper objectMapper = new ObjectMapper();

    private static final DecimalFormat smallNumberFormat, midNumberFormat, largeNumberFormat;

    static {
        DecimalFormatSymbols symbols = DecimalFormatSymbols.getInstance( Locale.ENGLISH );
        symbols.setNaN( "" );
        symbols.setInfinity( "inf" );

        // show 4 significant digits
        smallNumberFormat = new DecimalFormat( "0.###E0" );
        smallNumberFormat.setDecimalFormatSymbols( symbols );
        smallNumberFormat.setRoundingMode( RoundingMode.HALF_UP );

        // show from one up to 4 decimal places
        midNumberFormat = new DecimalFormat( "0.0###" );
        midNumberFormat.setDecimalFormatSymbols( symbols );
        midNumberFormat.setRoundingMode( RoundingMode.HALF_UP );

        // only show leading digits and at least one decimal place
        largeNumberFormat = new DecimalFormat( "0.0" );
        largeNumberFormat.setDecimalFormatSymbols( symbols );
        largeNumberFormat.setRoundingMode( RoundingMode.HALF_UP );
    }

    /**
     * Preconfigure a {@link CSVFormat.Builder} with desirable defaults.
     * @param extraHeaderComments additional header comments that will be included at the top of the TSV file.
     */
    protected CSVFormat.Builder getTsvFormatBuilder( String... extraHeaderComments ) {
        return CSVFormat.Builder.create( CSVFormat.TDF )
                .setCommentMarker( '#' )
                .setHeaderComments( ArrayUtils.addAll( new String[] {
                        "If you use this file for your research, please cite:",
                        "Lim et al. (2021) Curation of over 10 000 transcriptomic studies to enable data reuse.",
                        "Database, baab006 (doi:10.1093/database/baab006)." }, extraHeaderComments ) );
    }

    /**
     * Get the delimiter used within column.
     */
    protected String getSubDelimiter() {
        return "|";
    }

    /**
     * Format a {@link Double} for TSV.
     * @param d a double to format
     * @return a formatted double, an empty string if d is null or NaN or inf/-inf if infinite
     */
    protected String format( @Nullable Double d ) {
        if ( d == null ) {
            return "";
        } else if ( d < 1e-4 ) {
            return smallNumberFormat.format( d );
        } else if ( d < 1e3 ) {
            return midNumberFormat.format( d );
        } else {
            return largeNumberFormat.format( d );
        }
    }

    protected String escapeTsv( String s ) {
        return s.replace( "\\", "\\\\" )
                .replace( "\n", "\\n" )
                .replace( "\t", "\\t" )
                .replace( "\r", "\\r" );
    }

    @Override
    public void writeTsv( T entity, File file ) throws IOException {
        try ( Writer writer = new OutputStreamWriter( new FileOutputStream( file ) ) ) {
            writeTsv( entity, writer );
        }
    }

    @Override
    public void writeJson( T entity, Writer writer ) throws IOException {
        objectMapper.writeValue( writer, entity );
    }

    @Override
    public void writeJson( T entity, File file ) throws IOException {
        try ( Writer writer = new OutputStreamWriter( new FileOutputStream( file ) ) ) {
            writeJson( entity, writer );
        }
    }

    @Override
    public void write( T entity, Writer writer, String contentType ) throws IOException {
        if ( "application/json".equalsIgnoreCase( contentType ) ) {
            writeJson( entity, writer );
        } else if ( "text/tab-separated-values".equalsIgnoreCase( contentType ) ) {
            writeTsv( entity, writer );
        } else {
            throw new IllegalArgumentException( "Unsupported content type: " + contentType );
        }
    }

    @Override
    public void write( T entity, File file, String contentType ) throws IOException {
        try ( Writer writer = new OutputStreamWriter( new FileOutputStream( file ) ) ) {
            write( entity, writer, contentType );
        }
    }
}
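
For illustration only (this is not part of the commit), a minimal sketch of how a concrete subclass might build on the AbstractFileService shown above; the GeneScore value object, the column names, and the extra header comment are hypothetical, and the sketch assumes the TsvFileService contract declares writeTsv( T, Writer ), as suggested by the overrides above.

package ubic.gemma.core.analysis.service;

import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVPrinter;

import java.io.IOException;
import java.io.Writer;
import java.util.List;

// Hypothetical subclass for this sketch only.
public class GeneScoreFileService extends AbstractFileService<List<GeneScore>> {

    @Override
    public void writeTsv( List<GeneScore> scores, Writer writer ) throws IOException {
        // reuse the preconfigured TSV format (tab-delimited, '#' comments, citation header)
        CSVFormat format = getTsvFormatBuilder( "Columns: gene_symbol, score" )
                .setHeader( "gene_symbol", "score" )
                .build();
        try ( CSVPrinter printer = new CSVPrinter( writer, format ) ) {
            for ( GeneScore s : scores ) {
                // format( Double ) blanks out null/NaN and keeps roughly four significant digits
                printer.printRecord( s.getSymbol(), format( s.getScore() ) );
            }
        }
    }
}

// Minimal hypothetical value object used by the sketch.
class GeneScore {
    private final String symbol;
    private final Double score;

    GeneScore( String symbol, Double score ) {
        this.symbol = symbol;
        this.score = score;
    }

    String getSymbol() { return symbol; }

    Double getScore() { return score; }
}

With a subclass like this in place, the base class's write( entity, writer, contentType ) dispatches to writeTsv or writeJson depending on whether "text/tab-separated-values" or "application/json" is requested.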

[deleted file, path not shown]

[changed file, path not shown]

@@ -1,10 +1,10 @@
package ubic.gemma.core.analysis.service;

-import ubic.gemma.model.analysis.expression.diff.DifferentialExpressionAnalysisResult;
import ubic.gemma.model.analysis.expression.diff.ExpressionAnalysisResultSet;
import ubic.gemma.model.genome.Gene;

import java.io.IOException;
+import java.io.Writer;
import java.util.List;
import java.util.Map;

@@ -27,5 +27,5 @@ public interface ExpressionAnalysisResultSetFileService extends TsvFileService<E
     * - rank
     *
     */
-    void writeTsvToAppendable( ExpressionAnalysisResultSet analysisResultSet, Map<Long, List<Gene>> result2Genes, Appendable appendable ) throws IOException;
+    void writeTsvToAppendable( ExpressionAnalysisResultSet analysisResultSet, Map<Long, List<Gene>> result2Genes, Writer writer ) throws IOException;
}
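
As a usage note (not part of the commit), a minimal sketch of how the Writer-based writeTsvToAppendable signature above might be called; the helper class, the output path handling, and the way the service and its arguments are obtained are hypothetical.

import ubic.gemma.core.analysis.service.ExpressionAnalysisResultSetFileService;
import ubic.gemma.model.analysis.expression.diff.ExpressionAnalysisResultSet;
import ubic.gemma.model.genome.Gene;

import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.util.List;
import java.util.Map;

// Hypothetical caller; the service is assumed to be obtained elsewhere (e.g. injected),
// and analysisResultSet / result2Genes to be already loaded.
class ResultSetTsvExportExample {

    static void export( ExpressionAnalysisResultSetFileService service, ExpressionAnalysisResultSet analysisResultSet,
            Map<Long, List<Gene>> result2Genes, String path ) throws IOException {
        try ( Writer writer = new OutputStreamWriter( new FileOutputStream( path ) ) ) {
            // Writer replaces Appendable in the updated signature
            service.writeTsvToAppendable( analysisResultSet, result2Genes, writer );
        }
    }
}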
[changed file, path not shown]

@@ -14,17 +14,18 @@
import ubic.gemma.model.genome.Gene;

import java.io.IOException;
+import java.io.Writer;
import java.util.*;
import java.util.stream.Collectors;

import static java.util.function.Function.identity;

@Service
@CommonsLog
-public class ExpressionAnalysisResultSetFileServiceImpl extends AbstractTsvFileService<ExpressionAnalysisResultSet> implements ExpressionAnalysisResultSetFileService {
+public class ExpressionAnalysisResultSetFileServiceImpl extends AbstractFileService<ExpressionAnalysisResultSet> implements ExpressionAnalysisResultSetFileService {

    @Override
-    public void writeTsvToAppendable( ExpressionAnalysisResultSet analysisResultSet, Map<Long, List<Gene>> resultId2Genes, Appendable appendable ) throws IOException {
+    public void writeTsvToAppendable( ExpressionAnalysisResultSet analysisResultSet, Map<Long, List<Gene>> resultId2Genes, Writer appendable ) throws IOException {
        String experimentalFactorsMetadata = "[" + analysisResultSet.getExperimentalFactors().stream()
                .map( this::formatExperimentalFactor )
                .collect( Collectors.joining( ", " ) ) + "]";

@@ -122,7 +123,7 @@ private String formatCharacteristics( Collection<Characteristic> characteristics
    }

    @Override
-    public void writeTsvToAppendable( ExpressionAnalysisResultSet entity, Appendable appendable ) throws IOException {
-        writeTsvToAppendable( entity, Collections.emptyMap(), appendable );
+    public void writeTsv( ExpressionAnalysisResultSet entity, Writer writer ) throws IOException {
+        writeTsvToAppendable( entity, Collections.emptyMap(), writer );
    }
}
[additional changed files not shown]
