Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Use Jena 2.12.1 implementation of the rulesys reasoner #36

Open
wants to merge 5 commits into
base: development
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
96 changes: 95 additions & 1 deletion pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
<name>baseCode</name>
<groupId>baseCode</groupId>
<artifactId>baseCode</artifactId>
<version>1.1.18</version>
<version>1.1.19</version>
<inceptionYear>2003</inceptionYear>
<description>
<![CDATA[Data structures, math and statistics tools, and utilities that are often needed across projects.]]>
Expand Down Expand Up @@ -347,6 +347,100 @@
</gitFlowConfig>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-dependency-plugin</artifactId>
<version>3.3.0</version>
<executions>
<execution>
<id>unpack-jena-rete-engine</id>
<phase>generate-sources</phase>
<goals>
<goal>unpack</goal>
</goals>
<configuration>
<artifactItems>
<artifactItem>
<groupId>org.apache.jena</groupId>
<artifactId>jena-core</artifactId>
<version>2.12.1</version>
<classifier>sources</classifier>
<outputDirectory>${project.build.directory}/generated-sources/jena-core</outputDirectory>
<includes>com/hp/hpl/jena/reasoner/rulesys/impl/*.java,com/hp/hpl/jena/graph/NodeFactory.java</includes>
</artifactItem>
</artifactItems>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>build-helper-maven-plugin</artifactId>
<version>3.3.0</version>
<executions>
<execution>
<phase>generate-sources</phase>
<goals>
<goal>add-source</goal>
</goals>
<configuration>
<!-- dependencies:unpack does not add a source root -->
<sources>${project.build.directory}/generated-sources/jena-core</sources>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<version>3.5.0</version>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
<configuration>
<artifactSet>
<includes>
<include>org.apache.jena:*</include>
</includes>
</artifactSet>
<filters>
<filter>
<artifact>org.apache.jena:*</artifact>
<includes>
<include>com/hp/hpl/jena/**/*</include>
<include>org/apache/jena/**/*</include>
<include>org/openjena/**/*</include>
<include>etc/**/*</include>
<include>vocabularies/**/*</include>
</includes>
<excludes>
<!-- this one we compiled ourselves -->
<exclude>com/hp/hpl/jena/reasoner/rulesys/impl/**/*</exclude>
</excludes>
</filter>
</filters>
<!-- relocate Jena under a private package name to prevent conflicts -->
<relocations>
<relocation>
<pattern>com.hp.hpl.jena</pattern>
<shadedPattern>ubic.basecode.ontology.jena.impl</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.jena</pattern>
<shadedPattern>ubic.basecode.ontology.jena.impl</shadedPattern>
</relocation>
<relocation>
<pattern>org.openjena</pattern>
<shadedPattern>ubic.basecode.ontology.jena.impl</shadedPattern>
</relocation>
</relocations>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,13 @@
package ubic.basecode.ontology.jena;

import com.hp.hpl.jena.ontology.OntModelSpec;
import com.hp.hpl.jena.ontology.ProfileRegistry;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.reasoner.ReasonerFactory;
import com.hp.hpl.jena.reasoner.rulesys.OWLFBRuleReasonerFactory;
import com.hp.hpl.jena.reasoner.rulesys.OWLMicroReasonerFactory;
import com.hp.hpl.jena.reasoner.rulesys.OWLMiniReasonerFactory;
import com.hp.hpl.jena.reasoner.transitiveReasoner.TransitiveReasonerFactory;
import ubic.basecode.ontology.model.OntologyModel;
import ubic.basecode.util.Configuration;

Expand All @@ -35,23 +42,50 @@ protected String getOntologyUrl() {
}

@Override
protected OntologyModel loadModel( boolean processImports, InferenceMode inferenceMode ) throws IOException {
return new OntologyModelImpl( OntologyLoader.loadMemoryModel( this.getOntologyUrl(), this.getCacheName(), processImports, this.getSpec( inferenceMode ) ) );
protected OntologyModel loadModel( boolean processImports, LanguageLevel languageLevel, InferenceMode inferenceMode ) throws IOException {
return new OntologyModelImpl( OntologyLoader.loadMemoryModel( this.getOntologyUrl(), this.getCacheName(), processImports, this.getSpec( languageLevel, inferenceMode ) ) );
}

@Override
protected OntologyModel loadModelFromStream( InputStream is, boolean processImports, InferenceMode inferenceMode ) throws IOException {
return new OntologyModelImpl( OntologyLoader.loadMemoryModel( is, this.getOntologyUrl(), processImports, this.getSpec( inferenceMode ) ) );
protected OntologyModel loadModelFromStream( InputStream is, boolean processImports, LanguageLevel languageLevel, InferenceMode inferenceMode ) throws IOException {
return new OntologyModelImpl( OntologyLoader.loadMemoryModel( is, this.getOntologyUrl(), processImports, this.getSpec( languageLevel, inferenceMode ) ) );
}

private OntModelSpec getSpec( InferenceMode inferenceMode ) {
/**
 * Build an ontology model specification for the requested OWL language level and
 * inference mode.
 *
 * @param languageLevel OWL profile to validate against (FULL, DL or LITE)
 * @param inferenceMode which rulesys reasoner (if any) to attach to the model
 * @throws UnsupportedOperationException if either enum value is not handled
 */
private OntModelSpec getSpec( LanguageLevel languageLevel, InferenceMode inferenceMode ) {
    String profile;
    switch ( languageLevel ) {
        case FULL:
            profile = ProfileRegistry.OWL_LANG;
            break;
        case DL:
            profile = ProfileRegistry.OWL_DL_LANG;
            break;
        case LITE:
            profile = ProfileRegistry.OWL_LITE_LANG;
            break;
        default:
            throw new UnsupportedOperationException( String.format( "Unsupported OWL language level %s.", languageLevel ) );
    }
    ReasonerFactory reasonerFactory;
    switch ( inferenceMode ) {
        case FULL:
            reasonerFactory = OWLFBRuleReasonerFactory.theInstance();
            break;
        case MINI:
            reasonerFactory = OWLMiniReasonerFactory.theInstance();
            break;
        case MICRO:
            reasonerFactory = OWLMicroReasonerFactory.theInstance();
            break;
        case TRANSITIVE:
            reasonerFactory = TransitiveReasonerFactory.theInstance();
            break;
        case NONE:
            // No reasoner at all: a null factory yields a plain, non-inferencing model.
            reasonerFactory = null;
            break;
        default:
            throw new UnsupportedOperationException( String.format( "Unsupported inference level %s.", inferenceMode ) );
    }
    return new OntModelSpec( ModelFactory.createMemModelMaker(), null, reasonerFactory, profile );
}
}
41 changes: 26 additions & 15 deletions src/ubic/basecode/ontology/jena/AbstractOntologyService.java
Original file line number Diff line number Diff line change
Expand Up @@ -74,6 +74,7 @@ public abstract class AbstractOntologyService implements OntologyService {
}

/* settings (applicable for next initialization) */
private LanguageLevel nextLanguageLevel = LanguageLevel.FULL;
private InferenceMode nextInferenceMode = InferenceMode.TRANSITIVE;
private boolean nextProcessImports = true;
private boolean nextSearchEnabled = true;
Expand All @@ -91,12 +92,30 @@ public abstract class AbstractOntologyService implements OntologyService {
private Set<Restriction> additionalRestrictions;
private boolean isInitialized = false;
@Nullable
private LanguageLevel languageLevel = null;
@Nullable
private InferenceMode inferenceMode = null;
@Nullable
private Boolean processImports = null;
@Nullable
private Boolean searchEnabled = null;

@Override
public LanguageLevel getLanguageLevel() {
    // Report the effective language level: the one the current model was initialized
    // with if available, otherwise the value that will apply on the next initialization.
    Lock lock = rwLock.readLock();
    // Acquire the lock BEFORE the try block: if lock() were to throw inside the try,
    // the finally would call unlock() on a lock we do not hold (Lock javadoc idiom).
    lock.lock();
    try {
        return this.languageLevel != null ? this.languageLevel : nextLanguageLevel;
    } finally {
        lock.unlock();
    }
}

@Override
public void setLanguageLevel( LanguageLevel languageLevel ) {
    // Applies to the NEXT initialization only; does not alter an already-loaded model.
    // NOTE(review): this write is not performed under rwLock while getLanguageLevel()
    // reads under a read lock — presumably acceptable for a simple reference write,
    // but confirm visibility expectations across threads.
    this.nextLanguageLevel = languageLevel;
}

@Override
public InferenceMode getInferenceMode() {
Lock lock = rwLock.readLock();
Expand Down Expand Up @@ -162,6 +181,7 @@ private void initialize( @Nullable InputStream stream, boolean forceLoad, boolea
String ontologyUrl = getOntologyUrl();
String ontologyName = getOntologyName();
String cacheName = getCacheName();
LanguageLevel languageLevel = nextLanguageLevel;
InferenceMode inferenceMode = nextInferenceMode;
boolean processImports = nextProcessImports;
boolean searchEnabled = nextSearchEnabled;
Expand Down Expand Up @@ -196,7 +216,7 @@ private void initialize( @Nullable InputStream stream, boolean forceLoad, boolea
return;

try {
OntologyModel m = stream != null ? loadModelFromStream( stream, processImports, inferenceMode ) : loadModel( processImports, inferenceMode ); // can take a while.
OntologyModel m = stream != null ? loadModelFromStream( stream, processImports, languageLevel, inferenceMode ) : loadModel( processImports, languageLevel, inferenceMode ); // can take a while.
if ( m instanceof OntologyModelImpl ) {
model = ( ( OntologyModelImpl ) m ).getOntModel();
} else {
Expand Down Expand Up @@ -254,6 +274,7 @@ private void initialize( @Nullable InputStream stream, boolean forceLoad, boolea
this.additionalRestrictions = additionalRestrictions;
this.index = index;
this.isInitialized = true;
this.languageLevel = languageLevel;
this.inferenceMode = inferenceMode;
this.processImports = processImports;
this.searchEnabled = searchEnabled;
Expand Down Expand Up @@ -615,13 +636,13 @@ public void waitForInitializationThread() throws InterruptedException {
* Delegates the call as to load the model into memory or leave it on disk. Simply delegates to either
* OntologyLoader.loadMemoryModel( url ); OR OntologyLoader.loadPersistentModel( url, spec );
*/
protected abstract OntologyModel loadModel( boolean processImports, InferenceMode inferenceMode ) throws IOException;
protected abstract OntologyModel loadModel( boolean processImports, LanguageLevel languageLevel, InferenceMode inferenceMode ) throws IOException;


/**
* Load a model from a given input stream.
*/
protected abstract OntologyModel loadModelFromStream( InputStream stream, boolean processImports, InferenceMode inferenceMode ) throws IOException;
protected abstract OntologyModel loadModelFromStream( InputStream stream, boolean processImports, LanguageLevel languageLevel, InferenceMode inferenceMode ) throws IOException;

/**
* A name for caching this ontology, or null to disable caching.
Expand All @@ -633,17 +654,6 @@ protected String getCacheName() {
return getOntologyName();
}

/**
 * Resolve the in-memory model specification for the given inference mode.
 *
 * @throws UnsupportedOperationException for any mode other than TRANSITIVE or NONE
 */
private OntModelSpec getSpec( InferenceMode inferenceMode ) {
    if ( inferenceMode == InferenceMode.TRANSITIVE ) {
        return OntModelSpec.OWL_MEM_TRANS_INF;
    }
    if ( inferenceMode == InferenceMode.NONE ) {
        return OntModelSpec.OWL_MEM;
    }
    throw new UnsupportedOperationException( String.format( "Unsupported inference level %s.", inferenceMode ) );
}

@Override
public void index( boolean force ) {
String cacheName = getCacheName();
Expand Down Expand Up @@ -743,7 +753,8 @@ public void loadTermsInNameSpace( InputStream is, boolean forceIndex ) {

@Override
public String toString() {
    // Summarize the service identity together with its effective settings; the
    // getters fall back to the next-initialization values when no model is loaded.
    return String.format( "%s [url=%s] [language level=%s] [inference mode=%s] [imports=%b] [search=%b]",
            getOntologyName(), getOntologyUrl(), getLanguageLevel(), getInferenceMode(), getProcessImports(), isSearchEnabled() );
}

private Set<OntClass> getOntClassesFromTerms( Collection<OntologyTerm> terms ) {
Expand Down
1 change: 0 additions & 1 deletion src/ubic/basecode/ontology/model/OntologyResource.java
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,6 @@
package ubic.basecode.ontology.model;

import javax.annotation.Nullable;
import java.io.Serializable;

/**
* @author pavlidis
Expand Down
5 changes: 3 additions & 2 deletions src/ubic/basecode/ontology/model/OntologyTermSimple.java
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
package ubic.basecode.ontology.model;

import javax.annotation.Nullable;
import java.io.Serializable;
import java.util.Collection;
import java.util.Comparator;
import java.util.Objects;
Expand All @@ -25,8 +26,8 @@
*
* @author Paul
*/

public class OntologyTermSimple implements OntologyTerm {
@SuppressWarnings("unused")
public class OntologyTermSimple implements OntologyTerm, Serializable {

/**
*
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -52,12 +52,12 @@ protected String getOntologyUrl() {
}

@Override
protected OntologyModel loadModel( boolean processImports, InferenceMode inferenceMode ) {
protected OntologyModel loadModel( boolean processImports, LanguageLevel languageLevel, InferenceMode inferenceMode ) {
try ( InputStream is = this.getClass().getResourceAsStream( MEDIC_ONTOLOGY_FILE ) ) {
if ( is == null ) {
throw new RuntimeException( String.format( "The MEDIC ontology was not found in classpath at %s.", MEDIC_ONTOLOGY_FILE ) );
}
return loadModelFromStream( new GZIPInputStream( is ), processImports, inferenceMode );
return loadModelFromStream( new GZIPInputStream( is ), processImports, languageLevel, inferenceMode );
} catch ( IOException e ) {
throw new RuntimeException( e );
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -43,12 +43,12 @@ protected String getOntologyUrl() {
}

@Override
protected OntologyModel loadModel( boolean processImports, InferenceMode inferenceMode ) {
protected OntologyModel loadModel( boolean processImports, LanguageLevel languageLevel, InferenceMode inferenceMode ) {
try ( InputStream stream = getClass().getResourceAsStream( NIFSTD_ONTOLOGY_FILE ) ) {
if ( stream == null ) {
throw new RuntimeException( String.format( "The NIF ontology was not found in classpath at %s.", NIFSTD_ONTOLOGY_FILE ) );
}
return loadModelFromStream( new GZIPInputStream( stream ), processImports, inferenceMode );
return loadModelFromStream( new GZIPInputStream( stream ), processImports, languageLevel, inferenceMode );
} catch ( IOException e ) {
throw new RuntimeException( e );
}
Expand Down
43 changes: 42 additions & 1 deletion src/ubic/basecode/ontology/providers/OntologyService.java
Original file line number Diff line number Diff line change
Expand Up @@ -16,9 +16,50 @@ public interface OntologyService {

void setProcessImports( boolean processImports );

enum LanguageLevel {
    /**
     * The full OWL language, with no restrictions on expressiveness.
     */
    FULL,
    /**
     * OWL DL, the description-logic subset of OWL.
     */
    DL,
    /**
     * OWL Lite, the least expressive OWL profile.
     */
    LITE
}

LanguageLevel getLanguageLevel();

void setLanguageLevel( LanguageLevel languageLevel );

/**
 * Amount of inference to apply when loading an ontology; the modes are listed from
 * fastest (least inference) to slowest (most complete).
 */
enum InferenceMode {
    /**
     * No inference is supported, only the axioms defined in the ontology are considered.
     */
    NONE,
    /**
     * Only basic inference is supported for {@code subClassOf} and {@code subPropertyOf}.
     * <p>
     * This is the fastest inference mode.
     */
    TRANSITIVE,
    /**
     * Very limited inference.
     */
    MICRO,
    /**
     * Limited inference.
     */
    MINI,
    /**
     * Complete inference.
     * <p>
     * This is the slowest inference mode.
     */
    FULL
}

/**
Expand Down