Merge pull request #189 from vincentmigot/jena-insert-sensor
Jena #3 - Use Jena for Sensor INSERT and DELETE
annetireau authored Jan 11, 2019
2 parents b58f712 + b0b0c5a commit 85aa7c8
Showing 1 changed file with 73 additions and 32 deletions.
105 changes: 73 additions & 32 deletions phis2-ws/src/main/java/phis2ws/service/dao/sesame/SensorDAOSesame.java
@@ -12,7 +12,15 @@
import java.util.List;
import java.util.Map;
import java.util.logging.Level;
import javax.ws.rs.core.Response;
+ import org.apache.jena.arq.querybuilder.UpdateBuilder;
+ import org.apache.jena.graph.Node;
+ import org.apache.jena.graph.NodeFactory;
+ import org.apache.jena.rdf.model.Property;
+ import org.apache.jena.rdf.model.Resource;
+ import org.apache.jena.rdf.model.ResourceFactory;
+ import org.apache.jena.update.UpdateRequest;
+ import org.apache.jena.vocabulary.RDF;
+ import org.apache.jena.vocabulary.RDFS;
import org.eclipse.rdf4j.query.BindingSet;
import org.eclipse.rdf4j.query.BooleanQuery;
import org.eclipse.rdf4j.query.MalformedQueryException;
@@ -37,7 +45,6 @@
import phis2ws.service.utils.POSTResultsReturn;
import phis2ws.service.utils.UriGenerator;
import phis2ws.service.utils.sparql.SPARQLQueryBuilder;
- import phis2ws.service.utils.sparql.SPARQLUpdateBuilder;
import phis2ws.service.view.brapi.Status;
import phis2ws.service.view.model.phis.Dataset;
import phis2ws.service.view.model.phis.Sensor;
@@ -506,28 +513,43 @@ public POSTResultsReturn check(List<SensorDTO> sensors) {
* @param sensor
* @return the query
*/
- private SPARQLUpdateBuilder prepareInsertQuery(Sensor sensor) {
- SPARQLUpdateBuilder query = new SPARQLUpdateBuilder();
+ private UpdateRequest prepareInsertQuery(Sensor sensor) {
+ UpdateBuilder spql = new UpdateBuilder();

- query.appendGraphURI(Contexts.SENSORS.toString());
- query.appendTriplet(sensor.getUri(), Rdf.RELATION_TYPE.toString(), sensor.getRdfType(), null);
- query.appendTriplet(sensor.getUri(), Rdfs.RELATION_LABEL.toString(), "\"" + sensor.getLabel() + "\"", null);
- query.appendTriplet(sensor.getUri(), Vocabulary.RELATION_HAS_BRAND.toString(), "\"" + sensor.getBrand() + "\"", null);
- query.appendTriplet(sensor.getUri(), Vocabulary.RELATION_IN_SERVICE_DATE.toString(), "\"" + sensor.getInServiceDate() + "\"", null);
- query.appendTriplet(sensor.getUri(), Vocabulary.RELATION_PERSON_IN_CHARGE.toString(), "\"" + sensor.getPersonInCharge() + "\"", null);
+ Node graph = NodeFactory.createURI(Contexts.SENSORS.toString());
+
+ Resource sensorUri = ResourceFactory.createResource(sensor.getUri());
+
+ Node sensorType = NodeFactory.createURI(sensor.getRdfType());
+
+ spql.addInsert(graph, sensorUri, RDF.type, sensorType);
+ spql.addInsert(graph, sensorUri, RDFS.label, sensor.getLabel());
+
+ Property relationHasBrand = ResourceFactory.createProperty(Vocabulary.RELATION_HAS_BRAND.toString());
+ Property relationInServiceDate = ResourceFactory.createProperty(Vocabulary.RELATION_IN_SERVICE_DATE.toString());
+ Property relationPersonInCharge = ResourceFactory.createProperty(Vocabulary.RELATION_PERSON_IN_CHARGE.toString());
+
+ spql.addInsert(graph, sensorUri, relationHasBrand, sensor.getBrand() );
+ spql.addInsert(graph, sensorUri, relationInServiceDate, sensor.getInServiceDate());
+ spql.addInsert(graph, sensorUri, relationPersonInCharge, sensor.getPersonInCharge() );

if (sensor.getSerialNumber() != null) {
- query.appendTriplet(sensor.getUri(), Vocabulary.RELATION_SERIAL_NUMBER.toString(), "\"" + sensor.getSerialNumber() + "\"", null);
+ Property relationSerialNumber = ResourceFactory.createProperty(Vocabulary.RELATION_SERIAL_NUMBER.toString());
+ spql.addInsert(graph, sensorUri, relationSerialNumber, sensor.getSerialNumber() );
}

if (sensor.getDateOfPurchase() != null) {
- query.appendTriplet(sensor.getUri(), Vocabulary.RELATION_DATE_OF_PURCHASE.toString(), "\"" + sensor.getDateOfPurchase() + "\"", null);
+ Property relationDateOfPurchase = ResourceFactory.createProperty(Vocabulary.RELATION_DATE_OF_PURCHASE.toString());
+ spql.addInsert(graph, sensorUri, relationDateOfPurchase, sensor.getDateOfPurchase() );
}

if (sensor.getDateOfLastCalibration() != null) {
- query.appendTriplet(sensor.getUri(), Vocabulary.RELATION_DATE_OF_LAST_CALIBRATION.toString(), "\"" + sensor.getDateOfLastCalibration() + "\"", null);
+ Property relationDateOfCalibration = ResourceFactory.createProperty(Vocabulary.RELATION_DATE_OF_LAST_CALIBRATION.toString());
+ spql.addInsert(graph, sensorUri, relationDateOfCalibration, sensor.getDateOfLastCalibration() );
}

+ UpdateRequest query = spql.buildRequest();
+
LOGGER.debug(getTraceabilityLogs() + " query : " + query.toString());
return query;
}
@@ -557,7 +579,7 @@ public POSTResultsReturn insert(List<SensorDTO> sensorsDTO) {
Sensor sensor = sensorDTO.createObjectFromDTO();
sensor.setUri(uriGenerator.generateNewInstanceUri(sensorDTO.getRdfType(), null, null));

- SPARQLUpdateBuilder query = prepareInsertQuery(sensor);
+ UpdateRequest query = prepareInsertQuery(sensor);
Update prepareUpdate = this.getConnection().prepareUpdate(QueryLanguage.SPARQL, query.toString());
prepareUpdate.execute();

@@ -604,7 +626,8 @@ public POSTResultsReturn checkAndInsert(List<SensorDTO> sensors) {
/**
* prepare a delete query of the triplets corresponding to the given sensor
* e.g.
- * DELETE WHERE {
+ * DELETE DATA {
+ * GRAPH <http://www.phenome-fppn.fr/diaphen/set/sensors> {
* <http://www.phenome-fppn.fr/diaphen/2018/s18142> rdf:type <http://www.phenome-fppn.fr/vocabulary/2017#Thermocouple> .
* <http://www.phenome-fppn.fr/diaphen/2018/s18142> rdfs:label "par03_p" .
* <http://www.phenome-fppn.fr/diaphen/2018/s18142> <http://www.phenome-fppn.fr/vocabulary/2017#hasBrand> "Skye Instruments" .
@@ -613,31 +636,49 @@ public POSTResultsReturn checkAndInsert(List<SensorDTO> sensors) {
* <http://www.phenome-fppn.fr/diaphen/2018/s18142> <http://www.phenome-fppn.fr/vocabulary/2017#serialNumber> "A1E345F32" .
* <http://www.phenome-fppn.fr/diaphen/2018/s18142> <http://www.phenome-fppn.fr/vocabulary/2017#dateOfPurchase> "2017-06-15" .
* <http://www.phenome-fppn.fr/diaphen/2018/s18142> <http://www.phenome-fppn.fr/vocabulary/2017#dateOfLastCalibration> "2017-06-15"
* }
+ * }
* @param sensor
* @return
*/
- private String prepareDeleteQuery(Sensor sensor) {
- String query;
- query = "DELETE WHERE { "
- + "<" + sensor.getUri() + "> <" + Rdf.RELATION_TYPE.toString() + "> <" + sensor.getRdfType() + "> . "
- + "<" + sensor.getUri() + "> <" + Rdfs.RELATION_LABEL.toString() + "> \"" + sensor.getLabel() + "\" . "
- + "<" + sensor.getUri() + "> <" + Vocabulary.RELATION_HAS_BRAND.toString() + "> \"" + sensor.getBrand() + "\" . "
- + "<" + sensor.getUri() + "> <" + Vocabulary.RELATION_IN_SERVICE_DATE.toString() + "> \"" + sensor.getInServiceDate() + "\" . "
- + "<" + sensor.getUri() + "> <" + Vocabulary.RELATION_PERSON_IN_CHARGE.toString() + "> \"" + sensor.getPersonInCharge() + "\" . ";
+ private UpdateRequest prepareDeleteQuery(Sensor sensor) {
+ UpdateBuilder spql = new UpdateBuilder();
+
+ Node graph = NodeFactory.createURI(Contexts.SENSORS.toString());
+
+ Resource sensorUri = ResourceFactory.createResource(sensor.getUri());
+
+ Node sensorType = NodeFactory.createURI(sensor.getRdfType());
+
+ spql.addDelete(graph, sensorUri, RDF.type, sensorType);
+ spql.addDelete(graph, sensorUri, RDFS.label, sensor.getLabel());
+
+ Property relationHasBrand = ResourceFactory.createProperty(Vocabulary.RELATION_HAS_BRAND.toString());
+ Property relationInServiceDate = ResourceFactory.createProperty(Vocabulary.RELATION_IN_SERVICE_DATE.toString());
+ Property relationPersonInCharge = ResourceFactory.createProperty(Vocabulary.RELATION_PERSON_IN_CHARGE.toString());
+
+ spql.addDelete(graph, sensorUri, relationHasBrand, sensor.getBrand() );
+ spql.addDelete(graph, sensorUri, relationInServiceDate, sensor.getInServiceDate());
+ spql.addDelete(graph, sensorUri, relationPersonInCharge, sensor.getPersonInCharge() );

if (sensor.getSerialNumber() != null) {
- query += "<" + sensor.getUri() + "> <" + Vocabulary.RELATION_SERIAL_NUMBER.toString() + "> \"" + sensor.getSerialNumber() + "\" . ";
+ Property relationSerialNumber = ResourceFactory.createProperty(Vocabulary.RELATION_SERIAL_NUMBER.toString());
+ spql.addDelete(graph, sensorUri, relationSerialNumber, sensor.getSerialNumber() );
}

if (sensor.getDateOfPurchase() != null) {
- query += "<" + sensor.getUri() + "> <" + Vocabulary.RELATION_DATE_OF_PURCHASE.toString() + "> \"" + sensor.getDateOfPurchase() + "\" . ";
+ Property relationDateOfPurchase = ResourceFactory.createProperty(Vocabulary.RELATION_DATE_OF_PURCHASE.toString());
+ spql.addDelete(graph, sensorUri, relationDateOfPurchase, sensor.getDateOfPurchase() );
}

if (sensor.getDateOfLastCalibration() != null) {
- query += "<" + sensor.getUri() + "> <" + Vocabulary.RELATION_DATE_OF_LAST_CALIBRATION.toString() + "> \"" + sensor.getDateOfLastCalibration() + "\" . ";
+ Property relationDateOfCalibration = ResourceFactory.createProperty(Vocabulary.RELATION_DATE_OF_LAST_CALIBRATION.toString());
+ spql.addDelete(graph, sensorUri, relationDateOfCalibration, sensor.getDateOfLastCalibration() );
}

- query += " }";
+ UpdateRequest query = spql.buildRequest();

LOGGER.debug(getTraceabilityLogs() + " query : " + query.toString());
return query;
}

@@ -661,17 +702,17 @@ private POSTResultsReturn update(List<SensorDTO> sensors) {
uri = sensorDTO.getUri();
ArrayList<Sensor> sensorsCorresponding = allPaginate();
if (sensorsCorresponding.size() > 0) {
- String deleteQuery = prepareDeleteQuery(sensorsCorresponding.get(0));
+ UpdateRequest deleteQuery = prepareDeleteQuery(sensorsCorresponding.get(0));

//2. insert new data
- SPARQLUpdateBuilder insertQuery = prepareInsertQuery(sensorDTO.createObjectFromDTO());
+ UpdateRequest insertQuery = prepareInsertQuery(sensorDTO.createObjectFromDTO());
try {
this.getConnection().begin();
- Update prepareDelete = this.getConnection().prepareUpdate(deleteQuery);
- Update prepareUpdate = this.getConnection().prepareUpdate(QueryLanguage.SPARQL, insertQuery.toString());
+ Update prepareDelete = this.getConnection().prepareUpdate(deleteQuery.toString());
LOGGER.debug(getTraceabilityLogs() + " query : " + prepareDelete.toString());
- LOGGER.debug(getTraceabilityLogs() + " query : " + prepareUpdate.toString());
prepareDelete.execute();
+ Update prepareUpdate = this.getConnection().prepareUpdate(QueryLanguage.SPARQL, insertQuery.toString());
+ LOGGER.debug(getTraceabilityLogs() + " query : " + prepareUpdate.toString());
prepareUpdate.execute();
updatedResourcesUri.add(sensorDTO.getUri());
} catch (MalformedQueryException e) {
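
Note: the following is not part of the commit. It is a minimal, self-contained sketch of the Jena UpdateBuilder pattern that the new prepareInsertQuery and prepareDeleteQuery methods rely on. The graph, sensor and property URIs are placeholders; the literal values are taken from the javadoc example above.

// Sketch only: assumed placeholder URIs, not the PHIS vocabulary.
import org.apache.jena.arq.querybuilder.UpdateBuilder;
import org.apache.jena.graph.Node;
import org.apache.jena.graph.NodeFactory;
import org.apache.jena.rdf.model.Property;
import org.apache.jena.rdf.model.Resource;
import org.apache.jena.rdf.model.ResourceFactory;
import org.apache.jena.update.UpdateRequest;
import org.apache.jena.vocabulary.RDF;
import org.apache.jena.vocabulary.RDFS;

public class UpdateBuilderSketch {
    public static void main(String[] args) {
        // Placeholder named graph, sensor URI, type and property (illustrative only).
        Node graph = NodeFactory.createURI("http://example.org/set/sensors");
        Resource sensor = ResourceFactory.createResource("http://example.org/2018/s18001");
        Node type = NodeFactory.createURI("http://example.org/vocabulary#Thermocouple");
        Property hasBrand = ResourceFactory.createProperty("http://example.org/vocabulary#hasBrand");

        // Builds: INSERT DATA { GRAPH <graph> { <sensor> rdf:type <type> ; rdfs:label "par03_p" ; <hasBrand> "Skye Instruments" } }
        UpdateRequest insert = new UpdateBuilder()
                .addInsert(graph, sensor, RDF.type, type)
                .addInsert(graph, sensor, RDFS.label, "par03_p")
                .addInsert(graph, sensor, hasBrand, "Skye Instruments")
                .buildRequest();
        System.out.println(insert);

        // Builds the matching DELETE DATA request for the same triples.
        UpdateRequest delete = new UpdateBuilder()
                .addDelete(graph, sensor, RDF.type, type)
                .addDelete(graph, sensor, RDFS.label, "par03_p")
                .addDelete(graph, sensor, hasBrand, "Skye Instruments")
                .buildRequest();
        System.out.println(delete);
    }
}

The UpdateRequest returned by buildRequest() is serialized with toString() and handed to the RDF4J connection through prepareUpdate(QueryLanguage.SPARQL, ...), as the insert() and update() methods do in the diff above.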
