Use same logic to remove blank nodes from reasoner's internal model as used in RDFService (#397)

* Refactor blank-nodes-as-variables behavior from RDFServiceJena into JenaModelUtils; use this logic when removing triples from the TBox reasoner's internal assertions model. Add a unit test; a usage sketch of the shared helper follows below.

* Remove the complication of wrapping models in datasets; remove blank nodes directly from the models

* Remove temporary debugging logging

* Simplify original blank node tree logic

* Remove unneeded imports
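
A minimal sketch of how callers use the shared helper after this refactor (illustrative only, not part of the commit; it assumes the Vitro classes are on the classpath, and the Turtle string and class name are made up):

    import java.io.StringReader;

    import org.apache.jena.rdf.model.Model;
    import org.apache.jena.rdf.model.ModelFactory;

    import edu.cornell.mannlib.vitro.webapp.dao.jena.JenaModelUtils;

    public class BlankNodeRemovalSketch {
        public static void main(String[] args) {
            String ttl = "@prefix ex: <http://example.org/> .\n"
                    + "ex:thing ex:hasPart [ ex:label \"a part\" ] .\n";
            // Each read assigns fresh internal blank node IDs, so a plain
            // target.remove(toRemove) would leave the blank node statements behind.
            Model target = ModelFactory.createDefaultModel();
            target.read(new StringReader(ttl), null, "TTL");
            Model toRemove = ModelFactory.createDefaultModel();
            toRemove.read(new StringReader(ttl), null, "TTL");
            // Treats blank nodes as variables and constructs the matching
            // subgraph for deletion, so the structure is removed anyway.
            JenaModelUtils.removeWithBlankNodesAsVariables(toRemove, target);
            System.out.println("remaining statements: " + target.size()); // expected: 0
        }
    }

This mirrors what the new JFactTBoxReasonerTest exercises against the TBox reasoner's assertions model.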
Brian Lowe 2023-06-15 13:55:23 +03:00 committed by GitHub
parent 3a88c451ab
commit fb9d86a57d
7 changed files with 326 additions and 251 deletions

JenaModelUtils.java

@@ -2,15 +2,17 @@
package edu.cornell.mannlib.vitro.webapp.dao.jena;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentLinkedQueue;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.jena.graph.Triple;
import org.apache.jena.ontology.Individual;
import org.apache.jena.ontology.OntClass;
import org.apache.jena.ontology.OntModel;
@@ -21,12 +23,16 @@ import org.apache.jena.query.Query;
import org.apache.jena.query.QueryExecution;
import org.apache.jena.query.QueryExecutionFactory;
import org.apache.jena.query.QueryFactory;
import org.apache.jena.query.QuerySolution;
import org.apache.jena.query.ResultSet;
import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.ModelFactory;
import org.apache.jena.rdf.model.Property;
import org.apache.jena.rdf.model.RDFNode;
import org.apache.jena.rdf.model.Resource;
import org.apache.jena.rdf.model.ResourceFactory;
import org.apache.jena.rdf.model.Statement;
import org.apache.jena.rdf.model.StmtIterator;
import org.apache.jena.shared.Lock;
import org.apache.jena.vocabulary.OWL;
import org.apache.jena.vocabulary.RDF;
@@ -39,6 +45,8 @@ import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactoryConfig;
public class JenaModelUtils {
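// Selects the roots of blank node trees: every subject that is either a named
// resource or a blank node that never appears as the object of another triple.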
public static final String BNODE_ROOT_QUERY =
"SELECT DISTINCT ?s WHERE { ?s ?p ?o OPTIONAL { ?ss ?pp ?s } FILTER (!isBlank(?s) || !bound(?ss)) }";
private static final Log log = LogFactory.getLog(JenaModelUtils.class.getName());
private static final Set<String> nonIndividualTypeURIs ;
@@ -398,5 +406,190 @@ public class JenaModelUtils {
return aboxModel;
}
/**
* Remove statements from a model by separating statements
* containing blank nodes from those that have no blank nodes.
* The blank node statements are removed by treating blank nodes as variables and
* constructing the matching subgraphs for deletion.
* The other statements are removed normally.
* @param toRemove model containing the statements to be removed
* @param removeFrom model from which the statements should be removed
*/
public static void removeWithBlankNodesAsVariables(Model toRemove, Model removeFrom) {
List<Statement> blankNodeStatements = new ArrayList<Statement>();
List<Statement> nonBlankNodeStatements = new ArrayList<Statement>();
StmtIterator stmtIt = toRemove.listStatements();
while (stmtIt.hasNext()) {
Statement stmt = stmtIt.nextStatement();
if (stmt.getSubject().isAnon() || stmt.getObject().isAnon()) {
blankNodeStatements.add(stmt);
} else {
nonBlankNodeStatements.add(stmt);
}
}
if(!blankNodeStatements.isEmpty()) {
Model blankNodeModel = ModelFactory.createDefaultModel();
blankNodeModel.add(blankNodeStatements);
removeBlankNodesUsingSparqlConstruct(blankNodeModel, removeFrom);
}
if(!nonBlankNodeStatements.isEmpty()) {
try {
removeFrom.enterCriticalSection(Lock.WRITE);
removeFrom.remove(nonBlankNodeStatements);
} finally {
removeFrom.leaveCriticalSection();
}
}
}
private static void removeBlankNodesUsingSparqlConstruct(Model blankNodeModel,
Model removeFrom) {
log.debug("blank node model size " + blankNodeModel.size());
if (blankNodeModel.size() == 1) {
log.debug("Deleting single triple with blank node: " + blankNodeModel);
log.debug("This could result in the deletion of multiple triples"
+ " if multiple blank nodes match the same triple pattern.");
}
Query rootFinderQuery = QueryFactory.create(BNODE_ROOT_QUERY);
QueryExecution qe = QueryExecutionFactory.create(rootFinderQuery, blankNodeModel);
try {
ResultSet rs = qe.execSelect();
while (rs.hasNext()) {
QuerySolution qs = rs.next();
Resource s = qs.getResource("s");
String treeFinder = makeDescribe(s);
Query treeFinderQuery = QueryFactory.create(treeFinder);
QueryExecution qee = QueryExecutionFactory.create(treeFinderQuery, blankNodeModel);
try {
Model tree = qee.execDescribe();
JenaModelUtils.removeUsingSparqlConstruct(tree, removeFrom);
} finally {
qee.close();
}
}
} finally {
qe.close();
}
}
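// Builds a DESCRIBE query for a tree root. Run against the small blank-node-only
// model, Jena's default DESCRIBE handler returns the root's statements plus the
// closure of blank nodes reachable from it.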
private static String makeDescribe(Resource s) {
StringBuilder query = new StringBuilder("DESCRIBE <") ;
if (s.isAnon()) {
query.append("_:").append(s.getId().toString());
} else {
query.append(s.getURI());
}
query.append(">");
return query.toString();
}
private static final boolean WHERE_CLAUSE = true;
/**
* Remove statements from a model by first constructing
* the statements to be removed with a SPARQL query that treats
* each blank node ID as a variable.
* This allows matching blank node structures to be removed even though
* the internal blank node IDs are different.
* @param toRemove model containing the statements to be removed
* @param removeFrom model from which the statements should be removed
*/
public static void removeUsingSparqlConstruct(Model toRemove, Model removeFrom) {
if(toRemove.isEmpty()) {
return;
}
List<Statement> stmts = toRemove.listStatements().toList();
stmts = sort(stmts);
StringBuffer queryBuff = new StringBuffer();
queryBuff.append("CONSTRUCT { \n");
addStatementPatterns(stmts, queryBuff, !WHERE_CLAUSE);
queryBuff.append("} WHERE { \n");
addStatementPatterns(stmts, queryBuff, WHERE_CLAUSE);
queryBuff.append("} \n");
String queryStr = queryBuff.toString();
log.debug(queryBuff.toString());
Query construct = QueryFactory.create(queryStr);
QueryExecution qe = QueryExecutionFactory.create(construct, removeFrom);
try {
Model constructedRemovals = qe.execConstruct();
try {
removeFrom.enterCriticalSection(Lock.WRITE);
removeFrom.remove(constructedRemovals);
} finally {
removeFrom.leaveCriticalSection();
}
} finally {
qe.close();
}
}
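// Orders statements root-first: statements with a named subject are emitted before
// the statements describing their (possibly blank) objects, so each connected
// structure appears as a contiguous group of patterns in the generated query.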
private static List<Statement> sort(List<Statement> stmts) {
List<Statement> output = new ArrayList<Statement>();
int originalSize = stmts.size();
if(originalSize == 1) {
return stmts;
}
List <Statement> remaining = stmts;
ConcurrentLinkedQueue<Resource> subjQueue = new ConcurrentLinkedQueue<Resource>();
for(Statement stmt : remaining) {
if(stmt.getSubject().isURIResource()) {
subjQueue.add(stmt.getSubject());
break;
}
}
if (subjQueue.isEmpty()) {
log.warn("No named subject in statement patterns");
return stmts;
}
while(remaining.size() > 0) {
if(subjQueue.isEmpty()) {
subjQueue.add(remaining.get(0).getSubject());
}
while(!subjQueue.isEmpty()) {
Resource subj = subjQueue.poll();
List<Statement> temp = new ArrayList<Statement>();
for (Statement stmt : remaining) {
if(stmt.getSubject().equals(subj)) {
output.add(stmt);
if (stmt.getObject().isResource()) {
subjQueue.add((Resource) stmt.getObject());
}
} else {
temp.add(stmt);
}
}
remaining = temp;
}
}
if(output.size() != originalSize) {
throw new RuntimeException("original list size was " + originalSize +
" but sorted size is " + output.size());
}
return output;
}
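// Renders each statement as a triple pattern, with blank nodes written as variables;
// in the WHERE clause a FILTER(isBlank(...)) restricts those variables to matching
// blank nodes only.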
private static void addStatementPatterns(List<Statement> stmts,
StringBuffer patternBuff, boolean whereClause) {
for(Statement stmt : stmts) {
Triple t = stmt.asTriple();
patternBuff.append(SparqlGraph.sparqlNodeDelete(t.getSubject(), null));
patternBuff.append(" ");
patternBuff.append(SparqlGraph.sparqlNodeDelete(t.getPredicate(), null));
patternBuff.append(" ");
patternBuff.append(SparqlGraph.sparqlNodeDelete(t.getObject(), null));
patternBuff.append(" .\n");
if (whereClause) {
if (t.getSubject().isBlank()) {
patternBuff.append(" FILTER(isBlank(").append(
SparqlGraph.sparqlNodeDelete(t.getSubject(), null)).append(")) \n");
}
if (t.getObject().isBlank()) {
patternBuff.append(" FILTER(isBlank(").append(
SparqlGraph.sparqlNodeDelete(t.getObject(), null)).append(")) \n");
}
}
}
}
}

RDFServiceImpl.java

@@ -45,9 +45,7 @@ import edu.cornell.mannlib.vitro.webapp.utils.logging.ToString;
public abstract class RDFServiceImpl implements RDFService {
private static final Log log = LogFactory.getLog(RDFServiceImpl.class);
protected static final String BNODE_ROOT_QUERY =
"SELECT DISTINCT ?s WHERE { ?s ?p ?o OPTIONAL { ?ss ?pp ?s } FILTER (!isBlank(?s) || !bound(?ss)) }";
protected String defaultWriteGraphURI;
protected List<ChangeListener> registeredListeners = new CopyOnWriteArrayList<ChangeListener>();
protected List<ModelChangedListener> registeredJenaListeners = new CopyOnWriteArrayList<ModelChangedListener>();

RDFServiceJena.java

@@ -10,46 +10,40 @@ import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.ConcurrentLinkedQueue;
import edu.cornell.mannlib.vitro.webapp.rdfservice.ResultSetConsumer;
import org.apache.commons.io.IOUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.jena.query.QuerySolutionMap;
import org.apache.jena.query.Syntax;
import org.apache.jena.rdf.model.Literal;
import org.apache.jena.riot.RDFDataMgr;
import org.apache.log4j.lf5.util.StreamUtils;
import org.apache.jena.graph.Triple;
import org.apache.jena.query.Dataset;
import org.apache.jena.query.DatasetFactory;
import org.apache.jena.query.Query;
import org.apache.jena.query.QueryExecution;
import org.apache.jena.query.QueryExecutionFactory;
import org.apache.jena.query.QueryFactory;
import org.apache.jena.query.QuerySolution;
import org.apache.jena.query.QuerySolutionMap;
import org.apache.jena.query.ResultSet;
import org.apache.jena.query.ResultSetFormatter;
import org.apache.jena.query.Syntax;
import org.apache.jena.rdf.model.Literal;
import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.ModelFactory;
import org.apache.jena.rdf.model.RDFNode;
import org.apache.jena.rdf.model.Resource;
import org.apache.jena.rdf.model.Statement;
import org.apache.jena.rdf.model.StmtIterator;
import org.apache.jena.riot.RDFDataMgr;
import org.apache.jena.sdb.SDB;
import org.apache.jena.shared.Lock;
import org.apache.jena.sparql.core.Quad;
import org.apache.log4j.lf5.util.StreamUtils;
import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest;
import edu.cornell.mannlib.vitro.webapp.dao.jena.DatasetWrapper;
import edu.cornell.mannlib.vitro.webapp.dao.jena.JenaModelUtils;
import edu.cornell.mannlib.vitro.webapp.dao.jena.RDFServiceDataset;
import edu.cornell.mannlib.vitro.webapp.dao.jena.SparqlGraph;
import edu.cornell.mannlib.vitro.webapp.rdfservice.ChangeSet;
import edu.cornell.mannlib.vitro.webapp.rdfservice.ModelChange;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException;
import edu.cornell.mannlib.vitro.webapp.rdfservice.ResultSetConsumer;
import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.RDFServiceImpl;
import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.RDFServiceUtils;
import edu.cornell.mannlib.vitro.webapp.utils.logging.ToString;
@@ -91,7 +85,7 @@ public abstract class RDFServiceJena extends RDFServiceImpl implements RDFServic
Model model = (modelChange.getGraphURI() == null) ?
dataset.getDefaultModel() :
dataset.getNamedModel(modelChange.getGraphURI());
operateOnModel(model, modelChange, dataset);
operateOnModel(model, modelChange);
} finally {
dataset.getLock().leaveCriticalSection();
}
@@ -104,7 +98,7 @@ public abstract class RDFServiceJena extends RDFServiceImpl implements RDFServic
}
}
protected void operateOnModel(Model model, ModelChange modelChange, Dataset dataset) {
protected void operateOnModel(Model model, ModelChange modelChange) {
model.enterCriticalSection(Lock.WRITE);
try {
if (log.isDebugEnabled()) {
@@ -115,10 +109,7 @@ public abstract class RDFServiceJena extends RDFServiceImpl implements RDFServic
model.add(addition);
} else if (modelChange.getOperation() == ModelChange.Operation.REMOVE) {
Model removal = parseModel(modelChange);
model.remove(removal);
if (dataset != null) {
removeBlankNodesWithSparqlUpdate(dataset, removal, modelChange.getGraphURI());
}
JenaModelUtils.removeWithBlankNodesAsVariables(removal, model);
} else {
log.error("unrecognized operation type");
}
@@ -179,216 +170,6 @@ public abstract class RDFServiceJena extends RDFServiceImpl implements RDFServic
ToString.modelToString(model)));
}
private void removeBlankNodesWithSparqlUpdate(Dataset dataset, Model model, String graphURI) {
List<Statement> blankNodeStatements = new ArrayList<Statement>();
StmtIterator stmtIt = model.listStatements();
while (stmtIt.hasNext()) {
Statement stmt = stmtIt.nextStatement();
if (stmt.getSubject().isAnon() || stmt.getObject().isAnon()) {
blankNodeStatements.add(stmt);
}
}
if(blankNodeStatements.size() == 0) {
return;
}
Model blankNodeModel = ModelFactory.createDefaultModel();
blankNodeModel.add(blankNodeStatements);
log.debug("removal model size " + model.size());
log.debug("blank node model size " + blankNodeModel.size());
if (blankNodeModel.size() == 1) {
log.debug("Deleting single triple with blank node: " + blankNodeModel);
log.debug("This could result in the deletion of multiple triples if multiple blank nodes match the same triple pattern.");
}
Query rootFinderQuery = QueryFactory.create(BNODE_ROOT_QUERY);
QueryExecution qe = QueryExecutionFactory.create(rootFinderQuery, blankNodeModel);
try {
ResultSet rs = qe.execSelect();
if (!rs.hasNext()) {
log.warn("No rooted blank node trees; deletion is not possible.");
}
while (rs.hasNext()) {
QuerySolution qs = rs.next();
Resource s = qs.getResource("s");
String treeFinder = makeDescribe(s);
Query treeFinderQuery = QueryFactory.create(treeFinder);
QueryExecution qee = QueryExecutionFactory.create(treeFinderQuery, blankNodeModel);
try {
Model tree = qee.execDescribe();
Dataset ds = DatasetFactory.createMem();
if (graphURI == null) {
ds.setDefaultModel(dataset.getDefaultModel());
} else {
ds.addNamedModel(graphURI, dataset.getNamedModel(graphURI));
}
if (s.isAnon()) {
removeUsingSparqlUpdate(ds, tree, graphURI);
} else {
StmtIterator sit = tree.listStatements(s, null, (RDFNode) null);
while (sit.hasNext()) {
Statement stmt = sit.nextStatement();
RDFNode n = stmt.getObject();
Model m2 = ModelFactory.createDefaultModel();
if (n.isResource()) {
Resource s2 = (Resource) n;
// now run yet another describe query
String smallerTree = makeDescribe(s2);
log.debug(smallerTree);
Query smallerTreeQuery = QueryFactory.create(smallerTree);
QueryExecution qe3 = QueryExecutionFactory.create(
smallerTreeQuery, tree);
try {
qe3.execDescribe(m2);
} finally {
qe3.close();
}
}
m2.add(stmt);
removeUsingSparqlUpdate(ds, m2, graphURI);
}
}
} finally {
qee.close();
}
}
} finally {
qe.close();
}
}
private String makeDescribe(Resource s) {
StringBuilder query = new StringBuilder("DESCRIBE <") ;
if (s.isAnon()) {
query.append("_:").append(s.getId().toString());
} else {
query.append(s.getURI());
}
query.append(">");
return query.toString();
}
private void removeUsingSparqlUpdate(Dataset dataset, Model model, String graphURI) {
StmtIterator stmtIt = model.listStatements();
if (!stmtIt.hasNext()) {
stmtIt.close();
return;
}
StringBuffer queryBuff = new StringBuffer();
queryBuff.append("CONSTRUCT { \n");
List<Statement> stmts = stmtIt.toList();
stmts = sort(stmts);
addStatementPatterns(stmts, queryBuff, !WHERE_CLAUSE);
queryBuff.append("} WHERE { \n");
if (graphURI != null) {
queryBuff.append(" GRAPH <").append(graphURI).append("> { \n");
}
stmtIt = model.listStatements();
stmts = stmtIt.toList();
stmts = sort(stmts);
addStatementPatterns(stmts, queryBuff, WHERE_CLAUSE);
if (graphURI != null) {
queryBuff.append(" } \n");
}
queryBuff.append("} \n");
log.debug(queryBuff.toString());
Query construct = QueryFactory.create(queryBuff.toString());
// make a plain dataset to force the query to be run in a way that
// won't overwhelm MySQL with too many joins
Dataset ds = DatasetFactory.createMem();
if (graphURI == null) {
ds.setDefaultModel(dataset.getDefaultModel());
} else {
ds.addNamedModel(graphURI, dataset.getNamedModel(graphURI));
}
QueryExecution qe = QueryExecutionFactory.create(construct, ds);
try {
Model m = qe.execConstruct();
if (graphURI != null) {
dataset.getNamedModel(graphURI).remove(m);
} else {
dataset.getDefaultModel().remove(m);
}
} finally {
qe.close();
}
}
private List<Statement> sort(List<Statement> stmts) {
List<Statement> output = new ArrayList<Statement>();
int originalSize = stmts.size();
if(originalSize == 1) {
return stmts;
}
List <Statement> remaining = stmts;
ConcurrentLinkedQueue<Resource> subjQueue = new ConcurrentLinkedQueue<Resource>();
for(Statement stmt : remaining) {
if(stmt.getSubject().isURIResource()) {
subjQueue.add(stmt.getSubject());
break;
}
}
if (subjQueue.isEmpty()) {
log.warn("No named subject in statement patterns");
return stmts;
}
while(remaining.size() > 0) {
if(subjQueue.isEmpty()) {
subjQueue.add(remaining.get(0).getSubject());
}
while(!subjQueue.isEmpty()) {
Resource subj = subjQueue.poll();
List<Statement> temp = new ArrayList<Statement>();
for (Statement stmt : remaining) {
if(stmt.getSubject().equals(subj)) {
output.add(stmt);
if (stmt.getObject().isResource()) {
subjQueue.add((Resource) stmt.getObject());
}
} else {
temp.add(stmt);
}
}
remaining = temp;
}
}
if(output.size() != originalSize) {
throw new RuntimeException("original list size was " + originalSize +
" but sorted size is " + output.size());
}
return output;
}
private static final boolean WHERE_CLAUSE = true;
private void addStatementPatterns(List<Statement> stmts, StringBuffer patternBuff, boolean whereClause) {
for(Statement stmt : stmts) {
Triple t = stmt.asTriple();
patternBuff.append(SparqlGraph.sparqlNodeDelete(t.getSubject(), null));
patternBuff.append(" ");
patternBuff.append(SparqlGraph.sparqlNodeDelete(t.getPredicate(), null));
patternBuff.append(" ");
patternBuff.append(SparqlGraph.sparqlNodeDelete(t.getObject(), null));
patternBuff.append(" .\n");
if (whereClause) {
if (t.getSubject().isBlank()) {
patternBuff.append(" FILTER(isBlank(").append(SparqlGraph.sparqlNodeDelete(t.getSubject(), null)).append(")) \n");
}
if (t.getObject().isBlank()) {
patternBuff.append(" FILTER(isBlank(").append(SparqlGraph.sparqlNodeDelete(t.getObject(), null)).append(")) \n");
}
}
}
}
private Model parseModel(ModelChange modelChange) {
Model model = ModelFactory.createDefaultModel();
model.read(modelChange.getSerializedModel(), null,

RDFServiceModel.java

@@ -95,7 +95,7 @@ public class RDFServiceModel extends RDFServiceJena implements RDFService {
m = dataset.getDefaultModel();
}
}
operateOnModel(m, modelChange, null);
operateOnModel(m, modelChange);
}
// notify listeners of triple changes

RDFServiceSparql.java

@@ -38,8 +38,6 @@ import org.apache.http.message.BasicNameValuePair;
import org.apache.http.protocol.BasicHttpContext;
import org.apache.http.protocol.HttpContext;
import org.apache.http.util.EntityUtils;
import org.apache.jena.riot.RDFDataMgr;
import org.apache.jena.graph.Triple;
import org.apache.jena.query.Query;
import org.apache.jena.query.QueryExecution;
@@ -55,8 +53,10 @@ import org.apache.jena.rdf.model.RDFNode;
import org.apache.jena.rdf.model.Resource;
import org.apache.jena.rdf.model.Statement;
import org.apache.jena.rdf.model.StmtIterator;
import org.apache.jena.riot.RDFDataMgr;
import org.apache.jena.sparql.core.Quad;
import edu.cornell.mannlib.vitro.webapp.dao.jena.JenaModelUtils;
import edu.cornell.mannlib.vitro.webapp.dao.jena.RDFServiceDataset;
import edu.cornell.mannlib.vitro.webapp.dao.jena.SparqlGraph;
import edu.cornell.mannlib.vitro.webapp.rdfservice.ChangeListener;
@@ -691,7 +691,7 @@ public class RDFServiceSparql extends RDFServiceImpl implements RDFService {
log.warn("This likely indicates a problem; excessive data may be deleted.");
}
Query rootFinderQuery = QueryFactory.create(BNODE_ROOT_QUERY);
Query rootFinderQuery = QueryFactory.create(JenaModelUtils.BNODE_ROOT_QUERY);
QueryExecution qe = QueryExecutionFactory.create(rootFinderQuery, blankNodeModel);
try {
ResultSet rs = qe.execSelect();

JFactTBoxReasoner.java

@@ -2,6 +2,8 @@
package edu.cornell.mannlib.vitro.webapp.tboxreasoner.impl.jfact;
import static org.semanticweb.owlapi.vocab.OWLRDFVocabulary.OWL_AXIOM;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.util.ArrayList;
@@ -9,17 +11,6 @@ import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyCreationException;
import org.semanticweb.owlapi.reasoner.InferenceType;
import org.semanticweb.owlapi.reasoner.OWLReasoner;
import org.semanticweb.owlapi.reasoner.OWLReasonerConfiguration;
import org.semanticweb.owlapi.reasoner.OWLReasonerFactory;
import org.semanticweb.owlapi.reasoner.SimpleConfiguration;
import uk.ac.manchester.cs.jfact.JFactFactory;
import org.apache.jena.ontology.DatatypeProperty;
import org.apache.jena.ontology.ObjectProperty;
import org.apache.jena.ontology.OntModel;
@@ -32,13 +23,21 @@ import org.apache.jena.rdf.model.ResourceFactory;
import org.apache.jena.rdf.model.Statement;
import org.apache.jena.rdf.model.StmtIterator;
import org.apache.jena.vocabulary.RDF;
import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyCreationException;
import org.semanticweb.owlapi.reasoner.InferenceType;
import org.semanticweb.owlapi.reasoner.OWLReasoner;
import org.semanticweb.owlapi.reasoner.OWLReasonerConfiguration;
import org.semanticweb.owlapi.reasoner.OWLReasonerFactory;
import org.semanticweb.owlapi.reasoner.SimpleConfiguration;
import edu.cornell.mannlib.vitro.webapp.dao.jena.JenaModelUtils;
import edu.cornell.mannlib.vitro.webapp.tboxreasoner.ReasonerStatementPattern;
import edu.cornell.mannlib.vitro.webapp.tboxreasoner.TBoxChanges;
import edu.cornell.mannlib.vitro.webapp.tboxreasoner.TBoxReasoner;
import edu.cornell.mannlib.vitro.webapp.tboxreasoner.impl.TBoxInferencesAccumulator;
import static org.semanticweb.owlapi.vocab.OWLRDFVocabulary.OWL_AXIOM;
import uk.ac.manchester.cs.jfact.JFactFactory;
/**
* An implementation of the JFact reasoner for the TBox.
@@ -81,7 +80,9 @@ public class JFactTBoxReasoner implements
log.debug("Adding " + changes.getAddedStatements().size()
+ ", removing " + changes.getRemovedStatements().size());
filteredAssertionsModel.add(changes.getAddedStatements());
filteredAssertionsModel.remove(changes.getRemovedStatements());
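// The removed statements may contain blank nodes whose internal IDs no longer match
// those in the assertions model, so remove them by structure rather than by identity.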
Model removals = ModelFactory.createDefaultModel();
removals.add(changes.getRemovedStatements());
JenaModelUtils.removeWithBlankNodesAsVariables(removals, filteredAssertionsModel);
clearEmptyAxiomStatements();
}

JFactTBoxReasonerTest.java

@@ -0,0 +1,102 @@
package edu.cornell.mannlib.vitro.webapp.tboxreasoner.impl.jfact;
import java.io.StringReader;
import org.apache.jena.ontology.OntModel;
import org.apache.jena.ontology.OntModelSpec;
import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.ModelFactory;
import org.apache.jena.rdf.model.RDFNode;
import org.junit.Assert;
import org.junit.Test;
import edu.cornell.mannlib.vitro.webapp.dao.jena.JenaModelUtils;
import edu.cornell.mannlib.vitro.webapp.dao.jena.event.EditEvent;
import edu.cornell.mannlib.vitro.webapp.tboxreasoner.ReasonerConfiguration;
import edu.cornell.mannlib.vitro.webapp.tboxreasoner.impl.BasicTBoxReasonerDriver;
public class JFactTBoxReasonerTest {
private final static String axioms = "@prefix obo: <http://purl.obolibrary.org/obo/> .\r\n" +
"@prefix owl: <http://www.w3.org/2002/07/owl#> .\r\n" +
"@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .\r\n" +
"@prefix xml: <http://www.w3.org/XML/1998/namespace> .\r\n" +
"@prefix xsd: <http://www.w3.org/2001/XMLSchema#> .\r\n" +
"@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .\r\n" +
"\r\n" +
"<http://vivo.mydomain.edu/individual/class_c> rdf:type owl:Class ;\r\n" +
" \r\n" +
" rdfs:subClassOf [ rdf:type owl:Class ;\r\n" +
" owl:intersectionOf ( <http://vivo.mydomain.edu/individual/class_a>\r\n" +
" <http://vivo.mydomain.edu/individual/class_b>\r\n" +
" )\r\n" +
" ] .\r\n" +
"\r\n" +
"<http://vivo.mydomain.edu/individual/class_a>\r\n" +
" a owl:Class ;\r\n" +
" rdfs:label \"Class A\"@en-US .\r\n" +
"\r\n" +
"<http://vivo.mydomain.edu/individual/class_b>\r\n" +
" a owl:Class ;\r\n" +
" rdfs:label \"Class B\"@en-US .\r\n" +
"\r\n" +
"<http://vivo.mydomain.edu/individual/class_c>\r\n" +
" a owl:Class ;\r\n" +
" rdfs:label \"Class C\"@en-US .\r\n";
/**
* Test that axioms containing blank nodes can be removed from the reasoner
* even if the internal blank node IDs are different from when first added.
*/
@Test
public void testRemoveAxiomsWithBlankNodes() {
OntModel tboxAssertions = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM);
OntModel tboxInferences = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM);
OntModel tboxUnion = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM,
ModelFactory.createUnion(tboxAssertions, tboxInferences));
JFactTBoxReasoner reasoner = new JFactTBoxReasoner();
BasicTBoxReasonerDriver driver = new BasicTBoxReasonerDriver(
tboxAssertions, tboxInferences.getBaseModel(), tboxUnion, reasoner,
ReasonerConfiguration.DEFAULT);
Model additions = ModelFactory.createDefaultModel();
additions.read(new StringReader(axioms), null, "TTL");
// Reading again will generate new internal blank node IDs
Model subtractions = ModelFactory.createDefaultModel();
subtractions.read(new StringReader(axioms), null, "TTL");
// Confirm that directly subtracting the models doesn't work because
// the blank node IDs do not match
Model incorrectSubtraction = additions.difference(subtractions);
Assert.assertFalse(incorrectSubtraction.isEmpty());
tboxAssertions.getBaseModel().add(additions);
tboxAssertions.getBaseModel().notifyEvent(new EditEvent(null, false));
waitForTBoxReasoning(driver);
// Confirm that union model now contains inferred triples
Assert.assertTrue(tboxUnion.size() > additions.size());
JenaModelUtils.removeWithBlankNodesAsVariables(subtractions, tboxAssertions.getBaseModel());
tboxAssertions.getBaseModel().notifyEvent(new EditEvent(null, false));
waitForTBoxReasoning(driver);
// Confirm that no statements related to classes a, b or c remain in the
// TBox union model. (The inference model may not be completely empty, because
// the reasoner may supply unrelated triples related to OWL and RDFS vocabulary.)
Assert.assertFalse(tboxUnion.contains(tboxUnion.getResource(
"http://vivo.mydomain.edu/individual/class_a"), null, (RDFNode) null));
Assert.assertFalse(tboxUnion.contains(tboxUnion.getResource(
"http://vivo.mydomain.edu/individual/class_b"), null, (RDFNode) null));
Assert.assertFalse(tboxUnion.contains(tboxUnion.getResource(
"http://vivo.mydomain.edu/individual/class_c"), null, (RDFNode) null));
}
private void waitForTBoxReasoning(BasicTBoxReasonerDriver driver) {
int sleeps = 0;
// sleep at least once to make sure the TBox reasoning gets started
while ((0 == sleeps) || ((sleeps < 1000) && driver.getStatus().isReasoning())) {
try {
Thread.sleep(200);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
sleeps++;
}
}
}