Merge changes from trunk to the RDF API branch and fix an issue with listeners notifying before triples are actually added/removed

brianjlowe 2012-06-13 17:19:46 +00:00
parent 94a34e274b
commit 6e8a4dfff1
27 changed files with 1429 additions and 1316 deletions
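The core of the fix, visible in the RDFServiceSDB and ListeningGraph changes below: model changes are first applied to the SDB-backed store (inside a transaction where the connection supports one), and only afterwards are the registered listeners notified by replaying the changes, so a listener never observes a triple before it actually exists in the store. A minimal sketch of that ordering, using illustrative stand-in types rather than the Vitro RDFService API:

import java.util.ArrayList;
import java.util.List;

// Illustrative stand-ins, not the Vitro API: the point is the ordering of
// "update the store" versus "tell the listeners".
interface ChangeListener {
    void changeHappened(String triple, boolean added);
}

class NotifyAfterCommitSketch {
    private final List<String> store = new ArrayList<String>();              // stands in for the SDB dataset
    private final List<ChangeListener> listeners = new ArrayList<ChangeListener>();

    void registerListener(ChangeListener l) {
        listeners.add(l);
    }

    void addAll(List<String> triples) {
        store.addAll(triples);                       // 1. commit the change to the store first
        for (String t : triples) {                   // 2. only then replay each triple to the
            for (ChangeListener l : listeners) {     //    listeners, which can now query a store
                l.changeHappened(t, true);           //    that already contains it
            }
        }
    }
}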

View file

@@ -1,3 +1,5 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.dao.jena;
import java.util.HashSet;

View file

@@ -1,3 +1,5 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.dao.jena;
import java.util.Set;

View file

@@ -1,3 +1,5 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.dao.jena;
import java.util.ArrayList;

View file

@@ -1,3 +1,5 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.dao.jena;
import java.io.ByteArrayInputStream;

View file

@@ -1,3 +1,5 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.dao.jena;
import java.util.ArrayList;

View file

@@ -1,3 +1,5 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.dao.jena;
import java.util.ArrayList;

View file

@@ -1,3 +1,5 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.dao.jena;
import java.io.InputStream;

View file

@@ -1,3 +1,5 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.dao.jena;
import java.io.ByteArrayInputStream;
@@ -30,7 +32,7 @@ import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException;
public class RDFServiceGraphBulkUpdater extends SimpleBulkUpdateHandler {
private static final Log log = LogFactory.getLog(SparqlGraphBulkUpdater.class);
private static final Log log = LogFactory.getLog(RDFServiceGraphBulkUpdater.class);
private RDFServiceGraph graph;
public RDFServiceGraphBulkUpdater(RDFServiceGraph graph) {

View file

@@ -1,3 +1,5 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.dao.jena;
import java.util.ArrayList;

View file

@@ -1,3 +1,5 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.dao.jena;
import java.util.ArrayList;

View file

@@ -1,3 +1,5 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.dao.jena;
import java.util.ArrayList;

View file

@@ -1,3 +1,5 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.dao.jena;
import java.io.StringWriter;

View file

@@ -1,3 +1,5 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.dao.jena;
import java.util.ArrayList;

View file

@@ -35,6 +35,7 @@ import edu.cornell.mannlib.vitro.webapp.config.ConfigurationProperties;
import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest;
import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactory;
import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactoryConfig;
import edu.cornell.mannlib.vitro.webapp.dao.jena.ModelContext;
import edu.cornell.mannlib.vitro.webapp.dao.jena.OntModelSelector;
import edu.cornell.mannlib.vitro.webapp.dao.jena.RDFServiceDataset;
import edu.cornell.mannlib.vitro.webapp.dao.jena.SparqlDataset;
@@ -90,7 +91,8 @@ public class WebappDaoFactorySDBPrep implements Filter {
}
}
OntModelSelector oms = (OntModelSelector) _ctx.getAttribute("unionOntModelSelector");
OntModelSelector oms = ModelContext.getUnionOntModelSelector(_ctx);
OntModelSelector baseOms = ModelContext.getBaseOntModelSelector(_ctx);
String defaultNamespace = (String) _ctx.getAttribute("defaultNamespace");
WebappDaoFactory wadf = null;
VitroRequest vreq = new VitroRequest((HttpServletRequest) request);
@@ -112,7 +114,7 @@ public class WebappDaoFactorySDBPrep implements Filter {
Dataset dataset = new RDFServiceDataset(rdfService);
wadf = new WebappDaoFactorySDB(rdfService, oms, config);
WebappDaoFactory assertions = new WebappDaoFactorySDB(
rdfService, oms, config, SDBDatasetMode.ASSERTIONS_ONLY);
rdfService, baseOms, config, SDBDatasetMode.ASSERTIONS_ONLY);
vreq.setWebappDaoFactory(wadf);
vreq.setAssertionsWebappDaoFactory(assertions);
vreq.setFullWebappDaoFactory(wadf);

View file

@@ -1,3 +1,5 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.rdfservice;
import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest;

View file

@@ -1,3 +1,5 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.rdfservice.impl;
import edu.cornell.mannlib.vitro.webapp.rdfservice.ChangeListener;

View file

@@ -1,3 +1,5 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.rdfservice.impl;
import java.io.ByteArrayInputStream;

View file

@@ -0,0 +1,217 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.rdfservice.impl.sdb;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.hp.hpl.jena.graph.BulkUpdateHandler;
import com.hp.hpl.jena.graph.Capabilities;
import com.hp.hpl.jena.graph.Graph;
import com.hp.hpl.jena.graph.GraphEventManager;
import com.hp.hpl.jena.graph.GraphStatisticsHandler;
import com.hp.hpl.jena.graph.Node;
import com.hp.hpl.jena.graph.Reifier;
import com.hp.hpl.jena.graph.TransactionHandler;
import com.hp.hpl.jena.graph.Triple;
import com.hp.hpl.jena.graph.TripleMatch;
import com.hp.hpl.jena.graph.impl.GraphWithPerform;
import com.hp.hpl.jena.graph.impl.SimpleBulkUpdateHandler;
import com.hp.hpl.jena.graph.impl.SimpleEventManager;
import com.hp.hpl.jena.graph.query.QueryHandler;
import com.hp.hpl.jena.graph.query.SimpleQueryHandler;
import com.hp.hpl.jena.shared.AddDeniedException;
import com.hp.hpl.jena.shared.DeleteDeniedException;
import com.hp.hpl.jena.shared.PrefixMapping;
import com.hp.hpl.jena.shared.impl.PrefixMappingImpl;
import com.hp.hpl.jena.util.iterator.ExtendedIterator;
import com.hp.hpl.jena.util.iterator.WrappedIterator;
import edu.cornell.mannlib.vitro.webapp.dao.jena.EmptyReifier;
import edu.cornell.mannlib.vitro.webapp.rdfservice.ModelChange;
public class ListeningGraph implements GraphWithPerform {
private static final Log log = LogFactory.getLog(ListeningGraph.class);
private RDFServiceSDB rdfServiceSDB;
private String graphURI;
private BulkUpdateHandler bulkUpdateHandler;
private GraphEventManager eventManager;
private PrefixMapping prefixMapping = new PrefixMappingImpl();
private Reifier reifier = new EmptyReifier(this);
private QueryHandler queryHandler;
public ListeningGraph(String graphURI, RDFServiceSDB rdfServiceSDB) {
this.graphURI = graphURI;
this.rdfServiceSDB = rdfServiceSDB;
}
// This graph stores no triples of its own: add() and delete() simply notify
// the RDFServiceSDB's registered listeners of changes that have already been
// made to the underlying store.
@Override
public void add(Triple triple) throws AddDeniedException {
performAdd(triple);
}
@Override
public void performAdd(Triple triple) throws AddDeniedException {
this.rdfServiceSDB.notifyListeners(triple, ModelChange.Operation.ADD, graphURI);
}
@Override
public void delete(Triple triple) throws DeleteDeniedException {
performDelete(triple);
}
@Override
public void performDelete(Triple triple) throws DeleteDeniedException {
this.rdfServiceSDB.notifyListeners(triple, ModelChange.Operation.REMOVE, graphURI);
}
@Override
public void close() {
}
@Override
public boolean contains(Triple arg0) {
return contains(arg0.getSubject(), arg0.getPredicate(), arg0.getObject());
}
@Override
public boolean contains(Node subject, Node predicate, Node object) {
return false;
}
@Override
public boolean dependsOn(Graph arg0) {
return false; // who knows?
}
@Override
public ExtendedIterator<Triple> find(TripleMatch arg0) {
Triple t = arg0.asTriple();
return find(t.getSubject(), t.getPredicate(), t.getObject());
}
@Override
public ExtendedIterator<Triple> find(Node subject, Node predicate, Node object) {
// nothing to find: this graph holds no data and exists only to relay notifications
List<Triple> triplist = new ArrayList<Triple>();
return WrappedIterator.create(triplist.iterator());
}
@Override
public BulkUpdateHandler getBulkUpdateHandler() {
if (this.bulkUpdateHandler == null) {
this.bulkUpdateHandler = new SimpleBulkUpdateHandler(this);
}
return this.bulkUpdateHandler;
}
@Override
public Capabilities getCapabilities() {
return capabilities;
}
@Override
public GraphEventManager getEventManager() {
if (eventManager == null) {
eventManager = new SimpleEventManager(this);
}
return eventManager;
}
@Override
public PrefixMapping getPrefixMapping() {
return prefixMapping;
}
@Override
public Reifier getReifier() {
return reifier;
}
@Override
public GraphStatisticsHandler getStatisticsHandler() {
return null;
}
@Override
public TransactionHandler getTransactionHandler() {
// TODO Auto-generated method stub
return null;
}
@Override
public boolean isClosed() {
// TODO Auto-generated method stub
return false;
}
@Override
public boolean isEmpty() {
return (size() == 0);
}
@Override
public boolean isIsomorphicWith(Graph arg0) {
throw new UnsupportedOperationException("isIsomorphicWith() not supported " +
"by SPARQL graphs");
}
@Override
public QueryHandler queryHandler() {
if (queryHandler == null) {
queryHandler = new SimpleQueryHandler(this);
}
return queryHandler;
}
@Override
public int size() {
int size = find(null, null, null).toList().size();
return size;
}
private final static Capabilities capabilities = new Capabilities() {
public boolean addAllowed() {
return false;
}
public boolean addAllowed(boolean everyTriple) {
return false;
}
public boolean canBeEmpty() {
return true;
}
public boolean deleteAllowed() {
return false;
}
public boolean deleteAllowed(boolean everyTriple) {
return false;
}
public boolean findContractSafe() {
return true;
}
public boolean handlesLiteralTyping() {
return true;
}
public boolean iteratorRemoveAllowed() {
return false;
}
public boolean sizeAccurate() {
return true;
}
};
}
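For reference, the RDFServiceSDB diff further down uses this class by wrapping it in a Jena Model after the store update has committed and re-reading the change's serialized RDF, so each statement is replayed to the listeners. A rough sketch of that usage (the helper name and serialization language here are illustrative; the real code dispatches through operateOnModel):

import java.io.InputStream;

import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;

// Assumes it lives alongside ListeningGraph and RDFServiceSDB in
// edu.cornell.mannlib.vitro.webapp.rdfservice.impl.sdb.
class ListenerReplaySketch {
    // Re-apply already-committed additions against a ListeningGraph so that each
    // add() becomes a notifyListeners(..., ADD, graphURI) call; nothing is stored.
    static void replayAdditions(RDFServiceSDB rdfService, String graphURI,
            InputStream serializedModel, String lang) {
        Model replay = ModelFactory.createModelForGraph(
                new ListeningGraph(graphURI, rdfService));
        replay.read(serializedModel, null, lang);    // e.g. "N3"
    }
}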

View file

@@ -1,3 +1,5 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.rdfservice.impl.sdb;
import org.apache.commons.dbcp.BasicDataSource;

View file

@@ -1,3 +1,5 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.rdfservice.impl.sdb;
import java.io.ByteArrayInputStream;
@@ -35,6 +37,7 @@ import com.hp.hpl.jena.sdb.Store;
import com.hp.hpl.jena.sdb.StoreDesc;
import com.hp.hpl.jena.sdb.sql.SDBConnection;
import com.hp.hpl.jena.shared.Lock;
import com.hp.hpl.jena.vocabulary.OWL;
import edu.cornell.mannlib.vitro.webapp.dao.jena.DatasetWrapper;
import edu.cornell.mannlib.vitro.webapp.dao.jena.SparqlGraph;
@@ -82,9 +85,7 @@ public class RDFServiceSDB extends RDFServiceImpl implements RDFService {
changeSet.getPreconditionQueryType())) {
return false;
}
Iterator<ModelChange> csIt = changeSet.getModelChanges().iterator();
SDBConnection conn = null;
try {
conn = new SDBConnection(bds.getConnection());
@@ -96,47 +97,47 @@ public class RDFServiceSDB extends RDFServiceImpl implements RDFService {
Dataset dataset = getDataset(conn);
boolean transaction = conn.getTransactionHandler().transactionsSupported();
try {
try {
if (transaction) {
conn.getTransactionHandler().begin();
} else {
for (Object o : changeSet.getPreChangeEvents()) {
this.notifyListenersOfEvent(o);
}
}
for (Object o : changeSet.getPreChangeEvents()) {
this.notifyListenersOfEvent(o);
}
Iterator<ModelChange> csIt = changeSet.getModelChanges().iterator();
while (csIt.hasNext()) {
ModelChange modelChange = csIt.next();
modelChange.getSerializedModel().mark(Integer.MAX_VALUE);
dataset.getLock().enterCriticalSection(Lock.WRITE);
try {
Model model = dataset.getNamedModel(modelChange.getGraphURI());
model.enterCriticalSection(Lock.WRITE);
try {
model.register(new ModelListener(modelChange.getGraphURI(), this));
if (modelChange.getOperation() == ModelChange.Operation.ADD) {
model.read(modelChange.getSerializedModel(), null,
getSerializationFormatString(modelChange.getSerializationFormat()));
} else if (modelChange.getOperation() == ModelChange.Operation.REMOVE) {
model.remove(parseModel(modelChange));
removeBlankNodesWithSparqlUpdate(dataset, model, modelChange.getGraphURI());
} else {
log.error("unrecognized operation type");
}
} finally {
model.leaveCriticalSection();
}
operateOnModel(model, modelChange, dataset);
} finally {
dataset.getLock().leaveCriticalSection();
}
}
if (transaction) {
for (Object o : changeSet.getPreChangeEvents()) {
this.notifyListenersOfEvent(o);
}
conn.getTransactionHandler().commit();
}
// notify listeners of triple changes
csIt = changeSet.getModelChanges().iterator();
while (csIt.hasNext()) {
ModelChange modelChange = csIt.next();
modelChange.getSerializedModel().reset();
Model model = ModelFactory.createModelForGraph(
new ListeningGraph(modelChange.getGraphURI(), this));
operateOnModel(model, modelChange, null);
}
for (Object o : changeSet.getPostChangeEvents()) {
this.notifyListenersOfEvent(o);
}
} catch (Exception e) {
log.error(e, e);
if (transaction) {
@@ -150,6 +151,25 @@ public class RDFServiceSDB extends RDFServiceImpl implements RDFService {
return true;
}
private void operateOnModel(Model model, ModelChange modelChange, Dataset dataset) {
model.enterCriticalSection(Lock.WRITE);
try {
if (modelChange.getOperation() == ModelChange.Operation.ADD) {
model.read(modelChange.getSerializedModel(), null,
getSerializationFormatString(modelChange.getSerializationFormat()));
} else if (modelChange.getOperation() == ModelChange.Operation.REMOVE) {
model.remove(parseModel(modelChange));
if (dataset != null) {
removeBlankNodesWithSparqlUpdate(dataset, model, modelChange.getGraphURI());
}
} else {
log.error("unrecognized operation type");
}
} finally {
model.leaveCriticalSection();
}
}
private void removeBlankNodesWithSparqlUpdate(Dataset dataset, Model model, String graphURI) {
Model blankNodeModel = ModelFactory.createDefaultModel();
@@ -414,12 +434,10 @@ public class RDFServiceSDB extends RDFServiceImpl implements RDFService {
}
public void addedStatement(Statement stmt) {
log.debug("adding " + stmt + " to " + graphURI);
s.notifyListeners(stmt.asTriple(), ModelChange.Operation.ADD, graphURI);
}
public void removedStatement(Statement stmt) {
log.debug("removing " + stmt + " from " + graphURI);
s.notifyListeners(stmt.asTriple(), ModelChange.Operation.REMOVE, graphURI);
}

View file

@@ -22,4 +22,7 @@ public interface ReasonerPlugin {
Model aboxInferencesModel,
OntModel TBoxInferencesModel);
public void setSimpleReasoner(SimpleReasoner simpleReasoner);
public SimpleReasoner getSimpleReasoner();
}

View file

@@ -49,12 +49,14 @@ public class SimpleReasonerTBoxListener extends StatementListener {
@Override
public void addedStatement(Statement statement) {
ModelUpdate mu = new ModelUpdate(statement, ModelUpdate.Operation.ADD, JenaDataSourceSetupBase.JENA_TBOX_ASSERTIONS_MODEL);
processUpdate(mu);
}
@Override
public void removedStatement(Statement statement) {
ModelUpdate mu = new ModelUpdate(statement, ModelUpdate.Operation.RETRACT, JenaDataSourceSetupBase.JENA_TBOX_ASSERTIONS_MODEL);
processUpdate(mu);
}

View file

@@ -115,6 +115,7 @@ public class SimpleReasonerSetup implements ServletContextListener {
try {
ReasonerPlugin plugin = (ReasonerPlugin) Class.forName(
classname).getConstructors()[0].newInstance();
plugin.setSimpleReasoner(simpleReasoner);
pluginList.add(plugin);
} catch(Throwable t) {
ss.info(this, "Could not instantiate reasoner plugin " + classname);