incremental development

stellamit 2012-05-22 21:08:21 +00:00
parent 9e210249b9
commit e21bd9e170
4 changed files with 74 additions and 67 deletions

ChangeListener.java

@@ -14,20 +14,18 @@ public interface ChangeListener {
     /**
      * Override this to listen to all statements added to the RDF store.
      *
-     * @param String serializedTriple - the added statement
-     * @param RDFService.ModelSerializationFormat format - RDF format of serializedTriple
+     * @param String serializedTriple - the added statement in n3 format
      * @param String graphURI - the graph to which the statement was added
      */
-    public void addedStatement(String serializedTriple, RDFService.ModelSerializationFormat format, String graphURI);
+    public void addedStatement(String serializedTriple, String graphURI);
 
     /**
      * Override this to listen to all statements removed from the RDF store.
      *
-     * @param String serializedTriple - the removed statement
-     * @param RDFService.ModelSerializationFormat format - RDF format of serializedTriple
+     * @param String serializedTriple - the removed statement in n3 format
      * @param String graphURI - the graph from which the statement was removed
      */
-    public void removedStatement(String serializedTriple, RDFService.ModelSerializationFormat format, String graphURI);
+    public void removedStatement(String serializedTriple, String graphURI);
 
     /**
      * Override this to listen to events pertaining to the given graphURI.
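Note (illustrative only, not part of this commit): a listener written against the revised callbacks might look like the sketch below. The class name and log output are hypothetical, and the remaining methods of the interface are omitted.

    import org.apache.commons.logging.Log;
    import org.apache.commons.logging.LogFactory;

    // Hypothetical sketch of the two callbacks changed above; the statement
    // now arrives already serialized in n3 format, with no format parameter.
    public class LoggingChangeListener implements ChangeListener {

        private static final Log log = LogFactory.getLog(LoggingChangeListener.class);

        public void addedStatement(String serializedTriple, String graphURI) {
            log.info("added " + serializedTriple + " to graph " + graphURI);
        }

        public void removedStatement(String serializedTriple, String graphURI) {
            log.info("removed " + serializedTriple + " from graph " + graphURI);
        }

        // (other ChangeListener methods omitted for brevity)
    }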

RDFService.java

@@ -3,7 +3,6 @@
 package edu.cornell.mannlib.vitro.webapp.rdfservice;
 
 import java.io.InputStream;
-import java.io.OutputStream;
 import java.util.List;
 
 /*
@@ -125,13 +124,6 @@ public interface RDFService {
      */
     public String getDefaultWriteGraphURI() throws RDFServiceException;
 
-    /**
-     * Get the URI of the default read graph
-     *
-     * @return String URI of default read graph
-     */
-    public String getDefaultReadGraphURI() throws RDFServiceException;
-
     /**
     * Register a listener to listen to changes in any graph in
     * the RDF store.
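Usage note (hypothetical sketch): with getDefaultReadGraphURI() removed, the write graph is the only default graph the interface exposes, so callers that need a default graph URI now go through getDefaultWriteGraphURI().

    // Hypothetical usage, assuming the LoggingChangeListener sketched above.
    public static void configureService(RDFService rdfService) throws RDFServiceException {
        // the only default graph the service now exposes
        String defaultGraph = rdfService.getDefaultWriteGraphURI();
        System.out.println("default write graph: " + defaultGraph);
        rdfService.registerListener(new LoggingChangeListener());
    }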

RDFServiceImpl.java

@@ -5,12 +5,13 @@ package edu.cornell.mannlib.vitro.webapp.rdfservice.impl;
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.InputStream;
-import java.util.HashSet;
+import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.openrdf.model.Resource;
 import org.openrdf.query.MalformedQueryException;
 import org.openrdf.query.QueryLanguage;
 import org.openrdf.query.Update;
@@ -18,6 +19,7 @@ import org.openrdf.query.UpdateExecutionException;
 import org.openrdf.repository.Repository;
 import org.openrdf.repository.RepositoryConnection;
 import org.openrdf.repository.RepositoryException;
+import org.openrdf.repository.RepositoryResult;
 import org.openrdf.repository.http.HTTPRepository;
 
 import com.hp.hpl.jena.graph.Node;
@@ -49,17 +51,19 @@ public class RDFServiceImpl implements RDFService {
     private static final Log log = LogFactory.getLog(RDFServiceImpl.class);
 
     private String endpointURI;
+    private String defaultWriteGraphURI;
     private Repository repository;
-    private HashSet<ChangeListener> registeredListeners;
+    private ArrayList<ChangeListener> registeredListeners;
 
     /**
      * Returns an RDFService for a remote repository
      * @param endpointURI
      */
-    public RDFServiceImpl(String endpointURI) {
+    public RDFServiceImpl(String endpointURI, String defaultWriteGraphURI) {
         this.endpointURI = endpointURI;
+        this.defaultWriteGraphURI = defaultWriteGraphURI;
         this.repository = new HTTPRepository(endpointURI);
-        this.registeredListeners = new HashSet<ChangeListener>();
+        this.registeredListeners = new ArrayList<ChangeListener>();
     }
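Construction sketch (both URIs hypothetical, not part of this commit): the default write graph is now supplied up front rather than resolved later.

    // Hypothetical values for illustration.
    RDFService rdfService = new RDFServiceImpl(
            "http://localhost:8080/openrdf-sesame/repositories/vivo",  // Sesame endpoint
            "http://vitro.mannlib.cornell.edu/default/vitro-kb-2");    // default write graph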
@@ -110,7 +114,7 @@ public class RDFServiceImpl implements RDFService {
     public void newIndividual(String individualURI,
                               String individualTypeURI) throws RDFServiceException {
 
-        newIndividual(individualURI, individualTypeURI, null);
+        newIndividual(individualURI, individualTypeURI, defaultWriteGraphURI);
     }
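Effect of the change above: the two-argument newIndividual() no longer passes a null graph; the assertion lands in the graph configured at construction time. A hypothetical call (URIs invented):

    // Written to defaultWriteGraphURI, since no graph is named here.
    rdfService.newIndividual("http://example.org/individual/n123",
            "http://xmlns.com/foaf/0.1/Person");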
@@ -277,11 +281,30 @@ public class RDFServiceImpl implements RDFService {
      *
      * @return List<String> - list of all the graph URIs in the RDF store
      */
+    //TODO - need to verify that the sesame getContextIDs method is implemented
+    // in such a way that it works with all triple stores that support the
+    // graph update API
     @Override
     public List<String> getGraphURIs() throws RDFServiceException {
-        List<String> list = null;
-        return list;
+        List<String> graphNodeList = new ArrayList<String>();
+        try {
+            RepositoryConnection conn = getConnection();
+            try {
+                RepositoryResult<Resource> conResult = conn.getContextIDs();
+                while (conResult.hasNext()) {
+                    Resource res = conResult.next();
+                    graphNodeList.add(res.stringValue());
+                }
+            } finally {
+                conn.close();
+            }
+        } catch (RepositoryException re) {
+            throw new RuntimeException(re);
+        }
+        return graphNodeList;
     }
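Caller-side sketch (hypothetical): the method now reports the store's named graphs via the Sesame connection's getContextIDs(), so enumeration becomes possible; the TODO above flags the open portability question across triple stores.

    // Hypothetical usage: list every named graph the store reports.
    for (String graphURI : rdfService.getGraphURIs()) {
        System.out.println("graph: " + graphURI);
    }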
@@ -300,21 +323,7 @@ public class RDFServiceImpl implements RDFService {
      */
     @Override
     public String getDefaultWriteGraphURI() throws RDFServiceException {
-        String graphURI = null;
-        return graphURI;
-    }
-
-    /**
-     * Get the URI of the default read graph
-     *
-     * @return String URI of default read graph
-     */
-    @Override
-    public String getDefaultReadGraphURI() throws RDFServiceException {
-        String graphURI = null;
-        return graphURI;
+        return defaultWriteGraphURI;
     }
 
     /**
@@ -324,7 +333,10 @@ public class RDFServiceImpl implements RDFService {
      */
     @Override
     public synchronized void registerListener(ChangeListener changeListener) throws RDFServiceException {
-        registeredListeners.add(changeListener);
+        if (!registeredListeners.contains(changeListener)) {
+            registeredListeners.add(changeListener);
+        }
     }
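Design note: switching the listener collection from HashSet to ArrayList makes notification order deterministic (registration order), and the contains() guard added here preserves the old set semantics of at most one registration per listener. The cost is a linear membership scan per registration, negligible at realistic listener counts.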
@@ -384,51 +396,56 @@ public class RDFServiceImpl implements RDFService {
     protected void addTriple(Triple t, String graphURI) {
 
-        String updateString = "INSERT DATA { " + ((graphURI != null) ? "GRAPH <" + graphURI + "> { " : "" )
-                + sparqlNodeUpdate(t.getSubject(), "") + " "
-                + sparqlNodeUpdate(t.getPredicate(), "") + " "
-                + sparqlNodeUpdate(t.getObject(), "") + " } "
-                + ((graphURI != null) ? " } " : "");
+        StringBuffer updateString = new StringBuffer();
+        updateString.append("INSERT DATA { ");
+        updateString.append((graphURI != null) ? "GRAPH <" + graphURI + "> { " : "" );
+        updateString.append(sparqlNodeUpdate(t.getSubject(), ""));
+        updateString.append(" ");
+        updateString.append(sparqlNodeUpdate(t.getPredicate(), ""));
+        updateString.append(" ");
+        updateString.append(sparqlNodeUpdate(t.getObject(), ""));
+        updateString.append(" }");
+        updateString.append((graphURI != null) ? " } " : "");
 
-        executeUpdate(updateString);
+        executeUpdate(updateString.toString());
         notifyListeners(t, ModelChange.Operation.ADD, graphURI);
     }
 
     protected void removeTriple(Triple t, String graphURI) {
 
-        String updateString = "DELETE DATA { " + ((graphURI != null) ? "GRAPH <" + graphURI + "> { " : "" )
-                + sparqlNodeUpdate(t.getSubject(), "") + " "
-                + sparqlNodeUpdate(t.getPredicate(), "") + " "
-                + sparqlNodeUpdate(t.getObject(), "") + " } "
-                + ((graphURI != null) ? " } " : "");
+        StringBuffer updateString = new StringBuffer();
+        updateString.append("DELETE DATA { ");
+        updateString.append((graphURI != null) ? "GRAPH <" + graphURI + "> { " : "" );
+        updateString.append(sparqlNodeUpdate(t.getSubject(), ""));
+        updateString.append(" ");
+        updateString.append(sparqlNodeUpdate(t.getPredicate(), ""));
+        updateString.append(" ");
+        updateString.append(sparqlNodeUpdate(t.getObject(), ""));
+        updateString.append(" }");
+        updateString.append((graphURI != null) ? " } " : "");
 
-        executeUpdate(updateString);
+        executeUpdate(updateString.toString());
         notifyListeners(t, ModelChange.Operation.REMOVE, graphURI);
     }
 
     protected synchronized void notifyListeners(Triple triple, ModelChange.Operation operation, String graphURI) {
 
-        if (!(triple.getSubject().isURI() && triple.getPredicate().isURI() && triple.getObject().isURI())) {
-            return;
-        }
-
-        Model model = ModelFactory.createDefaultModel();
-        Statement statement = model.createStatement(model.createResource(triple.getSubject().getURI()),
-                model.createProperty(triple.getPredicate().getURI()),
-                model.createResource(triple.getObject().getURI()));
-        model.add(statement);
-        ByteArrayOutputStream serializedModel = new ByteArrayOutputStream();
-        model.write(serializedModel,getSerializationFormatString(RDFService.ModelSerializationFormat.N3));
-        String serializedTriple = serializedModel.toString();
+        StringBuffer serializedTriple = new StringBuffer();
+        serializedTriple.append(sparqlNodeUpdate(triple.getSubject(), ""));
+        serializedTriple.append(" ");
+        serializedTriple.append(sparqlNodeUpdate(triple.getPredicate(), ""));
+        serializedTriple.append(" ");
+        serializedTriple.append(sparqlNodeUpdate(triple.getObject(), ""));
+        serializedTriple.append(" .");
 
         Iterator<ChangeListener> iter = registeredListeners.iterator();
 
         while (iter.hasNext()) {
             ChangeListener listener = iter.next();
             if (operation == ModelChange.Operation.ADD) {
-                listener.addedStatement(serializedTriple, RDFService.ModelSerializationFormat.N3, graphURI);
+                listener.addedStatement(serializedTriple.toString(), graphURI);
             } else {
-                listener.addedStatement(serializedTriple, RDFService.ModelSerializationFormat.N3, graphURI);
+                listener.removedStatement(serializedTriple.toString(), graphURI);
             }
         }
     }
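Two behavioral changes are worth calling out in notifyListeners(): the old version silently skipped any triple whose subject, predicate, or object was not a URI, and its else branch mistakenly invoked addedStatement() for removals; the new version serializes every triple via sparqlNodeUpdate() and routes removals to removedStatement(). For a triple of plain URIs, the string handed to listeners would look roughly like this (URIs hypothetical):

    <http://example.org/s> <http://example.org/p> <http://example.org/o> .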