diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/impl/sparql/RDFServiceSparql.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/impl/sparql/RDFServiceSparql.java
index f4727ce8d..3e8a49976 100644
--- a/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/impl/sparql/RDFServiceSparql.java
+++ b/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/impl/sparql/RDFServiceSparql.java
@@ -21,14 +21,22 @@ import org.apache.commons.io.IOUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.http.HttpResponse;
+import org.apache.http.auth.AuthScope;
+import org.apache.http.auth.AuthenticationException;
+import org.apache.http.auth.UsernamePasswordCredentials;
+import org.apache.http.client.CredentialsProvider;
import org.apache.http.client.HttpClient;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
+import org.apache.http.client.methods.HttpRequestBase;
+import org.apache.http.client.protocol.ClientContext;
import org.apache.http.client.utils.URIBuilder;
-import org.apache.http.impl.client.DefaultHttpClient;
-import org.apache.http.impl.conn.PoolingClientConnectionManager;
+import org.apache.http.impl.auth.BasicScheme;
+import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.message.BasicNameValuePair;
+import org.apache.http.protocol.BasicHttpContext;
+import org.apache.http.protocol.HttpContext;
import org.apache.http.util.EntityUtils;
import org.apache.jena.riot.RDFDataMgr;
@@ -64,177 +72,179 @@ import edu.cornell.mannlib.vitro.webapp.utils.sparql.ResultSetIterators.ResultSetQuadsIterator;
import edu.cornell.mannlib.vitro.webapp.utils.sparql.ResultSetIterators.ResultSetTriplesIterator;
/*
- * API to write, read, and update Vitro's RDF store, with support
+ * API to write, read, and update Vitro's RDF store, with support
* to allow listening, logging and auditing.
- *
+ *
*/
public class RDFServiceSparql extends RDFServiceImpl implements RDFService {
-
+
private static final Log log = LogFactory.getLog(RDFServiceImpl.class);
- protected String readEndpointURI;
+ protected String readEndpointURI;
protected String updateEndpointURI;
- // the number of triples to be
+ // the number of triples to be
private static final int CHUNK_SIZE = 1000; // added/removed in a single
- // SPARQL UPDATE
+ // SPARQL UPDATE
protected HttpClient httpClient;
/**
- * Returns an RDFService for a remote repository
- * @param String - URI of the read SPARQL endpoint for the knowledge base
- * @param String - URI of the update SPARQL endpoint for the knowledge base
- * @param String - URI of the default write graph within the knowledge base.
- * this is the graph that will be written to when a graph
- * is not explicitly specified.
- *
- * The default read graph is the union of all graphs in the
- * knowledge base
- */
- public RDFServiceSparql(String readEndpointURI, String updateEndpointURI, String defaultWriteGraphURI) {
- this.readEndpointURI = readEndpointURI;
- this.updateEndpointURI = updateEndpointURI;
+ * Returns an RDFService for a remote repository
+ * @param String - URI of the read SPARQL endpoint for the knowledge base
+ * @param String - URI of the update SPARQL endpoint for the knowledge base
+ * @param String - URI of the default write graph within the knowledge base.
+ * this is the graph that will be written to when a graph
+ * is not explicitly specified.
+ *
+ * The default read graph is the union of all graphs in the
+ * knowledge base
+ */
+ public RDFServiceSparql(String readEndpointURI, String updateEndpointURI, String defaultWriteGraphURI) {
+ this.readEndpointURI = readEndpointURI;
+ this.updateEndpointURI = updateEndpointURI;
httpClient = HttpClientFactory.getHttpClient();
- testConnection();
- }
-
- private void testConnection() {
- try {
- this.sparqlSelectQuery(
- "SELECT ?s WHERE { ?s a " +
- "<http://vitro.mannlib.cornell.edu/ns/vitro/0.7#ThingThatDoesNotExist> }",
- RDFService.ResultFormat.JSON);
- } catch (Exception e) {
- throw new RuntimeException("Unable to connect to endpoint at " +
- readEndpointURI, e);
- }
- }
-
- /**
- * Returns an RDFService for a remote repository
- * @param String - URI of the read SPARQL endpoint for the knowledge base
- * @param String - URI of the update SPARQL endpoint for the knowledge base
- *
- * The default read graph is the union of all graphs in the
- * knowledge base
- */
- public RDFServiceSparql(String readEndpointURI, String updateEndpointURI) {
- this(readEndpointURI, updateEndpointURI, null);
- }
-
- /**
- * Returns an RDFService for a remote repository
- * @param String - URI of the read and update SPARQL endpoint for the knowledge base
- *
- * The default read graph is the union of all graphs in the
- * knowledge base
- */
- public RDFServiceSparql(String endpointURI) {
- this(endpointURI, endpointURI, null);
- }
-
- public void close() {
- // nothing for now
- }
-
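+ // Only test the connection when constructing this exact class; subclasses
+ // such as RDFServiceVirtuoso call testConnection() themselves once their
+ // credentials are in place.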
+ if (RDFServiceSparql.class.getName().equals(this.getClass().getName())) {
+ testConnection();
+ }
+ }
+
+ protected void testConnection() {
+ try {
+ this.sparqlSelectQuery(
+ "SELECT ?s WHERE { ?s a " +
+ "<http://vitro.mannlib.cornell.edu/ns/vitro/0.7#ThingThatDoesNotExist> }",
+ RDFService.ResultFormat.JSON);
+ } catch (Exception e) {
+ throw new RuntimeException("Unable to connect to endpoint at " +
+ readEndpointURI, e);
+ }
+ }
+
+ /**
+ * Returns an RDFService for a remote repository
+ * @param String - URI of the read SPARQL endpoint for the knowledge base
+ * @param String - URI of the update SPARQL endpoint for the knowledge base
+ *
+ * The default read graph is the union of all graphs in the
+ * knowledge base
+ */
+ public RDFServiceSparql(String readEndpointURI, String updateEndpointURI) {
+ this(readEndpointURI, updateEndpointURI, null);
+ }
+
+ /**
+ * Returns an RDFService for a remote repository
+ * @param String - URI of the read and update SPARQL endpoint for the knowledge base
+ *
+ * The default read graph is the union of all graphs in the
+ * knowledge base
+ */
+ public RDFServiceSparql(String endpointURI) {
+ this(endpointURI, endpointURI, null);
+ }
+
+ public void close() {
+ // nothing for now
+ }
+
/**
* Perform a series of additions to and/or removals from specified graphs
- * in the RDF store. preConditionSparql will be executed against the
- * union of all the graphs in the knowledge base before any updates are made.
+ * in the RDF store. preConditionSparql will be executed against the
+ * union of all the graphs in the knowledge base before any updates are made.
* If the precondition query returns a non-empty result no updates
- * will be made.
- *
+ * will be made.
+ *
* @param ChangeSet - a set of changes to be performed on the RDF store.
- *
- * @return boolean - indicates whether the precondition was satisfied
+ *
+ * @return boolean - indicates whether the precondition was satisfied
*/
- @Override
- public boolean changeSetUpdate(ChangeSet changeSet)
- throws RDFServiceException {
-
- if (changeSet.getPreconditionQuery() != null
- && !isPreconditionSatisfied(
- changeSet.getPreconditionQuery(),
- changeSet.getPreconditionQueryType())) {
- return false;
- }
-
- try {
- for (Object o : changeSet.getPreChangeEvents()) {
- this.notifyListenersOfEvent(o);
- }
+ @Override
+ public boolean changeSetUpdate(ChangeSet changeSet)
+ throws RDFServiceException {
+
+ if (changeSet.getPreconditionQuery() != null
+ && !isPreconditionSatisfied(
+ changeSet.getPreconditionQuery(),
+ changeSet.getPreconditionQueryType())) {
+ return false;
+ }
+
+ try {
+ for (Object o : changeSet.getPreChangeEvents()) {
+ this.notifyListenersOfEvent(o);
+ }
+
+ Iterator<ModelChange> csIt = changeSet.getModelChanges().iterator();
+ while (csIt.hasNext()) {
+ ModelChange modelChange = csIt.next();
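+ // Buffer the serialized model in memory if its stream cannot be re-read,
+ // then mark it so it can be reset and replayed for the listener
+ // notifications below.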
+ if (!modelChange.getSerializedModel().markSupported()) {
+ byte[] bytes = IOUtils.toByteArray(modelChange.getSerializedModel());
+ modelChange.setSerializedModel(new ByteArrayInputStream(bytes));
+ }
+ modelChange.getSerializedModel().mark(Integer.MAX_VALUE);
+ performChange(modelChange);
+ }
+
+ // notify listeners of triple changes
+ csIt = changeSet.getModelChanges().iterator();
+ while (csIt.hasNext()) {
+ ModelChange modelChange = csIt.next();
+ modelChange.getSerializedModel().reset();
+ Model model = ModelFactory.createModelForGraph(
+ new ListeningGraph(modelChange.getGraphURI(), this));
+ if (modelChange.getOperation() == ModelChange.Operation.ADD) {
+ model.read(modelChange.getSerializedModel(), null,
+ getSerializationFormatString(
+ modelChange.getSerializationFormat()));
+ } else if (modelChange.getOperation() == ModelChange.Operation.REMOVE){
+ Model temp = ModelFactory.createDefaultModel();
+ temp.read(modelChange.getSerializedModel(), null,
+ getSerializationFormatString(
+ modelChange.getSerializationFormat()));
+ model.remove(temp);
+ } else {
+ log.error("Unsupported model change type " +
+ modelChange.getOperation().getClass().getName());
+ }
+ }
+
+ for (Object o : changeSet.getPostChangeEvents()) {
+ this.notifyListenersOfEvent(o);
+ }
+
+ } catch (Exception e) {
+ log.error(e, e);
+ throw new RDFServiceException(e);
+ }
+ return true;
+ }
- Iterator<ModelChange> csIt = changeSet.getModelChanges().iterator();
- while (csIt.hasNext()) {
- ModelChange modelChange = csIt.next();
- if (!modelChange.getSerializedModel().markSupported()) {
- byte[] bytes = IOUtils.toByteArray(modelChange.getSerializedModel());
- modelChange.setSerializedModel(new ByteArrayInputStream(bytes));
- }
- modelChange.getSerializedModel().mark(Integer.MAX_VALUE);
- performChange(modelChange);
- }
-
- // notify listeners of triple changes
- csIt = changeSet.getModelChanges().iterator();
- while (csIt.hasNext()) {
- ModelChange modelChange = csIt.next();
- modelChange.getSerializedModel().reset();
- Model model = ModelFactory.createModelForGraph(
- new ListeningGraph(modelChange.getGraphURI(), this));
- if (modelChange.getOperation() == ModelChange.Operation.ADD) {
- model.read(modelChange.getSerializedModel(), null,
- getSerializationFormatString(
- modelChange.getSerializationFormat()));
- } else if (modelChange.getOperation() == ModelChange.Operation.REMOVE){
- Model temp = ModelFactory.createDefaultModel();
- temp.read(modelChange.getSerializedModel(), null,
- getSerializationFormatString(
- modelChange.getSerializationFormat()));
- model.remove(temp);
- } else {
- log.error("Unsupported model change type " +
- modelChange.getOperation().getClass().getName());
- }
- }
-
- for (Object o : changeSet.getPostChangeEvents()) {
- this.notifyListenersOfEvent(o);
- }
-
- } catch (Exception e) {
- log.error(e, e);
- throw new RDFServiceException(e);
- }
- return true;
- }
-
/**
* Performs a SPARQL construct query against the knowledge base. The query may have
* an embedded graph identifier.
- *
+ *
* @param String query - the SPARQL query to be executed against the RDF store
* @param RDFService.ModelSerializationFormat resultFormat - type of serialization for RDF result of the SPARQL query
* @param OutputStream outputStream - the result of the query
- *
+ *
*/
@Override
public InputStream sparqlConstructQuery(String queryStr,
- RDFServiceImpl.ModelSerializationFormat resultFormat) throws RDFServiceException {
-
+ RDFServiceImpl.ModelSerializationFormat resultFormat) throws RDFServiceException {
+
Model model = ModelFactory.createDefaultModel();
Query query = createQuery(queryStr);
QueryExecution qe = QueryExecutionFactory.sparqlService(readEndpointURI, query);
-
+
try {
qe.execConstruct(model);
} catch (Exception e) {
- log.error("Error executing CONSTRUCT against remote endpoint: " + queryStr);
+ log.error("Error executing CONSTRUCT against remote endpoint: " + queryStr);
} finally {
qe.close();
}
- ByteArrayOutputStream serializedModel = new ByteArrayOutputStream();
+ ByteArrayOutputStream serializedModel = new ByteArrayOutputStream();
model.write(serializedModel,getSerializationFormatString(resultFormat));
InputStream result = new ByteArrayInputStream(serializedModel.toByteArray());
return result;
@@ -257,28 +267,28 @@ public class RDFServiceSparql extends RDFServiceImpl implements RDFService {
/**
* Performs a SPARQL describe query against the knowledge base. The query may have
* an embedded graph identifier.
- *
+ *
* @param String query - the SPARQL query to be executed against the RDF store
* @param RDFService.ModelSerializationFormat resultFormat - type of serialization for RDF result of the SPARQL query
- *
+ *
* @return InputStream - the result of the query
- *
+ *
*/
@Override
public InputStream sparqlDescribeQuery(String queryStr,
- RDFServiceImpl.ModelSerializationFormat resultFormat) throws RDFServiceException {
-
+ RDFServiceImpl.ModelSerializationFormat resultFormat) throws RDFServiceException {
+
Model model = ModelFactory.createDefaultModel();
Query query = createQuery(queryStr);
QueryExecution qe = QueryExecutionFactory.sparqlService(readEndpointURI, query);
-
+
try {
qe.execDescribe(model);
} finally {
qe.close();
}
- ByteArrayOutputStream serializedModel = new ByteArrayOutputStream();
+ ByteArrayOutputStream serializedModel = new ByteArrayOutputStream();
model.write(serializedModel,getSerializationFormatString(resultFormat));
InputStream result = new ByteArrayInputStream(serializedModel.toByteArray());
return result;
@@ -287,22 +297,23 @@ public class RDFServiceSparql extends RDFServiceImpl implements RDFService {
/**
* Performs a SPARQL select query against the knowledge base. The query may have
* an embedded graph identifier.
- *
+ *
* @param String query - the SPARQL query to be executed against the RDF store
* @param RDFService.ResultFormat resultFormat - format for the result of the Select query
- *
+ *
* @return InputStream - the result of the query
- *
+ *
*/
@Override
public InputStream sparqlSelectQuery(String queryStr, RDFService.ResultFormat resultFormat) throws RDFServiceException {
-
- //QueryEngineHTTP qh = new QueryEngineHTTP(readEndpointURI, queryStr);
-
- try {
- HttpGet meth = new HttpGet(new URIBuilder(readEndpointURI).addParameter("query", queryStr).build());
- meth.addHeader("Accept", "application/sparql-results+xml");
- HttpResponse response = httpClient.execute(meth);
+
+ //QueryEngineHTTP qh = new QueryEngineHTTP(readEndpointURI, queryStr);
+
+ try {
+ HttpGet meth = new HttpGet(new URIBuilder(readEndpointURI).addParameter("query", queryStr).build());
+ meth.addHeader("Accept", "application/sparql-results+xml");
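+ // Attach an authenticating context when credentials are configured;
+ // getContext returns null when no credentials are supplied.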
+ HttpContext context = getContext(meth);
+ HttpResponse response = context != null ? httpClient.execute(meth, context) : httpClient.execute(meth);
try {
int statusCode = response.getStatusLine().getStatusCode();
if (statusCode > 399) {
@@ -338,9 +349,9 @@ public class RDFServiceSparql extends RDFServiceImpl implements RDFService {
EntityUtils.consume(response.getEntity());
}
} catch (IOException ioe) {
- throw new RuntimeException(ioe);
- } catch (URISyntaxException e) {
- throw new RuntimeException(e);
+ throw new RuntimeException(ioe);
+ } catch (URISyntaxException e) {
+ throw new RuntimeException(e);
}
}
@@ -351,7 +362,8 @@ public class RDFServiceSparql extends RDFServiceImpl implements RDFService {
try {
HttpGet meth = new HttpGet(new URIBuilder(readEndpointURI).addParameter("query", queryStr).build());
meth.addHeader("Accept", "application/sparql-results+xml");
- HttpResponse response = httpClient.execute(meth);
+ HttpContext context = getContext(meth);
+ HttpResponse response = context != null ? httpClient.execute(meth, context) : httpClient.execute(meth);
try {
int statusCode = response.getStatusLine().getStatusCode();
if (statusCode > 399) {
@@ -376,53 +388,53 @@ public class RDFServiceSparql extends RDFServiceImpl implements RDFService {
/**
* Performs a SPARQL ASK query against the knowledge base. The query may have
* an embedded graph identifier.
- *
+ *
* @param String query - the SPARQL query to be executed against the RDF store
- *
- * @return boolean - the result of the SPARQL query
+ *
+ * @return boolean - the result of the SPARQL query
*/
@Override
public boolean sparqlAskQuery(String queryStr) throws RDFServiceException {
-
- Query query = createQuery(queryStr);
- QueryExecution qe = QueryExecutionFactory.sparqlService(readEndpointURI, query);
-
- try {
- return qe.execAsk();
- } finally {
- qe.close();
- }
+
+ Query query = createQuery(queryStr);
+ QueryExecution qe = QueryExecutionFactory.sparqlService(readEndpointURI, query);
+
+ try {
+ return qe.execAsk();
+ } finally {
+ qe.close();
+ }
}
-
+
/**
* Get a list of all the graph URIs in the RDF store.
- *
- * @return List<String> - list of all the graph URIs in the RDF store
+ *
+ * @return List<String> - list of all the graph URIs in the RDF store
*/
@Override
public List<String> getGraphURIs() throws RDFServiceException {
- return getGraphURIsFromSparqlQuery();
+ return getGraphURIsFromSparqlQuery();
}
-
- private List<String> getGraphURIsFromSparqlQuery() throws RDFServiceException {
- String fastJenaQuery = "SELECT DISTINCT ?g WHERE { GRAPH ?g {} } ORDER BY ?g";
- String standardQuery = "SELECT DISTINCT ?g WHERE { GRAPH ?g { ?s ?p ?o } } ORDER BY ?g";
- List<String> graphURIs = new ArrayList<String>();
- try {
- graphURIs = getGraphURIsFromSparqlQuery(fastJenaQuery);
- } catch (Exception e) {
- log.debug("Unable to use non-standard ARQ query for graph list", e);
- }
- if (graphURIs.isEmpty()) {
- graphURIs = getGraphURIsFromSparqlQuery(standardQuery);
- }
- return graphURIs;
+
+ private List<String> getGraphURIsFromSparqlQuery() throws RDFServiceException {
+ String fastJenaQuery = "SELECT DISTINCT ?g WHERE { GRAPH ?g {} } ORDER BY ?g";
+ String standardQuery = "SELECT DISTINCT ?g WHERE { GRAPH ?g { ?s ?p ?o } } ORDER BY ?g";
+ List<String> graphURIs = new ArrayList<String>();
+ try {
+ graphURIs = getGraphURIsFromSparqlQuery(fastJenaQuery);
+ } catch (Exception e) {
+ log.debug("Unable to use non-standard ARQ query for graph list", e);
+ }
+ if (graphURIs.isEmpty()) {
+ graphURIs = getGraphURIsFromSparqlQuery(standardQuery);
+ }
+ return graphURIs;
}
-
+
private List<String> getGraphURIsFromSparqlQuery(String queryString) throws RDFServiceException {
- final List<String> graphURIs = new ArrayList<String>();
- try {
- sparqlSelectQuery(queryString, new ResultSetConsumer() {
+ final List<String> graphURIs = new ArrayList<String>();
+ try {
+ sparqlSelectQuery(queryString, new ResultSetConsumer() {
@Override
protected void processQuerySolution(QuerySolution qs) {
if (qs != null) { // no idea how this happens, but it seems to
@@ -433,48 +445,48 @@ public class RDFServiceSparql extends RDFServiceImpl implements RDFService {
}
}
});
- } catch (Exception e) {
- throw new RDFServiceException("Unable to list graph URIs", e);
- }
- return graphURIs;
+ } catch (Exception e) {
+ throw new RDFServiceException("Unable to list graph URIs", e);
+ }
+ return graphURIs;
}
/**
* TODO - what is the definition of this method?
- * @return
+ * @return
*/
@Override
public void getGraphMetadata() throws RDFServiceException {
-
+
}
-
+
/**
* Get the URI of the default write graph
- *
+ *
* @return String URI of default write graph
*/
@Override
public String getDefaultWriteGraphURI() throws RDFServiceException {
- return defaultWriteGraphURI;
+ return defaultWriteGraphURI;
}
-
+
/**
* Register a listener to listen to changes in any graph in
* the RDF store.
- *
+ *
*/
@Override
public synchronized void registerListener(ChangeListener changeListener) throws RDFServiceException {
-
+
if (!registeredListeners.contains(changeListener)) {
- registeredListeners.add(changeListener);
+ registeredListeners.add(changeListener);
}
}
-
+
/**
* Unregister a listener from listening to changes in any graph
* in the RDF store.
- *
+ *
*/
@Override
public synchronized void unregisterListener(ChangeListener changeListener) throws RDFServiceException {
@@ -483,31 +495,32 @@ public class RDFServiceSparql extends RDFServiceImpl implements RDFService {
/**
* Create a ChangeSet object
- *
+ *
* @return a ChangeSet object
*/
@Override
public ChangeSet manufactureChangeSet() {
return new ChangeSetImpl();
}
-
+
//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// Non-override methods below
//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- protected String getReadEndpointURI() {
- return readEndpointURI;
- }
-
- protected String getUpdateEndpointURI() {
- return updateEndpointURI;
- }
-
- protected void executeUpdate(String updateString) throws RDFServiceException {
- try {
- HttpPost meth = new HttpPost(updateEndpointURI);
- meth.addHeader("Content-Type", "application/x-www-form-urlencoded");
- meth.setEntity(new UrlEncodedFormEntity(Arrays.asList(new BasicNameValuePair("update", updateString))));
- HttpResponse response = httpClient.execute(meth);
+ protected String getReadEndpointURI() {
+ return readEndpointURI;
+ }
+
+ protected String getUpdateEndpointURI() {
+ return updateEndpointURI;
+ }
+
+ protected void executeUpdate(String updateString) throws RDFServiceException {
+ try {
+ HttpPost meth = new HttpPost(updateEndpointURI);
+ meth.addHeader("Content-Type", "application/x-www-form-urlencoded");
+ meth.setEntity(new UrlEncodedFormEntity(Arrays.asList(new BasicNameValuePair("update", updateString))));
+ HttpContext context = getContext(meth);
+ HttpResponse response = context != null ? httpClient.execute(meth, context) : httpClient.execute(meth);
try {
int statusCode = response.getStatusLine().getStatusCode();
if (statusCode > 399) {
@@ -518,353 +531,344 @@ public class RDFServiceSparql extends RDFServiceImpl implements RDFService {
} finally {
EntityUtils.consume(response.getEntity());
}
- } catch (Exception e) {
- throw new RDFServiceException("Unable to perform change set update", e);
- }
- }
-
- public void addModel(Model model, String graphURI) throws RDFServiceException {
- verbModel(model, graphURI, "INSERT");
- }
-
- public void deleteModel(Model model, String graphURI) throws RDFServiceException {
- verbModel(model, graphURI, "DELETE");
- }
-
- private void verbModel(Model model, String graphURI, String verb) throws RDFServiceException {
- Model m = ModelFactory.createDefaultModel();
- StmtIterator stmtIt = model.listStatements();
- int count = 0;
- try {
- while (stmtIt.hasNext()) {
- count++;
- m.add(stmtIt.nextStatement());
- if (count % CHUNK_SIZE == 0 || !stmtIt.hasNext()) {
- StringWriter sw = new StringWriter();
- m.write(sw, "N-TRIPLE");
- StringBuffer updateStringBuff = new StringBuffer();
- updateStringBuff.append(verb + " DATA { " + ((graphURI != null) ? "GRAPH <" + graphURI + "> { " : "" ));
- updateStringBuff.append(sw);
- updateStringBuff.append(((graphURI != null) ? " } " : "") + " }");
-
- String updateString = updateStringBuff.toString();
-
- executeUpdate(updateString);
-
- m.removeAll();
- }
- }
- } finally {
- stmtIt.close();
- }
- }
-
- protected void addTriple(Triple t, String graphURI) throws RDFServiceException {
-
- StringBuffer updateString = new StringBuffer();
- updateString.append("INSERT DATA { ");
- updateString.append((graphURI != null) ? "GRAPH <" + graphURI + "> { " : "" );
- updateString.append(sparqlNodeUpdate(t.getSubject(), ""));
- updateString.append(" ");
- updateString.append(sparqlNodeUpdate(t.getPredicate(), ""));
- updateString.append(" ");
- updateString.append(sparqlNodeUpdate(t.getObject(), ""));
- updateString.append(" }");
- updateString.append((graphURI != null) ? " } " : "");
-
- executeUpdate(updateString.toString());
- notifyListeners(t, ModelChange.Operation.ADD, graphURI);
- }
-
- protected void removeTriple(Triple t, String graphURI) throws RDFServiceException {
-
- StringBuffer updateString = new StringBuffer();
- updateString.append("DELETE DATA { ");
- updateString.append((graphURI != null) ? "GRAPH <" + graphURI + "> { " : "" );
- updateString.append(sparqlNodeUpdate(t.getSubject(), ""));
- updateString.append(" ");
- updateString.append(sparqlNodeUpdate(t.getPredicate(), ""));
- updateString.append(" ");
- updateString.append(sparqlNodeUpdate(t.getObject(), ""));
- updateString.append(" }");
- updateString.append((graphURI != null) ? " } " : "");
-
- executeUpdate(updateString.toString());
- notifyListeners(t, ModelChange.Operation.REMOVE, graphURI);
- }
-
- @Override
- protected boolean isPreconditionSatisfied(String query,
- RDFService.SPARQLQueryType queryType)
- throws RDFServiceException {
- Model model = ModelFactory.createDefaultModel();
-
- switch (queryType) {
- case DESCRIBE:
- model.read(sparqlDescribeQuery(query,RDFService.ModelSerializationFormat.N3), null);
- return !model.isEmpty();
- case CONSTRUCT:
- model.read(sparqlConstructQuery(query,RDFService.ModelSerializationFormat.N3), null);
- return !model.isEmpty();
- case SELECT:
- return sparqlSelectQueryHasResults(query);
- case ASK:
- return sparqlAskQuery(query);
- default:
- throw new RDFServiceException("unrecognized SPARQL query type");
- }
+ } catch (Exception e) {
+ throw new RDFServiceException("Unable to perform change set update", e);
+ }
}
-
+
+ public void addModel(Model model, String graphURI) throws RDFServiceException {
+ verbModel(model, graphURI, "INSERT");
+ }
+
+ public void deleteModel(Model model, String graphURI) throws RDFServiceException {
+ verbModel(model, graphURI, "DELETE");
+ }
+
+ private void verbModel(Model model, String graphURI, String verb) throws RDFServiceException {
+ Model m = ModelFactory.createDefaultModel();
+ StmtIterator stmtIt = model.listStatements();
+ int count = 0;
+ try {
+ while (stmtIt.hasNext()) {
+ count++;
+ m.add(stmtIt.nextStatement());
+ if (count % CHUNK_SIZE == 0 || !stmtIt.hasNext()) {
+ StringWriter sw = new StringWriter();
+ m.write(sw, "N-TRIPLE");
+ StringBuffer updateStringBuff = new StringBuffer();
+ updateStringBuff.append(verb + " DATA { " + ((graphURI != null) ? "GRAPH <" + graphURI + "> { " : "" ));
+ updateStringBuff.append(sw);
+ updateStringBuff.append(((graphURI != null) ? " } " : "") + " }");
+
+ String updateString = updateStringBuff.toString();
+
+ executeUpdate(updateString);
+
+ m.removeAll();
+ }
+ }
+ } finally {
+ stmtIt.close();
+ }
+ }
+
+ protected void addTriple(Triple t, String graphURI) throws RDFServiceException {
+
+ StringBuffer updateString = new StringBuffer();
+ updateString.append("INSERT DATA { ");
+ updateString.append((graphURI != null) ? "GRAPH <" + graphURI + "> { " : "" );
+ updateString.append(sparqlNodeUpdate(t.getSubject(), ""));
+ updateString.append(" ");
+ updateString.append(sparqlNodeUpdate(t.getPredicate(), ""));
+ updateString.append(" ");
+ updateString.append(sparqlNodeUpdate(t.getObject(), ""));
+ updateString.append(" }");
+ updateString.append((graphURI != null) ? " } " : "");
+
+ executeUpdate(updateString.toString());
+ notifyListeners(t, ModelChange.Operation.ADD, graphURI);
+ }
+
+ protected void removeTriple(Triple t, String graphURI) throws RDFServiceException {
+
+ StringBuffer updateString = new StringBuffer();
+ updateString.append("DELETE DATA { ");
+ updateString.append((graphURI != null) ? "GRAPH <" + graphURI + "> { " : "" );
+ updateString.append(sparqlNodeUpdate(t.getSubject(), ""));
+ updateString.append(" ");
+ updateString.append(sparqlNodeUpdate(t.getPredicate(), ""));
+ updateString.append(" ");
+ updateString.append(sparqlNodeUpdate(t.getObject(), ""));
+ updateString.append(" }");
+ updateString.append((graphURI != null) ? " } " : "");
+
+ executeUpdate(updateString.toString());
+ notifyListeners(t, ModelChange.Operation.REMOVE, graphURI);
+ }
+
+ @Override
+ protected boolean isPreconditionSatisfied(String query,
+ RDFService.SPARQLQueryType queryType)
+ throws RDFServiceException {
+ Model model = ModelFactory.createDefaultModel();
+
+ switch (queryType) {
+ case DESCRIBE:
+ model.read(sparqlDescribeQuery(query,RDFService.ModelSerializationFormat.N3), null);
+ return !model.isEmpty();
+ case CONSTRUCT:
+ model.read(sparqlConstructQuery(query,RDFService.ModelSerializationFormat.N3), null);
+ return !model.isEmpty();
+ case SELECT:
+ return sparqlSelectQueryHasResults(query);
+ case ASK:
+ return sparqlAskQuery(query);
+ default:
+ throw new RDFServiceException("unrecognized SPARQL query type");
+ }
+ }
+
@Override
protected boolean sparqlSelectQueryHasResults(String queryStr) throws RDFServiceException {
-
- Query query = createQuery(queryStr);
- QueryExecution qe = QueryExecutionFactory.sparqlService(readEndpointURI, query);
-
- try {
- ResultSet resultSet = qe.execSelect();
- return resultSet.hasNext();
- } finally {
- qe.close();
- }
- }
-
- private void performChange(ModelChange modelChange) throws RDFServiceException {
- Model model = parseModel(modelChange);
- Model[] separatedModel = separateStatementsWithBlankNodes(model);
- if (modelChange.getOperation() == ModelChange.Operation.ADD) {
- addModel(separatedModel[1], modelChange.getGraphURI());
- addBlankNodesWithSparqlUpdate(separatedModel[0], modelChange.getGraphURI());
- } else if (modelChange.getOperation() == ModelChange.Operation.REMOVE) {
- deleteModel(separatedModel[1], modelChange.getGraphURI());
- removeBlankNodesWithSparqlUpdate(separatedModel[0], modelChange.getGraphURI());
- } else {
- log.error("unrecognized operation type");
- }
- }
-
- private void addBlankNodesWithSparqlUpdate(Model model, String graphURI)
- throws RDFServiceException {
- updateBlankNodesWithSparqlUpdate(model, graphURI, ADD);
- }
-
- private void removeBlankNodesWithSparqlUpdate(Model model, String graphURI)
- throws RDFServiceException {
- updateBlankNodesWithSparqlUpdate(model, graphURI, REMOVE);
- }
-
- private static final boolean ADD = true;
- private static final boolean REMOVE = false;
-
- private void updateBlankNodesWithSparqlUpdate(Model model, String graphURI, boolean add)
- throws RDFServiceException {
- List<Statement> blankNodeStatements = new ArrayList<Statement>();
- StmtIterator stmtIt = model.listStatements();
- while (stmtIt.hasNext()) {
- Statement stmt = stmtIt.nextStatement();
- if (stmt.getSubject().isAnon() || stmt.getObject().isAnon()) {
- blankNodeStatements.add(stmt);
- }
- }
-
- if(blankNodeStatements.size() == 0) {
- return;
- }
-
- Model blankNodeModel = ModelFactory.createDefaultModel();
- blankNodeModel.add(blankNodeStatements);
-
- log.debug("update model size " + model.size());
- log.debug("blank node model size " + blankNodeModel.size());
-
- if (!add && blankNodeModel.size() == 1) {
- log.warn("Deleting single triple with blank node: " + blankNodeModel);
- log.warn("This likely indicates a problem; excessive data may be deleted.");
- }
-
- Query rootFinderQuery = QueryFactory.create(BNODE_ROOT_QUERY);
- QueryExecution qe = QueryExecutionFactory.create(rootFinderQuery, blankNodeModel);
- try {
- ResultSet rs = qe.execSelect();
- while (rs.hasNext()) {
- QuerySolution qs = rs.next();
- com.hp.hpl.jena.rdf.model.Resource s = qs.getResource("s");
- String treeFinder = makeDescribe(s);
- Query treeFinderQuery = QueryFactory.create(treeFinder);
- QueryExecution qee = QueryExecutionFactory.create(treeFinderQuery, blankNodeModel);
- try {
- Model tree = qee.execDescribe();
- if (s.isAnon()) {
- if (add) {
- addModel(tree, graphURI);
- } else {
- removeUsingSparqlUpdate(tree, graphURI);
- }
- } else {
- StmtIterator sit = tree.listStatements(s, null, (RDFNode) null);
- while (sit.hasNext()) {
- Statement stmt = sit.nextStatement();
- RDFNode n = stmt.getObject();
- Model m2 = ModelFactory.createDefaultModel();
- if (n.isResource()) {
- com.hp.hpl.jena.rdf.model.Resource s2 =
- (com.hp.hpl.jena.rdf.model.Resource) n;
- // now run yet another describe query
- String smallerTree = makeDescribe(s2);
- Query smallerTreeQuery = QueryFactory.create(smallerTree);
- QueryExecution qe3 = QueryExecutionFactory.create(
- smallerTreeQuery, tree);
- try {
- qe3.execDescribe(m2);
- } finally {
- qe3.close();
- }
- }
- m2.add(stmt);
- if (add) {
- addModel(m2, graphURI);
- } else {
- removeUsingSparqlUpdate(m2, graphURI);
- }
- }
- }
- } finally {
- qee.close();
- }
- }
- } finally {
- qe.close();
- }
- }
-
- private void removeUsingSparqlUpdate(Model model, String graphURI)
- throws RDFServiceException {
-
- StmtIterator stmtIt = model.listStatements();
-
- if (!stmtIt.hasNext()) {
- stmtIt.close();
- return;
- }
-
- StringBuffer queryBuff = new StringBuffer();
- queryBuff.append("DELETE { \n");
- if (graphURI != null) {
- queryBuff.append(" GRAPH <" + graphURI + "> { \n");
- }
- List<Statement> stmts = stmtIt.toList();
- sort(stmts);
- addStatementPatterns(stmts, queryBuff, !WHERE_CLAUSE);
- if (graphURI != null) {
- queryBuff.append(" } \n");
- }
- queryBuff.append("} WHERE { \n");
- if (graphURI != null) {
- queryBuff.append(" GRAPH <" + graphURI + "> { \n");
- }
- stmtIt = model.listStatements();
- stmts = stmtIt.toList();
- sort(stmts);
- addStatementPatterns(stmts, queryBuff, WHERE_CLAUSE);
- if (graphURI != null) {
- queryBuff.append(" } \n");
- }
- queryBuff.append("} \n");
-
- if(log.isDebugEnabled()) {
- log.debug(queryBuff.toString());
- }
- executeUpdate(queryBuff.toString());
- }
-
- private List<Statement> sort(List<Statement> stmts) {
- List<Statement> output = new ArrayList<Statement>();
- int originalSize = stmts.size();
- if (originalSize == 1)
- return stmts;
- List<Statement> remaining = stmts;
- ConcurrentLinkedQueue<com.hp.hpl.jena.rdf.model.Resource> subjQueue =
- new ConcurrentLinkedQueue<com.hp.hpl.jena.rdf.model.Resource>();
- for(Statement stmt : remaining) {
- if(stmt.getSubject().isURIResource()) {
- subjQueue.add(stmt.getSubject());
- break;
- }
- }
- if (subjQueue.isEmpty()) {
- throw new RuntimeException("No named subject in statement patterns");
- }
- while(remaining.size() > 0) {
- if(subjQueue.isEmpty()) {
- subjQueue.add(remaining.get(0).getSubject());
- }
- while(!subjQueue.isEmpty()) {
- com.hp.hpl.jena.rdf.model.Resource subj = subjQueue.poll();
- List<Statement> temp = new ArrayList<Statement>();
- for (Statement stmt : remaining) {
- if(stmt.getSubject().equals(subj)) {
- output.add(stmt);
- if (stmt.getObject().isResource()) {
- subjQueue.add((com.hp.hpl.jena.rdf.model.Resource) stmt.getObject());
- }
- } else {
- temp.add(stmt);
- }
- }
- remaining = temp;
- }
- }
- if(output.size() != originalSize) {
- throw new RuntimeException("original list size was " + originalSize +
- " but sorted size is " + output.size());
- }
- return output;
- }
-
- private static final boolean WHERE_CLAUSE = true;
-
- private void addStatementPatterns(List<Statement> stmts, StringBuffer patternBuff, boolean whereClause) {
- for(Statement stmt : stmts) {
- Triple t = stmt.asTriple();
- patternBuff.append(SparqlGraph.sparqlNodeDelete(t.getSubject(), null));
- patternBuff.append(" ");
- patternBuff.append(SparqlGraph.sparqlNodeDelete(t.getPredicate(), null));
- patternBuff.append(" ");
- patternBuff.append(SparqlGraph.sparqlNodeDelete(t.getObject(), null));
- patternBuff.append(" .\n");
- if (whereClause) {
- if (t.getSubject().isBlank()) {
- patternBuff.append(" FILTER(isBlank(" + SparqlGraph.sparqlNodeDelete(t.getSubject(), null)).append(")) \n");
- }
- if (t.getObject().isBlank()) {
- patternBuff.append(" FILTER(isBlank(" + SparqlGraph.sparqlNodeDelete(t.getObject(), null)).append(")) \n");
- }
- }
- }
- }
-
- private String makeDescribe(com.hp.hpl.jena.rdf.model.Resource s) {
- StringBuffer query = new StringBuffer("DESCRIBE <") ;
- if (s.isAnon()) {
- query.append("_:" + s.getId().toString());
- } else {
- query.append(s.getURI());
- }
- query.append(">");
- return query.toString();
- }
-
- private Model parseModel(ModelChange modelChange) {
- Model model = ModelFactory.createDefaultModel();
- model.read(modelChange.getSerializedModel(), null,
- getSerializationFormatString(modelChange.getSerializationFormat()));
- return model;
- }
- @Override
+ Query query = createQuery(queryStr);
+ QueryExecution qe = QueryExecutionFactory.sparqlService(readEndpointURI, query);
+
+ try {
+ ResultSet resultSet = qe.execSelect();
+ return resultSet.hasNext();
+ } finally {
+ qe.close();
+ }
+ }
+
+ private void performChange(ModelChange modelChange) throws RDFServiceException {
+ Model model = parseModel(modelChange);
+ Model[] separatedModel = separateStatementsWithBlankNodes(model);
+ if (modelChange.getOperation() == ModelChange.Operation.ADD) {
+ addModel(separatedModel[1], modelChange.getGraphURI());
+ addBlankNodesWithSparqlUpdate(separatedModel[0], modelChange.getGraphURI());
+ } else if (modelChange.getOperation() == ModelChange.Operation.REMOVE) {
+ deleteModel(separatedModel[1], modelChange.getGraphURI());
+ removeBlankNodesWithSparqlUpdate(separatedModel[0], modelChange.getGraphURI());
+ } else {
+ log.error("unrecognized operation type");
+ }
+ }
+
+ private void addBlankNodesWithSparqlUpdate(Model model, String graphURI)
+ throws RDFServiceException {
+ updateBlankNodesWithSparqlUpdate(model, graphURI, ADD);
+ }
+
+ private void removeBlankNodesWithSparqlUpdate(Model model, String graphURI)
+ throws RDFServiceException {
+ updateBlankNodesWithSparqlUpdate(model, graphURI, REMOVE);
+ }
+
+ private static final boolean ADD = true;
+ private static final boolean REMOVE = false;
+
+ private void updateBlankNodesWithSparqlUpdate(Model model, String graphURI, boolean add)
+ throws RDFServiceException {
+ List<Statement> blankNodeStatements = new ArrayList<Statement>();
+ StmtIterator stmtIt = model.listStatements();
+ while (stmtIt.hasNext()) {
+ Statement stmt = stmtIt.nextStatement();
+ if (stmt.getSubject().isAnon() || stmt.getObject().isAnon()) {
+ blankNodeStatements.add(stmt);
+ }
+ }
+
+ if(blankNodeStatements.size() == 0) {
+ return;
+ }
+
+ Model blankNodeModel = ModelFactory.createDefaultModel();
+ blankNodeModel.add(blankNodeStatements);
+
+ log.debug("update model size " + model.size());
+ log.debug("blank node model size " + blankNodeModel.size());
+
+ if (!add && blankNodeModel.size() == 1) {
+ log.warn("Deleting single triple with blank node: " + blankNodeModel);
+ log.warn("This likely indicates a problem; excessive data may be deleted.");
+ }
+
+ Query rootFinderQuery = QueryFactory.create(BNODE_ROOT_QUERY);
+ QueryExecution qe = QueryExecutionFactory.create(rootFinderQuery, blankNodeModel);
+ try {
+ ResultSet rs = qe.execSelect();
+ while (rs.hasNext()) {
+ QuerySolution qs = rs.next();
+ com.hp.hpl.jena.rdf.model.Resource s = qs.getResource("s");
+ String treeFinder = makeDescribe(s);
+ Query treeFinderQuery = QueryFactory.create(treeFinder);
+ QueryExecution qee = QueryExecutionFactory.create(treeFinderQuery, blankNodeModel);
+ try {
+ Model tree = qee.execDescribe();
+ if (s.isAnon()) {
+ if (add) {
+ addModel(tree, graphURI);
+ } else {
+ removeUsingSparqlUpdate(tree, graphURI);
+ }
+ } else {
+ StmtIterator sit = tree.listStatements(s, null, (RDFNode) null);
+ while (sit.hasNext()) {
+ Statement stmt = sit.nextStatement();
+ RDFNode n = stmt.getObject();
+ Model m2 = ModelFactory.createDefaultModel();
+ if (n.isResource()) {
+ com.hp.hpl.jena.rdf.model.Resource s2 =
+ (com.hp.hpl.jena.rdf.model.Resource) n;
+ // now run yet another describe query
+ String smallerTree = makeDescribe(s2);
+ Query smallerTreeQuery = QueryFactory.create(smallerTree);
+ QueryExecution qe3 = QueryExecutionFactory.create(
+ smallerTreeQuery, tree);
+ try {
+ qe3.execDescribe(m2);
+ } finally {
+ qe3.close();
+ }
+ }
+ m2.add(stmt);
+ if (add) {
+ addModel(m2, graphURI);
+ } else {
+ removeUsingSparqlUpdate(m2, graphURI);
+ }
+ }
+ }
+ } finally {
+ qee.close();
+ }
+ }
+ } finally {
+ qe.close();
+ }
+ }
+
+ private void removeUsingSparqlUpdate(Model model, String graphURI)
+ throws RDFServiceException {
+
+ StmtIterator stmtIt = model.listStatements();
+
+ if (!stmtIt.hasNext()) {
+ stmtIt.close();
+ return;
+ }
+
+ StringBuffer queryBuff = new StringBuffer();
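+ // WITH <graphURI> scopes both the DELETE template and the WHERE clause
+ // to the target graph, replacing the separate GRAPH blocks used previously.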
+ if (graphURI != null) {
+ queryBuff.append("WITH <" + graphURI + "> \n");
+ }
+ queryBuff.append("DELETE { \n");
+ List<Statement> stmts = stmtIt.toList();
+ sort(stmts);
+ addStatementPatterns(stmts, queryBuff, !WHERE_CLAUSE);
+ queryBuff.append("} WHERE { \n");
+ stmtIt = model.listStatements();
+ stmts = stmtIt.toList();
+ sort(stmts);
+ addStatementPatterns(stmts, queryBuff, WHERE_CLAUSE);
+ queryBuff.append("} \n");
+
+ if(log.isDebugEnabled()) {
+ log.debug(queryBuff.toString());
+ }
+ executeUpdate(queryBuff.toString());
+ }
+
+ private List<Statement> sort(List<Statement> stmts) {
+ List<Statement> output = new ArrayList<Statement>();
+ int originalSize = stmts.size();
+ if (originalSize == 1)
+ return stmts;
+ List<Statement> remaining = stmts;
+ ConcurrentLinkedQueue<com.hp.hpl.jena.rdf.model.Resource> subjQueue =
+ new ConcurrentLinkedQueue<com.hp.hpl.jena.rdf.model.Resource>();
+ for(Statement stmt : remaining) {
+ if(stmt.getSubject().isURIResource()) {
+ subjQueue.add(stmt.getSubject());
+ break;
+ }
+ }
+ if (subjQueue.isEmpty()) {
+ throw new RuntimeException("No named subject in statement patterns");
+ }
+ while(remaining.size() > 0) {
+ if(subjQueue.isEmpty()) {
+ subjQueue.add(remaining.get(0).getSubject());
+ }
+ while(!subjQueue.isEmpty()) {
+ com.hp.hpl.jena.rdf.model.Resource subj = subjQueue.poll();
+ List<Statement> temp = new ArrayList<Statement>();
+ for (Statement stmt : remaining) {
+ if(stmt.getSubject().equals(subj)) {
+ output.add(stmt);
+ if (stmt.getObject().isResource()) {
+ subjQueue.add((com.hp.hpl.jena.rdf.model.Resource) stmt.getObject());
+ }
+ } else {
+ temp.add(stmt);
+ }
+ }
+ remaining = temp;
+ }
+ }
+ if(output.size() != originalSize) {
+ throw new RuntimeException("original list size was " + originalSize +
+ " but sorted size is " + output.size());
+ }
+ return output;
+ }
+
+ private static final boolean WHERE_CLAUSE = true;
+
+ private void addStatementPatterns(List<Statement> stmts, StringBuffer patternBuff, boolean whereClause) {
+ for(Statement stmt : stmts) {
+ Triple t = stmt.asTriple();
+ patternBuff.append(SparqlGraph.sparqlNodeDelete(t.getSubject(), null));
+ patternBuff.append(" ");
+ patternBuff.append(SparqlGraph.sparqlNodeDelete(t.getPredicate(), null));
+ patternBuff.append(" ");
+ patternBuff.append(SparqlGraph.sparqlNodeDelete(t.getObject(), null));
+ patternBuff.append(" .\n");
+ if (whereClause) {
+ if (t.getSubject().isBlank()) {
+ patternBuff.append(" FILTER(isBlank(" + SparqlGraph.sparqlNodeDelete(t.getSubject(), null)).append(")) \n");
+ }
+ if (t.getObject().isBlank()) {
+ patternBuff.append(" FILTER(isBlank(" + SparqlGraph.sparqlNodeDelete(t.getObject(), null)).append(")) \n");
+ }
+ }
+ }
+ }
+
+ private String makeDescribe(com.hp.hpl.jena.rdf.model.Resource s) {
+ StringBuffer query = new StringBuffer("DESCRIBE <") ;
+ if (s.isAnon()) {
+ query.append("_:" + s.getId().toString());
+ } else {
+ query.append(s.getURI());
+ }
+ query.append(">");
+ return query.toString();
+ }
+
+ private Model parseModel(ModelChange modelChange) {
+ Model model = ModelFactory.createDefaultModel();
+ model.read(modelChange.getSerializedModel(), null,
+ getSerializationFormatString(modelChange.getSerializationFormat()));
+ return model;
+ }
+
+ @Override
public void serializeAll(OutputStream outputStream)
throws RDFServiceException {
- String query = "SELECT * WHERE { GRAPH ?g {?s ?p ?o}}";
+ String query = "SELECT * WHERE { GRAPH ?g {?s ?p ?o}}";
serialize(outputStream, query);
}
@@ -876,8 +880,8 @@ public class RDFServiceSparql extends RDFServiceImpl implements RDFService {
}
private void serialize(OutputStream outputStream, String query) throws RDFServiceException {
- InputStream resultStream = sparqlSelectQuery(query, RDFService.ResultFormat.JSON);
- ResultSet resultSet = ResultSetFactory.fromJSON(resultStream);
+ InputStream resultStream = sparqlSelectQuery(query, RDFService.ResultFormat.JSON);
+ ResultSet resultSet = ResultSetFactory.fromJSON(resultStream);
if (resultSet.getResultVars().contains("g")) {
Iterator<Quad> quads = new ResultSetQuadsIterator(resultSet);
RDFDataMgr.writeQuads(outputStream, quads);
@@ -892,12 +896,35 @@ public class RDFServiceSparql extends RDFServiceImpl implements RDFService {
* tripleStore, and ask whether they are isomorphic.
*/
@Override
- public boolean isEquivalentGraph(String graphURI, InputStream serializedGraph,
- ModelSerializationFormat serializationFormat) throws RDFServiceException {
+ public boolean isEquivalentGraph(String graphURI, InputStream serializedGraph,
+ ModelSerializationFormat serializationFormat) throws RDFServiceException {
Model fileModel = RDFServiceUtils.parseModel(serializedGraph, serializationFormat);
Model tripleStoreModel = new RDFServiceDataset(this).getNamedModel(graphURI);
Model fromTripleStoreModel = ModelFactory.createDefaultModel().add(tripleStoreModel);
return fileModel.isIsomorphicWith(fromTripleStoreModel);
}
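+
+ /**
+ * Build an HttpContext for the request. When credentials are available,
+ * the Authorization header is set preemptively and a credentials provider
+ * is attached for any subsequent challenge; otherwise null is returned and
+ * the request is executed without a context.
+ */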
+ protected HttpContext getContext(HttpRequestBase request) {
+ UsernamePasswordCredentials credentials = getCredentials();
+ if (credentials != null) {
+ try {
+ request.addHeader(new BasicScheme().authenticate(credentials, request, null));
+
+ CredentialsProvider provider = new BasicCredentialsProvider();
+ provider.setCredentials(AuthScope.ANY, getCredentials());
+
+ BasicHttpContext context = new BasicHttpContext();
+ context.setAttribute(ClientContext.CREDS_PROVIDER, provider);
+ return context;
+ } catch (AuthenticationException e) {
+ log.error("Unable to set credentials");
+ }
+ }
+
+ return null;
+ }
+
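+ /**
+ * Return credentials for the endpoints, or null if none are required.
+ * Subclasses that access secured endpoints override this method.
+ */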
+ protected UsernamePasswordCredentials getCredentials() {
+ return null;
+ }
}
diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/impl/virtuoso/RDFServiceVirtuoso.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/impl/virtuoso/RDFServiceVirtuoso.java
index 6e9847bf7..e7117f045 100644
--- a/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/impl/virtuoso/RDFServiceVirtuoso.java
+++ b/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/impl/virtuoso/RDFServiceVirtuoso.java
@@ -6,20 +6,26 @@ import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
+import java.util.ArrayList;
+import java.util.List;
-import org.apache.commons.httpclient.HttpMethod;
+import com.hp.hpl.jena.rdf.model.Model;
+import com.hp.hpl.jena.rdf.model.ModelFactory;
+import com.hp.hpl.jena.rdf.model.Property;
+import com.hp.hpl.jena.rdf.model.RDFNode;
+import com.hp.hpl.jena.rdf.model.Resource;
+import com.hp.hpl.jena.rdf.model.Selector;
+import com.hp.hpl.jena.rdf.model.Statement;
+import com.hp.hpl.jena.rdf.model.StmtIterator;
+import edu.cornell.mannlib.vitro.webapp.dao.jena.RDFServiceDataset;
+import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.RDFServiceUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.http.HttpResponse;
-import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
-import org.apache.http.client.CredentialsProvider;
import org.apache.http.client.methods.HttpPost;
-import org.apache.http.client.protocol.ClientContext;
import org.apache.http.entity.StringEntity;
-import org.apache.http.impl.client.BasicCredentialsProvider;
-import org.apache.http.protocol.BasicHttpContext;
import org.apache.http.protocol.HttpContext;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException;
@@ -53,10 +59,10 @@ public class RDFServiceVirtuoso extends RDFServiceSparql {
private final String password;
public RDFServiceVirtuoso(String baseURI, String username, String password) {
- super(figureReadEndpointUri(baseURI), figureUpdateEndpointUri(baseURI,
- username));
+ super(figureReadEndpointUri(baseURI), figureUpdateEndpointUri(baseURI, username));
this.username = username;
this.password = password;
+ testConnection();
}
private static String figureReadEndpointUri(String baseUri) {
@@ -81,8 +87,8 @@ public class RDFServiceVirtuoso extends RDFServiceSparql {
try {
HttpPost request = createHttpRequest(updateString);
- HttpResponse response = httpClient.execute(
- request, createHttpContext());
+ HttpContext context = getContext(request);
+ HttpResponse response = context != null ? httpClient.execute(request, context) : httpClient.execute(request);
try {
int statusCode = response.getStatusLine().getStatusCode();
if (statusCode > 399) {
@@ -129,18 +135,29 @@ public class RDFServiceVirtuoso extends RDFServiceSparql {
return meth;
}
- /**
- * We need an HttpContext that will provide username and password in
- * response to a basic authentication challenge.
- */
- private HttpContext createHttpContext() {
- CredentialsProvider provider = new BasicCredentialsProvider();
- provider.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials(
- username, password));
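+ /**
+ * Supply the Virtuoso username and password so that RDFServiceSparql can
+ * build an authenticating context for queries and updates.
+ */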
+ @Override
+ protected UsernamePasswordCredentials getCredentials() {
+ if (username != null && password != null) {
+ return new UsernamePasswordCredentials(username, password);
+ }
- BasicHttpContext context = new BasicHttpContext();
- context.setAttribute(ClientContext.CREDS_PROVIDER, provider);
- return context;
+ return null;
+ }
+
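+ // Virtuoso may return numeric literals with a different xsd datatype than
+ // was stored, so equivalence checking compares numeric values rather than
+ // exact datatypes.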
+ private static boolean isNumeric(String typeUri) {
+ return typeUri != null && (typeUri.endsWith("decimal") ||
+ typeUri.endsWith("int") ||
+ typeUri.endsWith("integer") ||
+ typeUri.endsWith("float") ||
+ typeUri.endsWith("long") ||
+ typeUri.endsWith("negativeInteger") ||
+ typeUri.endsWith("nonNegativeInteger") ||
+ typeUri.endsWith("nonPositiveInteger") ||
+ typeUri.endsWith("positiveInteger") ||
+ typeUri.endsWith("short") ||
+ typeUri.endsWith("unsignedLong") ||
+ typeUri.endsWith("unsignedInt") ||
+ typeUri.endsWith("unsignedShort") ||
+ typeUri.endsWith("unsignedByte"));
}
/**
@@ -150,14 +167,84 @@ public class RDFServiceVirtuoso extends RDFServiceSparql {
* To determine whether this serialized graph is equivalent to what is
* already in Virtuoso, we need to do the same.
*/
- @Override
- public boolean isEquivalentGraph(String graphURI,
- InputStream serializedGraph,
- ModelSerializationFormat serializationFormat)
- throws RDFServiceException {
- return super.isEquivalentGraph(graphURI,
- adjustForNonNegativeIntegers(serializedGraph),
- serializationFormat);
+ @Override
+ public boolean isEquivalentGraph(String graphURI, InputStream serializedGraph,
+ ModelSerializationFormat serializationFormat) throws RDFServiceException {
+ Model fileModel = RDFServiceUtils.parseModel(serializedGraph, serializationFormat);
+ Model tripleStoreModel = new RDFServiceDataset(this).getNamedModel(graphURI);
+ Model fromTripleStoreModel = ModelFactory.createDefaultModel().add(tripleStoreModel);
+
+ // Compare the models
+ Model difference = fileModel.difference(fromTripleStoreModel);
+
+ // If there is a difference
+ if (difference.size() > 0) {
+ // First, normalize the numeric values, as Virtuoso likes to mess with the datatypes
+ // Iterate over the differences
+ StmtIterator stmtIterator = difference.listStatements();
+ while (stmtIterator.hasNext()) {
+ final Statement stmt = stmtIterator.next();
+ final RDFNode subject = stmt.getSubject();
+ final Property predicate = stmt.getPredicate();
+ final RDFNode object = stmt.getObject();
+
+ // If the object is a numeric literal
+ if (object.isLiteral() && isNumeric(object.asLiteral().getDatatypeURI())) {
+ // Find a matching statement in the triple store, based on normalized numeric values
+ StmtIterator matching = fromTripleStoreModel.listStatements(new Selector() {
+ @Override
+ public boolean test(Statement statement) {
+ RDFNode objectToMatch = statement.getObject();
+
+ // Both values are numeric, so compare them as parsed doubles
+ if (objectToMatch.isLiteral()) {
+ String num1 = object.asLiteral().getString();
+ String num2 = objectToMatch.asLiteral().getString();
+
+ return Double.parseDouble(num1) == Double.parseDouble(num2);
+ }
+
+ return false;
+ }
+
+ @Override
+ public boolean isSimple() {
+ return false;
+ }
+
+ @Override
+ public Resource getSubject() {
+ return subject.asResource();
+ }
+
+ @Override
+ public Property getPredicate() {
+ return predicate;
+ }
+
+ @Override
+ public RDFNode getObject() {
+ return null;
+ }
+ });
+
+ // For every matching statement
+ // Rewrite the object as the one in the file model (they are the same, just differ in datatype)
+ List<Statement> toModify = new ArrayList<Statement>();
+ while (matching.hasNext()) {
+ toModify.add(matching.next());
+ }
+
+ for (Statement stmtToModify : toModify) {
+ stmtToModify.changeObject(object);
+ }
+ }
+ }
+
+ // Now we've normalized the datatypes, check the graphs are isomorphic
+ return fileModel.isIsomorphicWith(fromTripleStoreModel);
+ }
+
+ return true;
}
/**
diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/servlet/setup/SolrSmokeTest.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/servlet/setup/SolrSmokeTest.java
index cb6702ee8..0701c8594 100644
--- a/webapp/src/edu/cornell/mannlib/vitro/webapp/servlet/setup/SolrSmokeTest.java
+++ b/webapp/src/edu/cornell/mannlib/vitro/webapp/servlet/setup/SolrSmokeTest.java
@@ -12,6 +12,7 @@ import java.net.UnknownHostException;
import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;
+import edu.cornell.mannlib.vitro.webapp.utils.http.HttpClientFactory;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.http.HttpException;
@@ -24,6 +25,7 @@ import org.apache.http.impl.client.DefaultHttpClient;
import edu.cornell.mannlib.vitro.webapp.config.ConfigurationProperties;
import edu.cornell.mannlib.vitro.webapp.startup.StartupStatus;
import edu.cornell.mannlib.vitro.webapp.utils.threads.VitroBackgroundThread;
+import org.apache.http.util.EntityUtils;
/**
* Spin off a thread that will try to connect to Solr.
@@ -207,7 +209,7 @@ public class SolrSmokeTest implements ServletContextListener {
private static final long SLEEP_INTERVAL = 20000; // 20 seconds
private final URL solrUrl;
- private final HttpClient httpClient = new DefaultHttpClient();
+ private final HttpClient httpClient = HttpClientFactory.getHttpClient();
private int statusCode;
@@ -238,8 +240,12 @@ public class SolrSmokeTest implements ServletContextListener {
HttpGet method = new HttpGet(solrUrl.toExternalForm());
SolrSmokeTest.log.debug("Trying to connect to Solr");
HttpResponse response = httpClient.execute(method);
- statusCode = response.getStatusLine().getStatusCode();
- SolrSmokeTest.log.debug("HTTP status was " + statusCode);
+ try {
+ statusCode = response.getStatusLine().getStatusCode();
+ SolrSmokeTest.log.debug("HTTP status was " + statusCode);
+ } finally {
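+ // Consume the entity so the shared client's connection can be reused.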
+ EntityUtils.consume(response.getEntity());
+ }
} catch (SocketTimeoutException e) {
// Catch the exception so we can retry this.
// Save the status so we know why we failed.
@@ -274,7 +280,7 @@ public class SolrSmokeTest implements ServletContextListener {
*/
private static class SolrPinger {
private final URL solrUrl;
- private final HttpClient httpClient = new DefaultHttpClient();
+ private final HttpClient httpClient = HttpClientFactory.getHttpClient();
public SolrPinger(URL solrUrl) {
this.solrUrl = solrUrl;
@@ -286,10 +292,14 @@ public class SolrSmokeTest implements ServletContextListener {
+ "/admin/ping");
SolrSmokeTest.log.debug("Trying to ping Solr");
HttpResponse response = httpClient.execute(method);
- SolrSmokeTest.log.debug("Finished pinging Solr");
- int statusCode = response.getStatusLine().getStatusCode();
- if (statusCode != HttpStatus.SC_OK) {
- throw new SolrProblemException(statusCode);
+ try {
+ SolrSmokeTest.log.debug("Finished pinging Solr");
+ int statusCode = response.getStatusLine().getStatusCode();
+ if (statusCode != HttpStatus.SC_OK) {
+ throw new SolrProblemException(statusCode);
+ }
+ } finally {
+ EntityUtils.consume(response.getEntity());
}
} catch (IOException e) {
throw new SolrProblemException(e);
diff --git a/webapp/src/edu/ucsf/vitro/opensocial/OpenSocialSmokeTests.java b/webapp/src/edu/ucsf/vitro/opensocial/OpenSocialSmokeTests.java
index 08d050ca5..ec0223a24 100644
--- a/webapp/src/edu/ucsf/vitro/opensocial/OpenSocialSmokeTests.java
+++ b/webapp/src/edu/ucsf/vitro/opensocial/OpenSocialSmokeTests.java
@@ -18,12 +18,14 @@ import javax.servlet.ServletContext;
import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;
+import edu.cornell.mannlib.vitro.webapp.utils.http.HttpClientFactory;
import org.apache.commons.dbcp.BasicDataSource;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
+import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultHttpClient;
@@ -349,7 +351,7 @@ public class OpenSocialSmokeTests implements ServletContextListener {
private final String shindigBaseUrl;
private final String shindigTestUrl;
- private final DefaultHttpClient httpClient = new DefaultHttpClient();
+ private final HttpClient httpClient = HttpClientFactory.getHttpClient();
private int statusCode = Integer.MIN_VALUE;