[VIVO-1122] Fix to the way resources are consumed in the SPARQL clients, so that they don't leak / tie up HTTP connections.

This commit is contained in:
grahamtriggs 2015-11-03 01:19:38 +00:00
parent 046d445639
commit 817e90716c
4 changed files with 122 additions and 74 deletions

View file

@@ -7,6 +7,7 @@ import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import edu.cornell.mannlib.vitro.webapp.utils.http.HttpClientFactory;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.http.HttpResponse;
@@ -44,6 +45,7 @@ import com.hp.hpl.jena.util.iterator.SingletonIterator;
import com.hp.hpl.jena.util.iterator.WrappedIterator;
import edu.cornell.mannlib.vitro.webapp.utils.logging.ToString;
import org.apache.http.util.EntityUtils;
public class SparqlGraph implements GraphWithPerform {
@@ -73,9 +75,7 @@ public class SparqlGraph implements GraphWithPerform {
this.endpointURI = endpointURI;
this.graphURI = graphURI;
PoolingClientConnectionManager cm = new PoolingClientConnectionManager();
cm.setDefaultMaxPerRoute(50);
this.httpClient = new DefaultHttpClient(cm);
this.httpClient = HttpClientFactory.getHttpClient();
}
public String getEndpointURI() {
@@ -91,22 +91,28 @@ public class SparqlGraph implements GraphWithPerform {
performAdd(arg0);
}
public void executeUpdate(String updateString) {
public void executeUpdate(String updateString) {
HttpPost meth = new HttpPost(endpointURI);
try {
HttpPost meth = new HttpPost(endpointURI);
meth.addHeader("Content-Type", "application/x-www-form-urlencoded");
meth.setEntity(new UrlEncodedFormEntity(Arrays.asList(
new BasicNameValuePair("update", updateString))));
HttpResponse response = httpClient.execute(meth);
int statusCode = response.getStatusLine().getStatusCode();
if (statusCode > 399) {
log.error("response " + statusCode + " to update. \n");
throw new RuntimeException("Unable to perform SPARQL UPDATE: \n"
+ updateString);
try {
int statusCode = response.getStatusLine().getStatusCode();
if (statusCode > 399) {
log.error("response " + statusCode + " to update. \n");
throw new RuntimeException("Unable to perform SPARQL UPDATE: \n"
+ updateString);
}
} finally {
EntityUtils.consume(response.getEntity());
}
} catch (Exception e) {
throw new RuntimeException("Unable to perform SPARQL UPDATE", e);
}
} finally {
meth.abort();
}
}
@Override

View file

@@ -16,10 +16,12 @@ import java.util.List;
import java.util.concurrent.ConcurrentLinkedQueue;
import edu.cornell.mannlib.vitro.webapp.rdfservice.ResultSetConsumer;
import edu.cornell.mannlib.vitro.webapp.utils.http.HttpClientFactory;
import org.apache.commons.io.IOUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
@@ -27,6 +29,7 @@ import org.apache.http.client.utils.URIBuilder;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.impl.conn.PoolingClientConnectionManager;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.util.EntityUtils;
import org.apache.jena.riot.RDFDataMgr;
import com.hp.hpl.jena.graph.Triple;
@@ -70,12 +73,13 @@ public class RDFServiceSparql extends RDFServiceImpl implements RDFService {
private static final Log log = LogFactory.getLog(RDFServiceImpl.class);
protected String readEndpointURI;
protected String updateEndpointURI;
protected DefaultHttpClient httpClient;
// the number of triples to be
// the number of triples to be
private static final int CHUNK_SIZE = 1000; // added/removed in a single
// SPARQL UPDATE
/**
protected HttpClient httpClient;
/**
* Returns an RDFService for a remote repository
* @param String - URI of the read SPARQL endpoint for the knowledge base
* @param String - URI of the update SPARQL endpoint for the knowledge base
@@ -89,11 +93,8 @@ public class RDFServiceSparql extends RDFServiceImpl implements RDFService {
public RDFServiceSparql(String readEndpointURI, String updateEndpointURI, String defaultWriteGraphURI) {
this.readEndpointURI = readEndpointURI;
this.updateEndpointURI = updateEndpointURI;
httpClient = HttpClientFactory.getHttpClient();
PoolingClientConnectionManager cm = new PoolingClientConnectionManager();
cm.setDefaultMaxPerRoute(50);
this.httpClient = new DefaultHttpClient(cm);
testConnection();
}
@@ -302,35 +303,39 @@ public class RDFServiceSparql extends RDFServiceImpl implements RDFService {
HttpGet meth = new HttpGet(new URIBuilder(readEndpointURI).addParameter("query", queryStr).build());
meth.addHeader("Accept", "application/sparql-results+xml");
HttpResponse response = httpClient.execute(meth);
int statusCode = response.getStatusLine().getStatusCode();
if (statusCode > 399) {
log.error("response " + statusCode + " to query. \n");
log.debug("update string: \n" + queryStr);
throw new RDFServiceException("Unable to perform SPARQL UPDATE");
}
try (InputStream in = response.getEntity().getContent()) {
ResultSet resultSet = ResultSetFactory.fromXML(in);
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
switch (resultFormat) {
case CSV:
ResultSetFormatter.outputAsCSV(outputStream, resultSet);
break;
case TEXT:
ResultSetFormatter.out(outputStream, resultSet);
break;
case JSON:
ResultSetFormatter.outputAsJSON(outputStream, resultSet);
break;
case XML:
ResultSetFormatter.outputAsXML(outputStream, resultSet);
break;
default:
throw new RDFServiceException("unrecognized result format");
try {
int statusCode = response.getStatusLine().getStatusCode();
if (statusCode > 399) {
log.error("response " + statusCode + " to query. \n");
log.debug("update string: \n" + queryStr);
throw new RDFServiceException("Unable to perform SPARQL UPDATE");
}
InputStream result = new ByteArrayInputStream(
outputStream.toByteArray());
return result;
try (InputStream in = response.getEntity().getContent()) {
ResultSet resultSet = ResultSetFactory.fromXML(in);
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
switch (resultFormat) {
case CSV:
ResultSetFormatter.outputAsCSV(outputStream, resultSet);
break;
case TEXT:
ResultSetFormatter.out(outputStream, resultSet);
break;
case JSON:
ResultSetFormatter.outputAsJSON(outputStream, resultSet);
break;
case XML:
ResultSetFormatter.outputAsXML(outputStream, resultSet);
break;
default:
throw new RDFServiceException("unrecognized result format");
}
InputStream result = new ByteArrayInputStream(
outputStream.toByteArray());
return result;
}
} finally {
EntityUtils.consume(response.getEntity());
}
} catch (IOException ioe) {
throw new RuntimeException(ioe);
@@ -347,15 +352,19 @@ public class RDFServiceSparql extends RDFServiceImpl implements RDFService {
HttpGet meth = new HttpGet(new URIBuilder(readEndpointURI).addParameter("query", queryStr).build());
meth.addHeader("Accept", "application/sparql-results+xml");
HttpResponse response = httpClient.execute(meth);
int statusCode = response.getStatusLine().getStatusCode();
if (statusCode > 399) {
log.error("response " + statusCode + " to query. \n");
log.debug("update string: \n" + queryStr);
throw new RDFServiceException("Unable to perform SPARQL UPDATE");
}
try {
int statusCode = response.getStatusLine().getStatusCode();
if (statusCode > 399) {
log.error("response " + statusCode + " to query. \n");
log.debug("update string: \n" + queryStr);
throw new RDFServiceException("Unable to perform SPARQL UPDATE");
}
try (InputStream in = response.getEntity().getContent()) {
consumer.processResultSet(ResultSetFactory.fromXML(in));
try (InputStream in = response.getEntity().getContent()) {
consumer.processResultSet(ResultSetFactory.fromXML(in));
}
} finally {
EntityUtils.consume(response.getEntity());
}
} catch (IOException ioe) {
throw new RuntimeException(ioe);
@@ -498,13 +507,17 @@ public class RDFServiceSparql extends RDFServiceImpl implements RDFService {
HttpPost meth = new HttpPost(updateEndpointURI);
meth.addHeader("Content-Type", "application/x-www-form-urlencoded");
meth.setEntity(new UrlEncodedFormEntity(Arrays.asList(new BasicNameValuePair("update", updateString))));
HttpResponse response = httpClient.execute(meth);
int statusCode = response.getStatusLine().getStatusCode();
if (statusCode > 399) {
log.error("response " + response.getStatusLine() + " to update. \n");
//log.debug("update string: \n" + updateString);
throw new RDFServiceException("Unable to perform SPARQL UPDATE");
}
HttpResponse response = httpClient.execute(meth);
try {
int statusCode = response.getStatusLine().getStatusCode();
if (statusCode > 399) {
log.error("response " + response.getStatusLine() + " to update. \n");
//log.debug("update string: \n" + updateString);
throw new RDFServiceException("Unable to perform SPARQL UPDATE");
}
} finally {
EntityUtils.consume(response.getEntity());
}
} catch (Exception e) {
throw new RDFServiceException("Unable to perform change set update", e);
}

View file

@@ -7,6 +7,7 @@ import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import org.apache.commons.httpclient.HttpMethod;
import org.apache.commons.io.IOUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -23,6 +24,7 @@ import org.apache.http.protocol.HttpContext;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException;
import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.sparql.RDFServiceSparql;
import org.apache.http.util.EntityUtils;
/**
* For now, at least, it is just like an RDFServiceSparql except:
@@ -78,22 +80,27 @@ public class RDFServiceVirtuoso extends RDFServiceSparql {
log.debug("UPDATE STRING: " + updateString);
try {
HttpPost request = createHttpRequest(updateString);
HttpResponse response = httpClient.execute(
createHttpRequest(updateString), createHttpContext());
int statusCode = response.getStatusLine().getStatusCode();
if (statusCode > 399) {
log.error("response " + response.getStatusLine()
+ " to update. \n");
request, createHttpContext());
try {
int statusCode = response.getStatusLine().getStatusCode();
if (statusCode > 399) {
log.error("response " + response.getStatusLine()
+ " to update. \n");
try (InputStream content = response.getEntity().getContent()) {
for (String line : IOUtils.readLines(content)) {
log.error("response-line >>" + line);
try (InputStream content = response.getEntity().getContent()) {
for (String line : IOUtils.readLines(content)) {
log.error("response-line >>" + line);
}
}
}
throw new RDFServiceException(
"Unable to perform SPARQL UPDATE: status code = "
+ statusCode);
throw new RDFServiceException(
"Unable to perform SPARQL UPDATE: status code = "
+ statusCode);
}
} finally {
EntityUtils.consume(response.getEntity());
}
} catch (Exception e) {
log.error("Failed to update: " + updateString, e);

View file

@@ -0,0 +1,22 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.utils.http;

import org.apache.http.client.HttpClient;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.impl.conn.PoolingClientConnectionManager;

/**
 * Supplies a single shared, connection-pooled {@link HttpClient}, so SPARQL
 * clients reuse HTTP connections instead of each creating (and tying up)
 * their own client and connection manager.
 *
 * <p>NOTE(review): the pool limits below (50 per route, 300 total) appear to
 * be tuning choices for the SPARQL endpoints; confirm against deployment
 * requirements before changing them.
 */
public final class HttpClientFactory {
    /** The one client shared by all callers; backed by a pooling manager. */
    private static final DefaultHttpClient httpClient;

    static {
        PoolingClientConnectionManager cm = new PoolingClientConnectionManager();
        cm.setDefaultMaxPerRoute(50); // max simultaneous connections per endpoint host
        cm.setMaxTotal(300);          // max simultaneous connections across all hosts
        httpClient = new DefaultHttpClient(cm);
    }

    /** Utility holder: not instantiable. */
    private HttpClientFactory() {
    }

    /**
     * @return the shared, pooled client (never null); callers must consume or
     *         abort each response entity so its connection returns to the pool
     */
    public static HttpClient getHttpClient() {
        return httpClient;
    }
}