VIVO-1026 Back off to an older version of HttpClient

OWLAPI 4.0.1 uses HttpClient 4.2.5, and we were getting a class-version conflict in some contexts.
Jim Blake committed 2015-05-01 15:37:49 -04:00
parent 0b79dfdf01
commit 2058fbf2a6
13 changed files with 101 additions and 124 deletions
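For context, every file in this commit moves from the HttpClient 4.3-style fluent API back to the 4.2-era API that the OWLAPI dependency brings in. A minimal sketch of the older pattern the diffs converge on (the endpoint URL here is illustrative, not taken from the repository):

import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.impl.conn.PoolingClientConnectionManager;
import org.apache.http.util.EntityUtils;

public class HttpClient42Sketch {
    public static void main(String[] args) throws Exception {
        // 4.2 style: a pooled connection manager wired into DefaultHttpClient,
        // instead of HttpClients.custom()...build() from 4.3.
        PoolingClientConnectionManager cm = new PoolingClientConnectionManager();
        cm.setDefaultMaxPerRoute(50);
        HttpClient httpClient = new DefaultHttpClient(cm);

        // Responses are plain HttpResponse; there is no CloseableHttpResponse
        // to close, so the try/finally blocks in the old code go away.
        HttpResponse response = httpClient.execute(
                new HttpGet("http://example.org/sparql"));
        int statusCode = response.getStatusLine().getStatusCode();
        if (statusCode > 399) {
            throw new RuntimeException("request failed: " + statusCode);
        }
        // Consume the entity so the pooled connection is released for reuse.
        EntityUtils.consume(response.getEntity());

        cm.shutdown();
    }
}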

SparqlGraph.java

@@ -9,12 +9,12 @@ import java.util.List;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.http.HttpResponse;
+import org.apache.http.client.HttpClient;
 import org.apache.http.client.entity.UrlEncodedFormEntity;
-import org.apache.http.client.methods.CloseableHttpResponse;
 import org.apache.http.client.methods.HttpPost;
-import org.apache.http.impl.client.CloseableHttpClient;
-import org.apache.http.impl.client.HttpClients;
-import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
+import org.apache.http.impl.client.DefaultHttpClient;
+import org.apache.http.impl.conn.PoolingClientConnectionManager;
 import org.apache.http.message.BasicNameValuePair;
 import com.hp.hpl.jena.graph.BulkUpdateHandler;
@@ -49,7 +49,7 @@ public class SparqlGraph implements GraphWithPerform {
     private String endpointURI;
     private String graphURI;
-    private CloseableHttpClient httpClient;
+    private HttpClient httpClient;
     private static final Log log = LogFactory.getLog(SparqlGraph.class);
     private BulkUpdateHandler bulkUpdateHandler;
@@ -73,9 +73,9 @@ public class SparqlGraph implements GraphWithPerform {
         this.endpointURI = endpointURI;
         this.graphURI = graphURI;
-        PoolingHttpClientConnectionManager cm = new PoolingHttpClientConnectionManager();
+        PoolingClientConnectionManager cm = new PoolingClientConnectionManager();
         cm.setDefaultMaxPerRoute(50);
-        this.httpClient = HttpClients.custom().setConnectionManager(cm).build();
+        this.httpClient = new DefaultHttpClient(cm);
     }
     public String getEndpointURI() {
@@ -97,17 +97,13 @@ public class SparqlGraph implements GraphWithPerform {
             meth.addHeader("Content-Type", "application/x-www-form-urlencoded");
             meth.setEntity(new UrlEncodedFormEntity(Arrays.asList(
                     new BasicNameValuePair("update", updateString))));
-            CloseableHttpResponse response = httpClient.execute(meth);
-            try {
-                int statusCode = response.getStatusLine().getStatusCode();
-                if (statusCode > 399) {
-                    log.error("response " + statusCode + " to update. \n");
-                    throw new RuntimeException("Unable to perform SPARQL UPDATE: \n"
-                            + updateString);
-                }
-            } finally {
-                response.close();
-            }
+            HttpResponse response = httpClient.execute(meth);
+            int statusCode = response.getStatusLine().getStatusCode();
+            if (statusCode > 399) {
+                log.error("response " + statusCode + " to update. \n");
+                throw new RuntimeException("Unable to perform SPARQL UPDATE: \n"
+                        + updateString);
+            }
         } catch (Exception e) {
             throw new RuntimeException("Unable to perform SPARQL UPDATE", e);
         }

RDFServiceSparql.java

@@ -18,14 +18,13 @@ import java.util.concurrent.ConcurrentLinkedQueue;
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.http.HttpResponse;
 import org.apache.http.client.entity.UrlEncodedFormEntity;
-import org.apache.http.client.methods.CloseableHttpResponse;
 import org.apache.http.client.methods.HttpGet;
 import org.apache.http.client.methods.HttpPost;
 import org.apache.http.client.utils.URIBuilder;
-import org.apache.http.impl.client.CloseableHttpClient;
-import org.apache.http.impl.client.HttpClients;
-import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
+import org.apache.http.impl.client.DefaultHttpClient;
+import org.apache.http.impl.conn.PoolingClientConnectionManager;
 import org.apache.http.message.BasicNameValuePair;
 import org.apache.jena.riot.RDFDataMgr;
@@ -70,7 +69,7 @@ public class RDFServiceSparql extends RDFServiceImpl implements RDFService {
     private static final Log log = LogFactory.getLog(RDFServiceImpl.class);
     protected String readEndpointURI;
     protected String updateEndpointURI;
-    protected CloseableHttpClient httpClient;
+    protected DefaultHttpClient httpClient;
     // the number of triples to be
     private static final int CHUNK_SIZE = 1000; // added/removed in a single
                                                 // SPARQL UPDATE
@@ -90,9 +89,9 @@ public class RDFServiceSparql extends RDFServiceImpl implements RDFService {
         this.readEndpointURI = readEndpointURI;
         this.updateEndpointURI = updateEndpointURI;
-        PoolingHttpClientConnectionManager cm = new PoolingHttpClientConnectionManager();
+        PoolingClientConnectionManager cm = new PoolingClientConnectionManager();
         cm.setDefaultMaxPerRoute(50);
-        this.httpClient = HttpClients.custom().setConnectionManager(cm).build();
+        this.httpClient = new DefaultHttpClient(cm);
         testConnection();
     }
@@ -287,41 +286,38 @@ public class RDFServiceSparql extends RDFServiceImpl implements RDFService {
         try {
             HttpGet meth = new HttpGet(new URIBuilder(readEndpointURI).addParameter("query", queryStr).build());
             meth.addHeader("Accept", "application/sparql-results+xml");
-            CloseableHttpResponse response = httpClient.execute(meth);
-            try {
-                int statusCode = response.getStatusLine().getStatusCode();
-                if (statusCode > 399) {
-                    log.error("response " + statusCode + " to query. \n");
-                    log.debug("update string: \n" + queryStr);
-                    throw new RDFServiceException("Unable to perform SPARQL UPDATE");
-                }
-                InputStream in = response.getEntity().getContent();
-                ResultSet resultSet = ResultSetFactory.fromXML(in);
-                ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
-                switch (resultFormat) {
-                case CSV:
-                    ResultSetFormatter.outputAsCSV(outputStream,resultSet);
-                    break;
-                case TEXT:
-                    ResultSetFormatter.out(outputStream,resultSet);
-                    break;
-                case JSON:
-                    ResultSetFormatter.outputAsJSON(outputStream, resultSet);
-                    break;
-                case XML:
-                    ResultSetFormatter.outputAsXML(outputStream, resultSet);
-                    break;
-                default:
-                    throw new RDFServiceException("unrecognized result format");
-                }
-                InputStream result = new ByteArrayInputStream(outputStream.toByteArray());
-                return result;
-            } finally {
-                response.close();
-            }
-        } catch (IOException ioe) {
+            HttpResponse response = httpClient.execute(meth);
+            int statusCode = response.getStatusLine().getStatusCode();
+            if (statusCode > 399) {
+                log.error("response " + statusCode + " to query. \n");
+                log.debug("update string: \n" + queryStr);
+                throw new RDFServiceException("Unable to perform SPARQL UPDATE");
+            }
+            try (InputStream in = response.getEntity().getContent()) {
+                ResultSet resultSet = ResultSetFactory.fromXML(in);
+                ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
+                switch (resultFormat) {
+                case CSV:
+                    ResultSetFormatter.outputAsCSV(outputStream, resultSet);
+                    break;
+                case TEXT:
+                    ResultSetFormatter.out(outputStream, resultSet);
+                    break;
+                case JSON:
+                    ResultSetFormatter.outputAsJSON(outputStream, resultSet);
+                    break;
+                case XML:
+                    ResultSetFormatter.outputAsXML(outputStream, resultSet);
+                    break;
+                default:
+                    throw new RDFServiceException("unrecognized result format");
+                }
+                InputStream result = new ByteArrayInputStream(
+                        outputStream.toByteArray());
+                return result;
+            }
+        } catch (IOException ioe) {
             throw new RuntimeException(ioe);
         } catch (URISyntaxException e) {
             throw new RuntimeException(e);
@@ -463,17 +459,13 @@ public class RDFServiceSparql extends RDFServiceImpl implements RDFService {
             HttpPost meth = new HttpPost(updateEndpointURI);
             meth.addHeader("Content-Type", "application/x-www-form-urlencoded");
             meth.setEntity(new UrlEncodedFormEntity(Arrays.asList(new BasicNameValuePair("update", updateString))));
-            CloseableHttpResponse response = httpClient.execute(meth);
-            try {
-                int statusCode = response.getStatusLine().getStatusCode();
-                if (statusCode > 399) {
-                    log.error("response " + response.getStatusLine() + " to update. \n");
-                    //log.debug("update string: \n" + updateString);
-                    throw new RDFServiceException("Unable to perform SPARQL UPDATE");
-                }
-            } finally {
-                response.close();
-            }
+            HttpResponse response = httpClient.execute(meth);
+            int statusCode = response.getStatusLine().getStatusCode();
+            if (statusCode > 399) {
+                log.error("response " + response.getStatusLine() + " to update. \n");
+                //log.debug("update string: \n" + updateString);
+                throw new RDFServiceException("Unable to perform SPARQL UPDATE");
+            }
         } catch (Exception e) {
             throw new RDFServiceException("Unable to perform change set update", e);
         }
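
The query hunk above keeps the Jena post-processing unchanged: the endpoint's SPARQL XML result set is parsed with ResultSetFactory.fromXML and then re-serialized by ResultSetFormatter in the requested format. A self-contained sketch of just that conversion step, assuming a small hard-coded XML result document in place of the HTTP response body:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;

import com.hp.hpl.jena.query.ResultSet;
import com.hp.hpl.jena.query.ResultSetFactory;
import com.hp.hpl.jena.query.ResultSetFormatter;

public class ResultFormatSketch {
    // Stand-in for response.getEntity().getContent(): one binding for ?s.
    private static final String XML_RESULTS =
            "<?xml version=\"1.0\"?>"
            + "<sparql xmlns=\"http://www.w3.org/2005/sparql-results#\">"
            + "<head><variable name=\"s\"/></head>"
            + "<results><result><binding name=\"s\">"
            + "<uri>http://example.org/a</uri>"
            + "</binding></result></results></sparql>";

    public static void main(String[] args) {
        ResultSet resultSet = ResultSetFactory.fromXML(
                new ByteArrayInputStream(XML_RESULTS.getBytes()));
        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
        // The same calls the switch statement above chooses between:
        // outputAsCSV, out (text), outputAsJSON, outputAsXML.
        ResultSetFormatter.outputAsJSON(outputStream, resultSet);
        System.out.println(outputStream.toString());
    }
}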

RDFServiceVirtuoso.java

@@ -5,18 +5,21 @@ package edu.cornell.mannlib.vitro.webapp.rdfservice.impl.virtuoso;
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.io.InputStream;
+import java.io.UnsupportedEncodingException;
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.http.HttpResponse;
 import org.apache.http.auth.AuthScope;
 import org.apache.http.auth.UsernamePasswordCredentials;
 import org.apache.http.client.CredentialsProvider;
-import org.apache.http.client.methods.CloseableHttpResponse;
 import org.apache.http.client.methods.HttpPost;
-import org.apache.http.client.protocol.HttpClientContext;
+import org.apache.http.client.protocol.ClientContext;
 import org.apache.http.entity.StringEntity;
 import org.apache.http.impl.client.BasicCredentialsProvider;
+import org.apache.http.protocol.BasicHttpContext;
+import org.apache.http.protocol.HttpContext;
 import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException;
 import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.sparql.RDFServiceSparql;
@@ -75,25 +78,22 @@ public class RDFServiceVirtuoso extends RDFServiceSparql {
         log.debug("UPDATE STRING: " + updateString);
         try {
-            CloseableHttpResponse response = httpClient.execute(
+            HttpResponse response = httpClient.execute(
                     createHttpRequest(updateString), createHttpContext());
-            try {
-                int statusCode = response.getStatusLine().getStatusCode();
-                if (statusCode > 399) {
-                    log.error("response " + response.getStatusLine()
-                            + " to update. \n");
+            int statusCode = response.getStatusLine().getStatusCode();
+            if (statusCode > 399) {
+                log.error("response " + response.getStatusLine()
+                        + " to update. \n");
-                    InputStream content = response.getEntity().getContent();
+                try (InputStream content = response.getEntity().getContent()) {
                     for (String line : IOUtils.readLines(content)) {
                         log.error("response-line >>" + line);
                     }
-                    throw new RDFServiceException(
-                            "Unable to perform SPARQL UPDATE: status code = "
-                                    + statusCode);
                 }
-            } finally {
-                response.close();
+                throw new RDFServiceException(
+                        "Unable to perform SPARQL UPDATE: status code = "
+                                + statusCode);
             }
         } catch (Exception e) {
             log.error("Failed to update: " + updateString, e);
@@ -113,7 +113,12 @@ public class RDFServiceVirtuoso extends RDFServiceSparql {
     private HttpPost createHttpRequest(String updateString) {
         HttpPost meth = new HttpPost(updateEndpointURI);
         meth.addHeader("Content-Type", "application/sparql-query");
-        meth.setEntity(new StringEntity(updateString, "UTF-8"));
+        try {
+            meth.setEntity(new StringEntity(updateString, "UTF-8"));
+        } catch (UnsupportedEncodingException e) {
+            // UTF-8 is unsupported?
+            throw new RuntimeException(e);
+        }
         return meth;
     }
@@ -121,13 +126,13 @@ public class RDFServiceVirtuoso extends RDFServiceSparql {
      * We need an HttpContext that will provide username and password in
      * response to a basic authentication challenge.
      */
-    private HttpClientContext createHttpContext() {
+    private HttpContext createHttpContext() {
         CredentialsProvider provider = new BasicCredentialsProvider();
         provider.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials(
                 username, password));
-        HttpClientContext context = HttpClientContext.create();
-        context.setCredentialsProvider(provider);
+        BasicHttpContext context = new BasicHttpContext();
+        context.setAttribute(ClientContext.CREDS_PROVIDER, provider);
         return context;
     }
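
The javadoc on createHttpContext() above states the goal: answer a basic-authentication challenge with a stored username and password. In the 4.2 API that is done by registering a CredentialsProvider on a BasicHttpContext under ClientContext.CREDS_PROVIDER and passing the context to execute(), as in this standalone sketch (the URL and credentials are placeholders, not values from the repository):

import org.apache.http.HttpResponse;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.protocol.ClientContext;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.protocol.BasicHttpContext;
import org.apache.http.protocol.HttpContext;

public class BasicAuthContextSketch {
    public static void main(String[] args) throws Exception {
        // Credentials the client may present if the server sends a 401 challenge.
        CredentialsProvider provider = new BasicCredentialsProvider();
        provider.setCredentials(AuthScope.ANY,
                new UsernamePasswordCredentials("dba", "password"));

        // 4.2 style: attach the provider to a plain HttpContext by attribute name;
        // 4.3+ would use HttpClientContext.setCredentialsProvider() instead.
        HttpContext context = new BasicHttpContext();
        context.setAttribute(ClientContext.CREDS_PROVIDER, provider);

        DefaultHttpClient httpClient = new DefaultHttpClient();
        HttpResponse response = httpClient.execute(
                new HttpGet("http://localhost:8890/sparql"), context);
        System.out.println(response.getStatusLine());
    }
}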

SolrSmokeTest.java

@@ -15,11 +15,11 @@ import javax.servlet.ServletContextListener;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.http.HttpException;
+import org.apache.http.HttpResponse;
 import org.apache.http.HttpStatus;
-import org.apache.http.client.methods.CloseableHttpResponse;
+import org.apache.http.client.HttpClient;
 import org.apache.http.client.methods.HttpGet;
-import org.apache.http.impl.client.CloseableHttpClient;
-import org.apache.http.impl.client.HttpClients;
+import org.apache.http.impl.client.DefaultHttpClient;
 import edu.cornell.mannlib.vitro.webapp.config.ConfigurationProperties;
 import edu.cornell.mannlib.vitro.webapp.startup.StartupStatus;
@@ -207,8 +207,7 @@ public class SolrSmokeTest implements ServletContextListener {
         private static final long SLEEP_INTERVAL = 20000; // 20 seconds
         private final URL solrUrl;
-        private final CloseableHttpClient httpClient = HttpClients
-                .createDefault();
+        private final HttpClient httpClient = new DefaultHttpClient();
         private int statusCode;
@@ -238,13 +237,9 @@ public class SolrSmokeTest implements ServletContextListener {
             try {
                 HttpGet method = new HttpGet(solrUrl.toExternalForm());
                 SolrSmokeTest.log.debug("Trying to connect to Solr");
-                CloseableHttpResponse response = httpClient.execute(method);
-                try {
-                    statusCode = response.getStatusLine().getStatusCode();
-                    SolrSmokeTest.log.debug("HTTP status was " + statusCode);
-                } finally {
-                    response.close();
-                }
+                HttpResponse response = httpClient.execute(method);
+                statusCode = response.getStatusLine().getStatusCode();
+                SolrSmokeTest.log.debug("HTTP status was " + statusCode);
             } catch (SocketTimeoutException e) {
                 // Catch the exception so we can retry this.
                 // Save the status so we know why we failed.
@@ -279,8 +274,7 @@ public class SolrSmokeTest implements ServletContextListener {
      */
     private static class SolrPinger {
         private final URL solrUrl;
-        private final CloseableHttpClient httpClient = HttpClients
-                .createDefault();
+        private final HttpClient httpClient = new DefaultHttpClient();
         public SolrPinger(URL solrUrl) {
             this.solrUrl = solrUrl;
@@ -291,15 +285,11 @@ public class SolrSmokeTest implements ServletContextListener {
                 HttpGet method = new HttpGet(solrUrl.toExternalForm()
                         + "/admin/ping");
                 SolrSmokeTest.log.debug("Trying to ping Solr");
-                CloseableHttpResponse response = httpClient.execute(method);
+                HttpResponse response = httpClient.execute(method);
                 SolrSmokeTest.log.debug("Finished pinging Solr");
-                try {
-                    int statusCode = response.getStatusLine().getStatusCode();
-                    if (statusCode != HttpStatus.SC_OK) {
-                        throw new SolrProblemException(statusCode);
-                    }
-                } finally {
-                    response.close();
+                int statusCode = response.getStatusLine().getStatusCode();
+                if (statusCode != HttpStatus.SC_OK) {
+                    throw new SolrProblemException(statusCode);
                 }
             } catch (IOException e) {
                 throw new SolrProblemException(e);

OpenSocialSmokeTests.java

@@ -22,11 +22,10 @@ import org.apache.commons.dbcp.BasicDataSource;
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.http.HttpResponse;
 import org.apache.http.HttpStatus;
-import org.apache.http.client.methods.CloseableHttpResponse;
 import org.apache.http.client.methods.HttpGet;
-import org.apache.http.impl.client.CloseableHttpClient;
-import org.apache.http.impl.client.HttpClients;
+import org.apache.http.impl.client.DefaultHttpClient;
 import edu.cornell.mannlib.vitro.webapp.config.ConfigurationProperties;
 import edu.cornell.mannlib.vitro.webapp.startup.StartupStatus;
@@ -350,8 +349,7 @@ public class OpenSocialSmokeTests implements ServletContextListener {
         private final String shindigBaseUrl;
         private final String shindigTestUrl;
-        private final CloseableHttpClient httpClient = HttpClients
-                .createDefault();
+        private final DefaultHttpClient httpClient = new DefaultHttpClient();
         private int statusCode = Integer.MIN_VALUE;
@@ -383,13 +381,9 @@ public class OpenSocialSmokeTests implements ServletContextListener {
             HttpGet method = new HttpGet(shindigTestUrl);
             try {
                 log.debug("Trying to connect to Shindig");
-                CloseableHttpResponse response = httpClient.execute(method);
-                try {
-                    statusCode = response.getStatusLine().getStatusCode();
-                    log.debug("HTTP status was " + statusCode);
-                } finally {
-                    response.close();
-                }
+                HttpResponse response = httpClient.execute(method);
+                statusCode = response.getStatusLine().getStatusCode();
+                log.debug("HTTP status was " + statusCode);
             } catch (SocketTimeoutException e) {
                 // Catch the exception so we can retry this.
                 // Save the status so we know why we failed.