NIHVIVO-3796 ability to specify different read and update endpoints

Author: stellamit
Date:   2012-06-20 15:46:36 +00:00
parent ec77d1075b
commit 0a109f1656
2 changed files with 63 additions and 28 deletions

Changed file: RDFServiceSparql.java

@@ -50,17 +50,20 @@ import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.sdb.ListeningGraph;
 /*
  * API to write, read, and update Vitro's RDF store, with support
  * to allow listening, logging and auditing.
  *
  */
 public class RDFServiceSparql extends RDFServiceImpl implements RDFService {
     private static final Log log = LogFactory.getLog(RDFServiceImpl.class);
-    private String endpointURI;
-    private Repository repository;
+    private String readEndpointURI;
+    private String updateEndpointURI;
+    private Repository readRepository;
+    private Repository updateRepository;
     /**
      * Returns an RDFService for a remote repository
-     * @param String - URI of the SPARQL endpoint for the knowledge base
+     * @param String - URI of the read SPARQL endpoint for the knowledge base
+     * @param String - URI of the update SPARQL endpoint for the knowledge base
      * @param String - URI of the default write graph within the knowledge base.
      *     this is the graph that will be written to when a graph
      *     is not explicitly specified.
@@ -68,25 +71,40 @@ public class RDFServiceSparql extends RDFServiceImpl implements RDFService {
      * The default read graph is the union of all graphs in the
      * knowledge base
      */
-    public RDFServiceSparql(String endpointURI, String defaultWriteGraphURI) {
-        this.endpointURI = endpointURI;
-        this.repository = new HTTPRepository(endpointURI);
+    public RDFServiceSparql(String readEndpointURI, String updateEndpointURI, String defaultWriteGraphURI) {
+        this.readEndpointURI = readEndpointURI;
+        this.updateEndpointURI = updateEndpointURI;
+        this.readRepository = new HTTPRepository(readEndpointURI);
+        this.updateRepository = new HTTPRepository(updateEndpointURI);
+    }
+    /**
+     * Returns an RDFService for a remote repository
+     * @param String - URI of the read SPARQL endpoint for the knowledge base
+     * @param String - URI of the update SPARQL endpoint for the knowledge base
+     *
+     * The default read graph is the union of all graphs in the
+     * knowledge base
+     */
+    public RDFServiceSparql(String readEndpointURI, String updateEndpointURI) {
+        this(readEndpointURI, updateEndpointURI, null);
     }
     /**
      * Returns an RDFService for a remote repository
-     * @param String - URI of the SPARQL endpoint for the knowledge base
+     * @param String - URI of the read and update SPARQL endpoint for the knowledge base
      *
      * The default read graph is the union of all graphs in the
      * knowledge base
      */
     public RDFServiceSparql(String endpointURI) {
-        this(endpointURI, null);
+        this(endpointURI, endpointURI, null);
     }
     public void close() {
         try {
-            this.repository.shutDown();
+            this.readRepository.shutDown();
+            this.updateRepository.shutDown();
         } catch (RepositoryException re) {
             log.error(re, re);
         }
@@ -175,7 +193,7 @@ public class RDFServiceSparql extends RDFServiceImpl implements RDFService {
         Model model = ModelFactory.createDefaultModel();
         Query query = QueryFactory.create(queryStr);
-        QueryExecution qe = QueryExecutionFactory.sparqlService(endpointURI, query);
+        QueryExecution qe = QueryExecutionFactory.sparqlService(readEndpointURI, query);
         try {
             qe.execConstruct(model);
@@ -205,7 +223,7 @@ public class RDFServiceSparql extends RDFServiceImpl implements RDFService {
         Model model = ModelFactory.createDefaultModel();
         Query query = QueryFactory.create(queryStr);
-        QueryExecution qe = QueryExecutionFactory.sparqlService(endpointURI, query);
+        QueryExecution qe = QueryExecutionFactory.sparqlService(readEndpointURI, query);
         try {
             qe.execDescribe(model);
@@ -233,7 +251,7 @@ public class RDFServiceSparql extends RDFServiceImpl implements RDFService {
     public InputStream sparqlSelectQuery(String queryStr, RDFService.ResultFormat resultFormat) throws RDFServiceException {
         Query query = QueryFactory.create(queryStr);
-        QueryExecution qe = QueryExecutionFactory.sparqlService(endpointURI, query);
+        QueryExecution qe = QueryExecutionFactory.sparqlService(readEndpointURI, query);
         try {
             ResultSet resultSet = qe.execSelect();
@@ -275,7 +293,7 @@ public class RDFServiceSparql extends RDFServiceImpl implements RDFService {
     public boolean sparqlAskQuery(String queryStr) throws RDFServiceException {
         Query query = QueryFactory.create(queryStr);
-        QueryExecution qe = QueryExecutionFactory.sparqlService(endpointURI, query);
+        QueryExecution qe = QueryExecutionFactory.sparqlService(readEndpointURI, query);
         try {
             return qe.execAsk();
@@ -298,7 +316,7 @@ public class RDFServiceSparql extends RDFServiceImpl implements RDFService {
         List<String> graphNodeList = new ArrayList<String>();
         try {
-            RepositoryConnection conn = getConnection();
+            RepositoryConnection conn = getWriteConnection();
             try {
                 RepositoryResult<Resource> conResult = conn.getContextIDs();
                 while (conResult.hasNext()) {
@@ -370,21 +388,25 @@ public class RDFServiceSparql extends RDFServiceImpl implements RDFService {
     //~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
     // Non-override methods below
     //~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-    protected String getEndpointURI() {
-        return endpointURI;
+    protected String getReadEndpointURI() {
+        return readEndpointURI;
     }
-    protected RepositoryConnection getConnection() {
+    protected String getUpdateEndpointURI() {
+        return updateEndpointURI;
+    }
+    protected RepositoryConnection getWriteConnection() {
         try {
-            return this.repository.getConnection();
+            return this.updateRepository.getConnection();
         } catch (RepositoryException e) {
             throw new RuntimeException(e);
         }
     }
     protected void executeUpdate(String updateString) {
         try {
-            RepositoryConnection conn = getConnection();
+            RepositoryConnection conn = getWriteConnection();
             try {
                 Update u = conn.prepareUpdate(QueryLanguage.SPARQL, updateString);
                 u.execute();
@@ -499,7 +521,7 @@ public class RDFServiceSparql extends RDFServiceImpl implements RDFService {
     protected boolean sparqlSelectQueryHasResults(String queryStr) throws RDFServiceException {
         Query query = QueryFactory.create(queryStr);
-        QueryExecution qe = QueryExecutionFactory.sparqlService(endpointURI, query);
+        QueryExecution qe = QueryExecutionFactory.sparqlService(readEndpointURI, query);
         try {
             ResultSet resultSet = qe.execSelect();
@@ -669,5 +691,4 @@ public class RDFServiceSparql extends RDFServiceImpl implements RDFService {
                 getSerializationFormatString(modelChange.getSerializationFormat()));
         return model;
     }
 }
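
A minimal usage sketch of the constructors above (not part of this commit; the endpoint URIs are placeholders, and imports from the edu.cornell.mannlib.vitro.webapp.rdfservice packages are elided):

    // Same endpoint for reads and updates: the one-argument constructor now
    // delegates to the three-argument form with the URI doubled up.
    RDFService single = new RDFServiceSparql("http://example.org/sparql");

    // Separate read and update endpoints.
    RDFService split = new RDFServiceSparql(
            "http://example.org/sparql/query",    // read endpoint
            "http://example.org/sparql/update");  // update endpoint
    try {
        // Query methods such as sparqlAskQuery are executed against the read endpoint.
        boolean nonEmpty = split.sparqlAskQuery("ASK { ?s ?p ?o }");
    } catch (RDFServiceException e) {
        // query against the read endpoint failed
    } finally {
        split.close();   // shuts down both the read and update repositories
        single.close();
    }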

Changed file: the RDFService setup listener (implements javax.servlet.ServletContextListener)

@@ -22,7 +22,6 @@ import com.hp.hpl.jena.sdb.util.StoreUtils;
 import edu.cornell.mannlib.vitro.webapp.config.ConfigurationProperties;
 import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
 import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceFactory;
-import edu.cornell.mannlib.vitro.webapp.rdfservice.filter.SameAsFilteringRDFServiceFactory;
 import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.RDFServiceFactorySingle;
 import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.RDFServiceUtils;
 import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.sdb.RDFServiceSDB;
@@ -45,8 +44,10 @@ implements javax.servlet.ServletContextListener {
         try {
             String endpointURI = ConfigurationProperties.getBean(sce).getProperty(
                     "VitroConnection.DataSource.endpointURI");
+            String updateEndpointURI = ConfigurationProperties.getBean(sce).getProperty(
+                    "VitroConnection.DataSource.updateEndpointURI");
             if (endpointURI != null) {
-                useEndpoint(endpointURI, ctx);
+                useEndpoint(endpointURI, updateEndpointURI, ctx);
             } else {
                 useSDB(ctx, ss);
             }
@@ -60,11 +61,24 @@ implements javax.servlet.ServletContextListener {
         }
     }
-    private void useEndpoint(String endpointURI, ServletContext ctx) {
-        RDFService rdfService = new RDFServiceSparql(endpointURI);
+    private void useEndpoint(String endpointURI, String updateEndpointURI, ServletContext ctx) {
+        RDFService rdfService = null;
+        if (updateEndpointURI == null) {
+            rdfService = new RDFServiceSparql(endpointURI);
+        } else {
+            rdfService = new RDFServiceSparql(endpointURI, updateEndpointURI);
+        }
         RDFServiceFactory rdfServiceFactory = new RDFServiceFactorySingle(rdfService);
         RDFServiceUtils.setRDFServiceFactory(ctx, rdfServiceFactory);
-        log.info("Using endpoint at " + endpointURI);
+        if (updateEndpointURI != null) {
+            log.info("Using read endpoint at " + endpointURI);
+            log.info("Using update endpoint at " + updateEndpointURI);
+        } else {
+            log.info("Using endpoint at " + endpointURI);
+        }
     }
     private void useSDB(ServletContext ctx, StartupStatus ss) throws SQLException {
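
With this change, a deployment can split reads and writes across two SPARQL services by setting both properties read by the listener above; when VitroConnection.DataSource.updateEndpointURI is absent, the single-endpoint behavior is kept. A sketch of the relevant configuration (values are placeholders; the name of the Vitro configuration properties file, e.g. deploy.properties, depends on the Vitro version):

    # SPARQL endpoint used for read (query) operations
    VitroConnection.DataSource.endpointURI = http://example.org/sparql/query
    # Optional: SPARQL endpoint used for updates; omit to send updates to the endpoint above
    VitroConnection.DataSource.updateEndpointURI = http://example.org/sparql/update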