NIHVIVO-3642 wiring in RDFService; cleanup of startup listeners remains to be done

This commit is contained in:
brianjlowe 2012-06-06 21:27:02 +00:00
parent 1bd242a0e1
commit 5d3ca126d7
18 changed files with 951 additions and 275 deletions

View file

@ -42,16 +42,20 @@ import edu.cornell.mannlib.vitro.webapp.beans.ObjectPropertyStatementImpl;
import edu.cornell.mannlib.vitro.webapp.dao.ObjectPropertyStatementDao;
import edu.cornell.mannlib.vitro.webapp.dao.VitroVocabulary;
import edu.cornell.mannlib.vitro.webapp.dao.jena.event.IndividualUpdateEvent;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
public class ObjectPropertyStatementDaoJena extends JenaBaseDao implements ObjectPropertyStatementDao {
private static final Log log = LogFactory.getLog(ObjectPropertyStatementDaoJena.class);
private DatasetWrapperFactory dwf;
private RDFService rdfService;
public ObjectPropertyStatementDaoJena(DatasetWrapperFactory dwf,
public ObjectPropertyStatementDaoJena(RDFService rdfService,
DatasetWrapperFactory dwf,
WebappDaoFactoryJena wadf) {
super(wadf);
this.rdfService = rdfService;
this.dwf = dwf;
}
@ -351,38 +355,27 @@ public class ObjectPropertyStatementDaoJena extends JenaBaseDao implements Objec
log.debug("CONSTRUCT query string for object property " +
propertyUri + ": " + queryString);
Query query = null;
try {
query = QueryFactory.create(queryString, Syntax.syntaxARQ);
} catch(Throwable th){
log.error("Could not create CONSTRUCT SPARQL query for query " +
"string. " + th.getMessage());
log.error(queryString);
return constructedModel;
}
queryString = queryString.replace("?subject", "<" + subjectUri + ">");
queryString = queryString.replace("?property", "<" + propertyUri + ">");
QuerySolutionMap initialBindings = new QuerySolutionMap();
initialBindings.add(
"subject", ResourceFactory.createResource(subjectUri));
initialBindings.add(
"property", ResourceFactory.createResource(propertyUri));
// we no longer need this query object, but we might want to do this
// query parse step to improve debugging, depending on the error returned
// through the RDF API
// try {
// QueryFactory.create(queryString, Syntax.syntaxARQ);
// } catch(Throwable th){
// log.error("Could not create CONSTRUCT SPARQL query for query " +
// "string. " + th.getMessage());
// log.error(queryString);
// return constructedModel;
// }
DatasetWrapper w = dwf.getDatasetWrapper();
Dataset dataset = w.getDataset();
dataset.getLock().enterCriticalSection(Lock.READ);
QueryExecution qe = null;
try {
qe = QueryExecutionFactory.create(
query, dataset, initialBindings);
qe.execConstruct(constructedModel);
constructedModel.read(
rdfService.sparqlConstructQuery(
queryString, RDFService.ModelSerializationFormat.N3), null, "N3");
} catch (Exception e) {
log.error("Error getting constructed model for subject " + subjectUri + " and property " + propertyUri);
} finally {
if (qe != null) {
qe.close();
}
dataset.getLock().leaveCriticalSection();
w.close();
}
}

View file

@ -23,16 +23,15 @@ import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.rdf.model.Statement;
import com.hp.hpl.jena.shared.Lock;
import com.hp.hpl.jena.util.iterator.ClosableIterator;
import com.hp.hpl.jena.vocabulary.RDFS;
import edu.cornell.mannlib.vitro.webapp.beans.Individual;
import edu.cornell.mannlib.vitro.webapp.beans.ObjectProperty;
import edu.cornell.mannlib.vitro.webapp.beans.ObjectPropertyStatement;
import edu.cornell.mannlib.vitro.webapp.beans.ObjectPropertyStatementImpl;
import edu.cornell.mannlib.vitro.webapp.dao.ObjectPropertyStatementDao;
import edu.cornell.mannlib.vitro.webapp.dao.VitroVocabulary;
import edu.cornell.mannlib.vitro.webapp.dao.jena.IndividualSDB.IndividualNotFoundException;
import edu.cornell.mannlib.vitro.webapp.dao.jena.WebappDaoFactorySDB.SDBDatasetMode;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
public class ObjectPropertyStatementDaoSDB extends
ObjectPropertyStatementDaoJena implements ObjectPropertyStatementDao {
@ -43,10 +42,11 @@ public class ObjectPropertyStatementDaoSDB extends
private SDBDatasetMode datasetMode;
public ObjectPropertyStatementDaoSDB(
RDFService rdfService,
DatasetWrapperFactory dwf,
SDBDatasetMode datasetMode,
WebappDaoFactoryJena wadf) {
super (dwf, wadf);
super (rdfService, dwf, wadf);
this.dwf = dwf;
this.datasetMode = datasetMode;
}

View file

@ -10,6 +10,8 @@ import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.shared.Lock;
import com.hp.hpl.jena.sparql.core.DatasetGraph;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
public class RDFServiceDataset implements Dataset {
private RDFServiceDatasetGraph g;
@ -18,6 +20,10 @@ public class RDFServiceDataset implements Dataset {
this.g = g;
}
public RDFServiceDataset(RDFService rdfService) {
this.g = new RDFServiceDatasetGraph(rdfService);
}
@Override
public DatasetGraph asDatasetGraph() {
return g;

View file

@ -37,6 +37,7 @@ import com.hp.hpl.jena.util.iterator.WrappedIterator;
import edu.cornell.mannlib.vitro.webapp.rdfservice.ChangeSet;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException;
import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.RDFServiceUtils;
public class RDFServiceGraph implements GraphWithPerform {
@ -116,8 +117,8 @@ public class RDFServiceGraph implements GraphWithPerform {
ChangeSet changeSet = rdfService.manufactureChangeSet();
try {
changeSet.addAddition(new ByteArrayInputStream(
serialize(t).getBytes()), RDFService.ModelSerializationFormat.N3, graphURI);
changeSet.addAddition(RDFServiceUtils.toInputStream(serialize(t)),
RDFService.ModelSerializationFormat.N3, graphURI);
rdfService.changeSetUpdate(changeSet);
} catch (RDFServiceException rdfse) {
throw new RuntimeException(rdfse);

View file

@ -48,6 +48,7 @@ import edu.cornell.mannlib.vitro.webapp.dao.VClassGroupDao;
import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactory;
import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactoryConfig;
import edu.cornell.mannlib.vitro.webapp.dao.jena.pellet.PelletListener;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
import edu.cornell.mannlib.vitro.webapp.servlet.setup.JenaDataSourceSetupBase;
public class WebappDaoFactoryJena implements WebappDaoFactory {
@ -73,6 +74,8 @@ public class WebappDaoFactoryJena implements WebappDaoFactory {
protected DatasetWrapperFactory dwf;
protected RDFService rdfService;
/* **************** constructors **************** */
public WebappDaoFactoryJena(WebappDaoFactoryJena base, String userURI) {
@ -343,8 +346,10 @@ public class WebappDaoFactoryJena implements WebappDaoFactory {
ObjectPropertyStatementDao objectPropertyStatementDao = null;
public ObjectPropertyStatementDao getObjectPropertyStatementDao() {
if( objectPropertyStatementDao == null )
// TODO supply a valid RDFService as the first argument if we keep this
// implementation
objectPropertyStatementDao = new ObjectPropertyStatementDaoJena(
dwf, this);
null, dwf, this);
return objectPropertyStatementDao;
}

View file

@ -4,7 +4,6 @@ package edu.cornell.mannlib.vitro.webapp.dao.jena;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.HashSet;
import org.apache.commons.dbcp.BasicDataSource;
@ -20,70 +19,37 @@ import edu.cornell.mannlib.vitro.webapp.dao.ObjectPropertyStatementDao;
import edu.cornell.mannlib.vitro.webapp.dao.VClassDao;
import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactory;
import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactoryConfig;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
import edu.cornell.mannlib.vitro.webapp.servlet.setup.JenaDataSourceSetupBase;
import edu.cornell.mannlib.vitro.webapp.servlet.setup.SimpleReasonerSetup;
public class WebappDaoFactorySDB extends WebappDaoFactoryJena {
public static final String UNION_GRAPH = "urn:x-arq:UnionGraph";
private SDBDatasetMode datasetMode = SDBDatasetMode.ASSERTIONS_AND_INFERENCES;
/**
* For use when any database connection associated with the Dataset
* is managed externally
*/
public WebappDaoFactorySDB(OntModelSelector ontModelSelector,
Dataset dataset) {
super(ontModelSelector);
this.dwf = new StaticDatasetFactory(dataset);
public WebappDaoFactorySDB(RDFService rdfService,
OntModelSelector ontModelSelector) {
this(rdfService, ontModelSelector, new WebappDaoFactoryConfig());
}
/**
* For use when any database connection associated with the Dataset
* is managed externally
*/
public WebappDaoFactorySDB(OntModelSelector ontModelSelector,
Dataset dataset,
WebappDaoFactoryConfig config) {
super(ontModelSelector, config);
this.dwf = new StaticDatasetFactory(dataset);
}
public WebappDaoFactorySDB(OntModelSelector ontModelSelector,
Dataset dataset,
WebappDaoFactoryConfig config, SDBDatasetMode datasetMode) {
super(ontModelSelector, config);
this.dwf = new StaticDatasetFactory(dataset);
this.datasetMode = datasetMode;
}
/**
* For use when any Dataset access should get a temporary DB connection
* from a pool
*/
public WebappDaoFactorySDB(OntModelSelector ontModelSelector,
BasicDataSource bds,
StoreDesc storeDesc,
WebappDaoFactoryConfig config) {
super(ontModelSelector, config);
this.dwf = new ReconnectingDatasetFactory(bds, storeDesc);
public WebappDaoFactorySDB(RDFService rdfService,
OntModelSelector ontModelSelector,
WebappDaoFactoryConfig config) {
this(rdfService, ontModelSelector, config, null);
}
/**
* For use when any Dataset access should get a temporary DB connection
* from a pool, and access to the inference graph needs to be specified.
*/
public WebappDaoFactorySDB(OntModelSelector ontModelSelector,
BasicDataSource bds,
StoreDesc storeDesc,
WebappDaoFactoryConfig config,
SDBDatasetMode datasetMode) {
public WebappDaoFactorySDB(RDFService rdfService,
OntModelSelector ontModelSelector,
WebappDaoFactoryConfig config,
SDBDatasetMode datasetMode) {
super(ontModelSelector, config);
this.dwf = new ReconnectingDatasetFactory(bds, storeDesc);
this.datasetMode = datasetMode;
this.dwf = new StaticDatasetFactory(new RDFServiceDataset(rdfService));
this.rdfService = rdfService;
if (datasetMode != null) {
this.datasetMode = datasetMode;
}
}
public WebappDaoFactorySDB(WebappDaoFactorySDB base, String userURI) {
super(base.ontModelSelector);
this.ontModelSelector = base.ontModelSelector;
@ -116,7 +82,7 @@ public class WebappDaoFactorySDB extends WebappDaoFactoryJena {
return objectPropertyStatementDao;
else
return objectPropertyStatementDao =
new ObjectPropertyStatementDaoSDB(dwf, datasetMode, this);
new ObjectPropertyStatementDaoSDB(rdfService, dwf, datasetMode, this);
}
@Override
@ -178,6 +144,12 @@ public class WebappDaoFactorySDB extends WebappDaoFactoryJena {
return filterBlock.toString();
}
@Override
public void close() {
super.close();
this.rdfService.close();
}
private class ReconnectingDatasetFactory implements DatasetWrapperFactory {
private BasicDataSource _bds;

View file

@ -51,6 +51,7 @@ import edu.cornell.mannlib.vitro.webapp.dao.jena.ModelContext;
import edu.cornell.mannlib.vitro.webapp.dao.jena.VitroModelSource;
import edu.cornell.mannlib.vitro.webapp.dao.jena.WebappDaoFactoryJena;
import edu.cornell.mannlib.vitro.webapp.dao.jena.WebappDaoFactorySDB;
import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.RDFServiceUtils;
import edu.cornell.mannlib.vitro.webapp.servlet.setup.JenaDataSourceSetupBase;
/**
@ -172,10 +173,11 @@ public class VitroRequestPrep implements Filter {
vreq.setDataset(dataset);
}
ServletContext ctx = vreq.getSession().getServletContext();
vreq.setUnfilteredWebappDaoFactory(new WebappDaoFactorySDB(
RDFServiceUtils.getRDFServiceFactory(ctx).getRDFService(),
ModelContext.getUnionOntModelSelector(
vreq.getSession().getServletContext()),
vreq.getDataset()));
ctx)));
req.setAttribute("VitroRequestPrep.setup", new Integer(1));
chain.doFilter(req, response);

View file

@ -3,8 +3,6 @@
package edu.cornell.mannlib.vitro.webapp.filters;
import java.io.IOException;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.List;
@ -31,20 +29,21 @@ import com.hp.hpl.jena.ontology.OntModelSpec;
import com.hp.hpl.jena.query.Dataset;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.sdb.SDBFactory;
import com.hp.hpl.jena.sdb.Store;
import com.hp.hpl.jena.sdb.StoreDesc;
import com.hp.hpl.jena.sdb.sql.SDBConnection;
import edu.cornell.mannlib.vitro.webapp.config.ConfigurationProperties;
import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest;
import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactory;
import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactoryConfig;
import edu.cornell.mannlib.vitro.webapp.dao.jena.OntModelSelector;
import edu.cornell.mannlib.vitro.webapp.dao.jena.RDFServiceDataset;
import edu.cornell.mannlib.vitro.webapp.dao.jena.SparqlDataset;
import edu.cornell.mannlib.vitro.webapp.dao.jena.SparqlDatasetGraph;
import edu.cornell.mannlib.vitro.webapp.dao.jena.SparqlGraphMultilingual;
import edu.cornell.mannlib.vitro.webapp.dao.jena.WebappDaoFactorySDB;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceFactory;
import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.RDFServiceUtils;
import edu.cornell.mannlib.vitro.webapp.servlet.setup.JenaDataSourceSetupBase;
public class WebappDaoFactorySDBPrep implements Filter {
@ -90,80 +89,10 @@ public class WebappDaoFactorySDBPrep implements Filter {
}
}
BasicDataSource bds = JenaDataSourceSetupBase.getApplicationDataSource(_ctx);
StoreDesc storeDesc = (StoreDesc) _ctx.getAttribute("storeDesc");
OntModelSelector oms = (OntModelSelector) _ctx.getAttribute("unionOntModelSelector");
String defaultNamespace = (String) _ctx.getAttribute("defaultNamespace");
Connection sqlConn = null;
SDBConnection conn = null;
Store store = null;
Dataset dataset = null;
WebappDaoFactory wadf = null;
// temporary scaffolding in the rdfapi dev branch
// TODO remove me
if (ConfigurationProperties.getBean(request).getProperty(
"VitroConnection.DataSource.endpointURI") != null) {
filterSparql(request, oms, defaultNamespace);
filterChain.doFilter(request, response);
return;
}
try {
if (bds == null || storeDesc == null || oms == null) {
throw new RuntimeException("SDB store not property set up");
}
try {
sqlConn = bds.getConnection();
conn = new SDBConnection(sqlConn) ;
} catch (SQLException sqe) {
throw new RuntimeException("Unable to connect to database", sqe);
}
if (conn != null) {
store = SDBFactory.connectStore(conn, storeDesc);
dataset = SDBFactory.connectDataset(store);
VitroRequest vreq = new VitroRequest((HttpServletRequest) request);
WebappDaoFactoryConfig config = new WebappDaoFactoryConfig();
config.setDefaultNamespace(defaultNamespace);
wadf = new WebappDaoFactorySDB(oms, dataset, config);
vreq.setWebappDaoFactory(wadf);
vreq.setFullWebappDaoFactory(wadf);
vreq.setDataset(dataset);
vreq.setJenaOntModel(ModelFactory.createOntologyModel(
OntModelSpec.OWL_MEM, dataset.getNamedModel(
WebappDaoFactorySDB.UNION_GRAPH)));
}
} catch (Throwable t) {
log.error("Unable to filter request to set up SDB connection", t);
}
request.setAttribute("WebappDaoFactorySDBPrep.setup", 1);
try {
filterChain.doFilter(request, response);
return;
} finally {
if (conn != null) {
conn.close();
}
if (dataset != null) {
dataset.close();
}
if (store != null) {
store.close();
}
if (wadf != null) {
wadf.close();
}
}
}
private void filterSparql(ServletRequest request, OntModelSelector oms, String defaultNamespace) {
log.info("---------");
VitroRequest vreq = new VitroRequest((HttpServletRequest) request);
VitroRequest vreq = new VitroRequest((HttpServletRequest) request);
Enumeration<String> headStrs = vreq.getHeaderNames();
while (headStrs.hasMoreElements()) {
@ -173,44 +102,37 @@ public class WebappDaoFactorySDBPrep implements Filter {
List<String> langs = new ArrayList<String>();
log.info("Accept-Language: " + vreq.getHeader("Accept-Language"));
log.debug("Accept-Language: " + vreq.getHeader("Accept-Language"));
Enumeration<Locale> locs = vreq.getLocales();
while (locs.hasMoreElements()) {
Locale locale = locs.nextElement();
langs.add(locale.toString().replace("_", "-"));
log.info(locale.toString() + " / " + locale.getLanguage() + " + " + locale.getCountry() + " : " + locale.getDisplayCountry() + " | " + locale.getLanguage() + " : " + locale.getDisplayLanguage());
}
WebappDaoFactoryConfig config = new WebappDaoFactoryConfig();
config.setDefaultNamespace(defaultNamespace);
config.setPreferredLanguages(langs);
//okay let's make a graph-backed model
String endpointURI = ConfigurationProperties.getBean(
request).getProperty("VitroConnection.DataSource.endpointURI");
RDFServiceFactory factory = RDFServiceUtils.getRDFServiceFactory(_ctx);
RDFService rdfService = factory.getRDFService();
Dataset dataset = new RDFServiceDataset(rdfService);
wadf = new WebappDaoFactorySDB(rdfService, oms, config);
vreq.setWebappDaoFactory(wadf);
vreq.setFullWebappDaoFactory(wadf);
vreq.setDataset(dataset);
vreq.setJenaOntModel(ModelFactory.createOntologyModel(
OntModelSpec.OWL_MEM, dataset.getDefaultModel()));
Graph g = new SparqlGraphMultilingual(endpointURI, langs);
//Graph g = new SparqlGraph(endpointURI);
request.setAttribute("WebappDaoFactorySDBPrep.setup", 1);
Model m = ModelFactory.createModelForGraph(g);
OntModel om = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM, m);
//oms = new SingleContentOntModelSelector(om, oms.getDisplayModel(), oms.getUserAccountsModel());
try {
filterChain.doFilter(request, response);
return;
} finally {
if (wadf != null) {
wadf.close();
}
}
Dataset dataset = new SparqlDataset(new SparqlDatasetGraph(endpointURI));
//DataSource datasource = DatasetFactory.create();
//datasource.addNamedModel("fake:fake", m);
//dataset = datasource;
//WebappDaoFactory wadf = new WebappDaoFactoryJena(oms, config);
WebappDaoFactory wadf = new WebappDaoFactorySDB(oms, dataset, config);
vreq.setWebappDaoFactory(wadf);
vreq.setFullWebappDaoFactory(wadf);
vreq.setUnfilteredWebappDaoFactory(wadf);
vreq.setWebappDaoFactory(wadf);
//vreq.setAssertionsWebappDaoFactory(wadf);
vreq.setDataset(dataset);
vreq.setJenaOntModel(om);
vreq.setOntModelSelector(oms);
}
@Override

View file

@ -6,8 +6,16 @@ public class RDFServiceException extends Exception {
super();
}
/** Constructs an exception wrapping an underlying cause. */
public RDFServiceException(Throwable cause) {
super(cause);
}
/** Constructs an exception with a detail message. */
public RDFServiceException(String message) {
super(message);
}
/** Constructs an exception with a detail message and an underlying cause. */
public RDFServiceException(String message, Throwable cause) {
super(message, cause);
}
}

View file

@ -0,0 +1,30 @@
package edu.cornell.mannlib.vitro.webapp.rdfservice.impl;
import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceFactory;
/**
* An RDFServiceFactory that always returns the same RDFService object
* @author bjl23
*
*/
public class RDFServiceFactorySingle implements RDFServiceFactory {

    // The single RDFService instance handed out for every request.
    // Declared final: the factory itself is immutable once constructed,
    // so it can be shared freely (thread-safe provided the wrapped
    // RDFService is itself thread-safe — confirm for each implementation).
    private final RDFService rdfService;

    /**
     * @param rdfService the service instance this factory will always return
     */
    public RDFServiceFactorySingle(RDFService rdfService) {
        this.rdfService = rdfService;
    }

    /** Returns the same RDFService instance on every call. */
    @Override
    public RDFService getRDFService() {
        return this.rdfService;
    }

    /** Returns the same RDFService instance, regardless of the request. */
    @Override
    public RDFService getRDFService(VitroRequest vreq) {
        return this.rdfService;
    }
}

View file

@ -1,5 +1,9 @@
package edu.cornell.mannlib.vitro.webapp.rdfservice.impl;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;

import javax.servlet.ServletContext;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceFactory;
@ -9,14 +13,22 @@ public class RDFServiceUtils {
private static final String RDFSERVICEFACTORY_ATTR =
RDFServiceUtils.class.getName() + ".RDFServiceFactory";
public RDFServiceFactory getRDFServiceFactory(ServletContext context) {
public static RDFServiceFactory getRDFServiceFactory(ServletContext context) {
Object o = context.getAttribute(RDFSERVICEFACTORY_ATTR);
return (o instanceof RDFServiceFactory) ? (RDFServiceFactory) o : null;
}
public void setRDFServiceFactory(ServletContext context,
public static void setRDFServiceFactory(ServletContext context,
RDFServiceFactory factory) {
context.setAttribute(RDFSERVICEFACTORY_ATTR, factory);
}
/**
 * Wraps serialized RDF in an in-memory InputStream over its UTF-8 bytes.
 *
 * @param serializedRDF RDF content in some textual serialization (e.g. N3)
 * @return a ByteArrayInputStream over the string's UTF-8 encoding
 */
public static InputStream toInputStream(String serializedRDF) {
    // StandardCharsets.UTF_8 is guaranteed present on every JVM, so the
    // checked UnsupportedEncodingException of getBytes(String) — and the
    // RuntimeException wrapper it forced — are no longer needed.
    return new ByteArrayInputStream(
            serializedRDF.getBytes(StandardCharsets.UTF_8));
}
}

View file

@ -26,8 +26,10 @@ import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.Statement;
import com.hp.hpl.jena.rdf.model.StmtIterator;
import com.hp.hpl.jena.sdb.SDBFactory;
import com.hp.hpl.jena.sdb.Store;
import com.hp.hpl.jena.sdb.StoreDesc;
import com.hp.hpl.jena.sdb.sql.SDBConnection;
import com.hp.hpl.jena.shared.Lock;
import com.hp.hpl.jena.update.GraphStore;
import com.hp.hpl.jena.update.GraphStoreFactory;
import com.hp.hpl.jena.update.UpdateAction;
@ -57,14 +59,19 @@ public class RDFServiceSDB extends RDFServiceImpl implements RDFService {
protected DatasetWrapper getDatasetWrapper() {
try {
SDBConnection conn = new SDBConnection(bds.getConnection());
Dataset dataset = SDBFactory.connectDataset(conn, storeDesc);
return new DatasetWrapper(dataset, conn);
return new DatasetWrapper(getDataset(conn), conn);
} catch (SQLException sqle) {
log.error(sqle, sqle);
throw new RuntimeException(sqle);
}
}
/**
 * Connects an SDB store over the supplied connection and wraps it as a
 * Dataset. Bulk-loader threading is disabled on the store — presumably so
 * all writes stay on the caller's connection/transaction; TODO confirm the
 * original motivation.
 */
protected Dataset getDataset(SDBConnection conn) {
Store store = SDBFactory.connectStore(conn, storeDesc);
store.getLoader().setUseThreading(false);
return SDBFactory.connectDataset(store);
}
@Override
public boolean changeSetUpdate(ChangeSet changeSet)
throws RDFServiceException {
@ -83,11 +90,11 @@ public class RDFServiceSDB extends RDFServiceImpl implements RDFService {
conn = new SDBConnection(bds.getConnection());
} catch (SQLException sqle) {
log.error(sqle, sqle);
throw new RuntimeException(sqle);
throw new RDFServiceException(sqle);
}
Dataset dataset = SDBFactory.connectDataset(conn, storeDesc);
boolean transaction = conn.getTransactionHandler().transactionsSupported();
Dataset dataset = getDataset(conn);
boolean transaction = false; // conn.getTransactionHandler().transactionsSupported();
try {
if (transaction) {
@ -95,26 +102,38 @@ public class RDFServiceSDB extends RDFServiceImpl implements RDFService {
}
while (csIt.hasNext()) {
ModelChange modelChange = csIt.next();
Model model = dataset.getNamedModel(modelChange.getGraphURI());
model.register(new ModelListener(modelChange.getGraphURI(), this));
if (modelChange.getOperation() == ModelChange.Operation.ADD) {
model.add(parseModel(modelChange));
} else if (modelChange.getOperation() == ModelChange.Operation.REMOVE) {
model.remove(parseModel(modelChange));
removeBlankNodesWithSparqlUpdate(dataset, model, modelChange.getGraphURI());
} else {
log.error("unrecognized operation type");
dataset.getLock().enterCriticalSection(Lock.WRITE);
try {
Model model = dataset.getNamedModel(modelChange.getGraphURI());
model.enterCriticalSection(Lock.WRITE);
try {
model.register(new ModelListener(modelChange.getGraphURI(), this));
if (modelChange.getOperation() == ModelChange.Operation.ADD) {
Model m = parseModel(modelChange);
model.add(m);
} else if (modelChange.getOperation() == ModelChange.Operation.REMOVE) {
model.remove(parseModel(modelChange));
removeBlankNodesWithSparqlUpdate(dataset, model, modelChange.getGraphURI());
} else {
log.error("unrecognized operation type");
}
} finally {
model.leaveCriticalSection();
}
} finally {
dataset.getLock().leaveCriticalSection();
}
}
} catch (Throwable t) {
if (transaction) {
conn.getTransactionHandler().abort();
}
throw new RuntimeException(t);
} finally {
if (transaction) {
conn.getTransactionHandler().commit();
}
} catch (Exception e) {
log.error(e, e);
if (transaction) {
conn.getTransactionHandler().abort();
}
throw new RDFServiceException(e);
} finally {
conn.close();
}
@ -173,7 +192,7 @@ public class RDFServiceSDB extends RDFServiceImpl implements RDFService {
private Model parseModel(ModelChange modelChange) {
Model model = ModelFactory.createDefaultModel();
model.read(modelChange.getSerializedModel(),
model.read(modelChange.getSerializedModel(), null,
getSerializationFormatString(modelChange.getSerializationFormat()));
return model;
}

View file

@ -68,6 +68,17 @@ public class RDFServiceSparql extends RDFServiceImpl implements RDFService {
this.repository = new HTTPRepository(endpointURI);
}
/**
 * Constructs an RDFService for a remote repository.
 * The default read graph is the union of all graphs in the knowledge base.
 *
 * @param endpointURI URI of the SPARQL endpoint for the knowledge base
 */
public RDFServiceSparql(String endpointURI) {
this(endpointURI, null);
}
public void close() {
try {
this.repository.shutDown();

View file

@ -288,10 +288,10 @@ public class JenaDataSourceSetup extends JenaDataSourceSetupBase
ModelContext.setBaseOntModel(baseOms.getFullModel(), ctx);
WebappDaoFactoryConfig config = new WebappDaoFactoryConfig();
config.setDefaultNamespace(getDefaultNamespace(ctx));
WebappDaoFactory baseWadf = new WebappDaoFactorySDB(
baseOms, bds, storeDesc, config,
WebappDaoFactorySDB.SDBDatasetMode.ASSERTIONS_ONLY);
ctx.setAttribute("assertionsWebappDaoFactory",baseWadf);
// WebappDaoFactory baseWadf = new WebappDaoFactorySDB(
// baseOms, bds, storeDesc, config,
// WebappDaoFactorySDB.SDBDatasetMode.ASSERTIONS_ONLY);
// ctx.setAttribute("assertionsWebappDaoFactory",baseWadf);
OntModel inferenceUnion = ModelFactory.createOntologyModel(
OntModelSpec.OWL_MEM,
@ -300,20 +300,20 @@ public class JenaDataSourceSetup extends JenaDataSourceSetupBase
inferenceOms.getTBoxModel()));
inferenceOms.setFullModel(inferenceUnion);
ModelContext.setInferenceOntModel(inferenceOms.getFullModel(), ctx);
WebappDaoFactory infWadf = new WebappDaoFactorySDB(
inferenceOms, bds, storeDesc, config,
WebappDaoFactorySDB.SDBDatasetMode.INFERENCES_ONLY);
ctx.setAttribute("deductionsWebappDaoFactory", infWadf);
// WebappDaoFactory infWadf = new WebappDaoFactorySDB(
// inferenceOms, bds, storeDesc, config,
// WebappDaoFactorySDB.SDBDatasetMode.INFERENCES_ONLY);
// ctx.setAttribute("deductionsWebappDaoFactory", infWadf);
OntModel masterUnion = ModelFactory.createOntologyModel(
DB_ONT_MODEL_SPEC, makeDBModel(
bds, WebappDaoFactorySDB.UNION_GRAPH,
DB_ONT_MODEL_SPEC, TripleStoreType.SDB, ctx));
unionOms.setFullModel(masterUnion);
ctx.setAttribute("jenaOntModel", masterUnion);
WebappDaoFactory wadf = new WebappDaoFactorySDB(
unionOms, bds, storeDesc, config);
ctx.setAttribute("webappDaoFactory",wadf);
// OntModel masterUnion = ModelFactory.createOntologyModel(
// DB_ONT_MODEL_SPEC, makeDBModel(
// bds, WebappDaoFactorySDB.UNION_GRAPH,
// DB_ONT_MODEL_SPEC, TripleStoreType.SDB, ctx));
// unionOms.setFullModel(masterUnion);
// ctx.setAttribute("jenaOntModel", masterUnion);
// WebappDaoFactory wadf = new WebappDaoFactorySDB(
// unionOms, bds, storeDesc, config);
// ctx.setAttribute("webappDaoFactory",wadf);
ModelContext.setOntModelSelector(unionOms, ctx);
ModelContext.setUnionOntModelSelector(unionOms, ctx);

View file

@ -310,11 +310,11 @@ public class JenaDataSourceSetupSparql extends JenaDataSourceSetupBase
inferenceOms.getABoxModel(),
inferenceOms.getTBoxModel()));
// inferenceOms.setFullModel(inferenceUnion);
ModelContext.setInferenceOntModel(inferenceOms.getFullModel(), ctx);
WebappDaoFactory infWadf = new WebappDaoFactorySDB(
inferenceOms, bds, storeDesc, config,
WebappDaoFactorySDB.SDBDatasetMode.INFERENCES_ONLY);
ctx.setAttribute("deductionsWebappDaoFactory", infWadf);
// ModelContext.setInferenceOntModel(inferenceOms.getFullModel(), ctx);
// WebappDaoFactory infWadf = new WebappDaoFactorySDB(
// inferenceOms, bds, storeDesc, config,
// WebappDaoFactorySDB.SDBDatasetMode.INFERENCES_ONLY);
// ctx.setAttribute("deductionsWebappDaoFactory", infWadf);
OntModel masterUnion = unionOms.getFullModel();
@ -325,9 +325,9 @@ public class JenaDataSourceSetupSparql extends JenaDataSourceSetupBase
// unionOms.setFullModel(masterUnion);
ctx.setAttribute("jenaOntModel", masterUnion);
WebappDaoFactory wadf = new WebappDaoFactorySDB(
unionOms, bds, storeDesc, config);
ctx.setAttribute("webappDaoFactory",wadf);
// WebappDaoFactory wadf = new WebappDaoFactorySDB(
// unionOms, bds, storeDesc, config);
// ctx.setAttribute("webappDaoFactory",wadf);
ModelContext.setOntModelSelector(unionOms, ctx);
ModelContext.setUnionOntModelSelector(unionOms, ctx);

View file

@ -52,13 +52,14 @@ import edu.cornell.mannlib.vitro.webapp.dao.jena.ModelContext;
import edu.cornell.mannlib.vitro.webapp.dao.jena.ModelSynchronizer;
import edu.cornell.mannlib.vitro.webapp.dao.jena.OntModelSelector;
import edu.cornell.mannlib.vitro.webapp.dao.jena.OntModelSelectorImpl;
import edu.cornell.mannlib.vitro.webapp.dao.jena.SparqlDataset;
import edu.cornell.mannlib.vitro.webapp.dao.jena.SparqlDatasetGraph;
import edu.cornell.mannlib.vitro.webapp.dao.jena.RDFServiceDataset;
import edu.cornell.mannlib.vitro.webapp.dao.jena.SparqlGraph;
import edu.cornell.mannlib.vitro.webapp.dao.jena.VitroJenaModelMaker;
import edu.cornell.mannlib.vitro.webapp.dao.jena.VitroJenaSDBModelMaker;
import edu.cornell.mannlib.vitro.webapp.dao.jena.VitroModelSource;
import edu.cornell.mannlib.vitro.webapp.dao.jena.WebappDaoFactorySDB;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.sparql.RDFServiceSparql;
import edu.cornell.mannlib.vitro.webapp.startup.StartupStatus;
import edu.cornell.mannlib.vitro.webapp.utils.jena.InitialJenaModelUtils;
@ -128,7 +129,9 @@ public class JenaDataSourceSetupSparql2 extends JenaDataSourceSetupBase
ctx).getProperty("VitroConnection.DataSource.endpointURI");
Graph g = new SparqlGraph(endpointURI);
Dataset dataset = new SparqlDataset(new SparqlDatasetGraph(endpointURI));
RDFService rdfService = new RDFServiceSparql(endpointURI);
Dataset dataset = new RDFServiceDataset(rdfService);
setStartupDataset(dataset, ctx);
// setStartupDataset(SDBFactory.connectDataset(store), ctx);
@ -290,7 +293,7 @@ public class JenaDataSourceSetupSparql2 extends JenaDataSourceSetupBase
WebappDaoFactoryConfig config = new WebappDaoFactoryConfig();
config.setDefaultNamespace(getDefaultNamespace(ctx));
WebappDaoFactory baseWadf = new WebappDaoFactorySDB(
baseOms, dataset, config,
rdfService, baseOms, config,
WebappDaoFactorySDB.SDBDatasetMode.ASSERTIONS_ONLY);
ctx.setAttribute("assertionsWebappDaoFactory",baseWadf);
@ -302,7 +305,7 @@ public class JenaDataSourceSetupSparql2 extends JenaDataSourceSetupBase
inferenceOms.setFullModel(inferenceUnion);
ModelContext.setInferenceOntModel(inferenceOms.getFullModel(), ctx);
WebappDaoFactory infWadf = new WebappDaoFactorySDB(
inferenceOms, dataset, config,
rdfService, inferenceOms, config,
WebappDaoFactorySDB.SDBDatasetMode.INFERENCES_ONLY);
ctx.setAttribute("deductionsWebappDaoFactory", infWadf);
@ -311,7 +314,7 @@ public class JenaDataSourceSetupSparql2 extends JenaDataSourceSetupBase
unionOms.setFullModel(masterUnion);
ctx.setAttribute("jenaOntModel", masterUnion);
WebappDaoFactory wadf = new WebappDaoFactorySDB(
unionOms, dataset, config);
rdfService, unionOms, config);
ctx.setAttribute("webappDaoFactory",wadf);
ModelContext.setOntModelSelector(unionOms, ctx);

View file

@ -0,0 +1,692 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.servlet.setup;
import static edu.cornell.mannlib.vitro.webapp.dao.DisplayVocabulary.DISPLAY_ONT_MODEL;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import javax.servlet.ServletContext;
import javax.servlet.ServletContextEvent;
import org.apache.commons.dbcp.BasicDataSource;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.hp.hpl.jena.graph.Graph;
import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.ontology.OntModelSpec;
import com.hp.hpl.jena.query.Dataset;
import com.hp.hpl.jena.query.Query;
import com.hp.hpl.jena.query.QueryExecution;
import com.hp.hpl.jena.query.QueryExecutionFactory;
import com.hp.hpl.jena.query.QueryFactory;
import com.hp.hpl.jena.query.Syntax;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.ResIterator;
import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.rdf.model.ResourceFactory;
import com.hp.hpl.jena.rdf.model.Statement;
import com.hp.hpl.jena.rdf.model.StmtIterator;
import com.hp.hpl.jena.sdb.SDB;
import com.hp.hpl.jena.sdb.SDBFactory;
import com.hp.hpl.jena.sdb.Store;
import com.hp.hpl.jena.sdb.StoreDesc;
import com.hp.hpl.jena.sdb.sql.SDBConnection;
import com.hp.hpl.jena.sdb.store.DatabaseType;
import com.hp.hpl.jena.sdb.store.LayoutType;
import com.hp.hpl.jena.sdb.util.StoreUtils;
import com.hp.hpl.jena.shared.Lock;
import com.hp.hpl.jena.util.ResourceUtils;
import com.hp.hpl.jena.util.iterator.ClosableIterator;
import com.hp.hpl.jena.vocabulary.RDF;
import edu.cornell.mannlib.vitro.webapp.config.ConfigurationProperties;
import edu.cornell.mannlib.vitro.webapp.dao.VitroVocabulary;
import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactory;
import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactoryConfig;
import edu.cornell.mannlib.vitro.webapp.dao.jena.JenaModelUtils;
import edu.cornell.mannlib.vitro.webapp.dao.jena.ModelContext;
import edu.cornell.mannlib.vitro.webapp.dao.jena.ModelSynchronizer;
import edu.cornell.mannlib.vitro.webapp.dao.jena.OntModelSelector;
import edu.cornell.mannlib.vitro.webapp.dao.jena.OntModelSelectorImpl;
import edu.cornell.mannlib.vitro.webapp.dao.jena.RDFServiceDataset;
import edu.cornell.mannlib.vitro.webapp.dao.jena.RDFServiceGraph;
import edu.cornell.mannlib.vitro.webapp.dao.jena.VitroJenaModelMaker;
import edu.cornell.mannlib.vitro.webapp.dao.jena.VitroJenaSDBModelMaker;
import edu.cornell.mannlib.vitro.webapp.dao.jena.VitroModelSource;
import edu.cornell.mannlib.vitro.webapp.dao.jena.WebappDaoFactorySDB;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceFactory;
import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.RDFServiceFactorySingle;
import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.RDFServiceUtils;
import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.sdb.RDFServiceSDB;
import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.sparql.RDFServiceSparql;
import edu.cornell.mannlib.vitro.webapp.startup.StartupStatus;
import edu.cornell.mannlib.vitro.webapp.utils.jena.InitialJenaModelUtils;
/**
 * Startup listener that wires an SDB-backed {@link RDFService} into the
 * servlet context as the application's triple store, builds the assertion /
 * inference / union OntModelSelectors, and registers the matching
 * WebappDaoFactories (NIHVIVO-3642).
 */
public class RDFServiceSetup extends JenaDataSourceSetupBase
        implements javax.servlet.ServletContextListener {

    // Log against this class; the original logged against
    // JenaDataSourceSetup.class (copy-paste slip), which misattributed
    // every message from this listener.
    private static final Log log = LogFactory.getLog(RDFServiceSetup.class);
@Override
public void contextInitialized(ServletContextEvent sce) {
    // Entry point for container startup: set up the data source, timing the
    // work, and report any failure as fatal through StartupStatus.
    ServletContext servletContext = sce.getServletContext();
    StartupStatus status = StartupStatus.getBean(servletContext);
    try {
        long begin = System.currentTimeMillis();
        setUpJenaDataSource(servletContext);
        long elapsedSeconds = (System.currentTimeMillis() - begin) / 1000;
        log.info(elapsedSeconds + " seconds to set up SDB store");
    } catch (SQLException sqle) {
        // SQL exceptions are fatal and should halt startup
        log.error("Error using SQL database; startup aborted.", sqle);
        status.fatal(this, "Error using SQL database; startup aborted.", sqle);
    } catch (Throwable t) {
        log.error("Throwable in " + this.getClass().getName(), t);
        status.fatal(this, "Throwable in " + this.getClass().getName(), t);
    }
}
/**
 * Wires an SDB-backed RDFService into the application: builds the three
 * OntModelSelectors (base = assertions, inference = deductions, union =
 * both), caches the TBox and application-metadata models in memory for
 * speed, and registers the WebappDaoFactories and model makers as servlet
 * context attributes for later listeners and the webapp itself.
 *
 * @throws SQLException if the underlying SQL database cannot be used
 */
private void setUpJenaDataSource(ServletContext ctx) throws SQLException {
    OntModelSelectorImpl baseOms = new OntModelSelectorImpl();
    OntModelSelectorImpl inferenceOms = new OntModelSelectorImpl();
    OntModelSelectorImpl unionOms = new OntModelSelectorImpl();

    // The user-accounts and display models are shared by all three selectors.
    OntModel userAccountsModel = ontModelFromContextAttribute(
            ctx, "userAccountsOntModel");
    baseOms.setUserAccountsModel(userAccountsModel);
    inferenceOms.setUserAccountsModel(userAccountsModel);
    unionOms.setUserAccountsModel(userAccountsModel);

    OntModel displayModel = ontModelFromContextAttribute(
            ctx, DISPLAY_ONT_MODEL);
    baseOms.setDisplayModel(displayModel);
    inferenceOms.setDisplayModel(displayModel);
    unionOms.setDisplayModel(displayModel);

    // SDB setup
    // union default graph: SPARQL queries against the default graph see the
    // union of all named graphs.
    SDB.getContext().set(SDB.unionDefaultGraph, true);
    //
    StoreDesc storeDesc = makeStoreDesc(ctx);
    // setApplicationStoreDesc(storeDesc, ctx);
    //
    BasicDataSource bds = getApplicationDataSource(ctx);
    // NOTE(review): bds may be null if no earlier listener published a data
    // source; the commented-out fallback below used to create one. Confirm
    // listener ordering before removing this scaffolding.
    // if (bds == null) {
    //     bds = makeDataSourceFromConfigurationProperties(ctx);
    //     setApplicationDataSource(bds, ctx);
    // }
    // Store store = connectStore(bds, storeDesc);
    // setApplicationStore(store, ctx);

    // okay let's make a graph-backed model
    RDFService rdfService = new RDFServiceSDB(bds, storeDesc);
    RDFServiceFactory rdfServiceFactory = new RDFServiceFactorySingle(rdfService);
    RDFServiceUtils.setRDFServiceFactory(ctx, rdfServiceFactory);
    Graph g = new RDFServiceGraph(rdfService); // NOTE(review): 'g' is unused below
    Dataset dataset = new RDFServiceDataset(rdfService);
    setStartupDataset(dataset, ctx);
    // setStartupDataset(SDBFactory.connectDataset(store), ctx);
    // if (!isSetUp(store)) {
    //     log.info("Initializing SDB store");
    //     if (isFirstStartup()) {
    //         setupSDB(ctx, store);
    //     } else {
    //         migrateToSDBFromExistingRDBStore(ctx, store);
    //     }
    // }

    // The code below, which sets up the OntModelSelectors, controls whether
    // each model is maintained in memory, in the DB, or both while the
    // application is running.

    // Populate the three OntModelSelectors (BaseOntModel = assertions,
    // InferenceOntModel = inferences and JenaOntModel = union of assertions
    // and inferences) with the post-SDB-conversion models.

    // ABox assertions
    Model aboxAssertions = dataset.getNamedModel(
            JenaDataSourceSetupBase.JENA_DB_MODEL);
    // The union with an empty in-memory model makes the DB-backed model
    // listenable (registered listeners fire on the in-memory side).
    Model listenableAboxAssertions = ModelFactory.createUnion(
            aboxAssertions, ModelFactory.createDefaultModel());
    baseOms.setABoxModel(
            ModelFactory.createOntologyModel(
                    OntModelSpec.OWL_MEM, listenableAboxAssertions));

    // ABox inferences
    Model aboxInferences = dataset.getNamedModel(
            JenaDataSourceSetupBase.JENA_INF_MODEL);
    Model listenableAboxInferences = ModelFactory.createUnion(
            aboxInferences, ModelFactory.createDefaultModel());
    inferenceOms.setABoxModel(ModelFactory.createOntologyModel(
            OntModelSpec.OWL_MEM, listenableAboxInferences));

    // Since the TBox models are in memory, they do not have timeout issues
    // like the like the ABox models do (and so don't need the extra step
    // to make them listenable.)

    // TBox assertions: copied from the DB into memory; a ModelSynchronizer
    // keeps the DB copy in step with subsequent in-memory edits.
    try {
        Model tboxAssertionsDB = dataset.getNamedModel(
                JENA_TBOX_ASSERTIONS_MODEL);
        OntModel tboxAssertions = ModelFactory.createOntologyModel(
                MEM_ONT_MODEL_SPEC);
        if (tboxAssertionsDB != null) {
            long startTime = System.currentTimeMillis();
            // NOTE(review): System.out here (and below) should probably be log
            System.out.println(
                    "Copying cached tbox assertions into memory");
            tboxAssertions.add(tboxAssertionsDB);
            System.out.println((System.currentTimeMillis() - startTime)
                    / 1000 + " seconds to load tbox assertions");
        }
        // NOTE(review): if tboxAssertionsDB were null this would NPE; the
        // null check above suggests that is believed possible — confirm.
        tboxAssertions.getBaseModel().register(new ModelSynchronizer(
                tboxAssertionsDB));
        baseOms.setTBoxModel(tboxAssertions);
    } catch (Throwable e) {
        log.error("Unable to load tbox assertion cache from DB", e);
    }

    // TBox inferences: same in-memory caching scheme as the assertions.
    try {
        Model tboxInferencesDB = dataset.getNamedModel(JENA_TBOX_INF_MODEL);
        OntModel tboxInferences = ModelFactory.createOntologyModel(
                MEM_ONT_MODEL_SPEC);
        if (tboxInferencesDB != null) {
            long startTime = System.currentTimeMillis();
            System.out.println(
                    "Copying cached tbox inferences into memory");
            tboxInferences.add(tboxInferencesDB);
            System.out.println((System.currentTimeMillis() - startTime)
                    / 1000 + " seconds to load tbox inferences");
        }
        tboxInferences.getBaseModel().register(new ModelSynchronizer(
                tboxInferencesDB));
        inferenceOms.setTBoxModel(tboxInferences);
    } catch (Throwable e) {
        log.error("Unable to load tbox inference cache from DB", e);
    }

    // union ABox
    OntModel unionABoxModel = ModelFactory.createOntologyModel(
            MEM_ONT_MODEL_SPEC, ModelFactory.createUnion(
                    baseOms.getABoxModel(), inferenceOms.getABoxModel()));
    unionOms.setABoxModel(unionABoxModel);

    // union TBox
    OntModel unionTBoxModel = ModelFactory.createOntologyModel(
            MEM_ONT_MODEL_SPEC, ModelFactory.createUnion(
                    baseOms.getTBoxModel(), inferenceOms.getTBoxModel()));
    unionOms.setTBoxModel(unionTBoxModel);

    // Application metadata model is cached in memory.
    try {
        Model applicationMetadataModelDB = dataset.getNamedModel(
                JENA_APPLICATION_METADATA_MODEL);
        OntModel applicationMetadataModel =
                ModelFactory.createOntologyModel(MEM_ONT_MODEL_SPEC);
        long startTime = System.currentTimeMillis();
        System.out.println(
                "Copying cached application metadata model into memory");
        applicationMetadataModel.add(applicationMetadataModelDB);
        System.out.println((System.currentTimeMillis() - startTime)
                / 1000 + " seconds to load application metadata model " +
                "assertions of size " + applicationMetadataModel.size());
        applicationMetadataModel.getBaseModel().register(
                new ModelSynchronizer(applicationMetadataModelDB));

        // An empty metadata model is treated as first startup and seeded
        // from the bundled initial model.
        if (applicationMetadataModel.size() == 0 /* isFirstStartup() */) {
            applicationMetadataModel.add(
                    InitialJenaModelUtils.loadInitialModel(
                            ctx, getDefaultNamespace(ctx)));
        }
        // else if (applicationMetadataModelDB.size() == 0) {
        //     repairAppMetadataModel(
        //             applicationMetadataModel, aboxAssertions,
        //             aboxInferences);
        // }

        // All three selectors share the single cached metadata model.
        baseOms.setApplicationMetadataModel(applicationMetadataModel);
        inferenceOms.setApplicationMetadataModel(
                baseOms.getApplicationMetadataModel());
        unionOms.setApplicationMetadataModel(
                baseOms.getApplicationMetadataModel());
    } catch (Throwable e) {
        log.error("Unable to load application metadata model cache from DB"
                , e);
    }

    // Repair the portal URI if the configured default namespace changed.
    checkForNamespaceMismatch( baseOms.getApplicationMetadataModel(), ctx );

    if (isFirstStartup()) {
        loadDataFromFilesystem(baseOms, ctx);
    }

    log.info("Setting up union models and DAO factories");

    // create TBox + ABox union models and set up webapp DAO factories
    OntModel baseUnion = ModelFactory.createOntologyModel(
            OntModelSpec.OWL_MEM,
            ModelFactory.createUnion(baseOms.getABoxModel(),
                    baseOms.getTBoxModel()));
    baseOms.setFullModel(baseUnion);
    ModelContext.setBaseOntModel(baseOms.getFullModel(), ctx);
    WebappDaoFactoryConfig config = new WebappDaoFactoryConfig();
    config.setDefaultNamespace(getDefaultNamespace(ctx));
    // assertions-only DAO factory
    WebappDaoFactory baseWadf = new WebappDaoFactorySDB(
            rdfService, baseOms, config,
            WebappDaoFactorySDB.SDBDatasetMode.ASSERTIONS_ONLY);
    ctx.setAttribute("assertionsWebappDaoFactory", baseWadf);

    OntModel inferenceUnion = ModelFactory.createOntologyModel(
            OntModelSpec.OWL_MEM,
            ModelFactory.createUnion(
                    inferenceOms.getABoxModel(),
                    inferenceOms.getTBoxModel()));
    inferenceOms.setFullModel(inferenceUnion);
    ModelContext.setInferenceOntModel(inferenceOms.getFullModel(), ctx);
    // inferences-only DAO factory
    WebappDaoFactory infWadf = new WebappDaoFactorySDB(
            rdfService, inferenceOms, config,
            WebappDaoFactorySDB.SDBDatasetMode.INFERENCES_ONLY);
    ctx.setAttribute("deductionsWebappDaoFactory", infWadf);

    OntModel masterUnion = ModelFactory.createOntologyModel(
            DB_ONT_MODEL_SPEC, dataset.getDefaultModel());
    unionOms.setFullModel(masterUnion);
    ctx.setAttribute("jenaOntModel", masterUnion);
    // default (assertions + inferences) DAO factory
    WebappDaoFactory wadf = new WebappDaoFactorySDB(
            rdfService, unionOms, config);
    ctx.setAttribute("webappDaoFactory", wadf);

    ModelContext.setOntModelSelector(unionOms, ctx);
    ModelContext.setUnionOntModelSelector(unionOms, ctx);
    // assertions and inferences
    ModelContext.setBaseOntModelSelector(baseOms, ctx);
    // assertions
    ModelContext.setInferenceOntModelSelector(inferenceOms, ctx);
    // inferences
    ctx.setAttribute("defaultNamespace", getDefaultNamespace(ctx));

    log.info("SDB store ready for use");

    makeModelMakerFromConnectionProperties(TripleStoreType.RDB, ctx);
    VitroJenaModelMaker vjmm = getVitroJenaModelMaker();
    setVitroJenaModelMaker(vjmm, ctx);
    makeModelMakerFromConnectionProperties(TripleStoreType.SDB, ctx);
    VitroJenaSDBModelMaker vsmm = getVitroJenaSDBModelMaker();
    setVitroJenaSDBModelMaker(vsmm, ctx);

    //bdc34: I have no reason for vsmm vs vjmm.
    //I don't know what are the implications of this choice.
    setVitroModelSource( new VitroModelSource(vsmm,ctx), ctx);

    log.info("Model makers set up");
}
/**
 * If we find a "portal1" portal (and we should), its URI should use the
 * default namespace.  If it does not, rename the portal into the expected
 * namespace and post a startup warning explaining the consequences.
 *
 * @param model the application metadata model holding the portal resources
 * @param ctx   used to look up the configured default namespace
 */
private void checkForNamespaceMismatch(OntModel model, ServletContext ctx) {
    String expectedNamespace = getDefaultNamespace(ctx);
    List<Resource> portals = getPortal1s(model);

    if (!portals.isEmpty() && noPortalForNamespace(
            portals, expectedNamespace)) {
        // There really should be only one portal 1, but if there happen to
        // be multiple, just arbitrarily pick the first in the list.
        Resource portal = portals.get(0);
        String oldNamespace = portal.getNameSpace();
        renamePortal(portal, expectedNamespace, model);
        StartupStatus ss = StartupStatus.getBean(ctx);
        ss.warning(this, "\nThe default namespace has been changed \n" +
                "from " + oldNamespace +
                "\nto " + expectedNamespace + ".\n" +
                "The application will function normally, but " +
                "any individuals in the \n" + oldNamespace + " " +
                "namespace will need to have their URIs \n" +
                "changed in order to be served as linked data. " +
                "You can use the Ingest Tools \nto change the " +
                "URIs for a batch of resources.");
    }
}
/**
 * Collects every resource typed as Portal whose local name is "portal1",
 * reading the model under a read lock.
 */
private List<Resource> getPortal1s(Model model) {
    List<Resource> matches = new ArrayList<Resource>();
    try {
        model.enterCriticalSection(Lock.READ);
        for (ResIterator it = model.listResourcesWithProperty(
                RDF.type, PORTAL); it.hasNext(); ) {
            Resource candidate = it.nextResource();
            if ("portal1".equals(candidate.getLocalName())) {
                matches.add(candidate);
            }
        }
    } finally {
        model.leaveCriticalSection();
    }
    return matches;
}
/**
 * Returns true iff none of the given portal resources already lives in the
 * expected namespace.
 */
private boolean noPortalForNamespace(List<Resource> portals,
        String expectedNamespace) {
    for (Resource candidate : portals) {
        boolean inExpectedNamespace =
                expectedNamespace.equals(candidate.getNameSpace());
        if (inExpectedNamespace) {
            return false;
        }
    }
    return true;
}
/**
 * Moves the portal resource into the given namespace, preserving its local
 * name, under a write lock on the model.
 */
private void renamePortal(Resource portal, String namespace, Model model) {
    model.enterCriticalSection(Lock.WRITE);
    try {
        String newUri = namespace + portal.getLocalName();
        ResourceUtils.renameResource(portal, newUri);
    } finally {
        model.leaveCriticalSection();
    }
}
/* ===================================================================== */
@Override
public void contextDestroyed(ServletContextEvent sce) {
    // No resources to release on shutdown.
}
/**
 * Returns the OntModel stored under the given context attribute, or — if
 * the attribute is absent or not an OntModel — creates a fresh in-memory
 * model, caches it under that attribute, and returns it.
 *
 * @param ctx       the servlet context to read from and write to
 * @param attribute the context attribute name
 * @return never null
 */
@Override
protected OntModel ontModelFromContextAttribute(ServletContext ctx,
        String attribute) {
    Object attributeValue = ctx.getAttribute(attribute);
    // instanceof is false for null, so the former explicit null check
    // was redundant.
    if (attributeValue instanceof OntModel) {
        return (OntModel) attributeValue;
    }
    OntModel ontModel = ModelFactory.createOntologyModel(MEM_ONT_MODEL_SPEC);
    ctx.setAttribute(attribute, ontModel);
    return ontModel;
}
/**
 * Reports whether the model is "empty" in the sense used here: it contains
 * no individuals typed as {@link VitroVocabulary#PORTAL}.  (It does NOT
 * test for a model of size zero.)
 */
private boolean isEmpty(Model model) {
    ClosableIterator<Statement> closeIt = model.listStatements(
            null, RDF.type, ResourceFactory.createResource(
                    VitroVocabulary.PORTAL));
    try {
        // Directly return the negation instead of the former if/else
        // returning literal true/false.
        return !closeIt.hasNext();
    } finally {
        closeIt.close();
    }
}
/**
 * First-startup seeding: reads the user-supplied RDF files from the ABox,
 * TBox, and application-metadata path sets into the corresponding models
 * of the base OntModelSelector.
 *
 * @param baseOms the selector whose models receive the file contents
 * @param ctx     used to resolve the file path sets
 */
private void loadDataFromFilesystem(OntModelSelector baseOms,
        ServletContext ctx) {
    // Primitive long, not boxed Long: the former Long caused pointless
    // autoboxing for a simple elapsed-time measurement.
    long startTime = System.currentTimeMillis();
    log.debug("Initializing models from RDF files");
    readOntologyFilesInPathSet(USER_ABOX_PATH, ctx, baseOms.getABoxModel());
    readOntologyFilesInPathSet(USER_TBOX_PATH, ctx, baseOms.getTBoxModel());
    readOntologyFilesInPathSet(
            USER_APPMETA_PATH, ctx, baseOms.getApplicationMetadataModel());
    log.debug(((System.currentTimeMillis() - startTime) / 1000)
            + " seconds to read RDF files ");
}
/**
 * Extracts the TBox from (fullModel union submodels) and copies into
 * tboxModel only those extracted statements that are actually present in
 * fullModel (i.e. the intersection of the extracted TBox and fullModel).
 */
private static void getTBoxModel(Model fullModel,
        Model submodels,
        Model tboxModel) {
    JenaModelUtils modelUtils = new JenaModelUtils();
    Model combined = ModelFactory.createUnion(fullModel, submodels);
    Model extractedTBox = modelUtils.extractTBox(combined);
    for (StmtIterator it = extractedTBox.listStatements(); it.hasNext(); ) {
        Statement candidate = it.next();
        if (fullModel.contains(candidate)) {
            tboxModel.add(candidate);
        }
    }
}
/*
 * Copy all statements from model 1 that are not in model 2 to model 3.
 */
private static void copyDifference(Model model1,
        Model model2,
        Model model3) {
    for (StmtIterator it = model1.listStatements(); it.hasNext(); ) {
        Statement stmt = it.next();
        if (model2.contains(stmt)) {
            continue; // present in model2: not part of the difference
        }
        model3.add(stmt);
    }
}
/**
 * Copies the application metadata (descriptions of every Portal,
 * PropertyGroup, and ClassGroup individual) from the source model into the
 * target model via a SPARQL DESCRIBE.  Failures are logged, not thrown.
 *
 * @param source model to query
 * @param target model that receives the described statements
 */
protected static void getAppMetadata(Model source, Model target) {
    String amdQuery = "DESCRIBE ?x WHERE { " +
            "{?x a <" + VitroVocabulary.PORTAL +"> } UNION " +
            "{?x a <" + VitroVocabulary.PROPERTYGROUP +"> } UNION " +
            "{?x a <" + VitroVocabulary.CLASSGROUP +"> } } ";
    try {
        Query q = QueryFactory.create(amdQuery, Syntax.syntaxARQ);
        QueryExecution qe = QueryExecutionFactory.create(q, source);
        try {
            qe.execDescribe(target);
        } finally {
            // Release query resources; the original leaked the
            // QueryExecution by never closing it.
            qe.close();
        }
    } catch (Exception e) {
        log.error("unable to create the application metadata model",e);
    }
}
/**
 * Moves the Portal / PropertyGroup / ClassGroup descriptions out of the
 * ABox assertion and inference models into the dedicated application
 * metadata model, then removes the moved statements from both ABox models.
 */
protected static void repairAppMetadataModel(Model applicationMetadataModel,
        Model aboxAssertions,
        Model aboxInferences) {
    log.info("Moving application metadata from ABox to dedicated model");
    getAppMetadata(aboxAssertions, applicationMetadataModel);
    getAppMetadata(aboxInferences, applicationMetadataModel);
    aboxAssertions.remove(applicationMetadataModel);
    aboxInferences.remove(applicationMetadataModel);
}
/**
 * Builds an SDB store description from configuration, falling back to the
 * historical defaults: layout "layout2/hash" on a MySQL database.
 */
public static StoreDesc makeStoreDesc(ServletContext ctx) {
    ConfigurationProperties props = ConfigurationProperties.getBean(ctx);
    String layoutStr = props.getProperty(
            "VitroConnection.DataSource.sdb.layout", "layout2/hash");
    String dbtypeStr = props.getProperty(
            "VitroConnection.DataSource.dbtype", "MySQL");
    return new StoreDesc(
            LayoutType.fetch(layoutStr), DatabaseType.fetch(dbtypeStr));
}
/**
 * Obtains a new JDBC connection from the pool, wraps it in an
 * SDBConnection, and attaches it to the store described by storeDesc.
 * The caller owns the returned Store and the connection it wraps.
 *
 * @throws SQLException if a connection cannot be obtained from the pool
 */
public static Store connectStore(BasicDataSource bds, StoreDesc storeDesc)
        throws SQLException {
    SDBConnection conn = new SDBConnection(bds.getConnection());
    return SDBFactory.connectStore(conn, storeDesc);
}
/**
 * Convenience overload: formats the SDB store starting from empty
 * assertion and inference models.
 */
public static void setupSDB(ServletContext ctx, Store store) {
    Model emptyAssertions = ModelFactory.createDefaultModel();
    Model emptyInferences = ModelFactory.createDefaultModel();
    setupSDB(ctx, store, emptyAssertions, emptyInferences);
}
/**
 * One-time formatting and population of the SDB store: creates and
 * truncates the tables, splits the source data into TBox / ABox /
 * application-metadata graphs, and flags the reasoner for a recompute.
 * Indexes are dropped during the bulk load and re-added in the finally
 * block even if population fails partway.
 *
 * @param ctx            servlet context (used for path sets and reasoner flag)
 * @param store          the freshly formatted SDB store to populate
 * @param memModel       source assertions (may be RDB-backed)
 * @param inferenceModel source inferences (may be RDB-backed)
 */
public static void setupSDB(ServletContext ctx,
        Store store,
        Model memModel,
        Model inferenceModel) {
    store.getTableFormatter().create();
    store.getTableFormatter().truncate();
    store.getTableFormatter().dropIndexes(); // improve load performance
    try {
        // This is a one-time copy of stored KB data - from a Jena RDB store
        // to a Jena SDB store. In the process, we will also separate out
        // the TBox from the Abox; these are in the same graph in pre-1.2
        // VIVO versions and will now be stored and maintained in separate
        // models. Access to the Jena RDB data is through the
        // OntModelSelectors that have been set up earlier in the current
        // session by JenaPersistentDataSourceSetup.java. In the code
        // below, note that the current getABoxModel() methods on the
        // OntModelSelectors return a graph with both ABox and TBox data.
        OntModel submodels = ModelFactory.createOntologyModel(
                MEM_ONT_MODEL_SPEC);
        readOntologyFilesInPathSet(SUBMODELS, ctx, submodels);

        Model tboxAssertions = SDBFactory.connectNamedModel(
                store, JenaDataSourceSetupBase.JENA_TBOX_ASSERTIONS_MODEL);
        // initially putting the results in memory so we have a
        // cheaper way of computing the difference when we copy the ABox
        Model memTboxAssertions = ModelFactory.createDefaultModel();
        getTBoxModel(memModel, submodels, memTboxAssertions);
        tboxAssertions.add(memTboxAssertions);

        Model tboxInferences = SDBFactory.connectNamedModel(
                store, JenaDataSourceSetupBase.JENA_TBOX_INF_MODEL);
        // initially putting the results in memory so we have a
        // cheaper way of computing the difference when we copy the ABox
        Model memTboxInferences = ModelFactory.createDefaultModel();
        getTBoxModel(inferenceModel, submodels, memTboxInferences);
        tboxInferences.add(memTboxInferences);

        // ABox = everything in the source models that is not TBox.
        Model aboxAssertions = SDBFactory.connectNamedModel(
                store, JenaDataSourceSetupBase.JENA_DB_MODEL);
        copyDifference(memModel, memTboxAssertions, aboxAssertions);

        Model aboxInferences = SDBFactory.connectNamedModel(
                store, JenaDataSourceSetupBase.JENA_INF_MODEL);
        copyDifference(inferenceModel, memTboxInferences, aboxInferences);

        // Set up the application metadata model
        Model applicationMetadataModel = SDBFactory.connectNamedModel(
                store,
                JenaDataSourceSetupBase.JENA_APPLICATION_METADATA_MODEL);
        getAppMetadata(memModel, applicationMetadataModel);
        log.info("During initial SDB setup, created an application " +
                "metadata model of size " +
                applicationMetadataModel.size());

        // remove application metadata from ABox model
        aboxAssertions.remove(applicationMetadataModel);
        aboxInferences.remove(applicationMetadataModel);

        // Make sure the reasoner takes into account the newly-set-up data.
        SimpleReasonerSetup.setRecomputeRequired(ctx);
    } finally {
        log.info("Adding indexes to SDB database tables.");
        store.getTableFormatter().addIndexes();
        log.info("Indexes created.");
    }
}
/**
 * Pulls the assertion and inference models out of the legacy RDB store and
 * runs the one-time SDB conversion with them.
 */
private void migrateToSDBFromExistingRDBStore(ServletContext ctx,
        Store store) {
    Model rdbAssertions = makeDBModelFromConfigurationProperties(
            JENA_DB_MODEL, DB_ONT_MODEL_SPEC, ctx);
    Model rdbInferences = makeDBModelFromConfigurationProperties(
            JENA_INF_MODEL, DB_ONT_MODEL_SPEC, ctx);
    setupSDB(ctx, store, rdbAssertions, rdbInferences);
}
/**
 * Tests whether an SDB store has been formatted and populated for use.
 *
 * @param store the SDB store to inspect
 * @return true if the store is formatted AND the TBox assertions graph
 *         contains at least one statement; false otherwise, including
 *         when the size check itself fails
 */
private boolean isSetUp(Store store) throws SQLException {
    if (!(StoreUtils.isFormatted(store))) {
        return false;
    }
    // even if the store exists, it may be empty
    try {
        return (SDBFactory.connectNamedModel(
                store,
                JenaDataSourceSetupBase.JENA_TBOX_ASSERTIONS_MODEL))
                        .size() > 0;
    } catch (Exception e) {
        // Deliberate best-effort: any failure here means "not set up".
        return false;
    }
}
// Context attribute keys under which the SDB store description and the
// store itself are published for other startup listeners.
private static final String STOREDESC_ATTR = "storeDesc";
private static final String STORE_ATTR = "kbStore";

/** Publishes the SDB store description in the servlet context. */
public static void setApplicationStoreDesc(StoreDesc storeDesc,
        ServletContext ctx) {
    ctx.setAttribute(STOREDESC_ATTR, storeDesc);
}

/** @return the store description published earlier, or null if unset */
public static StoreDesc getApplicationStoreDesc(ServletContext ctx) {
    return (StoreDesc) ctx.getAttribute(STOREDESC_ATTR);
}

/** Publishes the SDB store in the servlet context. */
public static void setApplicationStore(Store store,
        ServletContext ctx) {
    ctx.setAttribute(STORE_ATTR, store);
}

/** @return the store published earlier, or null if unset */
public static Store getApplicationStore(ServletContext ctx) {
    return (Store) ctx.getAttribute(STORE_ATTR);
}
}

View file

@ -60,9 +60,9 @@ public class WebappDaoSDBSetup extends JenaDataSourceSetupBase
" seconds to set up SDB store");
//TODO remove this temporary development scaffolding
if (ConfigurationProperties.getBean(sce).getProperty(
if (true || ConfigurationProperties.getBean(sce).getProperty(
"VitroConnection.DataSource.endpointURI") != null) {
(new JenaDataSourceSetupSparql2()).contextInitialized(sce);
(new RDFServiceSetup()).contextInitialized(sce);
}
} catch (SQLException sqle) {
@ -115,10 +115,10 @@ public class WebappDaoSDBSetup extends JenaDataSourceSetupBase
StoreDesc storeDesc = getApplicationStoreDesc(ctx);
WebappDaoFactoryConfig config = new WebappDaoFactoryConfig();
config.setDefaultNamespace(getDefaultNamespace(ctx));
WebappDaoFactory baseWadf = new WebappDaoFactorySDB(
baseOms, bds, storeDesc, config,
WebappDaoFactorySDB.SDBDatasetMode.ASSERTIONS_ONLY);
ctx.setAttribute("assertionsWebappDaoFactory",baseWadf);
// WebappDaoFactory baseWadf = new WebappDaoFactorySDB(
// baseOms, bds, storeDesc, config,
// WebappDaoFactorySDB.SDBDatasetMode.ASSERTIONS_ONLY);
// ctx.setAttribute("assertionsWebappDaoFactory",baseWadf);
///////////////////////////////////////////////////////////////
//create inference webapp DAO factory
@ -130,23 +130,23 @@ public class WebappDaoSDBSetup extends JenaDataSourceSetupBase
inferenceOms.getTBoxModel()));
inferenceOms.setFullModel(inferenceUnion);
ModelContext.setInferenceOntModel(inferenceOms.getFullModel(), ctx);
WebappDaoFactory infWadf = new WebappDaoFactorySDB(
inferenceOms, bds, storeDesc, config,
WebappDaoFactorySDB.SDBDatasetMode.INFERENCES_ONLY);
ctx.setAttribute("deductionsWebappDaoFactory", infWadf);
// WebappDaoFactory infWadf = new WebappDaoFactorySDB(
// inferenceOms, bds, storeDesc, config,
// WebappDaoFactorySDB.SDBDatasetMode.INFERENCES_ONLY);
// ctx.setAttribute("deductionsWebappDaoFactory", infWadf);
///////////////////////////////////////////////////////////////
//create default union webapp DAO factory
OntModel masterUnion = ModelFactory.createOntologyModel(
DB_ONT_MODEL_SPEC, makeDBModel(
bds, WebappDaoFactorySDB.UNION_GRAPH,
DB_ONT_MODEL_SPEC, TripleStoreType.SDB, ctx));
unionOms.setFullModel(masterUnion);
ctx.setAttribute("jenaOntModel", masterUnion);
WebappDaoFactory wadf = new WebappDaoFactorySDB(
unionOms, bds, storeDesc, config);
ctx.setAttribute("webappDaoFactory",wadf);
// OntModel masterUnion = ModelFactory.createOntologyModel(
// DB_ONT_MODEL_SPEC, makeDBModel(
// bds, WebappDaoFactorySDB.UNION_GRAPH,
// DB_ONT_MODEL_SPEC, TripleStoreType.SDB, ctx));
// unionOms.setFullModel(masterUnion);
// ctx.setAttribute("jenaOntModel", masterUnion);
// WebappDaoFactory wadf = new WebappDaoFactorySDB(
// unionOms, bds, storeDesc, config);
// ctx.setAttribute("webappDaoFactory",wadf);
makeModelMakerFromConnectionProperties(TripleStoreType.RDB, ctx);
VitroJenaModelMaker vjmm = getVitroJenaModelMaker();