generic in-progress rdfapi-related work on dev branch

commit dfa3dfa65e (parent d1db16b0b4)
Author: brianjlowe
Date:   2012-04-24 15:39:40 +00:00

13 changed files with 1376 additions and 267 deletions

EmptyReifier.java

@@ -1,7 +1,10 @@
 package edu.cornell.mannlib.vitro.webapp.dao.jena;
+import java.util.ArrayList;
 import java.util.Collections;
+import org.apache.commons.collections.iterators.EmptyIterator;
 import com.hp.hpl.jena.graph.Graph;
 import com.hp.hpl.jena.graph.Node;
 import com.hp.hpl.jena.graph.Reifier;
@@ -48,12 +51,12 @@ public class EmptyReifier implements Reifier {
     @Override
     public ExtendedIterator<Triple> findEither(TripleMatch arg0, boolean arg1) {
-        return find(arg0);
+        return WrappedIterator.create(EmptyIterator.INSTANCE);
     }

     @Override
     public ExtendedIterator<Triple> findExposed(TripleMatch arg0) {
-        return find(arg0);
+        return WrappedIterator.create(EmptyIterator.INSTANCE);
     }

     @Override

PropertyDaoJena.java

@@ -657,7 +657,7 @@ public class PropertyDaoJena extends JenaBaseDao implements PropertyDao {
         relatedClasses.addAll(listSuperClasses(ontClass));
         for (OntClass relatedClass : relatedClasses) {
             // find properties in restrictions
-            if (relatedClass.isRestriction()) {
+            if (relatedClass.isRestriction() && relatedClass.canAs(Restriction.class)) {
                 // TODO: check if restriction is something like
                 // maxCardinality 0 or allValuesFrom owl:Nothing,
                 // in which case the property is NOT applicable!
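
The added canAs() check is the standard Jena guard before viewing a node as a Restriction. A minimal sketch of the conversion the guard makes safe (the variable and follow-on call are illustrative, not part of this commit):

    if (relatedClass.isRestriction() && relatedClass.canAs(Restriction.class)) {
        Restriction rest = relatedClass.as(Restriction.class);
        // now safe to inspect, e.g. rest.isAllValuesFromRestriction()
    }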

SparqlDataset.java (new file)

@@ -0,0 +1,59 @@
package edu.cornell.mannlib.vitro.webapp.dao.jena;
import java.util.Iterator;
import com.hp.hpl.jena.query.Dataset;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.shared.Lock;
import com.hp.hpl.jena.sparql.core.DatasetGraph;
public class SparqlDataset implements Dataset {
private String endpointURI;
public SparqlDataset(String endpointURI) {
this.endpointURI = endpointURI;
}
@Override
public DatasetGraph asDatasetGraph() {
// TODO Auto-generated method stub
return null;
}
@Override
public void close() {
// TODO Auto-generated method stub
}
@Override
public boolean containsNamedModel(String arg0) {
return true;
}
@Override
public Model getDefaultModel() {
// TODO Auto-generated method stub
return null;
}
@Override
public Lock getLock() {
// TODO Auto-generated method stub
return null;
}
@Override
public Model getNamedModel(String arg0) {
// TODO Auto-generated method stub
return null;
}
@Override
public Iterator<String> listNames() {
// TODO Auto-generated method stub
return null;
}
}
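
SparqlDataset is mostly a stub at this stage. One plausible way to fill in the model accessors, assuming the two SparqlGraph constructors added elsewhere in this commit, would be a sketch like the following (not committed code; ModelFactory is Jena's com.hp.hpl.jena.rdf.model.ModelFactory):

    @Override
    public Model getDefaultModel() {
        // union of all graphs at the endpoint
        return ModelFactory.createModelForGraph(new SparqlGraph(endpointURI));
    }

    @Override
    public Model getNamedModel(String graphURI) {
        // a single named graph at the endpoint
        return ModelFactory.createModelForGraph(new SparqlGraph(endpointURI, graphURI));
    }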

SparqlDatasetGraph.java (new file)

@@ -0,0 +1,265 @@
package edu.cornell.mannlib.vitro.webapp.dao.jena;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import org.openrdf.model.Resource;
import org.openrdf.repository.Repository;
import org.openrdf.repository.RepositoryConnection;
import org.openrdf.repository.RepositoryException;
import org.openrdf.repository.RepositoryResult;
import org.openrdf.repository.http.HTTPRepository;
import com.hp.hpl.jena.graph.Graph;
import com.hp.hpl.jena.graph.Node;
import com.hp.hpl.jena.graph.Triple;
import com.hp.hpl.jena.query.Query;
import com.hp.hpl.jena.query.QueryExecution;
import com.hp.hpl.jena.query.QueryExecutionFactory;
import com.hp.hpl.jena.query.QueryFactory;
import com.hp.hpl.jena.query.QuerySolution;
import com.hp.hpl.jena.query.ResultSet;
import com.hp.hpl.jena.rdf.model.ResourceFactory;
import com.hp.hpl.jena.shared.Lock;
import com.hp.hpl.jena.sparql.core.DatasetGraph;
import com.hp.hpl.jena.sparql.core.Quad;
import com.hp.hpl.jena.sparql.resultset.ResultSetMem;
import com.hp.hpl.jena.sparql.util.Context;
import com.hp.hpl.jena.sparql.util.NodeFactory;
import com.hp.hpl.jena.util.iterator.SingletonIterator;
import com.hp.hpl.jena.util.iterator.WrappedIterator;
public class SparqlDatasetGraph implements DatasetGraph {
private String endpointURI;
private Repository repository;
public SparqlDatasetGraph(String endpointURI) {
this.endpointURI = endpointURI;
this.repository = new HTTPRepository(endpointURI);
}
private Graph getGraphFor(Quad q) {
return getGraphFor(q.getGraph());
}
private Graph getGraphFor(Node g) {
return (g == Node.ANY)
? new SparqlGraph(endpointURI)
: new SparqlGraph(endpointURI, g.getURI());
}
@Override
public void add(Quad arg0) {
getGraphFor(arg0).add(new Triple(arg0.getSubject(), arg0.getPredicate(), arg0.getObject()));
}
@Override
public void addGraph(Node arg0, Graph arg1) {
// TODO Auto-generated method stub
}
@Override
public void close() {
// TODO Auto-generated method stub
}
@Override
public boolean contains(Quad arg0) {
return getGraphFor(arg0).contains(new Triple(arg0.getSubject(), arg0.getPredicate(), arg0.getObject()));
}
@Override
public boolean contains(Node arg0, Node arg1, Node arg2, Node arg3) {
return getGraphFor(arg0).contains(arg1, arg2, arg3);
}
@Override
public boolean containsGraph(Node arg0) {
// TODO Auto-generated method stub
return true;
}
@Override
public void delete(Quad arg0) {
getGraphFor(arg0).delete(new Triple(arg0.getSubject(), arg0.getPredicate(), arg0.getObject()));
}
@Override
public void deleteAny(Node arg0, Node arg1, Node arg2, Node arg3) {
// TODO check this
getGraphFor(arg0).delete(new Triple(arg1, arg2, arg3));
}
@Override
public Iterator<Quad> find() {
return find(Node.ANY, Node.ANY, Node.ANY, Node.ANY);
}
@Override
public Iterator<Quad> find(Quad arg0) {
return find(arg0.getSubject(), arg0.getPredicate(), arg0.getObject(), arg0.getGraph());
}
@Override
public Iterator<Quad> find(Node graph, Node subject, Node predicate, Node object) {
if (!isVar(subject) && !isVar(predicate) && !isVar(object) &&!isVar(graph)) {
if (contains(subject, predicate, object, graph)) {
return new SingletonIterator(new Triple(subject, predicate, object));
} else {
return WrappedIterator.create(Collections.EMPTY_LIST.iterator());
}
}
StringBuffer findQuery = new StringBuffer("SELECT * WHERE { \n");
String graphURI = !isVar(graph) ? graph.getURI() : null;
findQuery.append(" GRAPH ");
if (graphURI != null) {
findQuery.append(" <" + graphURI + ">");
} else {
findQuery.append("?g");
}
findQuery.append(" { ");
findQuery.append(SparqlGraph.sparqlNode(subject, "?s"))
.append(" ")
.append(SparqlGraph.sparqlNode(predicate, "?p"))
.append(" ")
.append(SparqlGraph.sparqlNode(object, "?o"));
findQuery.append(" } ");
findQuery.append("\n}");
//log.info(findQuery.toString());
ResultSet rs = execSelect(findQuery.toString());
//rs = execSelect(findQuery.toString());
//rs = execSelect(findQuery.toString());
List<Quad> quadlist = new ArrayList<Quad>();
while (rs.hasNext()) {
QuerySolution soln = rs.nextSolution();
Quad q = new Quad(isVar(graph) ? soln.get("?g").asNode() : graph,
isVar(subject) ? soln.get("?s").asNode() : subject,
isVar(predicate) ? soln.get("?p").asNode() : predicate,
isVar(object) ? soln.get("?o").asNode() : object);
//log.info(t);
quadlist.add(q);
}
//log.info(triplist.size() + " results");
return WrappedIterator.create(quadlist.iterator());
}
@Override
public Iterator<Quad> findNG(Node arg0, Node arg1, Node arg2, Node arg3) {
// TODO check this
return find(arg0, arg1, arg2, arg3);
}
@Override
public Context getContext() {
// TODO Auto-generated method stub
return null;
}
@Override
public Graph getDefaultGraph() {
return new SparqlGraph(endpointURI);
}
@Override
public Graph getGraph(Node arg0) {
return new SparqlGraph(endpointURI, arg0.getURI());
}
@Override
public Lock getLock() {
// TODO Auto-generated method stub
return null;
}
@Override
public boolean isEmpty() {
// TODO Auto-generated method stub
return false;
}
@Override
public Iterator<Node> listGraphNodes() {
List<Node> graphNodeList = new ArrayList<Node>();
try {
RepositoryConnection conn = getConnection();
try {
RepositoryResult<Resource> conResult = conn.getContextIDs();
while (conResult.hasNext()) {
Resource con = conResult.next();
graphNodeList.add(Node.createURI(con.stringValue()));
}
} finally {
conn.close();
}
} catch (RepositoryException re) {
throw new RuntimeException(re);
}
return graphNodeList.iterator();
}
private RepositoryConnection getConnection() {
try {
return this.repository.getConnection();
} catch (RepositoryException e) {
throw new RuntimeException(e);
}
}
@Override
public void removeGraph(Node arg0) {
// TODO Auto-generated method stub
}
@Override
public void setDefaultGraph(Graph arg0) {
// TODO Auto-generated method stub
}
@Override
public long size() {
// TODO Auto-generated method stub
return 0;
}
private boolean isVar(Node node) {
return (node == null || node.isVariable() || node == Node.ANY);
}
private ResultSet execSelect(String queryStr) {
// long startTime1 = System.currentTimeMillis();
// try {
//
// RepositoryConnection conn = getConnection();
// try {
// GraphQuery q = conn.prepareGraphQuery(QueryLanguage.SPARQL, queryStr);
// q.evaluate();
// } catch (MalformedQueryException e) {
// throw new RuntimeException(e);
// } finally {
// conn.close();
// }
// } catch (Exception re) {
// //log.info(re,re);
// }
// log.info((System.currentTimeMillis() - startTime1) + " to execute via sesame");
long startTime = System.currentTimeMillis();
Query askQuery = QueryFactory.create(queryStr);
QueryExecution qe = QueryExecutionFactory.sparqlService(endpointURI, askQuery);
try {
return new ResultSetMem(qe.execSelect());
} finally {
//log.info((System.currentTimeMillis() - startTime) + " to execute via Jena");
qe.close();
}
}
}
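
A quick way to exercise the new DatasetGraph against a live endpoint (the URL below is a placeholder, not from this commit):

    DatasetGraph dsg = new SparqlDatasetGraph("http://localhost:8080/vivo/sparql");
    Iterator<Quad> quads = dsg.find(); // all quads in all graphs
    while (quads.hasNext()) {
        System.out.println(quads.next());
    }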

SparqlGraph.java

@@ -6,7 +6,6 @@ import java.util.List;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.openrdf.query.GraphQuery;
 import org.openrdf.query.MalformedQueryException;
 import org.openrdf.query.QueryLanguage;
 import org.openrdf.query.Update;
@@ -27,7 +26,6 @@ import com.hp.hpl.jena.graph.TransactionHandler;
 import com.hp.hpl.jena.graph.Triple;
 import com.hp.hpl.jena.graph.TripleMatch;
 import com.hp.hpl.jena.graph.impl.GraphWithPerform;
-import com.hp.hpl.jena.graph.impl.SimpleBulkUpdateHandler;
 import com.hp.hpl.jena.graph.impl.SimpleEventManager;
 import com.hp.hpl.jena.graph.query.QueryHandler;
 import com.hp.hpl.jena.graph.query.SimpleQueryHandler;
@@ -49,6 +47,7 @@ import com.hp.hpl.jena.util.iterator.WrappedIterator;
 public class SparqlGraph implements GraphWithPerform {
     private String endpointURI;
+    private String graphURI;
     private static final Log log = LogFactory.getLog(SparqlGraph.class);
     private BulkUpdateHandler bulkUpdateHandler;
@@ -61,8 +60,22 @@ public class SparqlGraph implements GraphWithPerform {
     private Repository repository;

+    /**
+     * Returns a SparqlGraph for the union of named graphs in a remote repository
+     * @param endpointURI
+     */
     public SparqlGraph(String endpointURI) {
+        this(endpointURI, null);
+    }
+
+    /**
+     * Returns a SparqlGraph for a particular named graph in a remote repository
+     * @param endpointURI
+     * @param graphURI
+     */
+    public SparqlGraph(String endpointURI, String graphURI) {
         this.endpointURI = endpointURI;
+        this.graphURI = graphURI;
         this.repository = new HTTPRepository(endpointURI);
     }
@@ -84,21 +97,11 @@ public class SparqlGraph implements GraphWithPerform {
         //log.info("adding " + t);
-        String updateString = "INSERT DATA { GRAPH <junk:junk> { "
+        String updateString = "INSERT DATA { " + ((graphURI != null) ? "GRAPH <" + graphURI + "> { " : "" )
                 + sparqlNode(t.getSubject(), "") + " "
                 + sparqlNode(t.getPredicate(), "") + " "
-                + sparqlNode(t.getObject(), "") +
-                " } }";
+                + sparqlNode(t.getObject(), "") + " } "
+                + ((graphURI != null) ? " } " : "");
-        if (false) {
-            try {
-                throw new RuntimeException("Breakpoint");
-            } catch (RuntimeException e) {
-                log.error(e, e);
-                //throw(e);
-            }
-        }
         //log.info(updateString);
@@ -122,15 +125,14 @@ public class SparqlGraph implements GraphWithPerform {
     @Override
     public void performDelete(Triple t) {
-        log.info ("************** DELETE!!!!! ********************");
-        String updateString = "DELETE DATA { GRAPH <junk:junk> { "
+        String updateString = "DELETE DATA { " + ((graphURI != null) ? "GRAPH <" + graphURI + "> { " : "" )
                 + sparqlNode(t.getSubject(), "") + " "
                 + sparqlNode(t.getPredicate(), "") + " "
-                + sparqlNode(t.getObject(), "") +
-                " } }";
+                + sparqlNode(t.getObject(), "") + " } "
+                + ((graphURI != null) ? " } " : "");
-        log.info(updateString);
+        //log.info(updateString);
         try {
             RepositoryConnection conn = getConnection();
@@ -164,19 +166,26 @@ public class SparqlGraph implements GraphWithPerform {
         if (subject.isBlank() || predicate.isBlank() || object.isBlank()) {
             return false;
         }
-        StringBuffer containsQuery = new StringBuffer("ASK { \n")
-                .append(sparqlNode(subject, "?s"))
+        StringBuffer containsQuery = new StringBuffer("ASK { \n");
+        if (graphURI != null) {
+            containsQuery.append(" GRAPH <" + graphURI + "> { ");
+        }
+        containsQuery.append(sparqlNode(subject, "?s"))
                 .append(" ")
                 .append(sparqlNode(predicate, "?p"))
                 .append(" ")
-                .append(sparqlNode(object, "?o"))
-                .append("\n}");
-        return execAsk(containsQuery.toString());
+                .append(sparqlNode(object, "?o"));
+        if (graphURI != null) {
+            containsQuery.append(" } \n");
+        }
+        containsQuery.append("\n}");
+        boolean result = execAsk(containsQuery.toString());
+        return result;
     }

     @Override
     public void delete(Triple arg0) throws DeleteDeniedException {
-        log.info("********************** DELETE!!!!!! ************************");
+        //log.info("********************** DELETE!!!!!! ************************");
         performDelete(arg0);
     }
@@ -192,7 +201,7 @@ public class SparqlGraph implements GraphWithPerform {
         return find(t.getSubject(), t.getPredicate(), t.getObject());
     }

-    private String sparqlNode(Node node, String varName) {
+    public static String sparqlNode(Node node, String varName) {
         if (node == null || node.isVariable()) {
             return varName;
         } else if (node.isBlank()) {
@@ -202,7 +211,9 @@ public class SparqlGraph implements GraphWithPerform {
             return uriBuff.append("<").append(node.getURI()).append(">").toString();
         } else if (node.isLiteral()) {
             StringBuffer literalBuff = new StringBuffer();
-            literalBuff.append("\"").append(node.getLiteralLexicalForm()).append("\"");
+            literalBuff.append("\"");
+            pyString(literalBuff, node.getLiteralLexicalForm());
+            literalBuff.append("\"");
             if (node.getLiteralDatatypeURI() != null) {
                 literalBuff.append("^^<").append(node.getLiteralDatatypeURI()).append(">");
             } else if (node.getLiteralLanguage() != null && node.getLiteralLanguage() != "") {
@@ -223,15 +234,19 @@ public class SparqlGraph implements GraphWithPerform {
                 return WrappedIterator.create(Collections.EMPTY_LIST.iterator());
             }
         }
-        StringBuffer findQuery = new StringBuffer("SELECT * WHERE { \n")
-                .append(sparqlNode(subject, "?s"))
+        StringBuffer findQuery = new StringBuffer("SELECT * WHERE { \n");
+        if (graphURI != null) {
+            findQuery.append(" GRAPH <" + graphURI + "> { ");
+        }
+        findQuery.append(sparqlNode(subject, "?s"))
                 .append(" ")
                 .append(sparqlNode(predicate, "?p"))
                 .append(" ")
-                .append(sparqlNode(object, "?o"))
-                .append("\n}");
+                .append(sparqlNode(object, "?o"));
+        if (graphURI != null) {
+            findQuery.append(" } ");
+        }
+        findQuery.append("\n}");
-        //log.info(findQuery.toString());
         ResultSet rs = execSelect(findQuery.toString());
         //rs = execSelect(findQuery.toString());
         //rs = execSelect(findQuery.toString());
@@ -311,6 +326,7 @@ public class SparqlGraph implements GraphWithPerform {
     @Override
     public boolean isIsomorphicWith(Graph arg0) {
+        log.info("Hey dummy!");
         throw new UnsupportedOperationException("isIsomorphicWith() not supported " +
                 "by SPARQL graphs");
     }
@@ -325,7 +341,8 @@ public class SparqlGraph implements GraphWithPerform {
     @Override
     public int size() {
-        return find(null, null, null).toList().size();
+        int size = find(null, null, null).toList().size();
+        return size;
     }

     private final static Capabilities capabilities = new Capabilities() {
@@ -408,5 +425,44 @@ public class SparqlGraph implements GraphWithPerform {
         }
     }

+    /*
+     * see http://www.python.org/doc/2.5.2/ref/strings.html
+     * or see jena's n3 grammar jena/src/com/hp/hpl/jena/n3/n3.g
+     */
+    protected static void pyString(StringBuffer sbuff, String s) {
+        for (int i = 0; i < s.length(); i++) {
+            char c = s.charAt(i);
+            // Escape escapes and quotes
+            if (c == '\\' || c == '"') {
+                sbuff.append('\\');
+                sbuff.append(c);
+                continue;
+            }
+            // Whitespace
+            if (c == '\n') { sbuff.append("\\n"); continue; }
+            if (c == '\t') { sbuff.append("\\t"); continue; }
+            if (c == '\r') { sbuff.append("\\r"); continue; }
+            if (c == '\f') { sbuff.append("\\f"); continue; }
+            if (c == '\b') { sbuff.append("\\b"); continue; }
+            if (c == 7)    { sbuff.append("\\a"); continue; }
+            // Output as is (subject to UTF-8 encoding on output that is)
+            sbuff.append(c);
+            // // Unicode escapes
+            // // c < 32, c >= 127, not whitespace or other specials
+            // String hexstr = Integer.toHexString(c).toUpperCase();
+            // int pad = 4 - hexstr.length();
+            // sbuff.append("\\u");
+            // for (; pad > 0; pad--)
+            //     sbuff.append("0");
+            // sbuff.append(hexstr);
+        }
+    }
 }
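
The new pyString() escaping is what keeps quotes, backslashes, and newlines in literal values from breaking the SPARQL that sparqlNode() generates. Illustrative use (pyString is protected, so this assumes a caller in the same package):

    StringBuffer buf = new StringBuffer();
    SparqlGraph.pyString(buf, "say \"hi\"\n");
    // buf now holds: say \"hi\"\n -- safe inside a double-quoted SPARQL literal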

WebappDaoFactorySDB.java

@@ -49,6 +49,14 @@ public class WebappDaoFactorySDB extends WebappDaoFactoryJena {
         this.dwf = new StaticDatasetFactory(dataset);
     }

+    public WebappDaoFactorySDB(OntModelSelector ontModelSelector,
+                               Dataset dataset,
+                               WebappDaoFactoryConfig config, SDBDatasetMode datasetMode) {
+        super(ontModelSelector, config);
+        this.dwf = new StaticDatasetFactory(dataset);
+        this.datasetMode = datasetMode;
+    }
+
     /**
      * For use when any Dataset access should get a temporary DB connection
      * from a pool
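
The new four-argument constructor is exercised later in this same commit by JenaDataSourceSetupSparql2, e.g.:

    WebappDaoFactory baseWadf = new WebappDaoFactorySDB(
            baseOms, dataset, config,
            WebappDaoFactorySDB.SDBDatasetMode.ASSERTIONS_ONLY);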

WebappDaoFactorySDBPrep.java

@@ -44,6 +44,7 @@ import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactory;
 import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactoryConfig;
 import edu.cornell.mannlib.vitro.webapp.dao.jena.OntModelSelector;
 import edu.cornell.mannlib.vitro.webapp.dao.jena.SingleContentOntModelSelector;
+import edu.cornell.mannlib.vitro.webapp.dao.jena.SparqlDatasetGraph;
 import edu.cornell.mannlib.vitro.webapp.dao.jena.SparqlGraphMultilingual;
 import edu.cornell.mannlib.vitro.webapp.dao.jena.WebappDaoFactoryJena;
 import edu.cornell.mannlib.vitro.webapp.dao.jena.WebappDaoFactorySDB;
@@ -197,8 +198,11 @@ public class WebappDaoFactorySDBPrep implements Filter {
         OntModel om = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM, m);
         oms = new SingleContentOntModelSelector(om, oms.getDisplayModel(), oms.getUserAccountsModel());

-        DataSource dataset = DatasetFactory.create();
-        dataset.addNamedModel("fake:fake", m);
+        Dataset dataset = DatasetFactory.create(new SparqlDatasetGraph(endpointURI));
+        //DataSource datasource = DatasetFactory.create();
+        //datasource.addNamedModel("fake:fake", m);
+        //dataset = datasource;

         WebappDaoFactory wadf = new WebappDaoFactoryJena(oms, config);
         //wadf = new WebappDaoFactorySDB(oms, dataset, config);

WebappDaoFactorySparqlPrep.java

@@ -44,6 +44,7 @@ import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactory;
 import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactoryConfig;
 import edu.cornell.mannlib.vitro.webapp.dao.jena.OntModelSelector;
 import edu.cornell.mannlib.vitro.webapp.dao.jena.SingleContentOntModelSelector;
+import edu.cornell.mannlib.vitro.webapp.dao.jena.SparqlDatasetGraph;
 import edu.cornell.mannlib.vitro.webapp.dao.jena.SparqlGraphMultilingual;
 import edu.cornell.mannlib.vitro.webapp.dao.jena.WebappDaoFactoryJena;
 import edu.cornell.mannlib.vitro.webapp.servlet.setup.JenaDataSourceSetupBase;
@@ -149,9 +150,11 @@ public class WebappDaoFactorySparqlPrep implements Filter {
         OntModel om = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM, m);
         oms = new SingleContentOntModelSelector(om, oms.getDisplayModel(), oms.getUserAccountsModel());

-        DataSource datasource = DatasetFactory.create();
-        datasource.addNamedModel("fake:fake", m);
-        dataset = datasource;
+        dataset = DatasetFactory.create(new SparqlDatasetGraph(endpointURI));
+        //DataSource datasource = DatasetFactory.create();
+        //datasource.addNamedModel("fake:fake", m);
+        //dataset = datasource;

         wadf = new WebappDaoFactoryJena(oms, config);
         //wadf = new WebappDaoFactorySDB(oms, dataset, config);

FileGraphSetup.java

@@ -19,18 +19,16 @@ import javax.servlet.ServletContextListener;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;

-import com.hp.hpl.jena.graph.Node;
 import com.hp.hpl.jena.ontology.OntDocumentManager;
 import com.hp.hpl.jena.ontology.OntModel;
 import com.hp.hpl.jena.ontology.OntModelSpec;
+import com.hp.hpl.jena.query.Dataset;
 import com.hp.hpl.jena.rdf.model.Model;
 import com.hp.hpl.jena.rdf.model.ModelFactory;
-import com.hp.hpl.jena.sdb.SDBFactory;
-import com.hp.hpl.jena.sdb.Store;
-import com.hp.hpl.jena.sdb.util.StoreUtils;

 import edu.cornell.mannlib.vitro.webapp.dao.jena.ModelContext;
 import edu.cornell.mannlib.vitro.webapp.dao.jena.OntModelSelector;
+import edu.cornell.mannlib.vitro.webapp.dao.jena.SparqlGraph;

 // This ContextListener must run after the JenaDataSourceSetup ContextListener
@@ -53,26 +51,26 @@ public class FileGraphSetup implements ServletContextListener {
             OntDocumentManager.getInstance().setProcessImports(true);
             baseOms = ModelContext.getBaseOntModelSelector(sce.getServletContext());
-            Store kbStore = (Store) sce.getServletContext().getAttribute("kbStore");
+            Dataset dataset = JenaDataSourceSetupBase.getStartupDataset(sce.getServletContext());

             // ABox files
             Set<String> pathSet = sce.getServletContext().getResourcePaths(PATH_ROOT + ABOX);
-            cleanupDB(kbStore, pathToURI(pathSet, ABOX), ABOX);
+            cleanupDB(dataset, pathToURI(pathSet, ABOX), ABOX);

             if (pathSet != null) {
                 OntModel aboxBaseModel = baseOms.getABoxModel();
-                aboxChanged = readGraphs(sce, pathSet, kbStore, ABOX, aboxBaseModel);
+                aboxChanged = readGraphs(sce, pathSet, dataset, ABOX, aboxBaseModel);
             }

             // TBox files
             pathSet = sce.getServletContext().getResourcePaths(PATH_ROOT + TBOX);
-            cleanupDB(kbStore, pathToURI(pathSet, TBOX),TBOX);
+            cleanupDB(dataset, pathToURI(pathSet, TBOX),TBOX);

             if (pathSet != null) {
                 OntModel tboxBaseModel = baseOms.getTBoxModel();
-                tboxChanged = readGraphs(sce, pathSet, kbStore, TBOX, tboxBaseModel);
+                tboxChanged = readGraphs(sce, pathSet, dataset, TBOX, tboxBaseModel);
             }
         } catch (ClassCastException cce) {
             String errMsg = "Unable to cast servlet context attribute to the appropriate type " + cce.getLocalizedMessage();
@@ -104,7 +102,7 @@ public class FileGraphSetup implements ServletContextListener {
      * Note: no connection needs to be maintained between the in-memory copy of the
      * graph and the DB copy.
      */
-    public boolean readGraphs(ServletContextEvent sce, Set<String> pathSet, Store kbStore, String type, OntModel baseModel) {
+    public boolean readGraphs(ServletContextEvent sce, Set<String> pathSet, Dataset dataset, String type, OntModel baseModel) {

         int count = 0;
@@ -134,7 +132,7 @@ public class FileGraphSetup implements ServletContextListener {
                     log.info("Attached file graph as " + type + " submodel " + p);
                 }

-                modelChanged = modelChanged | updateGraphInDB(kbStore, model, type, p);
+                modelChanged = modelChanged | updateGraphInDB(dataset, model, type, p);

             } catch (Exception ioe) {
                 log.error("Unable to process file graph " + p, ioe);
@@ -168,16 +166,32 @@ public class FileGraphSetup implements ServletContextListener {
      * Otherwise, if a graph with the given name is in the DB and is isomorphic with
      * the graph that was read from the files system, then do nothing.
      */
-    public boolean updateGraphInDB(Store kbStore, Model fileModel, String type, String path) {
+    public boolean updateGraphInDB(Dataset dataset, Model fileModel, String type, String path) {
         String graphURI = pathToURI(path,type);
-        Model dbModel = SDBFactory.connectNamedModel(kbStore, graphURI);
+        Model dbModel = dataset.getNamedModel(graphURI);
         boolean modelChanged = false;
+        boolean isIsomorphic = dbModel.isIsomorphicWith(fileModel);

         if (dbModel.isEmpty() ) {
             dbModel.add(fileModel);
             modelChanged = true;
-        } else if (!dbModel.isIsomorphicWith(fileModel)) {
+        } else if (!isIsomorphic) {
+            System.out.println("==================================================");
+            System.out.println("Remove the following print statement from FileGraphSetup.java");
+            System.out.println("Updating " + path + " because graphs are not isomorphic");
+            log.info("Updating " + path + " because graphs are not isomorphic");
+            log.info("dbModel: " + dbModel.size() + " ; fileModel: " + fileModel.size());
+            System.out.println("--------------------");
+            System.out.println("fileModel - dbModel:");
+            System.out.println("--------------------");
+            fileModel.difference(dbModel).write(System.out);
+            System.out.println("--------------------");
+            System.out.println("dbModel - fileModel:");
+            System.out.println("--------------------");
+            dbModel.difference(fileModel).write(System.out);
             dbModel.removeAll();
             dbModel.add(fileModel);
             modelChanged = true;
@@ -196,21 +210,21 @@ public class FileGraphSetup implements ServletContextListener {
      * @param type (input) - abox or tbox.
      * @param kbStore (output) - the SDB store for the application
      */
-    public void cleanupDB(Store kbStore, Set<String> uriSet, String type) {
+    public void cleanupDB(Dataset dataset, Set<String> uriSet, String type) {

         Pattern graphURIPat = Pattern.compile("^" + FILEGRAPH_URI_ROOT + type);

-        Iterator<Node> iter = StoreUtils.storeGraphNames(kbStore);
+        Iterator<String> iter = dataset.listNames();

         while (iter.hasNext()) {
-            Node node = iter.next();
-            Matcher matcher = graphURIPat.matcher(node.getURI());
+            String graphURI = iter.next();
+            Matcher matcher = graphURIPat.matcher(graphURI);

             if (matcher.find()) {
-                if (!uriSet.contains(node.getURI())) {
-                    Model model = SDBFactory.connectNamedModel(kbStore, node.getURI());
+                if (!uriSet.contains(graphURI)) {
+                    Model model = dataset.getNamedModel(graphURI);
                     model.removeAll(); // delete the graph from the DB
-                    log.info("Removed " + type + " file graph " + node.getURI() + " from the DB store because the file no longer exists in the file system");
+                    log.info("Removed " + type + " file graph " + graphURI + " from the DB store because the file no longer exists in the file system");
                 }
             }
         }
@@ -256,8 +270,8 @@ public class FileGraphSetup implements ServletContextListener {
         // nothing to do
     }

     private static boolean isUpdateRequired(ServletContext ctx) {
         return (ctx.getAttribute(UpdateKnowledgeBase.KBM_REQURIED_AT_STARTUP) != null);
     }
 }

JenaDataSourceSetup.java

@@ -72,7 +72,7 @@ public class JenaDataSourceSetup extends JenaDataSourceSetupBase
         // TODO remove me
         if (ConfigurationProperties.getBean(ctx).getProperty(
                 "VitroConnection.DataSource.endpointURI") != null) {
-            (new JenaDataSourceSetupSparql()).contextInitialized(sce);
+            (new JenaDataSourceSetupSparql2()).contextInitialized(sce);
             return;
         }
@@ -126,6 +126,7 @@ public class JenaDataSourceSetup extends JenaDataSourceSetupBase
         Store store = connectStore(bds, storeDesc);
         setApplicationStore(store, ctx);
+        setStartupDataset(SDBFactory.connectDataset(store), ctx);

         if (!isSetUp(store)) {
             log.info("Initializing SDB store");
JenaDataSourceSetupBase.java

@@ -18,6 +18,7 @@ import org.apache.commons.logging.LogFactory;
 import com.hp.hpl.jena.graph.Graph;
 import com.hp.hpl.jena.ontology.OntModel;
 import com.hp.hpl.jena.ontology.OntModelSpec;
+import com.hp.hpl.jena.query.Dataset;
 import com.hp.hpl.jena.rdf.model.Model;
 import com.hp.hpl.jena.rdf.model.ModelFactory;
 import com.hp.hpl.jena.sdb.StoreDesc;
@@ -524,4 +525,13 @@ public class JenaDataSourceSetupBase extends JenaBaseDaoCon {
                 "VitroConnection.DataSource.validationQuery", "SELECT 1");
     }

+    public static void setStartupDataset(Dataset dataset, ServletContext ctx) {
+        ctx.setAttribute("startupDataset", dataset);
+    }
+
+    public static Dataset getStartupDataset(ServletContext ctx) {
+        Object o = ctx.getAttribute("startupDataset");
+        return (o instanceof Dataset) ? ((Dataset) o) : null;
+    }
 }
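
The startup dataset travels between context listeners as a plain servlet context attribute, and getStartupDataset() returns null rather than throwing when nothing has been stored. Callers should check for that, as in this sketch of the lookup FileGraphSetup now performs:

    Dataset dataset = JenaDataSourceSetupBase.getStartupDataset(sce.getServletContext());
    if (dataset == null) {
        // setup listener has not stored a dataset; handle accordingly
    }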

JenaDataSourceSetupSparql.java

@@ -18,6 +18,8 @@ import org.apache.commons.logging.LogFactory;
 import com.hp.hpl.jena.graph.Graph;
 import com.hp.hpl.jena.ontology.OntModel;
 import com.hp.hpl.jena.ontology.OntModelSpec;
+import com.hp.hpl.jena.query.Dataset;
+import com.hp.hpl.jena.query.DatasetFactory;
 import com.hp.hpl.jena.query.Query;
 import com.hp.hpl.jena.query.QueryExecution;
 import com.hp.hpl.jena.query.QueryExecutionFactory;
@@ -52,6 +54,7 @@ import edu.cornell.mannlib.vitro.webapp.dao.jena.ModelContext;
 import edu.cornell.mannlib.vitro.webapp.dao.jena.OntModelSelector;
 import edu.cornell.mannlib.vitro.webapp.dao.jena.OntModelSelectorImpl;
 import edu.cornell.mannlib.vitro.webapp.dao.jena.SingleContentOntModelSelector;
+import edu.cornell.mannlib.vitro.webapp.dao.jena.SparqlDatasetGraph;
 import edu.cornell.mannlib.vitro.webapp.dao.jena.SparqlGraph;
 import edu.cornell.mannlib.vitro.webapp.dao.jena.VitroJenaModelMaker;
 import edu.cornell.mannlib.vitro.webapp.dao.jena.VitroJenaSDBModelMaker;
@@ -109,6 +112,8 @@ public class JenaDataSourceSetupSparql extends JenaDataSourceSetupBase
                 ctx).getProperty("VitroConnection.DataSource.endpointURI");

         Graph g = new SparqlGraph(endpointURI);
+        Dataset dataset = DatasetFactory.create(new SparqlDatasetGraph(endpointURI));
+        setStartupDataset(dataset, ctx);

         Model m = ModelFactory.createModelForGraph(g);
         OntModel om = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM, m);

JenaDataSourceSetupSparql2.java (new file)

@@ -0,0 +1,681 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.servlet.setup;
import static edu.cornell.mannlib.vitro.webapp.dao.DisplayVocabulary.DISPLAY_ONT_MODEL;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import javax.servlet.ServletContext;
import javax.servlet.ServletContextEvent;
import org.apache.commons.dbcp.BasicDataSource;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.hp.hpl.jena.graph.Graph;
import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.ontology.OntModelSpec;
import com.hp.hpl.jena.query.Dataset;
import com.hp.hpl.jena.query.DatasetFactory;
import com.hp.hpl.jena.query.Query;
import com.hp.hpl.jena.query.QueryExecution;
import com.hp.hpl.jena.query.QueryExecutionFactory;
import com.hp.hpl.jena.query.QueryFactory;
import com.hp.hpl.jena.query.Syntax;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.ResIterator;
import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.rdf.model.ResourceFactory;
import com.hp.hpl.jena.rdf.model.Statement;
import com.hp.hpl.jena.rdf.model.StmtIterator;
import com.hp.hpl.jena.sdb.SDB;
import com.hp.hpl.jena.sdb.SDBFactory;
import com.hp.hpl.jena.sdb.Store;
import com.hp.hpl.jena.sdb.StoreDesc;
import com.hp.hpl.jena.sdb.sql.SDBConnection;
import com.hp.hpl.jena.sdb.store.DatabaseType;
import com.hp.hpl.jena.sdb.store.LayoutType;
import com.hp.hpl.jena.sdb.util.StoreUtils;
import com.hp.hpl.jena.shared.Lock;
import com.hp.hpl.jena.util.ResourceUtils;
import com.hp.hpl.jena.util.iterator.ClosableIterator;
import com.hp.hpl.jena.vocabulary.RDF;
import edu.cornell.mannlib.vitro.webapp.config.ConfigurationProperties;
import edu.cornell.mannlib.vitro.webapp.dao.VitroVocabulary;
import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactory;
import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactoryConfig;
import edu.cornell.mannlib.vitro.webapp.dao.jena.JenaModelUtils;
import edu.cornell.mannlib.vitro.webapp.dao.jena.ModelContext;
import edu.cornell.mannlib.vitro.webapp.dao.jena.ModelSynchronizer;
import edu.cornell.mannlib.vitro.webapp.dao.jena.OntModelSelector;
import edu.cornell.mannlib.vitro.webapp.dao.jena.OntModelSelectorImpl;
import edu.cornell.mannlib.vitro.webapp.dao.jena.SparqlDatasetGraph;
import edu.cornell.mannlib.vitro.webapp.dao.jena.SparqlGraph;
import edu.cornell.mannlib.vitro.webapp.dao.jena.VitroJenaModelMaker;
import edu.cornell.mannlib.vitro.webapp.dao.jena.VitroJenaSDBModelMaker;
import edu.cornell.mannlib.vitro.webapp.dao.jena.VitroModelSource;
import edu.cornell.mannlib.vitro.webapp.dao.jena.WebappDaoFactorySDB;
import edu.cornell.mannlib.vitro.webapp.startup.StartupStatus;
import edu.cornell.mannlib.vitro.webapp.utils.jena.InitialJenaModelUtils;
public class JenaDataSourceSetupSparql2 extends JenaDataSourceSetupBase
implements javax.servlet.ServletContextListener {
private static final Log log = LogFactory.getLog(JenaDataSourceSetup.class);
@Override
public void contextInitialized(ServletContextEvent sce) {
ServletContext ctx = sce.getServletContext();
StartupStatus ss = StartupStatus.getBean(ctx);
try {
long startTime = System.currentTimeMillis();
setUpJenaDataSource(ctx);
log.info((System.currentTimeMillis() - startTime) / 1000 +
" seconds to set up SDB store");
} catch (SQLException sqle) {
// SQL exceptions are fatal and should halt startup
log.error("Error using SQL database; startup aborted.", sqle);
ss.fatal(this, "Error using SQL database; startup aborted.", sqle);
} catch (Throwable t) {
log.error("Throwable in " + this.getClass().getName(), t);
ss.fatal(this, "Throwable in " + this.getClass().getName(), t);
}
}
private void setUpJenaDataSource(ServletContext ctx) throws SQLException {
OntModelSelectorImpl baseOms = new OntModelSelectorImpl();
OntModelSelectorImpl inferenceOms = new OntModelSelectorImpl();
OntModelSelectorImpl unionOms = new OntModelSelectorImpl();
OntModel userAccountsModel = ontModelFromContextAttribute(
ctx, "userAccountsOntModel");
baseOms.setUserAccountsModel(userAccountsModel);
inferenceOms.setUserAccountsModel(userAccountsModel);
unionOms.setUserAccountsModel(userAccountsModel);
OntModel displayModel = ontModelFromContextAttribute(
ctx,DISPLAY_ONT_MODEL);
baseOms.setDisplayModel(displayModel);
inferenceOms.setDisplayModel(displayModel);
unionOms.setDisplayModel(displayModel);
// SDB setup
// union default graph
// SDB.getContext().set(SDB.unionDefaultGraph, true) ;
//
// StoreDesc storeDesc = makeStoreDesc(ctx);
// setApplicationStoreDesc(storeDesc, ctx);
//
// BasicDataSource bds = getApplicationDataSource(ctx);
// if (bds == null) {
// bds = makeDataSourceFromConfigurationProperties(ctx);
// setApplicationDataSource(bds, ctx);
// }
// Store store = connectStore(bds, storeDesc);
// setApplicationStore(store, ctx);
//okay let's make a graph-backed model
String endpointURI = ConfigurationProperties.getBean(
ctx).getProperty("VitroConnection.DataSource.endpointURI");
Graph g = new SparqlGraph(endpointURI);
Dataset dataset = DatasetFactory.create(new SparqlDatasetGraph(endpointURI));
setStartupDataset(dataset, ctx);
// setStartupDataset(SDBFactory.connectDataset(store), ctx);
// if (!isSetUp(store)) {
// log.info("Initializing SDB store");
// if (isFirstStartup()) {
// setupSDB(ctx, store);
// } else {
// migrateToSDBFromExistingRDBStore(ctx, store);
// }
// }
// The code below, which sets up the OntModelSelectors, controls whether
// each model is maintained in memory, in the DB, or both while the
// application is running.
// Populate the three OntModelSelectors (BaseOntModel = assertions,
// InferenceOntModel = inferences and JenaOntModel = union of assertions
// and inferences) with the post-SDB-conversion models.
// ABox assertions
Model aboxAssertions = dataset.getNamedModel(
JenaDataSourceSetupBase.JENA_DB_MODEL);
Model listenableAboxAssertions = ModelFactory.createUnion(
aboxAssertions, ModelFactory.createDefaultModel());
baseOms.setABoxModel(
ModelFactory.createOntologyModel(
OntModelSpec.OWL_MEM, listenableAboxAssertions));
// ABox inferences
Model aboxInferences = dataset.getNamedModel(
JenaDataSourceSetupBase.JENA_INF_MODEL);
Model listenableAboxInferences = ModelFactory.createUnion(
aboxInferences, ModelFactory.createDefaultModel());
inferenceOms.setABoxModel(ModelFactory.createOntologyModel(
OntModelSpec.OWL_MEM, listenableAboxInferences));
// Since the TBox models are in memory, they do not have timeout issues
// like the ABox models do (and so don't need the extra step
// to make them listenable.)
// TBox assertions
try {
Model tboxAssertionsDB = dataset.getNamedModel(
JENA_TBOX_ASSERTIONS_MODEL);
OntModel tboxAssertions = ModelFactory.createOntologyModel(
MEM_ONT_MODEL_SPEC);
if (tboxAssertionsDB != null) {
long startTime = System.currentTimeMillis();
System.out.println(
"Copying cached tbox assertions into memory");
tboxAssertions.add(tboxAssertionsDB);
System.out.println((System.currentTimeMillis() - startTime)
/ 1000 + " seconds to load tbox assertions");
}
tboxAssertions.getBaseModel().register(new ModelSynchronizer(
tboxAssertionsDB));
baseOms.setTBoxModel(tboxAssertions);
} catch (Throwable e) {
log.error("Unable to load tbox assertion cache from DB", e);
}
// TBox inferences
try {
Model tboxInferencesDB = dataset.getNamedModel(JENA_TBOX_INF_MODEL);
OntModel tboxInferences = ModelFactory.createOntologyModel(
MEM_ONT_MODEL_SPEC);
if (tboxInferencesDB != null) {
long startTime = System.currentTimeMillis();
System.out.println(
"Copying cached tbox inferences into memory");
tboxInferences.add(tboxInferencesDB);
System.out.println((System.currentTimeMillis() - startTime)
/ 1000 + " seconds to load tbox inferences");
}
tboxInferences.getBaseModel().register(new ModelSynchronizer(
tboxInferencesDB));
inferenceOms.setTBoxModel(tboxInferences);
} catch (Throwable e) {
log.error("Unable to load tbox inference cache from DB", e);
}
// union ABox
OntModel unionABoxModel = ModelFactory.createOntologyModel(
MEM_ONT_MODEL_SPEC,ModelFactory.createUnion(
baseOms.getABoxModel(), inferenceOms.getABoxModel()));
unionOms.setABoxModel(unionABoxModel);
// union TBox
OntModel unionTBoxModel = ModelFactory.createOntologyModel(
MEM_ONT_MODEL_SPEC,ModelFactory.createUnion(
baseOms.getTBoxModel(), inferenceOms.getTBoxModel()));
unionOms.setTBoxModel(unionTBoxModel);
// Application metadata model is cached in memory.
try {
Model applicationMetadataModelDB = dataset.getNamedModel(
JENA_APPLICATION_METADATA_MODEL);
OntModel applicationMetadataModel =
ModelFactory.createOntologyModel(MEM_ONT_MODEL_SPEC);
long startTime = System.currentTimeMillis();
System.out.println(
"Copying cached application metadata model into memory");
applicationMetadataModel.add(applicationMetadataModelDB);
System.out.println((System.currentTimeMillis() - startTime)
/ 1000 + " seconds to load application metadata model " +
"assertions of size " + applicationMetadataModel.size());
applicationMetadataModel.getBaseModel().register(
new ModelSynchronizer(applicationMetadataModelDB));
if (isFirstStartup()) {
applicationMetadataModel.add(
InitialJenaModelUtils.loadInitialModel(
ctx, getDefaultNamespace(ctx)));
} else if (applicationMetadataModelDB.size() == 0) {
repairAppMetadataModel(
applicationMetadataModel, aboxAssertions,
aboxInferences);
}
baseOms.setApplicationMetadataModel(applicationMetadataModel);
inferenceOms.setApplicationMetadataModel(
baseOms.getApplicationMetadataModel());
unionOms.setApplicationMetadataModel(
baseOms.getApplicationMetadataModel());
} catch (Throwable e) {
log.error("Unable to load application metadata model cache from DB"
, e);
}
checkForNamespaceMismatch( baseOms.getApplicationMetadataModel(), ctx );
if (isFirstStartup()) {
loadDataFromFilesystem(baseOms, ctx);
}
log.info("Setting up union models and DAO factories");
// create TBox + ABox union models and set up webapp DAO factories
OntModel baseUnion = ModelFactory.createOntologyModel(
OntModelSpec.OWL_MEM,
ModelFactory.createUnion(baseOms.getABoxModel(),
baseOms.getTBoxModel()));
baseOms.setFullModel(baseUnion);
ModelContext.setBaseOntModel(baseOms.getFullModel(), ctx);
WebappDaoFactoryConfig config = new WebappDaoFactoryConfig();
config.setDefaultNamespace(getDefaultNamespace(ctx));
WebappDaoFactory baseWadf = new WebappDaoFactorySDB(
baseOms, dataset, config,
WebappDaoFactorySDB.SDBDatasetMode.ASSERTIONS_ONLY);
ctx.setAttribute("assertionsWebappDaoFactory",baseWadf);
OntModel inferenceUnion = ModelFactory.createOntologyModel(
OntModelSpec.OWL_MEM,
ModelFactory.createUnion(
inferenceOms.getABoxModel(),
inferenceOms.getTBoxModel()));
inferenceOms.setFullModel(inferenceUnion);
ModelContext.setInferenceOntModel(inferenceOms.getFullModel(), ctx);
WebappDaoFactory infWadf = new WebappDaoFactorySDB(
inferenceOms, dataset, config,
WebappDaoFactorySDB.SDBDatasetMode.INFERENCES_ONLY);
ctx.setAttribute("deductionsWebappDaoFactory", infWadf);
OntModel masterUnion = ModelFactory.createOntologyModel(
DB_ONT_MODEL_SPEC, dataset.getDefaultModel());
unionOms.setFullModel(masterUnion);
ctx.setAttribute("jenaOntModel", masterUnion);
WebappDaoFactory wadf = new WebappDaoFactorySDB(
unionOms, dataset, config);
ctx.setAttribute("webappDaoFactory",wadf);
ModelContext.setOntModelSelector(unionOms, ctx);
ModelContext.setUnionOntModelSelector(unionOms, ctx);
// assertions and inferences
ModelContext.setBaseOntModelSelector(baseOms, ctx);
// assertions
ModelContext.setInferenceOntModelSelector(inferenceOms, ctx);
// inferences
ctx.setAttribute("defaultNamespace", getDefaultNamespace(ctx));
log.info("SDB store ready for use");
makeModelMakerFromConnectionProperties(TripleStoreType.RDB, ctx);
VitroJenaModelMaker vjmm = getVitroJenaModelMaker();
setVitroJenaModelMaker(vjmm, ctx);
makeModelMakerFromConnectionProperties(TripleStoreType.SDB, ctx);
VitroJenaSDBModelMaker vsmm = getVitroJenaSDBModelMaker();
setVitroJenaSDBModelMaker(vsmm, ctx);
//bdc34: I have no reason for vsmm vs vjmm.
//I don't know what are the implications of this choice.
setVitroModelSource( new VitroModelSource(vsmm,ctx), ctx);
log.info("Model makers set up");
}
/**
* If we find a "portal1" portal (and we should), its URI should use the
* default namespace.
*/
private void checkForNamespaceMismatch(OntModel model, ServletContext ctx) {
String expectedNamespace = getDefaultNamespace(ctx);
List<Resource> portals = getPortal1s(model);
if(!portals.isEmpty() && noPortalForNamespace(
portals, expectedNamespace)) {
// There really should be only one portal 1, but if there happen to
// be multiple, just arbitrarily pick the first in the list.
Resource portal = portals.get(0);
String oldNamespace = portal.getNameSpace();
renamePortal(portal, expectedNamespace, model);
StartupStatus ss = StartupStatus.getBean(ctx);
ss.warning(this, "\nThe default namespace has been changed \n" +
"from " + oldNamespace +
"\nto " + expectedNamespace + ".\n" +
"The application will function normally, but " +
"any individuals in the \n" + oldNamespace + " " +
"namespace will need to have their URIs \n" +
"changed in order to be served as linked data. " +
"You can use the Ingest Tools \nto change the " +
"URIs for a batch of resources.");
}
}
private List<Resource> getPortal1s(Model model) {
List<Resource> portals = new ArrayList<Resource>();
try {
model.enterCriticalSection(Lock.READ);
ResIterator portalIt = model.listResourcesWithProperty(
RDF.type, PORTAL);
while (portalIt.hasNext()) {
Resource portal = portalIt.nextResource();
if ("portal1".equals(portal.getLocalName())) {
portals.add(portal);
}
}
} finally {
model.leaveCriticalSection();
}
return portals;
}
private boolean noPortalForNamespace(List<Resource> portals,
String expectedNamespace) {
for (Resource portal : portals) {
if(expectedNamespace.equals(portal.getNameSpace())) {
return false;
}
}
return true;
}
private void renamePortal(Resource portal, String namespace, Model model) {
model.enterCriticalSection(Lock.WRITE);
try {
ResourceUtils.renameResource(
portal, namespace + portal.getLocalName());
} finally {
model.leaveCriticalSection();
}
}
/* ===================================================================== */
@Override
public void contextDestroyed(ServletContextEvent sce) {
// Nothing to do.
}
private OntModel ontModelFromContextAttribute(ServletContext ctx,
String attribute) {
OntModel ontModel;
Object attributeValue = ctx.getAttribute(attribute);
if (attributeValue != null && attributeValue instanceof OntModel) {
ontModel = (OntModel) attributeValue;
} else {
ontModel = ModelFactory.createOntologyModel(MEM_ONT_MODEL_SPEC);
ctx.setAttribute(attribute, ontModel);
}
return ontModel;
}
private boolean isEmpty(Model model) {
ClosableIterator<Statement> closeIt = model.listStatements(
null, RDF.type, ResourceFactory.createResource(
VitroVocabulary.PORTAL));
try {
if (closeIt.hasNext()) {
return false;
} else {
return true;
}
} finally {
closeIt.close();
}
}
private void loadDataFromFilesystem(OntModelSelector baseOms,
ServletContext ctx) {
Long startTime = System.currentTimeMillis();
log.debug("Initializing models from RDF files");
readOntologyFilesInPathSet(USER_ABOX_PATH, ctx, baseOms.getABoxModel());
readOntologyFilesInPathSet(USER_TBOX_PATH, ctx, baseOms.getTBoxModel());
readOntologyFilesInPathSet(
USER_APPMETA_PATH, ctx, baseOms.getApplicationMetadataModel());
log.debug(((System.currentTimeMillis() - startTime) / 1000)
+ " seconds to read RDF files ");
}
private static void getTBoxModel(Model fullModel,
Model submodels,
Model tboxModel) {
JenaModelUtils modelUtils = new JenaModelUtils();
Model tempModel = ModelFactory.createUnion(fullModel, submodels);
Model tempTBoxModel = modelUtils.extractTBox(tempModel);
// copy intersection of tempTBoxModel and fullModel to tboxModel.
StmtIterator iter = tempTBoxModel.listStatements();
while (iter.hasNext()) {
Statement stmt = iter.next();
if (fullModel.contains(stmt)) {
tboxModel.add(stmt);
}
}
return;
}
/*
* Copy all statements from model 1 that are not in model 2 to model 3.
*/
private static void copyDifference(Model model1,
Model model2,
Model model3) {
StmtIterator iter = model1.listStatements();
while (iter.hasNext()) {
Statement stmt = iter.next();
if (!model2.contains(stmt)) {
model3.add(stmt);
}
}
return;
}
private static void getAppMetadata(Model source, Model target) {
String amdQuery = "DESCRIBE ?x WHERE { " +
"{?x a <" + VitroVocabulary.PORTAL +"> } UNION " +
"{?x a <" + VitroVocabulary.PROPERTYGROUP +"> } UNION " +
"{?x a <" + VitroVocabulary.CLASSGROUP +"> } } ";
try {
Query q = QueryFactory.create(amdQuery, Syntax.syntaxARQ);
QueryExecution qe = QueryExecutionFactory.create(q, source);
qe.execDescribe(target);
} catch (Exception e) {
log.error("unable to create the application metadata model",e);
}
return;
}
private static void repairAppMetadataModel(Model applicationMetadataModel,
Model aboxAssertions,
Model aboxInferences) {
log.info("Moving application metadata from ABox to dedicated model");
getAppMetadata(aboxAssertions, applicationMetadataModel);
getAppMetadata(aboxInferences, applicationMetadataModel);
aboxAssertions.remove(applicationMetadataModel);
aboxInferences.remove(applicationMetadataModel);
return;
}
public static StoreDesc makeStoreDesc(ServletContext ctx) {
String layoutStr = ConfigurationProperties.getBean(ctx).getProperty(
"VitroConnection.DataSource.sdb.layout", "layout2/hash");
String dbtypeStr = ConfigurationProperties.getBean(ctx).getProperty(
"VitroConnection.DataSource.dbtype", "MySQL");
return new StoreDesc(
LayoutType.fetch(layoutStr),
DatabaseType.fetch(dbtypeStr) );
}
public static Store connectStore(BasicDataSource bds, StoreDesc storeDesc)
throws SQLException {
SDBConnection conn = new SDBConnection(bds.getConnection()) ;
return SDBFactory.connectStore(conn, storeDesc);
}
public static void setupSDB(ServletContext ctx, Store store) {
setupSDB(ctx,
store,
ModelFactory.createDefaultModel(),
ModelFactory.createDefaultModel());
}
public static void setupSDB(ServletContext ctx,
Store store,
Model memModel,
Model inferenceModel) {
store.getTableFormatter().create();
store.getTableFormatter().truncate();
store.getTableFormatter().dropIndexes(); // improve load performance
try {
// This is a one-time copy of stored KB data - from a Jena RDB store
// to a Jena SDB store. In the process, we will also separate out
// the TBox from the Abox; these are in the same graph in pre-1.2
// VIVO versions and will now be stored and maintained in separate
// models. Access to the Jena RDB data is through the
// OntModelSelectors that have been set up earlier in the current
// session by JenaPersistentDataSourceSetup.java. In the code
// below, note that the current getABoxModel() methods on the
// OntModelSelectors return a graph with both ABox and TBox data.
OntModel submodels = ModelFactory.createOntologyModel(
MEM_ONT_MODEL_SPEC);
readOntologyFilesInPathSet(SUBMODELS, ctx, submodels);
Model tboxAssertions = SDBFactory.connectNamedModel(
store, JenaDataSourceSetupBase.JENA_TBOX_ASSERTIONS_MODEL);
// initially putting the results in memory so we have a
// cheaper way of computing the difference when we copy the ABox
Model memTboxAssertions = ModelFactory.createDefaultModel();
getTBoxModel(memModel, submodels, memTboxAssertions);
tboxAssertions.add(memTboxAssertions);
Model tboxInferences = SDBFactory.connectNamedModel(
store, JenaDataSourceSetupBase.JENA_TBOX_INF_MODEL);
// initially putting the results in memory so we have a
// cheaper way of computing the difference when we copy the ABox
Model memTboxInferences = ModelFactory.createDefaultModel();
getTBoxModel(inferenceModel, submodels, memTboxInferences);
tboxInferences.add(memTboxInferences);
Model aboxAssertions = SDBFactory.connectNamedModel(
store, JenaDataSourceSetupBase.JENA_DB_MODEL);
copyDifference(memModel, memTboxAssertions, aboxAssertions);
Model aboxInferences = SDBFactory.connectNamedModel(
store, JenaDataSourceSetupBase.JENA_INF_MODEL);
copyDifference(inferenceModel, memTboxInferences, aboxInferences);
// Set up the application metadata model
Model applicationMetadataModel = SDBFactory.connectNamedModel(
store,
JenaDataSourceSetupBase.JENA_APPLICATION_METADATA_MODEL);
getAppMetadata(memModel, applicationMetadataModel);
log.info("During initial SDB setup, created an application " +
"metadata model of size " +
applicationMetadataModel.size());
// remove application metadata from ABox model
aboxAssertions.remove(applicationMetadataModel);
aboxInferences.remove(applicationMetadataModel);
// Make sure the reasoner takes into account the newly-set-up data.
SimpleReasonerSetup.setRecomputeRequired(ctx);
} finally {
log.info("Adding indexes to SDB database tables.");
store.getTableFormatter().addIndexes();
log.info("Indexes created.");
}
}
private void migrateToSDBFromExistingRDBStore(ServletContext ctx,
Store store) {
Model rdbAssertionsModel = makeDBModelFromConfigurationProperties(
JENA_DB_MODEL, DB_ONT_MODEL_SPEC, ctx);
Model rdbInferencesModel = makeDBModelFromConfigurationProperties(
JENA_INF_MODEL, DB_ONT_MODEL_SPEC, ctx);
setupSDB(ctx, store, rdbAssertionsModel, rdbInferencesModel);
}
/**
* Tests whether an SDB store has been formatted and populated for use.
* @param store
* @return
*/
private boolean isSetUp(Store store) throws SQLException {
if (!(StoreUtils.isFormatted(store))) {
return false;
}
// even if the store exists, it may be empty
try {
return (SDBFactory.connectNamedModel(
store,
JenaDataSourceSetupBase.JENA_TBOX_ASSERTIONS_MODEL))
.size() > 0;
} catch (Exception e) {
return false;
}
}
private static final String STOREDESC_ATTR = "storeDesc";
private static final String STORE_ATTR = "kbStore";
public static void setApplicationStoreDesc(StoreDesc storeDesc,
ServletContext ctx) {
ctx.setAttribute(STOREDESC_ATTR, storeDesc);
}
public static StoreDesc getApplicationStoreDesc(ServletContext ctx) {
return (StoreDesc) ctx.getAttribute(STOREDESC_ATTR);
}
public static void setApplicationStore(Store store,
ServletContext ctx) {
ctx.setAttribute(STORE_ATTR, store);
}
public static Store getApplicationStore(ServletContext ctx) {
return (Store) ctx.getAttribute(STORE_ATTR);
}
}