SDB code into trunk; not yet active by default

This commit is contained in:
bjl23 2010-11-22 16:24:00 +00:00
parent b2abc1f389
commit 0a71b802df
37 changed files with 5507 additions and 81 deletions

View file

@ -108,10 +108,11 @@
<!-- See documentation for details. -->
<!-- If used, must be run after JenaDataSourceSetup -->
<!--
<listener>
<listener-class>edu.cornell.mannlib.vitro.webapp.servlet.setup.PelletReasonerSetup</listener-class>
</listener>
-->
<!-- The following listener records all edit changes, in reified form, to another database model -->
<!-- still at an experimental stage -->

View file

@ -24,6 +24,7 @@ import org.apache.commons.logging.LogFactory;
import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.ontology.OntModelSpec;
import com.hp.hpl.jena.query.DataSource;
import com.hp.hpl.jena.query.Dataset;
import com.hp.hpl.jena.query.DatasetFactory;
import com.hp.hpl.jena.query.Query;
import com.hp.hpl.jena.query.QueryExecution;
@ -57,9 +58,11 @@ import edu.cornell.mannlib.vitro.webapp.dao.OntologyDao;
public class SparqlQueryServlet extends BaseEditController {
private static final Log log = LogFactory.getLog(SparqlQueryServlet.class.getName());
private static final String kb2 = "http://vitro.mannlib.cornell.edu/default/vitro-kb-2";
protected static final Syntax SYNTAX = Syntax.syntaxARQ;
protected static HashMap<String,ResultSetFormat>formatSymbols = new HashMap<String,ResultSetFormat>();
static{
formatSymbols.put( ResultSetFormat.syntaxXML.getSymbol(), ResultSetFormat.syntaxXML);
@ -141,6 +144,7 @@ public class SparqlQueryServlet extends BaseEditController {
}
DataSource dataSource = DatasetFactory.create() ;
Dataset dataset = null;
ModelMaker maker = (ModelMaker) getServletContext().getAttribute("vitroJenaModelMaker");
boolean someModelSet = false;
@ -156,21 +160,27 @@ public class SparqlQueryServlet extends BaseEditController {
ontModel.addSubModel(modelNamed);
someModelSet = true;
}
}
}
if (someModelSet) {
dataSource.setDefaultModel(ontModel);
}
}
if( ! someModelSet )
dataSource.setDefaultModel(model) ;
executeQuery(request, response, resultFormatParam, rdfResultFormatParam, queryParam, dataSource);
if( ! someModelSet ){
dataset = vreq.getDataset();
if(dataset==null){
dataSource.setDefaultModel(model) ;
}
}
executeQuery(request, response, resultFormatParam, rdfResultFormatParam, queryParam, (dataset != null) ? dataset : dataSource);
return;
}
private void executeQuery(HttpServletRequest req,
HttpServletResponse response, String resultFormatParam, String rdfResultFormatParam, String queryParam, DataSource dataSource ) throws IOException {
HttpServletResponse response, String resultFormatParam, String rdfResultFormatParam, String queryParam, Dataset dataset ) throws IOException {
ResultSetFormat rsf = null;
/* BJL23 2008-11-06
@ -188,7 +198,7 @@ public class SparqlQueryServlet extends BaseEditController {
QueryExecution qe = null;
try{
Query query = QueryFactory.create(queryParam, SYNTAX);
qe = QueryExecutionFactory.create(query, dataSource);
qe = QueryExecutionFactory.create(query, dataset);
if( query.isSelectType() ){
ResultSet results = null;
results = qe.execSelect();

View file

@ -19,6 +19,7 @@ import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpSession;
import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.query.Dataset;
import edu.cornell.mannlib.vitro.webapp.beans.ApplicationBean;
import edu.cornell.mannlib.vitro.webapp.beans.Portal;
@ -68,14 +69,31 @@ public class VitroRequest implements HttpServletRequest {
return (WebappDaoFactory) getAttribute("webappDaoFactory");
}
public void setFullWebappDaoFactory(WebappDaoFactory wdf) {
setAttribute("fullWebappDaoFactory", wdf);
}
public Dataset getDataset() {
return (Dataset) getAttribute("dataset");
}
public void setDataset(Dataset dataset) {
setAttribute("dataset", dataset);
}
/** gets assertions + inferences WebappDaoFactory with no filtering **/
public WebappDaoFactory getFullWebappDaoFactory() {
Object webappDaoFactoryAttr = _req.getSession().getAttribute("webappDaoFactory");
if (webappDaoFactoryAttr instanceof WebappDaoFactory) {
return (WebappDaoFactory) webappDaoFactoryAttr;
} else {
return (WebappDaoFactory) _req.getSession().getServletContext().getAttribute("webappDaoFactory");
}
Object webappDaoFactoryAttr = _req.getAttribute("fullWebappDaoFactory");
if (webappDaoFactoryAttr instanceof WebappDaoFactory) {
return (WebappDaoFactory) webappDaoFactoryAttr;
} else {
webappDaoFactoryAttr = _req.getSession().getAttribute("webappDaoFactory");
if (webappDaoFactoryAttr instanceof WebappDaoFactory) {
return (WebappDaoFactory) webappDaoFactoryAttr;
} else {
return (WebappDaoFactory) _req.getSession().getServletContext().getAttribute("webappDaoFactory");
}
}
}
/** gets assertions-only WebappDaoFactory with no filtering */

View file

@ -5,6 +5,8 @@ package edu.cornell.mannlib.vitro.webapp.controller.edit;
import java.text.CollationKey;
import java.util.ArrayList;
import java.util.Collection;
import java.text.Collator;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
@ -12,6 +14,7 @@ import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import java.util.TreeMap;
import java.util.ListIterator;
import javax.servlet.RequestDispatcher;
import javax.servlet.http.HttpServletRequest;
@ -73,9 +76,8 @@ public class Properties2PropertiesRetryController extends BaseEditController {
Collections.sort(propList);
String superpropertyURIstr = request.getParameter("SuperpropertyURI");
String subpropertyURIstr = request.getParameter("SubpropertyURI");
String superpropertyURIstr = request.getParameter("SuperpropertyURI");
String subpropertyURIstr = request.getParameter("SubpropertyURI");
HashMap<String,Option> hashMap = new HashMap<String,Option>();
List<Option> optionList = FormUtils.makeOptionListFromBeans(propList,"URI","LocalNameWithPrefix",superpropertyURIstr,null);

View file

@ -92,6 +92,7 @@ public class BrowseController extends FreemarkerHttpServlet {
int portalId = vreq.getPortal().getPortalId();
List<VClassGroup> groups = getGroups(vreq.getWebappDaoFactory().getVClassGroupDao(), portalId);
_groupListMap.put(portalId, groups);
if (groups == null || groups.isEmpty()) {
message = "There are not yet any items in the system.";
}
@ -114,13 +115,17 @@ public class BrowseController extends FreemarkerHttpServlet {
public void destroy(){
_cacheRebuildThread.kill();
}
private List getGroups( VClassGroupDao vcgDao, int portalId) {
return getGroups( vcgDao, portalId, INCLUDE_INDIVIDUAL_COUNT);
}
private List getGroups( VClassGroupDao vcgDao, int portalId ){
private List getGroups( VClassGroupDao vcgDao, int portalId, boolean includeIndividualCount ){
List grp = _groupListMap.get(portalId);
if( grp == null ){
log.debug("needed to build vclassGroups for portal " + portalId);
// Get all classgroups, each populated with a list of their member vclasses
List groups = vcgDao.getPublicGroupsWithVClasses(ORDER_BY_DISPLAYRANK, !INCLUDE_UNINSTANTIATED, INCLUDE_INDIVIDUAL_COUNT);
List groups = vcgDao.getPublicGroupsWithVClasses(ORDER_BY_DISPLAYRANK, !INCLUDE_UNINSTANTIATED, includeIndividualCount);
// remove classes that have been configured to be hidden
// from search results
@ -130,7 +135,7 @@ public class BrowseController extends FreemarkerHttpServlet {
//removeUnpopulatedClasses( groups);
vcgDao.removeUnpopulatedGroups(groups);
_groupListMap.put(portalId, groups);
//_groupListMap.put(portalId, groups);
return groups;
} else {
return grp;
@ -212,7 +217,7 @@ public class BrowseController extends FreemarkerHttpServlet {
portals.add(wdFactory.getPortalDao().getPortalByURI(uri));
}
}
for(Portal portal : portals){
rebuildCacheForPortal(portal,appBean,wdFactory);
}
@ -258,8 +263,55 @@ public class BrowseController extends FreemarkerHttpServlet {
filteringDaoFactory = wdFactory;
}
_groupListMap.remove(portal.getPortalId());
getGroups(filteringDaoFactory.getVClassGroupDao(),portal.getPortalId());
if ( !singlePortalApplication ) {
_groupListMap.put(portal.getPortalId(),
getGroups(filteringDaoFactory.getVClassGroupDao(),portal.getPortalId()));
} else {
List<VClassGroup> unfilteredGroups = getGroups(wdFactory.getVClassGroupDao(), portal.getPortalId(), INCLUDE_INDIVIDUAL_COUNT);
List<VClassGroup> filteredGroups = getGroups(filteringDaoFactory.getVClassGroupDao(),portal.getPortalId(), !INCLUDE_INDIVIDUAL_COUNT);
_groupListMap.put(portal.getPortalId(), removeFilteredOutGroupsAndClasses(unfilteredGroups, filteredGroups));
// BJL23: You may be wondering, why this extra method?
// Can't we just use the filtering DAO?
// Yes, but using the filtered DAO involves an expensive method
// called correctVClassCounts() that requires each individual
// in a VClass to be retrieved and filtered. This is fine in memory,
// but awful when using a database. We can't (yet) avoid all
// this work when portal filtering is involved, but we can
// short-circuit it when we have a single portal by using
// the filtering DAO only to filter groups and classes,
// and the unfiltered DAO to get the counts.
}
}
private List<VClassGroup> removeFilteredOutGroupsAndClasses(List<VClassGroup> unfilteredGroups, List<VClassGroup> filteredGroups) {
List<VClassGroup> groups = new ArrayList<VClassGroup>();
Set<String> allowedGroups = new HashSet<String>();
Set<String> allowedVClasses = new HashSet<String>();
for (VClassGroup group : filteredGroups) {
if (group.getURI() != null) {
allowedGroups.add(group.getURI());
}
for (VClass vcl : group) {
if (vcl.getURI() != null) {
allowedVClasses.add(vcl.getURI());
}
}
}
for (VClassGroup group : unfilteredGroups) {
if (allowedGroups.contains(group.getURI())) {
groups.add(group);
}
List<VClass> tmp = new ArrayList<VClass>();
for (VClass vcl : group) {
if (allowedVClasses.contains(vcl.getURI())) {
tmp.add(vcl);
}
}
group.setVitroClassList(tmp);
}
return groups;
}
private void clearGroupCache(){
_groupListMap = new ConcurrentHashMap<Integer, List>();

View file

@ -33,6 +33,7 @@ import edu.cornell.mannlib.vitro.webapp.controller.Controllers;
import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest;
import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactory;
import edu.cornell.mannlib.vitro.webapp.dao.jena.JenaModelUtils;
import edu.cornell.mannlib.vitro.webapp.dao.jena.OntModelSelector;
import edu.cornell.mannlib.vitro.webapp.dao.jena.VitroJenaSpecialModelMaker;
import edu.cornell.mannlib.vitro.webapp.dao.jena.event.EditEvent;
import edu.cornell.mannlib.vitro.webapp.filestorage.uploadrequest.FileUploadServletRequest;
@ -155,10 +156,12 @@ public class RDFUploadController extends BaseEditController {
if( tempModel != null ){
OntModel memModel=null;
try {
memModel = (OntModel) request.getSession().getAttribute("baseOntModel");
memModel = ((OntModelSelector) request.getSession()
.getAttribute("unionOntModelSelector")).getABoxModel();
} catch (Exception e) {}
if (memModel==null) {
memModel = (OntModel) getServletContext().getAttribute("baseOntModel");
memModel = ((OntModelSelector) getServletContext()
.getAttribute("unionOntModelSelector")).getABoxModel();
}
if (memModel != null) {
stmtCount = operateOnModel(request.getFullWebappDaoFactory(), memModel,tempModel,remove,makeClassgroups,portalArray,loginBean.getUserURI());

View file

@ -221,6 +221,7 @@ public class VClassDaoFiltering extends BaseFiltering implements VClassDao{
Filter.filter(ents,filters.getIndividualFilter(),out);
if( out != null )
vclass.setEntityCount(out.size());
System.out.println(vclass.getURI() + " count: " + vclass.getEntityCount());
return;
}

View file

@ -66,9 +66,10 @@ public class DataPropertyStatementDaoJena extends JenaBaseDao implements DataPro
}
else
{
getOntModel().enterCriticalSection(Lock.READ);
OntModel ontModel = getOntModelSelector().getABoxModel();
ontModel.enterCriticalSection(Lock.READ);
try {
Resource ind = getOntModel().getResource(entity.getURI());
Resource ind = ontModel.getResource(entity.getURI());
List<DataPropertyStatement> edList = new ArrayList<DataPropertyStatement>();
StmtIterator stmtIt = ind.listProperties();
while( stmtIt.hasNext() )
@ -103,7 +104,7 @@ public class DataPropertyStatementDaoJena extends JenaBaseDao implements DataPro
entity.setDataPropertyStatements(edList);
return entity;
} finally {
getOntModel().leaveCriticalSection();
ontModel.leaveCriticalSection();
}
}
}
@ -234,14 +235,14 @@ public class DataPropertyStatementDaoJena extends JenaBaseDao implements DataPro
return 0;
}
private DataPropertyStatement fillDataPropertyStatementWithJenaLiteral(DataPropertyStatement dataPropertyStatement, Literal l) {
protected DataPropertyStatement fillDataPropertyStatementWithJenaLiteral(DataPropertyStatement dataPropertyStatement, Literal l) {
dataPropertyStatement.setData(l.getLexicalForm());
dataPropertyStatement.setDatatypeURI(l.getDatatypeURI());
dataPropertyStatement.setLanguage(l.getLanguage());
return dataPropertyStatement;
}
private Literal jenaLiteralFromDataPropertyStatement(DataPropertyStatement dataPropertyStatement, OntModel ontModel) {
protected Literal jenaLiteralFromDataPropertyStatement(DataPropertyStatement dataPropertyStatement, OntModel ontModel) {
Literal l = null;
if ((dataPropertyStatement.getLanguage()) != null && (dataPropertyStatement.getLanguage().length()>0)) {
l = ontModel.createLiteral(dataPropertyStatement.getData(),dataPropertyStatement.getLanguage());

View file

@ -0,0 +1,100 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.dao.jena;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.ontology.OntModelSpec;
import com.hp.hpl.jena.query.Dataset;
import com.hp.hpl.jena.query.QueryExecutionFactory;
import com.hp.hpl.jena.query.QueryFactory;
import com.hp.hpl.jena.rdf.model.Literal;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.rdf.model.Statement;
import com.hp.hpl.jena.rdf.model.StmtIterator;
import com.hp.hpl.jena.shared.Lock;
import com.hp.hpl.jena.vocabulary.RDF;
import edu.cornell.mannlib.vitro.webapp.beans.DataProperty;
import edu.cornell.mannlib.vitro.webapp.beans.DataPropertyStatement;
import edu.cornell.mannlib.vitro.webapp.beans.DataPropertyStatementImpl;
import edu.cornell.mannlib.vitro.webapp.beans.Individual;
import edu.cornell.mannlib.vitro.webapp.dao.DataPropertyStatementDao;
import edu.cornell.mannlib.vitro.webapp.dao.VitroVocabulary;
/**
 * SDB-backed DAO for data property statements.  Extends the in-memory Jena
 * implementation but answers fillExistingDataPropertyStatementsForIndividual()
 * by running a SPARQL CONSTRUCT against the supplied Dataset instead of
 * walking an in-memory OntModel.
 */
public class DataPropertyStatementDaoSDB extends DataPropertyStatementDaoJena
implements DataPropertyStatementDao {
// SDB-backed dataset; queried via SPARQL, never mutated here.
private Dataset dataset;
public DataPropertyStatementDaoSDB(Dataset dataset, WebappDaoFactoryJena wadf) {
super (wadf);
this.dataset = dataset;
}
/**
 * Fills in the entity's data property statements from the dataset and
 * returns the same entity instance.  Only literal-valued triples whose
 * subject is the entity are considered; predicates in non-user namespaces
 * are excluded unless the predicate is rdf:value or vitro's value property.
 * An entity with a null URI is returned unchanged.
 */
@Override
public Individual fillExistingDataPropertyStatementsForIndividual( Individual entity/*, boolean allowAnyNameSpace*/)
{
if( entity.getURI() == null )
{
return entity;
}
else
{
// CONSTRUCT all literal-object triples for this individual from any
// named graph (GRAPH ?g); FILTER(isLiteral(?o)) keeps object-property
// statements out of the result model.
String query =
"CONSTRUCT { \n" +
" <" + entity.getURI() + "> ?p ?o . \n" +
"} WHERE { GRAPH ?g { \n" +
" <" + entity.getURI() + "> ?p ?o . \n" +
" FILTER(isLiteral(?o)) \n" +
"} }" ;
Model results = QueryExecutionFactory.create(QueryFactory.create(query), dataset).execConstruct();
// Wrap the small result model so the inherited iteration code (which
// expects an OntModel) can be reused on it.
OntModel ontModel = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM, results);
ontModel.enterCriticalSection(Lock.READ);
try {
Resource ind = ontModel.getResource(entity.getURI());
List<DataPropertyStatement> edList = new ArrayList<DataPropertyStatement>();
StmtIterator stmtIt = ind.listProperties();
while( stmtIt.hasNext() )
{
Statement st = (Statement)stmtIt.next();
// Keep literal statements whose predicate is rdf:value / vitro:value,
// or whose predicate namespace is not flagged as non-user.
boolean addToList = /*allowAnyNameSpace ? st.getObject().canAs(Literal.class) :*/ st.getObject().isLiteral() &&
(
(RDF.value.equals(st.getPredicate()) || VitroVocabulary.value.equals(st.getPredicate().getURI()))
|| !(NONUSER_NAMESPACES.contains(st.getPredicate().getNameSpace()))
);
if( addToList )
{ /* now want to expose Cornellemailnetid and potentially other properties so can at least control whether visible
boolean isExternalId = false;
ClosableIterator externalIdStmtIt = getOntModel().listStatements(st.getPredicate(), DATAPROPERTY_ISEXTERNALID, (Literal)null);
try {
if (externalIdStmtIt.hasNext()) {
isExternalId = true;
}
} finally {
externalIdStmtIt.close();
}
if (!isExternalId) { */
// Copy the Jena literal into a bean via the (now protected)
// helper inherited from DataPropertyStatementDaoJena.
DataPropertyStatement ed = new DataPropertyStatementImpl();
Literal lit = (Literal)st.getObject();
fillDataPropertyStatementWithJenaLiteral(ed,lit);
ed.setDatapropURI(st.getPredicate().getURI());
ed.setIndividualURI(ind.getURI());
edList.add(ed);
/* } */
}
}
entity.setDataPropertyStatements(edList);
return entity;
} finally {
ontModel.leaveCriticalSection();
}
}
}
}

View file

@ -151,16 +151,19 @@ public class IndividualDaoJena extends JenaBaseDao implements IndividualDao {
? getOntModel().createResource(new AnonId(vclassURI.split("#")[1]))
: ResourceFactory.createResource(vclassURI);
getOntModel().enterCriticalSection(Lock.READ);
try {
if (theClass.isAnon() && theClass.canAs(UnionClass.class)) {
UnionClass u = (UnionClass) theClass.as(UnionClass.class);
for (OntClass operand : u.listOperands().toList()) {
VClass vc = new VClassJena(operand, getWebappDaoFactory());
ents.addAll(getIndividualsByVClass(vc));
}
} else {
StmtIterator stmtIt = getOntModel().listStatements((Resource) null, RDF.type, theClass);
if (theClass.isAnon() && theClass.canAs(UnionClass.class)) {
UnionClass u = (UnionClass) theClass.as(UnionClass.class);
for (OntClass operand : u.listOperands().toList()) {
VClass vc = new VClassJena(operand, getWebappDaoFactory());
ents.addAll(getIndividualsByVClass(vc));
}
} else {
OntModel ontModel = getOntModelSelector().getABoxModel();
try {
ontModel.enterCriticalSection(Lock.READ);
StmtIterator stmtIt = ontModel.listStatements((Resource) null, RDF.type, theClass);
try {
while (stmtIt.hasNext()) {
Statement stmt = stmtIt.nextStatement();
@ -170,10 +173,11 @@ public class IndividualDaoJena extends JenaBaseDao implements IndividualDao {
} finally {
stmtIt.close();
}
}
} finally {
getOntModel().leaveCriticalSection();
} finally {
ontModel.leaveCriticalSection();
}
}
java.util.Collections.sort(ents);

View file

@ -0,0 +1,444 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.dao.jena;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.joda.time.DateTime;
import com.hp.hpl.jena.ontology.OntClass;
import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.ontology.UnionClass;
import com.hp.hpl.jena.query.Dataset;
import com.hp.hpl.jena.query.Query;
import com.hp.hpl.jena.query.QueryExecution;
import com.hp.hpl.jena.query.QueryExecutionFactory;
import com.hp.hpl.jena.query.QueryFactory;
import com.hp.hpl.jena.query.QuerySolution;
import com.hp.hpl.jena.query.ResultSet;
import com.hp.hpl.jena.rdf.model.AnonId;
import com.hp.hpl.jena.rdf.model.Literal;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.Property;
import com.hp.hpl.jena.rdf.model.RDFNode;
import com.hp.hpl.jena.rdf.model.ResIterator;
import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.rdf.model.ResourceFactory;
import com.hp.hpl.jena.rdf.model.Statement;
import com.hp.hpl.jena.rdf.model.StmtIterator;
import com.hp.hpl.jena.shared.Lock;
import com.hp.hpl.jena.util.iterator.ClosableIterator;
import com.hp.hpl.jena.vocabulary.OWL;
import com.hp.hpl.jena.vocabulary.RDF;
import com.hp.hpl.jena.vocabulary.RDFS;
import edu.cornell.mannlib.vitro.webapp.beans.Individual;
import edu.cornell.mannlib.vitro.webapp.beans.ObjectPropertyStatement;
import edu.cornell.mannlib.vitro.webapp.beans.VClass;
import edu.cornell.mannlib.vitro.webapp.dao.VitroVocabulary;
/**
 * SDB-backed Individual DAO.  Extends IndividualDaoJena but answers the
 * heavyweight listing operations with SPARQL queries against the SDB
 * Dataset, falling back to the ABox OntModel (via getOntModel()) for the
 * remaining inherited behavior.
 */
public class IndividualDaoSDB extends IndividualDaoJena {
// SDB-backed dataset queried via SPARQL.
private Dataset dataset;
// NOTE(review): never assigned (constructor only passes wadf to super) —
// appears to be dead; confirm before relying on it.
private WebappDaoFactoryJena wadf;
public IndividualDaoSDB(Dataset dataset, WebappDaoFactoryJena wadf) {
super(wadf);
this.dataset = dataset;
}
protected Dataset getDataset() {
return this.dataset;
}
// Factory hook: builds the SDB-flavored Individual bean for a URI.
protected Individual makeIndividual(String individualURI) {
return new IndividualSDB2(individualURI, getDataset(), getWebappDaoFactory());
}
private static final Log log = LogFactory.getLog(IndividualDaoSDB.class.getName());
// Inherited code operates on the ABox model rather than the full union model.
@Override
protected OntModel getOntModel() {
return getOntModelSelector().getABoxModel();
}
/**
 * Lists individuals of the given VClass.  Union classes are expanded
 * operand-by-operand; ordinary classes are answered with a single SPARQL
 * CONSTRUCT that pulls type, label and moniker in one pass.
 * Returns null for a null vclassURI.
 * NOTE(review): offset/quantity are accepted but not applied here — confirm
 * paging is intentionally unsupported in this implementation.
 */
@Override
public List getIndividualsByVClassURI(String vclassURI, int offset, int quantity ) {
if (vclassURI==null) {
return null;
}
List ents = new ArrayList();
// Pseudo-bnode URIs encode an anonymous class id after the '#'.
Resource theClass = (vclassURI.indexOf(PSEUDO_BNODE_NS) == 0)
? getOntModel().createResource(new AnonId(vclassURI.split("#")[1]))
: ResourceFactory.createResource(vclassURI);
if (theClass.isAnon() && theClass.canAs(UnionClass.class)) {
// Union class: recurse into each operand and merge the results.
UnionClass u = (UnionClass) theClass.as(UnionClass.class);
for (OntClass operand : u.listOperands().toList()) {
VClass vc = new VClassJena(operand, getWebappDaoFactory());
ents.addAll(getIndividualsByVClass(vc));
}
} else {
Model model;
// Lock the dataset only for the duration of the query execution.
dataset.getLock().enterCriticalSection(Lock.READ);
try {
// One CONSTRUCT gathers type assertion plus optional label/moniker
// for every instance, across all named graphs.
String query =
"CONSTRUCT " +
"{ ?ind <" + RDFS.label.getURI() + "> ?ooo. \n" +
"?ind a <" + theClass.getURI() + "> . \n" +
"?ind <" + VitroVocabulary.MONIKER + "> ?moniker \n" +
"} WHERE " +
"{ GRAPH ?g { \n" +
" ?ind a <" + theClass.getURI() + "> \n" +
"OPTIONAL { ?ind <" + RDFS.label.getURI() + "> ?ooo } \n" +
"OPTIONAL { ?ind <" + VitroVocabulary.MONIKER + "> ?moniker } \n" +
"} \n" +
"}";
model = QueryExecutionFactory.create(QueryFactory.create(query), dataset).execConstruct();
} finally {
dataset.getLock().leaveCriticalSection();
}
ResIterator resIt = model.listSubjects();
try {
while (resIt.hasNext()) {
Resource ind = resIt.nextResource();
if (!ind.isAnon()) {
// NOTE(review): uses IndividualSDB here while makeIndividual()
// builds IndividualSDB2 — confirm the divergence is deliberate.
ents.add(new IndividualSDB(ind.getURI(), dataset, getWebappDaoFactory(), model));
}
}
} finally {
resIt.close();
}
}
java.util.Collections.sort(ents);
return ents;
}
/** Returns a lazily-populated Individual bean, or null for a null/empty URI. */
@Override
public Individual getIndividualByURI(String entityURI) {
if( entityURI == null || entityURI.length() == 0 ) {
return null;
} else {
return makeIndividual(entityURI);
}
}
/**
* fills in the Individual objects needed for any ObjectPropertyStatements attached to the specified individual.
* @param entity
*/
private void fillIndividualsForObjectPropertyStatements(Individual entity){
getOntModel().enterCriticalSection(Lock.READ);
try {
Iterator e2eIt = entity.getObjectPropertyStatements().iterator();
while (e2eIt.hasNext()) {
ObjectPropertyStatement e2e = (ObjectPropertyStatement) e2eIt.next();
// Replace bare URIs with full Individual beans on both ends.
e2e.setSubject(makeIndividual(e2e.getSubjectURI()));
e2e.setObject(makeIndividual(e2e.getObjectURI()));
}
} finally {
getOntModel().leaveCriticalSection();
}
}
/**
* In Jena it can be difficult to get an object with a given dataproperty if
* you do not care about the datatype or lang of the literal. Use this
* method if you would like to ignore the lang and datatype.
*/
@Override
public List<Individual> getIndividualsByDataProperty(String dataPropertyUri, String value){
Property prop = null;
if( RDFS.label.getURI().equals( dataPropertyUri )){
prop = RDFS.label;
}else{
prop = getOntModel().getDatatypeProperty(dataPropertyUri);
}
if( prop == null ) {
log.debug("Could not getIndividualsByDataProperty() " +
"because " + dataPropertyUri + "was not found in model.");
return Collections.emptyList();
}
if( value == null ){
log.debug("Could not getIndividualsByDataProperty() " +
"because value was null");
return Collections.emptyList();
}
// Probe three literal forms of the same value: plain, typed, and
// language-tagged ("EN"), since the stored form is unknown.
Literal litv1 = getOntModel().createLiteral(value);
Literal litv2 = getOntModel().createTypedLiteral(value);
//warning: this assumes that any language tags will be EN
Literal litv3 = getOntModel().createLiteral(value,"EN");
// Keyed by subject URI to de-duplicate across the three passes.
HashMap<String,Individual> individualsMap = new HashMap<String, Individual>();
getOntModel().enterCriticalSection(Lock.READ);
int count = 0;
try{
StmtIterator stmts
= getOntModel().listStatements((Resource)null, prop, litv1);
while(stmts.hasNext()){
count++;
Statement stmt = stmts.nextStatement();
RDFNode sub = stmt.getSubject();
if( sub == null || sub.isAnon() || sub.isLiteral() )
continue;
RDFNode obj = stmt.getObject();
if( obj == null || !obj.isLiteral() )
continue;
Literal literal = (Literal)obj;
Object v = literal.getValue();
if( v == null )
continue;
String subUri = ((Resource)sub).getURI();
if( ! individualsMap.containsKey(subUri)){
individualsMap.put(subUri,makeIndividual(subUri));
}
}
// Second pass: typed-literal form of the same value.
stmts = getOntModel().listStatements((Resource)null, prop, litv2);
while(stmts.hasNext()){
count++;
Statement stmt = stmts.nextStatement();
RDFNode sub = stmt.getSubject();
if( sub == null || sub.isAnon() || sub.isLiteral() )
continue;
RDFNode obj = stmt.getObject();
if( obj == null || !obj.isLiteral() )
continue;
Literal literal = (Literal)obj;
Object v = literal.getValue();
if( v == null )
continue;
String subUri = ((Resource)sub).getURI();
if( ! individualsMap.containsKey(subUri)){
individualsMap.put(subUri, makeIndividual(subUri));
}
}
// Third pass: "EN"-tagged form of the same value.
stmts = getOntModel().listStatements((Resource)null, prop, litv3);
while(stmts.hasNext()){
count++;
Statement stmt = stmts.nextStatement();
RDFNode sub = stmt.getSubject();
if( sub == null || sub.isAnon() || sub.isLiteral() )
continue;
RDFNode obj = stmt.getObject();
if( obj == null || !obj.isLiteral() )
continue;
Literal literal = (Literal)obj;
Object v = literal.getValue();
if( v == null )
continue;
String subUri = ((Resource)sub).getURI();
if( ! individualsMap.containsKey(subUri)){
individualsMap.put(subUri, makeIndividual(subUri));
}
}
} finally {
getOntModel().leaveCriticalSection();
}
List<Individual> rv = new ArrayList(individualsMap.size());
rv.addAll(individualsMap.values());
return rv;
}
/**
 * Iterates over every individual that has an rdfs:label in any named
 * graph, materializing each lazily via makeIndividual().  Returns null
 * (not an empty iterator) when nothing matches.
 */
@Override
public Iterator getAllOfThisTypeIterator() {
final List<String> list =
new LinkedList<String>();
// Presence of an rdfs:label is used as the "is a user-visible
// individual" criterion here.
String query = "SELECT ?ind WHERE { \n" +
" GRAPH ?g { ?ind <" + RDFS.label.getURI() + "> ?label } \n" +
"}";
Query q = QueryFactory.create(query);
QueryExecution qe = QueryExecutionFactory.create(q, dataset);
try {
ResultSet rs = qe.execSelect();
while (rs.hasNext()) {
Resource res = rs.next().getResource("ind");
if (!res.isAnon()) {
list.add(res.getURI());
}
}
} finally {
qe.close();
}
// Previous in-memory implementation, kept for reference: it filtered out
// unlabeled individuals and a list of non-user-visible types.
// getOntModel().enterCriticalSection(Lock.READ);
// try {
// ClosableIterator allIndIt = getOntModel().listIndividuals();
// try {
// while (allIndIt.hasNext()) {
// com.hp.hpl.jena.ontology.Individual ind = (com.hp.hpl.jena.ontology.Individual) allIndIt.next();
//
//
//
// //don't include anything that lacks a label, issue VIVO-119.
// if( getLabel(ind) == null )
// continue;
//
//
// boolean userVisible = true;
// //Check for non-user visible types, maybe this should be an annotation?
// ClosableIterator typeIt = ind.listRDFTypes(false);
// try {
// while (typeIt.hasNext()) {
// Resource typeRes = (Resource) typeIt.next();
// String type = typeRes.getURI();
// // brute forcing this until we implement a better strategy
// if (VitroVocabulary.PORTAL.equals(type) ||
// VitroVocabulary.TAB.equals(type) ||
// VitroVocabulary.TAB_INDIVIDUALRELATION.equals(type) ||
// VitroVocabulary.LINK.equals(type) ||
// VitroVocabulary.KEYWORD.equals(type) ||
// VitroVocabulary.KEYWORD_INDIVIDUALRELATION.equals(type) ||
// VitroVocabulary.CLASSGROUP.equals(type) ||
// VitroVocabulary.PROPERTYGROUP.equals(type) ||
// VitroVocabulary.APPLICATION.equals(type)) {
// userVisible = false;
// break;
// }
// if( OWL.ObjectProperty.getURI().equals(type) ||
// OWL.DatatypeProperty.getURI().equals(type) ||
// OWL.AnnotationProperty.getURI().equals(type) ||
// RDF.type.getURI().equals(type) ){
// userVisible = false;
// break;
// }
// }
// } finally {
// typeIt.close();
// }
// if (userVisible) {
// list.add(ind);
// }
//
// }
// } finally {
// allIndIt.close();
// }
// } finally {
// getOntModel().leaveCriticalSection();
// }
if (list.size() >0){
// Lazy adapter: beans are only built as the caller advances.
return new Iterator(){
Iterator<String> innerIt = list.iterator();
public boolean hasNext() {
return innerIt.hasNext();
}
public Object next() {
return makeIndividual(innerIt.next());
}
public void remove() {
//not used
}
};
}
else
return null;
}
/**
 * Iterates over the instances of one class using the ABox OntModel.
 * NOTE(review): getOntClass() can return null for an unknown URI, which
 * would NPE at cls.listInstances() — confirm callers guarantee the class
 * exists.
 */
@Override
public Iterator getAllOfThisVClassIterator(String vClassURI) {
getOntModel().enterCriticalSection(Lock.READ);
try {
List ents = new LinkedList();
OntClass cls = getOntModel().getOntClass(vClassURI);
Iterator indIt = cls.listInstances();
while (indIt.hasNext()) {
com.hp.hpl.jena.ontology.Individual ind = (com.hp.hpl.jena.ontology.Individual) indIt.next();
ents.add(makeIndividual(ind.getURI()));
}
return ents.iterator();
} finally {
getOntModel().leaveCriticalSection();
}
}
/**
 * Iterates over user-visible individuals whose vitro:modTime is at or
 * after the given epoch-millis timestamp, using a SPARQL filter on the
 * ABox model.
 */
@Override
public Iterator getUpdatedSinceIterator(long updatedSince){
List ents = new ArrayList();
Date since = new DateTime(updatedSince).toDate();
String sinceStr = xsdDateTimeFormat.format(since);
getOntModel().enterCriticalSection(Lock.READ);
try {
Property modTimeProp = MODTIME;
if (modTimeProp == null)
modTimeProp = getOntModel().getProperty(VitroVocabulary.MODTIME);
if (modTimeProp == null)
return null; // throw an exception?
String queryStr = "PREFIX vitro: <"+ VitroVocabulary.vitroURI+"> " +
"PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>" +
"SELECT ?ent " +
"WHERE { " +
" ?ent vitro:modTime ?modTime ." +
" FILTER (xsd:dateTime(?modTime) >= \""+sinceStr+"\"^^xsd:dateTime) " +
"}";
Query query = QueryFactory.create(queryStr);
QueryExecution qe = QueryExecutionFactory.create(query,getOntModel());
ResultSet results = qe.execSelect();
while (results.hasNext()) {
QuerySolution qs = (QuerySolution) results.next();
Resource res = (Resource) qs.get("?ent");
com.hp.hpl.jena.ontology.Individual ent = getOntModel().getIndividual(res.getURI());
if (ent != null) {
// Hide individuals whose every (direct) type sits in a
// non-user namespace.
boolean userVisible = false;
ClosableIterator typeIt = ent.listRDFTypes(true);
try {
while (typeIt.hasNext()) {
Resource typeRes = (Resource) typeIt.next();
if (typeRes.getNameSpace() == null || (!NONUSER_NAMESPACES.contains(typeRes.getNameSpace()))) {
userVisible = true;
break;
}
}
} finally {
typeIt.close();
}
if (userVisible) {
ents.add(makeIndividual(ent.getURI()));
}
}
}
} finally {
getOntModel().leaveCriticalSection();
}
return ents.iterator();
}
}

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

View file

@ -10,6 +10,7 @@ import java.util.List;
import com.hp.hpl.jena.ontology.OntClass;
import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.rdf.model.ResourceFactory;
import com.hp.hpl.jena.rdf.model.Statement;
import com.hp.hpl.jena.shared.Lock;
import com.hp.hpl.jena.util.iterator.ClosableIterator;
@ -68,7 +69,7 @@ public class KeywordIndividualRelationDaoJena extends JenaBaseDao implements Key
}
getOntModel().enterCriticalSection(Lock.READ);
try {
com.hp.hpl.jena.ontology.Individual individual = getOntModel().getIndividual(individualURI);
Resource individual = ResourceFactory.createResource(individualURI);
if (individual != null) {
ClosableIterator stmtIt = getOntModel().listStatements(null, KEYWORD_INDIVIDUALRELATION_INVOLVESINDIVIDUAL, individual);
try {

View file

@ -19,6 +19,7 @@ import com.hp.hpl.jena.datatypes.xsd.XSDDatatype;
import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.rdf.model.Literal;
import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.rdf.model.ResourceFactory;
import com.hp.hpl.jena.rdf.model.Statement;
import com.hp.hpl.jena.shared.Lock;
import com.hp.hpl.jena.util.iterator.ClosableIterator;
@ -47,7 +48,7 @@ public class LinksDaoJena extends JenaBaseDao implements LinksDao {
List<Link> linksList = new ArrayList<Link>();
getOntModel().enterCriticalSection(Lock.READ);
try {
com.hp.hpl.jena.ontology.Individual entInd = getOntModel().getIndividual(individual.getURI());
Resource entInd = ResourceFactory.createResource(individual.getURI());
if (ADDITIONAL_LINK != null) {
ClosableIterator<Statement> links = getOntModel().listStatements(entInd,ADDITIONAL_LINK,(Resource)null);
try {

View file

@ -61,9 +61,10 @@ public class ObjectPropertyStatementDaoJena extends JenaBaseDao implements Objec
ObjectPropertyDaoJena opDaoJena = new ObjectPropertyDaoJena(getWebappDaoFactory());
getOntModel().enterCriticalSection(Lock.READ);
OntModel ontModel = getOntModelSelector().getABoxModel();
ontModel.enterCriticalSection(Lock.READ);
try {
Resource ind = getOntModel().getResource(entity.getURI());
Resource ind = ontModel.getResource(entity.getURI());
List<ObjectPropertyStatement> objPropertyStmtList = new ArrayList<ObjectPropertyStatement>();
ClosableIterator<Statement> propIt = ind.listProperties();
try {
@ -119,7 +120,7 @@ public class ObjectPropertyStatementDaoJena extends JenaBaseDao implements Objec
}
entity.setObjectPropertyStatements(objPropertyStmtList);
} finally {
getOntModel().leaveCriticalSection();
ontModel.leaveCriticalSection();
}
return entity;
}

View file

@ -0,0 +1,139 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.dao.jena;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.ontology.OntModelSpec;
import com.hp.hpl.jena.query.Dataset;
import com.hp.hpl.jena.query.QueryExecutionFactory;
import com.hp.hpl.jena.query.QueryFactory;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.Property;
import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.rdf.model.Statement;
import com.hp.hpl.jena.shared.Lock;
import com.hp.hpl.jena.util.iterator.ClosableIterator;
import com.hp.hpl.jena.vocabulary.RDFS;
import edu.cornell.mannlib.vitro.webapp.beans.Individual;
import edu.cornell.mannlib.vitro.webapp.beans.ObjectProperty;
import edu.cornell.mannlib.vitro.webapp.beans.ObjectPropertyStatement;
import edu.cornell.mannlib.vitro.webapp.beans.ObjectPropertyStatementImpl;
import edu.cornell.mannlib.vitro.webapp.dao.ObjectPropertyStatementDao;
import edu.cornell.mannlib.vitro.webapp.dao.VitroVocabulary;
/**
 * SDB-backed variant of ObjectPropertyStatementDaoJena.
 *
 * Rather than walking a fully in-memory OntModel, this implementation pulls
 * everything relevant to one individual out of the SDB-backed Dataset with a
 * single SPARQL CONSTRUCT, then fills the Individual bean from that small
 * result model.
 */
public class ObjectPropertyStatementDaoSDB extends
        ObjectPropertyStatementDaoJena implements ObjectPropertyStatementDao {

    // SDB-backed dataset holding the ABox data; queried via SPARQL below
    private Dataset dataset;

    public ObjectPropertyStatementDaoSDB(Dataset dataset, WebappDaoFactoryJena wadf) {
        super (wadf);
        this.dataset = dataset;
    }

    /**
     * Populates entity's object property statements from the SDB store.
     *
     * For each outgoing statement whose object is a resource (and whose
     * predicate is not in a non-user namespace) an ObjectPropertyStatement
     * bean is built, with the object wrapped as an IndividualSDB.
     * Statements whose predicate has no corresponding ObjectProperty in the
     * ontology are silently skipped.
     *
     * @param entity the Individual to fill; returned unchanged if its URI is null
     * @return the same entity instance, with its object property statement list set
     */
    @Override
    public Individual fillExistingObjectPropertyStatements(Individual entity) {
        if (entity.getURI() == null)
            return entity;
        else {
            // cache of property URI -> ObjectProperty to avoid repeated DAO lookups
            Map<String, ObjectProperty> uriToObjectProperty = new HashMap<String,ObjectProperty>();
            // One CONSTRUCT gathers the individual's outgoing statements plus
            // each object's type, label and moniker. Data lives in named
            // graphs in SDB, hence the GRAPH ?g wrapper in the WHERE clause.
            String query = "CONSTRUCT { \n" +
                    " <" + entity.getURI() + "> ?p ?o . \n" +
                    " ?o a ?oType . \n" +
                    " ?o <" + RDFS.label.getURI() + "> ?oLabel . \n" +
                    " ?o <" + VitroVocabulary.MONIKER + "> ?oMoniker \n" +
                    "} WHERE { GRAPH ?g { \n" +
                    " <" + entity.getURI() + "> ?p ?o . \n" +
                    " ?o a ?oType \n" +
                    " OPTIONAL { ?o <" + RDFS.label.getURI() + "> ?oLabel } \n" +
                    " OPTIONAL { ?o <" + VitroVocabulary.MONIKER + "> ?oMoniker } \n" +
                    "} }";
            long startTime = System.currentTimeMillis();
            // lock the dataset only for the duration of the query
            dataset.getLock().enterCriticalSection(Lock.READ);
            Model m = null;
            try {
                m = QueryExecutionFactory.create(QueryFactory.create(query), dataset).execConstruct();
            } finally {
                dataset.getLock().leaveCriticalSection();
            }
            if (log.isDebugEnabled()) {
                log.debug("Time (ms) to query for related individuals: " + (System.currentTimeMillis() - startTime));
                if (System.currentTimeMillis() - startTime > 1000) {
                    //log.debug(query);
                    log.debug("Results size (statements): " + m.size());
                }
            }
            // wrap the constructed result so the inherited bean-filling logic
            // can iterate it like an ordinary in-memory ontology model
            OntModel ontModel = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM, m);
            ontModel.enterCriticalSection(Lock.READ);
            try {
                Resource ind = ontModel.getResource(entity.getURI());
                List<ObjectPropertyStatement> objPropertyStmtList = new ArrayList<ObjectPropertyStatement>();
                ClosableIterator<Statement> propIt = ind.listProperties();
                try {
                    while (propIt.hasNext()) {
                        Statement st = (Statement) propIt.next();
                        // only resource-valued statements with user-visible predicates
                        if (st.getObject().isResource() && !(NONUSER_NAMESPACES.contains(st.getPredicate().getNameSpace()))) {
                            try {
                                ObjectPropertyStatement objPropertyStmt = new ObjectPropertyStatementImpl();
                                objPropertyStmt.setSubjectURI(entity.getURI());
                                objPropertyStmt.setSubject(entity);
                                try {
                                    objPropertyStmt.setObjectURI(((Resource)st.getObject()).getURI());
                                } catch (Throwable t) {
                                    t.printStackTrace();
                                }
                                objPropertyStmt.setPropertyURI(st.getPredicate().getURI());
                                try {
                                    Property prop = st.getPredicate();
                                    if( uriToObjectProperty.containsKey(prop.getURI())){
                                        objPropertyStmt.setProperty(uriToObjectProperty.get(prop.getURI()));
                                    }else{
                                        ObjectProperty p = getWebappDaoFactory().getObjectPropertyDao().getObjectPropertyByURI(prop.getURI());
                                        if( p != null ){
                                            uriToObjectProperty.put(prop.getURI(), p);
                                            objPropertyStmt.setProperty(uriToObjectProperty.get(prop.getURI()));
                                        }else{
                                            //if ObjectProperty not found in ontology, skip it
                                            continue;
                                        }
                                    }
                                } catch (Throwable g) {
                                    //do not add statement to list
                                    log.debug("exception while trying to get object property for statement list, statement skipped.", g);
                                    continue;
                                }
                                if (objPropertyStmt.getObjectURI() != null) {
                                    // object individual is lazily backed by the same dataset,
                                    // seeded with the CONSTRUCT result model m
                                    Individual objInd = new IndividualSDB(objPropertyStmt.getObjectURI(), dataset, getWebappDaoFactory(), m);
                                    objPropertyStmt.setObject(objInd);
                                }
                                //add object property statement to list for Individual
                                if ((objPropertyStmt.getSubjectURI() != null) && (objPropertyStmt.getPropertyURI() != null) && (objPropertyStmt.getObject() != null)){
                                    objPropertyStmtList.add(objPropertyStmt);
                                }
                            } catch (Throwable t) {
                                t.printStackTrace();
                            }
                        }
                    }
                } finally {
                    propIt.close();
                }
                entity.setObjectPropertyStatements(objPropertyStmtList);
            } finally {
                ontModel.leaveCriticalSection();
            }
            return entity;
        }
    }
}

View file

@ -0,0 +1,68 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.dao.jena;
import com.hp.hpl.jena.ontology.OntModel;
/**
 * Simple bean implementation of OntModelSelector: holds one OntModel
 * reference per role (ABox, TBox, display, user accounts, application
 * metadata, full union) and hands each back on request.
 */
public class OntModelSelectorImpl implements OntModelSelector {

    private OntModel aboxModel;
    private OntModel applicationMetadataModel;
    private OntModel displayModel;
    private OntModel fullModel;
    private OntModel tboxModel;
    private OntModel userAccountsModel;

    public OntModel getABoxModel() {
        return aboxModel;
    }

    public void setABoxModel(OntModel m) {
        aboxModel = m;
    }

    public OntModel getApplicationMetadataModel() {
        return applicationMetadataModel;
    }

    public void setApplicationMetadataModel(OntModel m) {
        applicationMetadataModel = m;
    }

    public OntModel getDisplayModel() {
        return displayModel;
    }

    public void setDisplayModel(OntModel m) {
        displayModel = m;
    }

    public OntModel getFullModel() {
        return fullModel;
    }

    public void setFullModel(OntModel m) {
        fullModel = m;
    }

    public OntModel getTBoxModel() {
        return tboxModel;
    }

    /**
     * Per-ontology TBox selection is not supported by this implementation;
     * the single TBox model is returned regardless of the URI given.
     */
    public OntModel getTBoxModel(String ontologyURI) {
        return tboxModel;
    }

    public void setTBoxModel(OntModel m) {
        tboxModel = m;
    }

    public OntModel getUserAccountsModel() {
        return userAccountsModel;
    }

    public void setUserAccountsModel(OntModel m) {
        userAccountsModel = m;
    }
}

View file

@ -0,0 +1,50 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.dao.jena;
import org.apache.commons.dbcp.BasicDataSource;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.hp.hpl.jena.graph.Graph;
import com.hp.hpl.jena.sdb.SDBFactory;
import com.hp.hpl.jena.sdb.Store;
import com.hp.hpl.jena.sdb.StoreDesc;
import java.sql.Connection;
import java.sql.SQLException;
/**
 * GraphGenerator backed by a Jena SDB store. Each call to generateGraph()
 * ensures a live JDBC connection from the pool, connects an SDB Store over
 * it, and returns the named graph identified by graphID.
 */
public class SDBGraphGenerator implements GraphGenerator {

    private static final Log log = LogFactory.getLog(SDBGraphGenerator.class.getName());

    private BasicDataSource ds;
    private Connection connection;
    private StoreDesc storeDesc;
    private String graphID;

    /**
     * @param dataSource pooled JDBC data source for the SDB database
     * @param storeDesc  SDB store description (layout, database type)
     * @param graphID    URI of the named graph this generator produces
     */
    public SDBGraphGenerator(BasicDataSource dataSource, StoreDesc storeDesc,
                             String graphID) {
        this.ds = dataSource;
        this.storeDesc = storeDesc;
        this.graphID = graphID;
    }

    public Graph generateGraph() {
        try {
            // reacquire a connection from the pool if we never had one,
            // or if the previous one has been closed underneath us
            boolean needsConnection = (connection == null) || connection.isClosed();
            if (needsConnection) {
                connection = ds.getConnection();
            }
            Store store = SDBFactory.connectStore(connection, storeDesc);
            return SDBFactory.connectNamedGraph(store, graphID);
        } catch (SQLException e) {
            String errMsg = "Unable to generate SDB graph";
            log.error(errMsg, e);
            throw new RuntimeException(errMsg, e);
        }
    }

    public Connection getConnection() {
        return connection;
    }
}

View file

@ -27,7 +27,14 @@ import com.hp.hpl.jena.ontology.ProfileException;
import com.hp.hpl.jena.ontology.Restriction;
import com.hp.hpl.jena.ontology.SomeValuesFromRestriction;
import com.hp.hpl.jena.ontology.UnionClass;
import com.hp.hpl.jena.query.Query;
import com.hp.hpl.jena.query.QueryExecution;
import com.hp.hpl.jena.query.QueryExecutionFactory;
import com.hp.hpl.jena.query.QueryFactory;
import com.hp.hpl.jena.query.ResultSet;
import com.hp.hpl.jena.query.Syntax;
import com.hp.hpl.jena.rdf.model.Literal;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.RDFNode;
import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.rdf.model.ResourceFactory;
@ -775,10 +782,10 @@ public class VClassDaoJena extends JenaBaseDao implements VClassDao {
getOntModel().enterCriticalSection(Lock.READ);
try {
if ((group != null) && (group.getURI() != null)) {
Individual groupInd = getOntModel().getIndividual(group.getURI());
Resource groupRes = ResourceFactory.createResource(group.getURI());
AnnotationProperty inClassGroup = getOntModel().getAnnotationProperty(VitroVocabulary.IN_CLASSGROUP);
if (inClassGroup != null) {
ClosableIterator annotIt = getOntModel().listStatements((OntClass)null,inClassGroup,groupInd);
ClosableIterator annotIt = getOntModel().listStatements((OntClass)null,inClassGroup,groupRes);
try {
while (annotIt.hasNext()) {
try {
@ -788,20 +795,39 @@ public class VClassDaoJena extends JenaBaseDao implements VClassDao {
if (vcw != null) {
boolean classIsInstantiated = false;
if (getIndividualCount) {
int count = getOntModel().listStatements(null,RDF.type,cls).toList().size();
Model aboxModel = getOntModelSelector().getABoxModel();
aboxModel.enterCriticalSection(Lock.READ);
int count = 0;
try {
String countQueryStr = "SELECT COUNT(*) WHERE \n" +
"{ ?s a <" + cls.getURI() + "> } \n";
Query countQuery = QueryFactory.create(countQueryStr, Syntax.syntaxARQ);
QueryExecution qe = QueryExecutionFactory.create(countQuery, aboxModel);
ResultSet rs =qe.execSelect();
count = Integer.parseInt(((Literal) rs.nextSolution().get(".1")).getLexicalForm());
//count = aboxModel.listStatements(null,RDF.type,cls).toList().size();
} finally {
aboxModel.leaveCriticalSection();
}
vcw.setEntityCount(count);
classIsInstantiated = (count > 0);
} else if (includeUninstantiatedClasses == false) {
// Note: to support SDB models, may want to do this with
// SPARQL and LIMIT 1 if SDB can take advantage of it
ClosableIterator countIt = getOntModel().listStatements(null,RDF.type,cls);
try {
if (countIt.hasNext()) {
classIsInstantiated = true;
}
} finally {
countIt.close();
}
Model aboxModel = getOntModelSelector().getABoxModel();
aboxModel.enterCriticalSection(Lock.READ);
try {
ClosableIterator countIt = aboxModel.listStatements(null,RDF.type,cls);
try {
if (countIt.hasNext()) {
classIsInstantiated = true;
}
} finally {
countIt.close();
}
} finally {
aboxModel.leaveCriticalSection();
}
}
if (includeUninstantiatedClasses || classIsInstantiated) {

View file

@ -0,0 +1,110 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.dao.jena;
import com.hp.hpl.jena.ontology.AnnotationProperty;
import com.hp.hpl.jena.ontology.OntClass;
import com.hp.hpl.jena.query.Dataset;
import com.hp.hpl.jena.query.Query;
import com.hp.hpl.jena.query.QueryExecution;
import com.hp.hpl.jena.query.QueryExecutionFactory;
import com.hp.hpl.jena.query.QueryFactory;
import com.hp.hpl.jena.query.ResultSet;
import com.hp.hpl.jena.query.Syntax;
import com.hp.hpl.jena.rdf.model.Literal;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.rdf.model.ResourceFactory;
import com.hp.hpl.jena.rdf.model.Statement;
import com.hp.hpl.jena.shared.Lock;
import com.hp.hpl.jena.util.iterator.ClosableIterator;
import com.hp.hpl.jena.vocabulary.RDF;
import edu.cornell.mannlib.vitro.webapp.beans.VClass;
import edu.cornell.mannlib.vitro.webapp.beans.VClassGroup;
import edu.cornell.mannlib.vitro.webapp.dao.VitroVocabulary;
/**
 * SDB-backed VClassDao. Overrides addVClassesToGroup so that individual
 * counts and instantiation checks run against the SDB dataset / ABox model
 * instead of the fully in-memory union model.
 */
public class VClassDaoSDB extends VClassDaoJena {

    // SDB-backed dataset used for the SPARQL count query
    private Dataset dataset;

    public VClassDaoSDB(Dataset dataset, WebappDaoFactoryJena wadf) {
        super(wadf);
        this.dataset = dataset;
    }

    protected Dataset getDataset() {
        return this.dataset;
    }

    /**
     * Adds the VClasses annotated as members of the given group to the group,
     * optionally setting each class's individual count or filtering out
     * uninstantiated classes.
     */
    @Deprecated
    public void addVClassesToGroup(VClassGroup group, boolean includeUninstantiatedClasses, boolean getIndividualCount) {
        getOntModel().enterCriticalSection(Lock.READ);
        try {
            if ((group != null) && (group.getURI() != null)) {
                Resource groupRes = ResourceFactory.createResource(group.getURI());
                AnnotationProperty inClassGroup = getOntModel().getAnnotationProperty(VitroVocabulary.IN_CLASSGROUP);
                if (inClassGroup != null) {
                    ClosableIterator annotIt = getOntModel().listStatements((OntClass)null,inClassGroup,groupRes);
                    try {
                        while (annotIt.hasNext()) {
                            try {
                                Statement annot = (Statement) annotIt.next();
                                Resource cls = (Resource) annot.getSubject();
                                VClass vcw = (VClass) getVClassByURI(cls.getURI());
                                if (vcw != null) {
                                    boolean classIsInstantiated = false;
                                    if (getIndividualCount) {
                                        Model aboxModel = getOntModelSelector().getABoxModel();
                                        aboxModel.enterCriticalSection(Lock.READ);
                                        int count = 0;
                                        try {
                                            // count instances across all named graphs in the dataset
                                            String countQueryStr = "SELECT COUNT(*) WHERE \n" +
                                                    "{ GRAPH ?g { ?s a <" + cls.getURI() + "> } } \n";
                                            Query countQuery = QueryFactory.create(countQueryStr, Syntax.syntaxARQ);
                                            QueryExecution qe = QueryExecutionFactory.create(countQuery, getDataset());
                                            // Fix: close the QueryExecution so its resources
                                            // (including any underlying SDB connection work)
                                            // are released; the original leaked it.
                                            try {
                                                ResultSet rs = qe.execSelect();
                                                // ".1" is the variable name old ARQ assigns to
                                                // a bare COUNT(*) — presumably stable for this
                                                // ARQ version; TODO confirm on ARQ upgrade
                                                count = Integer.parseInt(((Literal) rs.nextSolution().get(".1")).getLexicalForm());
                                            } finally {
                                                qe.close();
                                            }
                                        } finally {
                                            aboxModel.leaveCriticalSection();
                                        }
                                        vcw.setEntityCount(count);
                                        classIsInstantiated = (count > 0);
                                    } else if (includeUninstantiatedClasses == false) {
                                        // Note: to support SDB models, may want to do this with
                                        // SPARQL and LIMIT 1 if SDB can take advantage of it
                                        Model aboxModel = getOntModelSelector().getABoxModel();
                                        aboxModel.enterCriticalSection(Lock.READ);
                                        try {
                                            ClosableIterator countIt = aboxModel.listStatements(null,RDF.type,cls);
                                            try {
                                                if (countIt.hasNext()) {
                                                    classIsInstantiated = true;
                                                }
                                            } finally {
                                                countIt.close();
                                            }
                                        } finally {
                                            aboxModel.leaveCriticalSection();
                                        }
                                    }
                                    if (includeUninstantiatedClasses || classIsInstantiated) {
                                        group.add(vcw);
                                    }
                                }
                            } catch (ClassCastException cce) {cce.printStackTrace();}
                        }
                    } finally {
                        annotIt.close();
                    }
                }
            }
            java.util.Collections.sort(group.getVitroClassList());
        } finally {
            getOntModel().leaveCriticalSection();
        }
    }
}

View file

@ -549,7 +549,7 @@ public class WebappDaoFactoryJena implements WebappDaoFactory {
return propertyInstanceDao;
}
private VClassDao vClassDao = null;
protected VClassDao vClassDao = null;
public VClassDao getVClassDao() {
if( vClassDao == null )
vClassDao = new VClassDaoJena(this);

View file

@ -0,0 +1,60 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.dao.jena;
import java.util.HashSet;
import com.hp.hpl.jena.query.Dataset;
import edu.cornell.mannlib.vitro.webapp.dao.DataPropertyStatementDao;
import edu.cornell.mannlib.vitro.webapp.dao.IndividualDao;
import edu.cornell.mannlib.vitro.webapp.dao.ObjectPropertyStatementDao;
import edu.cornell.mannlib.vitro.webapp.dao.VClassDao;
/**
 * WebappDaoFactory backed by a Jena SDB triple store. Overrides the DAO
 * getters that need SDB-aware implementations, constructing each lazily on
 * first request and caching it in the field inherited from the Jena factory.
 */
public class WebappDaoFactorySDB extends WebappDaoFactoryJena {

    // SDB-backed dataset shared by all SDB-aware DAOs created here
    private Dataset dataset;

    public WebappDaoFactorySDB(OntModelSelector ontModelSelector, Dataset dataset) {
        super(ontModelSelector);
        this.dataset = dataset;
    }

    public WebappDaoFactorySDB(OntModelSelector ontModelSelector, Dataset dataset, String defaultNamespace, HashSet<String> nonuserNamespaces, String[] preferredLanguages) {
        super(ontModelSelector, defaultNamespace, nonuserNamespaces, preferredLanguages);
        this.dataset = dataset;
    }

    @Override
    public IndividualDao getIndividualDao() {
        if (entityWebappDao == null) {
            entityWebappDao = new IndividualDaoSDB(dataset, this);
        }
        return entityWebappDao;
    }

    @Override
    public DataPropertyStatementDao getDataPropertyStatementDao() {
        if (dataPropertyStatementDao == null) {
            dataPropertyStatementDao = new DataPropertyStatementDaoSDB(dataset, this);
        }
        return dataPropertyStatementDao;
    }

    @Override
    public ObjectPropertyStatementDao getObjectPropertyStatementDao() {
        if (objectPropertyStatementDao == null) {
            objectPropertyStatementDao = new ObjectPropertyStatementDaoSDB(dataset, this);
        }
        return objectPropertyStatementDao;
    }

    @Override
    public VClassDao getVClassDao() {
        if (vClassDao == null) {
            vClassDao = new VClassDaoSDB(dataset, this);
        }
        return vClassDao;
    }
}

View file

@ -8,6 +8,7 @@ import javax.servlet.http.HttpServletRequest;
import com.hp.hpl.jena.ontology.OntModel;
import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest;
import edu.cornell.mannlib.vitro.webapp.dao.jena.OntModelSelector;
public class StandardModelSelector implements ModelSelector {
@ -15,13 +16,18 @@ public class StandardModelSelector implements ModelSelector {
VitroRequest vreq = new VitroRequest( request );
Object sessionOntModel = null;
if( vreq.getSession() != null)
sessionOntModel = vreq.getSession().getAttribute("jenaOntModel");
if( vreq.getSession() != null) {
OntModelSelector oms = (OntModelSelector) vreq.getSession()
.getAttribute("unionOntModelSelector");
if (oms != null) {
sessionOntModel = oms.getABoxModel();
}
}
if(sessionOntModel != null && sessionOntModel instanceof OntModel )
return (OntModel)sessionOntModel;
else
return (OntModel)context.getAttribute("jenaOntModel");
return ((OntModelSelector) context
.getAttribute("unionOntModelSelector")).getABoxModel();
}
public static final ModelSelector selector = new StandardModelSelector();

View file

@ -128,7 +128,7 @@ public class VitroRequestPrep implements Filter {
vreq.setSunsetFlag(sunsetFlag);
//-- setup DAO factory --//
WebappDaoFactory wdf = getWebappDaoFactory();
WebappDaoFactory wdf = getWebappDaoFactory(vreq);
//TODO: get accept-language from request and set as preferred languages
//-- setup portal and portalFlag --//
@ -199,8 +199,10 @@ public class VitroRequestPrep implements Filter {
chain.doFilter(request, response);
}
private WebappDaoFactory getWebappDaoFactory(){
return (WebappDaoFactory) _context.getAttribute("webappDaoFactory");
private WebappDaoFactory getWebappDaoFactory(VitroRequest vreq){
WebappDaoFactory webappDaoFactory = vreq.getWebappDaoFactory();
return (webappDaoFactory != null) ? webappDaoFactory :
(WebappDaoFactory) _context.getAttribute("webappDaoFactory");
}
public void init(FilterConfig filterConfig) throws ServletException {

View file

@ -0,0 +1,135 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.filters;
import java.io.IOException;
import java.sql.SQLException;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import org.apache.commons.dbcp.BasicDataSource;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.hp.hpl.jena.query.Dataset;
import com.hp.hpl.jena.sdb.SDBFactory;
import com.hp.hpl.jena.sdb.Store;
import com.hp.hpl.jena.sdb.StoreDesc;
import com.hp.hpl.jena.sdb.sql.SDBConnection;
import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest;
import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactory;
import edu.cornell.mannlib.vitro.webapp.dao.jena.OntModelSelector;
import edu.cornell.mannlib.vitro.webapp.dao.jena.WebappDaoFactorySDB;
import edu.cornell.mannlib.vitro.webapp.servlet.setup.JenaDataSourceSetupBase;
/**
 * Servlet filter that, for each request, opens an SDB connection and installs
 * an SDB-backed WebappDaoFactory (plus the dataset) on the VitroRequest.
 * The connection is closed when the rest of the chain has completed.
 */
public class WebappDaoFactorySDBPrep implements Filter {

    private final static Log log = LogFactory.getLog(WebappDaoFactorySDBPrep.class);

    BasicDataSource _bds;
    StoreDesc _storeDesc;
    // NOTE(review): _conn is never assigned in this class — presumably a
    // leftover; kept because it is package-visible. Confirm before removing.
    SDBConnection _conn;
    OntModelSelector _oms;
    String _defaultNamespace;

    /**
     * The filter will be applied to all incoming urls;
     * this is a list of URI patterns to skip. These are
     * matched against the requestURI sans query parameters,
     * e.g.
     * "/vitro/index.jsp"
     * "/vitro/themes/enhanced/css/edit.css"
     *
     * These patterns are from VitroRequestPrep.java
     */
    Pattern[] skipPatterns = {
            Pattern.compile(".*\\.(gif|GIF|jpg|jpeg)$"),
            Pattern.compile(".*\\.css$"),
            Pattern.compile(".*\\.js$"),
            Pattern.compile("/.*/themes/.*/site_icons/.*"),
            Pattern.compile("/.*/images/.*")
    };

    public void doFilter(ServletRequest request, ServletResponse response,
            FilterChain filterChain) throws IOException, ServletException {

        if (request.getAttribute("WebappDaoFactorySDBPrep.setup") != null) {
            // Bug fix: the original returned here WITHOUT invoking the rest
            // of the filter chain, aborting the request whenever this filter
            // ran twice. A filter that opts out of its own work must still
            // delegate down the chain.
            filterChain.doFilter(request, response);
            return;
        }

        for( Pattern skipPattern : skipPatterns){
            Matcher match =skipPattern.matcher( ((HttpServletRequest)request).getRequestURI() );
            if( match.matches() ){
                // fixed copy-pasted message: this is WebappDaoFactorySDBPrep,
                // not VitroRequestPrep
                log.debug("request matched a skipPattern, skipping WebappDaoFactorySDBPrep");
                filterChain.doFilter(request, response);
                return;
            }
        }

        SDBConnection conn = null;
        try {
            if (
                    request instanceof HttpServletRequest &&
                    _bds != null && _storeDesc != null && _oms != null) {
                try {
                    conn = new SDBConnection(_bds.getConnection()) ;
                } catch (SQLException sqe) {
                    throw new RuntimeException("Unable to connect to database", sqe);
                }
                if (conn != null) {
                    Store store = SDBFactory.connectStore(conn, _storeDesc);
                    Dataset dataset = SDBFactory.connectDataset(store);
                    VitroRequest vreq = new VitroRequest((HttpServletRequest) request);
                    WebappDaoFactory wadf =
                        new WebappDaoFactorySDB(_oms, dataset, _defaultNamespace, null, null);
                    vreq.setWebappDaoFactory(wadf);
                    vreq.setFullWebappDaoFactory(wadf);
                    vreq.setDataset(dataset);
                }
            }
        } catch (Throwable t) {
            t.printStackTrace();
        }

        // mark the request so a second pass through this filter is a no-op
        request.setAttribute("WebappDaoFactorySDBPrep.setup", 1);

        try {
            filterChain.doFilter(request, response);
            return;
        } finally {
            // always release the SDB connection once the chain has finished
            if (conn != null) {
                conn.close();
            }
        }
    }

    public void init(FilterConfig filterConfig) throws ServletException {
        try {
            ServletContext ctx = filterConfig.getServletContext();
            _bds = JenaDataSourceSetupBase.getApplicationDataSource(ctx);
            _storeDesc = (StoreDesc) ctx.getAttribute("storeDesc");
            _oms = (OntModelSelector) ctx.getAttribute("unionOntModelSelector");
            _defaultNamespace = (String) ctx.getAttribute("defaultNamespace");
        } catch (Throwable t) {
            log.error("Unable to set up SDB WebappDaoFactory for request", t);
        }
    }

    public void destroy() {
        // no destroy actions
    }
}

View file

@ -0,0 +1,575 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.reasoner;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.hp.hpl.jena.ontology.OntClass;
import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.ontology.OntModelSpec;
import com.hp.hpl.jena.ontology.OntProperty;
import com.hp.hpl.jena.rdf.listeners.StatementListener;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.RDFNode;
import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.rdf.model.ResourceFactory;
import com.hp.hpl.jena.rdf.model.Statement;
import com.hp.hpl.jena.rdf.model.StmtIterator;
import com.hp.hpl.jena.shared.Lock;
import com.hp.hpl.jena.util.iterator.ExtendedIterator;
import com.hp.hpl.jena.vocabulary.RDF;
import com.hp.hpl.jena.vocabulary.RDFS;
/**
* Allows for instant incremental materialization or retraction of RDFS-
* style class and property subsumption based ABox inferences as statements
* are added to or removed from the (ABox or TBox) knowledge base.
*
*/
public class SimpleReasoner extends StatementListener {
	private static final Log log = LogFactory.getLog(SimpleReasoner.class);

	// asserted + inferred TBox axioms (input)
	private OntModel tboxModel;
	// asserted ABox statements (input); this reasoner listens to it
	private OntModel aboxModel;
	// materialized ABox inferences are added/removed here (output)
	private Model inferenceModel;
	// temporary model used while rebuilding the whole inference model
	private Model inferenceRebuildModel;
	// temporary scratch model used while rebuilding the inference model
	private Model scratchpadModel;
	/**
	 * Constructs the reasoner and registers it as a statement listener on the
	 * ABox model, so subsequent additions/retractions trigger incremental
	 * inference maintenance.
	 *
	 * @param tboxModel - input. This model contains both asserted and inferred TBox axioms
	 * @param aboxModel - input. This model contains asserted ABox statements
	 * @param inferenceModel - output. This is the model in which inferred (materialized) ABox statements are maintained (added or retracted).
	 * @param inferenceRebuildModel - output. This is the model temporarily used when the whole ABox inference model is rebuilt
	 * @param scratchpadModel - output. This is the model temporarily used when the whole ABox inference model is rebuilt
	 */
	public SimpleReasoner(OntModel tboxModel, OntModel aboxModel, Model inferenceModel,
			              Model inferenceRebuildModel, Model scratchpadModel) {
		this.tboxModel = tboxModel;
		this.aboxModel = aboxModel;
		this.inferenceModel = inferenceModel;
		this.inferenceRebuildModel = inferenceRebuildModel;
		this.scratchpadModel = scratchpadModel;
		// listen for ABox changes so inferences stay in sync
		aboxModel.register(this);
	}
	/**
	 * Convenience constructor using throwaway in-memory models for the rebuild
	 * and scratchpad stages.
	 *
	 * NOTE(review): unlike the five-argument constructor, this one does NOT
	 * register the reasoner as a listener on aboxModel — confirm this is
	 * intentional for callers that construct the reasoner this way.
	 *
	 * @param tboxModel - input. This model contains both asserted and inferred TBox axioms
	 * @param aboxModel - input. This model contains asserted ABox statements
	 * @param inferenceModel - output. This is the model in which inferred (materialized) ABox statements are maintained (added or retracted).
	 */
	public SimpleReasoner(OntModel tboxModel, OntModel aboxModel, Model inferenceModel) {
		this.tboxModel = tboxModel;
		this.aboxModel = aboxModel;
		this.inferenceModel = inferenceModel;
		this.inferenceRebuildModel = ModelFactory.createDefaultModel();
		this.scratchpadModel = ModelFactory.createDefaultModel();
	}
/*
* Performs incremental selected ABox reasoning based
* on a new type assertion (assertion that an individual
* is of a certain type) added to the ABox.
*
*/
@Override
public void addedStatement(Statement stmt) {
try {
if (stmt.getPredicate().equals(RDF.type)) {
addedType(stmt, inferenceModel);
}
} catch (Exception e) {
// don't stop the edit if there's an exception
log.error("Exception while adding incremental inferences: ", e);
}
}
/*
* Performs incremental selected ABox reasoning based
* on a removed type assertion (assertion that an individual
* is of a certain type) from the ABox.
*
*/
@Override
public void removedStatement(Statement stmt) {
try {
if (stmt.getPredicate().equals(RDF.type)) {
removedType(stmt);
}
} catch (Exception e) {
// don't stop the edit if there's an exception
log.error("Exception while retracting inferences: ", e);
}
}
/*
* Performs incremental selected ABox reasoning based
* on changes to the class hierarchy.
*
* Handles subclassOf and equivalentClass assertions
*
*/
public void addedTBoxStatement(Statement stmt) {
try {
if ( !(stmt.getPredicate().equals(RDFS.subClassOf) || stmt.getPredicate().equals(RDFS.subClassOf) ) ) return;
log.debug("stmt = " + stmt.toString());
OntClass subject = tboxModel.getOntClass((stmt.getSubject()).getURI());
OntClass object = tboxModel.getOntClass(((Resource)stmt.getObject()).getURI());
if (stmt.getPredicate().equals(RDFS.subClassOf)) {
addedSubClass(subject,object);
} else {
// equivalent class is the same as subclass in both directions
addedSubClass(subject,object);
addedSubClass(object,subject);
}
} catch (Exception e) {
// don't stop the edit if there's an exception
log.error("Exception while adding incremental inferences: ", e);
}
}
/*
* Performs incremental selected ABox reasoning based
* on changes to the class hierarchy.
*
* Handles subclassOf and equivalentClass assertions
*
*/
public void removedTBoxStatement(Statement stmt) {
try {
if ( !(stmt.getPredicate().equals(RDFS.subClassOf) || stmt.getPredicate().equals(RDFS.subClassOf) ) ) return;
log.debug("stmt = " + stmt.toString());
OntClass subject = tboxModel.getOntClass((stmt.getSubject()).getURI());
OntClass object = tboxModel.getOntClass(((Resource)stmt.getObject()).getURI());
if (stmt.getPredicate().equals(RDFS.subClassOf)) {
removedSubClass(subject,object);
} else {
// equivalent class is the same as subclass in both directions
removedSubClass(subject,object);
removedSubClass(object,subject);
}
} catch (Exception e) {
// don't stop the edit if there's an exception
log.error("Exception while removing incremental inferences: ", e);
}
}
	/*
	 * If it is added that B is of type A, then for each superclass of
	 * A assert that B is of that type.
	 *
	 * @param stmt - the added rdf:type statement (subject = individual, object = class)
	 * @param inferenceModel - the model in which the materialized inferences are stored
	 */
	public void addedType(Statement stmt, Model inferenceModel) {

		log.debug("stmt = " + stmt.toString());

		// read-lock the TBox while walking the superclass hierarchy
		tboxModel.enterCriticalSection(Lock.READ);

		try {
			OntClass cls = tboxModel.getOntClass(((Resource)stmt.getObject()).getURI());

			if (cls != null) {
				// listSuperClasses(false): all (not just direct) superclasses
				ExtendedIterator<OntClass> superIt = cls.listSuperClasses(false);
				while (superIt.hasNext()) {
					OntClass parentClass = superIt.next();

					// VIVO doesn't materialize statements that assert anonymous types
					// for individuals. Also, sharing an identical anonymous node is
					// not allowed in owl-dl. picklist population code looks at qualities
					// of classes not individuals.
					if (parentClass.isAnon()) continue;

					Statement infStmt = ResourceFactory.createStatement(stmt.getSubject(), RDF.type, parentClass);
					// write-lock the inference model only for the contains/add pair
					inferenceModel.enterCriticalSection(Lock.WRITE);
					try {
						// add only if not already materialized (keeps model duplicate-free)
						if (!inferenceModel.contains(infStmt)) {
							log.debug("Adding this inferred statement: " + infStmt.toString() );
							inferenceModel.add(infStmt);
						}
					} finally {
						inferenceModel.leaveCriticalSection();
					}
				}
			} else {
				log.debug("Didn't find target class (the object of the added rdf:type statement) in the TBox: " + ((Resource)stmt.getObject()).getURI());
			}
		} finally {
			tboxModel.leaveCriticalSection();
		}
	}
	/*
	 * If it is removed that B is of type A, then for each superclass of A remove
	 * the inferred statement that B is of that type UNLESS it is otherwise entailed
	 * that B is of that type.
	 *
	 * @param stmt - the removed rdf:type statement (subject = individual, object = class)
	 */
	public void removedType(Statement stmt) {

		log.debug("stmt = " + stmt.toString());

		// read-lock the TBox while walking the superclass hierarchy
		tboxModel.enterCriticalSection(Lock.READ);

		try {
			OntClass cls = tboxModel.getOntClass(((Resource)stmt.getObject()).getURI());

			if (cls != null) {
				// listSuperClasses(false): all (not just direct) superclasses
				ExtendedIterator<OntClass> superIt = cls.listSuperClasses(false);
				while (superIt.hasNext()) {
					OntClass parentClass = superIt.next();

					// VIVO doesn't materialize statements that assert anonymous types
					// for individuals. Also, sharing an identical anonymous node is
					// not allowed in owl-dl. picklist population code looks at qualities
					// of classes not individuals.
					if (parentClass.isAnon()) continue;

					if (entailedType(stmt.getSubject(),parentClass)) continue;    // if a type is still entailed without the
					                                                              // removed statement, then don't remove it
					                                                              // from the inferences

					Statement infStmt = ResourceFactory.createStatement(stmt.getSubject(), RDF.type, parentClass);
					// write-lock the inference model only for the contains/remove pair
					inferenceModel.enterCriticalSection(Lock.WRITE);
					try {
						if (inferenceModel.contains(infStmt)) {
							log.debug("Removing this inferred statement: " + infStmt.toString() + " - " + infStmt.getSubject().toString() + " - " + infStmt.getPredicate().toString() + " - " + infStmt.getObject().toString());
							inferenceModel.remove(infStmt);
						}
					} finally {
						inferenceModel.leaveCriticalSection();
					}
				}
			} else {
				log.debug("Didn't find target class (the object of the removed rdf:type statement) in the TBox: " + ((Resource)stmt.getObject()).getURI());
			}
		} finally {
			tboxModel.leaveCriticalSection();
		}
	}
/**
 * Reports whether it is entailed by class subsumption that {@code subject}
 * is of type {@code cls}: true iff the ABox asserts that the subject has
 * some subclass of cls as a type; otherwise false.
 */
public boolean entailedType(Resource subject, OntClass cls) {
    log.debug("subject = " + subject.getURI() + " class = " + cls.getURI());
    aboxModel.enterCriticalSection(Lock.READ);
    tboxModel.enterCriticalSection(Lock.READ);
    try {
        boolean entailed = false;
        ExtendedIterator<OntClass> subIt = cls.listSubClasses(false);
        while (!entailed && subIt.hasNext()) {
            OntClass sub = subIt.next();
            Statement typeStmt = ResourceFactory.createStatement(subject, RDF.type, sub);
            entailed = aboxModel.contains(typeStmt);
        }
        return entailed;
    } finally {
        aboxModel.leaveCriticalSection();
        tboxModel.leaveCriticalSection();
    }
}
/*
 * If added that B is a subclass of A, then find all individuals
 * that are typed as B, either in the ABox or in the inferred model
 * and assert that they are of type A.
 */
public void addedSubClass(OntClass subClass, OntClass superClass) {
    log.debug("subClass = " + subClass.getURI() + " superClass = " + superClass.getURI());
    aboxModel.enterCriticalSection(Lock.READ);
    inferenceModel.enterCriticalSection(Lock.WRITE);
    try {
        // Union of asserted and inferred ABox data, so individuals typed as
        // subClass in either graph are picked up.
        OntModel unionModel = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM);
        unionModel.add(aboxModel);
        unionModel.add(inferenceModel);
        StmtIterator iter = unionModel.listStatements((Resource) null, RDF.type, subClass);
        while (iter.hasNext()) {
            Statement stmt = iter.next();
            Resource ind = unionModel.getResource(stmt.getSubject().getURI());
            Statement infStmt = ResourceFactory.createStatement(ind, RDF.type, superClass);
            // BUG FIX: this loop previously called
            // inferenceModel.enterCriticalSection(Lock.WRITE) on every
            // iteration with no matching leaveCriticalSection(), so the
            // count-based lock was never fully released. The WRITE lock
            // acquired above already covers these operations.
            if (!inferenceModel.contains(infStmt)) {
                log.debug("Adding this inferred statement: " + infStmt.toString() );
                inferenceModel.add(infStmt);
            }
        }
    } finally {
        aboxModel.leaveCriticalSection();
        inferenceModel.leaveCriticalSection();
    }
}
/*
 * If removed that B is a subclass of A, then for each individual
 * that is of type B, either inferred or in the ABox, then
 * remove the inferred assertion that it is of type A,
 * UNLESS the individual is of some type C that is
 * a subClass of A (including A itself)
 */
public void removedSubClass(OntClass subClass, OntClass superClass) {
    log.debug("subClass = " + subClass.getURI() + ". superClass = " + superClass.getURI());
    aboxModel.enterCriticalSection(Lock.READ);
    inferenceModel.enterCriticalSection(Lock.WRITE);
    try {
        // Union of asserted and inferred ABox data, so individuals typed as
        // subClass in either graph are picked up.
        OntModel unionModel = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM);
        unionModel.add(aboxModel);
        unionModel.add(inferenceModel);
        StmtIterator iter = unionModel.listStatements((Resource) null, RDF.type, subClass);
        while (iter.hasNext()) {
            Statement stmt = iter.next();
            Resource ind = unionModel.getResource(stmt.getSubject().getURI());
            // Keep the inference when the type is still entailed by another
            // asserted type of the individual.
            if (entailedType(ind,superClass)) continue;
            Statement infStmt = ResourceFactory.createStatement(ind, RDF.type, superClass);
            // BUG FIX: removed the per-iteration
            // inferenceModel.enterCriticalSection(Lock.WRITE) that had no
            // matching leaveCriticalSection(); the WRITE lock acquired above
            // already covers these operations.
            if (inferenceModel.contains(infStmt)) {
                log.debug("Removing this inferred statement: " + infStmt.toString() );
                inferenceModel.remove(infStmt);
            }
        }
    } finally {
        aboxModel.leaveCriticalSection();
        inferenceModel.leaveCriticalSection();
    }
}
/*
 * Recompute the entire ABox inference graph. The new
 * inference graph is built up in a separate model and
 * then reconciled with the inference graph used by the
 * application. The model reconciliation must be done
 * without reading the whole inference models into
 * memory since we are supporting very large ABox
 * inference models.
 */
public void recompute(OntClass subClass, OntClass superClass) {
    // NOTE(review): the subClass and superClass parameters are not used
    // anywhere in this method — presumably retained for call-site
    // signature compatibility; confirm.

    // Phase 1: rebuild all type inferences from scratch into
    // inferenceRebuildModel, one asserted rdf:type statement at a time.
    // recompute the inferences
    inferenceRebuildModel.enterCriticalSection(Lock.WRITE);
    aboxModel.enterCriticalSection(Lock.READ);
    try {
        inferenceRebuildModel.removeAll();
        StmtIterator iter = aboxModel.listStatements((Resource) null, RDF.type, (RDFNode) null);
        while (iter.hasNext()) {
            Statement stmt = iter.next();
            // addedType materializes the inferences for this statement
            // into the rebuild model (second argument).
            addedType(stmt, inferenceRebuildModel);
        }
    } catch (Exception e) {
        log.error("Exception while recomputing ABox inference model", e);
    } finally {
        aboxModel.leaveCriticalSection();
        inferenceRebuildModel.leaveCriticalSection();
    }

    // Phase 2: reconcile. The scratchpad model is used twice as a staging
    // area so the live inference model is changed by diffs (remove the
    // stale statements, then add the new ones) rather than replaced
    // wholesale.
    // reflect the recomputed inferences into the application inference
    // model.
    inferenceRebuildModel.enterCriticalSection(Lock.READ);
    scratchpadModel.enterCriticalSection(Lock.WRITE);
    try {
        // Remove everything from the current inference model that is not
        // in the recomputed inference model
        inferenceModel.enterCriticalSection(Lock.READ);
        try {
            scratchpadModel.removeAll();
            StmtIterator iter = inferenceModel.listStatements();
            while (iter.hasNext()) {
                Statement stmt = iter.next();
                if (!inferenceRebuildModel.contains(stmt)) {
                    scratchpadModel.add(stmt);
                }
            }
        } catch (Exception e) {
            log.error("Exception while reconciling the current and recomputed ABox inference models", e);
        } finally {
            inferenceModel.leaveCriticalSection();
        }
        // Apply the removals collected in the scratchpad under a WRITE lock.
        inferenceModel.enterCriticalSection(Lock.WRITE);
        try {
            inferenceModel.remove(scratchpadModel);
        } catch (Exception e){
            log.error("Exception while reconciling the current and recomputed ABox inference models", e);
        } finally {
            inferenceModel.leaveCriticalSection();
        }
        // Add everything from the recomputed inference model that is not already
        // in the current inference model to the current inference model.
        inferenceModel.enterCriticalSection(Lock.READ);
        try {
            scratchpadModel.removeAll();
            StmtIterator iter = inferenceRebuildModel.listStatements();
            while (iter.hasNext()) {
                Statement stmt = iter.next();
                if (!inferenceModel.contains(stmt)) {
                    scratchpadModel.add(stmt);
                }
            }
        } catch (Exception e) {
            log.error("Exception while reconciling the current and recomputed ABox inference models", e);
        } finally {
            inferenceModel.leaveCriticalSection();
        }
        // Apply the additions collected in the scratchpad under a WRITE lock.
        inferenceModel.enterCriticalSection(Lock.WRITE);
        try {
            inferenceModel.add(scratchpadModel);
        } catch (Exception e){
            log.error("Exception while reconciling the current and recomputed ABox inference models", e);
        } finally {
            inferenceModel.leaveCriticalSection();
        }
    } finally {
        inferenceRebuildModel.leaveCriticalSection();
        scratchpadModel.leaveCriticalSection();
    }
}
// The following three methods aren't currently called; the default behavior of VIVO is to not materialize such inferences.
/**
 * When a statement with predicate p is asserted, materialize the same
 * statement under every superproperty of p in the inference model.
 */
public void addedProperty(Statement stmt) {
    tboxModel.enterCriticalSection(Lock.READ);
    try {
        OntProperty prop = tboxModel.getOntProperty(stmt.getPredicate().getURI());
        if (prop == null) {
            log.debug("Didn't find predicate of the added statement in the TBox: " + stmt.getPredicate().getURI());
            return;
        }
        ExtendedIterator<? extends OntProperty> parents = prop.listSuperProperties(false);
        while (parents.hasNext()) {
            OntProperty superProp = parents.next();
            Statement infStmt = ResourceFactory.createStatement(stmt.getSubject(), superProp, stmt.getObject());
            inferenceModel.enterCriticalSection(Lock.WRITE);
            try {
                if (!inferenceModel.contains(infStmt)) {
                    log.debug("Adding inferred statement: " + infStmt.toString() + " - " + infStmt.getSubject().toString() + " - " + infStmt.getPredicate().toString() + " - " + infStmt.getObject().toString());
                    inferenceModel.add(infStmt);
                }
            } finally {
                inferenceModel.leaveCriticalSection();
            }
        }
    } finally {
        tboxModel.leaveCriticalSection();
    }
}
/**
 * When a statement with predicate p is retracted, retract the inferred
 * statement under each superproperty of p, unless that statement is still
 * entailed by another asserted subproperty statement.
 */
public void removedProperty(Statement stmt) {
    tboxModel.enterCriticalSection(Lock.READ);
    try {
        OntProperty prop = tboxModel.getOntProperty(stmt.getPredicate().getURI());
        if (prop == null) {
            log.debug("Didn't find predicate of the removed statement in the TBox: " + stmt.getPredicate().getURI());
            return;
        }
        ExtendedIterator<? extends OntProperty> parents = prop.listSuperProperties(false);
        while (parents.hasNext()) {
            OntProperty superProp = parents.next();
            // If the statement is still entailed, don't remove it from the
            // inference graph.
            if (entailedStmt(stmt.getSubject(), superProp, stmt.getObject())) {
                continue;
            }
            Statement infStmt = ResourceFactory.createStatement(stmt.getSubject(), superProp, stmt.getObject());
            inferenceModel.enterCriticalSection(Lock.WRITE);
            try {
                if (inferenceModel.contains(infStmt)) {
                    log.debug("Removing inferred statement: " + infStmt.toString() + " - " + infStmt.getSubject().toString() + " - " + infStmt.getPredicate().toString() + " - " + infStmt.getObject().toString());
                    inferenceModel.remove(infStmt);
                }
            } finally {
                inferenceModel.leaveCriticalSection();
            }
        }
    } finally {
        tboxModel.leaveCriticalSection();
    }
}
/**
 * Reports whether the statement (subject, prop, object) is entailed by
 * property subsumption: true iff the ABox asserts the same subject/object
 * pair under some subproperty of prop.
 */
public boolean entailedStmt(Resource subject, OntProperty prop, RDFNode object) {
    aboxModel.enterCriticalSection(Lock.READ);
    try {
        for (ExtendedIterator<? extends OntProperty> subIt = prop.listSubProperties(false); subIt.hasNext(); ) {
            OntProperty subProp = subIt.next();
            Statement candidate = ResourceFactory.createStatement(subject, subProp, object);
            if (aboxModel.contains(candidate)) {
                return true;
            }
        }
        return false;
    } finally {
        aboxModel.leaveCriticalSection();
    }
}
}

View file

@ -0,0 +1,39 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.reasoner.support;
import com.hp.hpl.jena.rdf.listeners.StatementListener;
import com.hp.hpl.jena.rdf.model.Statement;
import edu.cornell.mannlib.vitro.webapp.reasoner.SimpleReasoner;
/**
 * Routes notification of changes to the TBox to the incremental ABox
 * reasoner. The incremental ABox reasoner needs to handle only subclass,
 * superclass and equivalent class axioms.
 */
public class SimpleReasonerTBoxListener extends StatementListener {

    // The reasoner that receives TBox change notifications.
    private final SimpleReasoner simpleReasoner;

    public SimpleReasonerTBoxListener(SimpleReasoner simpleReasoner) {
        this.simpleReasoner = simpleReasoner;
    }

    /** Forward a TBox addition to the reasoner. */
    @Override
    public void addedStatement(Statement stmt) {
        simpleReasoner.addedTBoxStatement(stmt);
    }

    /** Forward a TBox retraction to the reasoner. */
    @Override
    public void removedStatement(Statement stmt) {
        simpleReasoner.removedTBoxStatement(stmt);
    }
}

View file

@ -53,10 +53,11 @@ public class AttachSubmodels implements ServletContextListener {
attachmentCount++;
log.info("Attached submodel from file " + p);
} catch (Exception ioe) {
fis.close();
log.error("Unable to attach submodel from file " + p, ioe);
System.out.println("Unable to attach submodel from file " + p);
ioe.printStackTrace();
} finally {
fis.close();
}
} catch (FileNotFoundException fnfe) {
log.warn(p + " not found. Unable to attach as submodel" +

View file

@ -0,0 +1,233 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.servlet.setup;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.hp.hpl.jena.graph.Node;
import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.sdb.SDBFactory;
import com.hp.hpl.jena.sdb.Store;
import com.hp.hpl.jena.sdb.util.StoreUtils;
import edu.cornell.mannlib.vitro.webapp.dao.jena.OntModelSelectorImpl;
// This ContextListener must run after the JenaDataSourceSetup ContextListener
public class FileGraphSetup implements ServletContextListener {

    private static String ABOX = "abox";
    private static String TBOX = "tbox";
    private static String PATH_ROOT = "/WEB-INF/filegraph/";
    private static String URI_ROOT = "http://vitro.mannlib.cornell.edu/filegraph/";

    private static final Log log = LogFactory.getLog(FileGraphSetup.class);

    /**
     * On startup, synchronizes the SDB store with the ABox and TBox file
     * graphs found under PATH_ROOT, and attaches each non-empty file graph
     * as an in-memory submodel of the corresponding base model.
     */
    public void contextInitialized(ServletContextEvent sce) {

        try {
            OntModelSelectorImpl baseOms = (OntModelSelectorImpl) sce.getServletContext().getAttribute("baseOntModelSelector");
            Store kbStore = (Store) sce.getServletContext().getAttribute("kbStore");

            // ABox files
            Set<String> pathSet = sce.getServletContext().getResourcePaths(PATH_ROOT + ABOX);
            // NOTE(review): if the directory is missing, pathSet is null and
            // cleanupDB is handed an empty URI set, which deletes every file
            // graph of this type from the DB — confirm this is intended.
            cleanupDB(kbStore, pathToURI(pathSet, ABOX), ABOX);

            if (pathSet != null) {
                OntModel aboxBaseModel = baseOms.getABoxModel();
                readGraphs(sce, pathSet, kbStore, ABOX, aboxBaseModel);
            }

            // TBox files
            pathSet = sce.getServletContext().getResourcePaths(PATH_ROOT + TBOX);
            cleanupDB(kbStore, pathToURI(pathSet, TBOX),TBOX);

            if (pathSet != null) {
                OntModel tboxBaseModel = baseOms.getTBoxModel();
                readGraphs(sce, pathSet, kbStore, TBOX, tboxBaseModel);
            }
        } catch (ClassCastException cce) {
            String errMsg = "Unable to cast servlet context attribute to the appropriate type " + cce.getLocalizedMessage();
            log.error(errMsg);
            throw new ClassCastException(errMsg);
        } catch (Throwable t) {
            System.out.println("Throwable in listener " + this.getClass().getName());
            log.error(t);
            t.printStackTrace();
        }
    }

    /*
     * Reads the graphs stored as files in sub-directories of
     * FileGraphSetup.PATH_ROOT and for each graph:
     * 1. updates the SDB store to reflect the current contents of the graph.
     * 2. adds the graph as an in-memory submodel of the base in-memory graph
     *
     * Note: no connection needs to be maintained between the in-memory copy of the
     * graph and the DB copy.
     */
    public void readGraphs(ServletContextEvent sce, Set<String> pathSet, Store kbStore, String type, OntModel baseModel) {

        int count = 0;

        // For each file graph in the target directory update or add that graph to
        // the Jena SDB, and attach the graph as a submodel of the base model
        for ( String p : pathSet ) {

            count++; // note this will count the empty files too
            File file = new File(sce.getServletContext().getRealPath(p));

            try {
                FileInputStream fis = new FileInputStream( file );
                try {
                    Model model = ModelFactory.createDefaultModel();
                    if ( p.endsWith(".n3") || p.endsWith(".N3") || p.endsWith(".ttl") || p.endsWith(".TTL") ) {
                        model.read( fis, null, "N3" );
                    } else if ( p.endsWith(".owl") || p.endsWith(".OWL") || p.endsWith(".rdf") || p.endsWith(".RDF") || p.endsWith(".xml") || p.endsWith(".XML") ) {
                        model.read( fis, null, "RDF/XML" );
                    } else {
                        log.warn("Ignoring " + type + " file graph " + p + " because the file extension is unrecognized.");
                        // BUG FIX: skip the DB update for unrecognized files.
                        // Previously the empty placeholder model fell through
                        // to updateGraphInDB(), which would wipe an existing
                        // non-empty DB graph registered under this file's name.
                        continue;
                    }

                    if ( !model.isEmpty() ) {
                        baseModel.addSubModel(model);
                        log.info("Attached file graph as " + type + " submodel " + p);
                    }

                    // An empty (but recognized) file intentionally empties the
                    // corresponding DB graph.
                    updateGraphInDB(kbStore, model, type, p);

                } catch (Exception ioe) {
                    log.error("Unable to process file graph " + p, ioe);
                    System.out.println("Unable to process file graph " + p);
                    ioe.printStackTrace();
                } finally {
                    fis.close();
                }
            } catch (FileNotFoundException fnfe) {
                log.warn(p + " not found. Unable to process file graph" +
                        ((fnfe.getLocalizedMessage() != null) ?
                        fnfe.getLocalizedMessage() : "") );
            } catch (IOException ioe) {
                // this is for the fis.close() above.
                log.warn("Exception while trying to close file graph file: " + p,ioe);
            }
        } // end - for

        System.out.println("Read " + count + " " + type + " file graph" + ((count == 1) ? "" : "s") + " from " + PATH_ROOT + type);

        return;
    }

    /*
     * If a graph with the given name doesn't exist in the DB then add it.
     *
     * Otherwise, if a graph with the given name is in the DB and is not isomorphic with
     * the graph that was read from the file system then replace the graph
     * in the DB with the one read from the file system.
     *
     * Otherwise, if a graph with the given name is in the DB and is isomorphic with
     * the graph that was read from the files system, then do nothing.
     */
    public void updateGraphInDB(Store kbStore, Model fileModel, String type, String path) {

        String graphURI = pathToURI(path,type);
        Model dbModel = SDBFactory.connectNamedModel(kbStore, graphURI);

        if (dbModel.isEmpty() ) {
            dbModel.add(fileModel);
        } else if (!dbModel.isIsomorphicWith(fileModel)) {
            dbModel.removeAll();
            dbModel.add(fileModel);
        }

        return;
    }

    /*
     * Deletes any file graphs that are no longer present in the file system
     * from the DB.
     *
     * @param uriSet (input) - a set of graph URIs representing the file
     *                         graphs (of the given type) in the file
     *                         system.
     * @param type (input)   - abox or tbox.
     * @param kbStore (output) - the SDB store for the application
     */
    public void cleanupDB(Store kbStore, Set<String> uriSet, String type) {

        Pattern graphURIPat = Pattern.compile("^" + URI_ROOT + type);
        Iterator<Node> iter = StoreUtils.storeGraphNames(kbStore);

        while (iter.hasNext()) {
            Node node = iter.next();
            Matcher matcher = graphURIPat.matcher(node.getURI());

            if (matcher.find()) {
                if (!uriSet.contains(node.getURI())) {
                    Model model = SDBFactory.connectNamedModel(kbStore, node.getURI());
                    model.removeAll(); // delete the graph from the DB
                    log.info("Removed " + type + " file graph " + node.getURI() + " from the DB store because the file no longer exists in the file system");
                }
            }
        }

        return;
    }

    /*
     * Takes a set of path names for file graphs and returns a set containing
     * a graph uri for each path name in the input set. If pathSet is null
     * returns an empty set.
     */
    public Set<String> pathToURI (Set<String> pathSet, String type) {

        HashSet<String> uriSet = new HashSet<String>();

        if (pathSet != null) {
            for ( String path : pathSet ) {
                uriSet.add(pathToURI(path,type));
            }
        }

        return uriSet;
    }

    /*
     * Takes a path name for a file graph and returns the corresponding SDB URI
     * for the graph. The correspondence is by defined convention.
     */
    public String pathToURI(String path, String type) {

        String uri = null;

        if (path != null) {
            File file = new File(path);
            uri = URI_ROOT + type + "/" + file.getName();
        }

        return uri;
    }

    public void contextDestroyed( ServletContextEvent sce ) {
        // nothing to do
    }
}

View file

@ -84,6 +84,7 @@ public class JenaDataSourceSetup extends JenaDataSourceSetupBase implements java
sce.getServletContext().setAttribute("jenaOntModel", unionModel);
WebappDaoFactory wadf = new WebappDaoFactoryJena(unionOms, defaultNamespace, null, null);
sce.getServletContext().setAttribute("webappDaoFactory",wadf);
sce.getServletContext().setAttribute("unionOntModelSelector", unionOms);
ApplicationBean appBean = getApplicationBeanFromOntModel(memModel,wadf);
if (appBean != null) {
@ -262,3 +263,4 @@ public class JenaDataSourceSetup extends JenaDataSourceSetupBase implements java
}
}

View file

@ -16,19 +16,23 @@ import com.hp.hpl.jena.ontology.OntModelSpec;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.sdb.StoreDesc;
import com.hp.hpl.jena.sdb.store.DatabaseType;
import com.hp.hpl.jena.sdb.store.LayoutType;
import edu.cornell.mannlib.vitro.webapp.ConfigurationProperties;
import edu.cornell.mannlib.vitro.webapp.dao.VitroVocabulary;
import edu.cornell.mannlib.vitro.webapp.dao.jena.JenaBaseDaoCon;
import edu.cornell.mannlib.vitro.webapp.dao.jena.RDBGraphGenerator;
import edu.cornell.mannlib.vitro.webapp.dao.jena.RegeneratingGraph;
import edu.cornell.mannlib.vitro.webapp.dao.jena.SDBGraphGenerator;
public class JenaDataSourceSetupBase extends JenaBaseDaoCon {
private static final Log log = LogFactory.getLog(JenaDataSourceSetupBase.class);
protected final static int DEFAULT_MAXWAIT = 10000, // ms
DEFAULT_MAXACTIVE = 40,
DEFAULT_MAXIDLE = 10,
DEFAULT_MAXACTIVE = 300,
DEFAULT_MAXIDLE = 84,
DEFAULT_TIMEBETWEENEVICTIONS = 30 * 60 * 1000, // ms
DEFAULT_TESTSPEREVICTION = 3,
DEFAULT_MINEVICTIONIDLETIME = 1000 * 60 * 30; // ms
@ -47,11 +51,42 @@ public class JenaDataSourceSetupBase extends JenaBaseDaoCon {
String DB_PASSWD = "jenatest"; // database password
String DB = "MySQL"; // database type
String DB_DRIVER_CLASS_NAME = "com.mysql.jdbc.Driver";
// ABox assertions. These are stored in a database (Jena SDB) and the application works
// (queries and updates) with the ABox data from the DB - this model is not maintained
// in memory. For query performance reasons, there won't be any submodels for the ABox data.
static final String JENA_DB_MODEL = "http://vitro.mannlib.cornell.edu/default/vitro-kb-2";
static final String JENA_AUDIT_MODEL = "http://vitro.mannlib.cornell.edu/ns/db/experimental/audit";
// ABox inferences. This is ABox data that is inferred, using VIVO's native simple, specific-
// purpose reasoning based on the combination of the Abox (assertion and inferences) data
// and the TBox (assertions and inferences) data.
static final String JENA_INF_MODEL = "http://vitro.mannlib.cornell.edu/default/vitro-kb-inf";
// TBox assertions.
// Some of these (the local extensions) are stored and maintained in a Jena database and
// are also maintained in memory while the application is running.
// Other parts of the TBox, the 'VIVO Core', are also backed by a Jena DB, but they are
// read fresh from files each time the application starts. While the application is running,
// they are kept in memory, as submodels of the in memory copy of this named graph.
static final String JENA_TBOX_ASSERTIONS_MODEL = "http://vitro.mannlib.cornell.edu/default/asserted-tbox";
// Inferred TBox. This is TBox data that is inferred from the combination of VIVO core TBox
// and any local extension TBox assertions. Pellet computes these inferences.
// These are stored in the DB.
static final String JENA_TBOX_INF_MODEL = "http://vitro.mannlib.cornell.edu/default/inferred-tbox";
static final String JENA_AUDIT_MODEL = "http://vitro.mannlib.cornell.edu/ns/db/experimental/audit";
static final String JENA_USER_ACCOUNTS_MODEL = "http://vitro.mannlib.cornell.edu/default/vitro-kb-userAccounts";
// This model doesn't exist yet. It's a placeholder for the application ontology.
static final String JENA_APPLICATION_METADATA_MODEL = "http://vitro.mannlib.cornell.edu/default/vitro-kb-applicationMetadata";
// This is Brian C's application.owl file. We may not have to be concerned with this for
// release 1.2.
static final String JENA_DISPLAY_METADATA_MODEL = "http://vitro.mannlib.cornell.edu/default/vitro-kb-displayMetadata";
static final String DEFAULT_DEFAULT_NAMESPACE = "http://vitro.mannlib.cornell.edu/ns/default#";
@ -79,6 +114,30 @@ public class JenaDataSourceSetupBase extends JenaBaseDaoCon {
jenaDbOntModelSpec = (jenaDbOntModelSpec != null) ? jenaDbOntModelSpec : DB_ONT_MODEL_SPEC;
return makeDBModel(ds, jenaDbModelName, jenaDbOntModelSpec);
}
/**
 * Sets up a BasicDataSource using values from
 * a properties file.
 * Reads driver class, JDBC URL, username and password from the
 * VitroConnection.DataSource.* configuration properties; the driver
 * falls back to DB_DRIVER_CLASS_NAME when unset.
 */
public final BasicDataSource makeDataSourceFromConfigurationProperties(){
    String dbDriverClassname = ConfigurationProperties.getProperty("VitroConnection.DataSource.driver", DB_DRIVER_CLASS_NAME);
    // NOTE(review): the unicode query string is appended unconditionally;
    // if the configured URL already carries parameters, or the property is
    // missing (yielding "null?useUnicode=..."), this produces a malformed
    // JDBC URL — confirm the expected inputs.
    String jdbcUrl = ConfigurationProperties.getProperty("VitroConnection.DataSource.url") + "?useUnicode=yes&characterEncoding=utf8";
    String username = ConfigurationProperties.getProperty("VitroConnection.DataSource.username");
    String password = ConfigurationProperties.getProperty("VitroConnection.DataSource.password");
    return makeBasicDataSource(dbDriverClassname, jdbcUrl, username, password);
}
/** Stores the application's shared DataSource in the servlet context. */
public void setApplicationDataSource(BasicDataSource bds, ServletContext ctx) {
    ctx.setAttribute(getDataSourceAttributeName(), bds);
}

/**
 * Retrieves the DataSource previously stored by
 * setApplicationDataSource(), or null if none was stored.
 */
public static BasicDataSource getApplicationDataSource(ServletContext ctx) {
    return (BasicDataSource) ctx.getAttribute(getDataSourceAttributeName());
}

// Context-attribute key, namespaced by class name to avoid collisions
// with other context attributes.
private static String getDataSourceAttributeName() {
    return JenaDataSourceSetupBase.class.getName() + ".dataSource";
}
public static BasicDataSource makeBasicDataSource(String dbDriverClassname, String jdbcUrl, String username, String password) {
log.debug("makeBasicDataSource('" + dbDriverClassname + "', '"
@ -107,19 +166,29 @@ public class JenaDataSourceSetupBase extends JenaBaseDaoCon {
return ds;
}
private Model makeDBModel(BasicDataSource ds, String jenaDbModelName, OntModelSpec jenaDbOntModelSpec) {
public enum TripleStoreType {
RDB, SDB
}
protected Model makeDBModel(BasicDataSource ds, String jenaDbModelname, OntModelSpec jenaDbOntModelSpec) {
return makeDBModel(ds, jenaDbModelname, jenaDbOntModelSpec, TripleStoreType.RDB);
}
protected Model makeDBModel(BasicDataSource ds, String jenaDbModelName, OntModelSpec jenaDbOntModelSpec, TripleStoreType storeType) {
Model dbModel = null;
try {
// open the db model
try {
Graph g = new RegeneratingGraph(new RDBGraphGenerator(ds, DB, jenaDbModelName));
Model m = ModelFactory.createModelForGraph(g);
dbModel = m;
//dbModel = ModelFactory.createOntologyModel(jenaDbOntModelSpec,m);
//Graph g = maker.openGraph(JENA_DB_MODEL,false);
//dbModel = ModelFactory.createModelForGraph(g);
//maker.openModel(JENA_DB_MODEL);
Graph g = null;
switch (storeType) {
case RDB:
g = new RegeneratingGraph(new RDBGraphGenerator(ds, DB, jenaDbModelName)); break;
case SDB:
StoreDesc desc = new StoreDesc(LayoutType.LayoutTripleNodesHash, DatabaseType.MySQL);
g = new RegeneratingGraph(new SDBGraphGenerator(ds, desc, jenaDbModelName)); break;
default: throw new RuntimeException ("Unsupported store type " + storeType);
}
dbModel = ModelFactory.createModelForGraph(g);
log.debug("Using database at "+ds.getUrl());
} catch (Throwable t) {
t.printStackTrace();
@ -127,7 +196,6 @@ public class JenaDataSourceSetupBase extends JenaBaseDaoCon {
} catch (Throwable t) {
t.printStackTrace();
}
return dbModel;
}

View file

@ -0,0 +1,415 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.servlet.setup;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import javax.servlet.ServletContext;
import javax.servlet.ServletContextEvent;
import org.apache.commons.dbcp.BasicDataSource;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.hp.hpl.jena.ontology.Individual;
import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.ontology.OntModelSpec;
import com.hp.hpl.jena.query.Dataset;
import com.hp.hpl.jena.rdf.model.Literal;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.ResourceFactory;
import com.hp.hpl.jena.sdb.SDBFactory;
import com.hp.hpl.jena.sdb.Store;
import com.hp.hpl.jena.sdb.StoreDesc;
import com.hp.hpl.jena.sdb.sql.SDBConnection;
import com.hp.hpl.jena.sdb.store.DatabaseType;
import com.hp.hpl.jena.sdb.store.DatasetStore;
import com.hp.hpl.jena.sdb.store.LayoutType;
import com.hp.hpl.jena.shared.Lock;
import com.hp.hpl.jena.util.iterator.ClosableIterator;
import com.hp.hpl.jena.vocabulary.OWL;
import com.hp.hpl.jena.vocabulary.RDF;
import edu.cornell.mannlib.vitro.webapp.ConfigurationProperties;
import edu.cornell.mannlib.vitro.webapp.beans.ApplicationBean;
import edu.cornell.mannlib.vitro.webapp.dao.VitroVocabulary;
import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactory;
import edu.cornell.mannlib.vitro.webapp.dao.jena.JenaBaseDaoCon;
import edu.cornell.mannlib.vitro.webapp.dao.jena.JenaModelUtils;
import edu.cornell.mannlib.vitro.webapp.dao.jena.ModelSynchronizer;
import edu.cornell.mannlib.vitro.webapp.dao.jena.OntModelSelectorImpl;
import edu.cornell.mannlib.vitro.webapp.dao.jena.WebappDaoFactorySDB;
import edu.cornell.mannlib.vitro.webapp.utils.NamespaceMapper;
import edu.cornell.mannlib.vitro.webapp.utils.jena.InitialJenaModelUtils;
import edu.cornell.mannlib.vitro.webapp.utils.jena.NamespaceMapperJena;
public class JenaDataSourceSetupSDB extends JenaDataSourceSetupBase implements javax.servlet.ServletContextListener {
private static final Log log = LogFactory.getLog(JenaDataSourceSetup.class.getName());
public void contextInitialized(ServletContextEvent sce) {
try {
// JenaPersistentDataSourceSetup should have already set this up - it just sets
// up things related to the DB.
// TODO: I would like to make this code (before the sdb try/catch conditional so
// that it is not executed in a post-sdb-conversion environment.
OntModel memModel = (OntModel) sce.getServletContext().getAttribute("jenaOntModel");
if (memModel == null) {
memModel = ModelFactory.createOntologyModel(MEM_ONT_MODEL_SPEC);
log.warn("WARNING: no database connected. Changes will disappear after context restart.");
sce.getServletContext().setAttribute("jenaOntModel",memModel);
}
memModel.addSubModel((new JenaBaseDaoCon()).getConstModel()); // add the vitro tbox to the model
OntModel inferenceModel = ontModelFromContextAttribute(sce.getServletContext(), "inferenceOntModel");
OntModel userAccountsModel = ontModelFromContextAttribute(sce.getServletContext(), "userAccountsOntModel");
if (userAccountsModel.size() == 0) {
checkMainModelForUserAccounts(memModel, userAccountsModel);
}
OntModel unionModel = ModelFactory.createOntologyModel(MEM_ONT_MODEL_SPEC,ModelFactory.createUnion(memModel, inferenceModel));
OntModelSelectorImpl baseOms = new OntModelSelectorImpl();
baseOms.setApplicationMetadataModel(memModel);
baseOms.setTBoxModel(memModel);
baseOms.setFullModel(memModel);
OntModelSelectorImpl inferenceOms = new OntModelSelectorImpl();
inferenceOms.setABoxModel(inferenceModel);
inferenceOms.setTBoxModel(inferenceModel);
inferenceOms.setFullModel(inferenceModel);
OntModelSelectorImpl unionOms = new OntModelSelectorImpl();
unionOms.setApplicationMetadataModel(unionModel);
unionOms.setTBoxModel(unionModel);
unionOms.setFullModel(unionModel);
baseOms.setUserAccountsModel(userAccountsModel);
inferenceOms.setUserAccountsModel(userAccountsModel);
unionOms.setUserAccountsModel(userAccountsModel);
OntModel displayModel = ontModelFromContextAttribute(sce.getServletContext(),"displayOntModel");
baseOms.setDisplayModel(displayModel);
inferenceOms.setDisplayModel(displayModel);
unionOms.setDisplayModel(displayModel);
checkForNamespaceMismatch( memModel, defaultNamespace );
// SDB initialization
StoreDesc storeDesc = new StoreDesc(LayoutType.LayoutTripleNodesHash, DatabaseType.MySQL) ;
sce.getServletContext().setAttribute("storeDesc", storeDesc);
BasicDataSource bds = makeDataSourceFromConfigurationProperties();
this.setApplicationDataSource(bds, sce.getServletContext());
SDBConnection conn = new SDBConnection(bds.getConnection()) ;
Store store = SDBFactory.connectStore(conn, storeDesc);
try {
// a test query to see if the store is formatted
SDBFactory.connectDefaultModel(store).contains(OWL.Thing, RDF.type, OWL.Nothing);
} catch (Exception e) { // unformatted store
log.debug("Non-SDB system detected. Setting up SDB store");
store.getTableFormatter().create();
store.getTableFormatter().truncate();
// This is a one-time copy of stored KB data - from a Jena RDB store
// to a Jena SDB store. In the process, we will also separate out the
// TBox from the Abox; these are in the same graph in pre 1.2 VIVO
// versions and will now be stored and maintained in separate models
// Access to the Jena RDB data is through the OntModelSelectors that have
// been set up earlier in the current session by
// JenaPersistentDataSourceSetup.java
// In the code below, note that the current getABoxModel() methods on
// the OntModelSelectors return a graph with both ABox and TBox data.
JenaModelUtils modelUtils = new JenaModelUtils();
Model aboxAssertions = SDBFactory.connectNamedModel(store, JenaDataSourceSetupBase.JENA_DB_MODEL);
aboxAssertions.add(modelUtils.extractABox(baseOms.getABoxModel()));
Model aboxInferences = SDBFactory.connectNamedModel(store, JenaDataSourceSetupBase.JENA_INF_MODEL);
aboxInferences.add(modelUtils.extractABox(inferenceOms.getABoxModel()));
Model tboxAssertions = SDBFactory.connectNamedModel(store, JenaDataSourceSetupBase.JENA_TBOX_ASSERTIONS_MODEL);
tboxAssertions.add(modelUtils.extractTBox(baseOms.getABoxModel()));
Model tboxInferences = SDBFactory.connectNamedModel(store, JenaDataSourceSetupBase.JENA_TBOX_INF_MODEL);
tboxInferences.add(modelUtils.extractTBox(inferenceOms.getABoxModel()));
// The code below, which sets up the OntModelSelectors, controls whether each
// model is maintained in memory, in the DB, or both, while the application
// is running.
}
sce.getServletContext().setAttribute("kbStore", store);
//store.getTableFormatter().dropIndexes();
//store.getTableFormatter().addIndexes();
// Populate the three OntModelSelectors (BaseOntModel=assertions, InferenceOntModel=inferences
// and JenaOntModel=union of assertions and inferences) with the post-SDB-conversion models.
// ABox assertions
Model aboxAssertions = makeDBModel(bds, JenaDataSourceSetupBase.JENA_DB_MODEL, DB_ONT_MODEL_SPEC, TripleStoreType.SDB);
Model listenableAboxAssertions = ModelFactory.createUnion(aboxAssertions, ModelFactory.createDefaultModel());
baseOms.setABoxModel(ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM, listenableAboxAssertions));
// ABox inferences
Model aboxInferences = makeDBModel(bds, JenaDataSourceSetupBase.JENA_INF_MODEL, DB_ONT_MODEL_SPEC, TripleStoreType.SDB);
Model listenableAboxInferences = ModelFactory.createUnion(aboxInferences, ModelFactory.createDefaultModel());
inferenceOms.setABoxModel(ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM, listenableAboxInferences));
// Since the TBox models are in memory, they do not have time out issues like the
// ABox models do (and so don't need the extra step to make them listenable).
// TBox assertions
try {
Model tboxAssertionsDB = makeDBModel(bds, JENA_TBOX_ASSERTIONS_MODEL, DB_ONT_MODEL_SPEC, TripleStoreType.SDB);
OntModel tboxAssertions = ModelFactory.createOntologyModel(MEM_ONT_MODEL_SPEC);
if (tboxAssertionsDB != null) {
long startTime = System.currentTimeMillis();
System.out.println("Copying cached tbox assertions into memory");
tboxAssertions.add(tboxAssertionsDB);
System.out.println((System.currentTimeMillis()-startTime)/1000+" seconds to load tbox assertions");
}
tboxAssertions.getBaseModel().register(new ModelSynchronizer(tboxAssertionsDB));
baseOms.setTBoxModel(tboxAssertions);
} catch (Throwable e) {
log.error("Unable to load tbox assertion cache from DB", e);
}
// TBox inferences
try {
Model tboxInferencesDB = makeDBModel(bds, JENA_TBOX_INF_MODEL, DB_ONT_MODEL_SPEC, TripleStoreType.SDB);
OntModel tboxInferences = ModelFactory.createOntologyModel(MEM_ONT_MODEL_SPEC);
if (tboxInferencesDB != null) {
long startTime = System.currentTimeMillis();
System.out.println("Copying cached tbox inferences into memory");
tboxInferences.add(tboxInferencesDB);
System.out.println((System.currentTimeMillis()-startTime)/1000+" seconds to load tbox inferences");
}
tboxInferences.getBaseModel().register(new ModelSynchronizer(tboxInferencesDB));
inferenceOms.setTBoxModel(tboxInferences);
} catch (Throwable e) {
log.error("Unable to load tbox inference cache from DB", e);
}
// union ABox
OntModel unionABoxModel = ModelFactory.createOntologyModel(MEM_ONT_MODEL_SPEC,ModelFactory.createUnion(baseOms.getABoxModel(), inferenceOms.getABoxModel()));
unionOms.setABoxModel(unionABoxModel);
// union TBox
OntModel unionTBoxModel = ModelFactory.createOntologyModel(MEM_ONT_MODEL_SPEC,ModelFactory.createUnion(baseOms.getTBoxModel(), inferenceOms.getTBoxModel()));
unionOms.setTBoxModel(unionTBoxModel);
Dataset dataset = DatasetStore.create(store);
//String queryStr = "CONSTRUCT { ?s ?p ?o } \n" +
// "WHERE { GRAPH ?g { ?s ?p ?o } } ";
//Query query = QueryFactory.create(queryStr);
//QueryExecution qe = QueryExecutionFactory.create(query, dataset);
//log.info("Test query returned " + qe.execConstruct().size() + " statements");
sce.getServletContext().setAttribute("baseOntModel", memModel);
WebappDaoFactory baseWadf = new WebappDaoFactorySDB(baseOms, dataset, defaultNamespace, null, null);
//WebappDaoFactory baseWadf = new WebappDaoFactoryJena(baseOms, defaultNamespace, null, null);
sce.getServletContext().setAttribute("assertionsWebappDaoFactory",baseWadf);
sce.getServletContext().setAttribute("inferenceOntModel", inferenceModel);
WebappDaoFactory infWadf = new WebappDaoFactorySDB(inferenceOms, dataset, defaultNamespace, null, null);
//WebappDaoFactory infWadf = new WebappDaoFactoryJena(inferenceOms, defaultNamespace, null, null);
sce.getServletContext().setAttribute("deductionsWebappDaoFactory", infWadf);
sce.getServletContext().setAttribute("jenaOntModel", unionModel);
WebappDaoFactory wadf = new WebappDaoFactorySDB(unionOms, dataset, defaultNamespace, null, null);
//WebappDaoFactory wadf = new WebappDaoFactoryJena(unionOms, defaultNamespace, null, null);
sce.getServletContext().setAttribute("webappDaoFactory",wadf);
sce.getServletContext().setAttribute("unionOntModelSelector", unionOms); //assertions and inferences
sce.getServletContext().setAttribute("baseOntModelSelector", baseOms); //assertions
sce.getServletContext().setAttribute("inferenceOntModelSelector", inferenceOms); //inferences
ApplicationBean appBean = getApplicationBeanFromOntModel(memModel,wadf);
if (appBean != null) {
sce.getServletContext().setAttribute("applicationBean", appBean);
}
if (isEmpty(memModel)) {
loadDataFromFilesystem(memModel, sce.getServletContext());
}
if (userAccountsModel.size() == 0) {
readOntologyFilesInPathSet(AUTHPATH, sce.getServletContext(), userAccountsModel);
if (userAccountsModel.size() == 0) {
createInitialAdminUser(userAccountsModel);
}
}
ensureEssentialInterfaceData(memModel, sce, wadf);
NamespaceMapper namespaceMapper = new NamespaceMapperJena(unionModel, unionModel, defaultNamespace);
sce.getServletContext().setAttribute("NamespaceMapper", namespaceMapper);
memModel.getBaseModel().register(namespaceMapper);
sce.getServletContext().setAttribute("defaultNamespace", defaultNamespace);
} catch (Throwable t) {
log.error("Throwable in " + this.getClass().getName(), t);
// printing the error because Tomcat doesn't print context listener
// errors the same way it prints other errors at startup
t.printStackTrace();
throw new Error(this.getClass().getName() + "failed");
}
}
private void checkForNamespaceMismatch(OntModel model, String defaultNamespace) {
    // Sanity check at startup: every Portal individual in the database must
    // live in the default namespace configured in deploy.properties. A
    // mismatch usually means deploy.properties points at the wrong database
    // or the namespace setting was changed after the data was created.
    // NOTE(review): the defaultNamespace parameter is unused; the value is
    // re-read from deploy.properties below — confirm this is intentional.
    String defaultNamespaceFromDeployProperties = ConfigurationProperties.getProperty("Vitro.defaultNamespace");
    if (defaultNamespaceFromDeployProperties == null) {
        log.error("Could not get namespace from deploy.properties.");
        // Bail out: without a configured namespace there is nothing to
        // compare against, and startsWith(null) below would throw an NPE
        // for every portal found.
        return;
    }

    // Collect the portal URIs under a read lock, then do the (potentially
    // verbose) checking outside the critical section.
    List<String> portalURIs = new ArrayList<String>();
    model.enterCriticalSection(Lock.READ);
    try {
        Iterator portalIt = model.listIndividuals(PORTAL);
        while (portalIt.hasNext()) {
            portalURIs.add(((Individual) portalIt.next()).getURI());
        }
    } finally {
        model.leaveCriticalSection();
    }

    for (String portalUri : portalURIs) {
        if (portalUri != null && !portalUri.startsWith(defaultNamespaceFromDeployProperties)) {
            log.error("Namespace mismatch between db and deploy.properties.");
            log.error("Vivo will not start up correctly because the default namespace specified in deploy.properties does not match the namespace of " +
                    "a portal in the database. Namespace from deploy.properties: \"" + defaultNamespaceFromDeployProperties +
                    "\" Namespace from an existing portal: \"" + portalUri + "\" To get the application to start with this " +
                    "database change the default namespace in deploy.properties " + portalUri.substring(0, portalUri.lastIndexOf("/")+1) +
                    " Another possibility is that deploy.properties does not specify the intended database.");
        }
    }
}
/* ====================================================================== */
public void contextDestroyed(ServletContextEvent sce) {
    // Nothing to clean up here; database connections and models created in
    // contextInitialized are managed elsewhere in the application lifecycle.
}
/**
 * Builds an ApplicationBean from the single Application individual in the
 * given model, copying its portal-id settings. Unparseable values are
 * skipped, leaving the bean's defaults in place. Returns null when the
 * model contains no Application individual.
 */
private ApplicationBean getApplicationBeanFromOntModel(OntModel ontModel, WebappDaoFactory wadf) {
    ClosableIterator appIt = ontModel.listIndividuals(ResourceFactory.createResource(VitroVocabulary.APPLICATION));
    try {
        if (!appIt.hasNext()) {
            return null;
        }
        Individual appInd = (Individual) appIt.next();
        ApplicationBean appBean = new ApplicationBean();
        try {
            Literal maxPortal = (Literal) appInd.getPropertyValue(ResourceFactory.createProperty(VitroVocabulary.APPLICATION_MAXPORTALID));
            appBean.setMaxPortalId(Integer.decode(maxPortal.getLexicalForm()));
        } catch (Exception e) { /* ignore bad value */ }
        try {
            Literal minShared = (Literal) appInd.getPropertyValue(ResourceFactory.createProperty(VitroVocabulary.APPLICATION_MINSHAREDPORTALID));
            appBean.setMinSharedPortalId(Integer.decode(minShared.getLexicalForm()));
        } catch (Exception e) { /* ignore bad value */ }
        try {
            Literal maxShared = (Literal) appInd.getPropertyValue(ResourceFactory.createProperty(VitroVocabulary.APPLICATION_MAXSHAREDPORTALID));
            appBean.setMaxSharedPortalId(Integer.decode(maxShared.getLexicalForm()));
        } catch (Exception e) { /* ignore bad value */ }
        // When flag1 filtering is inactive, collapse to a single portal.
        if (!wadf.getApplicationDao().isFlag1Active()) {
            appBean.setMaxPortalId(1);
        }
        return appBean;
    } finally {
        appIt.close();
    }
}
/**
 * Makes sure the model contains at least one Portal individual. If one
 * already exists, adopts its namespace as the default namespace (so older
 * installations survive a missing config option); otherwise loads the
 * initial site configuration, falling back to a bare portal/root-tab plus
 * a basic classgroup when no initial model data is available.
 */
private void ensureEssentialInterfaceData(OntModel memModel, ServletContextEvent sce, WebappDaoFactory wadf) {
    Model essentialInterfaceData = null;
    ClosableIterator portalIt = memModel.listIndividuals(memModel.getResource(VitroVocabulary.PORTAL));
    try {
        if (portalIt.hasNext()) {
            // Set the default namespace to the namespace of the first portal
            // object we find. This will keep existing applications from dying
            // when the default namespace config option is missing.
            Individual portal = (Individual) portalIt.next();
            if (portal.getNameSpace() != null) {
                defaultNamespace = portal.getNameSpace();
            }
        } else {
            log.debug("Loading initial site configuration");
            essentialInterfaceData = InitialJenaModelUtils.loadInitialModel(sce.getServletContext(), defaultNamespace);
            if (essentialInterfaceData.size() == 0) {
                essentialInterfaceData = InitialJenaModelUtils.basicPortalAndRootTab(defaultNamespace);
                essentialInterfaceData.add(InitialJenaModelUtils.basicClassgroup(wadf.getDefaultNamespace()));
            }
            //JenaModelUtils.makeClassGroupsFromRootClasses(wadf,memModel,essentialInterfaceData);
            memModel.add(essentialInterfaceData);
        }
    } finally {
        portalIt.close();
    }
}
/**
 * Migrates any user-account statements found in the main model into the
 * dedicated user accounts model, removing them from the main model.
 * No-op when the main model contains no user-account data.
 */
private void checkMainModelForUserAccounts(OntModel mainModel, OntModel userAccountsModel) {
    Model extractedUserData = new JenaModelUtils().extractUserAccountsData(mainModel);
    if (extractedUserData.size() == 0) {
        return;
    }
    // Copy into the user accounts model first, then remove from the main
    // model, each under its own write lock.
    userAccountsModel.enterCriticalSection(Lock.WRITE);
    try {
        userAccountsModel.add(extractedUserData);
    } finally {
        userAccountsModel.leaveCriticalSection();
    }
    mainModel.enterCriticalSection(Lock.WRITE);
    try {
        mainModel.remove(extractedUserData);
    } finally {
        mainModel.leaveCriticalSection();
    }
}
/**
 * Returns the OntModel stored under the given servlet context attribute,
 * creating (and storing) a fresh in-memory model when the attribute is
 * unset or holds a value of some other type.
 */
private OntModel ontModelFromContextAttribute(ServletContext ctx, String attribute) {
    Object attributeValue = ctx.getAttribute(attribute);
    // instanceof is already null-safe, so the original's explicit
    // null check was redundant.
    if (attributeValue instanceof OntModel) {
        return (OntModel) attributeValue;
    }
    OntModel ontModel = ModelFactory.createOntologyModel(MEM_ONT_MODEL_SPEC);
    ctx.setAttribute(attribute, ontModel);
    return ontModel;
}
/**
 * Returns true when the model contains no statements. Only the first
 * statement is probed; the iterator is closed in a finally block to
 * release any underlying (possibly database-backed) resources.
 */
private boolean isEmpty(Model model) {
    ClosableIterator closeIt = model.listStatements();
    try {
        // Collapses the original if/else returning false/true.
        return !closeIt.hasNext();
    } finally {
        closeIt.close();
    }
}
/**
 * Reads the user and system ontology files from the filesystem into a
 * temporary in-memory model, then adds them to the given model. Used to
 * seed an empty knowledge base on first startup.
 */
private void loadDataFromFilesystem(OntModel ontModel, ServletContext ctx) {
    OntModel initialDataModel = ModelFactory.createOntologyModel(MEM_ONT_MODEL_SPEC);
    // primitive long avoids the original's needless Long autoboxing
    long startTime = System.currentTimeMillis();
    log.debug("Reading ontology files");
    readOntologyFilesInPathSet(USERPATH, ctx, initialDataModel);
    readOntologyFilesInPathSet(SYSTEMPATH, ctx, initialDataModel);
    log.debug(((System.currentTimeMillis()-startTime)/1000)+" seconds to read ontology files ");
    ontModel.add(initialDataModel);
}
}

View file

@ -6,6 +6,7 @@ import javax.servlet.ServletContext;
import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;
import org.apache.commons.dbcp.BasicDataSource;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@ -27,6 +28,9 @@ public class JenaPersistentDataSourceSetup extends JenaDataSourceSetupBase imple
boolean firstStartup = false;
try {
dbModel = makeDBModelFromConfigurationProperties(JENA_DB_MODEL, DB_ONT_MODEL_SPEC);
ClosableIterator stmtIt = dbModel.listStatements();

View file

@ -0,0 +1,125 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.servlet.setup;
import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;
import org.apache.commons.dbcp.BasicDataSource;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.mindswap.pellet.PelletOptions;
import com.hp.hpl.jena.graph.Graph;
import com.hp.hpl.jena.ontology.OntModelSpec;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.sdb.StoreDesc;
import com.hp.hpl.jena.sdb.store.DatabaseType;
import com.hp.hpl.jena.sdb.store.LayoutType;
import com.hp.hpl.jena.vocabulary.OWL;
import edu.cornell.mannlib.vitro.webapp.dao.jena.OntModelSelectorImpl;
import edu.cornell.mannlib.vitro.webapp.dao.jena.RDBGraphGenerator;
import edu.cornell.mannlib.vitro.webapp.dao.jena.RegeneratingGraph;
import edu.cornell.mannlib.vitro.webapp.dao.jena.SDBGraphGenerator;
import edu.cornell.mannlib.vitro.webapp.dao.jena.WebappDaoFactoryJena;
import edu.cornell.mannlib.vitro.webapp.dao.jena.pellet.PelletListener;
import edu.cornell.mannlib.vitro.webapp.dao.jena.pellet.ReasonerConfiguration;
import edu.cornell.mannlib.vitro.webapp.reasoner.SimpleReasoner;
import edu.cornell.mannlib.vitro.webapp.reasoner.support.SimpleReasonerTBoxListener;
import edu.cornell.mannlib.vitro.webapp.servlet.setup.JenaDataSourceSetupBase.TripleStoreType;
/**
 * Servlet context listener that wires up reasoning at startup: a Pellet
 * reasoner over the TBox (via PelletListener) and a SimpleReasoner over the
 * ABox, using the OntModelSelectors placed in the servlet context by the
 * earlier data-source setup listeners.
 */
public class SimpleReasonerSetup implements ServletContextListener {

    private static final Log log = LogFactory.getLog(SimpleReasonerSetup.class.getName());

    // Models used during a full recompute of the ABox
    static final String JENA_INF_MODEL_REBUILD = "http://vitro.mannlib.cornell.edu/default/vitro-kb-inf-rebuild";
    static final String JENA_INF_MODEL_SCRATCHPAD = "http://vitro.mannlib.cornell.edu/default/vitro-kb-inf-scratchpad";

    public void contextInitialized(ServletContextEvent sce) {
        try {
            // set up Pellet reasoning for the TBox
            OntModelSelectorImpl assertionsOms = (OntModelSelectorImpl) sce.getServletContext().getAttribute("baseOntModelSelector");
            OntModelSelectorImpl inferencesOms = (OntModelSelectorImpl) sce.getServletContext().getAttribute("inferenceOntModelSelector");
            OntModelSelectorImpl unionOms = (OntModelSelectorImpl) sce.getServletContext().getAttribute("unionOntModelSelector");

            WebappDaoFactoryJena wadf = (WebappDaoFactoryJena) sce.getServletContext().getAttribute("webappDaoFactory");

            // Pellet requires an OWL TBox; bail out for other profiles.
            if (!assertionsOms.getTBoxModel().getProfile().NAMESPACE().equals(OWL.NAMESPACE.getNameSpace())) {
                log.error("Not connecting Pellet reasoner - the TBox assertions model is not an OWL model");
                return;
            }

            // Set various Pellet options for incremental consistency checking, etc.
            PelletOptions.DL_SAFE_RULES = true;
            PelletOptions.USE_COMPLETION_QUEUE = true;
            PelletOptions.USE_TRACING = true;
            PelletOptions.TRACK_BRANCH_EFFECTS = true;
            PelletOptions.USE_INCREMENTAL_CONSISTENCY = true;
            PelletOptions.USE_INCREMENTAL_DELETION = true;

            PelletListener pelletListener = new PelletListener(unionOms.getTBoxModel(), assertionsOms.getTBoxModel(), inferencesOms.getTBoxModel(), ReasonerConfiguration.DEFAULT);
            sce.getServletContext().setAttribute("pelletListener", pelletListener);
            sce.getServletContext().setAttribute("pelletOntModel", pelletListener.getPelletModel());

            if (wadf != null) {
                wadf.setPelletListener(pelletListener);
            }

            log.info("Pellet reasoner connected for the TBox");

            // set up simple reasoning for the ABox
            BasicDataSource bds = JenaDataSourceSetupBase.getApplicationDataSource(sce.getServletContext());

            Model rebuildModel = makeDBModel(bds, JENA_INF_MODEL_REBUILD, JenaDataSourceSetupBase.DB_ONT_MODEL_SPEC, TripleStoreType.SDB);
            Model scratchModel = makeDBModel(bds, JENA_INF_MODEL_SCRATCHPAD, JenaDataSourceSetupBase.DB_ONT_MODEL_SPEC, TripleStoreType.SDB);

            // the simple reasoner will register itself as a listener to the ABox assertions
            SimpleReasoner simpleReasoner = new SimpleReasoner(unionOms.getTBoxModel(), assertionsOms.getABoxModel(), inferencesOms.getABoxModel(), rebuildModel, scratchModel);
            assertionsOms.getTBoxModel().register(new SimpleReasonerTBoxListener(simpleReasoner));
            sce.getServletContext().setAttribute("simpleReasoner", simpleReasoner);

            log.info("Simple reasoner connected for the ABox");

        } catch (Throwable t) {
            // Log through commons-logging instead of printStackTrace() so
            // startup failures reach the application log, not just stderr.
            log.error("Unable to set up reasoning in " + this.getClass().getName(), t);
        }
    }

    public void contextDestroyed(ServletContextEvent arg0) {
        // nothing to do
    }

    /**
     * Opens a database-backed Jena model of the given name, wrapped in a
     * RegeneratingGraph so dropped connections can be re-established.
     * Returns null if the model could not be opened.
     */
    protected Model makeDBModel(BasicDataSource ds, String jenaDbModelName, OntModelSpec jenaDbOntModelSpec, TripleStoreType storeType) {
        String DB = "MySQL"; // database type
        Model dbModel = null;
        // Single try/catch replaces the original's redundant nested pair
        // (the outer try contained nothing but the inner try).
        try {
            Graph g = null;
            switch (storeType) {
                case RDB:
                    g = new RegeneratingGraph(new RDBGraphGenerator(ds, DB, jenaDbModelName));
                    break;
                case SDB:
                    StoreDesc desc = new StoreDesc(LayoutType.LayoutTripleNodesHash, DatabaseType.MySQL);
                    g = new RegeneratingGraph(new SDBGraphGenerator(ds, desc, jenaDbModelName));
                    break;
                default:
                    throw new RuntimeException("Unsupported store type " + storeType);
            }
            dbModel = ModelFactory.createModelForGraph(g);
            log.debug("Using database at " + ds.getUrl());
        } catch (Throwable t) {
            // Report through the logger instead of printStackTrace().
            log.error("Unable to open database model " + jenaDbModelName, t);
        }
        return dbModel;
    }
}

View file

@ -0,0 +1,312 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.reasoner;
import org.junit.Assert;
import org.junit.Test;
import org.mindswap.pellet.jena.PelletReasonerFactory;
import com.hp.hpl.jena.ontology.OntClass;
import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.ontology.OntModelSpec;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.rdf.model.ResourceFactory;
import com.hp.hpl.jena.rdf.model.Statement;
import com.hp.hpl.jena.vocabulary.RDF;
import edu.cornell.mannlib.vitro.webapp.reasoner.support.SimpleReasonerTBoxListener;
/**
 * Unit tests for SimpleReasoner ABox inference. Each test builds a small
 * Pellet-backed TBox class hierarchy, registers the SimpleReasoner as a
 * listener on an in-memory ABox, mutates the ABox and/or TBox, and checks
 * which rdf:type statements appear in the inference model.
 */
public class SimpleReasonerTest {

    @Test
    public void addTypes(){
        // Create a TBox with a simple class hierarchy. D and E are subclasses of C. B and C are subclasses of A.
        // Pellet will compute TBox inferences.
        OntModel tBox = ModelFactory.createOntologyModel(PelletReasonerFactory.THE_SPEC);

        OntClass classA = tBox.createClass("http://test.vivo/A");
        classA.setLabel("class A", "en-US");
        OntClass classB = tBox.createClass("http://test.vivo/B");
        classB.setLabel("class B", "en-US");
        OntClass classC = tBox.createClass("http://test.vivo/C");
        classC.setLabel("class C", "en-US");
        OntClass classD = tBox.createClass("http://test.vivo/D");
        classD.setLabel("class D", "en-US");
        OntClass classE = tBox.createClass("http://test.vivo/E");
        classE.setLabel("class E", "en-US");
        classC.addSubClass(classD);
        classC.addSubClass(classE);
        classA.addSubClass(classB);
        classA.addSubClass(classC);

        // this is the model to receive inferences
        Model inf = ModelFactory.createDefaultModel();

        // create an ABox and register the SimpleReasoner listener with it
        OntModel aBox = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM);
        aBox.register(new SimpleReasoner(tBox, aBox, inf));

        // add a statement to the ABox that individual x is of type E
        Resource ind_x = aBox.createResource("http://test.vivo/x");
        aBox.add(ind_x, RDF.type, classE);

        // Verify that "x is of type C" was inferred (E is a subclass of C)
        Statement xisc = ResourceFactory.createStatement(ind_x, RDF.type, classC);
        Assert.assertTrue(inf.contains(xisc));

        // Verify that "x is of type A" was inferred (C is a subclass of A)
        Statement xisa = ResourceFactory.createStatement(ind_x, RDF.type, classA);
        Assert.assertTrue(inf.contains(xisa));
    }

    @Test
    public void removeTypes(){
        // Create a TBox with a simple class hierarchy. C is a subclass of B and B is a subclass of A.
        // Pellet will compute TBox inferences.
        OntModel tBox = ModelFactory.createOntologyModel(PelletReasonerFactory.THE_SPEC);

        OntClass classA = tBox.createClass("http://test.vivo/A");
        classA.setLabel("class A", "en-US");
        OntClass classB = tBox.createClass("http://test.vivo/B");
        classB.setLabel("class B", "en-US");
        OntClass classC = tBox.createClass("http://test.vivo/C");
        classC.setLabel("class C", "en-US");
        classB.addSubClass(classC);
        classA.addSubClass(classB);

        // this is the model to receive inferences
        Model inf = ModelFactory.createDefaultModel();

        // create an ABox and register the SimpleReasoner listener with it
        OntModel aBox = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM);
        aBox.register(new SimpleReasoner(tBox, aBox, inf));

        // add a statement to the ABox that individual x is of type C
        Resource ind_x = aBox.createResource("http://test.vivo/x");
        aBox.add(ind_x, RDF.type, classC);

        // add a statement to the ABox that individual x is of type B
        aBox.add(ind_x, RDF.type, classB);

        // remove the statement that individual x is of type C
        aBox.remove(ind_x, RDF.type, classC);

        // Verify that the inference graph still contains "x is of type A":
        // x is still asserted to be of type B, and B is a subclass of A.
        Statement xisa = ResourceFactory.createStatement(ind_x, RDF.type, classA);
        Assert.assertTrue(inf.contains(xisa));
    }

    // This tests added TBox subClassOf and equivalentClass statements.
    // The ABox data that will be the basis for the inference will
    // be in the ABox graph.
    @Test
    public void addSubClass1(){
        // Create TBox, ABox and Inference models and register
        // the ABox reasoner listeners with the ABox and TBox.
        // Pellet will compute TBox inferences.
        OntModel tBox = ModelFactory.createOntologyModel(PelletReasonerFactory.THE_SPEC);
        OntModel aBox = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM);
        Model inf = ModelFactory.createDefaultModel();

        SimpleReasoner simpleReasoner = new SimpleReasoner(tBox, aBox, inf);
        aBox.register(simpleReasoner);
        tBox.register(new SimpleReasonerTBoxListener(simpleReasoner));

        // Add classes A, B, C and D to the TBox
        OntClass classA = tBox.createClass("http://test.vivo/A");
        classA.setLabel("class A", "en-US");
        OntClass classB = tBox.createClass("http://test.vivo/B");
        classB.setLabel("class B", "en-US");
        OntClass classC = tBox.createClass("http://test.vivo/C");
        classC.setLabel("class C", "en-US");
        OntClass classD = tBox.createClass("http://test.vivo/D");
        classD.setLabel("class D", "en-US");

        // Add a statement that individual x is of type C to the ABox
        Resource ind_x = aBox.createResource("http://test.vivo/x");
        aBox.add(ind_x, RDF.type, classC);

        // Add a statement that C is a subclass of A to the TBox
        classA.addSubClass(classC);

        // Verify that "x is of type A" was inferred
        Statement xisa = ResourceFactory.createStatement(ind_x, RDF.type, classA);
        Assert.assertTrue(inf.contains(xisa));

        // Verify that "x is of type B" was not inferred (B is unrelated to C)
        Statement xisb = ResourceFactory.createStatement(ind_x, RDF.type, classB);
        Assert.assertFalse(inf.contains(xisb));

        // Verify that "x is of type D" was not inferred (D is unrelated to C)
        Statement xisd = ResourceFactory.createStatement(ind_x, RDF.type, classD);
        Assert.assertFalse(inf.contains(xisd));
    }

    // This tests added TBox subClassOf and equivalentClass statements.
    // The ABox data that is the basis for the inference will be
    // in the inferred graph.
    @Test
    public void addSubClass2(){
        // Create TBox, ABox and Inference models and register
        // the ABox reasoner listeners with the ABox and TBox.
        // Pellet will compute TBox inferences.
        OntModel tBox = ModelFactory.createOntologyModel(PelletReasonerFactory.THE_SPEC);
        OntModel aBox = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM);
        Model inf = ModelFactory.createDefaultModel();

        SimpleReasoner simpleReasoner = new SimpleReasoner(tBox, aBox, inf);
        aBox.register(simpleReasoner);
        tBox.register(new SimpleReasonerTBoxListener(simpleReasoner));

        // Add classes A, B, C and D to the TBox.
        // D is a subclass of C.
        OntClass classA = tBox.createClass("http://test.vivo/A");
        classA.setLabel("class A", "en-US");
        OntClass classB = tBox.createClass("http://test.vivo/B");
        classB.setLabel("class B", "en-US");
        OntClass classC = tBox.createClass("http://test.vivo/C");
        classC.setLabel("class C", "en-US");
        OntClass classD = tBox.createClass("http://test.vivo/D");
        classD.setLabel("class D", "en-US");
        classC.addSubClass(classD);

        // Add a statement that individual x is of type D to the ABox
        Resource ind_x = aBox.createResource("http://test.vivo/x");
        aBox.add(ind_x, RDF.type, classD);

        // Add a statement that C is a subclass of A to the TBox
        classA.addSubClass(classC);

        // Verify that "x is of type A" was inferred
        // (x is a D, D is a subclass of C, C is now a subclass of A)
        Statement xisa = ResourceFactory.createStatement(ind_x, RDF.type, classA);
        Assert.assertTrue(inf.contains(xisa));
    }

    @Test
    // This tests incremental reasoning as a result of the removal of a subClassOf
    // or equivalentClass statement from the TBox.
    public void removeSubClass(){
        // Create TBox, ABox and Inference models and register
        // the ABox reasoner listeners with the ABox and TBox.
        // Pellet will compute TBox inferences.
        OntModel tBox = ModelFactory.createOntologyModel(PelletReasonerFactory.THE_SPEC);
        OntModel aBox = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM);
        Model inf = ModelFactory.createDefaultModel();

        SimpleReasoner simpleReasoner = new SimpleReasoner(tBox, aBox, inf);
        aBox.register(simpleReasoner);
        tBox.register(new SimpleReasonerTBoxListener(simpleReasoner));

        // Add classes A, B, C, D, E, F, G and H to the TBox.
        // B, C and D are subclasses of A.
        // E is a subclass of B.
        // F and G are subclasses of C.
        // H is a subclass of D.
        OntClass classA = tBox.createClass("http://test.vivo/A");
        classA.setLabel("class A", "en-US");
        OntClass classB = tBox.createClass("http://test.vivo/B");
        classB.setLabel("class B", "en-US");
        OntClass classC = tBox.createClass("http://test.vivo/C");
        classC.setLabel("class C", "en-US");
        OntClass classD = tBox.createClass("http://test.vivo/D");
        classD.setLabel("class D", "en-US");
        OntClass classE = tBox.createClass("http://test.vivo/E");
        classE.setLabel("class E", "en-US");
        OntClass classF = tBox.createClass("http://test.vivo/F");
        classF.setLabel("class F", "en-US");
        OntClass classG = tBox.createClass("http://test.vivo/G");
        classG.setLabel("class G", "en-US");
        OntClass classH = tBox.createClass("http://test.vivo/H");
        classH.setLabel("class H", "en-US");
        classA.addSubClass(classB);
        classA.addSubClass(classC);
        classA.addSubClass(classD);
        classB.addSubClass(classE);
        classC.addSubClass(classF);
        classC.addSubClass(classG);
        classD.addSubClass(classH);

        // Add a statement that individual x is of type E to the ABox
        Resource ind_x = aBox.createResource("http://test.vivo/x");
        aBox.add(ind_x, RDF.type, classE);

        // Remove the statement that B is a subclass of A from the TBox
        classA.removeSubClass(classB);

        // Verify that "x is of type A" is not in the inference graph
        // (x's only path to A went through B)
        Statement xisa = ResourceFactory.createStatement(ind_x, RDF.type, classA);
        Assert.assertFalse(inf.contains(xisa));

        // Verify that "x is of type B" is in the inference graph
        // (E is still a subclass of B)
        Statement xisb = ResourceFactory.createStatement(ind_x, RDF.type, classB);
        Assert.assertTrue(inf.contains(xisb));

        // Add statements that individual y is of types F and H to the ABox
        Resource ind_y = aBox.createResource("http://test.vivo/y");
        aBox.add(ind_y, RDF.type, classF);
        aBox.add(ind_y, RDF.type, classH);

        // Remove the statement that C is a subclass of A from the TBox
        classA.removeSubClass(classC);

        // Verify that "y is of type A" is in the inference graph:
        // the path through C is gone, but y is also an H, and H is
        // still a subclass of D, which is a subclass of A.
        Statement yisa = ResourceFactory.createStatement(ind_y, RDF.type, classA);
        Assert.assertTrue(inf.contains(yisa));
    }

    // To help in debugging the unit test
    void printModels(OntModel ontModel) {
        System.out.println("\nThe model has " + ontModel.size() + " statements:");
        System.out.println("---------------------------------------------------");
        ontModel.writeAll(System.out,"N3",null);
    }
}

View file

@ -38,8 +38,10 @@ PREFIX <%=prefixText%>: <<%=urlText%>><%}}%>
SELECT ?geoLocation ?label
WHERE
{
GRAPH ?g{
?geoLocation rdf:type core:GeographicLocation .
OPTIONAL { ?geoLocation rdfs:label ?label }
OPTIONAL { ?geoLocation rdfs:label ?label } .
}
}
LIMIT 20
</textarea>