merge dev-rdfapi branch to trunk

brianjlowe committed 2012-06-13 18:44:46 +00:00
commit 297e943635
137 changed files with 14413 additions and 1083 deletions

View file

@@ -19,7 +19,10 @@ import com.hp.hpl.jena.query.Dataset;
import edu.cornell.mannlib.vitro.webapp.beans.ApplicationBean;
import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactory;
import edu.cornell.mannlib.vitro.webapp.dao.jena.JenaBaseDao;
import edu.cornell.mannlib.vitro.webapp.dao.jena.OntModelSelector;
import edu.cornell.mannlib.vitro.webapp.dao.jena.VitroModelSource.ModelName;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.RDFServiceUtils;
public class VitroRequest extends HttpServletRequestWrapper {
@@ -40,6 +43,16 @@ public class VitroRequest extends HttpServletRequestWrapper {
this._req = _req;
}
public RDFService getRDFService() {
Object o = getAttribute("rdfService");
if (o instanceof RDFService) {
return (RDFService) o;
} else {
RDFService rdfService = RDFServiceUtils.getRDFService(this);
setAttribute("rdfService", rdfService);
return rdfService;
}
}
public void setWebappDaoFactory( WebappDaoFactory wdf){
setAttribute("webappDaoFactory",wdf);
@@ -78,6 +91,10 @@ public class VitroRequest extends HttpServletRequestWrapper {
setAttribute("jenaOntModel", ontModel);
}
public void setOntModelSelector(OntModelSelector oms) {
setAttribute("ontModelSelector", oms);
}
/** gets assertions + inferences WebappDaoFactory with no filtering **/
public WebappDaoFactory getFullWebappDaoFactory() {
Object webappDaoFactoryAttr = _req.getAttribute("fullWebappDaoFactory");
@@ -97,12 +114,26 @@ public class VitroRequest extends HttpServletRequestWrapper {
public WebappDaoFactory getAssertionsWebappDaoFactory() {
Object webappDaoFactoryAttr = _req.getSession().getAttribute("assertionsWebappDaoFactory");
if (webappDaoFactoryAttr instanceof WebappDaoFactory) {
log.debug("Returning assertionsWebappDaoFactory from session");
return (WebappDaoFactory) webappDaoFactoryAttr;
} else {
return (WebappDaoFactory) _req.getSession().getServletContext().getAttribute("assertionsWebappDaoFactory");
webappDaoFactoryAttr = getAttribute("assertionsWebappDaoFactory");
if (webappDaoFactoryAttr instanceof WebappDaoFactory) {
log.debug("returning assertionsWebappDaoFactory from request attribute");
return (WebappDaoFactory) webappDaoFactoryAttr;
} else {
log.debug("Returning assertionsWebappDaoFactory from context");
return (WebappDaoFactory) _req.getSession().getServletContext().getAttribute("assertionsWebappDaoFactory");
}
}
}
/** sets assertions-only WebappDaoFactory with no filtering */
public void setAssertionsWebappDaoFactory(WebappDaoFactory wadf) {
setAttribute("assertionsWebappDaoFactory", wadf);
}
/** gets inferences-only WebappDaoFactory with no filtering */
public WebappDaoFactory getDeductionsWebappDaoFactory() {
Object webappDaoFactoryAttr = _req.getSession().getAttribute("deductionsWebappDaoFactory");
@@ -137,6 +168,16 @@ public class VitroRequest extends HttpServletRequestWrapper {
return jenaOntModel;
}
public OntModelSelector getOntModelSelector() {
Object o = this.getAttribute("ontModelSelector");
if (o instanceof OntModelSelector) {
return (OntModelSelector) o;
} else {
return null;
}
}
public OntModel getAssertionsOntModel() {
OntModel jenaOntModel = (OntModel)_req.getSession().getAttribute( JenaBaseDao.ASSERTIONS_ONT_MODEL_ATTRIBUTE_NAME );
if ( jenaOntModel == null ) {
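
The new getRDFService() accessor resolves the service through RDFServiceUtils once per request and caches it as a request attribute. A minimal sketch of a caller, with a hypothetical handler method and query (VitroRequest and RDFService are the classes added or touched by this commit):

import java.io.IOException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException;

// Hypothetical handler: the first call resolves the RDFService, later calls
// on the same request reuse the cached instance.
void handle(HttpServletRequest req, HttpServletResponse resp) throws IOException {
    VitroRequest vreq = new VitroRequest(req);
    RDFService rdfService = vreq.getRDFService();
    try {
        resp.getWriter().println(rdfService.sparqlAskQuery("ASK { ?s ?p ?o }"));
    } catch (RDFServiceException e) {
        throw new RuntimeException(e);
    }
}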

View file

@@ -57,7 +57,7 @@ public class Classes2ClassesOperationController extends BaseEditController {
return;
}
VClassDao vcDao = request.getFullWebappDaoFactory().getVClassDao();
VClassDao vcDao = request.getAssertionsWebappDaoFactory().getVClassDao();
String modeStr = request.getParameter("opMode");
modeStr = (modeStr == null) ? "" : modeStr;

View file

@@ -46,8 +46,9 @@ public class EntityEditController extends BaseEditController {
String entURI = request.getParameter("uri");
VitroRequest vreq = (new VitroRequest(request));
ApplicationBean application = vreq.getAppBean();
//Individual ent = vreq.getWebappDaoFactory().getIndividualDao().getIndividualByURI(entURI);
Individual ent = vreq.getAssertionsWebappDaoFactory().getIndividualDao().getIndividualByURI(entURI);
if (ent == null) {
try {

View file

@@ -78,7 +78,11 @@ public class EntityRetryController extends BaseEditController {
action = epo.getAction();
}
WebappDaoFactory wadf = (vreq.getAssertionsWebappDaoFactory()!=null) ? vreq.getAssertionsWebappDaoFactory() : vreq.getFullWebappDaoFactory();
WebappDaoFactory wadf = vreq.getAssertionsWebappDaoFactory();
if (wadf == null) {
log.info("Using vreq.getFullWebappDaoFactory()");
wadf = vreq.getFullWebappDaoFactory();
}
LoginStatusBean loginBean = LoginStatusBean.getBean(request);
WebappDaoFactory myWebappDaoFactory = wadf.getUserAwareDaoFactory(loginBean.getUserURI());

View file

@@ -15,9 +15,12 @@ import com.hp.hpl.jena.datatypes.RDFDatatype;
import com.hp.hpl.jena.datatypes.TypeMapper;
import com.hp.hpl.jena.ontology.OntClass;
import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.ontology.OntModelSpec;
import com.hp.hpl.jena.ontology.OntProperty;
import com.hp.hpl.jena.ontology.Restriction;
import com.hp.hpl.jena.rdf.model.Literal;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.shared.Lock;
@@ -57,129 +60,12 @@ public class RestrictionOperationController extends BaseEditController {
if (epo == null) {
response.sendRedirect(defaultLandingPage);
return;
}
// TODO: I need to de-spaghettify this and remap this controller, now that I know what I'm doing
}
if ( (request.getParameter("_cancel") == null ) ) {
ontModel.enterCriticalSection(Lock.WRITE);
try {
ontModel.getBaseModel().notifyEvent(new EditEvent(request.getFullWebappDaoFactory().getUserURI(),true));
if ( request.getParameter("_action") != null && request.getParameter("_action").equals("delete") ) {
String restId = request.getParameter("restrictionId");
if (restId != null) {
OntClass restrictedClass = ontModel.getOntClass( request.getParameter( "classUri" ) );
OntClass rest = null;
for ( Iterator i = restrictedClass.listEquivalentClasses(); i.hasNext(); ) {
OntClass equivClass = (OntClass) i.next();
if (equivClass.isAnon() && equivClass.getId().toString().equals(restId)) {
rest = equivClass;
}
}
if ( rest == null ) {
for ( Iterator i = restrictedClass.listSuperClasses(); i.hasNext(); ) {
OntClass superClass = (OntClass) i.next();
if (superClass.isAnon() && superClass.getId().toString().equals(restId)) {
rest = superClass;
}
}
}
if ( rest != null ) {
rest.remove();
}
}
} else {
OntProperty onProperty = ontModel.getOntProperty( (String) request.getParameter("onProperty") );
String conditionTypeStr = request.getParameter("conditionType");
String restrictionTypeStr = (String) epo.getAttribute("restrictionType");
Restriction rest = null;
OntClass ontClass = ontModel.getOntClass( (String) epo.getAttribute("VClassURI") );
String roleFillerURIStr = request.getParameter("ValueClass");
Resource roleFiller = null;
if (roleFillerURIStr != null) {
roleFiller = ontModel.getResource(roleFillerURIStr);
}
int cardinality = -1;
String cardinalityStr = request.getParameter("cardinality");
if (cardinalityStr != null) {
cardinality = Integer.decode(cardinalityStr);
}
if (restrictionTypeStr.equals("allValuesFrom")) {
rest = ontModel.createAllValuesFromRestriction(null,onProperty,roleFiller);
} else if (restrictionTypeStr.equals("someValuesFrom")) {
rest = ontModel.createSomeValuesFromRestriction(null,onProperty,roleFiller);
} else if (restrictionTypeStr.equals("hasValue")) {
String valueURI = request.getParameter("ValueIndividual");
if (valueURI != null) {
Resource valueRes = ontModel.getResource(valueURI);
if (valueRes != null) {
rest = ontModel.createHasValueRestriction(null, onProperty, valueRes);
}
} else {
String valueLexicalForm = request.getParameter("ValueLexicalForm");
if (valueLexicalForm != null) {
String valueDatatype = request.getParameter("ValueDatatype");
Literal value = null;
if (valueDatatype != null && valueDatatype.length() > 0) {
RDFDatatype dtype = null;
try {
dtype = TypeMapper.getInstance().getSafeTypeByName(valueDatatype);
} catch (Exception e) {
log.warn ("Unable to get safe type " + valueDatatype + " using TypeMapper");
}
if (dtype != null) {
value = ontModel.createTypedLiteral(valueLexicalForm, dtype);
} else {
value = ontModel.createLiteral(valueLexicalForm);
}
} else {
value = ontModel.createLiteral(valueLexicalForm);
}
rest = ontModel.createHasValueRestriction(null, onProperty, value);
}
}
} else if (restrictionTypeStr.equals("minCardinality")) {
rest = ontModel.createMinCardinalityRestriction(null,onProperty,cardinality);
} else if (restrictionTypeStr.equals("maxCardinality")) {
rest = ontModel.createMaxCardinalityRestriction(null,onProperty,cardinality);
} else if (restrictionTypeStr.equals("cardinality")) {
rest = ontModel.createCardinalityRestriction(null,onProperty,cardinality);
}
if (conditionTypeStr.equals("necessary")) {
ontClass.addSuperClass(rest);
} else if (conditionTypeStr.equals("necessaryAndSufficient")) {
ontClass.addEquivalentClass(rest);
}
}
} finally {
ontModel.getBaseModel().notifyEvent(new EditEvent(request.getFullWebappDaoFactory().getUserURI(),false));
ontModel.leaveCriticalSection();
}
if ( (request.getParameter("_cancel") == null ) ) {
processRestriction(request, epo, ontModel);
}
//if no page forwarder was set, just go back to referring page:
String referer = epo.getReferer();
if (referer == null) {
@@ -198,7 +84,144 @@ public class RestrictionOperationController extends BaseEditController {
throw new RuntimeException(f);
}
}
}
private void processRestriction(VitroRequest request, EditProcessObject epo, OntModel ontModel) {
ontModel.enterCriticalSection(Lock.WRITE);
try {
ontModel.getBaseModel().notifyEvent(new EditEvent(request.getFullWebappDaoFactory().getUserURI(),true));
if ("delete".equals(request.getParameter("_action"))) {
processDelete(request, ontModel);
} else {
processCreate(request, epo, ontModel);
}
} finally {
ontModel.getBaseModel().notifyEvent(new EditEvent(request.getFullWebappDaoFactory().getUserURI(),false));
ontModel.leaveCriticalSection();
}
}
private void processDelete(VitroRequest request, OntModel ontModel) {
String restId = request.getParameter("restrictionId");
if (restId != null) {
OntClass restrictedClass = ontModel.getOntClass( request.getParameter( "classUri" ) );
OntClass rest = null;
for ( Iterator i = restrictedClass.listEquivalentClasses(); i.hasNext(); ) {
OntClass equivClass = (OntClass) i.next();
if (equivClass.isAnon() && equivClass.getId().toString().equals(restId)) {
rest = equivClass;
}
}
if ( rest == null ) {
for ( Iterator i = restrictedClass.listSuperClasses(); i.hasNext(); ) {
OntClass superClass = (OntClass) i.next();
if (superClass.isAnon() && superClass.getId().toString().equals(restId)) {
rest = superClass;
}
}
}
/**
* removing by graph subtraction so that statements with blank nodes
* stick together and are processed appropriately by the bulk update
* handler
*/
if ( rest != null ) {
Model temp = ModelFactory.createDefaultModel();
temp.add(rest.listProperties());
ontModel.getBaseModel().remove(temp);
}
}
}
private void processCreate(VitroRequest request, EditProcessObject epo, OntModel origModel) {
Model temp = ModelFactory.createDefaultModel();
Model dynamicUnion = ModelFactory.createUnion(temp, origModel);
OntModel ontModel = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM, dynamicUnion);
OntProperty onProperty = ontModel.getOntProperty( (String) request.getParameter("onProperty") );
String conditionTypeStr = request.getParameter("conditionType");
String restrictionTypeStr = (String) epo.getAttribute("restrictionType");
Restriction rest = null;
OntClass ontClass = ontModel.getOntClass( (String) epo.getAttribute("VClassURI") );
String roleFillerURIStr = request.getParameter("ValueClass");
Resource roleFiller = null;
if (roleFillerURIStr != null) {
roleFiller = ontModel.getResource(roleFillerURIStr);
}
int cardinality = -1;
String cardinalityStr = request.getParameter("cardinality");
if (cardinalityStr != null) {
cardinality = Integer.decode(cardinalityStr);
}
if (restrictionTypeStr.equals("allValuesFrom")) {
rest = ontModel.createAllValuesFromRestriction(null,onProperty,roleFiller);
} else if (restrictionTypeStr.equals("someValuesFrom")) {
rest = ontModel.createSomeValuesFromRestriction(null,onProperty,roleFiller);
} else if (restrictionTypeStr.equals("hasValue")) {
String valueURI = request.getParameter("ValueIndividual");
if (valueURI != null) {
Resource valueRes = ontModel.getResource(valueURI);
if (valueRes != null) {
rest = ontModel.createHasValueRestriction(null, onProperty, valueRes);
}
} else {
String valueLexicalForm = request.getParameter("ValueLexicalForm");
if (valueLexicalForm != null) {
String valueDatatype = request.getParameter("ValueDatatype");
Literal value = null;
if (valueDatatype != null && valueDatatype.length() > 0) {
RDFDatatype dtype = null;
try {
dtype = TypeMapper.getInstance().getSafeTypeByName(valueDatatype);
} catch (Exception e) {
log.warn ("Unable to get safe type " + valueDatatype + " using TypeMapper");
}
if (dtype != null) {
value = ontModel.createTypedLiteral(valueLexicalForm, dtype);
} else {
value = ontModel.createLiteral(valueLexicalForm);
}
} else {
value = ontModel.createLiteral(valueLexicalForm);
}
rest = ontModel.createHasValueRestriction(null, onProperty, value);
}
}
} else if (restrictionTypeStr.equals("minCardinality")) {
rest = ontModel.createMinCardinalityRestriction(null,onProperty,cardinality);
} else if (restrictionTypeStr.equals("maxCardinality")) {
rest = ontModel.createMaxCardinalityRestriction(null,onProperty,cardinality);
} else if (restrictionTypeStr.equals("cardinality")) {
rest = ontModel.createCardinalityRestriction(null,onProperty,cardinality);
}
if (conditionTypeStr.equals("necessary")) {
ontClass.addSuperClass(rest);
} else if (conditionTypeStr.equals("necessaryAndSufficient")) {
ontClass.addEquivalentClass(rest);
}
origModel.add(temp);
}
}
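
A note on processDelete() above: the restriction is removed by collecting its statements into a temporary model and subtracting that model from the base model, so statements involving blank nodes move through the bulk update handler as a unit. The same pattern in isolation (method and variable names are made up):

import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.Resource;

// Remove the statements rooted at a resource with one bulk subtraction
// instead of deleting them statement by statement.
void removeBySubtraction(Model baseModel, Resource res) {
    Model temp = ModelFactory.createDefaultModel();
    temp.add(res.listProperties());
    baseModel.remove(temp);
}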

View file

@@ -211,7 +211,7 @@ class IndividualResponseBuilder {
private IndividualTemplateModel getIndividualTemplateModel(
Individual individual) {
individual.sortForDisplay();
//individual.sortForDisplay();
return new IndividualTemplateModel(individual, vreq);
}

View file

@@ -69,11 +69,12 @@ import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest;
import edu.cornell.mannlib.vitro.webapp.dao.OntologyDao;
import edu.cornell.mannlib.vitro.webapp.dao.jena.JenaBaseDao;
import edu.cornell.mannlib.vitro.webapp.dao.jena.ModelContext;
import edu.cornell.mannlib.vitro.webapp.dao.jena.RDFServiceModelMaker;
import edu.cornell.mannlib.vitro.webapp.dao.jena.VitroJenaModelMaker;
import edu.cornell.mannlib.vitro.webapp.dao.jena.VitroJenaSDBModelMaker;
import edu.cornell.mannlib.vitro.webapp.dao.jena.VitroJenaSpecialModelMaker;
import edu.cornell.mannlib.vitro.webapp.dao.jena.event.EditEvent;
import edu.cornell.mannlib.vitro.webapp.servlet.setup.WebappDaoSDBSetup;
import edu.cornell.mannlib.vitro.webapp.servlet.setup.WebappDaoSetup;
import edu.cornell.mannlib.vitro.webapp.utils.SparqlQueryUtils;
import edu.cornell.mannlib.vitro.webapp.utils.jena.JenaIngestUtils;
import edu.cornell.mannlib.vitro.webapp.utils.jena.JenaIngestUtils.MergeResult;
@@ -206,13 +207,13 @@ public class JenaIngestController extends BaseEditController {
}
private void processRDBModelsRequest(VitroRequest vreq, ModelMaker maker, String modelType) {
VitroJenaModelMaker vjmm = (VitroJenaModelMaker) getServletContext().getAttribute("vitroJenaModelMaker");
ModelMaker vjmm = (ModelMaker) getServletContext().getAttribute("vitroJenaModelMaker");
vreq.getSession().setAttribute("vitroJenaModelMaker", vjmm);
showModelList(vreq, vjmm, "rdb");
}
private void processSDBModelsRequest(VitroRequest vreq, ModelMaker maker, String modelType) {
VitroJenaSDBModelMaker vsmm = (VitroJenaSDBModelMaker) getServletContext().getAttribute("vitroJenaSDBModelMaker");
ModelMaker vsmm = (ModelMaker) getServletContext().getAttribute("vitroJenaSDBModelMaker");
vreq.getSession().setAttribute("vitroJenaModelMaker", vsmm);
showModelList(vreq, vsmm, "sdb");
}
@@ -714,7 +715,7 @@ public class JenaIngestController extends BaseEditController {
? ((VitroJenaSpecialModelMaker) maker).getInnerModelMaker()
: maker;
if (modelType == null) {
if (maker instanceof VitroJenaSDBModelMaker) {
if (maker instanceof RDFServiceModelMaker) {
modelType = "sdb";
} else {
modelType = "rdb";
@@ -942,7 +943,7 @@ public class JenaIngestController extends BaseEditController {
log.debug("Connecting to DB at "+jdbcUrl);
StoreDesc storeDesc = new StoreDesc(LayoutType.LayoutTripleNodesHash,dbTypeObj) ;
ServletContext ctx = vreq.getSession().getServletContext();
BasicDataSource bds = WebappDaoSDBSetup.makeBasicDataSource(
BasicDataSource bds = WebappDaoSetup.makeBasicDataSource(
driver, jdbcUrl, username, password, ctx);
try {
VitroJenaSDBModelMaker vsmm = new VitroJenaSDBModelMaker(storeDesc, bds);

View file

@@ -4,6 +4,7 @@ package edu.cornell.mannlib.vitro.webapp.controller.jena;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.List;
import java.util.Map;
@@ -37,6 +38,10 @@ import edu.cornell.mannlib.vitro.webapp.dao.jena.OntModelSelector;
import edu.cornell.mannlib.vitro.webapp.dao.jena.event.BulkUpdateEvent;
import edu.cornell.mannlib.vitro.webapp.dao.jena.event.EditEvent;
import edu.cornell.mannlib.vitro.webapp.filestorage.uploadrequest.FileUploadServletRequest;
import edu.cornell.mannlib.vitro.webapp.rdfservice.ChangeSet;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException;
import edu.cornell.mannlib.vitro.webapp.servlet.setup.JenaDataSourceSetupBase;
public class RDFUploadController extends JenaIngestController {
@@ -119,12 +124,17 @@ public class RDFUploadController extends JenaIngestController {
&& fileStreams.get("rdfStream").size() > 0 ) {
FileItem rdfStream = fileStreams.get("rdfStream").get(0);
try {
uploadModel.enterCriticalSection(Lock.WRITE);
try {
uploadModel.read(
rdfStream.getInputStream(), null, languageStr);
} finally {
uploadModel.leaveCriticalSection();
if (directRead) {
addUsingRDFService(rdfStream.getInputStream(), languageStr,
request.getRDFService());
} else {
uploadModel.enterCriticalSection(Lock.WRITE);
try {
uploadModel.read(
rdfStream.getInputStream(), null, languageStr);
} finally {
uploadModel.leaveCriticalSection();
}
}
uploadDesc = verb + " RDF from file " + rdfStream.getName();
} catch (IOException e) {
@@ -198,6 +208,24 @@ public class RDFUploadController extends JenaIngestController {
}
}
private void addUsingRDFService(InputStream in, String languageStr,
RDFService rdfService) {
ChangeSet changeSet = rdfService.manufactureChangeSet();
RDFService.ModelSerializationFormat format =
("RDF/XML".equals(languageStr)
|| "RDF/XML-ABBREV".equals(languageStr))
? RDFService.ModelSerializationFormat.RDFXML
: RDFService.ModelSerializationFormat.N3;
changeSet.addAddition(in, format,
JenaDataSourceSetupBase.JENA_DB_MODEL);
try {
rdfService.changeSetUpdate(changeSet);
} catch (RDFServiceException rdfse) {
log.error(rdfse);
throw new RuntimeException(rdfse);
}
}
public void loadRDF(FileUploadServletRequest req,
VitroRequest request,
HttpServletResponse response)
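
addUsingRDFService() above streams the uploaded RDF straight into a ChangeSet rather than reading it into a Jena model first. The same ChangeSet pattern in isolation (the N3 string and graph URI are made up):

import edu.cornell.mannlib.vitro.webapp.rdfservice.ChangeSet;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException;
import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.RDFServiceUtils;

void addTriples(RDFService rdfService) throws RDFServiceException {
    String n3 = "<http://example.org/n1> <http://example.org/p> \"value\" .";
    ChangeSet changeSet = rdfService.manufactureChangeSet();
    changeSet.addAddition(RDFServiceUtils.toInputStream(n3),
            RDFService.ModelSerializationFormat.N3,
            "http://example.org/graph"); // hypothetical target graph URI
    rdfService.changeSetUpdate(changeSet); // the whole set is applied in one update
}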

View file

@@ -0,0 +1,44 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.dao.jena;
import java.util.HashSet;
import com.hp.hpl.jena.rdf.model.ModelChangedListener;
import edu.cornell.mannlib.vitro.webapp.servlet.setup.JenaDataSourceSetupBase;
public class ABoxJenaChangeListener extends JenaChangeListener {
private HashSet<String> ignoredGraphs = new HashSet<String>();
public ABoxJenaChangeListener(ModelChangedListener listener) {
super(listener);
ignoredGraphs.add(JenaDataSourceSetupBase.JENA_INF_MODEL);
ignoredGraphs.add(JenaDataSourceSetupBase.JENA_TBOX_ASSERTIONS_MODEL);
ignoredGraphs.add(JenaDataSourceSetupBase.JENA_TBOX_INF_MODEL);
}
@Override
public void addedStatement(String serializedTriple, String graphURI) {
if (isABoxGraph(graphURI)) {
super.addedStatement(serializedTriple, graphURI);
}
}
@Override
public void removedStatement(String serializedTriple, String graphURI) {
if (isABoxGraph(graphURI)) {
super.removedStatement(serializedTriple, graphURI);
}
}
private boolean isABoxGraph(String graphURI) {
return (graphURI == null ||
JenaDataSourceSetupBase.JENA_DB_MODEL.equals(graphURI)
|| (!ignoredGraphs.contains(graphURI)
&& !graphURI.contains("filegraph")
&& !graphURI.contains("tbox")));
}
}
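
A ModelChangedListener wrapped in ABoxJenaChangeListener sees only ABox changes; isABoxGraph() screens out the inference, TBox assertions/inference, and filegraph graphs. Registration would follow the same pattern ModelContext uses for JenaChangeListener later in this commit (a sketch; the empty listener body is a placeholder):

import javax.servlet.ServletContext;

import com.hp.hpl.jena.rdf.listeners.StatementListener;

import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException;
import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.RDFServiceUtils;

void listenForABoxChanges(ServletContext ctx) throws RDFServiceException {
    RDFServiceUtils.getRDFServiceFactory(ctx).registerListener(
            new ABoxJenaChangeListener(new StatementListener() {
                // override addedStatement/removedStatement as needed
            }));
}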

View file

@@ -674,7 +674,7 @@ public class DataPropertyDaoJena extends PropertyDaoJena implements
static {
List<String> namespaceFilters = new ArrayList<String>();
for (String namespace : EXCLUDED_NAMESPACES) {
namespaceFilters.add("( afn:namespace(?property) != \"" + namespace + "\" )");
namespaceFilters.add("( !regex(str(?property), \"^" + namespace + "\" ))");
}
PROPERTY_FILTERS = StringUtils.join(namespaceFilters, " && ");
}
@@ -686,11 +686,11 @@ public class DataPropertyDaoJena extends PropertyDaoJena implements
" ?property a owl:DatatypeProperty . \n" +
" FILTER ( \n" +
" isLiteral(?object) && \n" +
" ( afn:namespace(?property) != \"" + VitroVocabulary.PUBLIC + "\" ) && \n" +
" ( afn:namespace(?property) != \"" + VitroVocabulary.OWL + "\" ) && \n" +
" ( !regex(str(?property), \"^" + VitroVocabulary.PUBLIC + "\" )) && \n" +
" ( !regex(str(?property), \"^" + VitroVocabulary.OWL + "\" )) && \n" +
// NIHVIVO-2790 vitro:moniker has been deprecated, but display existing values for editorial management (deletion is encouraged).
// This property will be hidden from public display by default.
" ( ?property = <" + VitroVocabulary.MONIKER + "> || afn:namespace(?property) != \"" + VitroVocabulary.vitroURI + "\" ) \n" +
" ( ?property = <" + VitroVocabulary.MONIKER + "> || !regex(str(?property), \"^" + VitroVocabulary.vitroURI + "\" )) \n" +
" ) \n" +
"}";

View file

@@ -0,0 +1,138 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.dao.jena;
import java.util.Set;
import com.hp.hpl.jena.graph.BulkUpdateHandler;
import com.hp.hpl.jena.graph.Capabilities;
import com.hp.hpl.jena.graph.Graph;
import com.hp.hpl.jena.graph.GraphEventManager;
import com.hp.hpl.jena.graph.GraphStatisticsHandler;
import com.hp.hpl.jena.graph.Node;
import com.hp.hpl.jena.graph.Reifier;
import com.hp.hpl.jena.graph.TransactionHandler;
import com.hp.hpl.jena.graph.Triple;
import com.hp.hpl.jena.graph.TripleMatch;
import com.hp.hpl.jena.graph.query.QueryHandler;
import com.hp.hpl.jena.shared.AddDeniedException;
import com.hp.hpl.jena.shared.DeleteDeniedException;
import com.hp.hpl.jena.shared.PrefixMapping;
import com.hp.hpl.jena.util.iterator.ExtendedIterator;
import com.hp.hpl.jena.util.iterator.WrappedIterator;
public class DifferenceGraph implements Graph {
private Graph g;
private Graph subtract;
public DifferenceGraph(Graph g, Graph subtract) {
this.g = g;
this.subtract = subtract;
}
@Override
public void close() {
// not clear what the best behavior here is
}
@Override
public boolean contains(Triple arg0) {
return g.contains(arg0) && !subtract.contains(arg0);
}
@Override
public boolean contains(Node arg0, Node arg1, Node arg2) {
return g.contains(arg0, arg1, arg2) && !subtract.contains(arg0, arg1, arg2);
}
@Override
public void delete(Triple arg0) throws DeleteDeniedException {
g.delete(arg0);
}
@Override
public boolean dependsOn(Graph arg0) {
return g.dependsOn(arg0);
}
@Override
public ExtendedIterator<Triple> find(TripleMatch arg0) {
Set<Triple> tripSet = g.find(arg0).toSet();
tripSet.removeAll(subtract.find(arg0).toSet());
return WrappedIterator.create(tripSet.iterator());
}
@Override
public ExtendedIterator<Triple> find(Node arg0, Node arg1, Node arg2) {
Set<Triple> tripSet = g.find(arg0, arg1, arg2).toSet();
tripSet.removeAll(subtract.find(arg0, arg1, arg2).toSet());
return WrappedIterator.create(tripSet.iterator());
}
@Override
public BulkUpdateHandler getBulkUpdateHandler() {
return g.getBulkUpdateHandler();
}
@Override
public Capabilities getCapabilities() {
return g.getCapabilities();
}
@Override
public GraphEventManager getEventManager() {
return g.getEventManager();
}
@Override
public PrefixMapping getPrefixMapping() {
return g.getPrefixMapping();
}
@Override
public Reifier getReifier() {
return g.getReifier();
}
@Override
public GraphStatisticsHandler getStatisticsHandler() {
return g.getStatisticsHandler();
}
@Override
public TransactionHandler getTransactionHandler() {
return g.getTransactionHandler();
}
@Override
public boolean isClosed() {
return g.isClosed();
}
@Override
public boolean isEmpty() {
return g.isEmpty();
}
@Override
public boolean isIsomorphicWith(Graph arg0) {
return g.isIsomorphicWith(arg0);
}
@Override
public QueryHandler queryHandler() {
return g.queryHandler();
}
@Override
public int size() {
return g.size() - subtract.size();
}
@Override
public void add(Triple arg0) throws AddDeniedException {
g.add(arg0);
}
}
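
DifferenceGraph is a live view of one graph minus another: contains() and find() consult both graphs on each call, and nothing is copied. A minimal usage sketch (the two models are placeholders):

import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;

// A model over 'full' with the statements of 'inferred' masked out.
Model full = ModelFactory.createDefaultModel();
Model inferred = ModelFactory.createDefaultModel();
Model assertedOnly = ModelFactory.createModelForGraph(
        new DifferenceGraph(full.getGraph(), inferred.getGraph()));

Note that size() simply subtracts the two sizes, so it is only accurate when the subtracted graph is a subset of the base graph.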

View file

@@ -0,0 +1,119 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.dao.jena;
import java.util.ArrayList;
import java.util.Collections;
import org.apache.commons.collections.iterators.EmptyIterator;
import com.hp.hpl.jena.graph.Graph;
import com.hp.hpl.jena.graph.Node;
import com.hp.hpl.jena.graph.Reifier;
import com.hp.hpl.jena.graph.Triple;
import com.hp.hpl.jena.graph.TripleMatch;
import com.hp.hpl.jena.shared.ReificationStyle;
import com.hp.hpl.jena.util.iterator.ExtendedIterator;
import com.hp.hpl.jena.util.iterator.WrappedIterator;
public class EmptyReifier implements Reifier {
private Graph g;
public EmptyReifier(Graph g) {
this.g = g;
}
@Override
public Triple getTriple(Node arg0) {
// TODO Auto-generated method stub
return null;
}
@Override
public ExtendedIterator<Node> allNodes() {
return WrappedIterator.create(Collections.EMPTY_LIST.iterator());
}
@Override
public ExtendedIterator<Node> allNodes(Triple arg0) {
return WrappedIterator.create(Collections.EMPTY_LIST.iterator());
}
@Override
public void close() {
// TODO Auto-generated method stub
}
@Override
public ExtendedIterator<Triple> find(TripleMatch arg0) {
return g.find(arg0);
}
@Override
public ExtendedIterator<Triple> findEither(TripleMatch arg0, boolean arg1) {
return WrappedIterator.create(EmptyIterator.INSTANCE);
}
@Override
public ExtendedIterator<Triple> findExposed(TripleMatch arg0) {
return WrappedIterator.create(EmptyIterator.INSTANCE);
}
@Override
public Graph getParentGraph() {
return g;
}
@Override
public ReificationStyle getStyle() {
return ReificationStyle.Minimal;
}
@Override
public boolean handledAdd(Triple arg0) {
g.add(arg0);
return true;
}
@Override
public boolean handledRemove(Triple arg0) {
g.delete(arg0);
return true;
}
@Override
public boolean hasTriple(Node arg0) {
// TODO Auto-generated method stub
return false;
}
@Override
public boolean hasTriple(Triple arg0) {
// TODO Auto-generated method stub
return false;
}
@Override
public Node reifyAs(Node arg0, Triple arg1) {
// TODO Auto-generated method stub
return null;
}
@Override
public void remove(Triple arg0) {
g.delete(arg0);
}
@Override
public void remove(Node arg0, Triple arg1) {
g.delete(arg1);
}
@Override
public int size() {
return g.size();
}
}

View file

@@ -109,7 +109,8 @@ public class IndividualDaoJena extends JenaBaseDao implements IndividualDao {
public void removeVClass(String individualURI, String vclassURI) {
OntModel ontModel = getOntModelSelector().getABoxModel();
ontModel.enterCriticalSection(Lock.WRITE);
ontModel.getBaseModel().notifyEvent(new IndividualUpdateEvent(getWebappDaoFactory().getUserURI(),true,individualURI));
Object event = new IndividualUpdateEvent(getWebappDaoFactory().getUserURI(),true,individualURI);
ontModel.getBaseModel().notifyEvent(event);
try {
Resource indRes = ontModel.getResource(individualURI);
getOntModel().remove(indRes, RDF.type, ontModel.getResource(vclassURI));

View file

@@ -134,6 +134,12 @@ public class IndividualSDB extends IndividualImpl implements Individual {
QueryFactory.create(getStatements), dataset)
.execConstruct();
} finally {
if (dataset == null) {
throw new RuntimeException("dataset is null");
} else if (dataset.getLock() == null) {
throw new RuntimeException("dataset lock is null");
}
dataset.getLock().leaveCriticalSection();
w.close();
}

View file

@@ -0,0 +1,72 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.dao.jena;
import java.io.ByteArrayInputStream;
import java.io.UnsupportedEncodingException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelChangedListener;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.Statement;
import com.hp.hpl.jena.rdf.model.StmtIterator;
import edu.cornell.mannlib.vitro.webapp.rdfservice.ChangeListener;
/**
* A ChangeListener that forwards events to a Jena ModelChangedListener
* @author bjl23
*
*/
public class JenaChangeListener implements ChangeListener {
private static final Log log = LogFactory.getLog(JenaChangeListener.class);
private ModelChangedListener listener;
private Model m = ModelFactory.createDefaultModel();
public JenaChangeListener(ModelChangedListener listener) {
this.listener = listener;
}
@Override
public void addedStatement(String serializedTriple, String graphURI) {
listener.addedStatement(parseTriple(serializedTriple));
}
@Override
public void removedStatement(String serializedTriple, String graphURI) {
listener.removedStatement(parseTriple(serializedTriple));
}
@Override
public void notifyEvent(String graphURI, Object event) {
log.debug("event: " + event.getClass());
listener.notifyEvent(m, event);
}
// TODO avoid overhead of Model
private Statement parseTriple(String serializedTriple) {
try {
Model m = ModelFactory.createDefaultModel();
m.read(new ByteArrayInputStream(
serializedTriple.getBytes("UTF-8")), null, "N3");
StmtIterator sit = m.listStatements();
if (!sit.hasNext()) {
throw new RuntimeException("no triple parsed from change event");
} else {
Statement s = sit.nextStatement();
if (sit.hasNext()) {
log.warn("More than one triple parsed from change event");
}
return s;
}
} catch (UnsupportedEncodingException uee) {
throw new RuntimeException(uee);
}
}
}
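
JenaChangeListener lets existing ModelChangedListener implementations keep working when changes arrive as serialized triples from the RDF API instead of through a Jena model. A bridging sketch (the listener body is a placeholder):

import com.hp.hpl.jena.rdf.listeners.StatementListener;
import com.hp.hpl.jena.rdf.model.Statement;

import edu.cornell.mannlib.vitro.webapp.rdfservice.ChangeListener;

// Wrap a Jena StatementListener so it can receive RDFService change events.
ChangeListener bridge = new JenaChangeListener(new StatementListener() {
    @Override
    public void addedStatement(Statement s) {
        // s was parsed from the N3 serialization of the changed triple
    }
});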

View file

@@ -4,12 +4,19 @@ package edu.cornell.mannlib.vitro.webapp.dao.jena;
import javax.servlet.ServletContext;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.rdf.model.ModelChangedListener;
import edu.cornell.mannlib.vitro.webapp.dao.DisplayVocabulary;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException;
import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.RDFServiceUtils;
public class ModelContext {
private static final Log log = LogFactory.getLog(ModelContext.class);
private static final String ONT_MODEL_SELECTOR = "ontModelSelector";
private static final String UNION_ONT_MODEL_SELECTOR = "unionOntModelSelector";
@@ -96,15 +103,13 @@
* Changes to application model
*/
public static void registerListenerForChanges(ServletContext ctx, ModelChangedListener ml){
ModelContext.getJenaOntModel(ctx).register(ml);
ModelContext.getBaseOntModel(ctx).register(ml);
ModelContext.getInferenceOntModel(ctx).register(ml);
ModelContext.getUnionOntModelSelector(ctx).getABoxModel().register(ml);
ModelContext.getBaseOntModelSelector(ctx).getABoxModel().register(ml);
ModelContext.getBaseOntModelSelector(ctx).getApplicationMetadataModel().register(ml);
ModelContext.getInferenceOntModelSelector(ctx).getABoxModel().register(ml);
ModelContext.getBaseOntModelSelector(ctx).getTBoxModel().register(ml);
try {
RDFServiceUtils.getRDFServiceFactory(ctx).registerListener(
new JenaChangeListener(ml));
} catch (RDFServiceException e) {
log.error(e,e);
}
}

View file

@@ -785,12 +785,12 @@ public class ObjectPropertyDaoJena extends PropertyDaoJena implements ObjectProp
static {
List<String> namespaceFilters = new ArrayList<String>();
for (String namespace : EXCLUDED_NAMESPACES) {
namespaceFilters.add("( afn:namespace(?property) != \"" + namespace + "\" )");
namespaceFilters.add("( !regex(str(?property), \"^" + namespace + "\" ))");
}
// A hack to include the vitro:primaryLink and vitro:additionalLink properties in the list
namespaceFilters.add("( ?property = vitro:primaryLink ||" +
"?property = vitro:additionalLink ||" +
"afn:namespace(?property) != \"http://vitro.mannlib.cornell.edu/ns/vitro/0.7#\" )");
"!regex(str(?property), \"^http://vitro.mannlib.cornell.edu/ns/vitro/0.7#\" ))");
PROPERTY_FILTERS = StringUtils.join(namespaceFilters, " && ");
}

View file

@@ -42,16 +42,20 @@ import edu.cornell.mannlib.vitro.webapp.beans.ObjectPropertyStatementImpl;
import edu.cornell.mannlib.vitro.webapp.dao.ObjectPropertyStatementDao;
import edu.cornell.mannlib.vitro.webapp.dao.VitroVocabulary;
import edu.cornell.mannlib.vitro.webapp.dao.jena.event.IndividualUpdateEvent;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
public class ObjectPropertyStatementDaoJena extends JenaBaseDao implements ObjectPropertyStatementDao {
private static final Log log = LogFactory.getLog(ObjectPropertyStatementDaoJena.class);
private DatasetWrapperFactory dwf;
private RDFService rdfService;
public ObjectPropertyStatementDaoJena(DatasetWrapperFactory dwf,
public ObjectPropertyStatementDaoJena(RDFService rdfService,
DatasetWrapperFactory dwf,
WebappDaoFactoryJena wadf) {
super(wadf);
this.rdfService = rdfService;
this.dwf = dwf;
}
@@ -335,7 +339,7 @@ public class ObjectPropertyStatementDaoJena extends JenaBaseDao implements Objec
}
}
private Model constructModelForSelectQueries(String subjectUri,
String propertyUri,
Set<String> constructQueries) {
@@ -351,39 +355,28 @@ public class ObjectPropertyStatementDaoJena extends JenaBaseDao implements Objec
log.debug("CONSTRUCT query string for object property " +
propertyUri + ": " + queryString);
Query query = null;
queryString = queryString.replace("?subject", "<" + subjectUri + ">");
queryString = queryString.replace("?property", "<" + propertyUri + ">");
// we no longer need this query object, but we might want to do this
// query parse step to improve debugging, depending on the error returned
// through the RDF API
// try {
// QueryFactory.create(queryString, Syntax.syntaxARQ);
// } catch(Throwable th){
// log.error("Could not create CONSTRUCT SPARQL query for query " +
// "string. " + th.getMessage());
// log.error(queryString);
// return constructedModel;
// }
try {
query = QueryFactory.create(queryString, Syntax.syntaxARQ);
} catch(Throwable th){
log.error("Could not create CONSTRUCT SPARQL query for query " +
"string. " + th.getMessage());
log.error(queryString);
return constructedModel;
}
QuerySolutionMap initialBindings = new QuerySolutionMap();
initialBindings.add(
"subject", ResourceFactory.createResource(subjectUri));
initialBindings.add(
"property", ResourceFactory.createResource(propertyUri));
DatasetWrapper w = dwf.getDatasetWrapper();
Dataset dataset = w.getDataset();
dataset.getLock().enterCriticalSection(Lock.READ);
QueryExecution qe = null;
try {
qe = QueryExecutionFactory.create(
query, dataset, initialBindings);
qe.execConstruct(constructedModel);
constructedModel.read(
rdfService.sparqlConstructQuery(
queryString, RDFService.ModelSerializationFormat.N3), null, "N3");
} catch (Exception e) {
log.error("Error getting constructed model for subject " + subjectUri + " and property " + propertyUri);
} finally {
if (qe != null) {
qe.close();
}
dataset.getLock().leaveCriticalSection();
w.close();
}
}
}
return constructedModel;
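
Since the RDF API accepts only a bare query string, the initial bindings that were previously supplied through a QuerySolutionMap are now baked in by textual substitution before the query is sent. The same pattern in isolation (query and URI are made up):

import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;

import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException;

Model constructAbout(RDFService rdfService, String subjectUri)
        throws RDFServiceException {
    String queryString = "CONSTRUCT { ?subject ?p ?o } WHERE { ?subject ?p ?o }"
            .replace("?subject", "<" + subjectUri + ">"); // bind by substitution
    Model m = ModelFactory.createDefaultModel();
    m.read(rdfService.sparqlConstructQuery(
            queryString, RDFService.ModelSerializationFormat.N3), null, "N3");
    return m;
}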

View file

@@ -23,16 +23,15 @@ import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.rdf.model.Statement;
import com.hp.hpl.jena.shared.Lock;
import com.hp.hpl.jena.util.iterator.ClosableIterator;
import com.hp.hpl.jena.vocabulary.RDFS;
import edu.cornell.mannlib.vitro.webapp.beans.Individual;
import edu.cornell.mannlib.vitro.webapp.beans.ObjectProperty;
import edu.cornell.mannlib.vitro.webapp.beans.ObjectPropertyStatement;
import edu.cornell.mannlib.vitro.webapp.beans.ObjectPropertyStatementImpl;
import edu.cornell.mannlib.vitro.webapp.dao.ObjectPropertyStatementDao;
import edu.cornell.mannlib.vitro.webapp.dao.VitroVocabulary;
import edu.cornell.mannlib.vitro.webapp.dao.jena.IndividualSDB.IndividualNotFoundException;
import edu.cornell.mannlib.vitro.webapp.dao.jena.WebappDaoFactorySDB.SDBDatasetMode;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
public class ObjectPropertyStatementDaoSDB extends
ObjectPropertyStatementDaoJena implements ObjectPropertyStatementDao {
@@ -43,10 +42,11 @@ public class ObjectPropertyStatementDaoSDB extends
private SDBDatasetMode datasetMode;
public ObjectPropertyStatementDaoSDB(
RDFService rdfService,
DatasetWrapperFactory dwf,
SDBDatasetMode datasetMode,
WebappDaoFactoryJena wadf) {
super (dwf, wadf);
super (rdfService, dwf, wadf);
this.dwf = dwf;
this.datasetMode = datasetMode;
}

View file

@@ -3,7 +3,6 @@
package edu.cornell.mannlib.vitro.webapp.dao.jena;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
@@ -35,6 +34,7 @@ import com.hp.hpl.jena.rdf.model.Statement;
import com.hp.hpl.jena.rdf.model.StmtIterator;
import com.hp.hpl.jena.shared.Lock;
import com.hp.hpl.jena.sparql.resultset.ResultSetMem;
import com.hp.hpl.jena.util.iterator.ClosableIterator;
import com.hp.hpl.jena.vocabulary.OWL;
import com.hp.hpl.jena.vocabulary.RDFS;
@@ -596,6 +596,31 @@ public class PropertyDaoJena extends JenaBaseDao implements PropertyDao {
}
}
private List<OntClass> listSuperClasses(OntClass ontClass) {
return relatedClasses(ontClass, RDFS.subClassOf);
}
private List<OntClass> listEquivalentClasses(OntClass ontClass) {
return relatedClasses(ontClass, OWL.equivalentClass);
}
private List<OntClass> relatedClasses(OntClass ontClass,
com.hp.hpl.jena.rdf.model.Property property) {
List<OntClass> classes = new ArrayList<OntClass>();
StmtIterator closeIt = ontClass.listProperties(property);
try {
while (closeIt.hasNext()) {
Statement stmt = closeIt.nextStatement();
if (stmt.getObject().canAs(OntClass.class)) {
classes.add(stmt.getObject().as(OntClass.class));
}
}
} finally {
closeIt.close();
}
return classes;
}
public List<PropertyInstance> getAllPropInstByVClasses(List<VClass> vclasses) {
List<PropertyInstance> propInsts = new ArrayList<PropertyInstance>();
@@ -628,11 +653,11 @@ public class PropertyDaoJena extends JenaBaseDao implements PropertyDao {
OntClass ontClass = getOntClass(ontModel,VClassURI);
if (ontClass != null) {
List<OntClass> relatedClasses = new ArrayList<OntClass>();
relatedClasses.addAll(ontClass.listEquivalentClasses().toList());
relatedClasses.addAll(ontClass.listSuperClasses().toList());
relatedClasses.addAll(listEquivalentClasses(ontClass));
relatedClasses.addAll(listSuperClasses(ontClass));
for (OntClass relatedClass : relatedClasses) {
// find properties in restrictions
if (relatedClass.isRestriction()) {
if (relatedClass.isRestriction() && relatedClass.canAs(Restriction.class)) {
// TODO: check if restriction is something like
// maxCardinality 0 or allValuesFrom owl:Nothing,
// in which case the property is NOT applicable!

View file

@@ -0,0 +1,70 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.dao.jena;
import java.util.ArrayList;
import java.util.Iterator;
import com.hp.hpl.jena.graph.Node;
import com.hp.hpl.jena.query.Dataset;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.shared.Lock;
import com.hp.hpl.jena.sparql.core.DatasetGraph;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
public class RDFServiceDataset implements Dataset {
private RDFServiceDatasetGraph g;
public RDFServiceDataset(RDFServiceDatasetGraph g) {
this.g = g;
}
public RDFServiceDataset(RDFService rdfService) {
this.g = new RDFServiceDatasetGraph(rdfService);
}
@Override
public DatasetGraph asDatasetGraph() {
return g;
}
@Override
public void close() {
g.close();
}
@Override
public boolean containsNamedModel(String arg0) {
return g.containsGraph(Node.createURI(arg0));
}
@Override
public Model getDefaultModel() {
return RDFServiceGraph.createRDFServiceModel(g.getDefaultGraph());
}
@Override
public Lock getLock() {
return g.getLock();
}
@Override
public Model getNamedModel(String arg0) {
return RDFServiceGraph.createRDFServiceModel(g.getGraph(Node.createURI(arg0)));
}
@Override
public Iterator<String> listNames() {
ArrayList<String> nameList = new ArrayList<String>();
Iterator<Node> nodeIt = g.listGraphNodes();
while (nodeIt.hasNext()) {
Node n = nodeIt.next();
nameList.add(n.getURI());
}
return nameList.iterator();
}
}
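
RDFServiceDataset adapts an RDFService to Jena's Dataset interface (via the RDFServiceDatasetGraph that follows), so ordinary ARQ query execution runs against the new API. A usage sketch following the locking idiom used elsewhere in this commit:

import com.hp.hpl.jena.query.Dataset;
import com.hp.hpl.jena.query.QueryExecution;
import com.hp.hpl.jena.query.QueryExecutionFactory;
import com.hp.hpl.jena.query.ResultSet;
import com.hp.hpl.jena.shared.Lock;

import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;

void listGraphs(RDFService rdfService) {
    Dataset dataset = new RDFServiceDataset(rdfService);
    dataset.getLock().enterCriticalSection(Lock.READ);
    QueryExecution qe = QueryExecutionFactory.create(
            "SELECT DISTINCT ?g WHERE { GRAPH ?g { ?s ?p ?o } }", dataset);
    try {
        ResultSet rs = qe.execSelect();
        while (rs.hasNext()) {
            System.out.println(rs.nextSolution());
        }
    } finally {
        qe.close();
        dataset.getLock().leaveCriticalSection();
    }
}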

View file

@@ -0,0 +1,223 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.dao.jena;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import com.hp.hpl.jena.graph.Graph;
import com.hp.hpl.jena.graph.Node;
import com.hp.hpl.jena.graph.Triple;
import com.hp.hpl.jena.query.Query;
import com.hp.hpl.jena.query.QueryExecution;
import com.hp.hpl.jena.query.QueryExecutionFactory;
import com.hp.hpl.jena.query.QueryFactory;
import com.hp.hpl.jena.query.QuerySolution;
import com.hp.hpl.jena.query.ResultSet;
import com.hp.hpl.jena.shared.Lock;
import com.hp.hpl.jena.shared.LockMRSW;
import com.hp.hpl.jena.sparql.core.DatasetGraph;
import com.hp.hpl.jena.sparql.core.Quad;
import com.hp.hpl.jena.sparql.resultset.JSONInput;
import com.hp.hpl.jena.sparql.resultset.ResultSetMem;
import com.hp.hpl.jena.sparql.util.Context;
import com.hp.hpl.jena.util.iterator.SingletonIterator;
import com.hp.hpl.jena.util.iterator.WrappedIterator;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException;
public class RDFServiceDatasetGraph implements DatasetGraph {
private RDFService rdfService;
private Lock lock = new LockMRSW();
public RDFServiceDatasetGraph(RDFService rdfService) {
this.rdfService = rdfService;
}
private Graph getGraphFor(Quad q) {
return getGraphFor(q.getGraph());
}
private Graph getGraphFor(Node g) {
return (g == Node.ANY)
? new RDFServiceGraph(rdfService)
: new RDFServiceGraph(rdfService, g.getURI());
}
@Override
public void add(Quad arg0) {
getGraphFor(arg0).add(new Triple(arg0.getSubject(), arg0.getPredicate(), arg0.getObject()));
}
@Override
public void addGraph(Node arg0, Graph arg1) {
// TODO Auto-generated method stub
}
@Override
public void close() {
// TODO Auto-generated method stub
}
@Override
public boolean contains(Quad arg0) {
return getGraphFor(arg0).contains(new Triple(arg0.getSubject(), arg0.getPredicate(), arg0.getObject()));
}
@Override
public boolean contains(Node arg0, Node arg1, Node arg2, Node arg3) {
return getGraphFor(arg0).contains(arg1, arg2, arg3);
}
@Override
public boolean containsGraph(Node arg0) {
// TODO Auto-generated method stub
return true;
}
@Override
public void delete(Quad arg0) {
getGraphFor(arg0).delete(new Triple(arg0.getSubject(), arg0.getPredicate(), arg0.getObject()));
}
@Override
public void deleteAny(Node arg0, Node arg1, Node arg2, Node arg3) {
// TODO check this
getGraphFor(arg0).delete(new Triple(arg1, arg2, arg3));
}
@Override
public Iterator<Quad> find() {
return find(Node.ANY, Node.ANY, Node.ANY, Node.ANY);
}
@Override
public Iterator<Quad> find(Quad arg0) {
return find(arg0.getSubject(), arg0.getPredicate(), arg0.getObject(), arg0.getGraph());
}
@Override
public Iterator<Quad> find(Node graph, Node subject, Node predicate, Node object) {
if (!isVar(subject) && !isVar(predicate) && !isVar(object) && !isVar(graph)) {
if (contains(graph, subject, predicate, object)) {
return new SingletonIterator(new Quad(graph, subject, predicate, object));
} else {
return WrappedIterator.create(Collections.EMPTY_LIST.iterator());
}
}
StringBuffer findQuery = new StringBuffer("SELECT * WHERE { \n");
String graphURI = !isVar(graph) ? graph.getURI() : null;
findQuery.append(" GRAPH ");
if (graphURI != null) {
findQuery.append(" <" + graphURI + ">");
} else {
findQuery.append("?g");
}
findQuery.append(" { ");
findQuery.append(SparqlGraph.sparqlNode(subject, "?s"))
.append(" ")
.append(SparqlGraph.sparqlNode(predicate, "?p"))
.append(" ")
.append(SparqlGraph.sparqlNode(object, "?o"));
findQuery.append(" } ");
findQuery.append("\n}");
//log.info(findQuery.toString());
ResultSet rs = null;
try {
rs = JSONInput.fromJSON(rdfService.sparqlSelectQuery(
findQuery.toString(), RDFService.ResultFormat.JSON));
} catch (RDFServiceException rdfse) {
throw new RuntimeException(rdfse);
}
List<Quad> quadlist = new ArrayList<Quad>();
while (rs.hasNext()) {
QuerySolution soln = rs.nextSolution();
Quad q = new Quad(isVar(graph) ? soln.get("?g").asNode() : graph,
isVar(subject) ? soln.get("?s").asNode() : subject,
isVar(predicate) ? soln.get("?p").asNode() : predicate,
isVar(object) ? soln.get("?o").asNode() : object);
//log.info(t);
quadlist.add(q);
}
//log.info(triplist.size() + " results");
return WrappedIterator.create(quadlist.iterator());
}
@Override
public Iterator<Quad> findNG(Node arg0, Node arg1, Node arg2, Node arg3) {
// TODO check this
return find(arg0, arg1, arg2, arg3);
}
@Override
public Context getContext() {
// TODO Auto-generated method stub
return null;
}
@Override
public RDFServiceGraph getDefaultGraph() {
return new RDFServiceGraph(rdfService);
}
@Override
public RDFServiceGraph getGraph(Node arg0) {
return new RDFServiceGraph(rdfService, arg0.getURI());
}
@Override
public Lock getLock() {
return lock;
}
@Override
public boolean isEmpty() {
// TODO Auto-generated method stub
return false;
}
@Override
public Iterator<Node> listGraphNodes() {
List<Node> graphNodeList = new ArrayList<Node>();
try {
for (String graphURI : rdfService.getGraphURIs()) {
graphNodeList.add(Node.createURI(graphURI));
}
} catch (RDFServiceException rdfse) {
throw new RuntimeException(rdfse);
}
return graphNodeList.iterator();
}
@Override
public void removeGraph(Node arg0) {
// TODO Auto-generated method stub
}
@Override
public void setDefaultGraph(Graph arg0) {
// TODO Auto-generated method stub
}
@Override
public long size() {
// TODO Auto-generated method stub
return 0;
}
private boolean isVar(Node node) {
return (node == null || node.isVariable() || node == Node.ANY);
}
}
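
find() translates a quad pattern into a SPARQL SELECT (wrapping the triple pattern in a GRAPH clause) and rebuilds quads from the JSON result set. A sketch that enumerates one named graph (the graph URI is hypothetical):

import java.util.Iterator;

import com.hp.hpl.jena.graph.Node;
import com.hp.hpl.jena.sparql.core.Quad;

import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;

// Unbound positions (Node.ANY) become variables in the generated SELECT.
void dumpGraph(RDFService rdfService) {
    RDFServiceDatasetGraph dsg = new RDFServiceDatasetGraph(rdfService);
    Iterator<Quad> quads = dsg.find(
            Node.createURI("http://example.org/graph"),
            Node.ANY, Node.ANY, Node.ANY);
    while (quads.hasNext()) {
        System.out.println(quads.next());
    }
}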

View file

@@ -0,0 +1,460 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.dao.jena;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.hp.hpl.jena.graph.BulkUpdateHandler;
import com.hp.hpl.jena.graph.Capabilities;
import com.hp.hpl.jena.graph.Graph;
import com.hp.hpl.jena.graph.GraphEventManager;
import com.hp.hpl.jena.graph.GraphStatisticsHandler;
import com.hp.hpl.jena.graph.Node;
import com.hp.hpl.jena.graph.Reifier;
import com.hp.hpl.jena.graph.TransactionHandler;
import com.hp.hpl.jena.graph.Triple;
import com.hp.hpl.jena.graph.TripleMatch;
import com.hp.hpl.jena.graph.impl.GraphWithPerform;
import com.hp.hpl.jena.graph.impl.SimpleEventManager;
import com.hp.hpl.jena.graph.query.QueryHandler;
import com.hp.hpl.jena.graph.query.SimpleQueryHandler;
import com.hp.hpl.jena.query.QuerySolution;
import com.hp.hpl.jena.query.ResultSet;
import com.hp.hpl.jena.rdf.listeners.StatementListener;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.shared.AddDeniedException;
import com.hp.hpl.jena.shared.DeleteDeniedException;
import com.hp.hpl.jena.shared.PrefixMapping;
import com.hp.hpl.jena.shared.impl.PrefixMappingImpl;
import com.hp.hpl.jena.sparql.resultset.JSONInput;
import com.hp.hpl.jena.util.iterator.ExtendedIterator;
import com.hp.hpl.jena.util.iterator.SingletonIterator;
import com.hp.hpl.jena.util.iterator.WrappedIterator;
import edu.cornell.mannlib.vitro.webapp.rdfservice.ChangeSet;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException;
import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.RDFServiceUtils;
public class RDFServiceGraph implements GraphWithPerform {
private RDFService rdfService;
private String graphURI;
private static final Log log = LogFactory.getLog(RDFServiceGraph.class);
private BulkUpdateHandler bulkUpdateHandler;
private PrefixMapping prefixMapping = new PrefixMappingImpl();
private GraphEventManager eventManager;
private Reifier reifier = new EmptyReifier(this);
private QueryHandler queryHandler;
/**
* Returns an RDFServiceGraph for the union of named graphs backed by an RDFService
* @param rdfService
*/
public RDFServiceGraph(RDFService rdfService) {
this(rdfService, null);
}
/**
* Returns an RDFServiceGraph for a particular named graph backed by an RDFService
* @param rdfService
* @param graphURI
*/
public RDFServiceGraph(RDFService rdfService, String graphURI) {
this.rdfService = rdfService;
this.graphURI = graphURI;
}
public RDFService getRDFService() {
return this.rdfService;
}
public String getGraphURI() {
return graphURI;
}
@Override
public void add(Triple arg0) throws AddDeniedException {
performAdd(arg0);
}
private String serialize(Triple t) {
StringBuffer sb = new StringBuffer();
sb.append(sparqlNodeUpdate(t.getSubject(), "")).append(" ")
.append(sparqlNodeUpdate(t.getPredicate(), "")).append(" ")
.append(sparqlNodeUpdate(t.getObject(), "")).append(" .");
return sb.toString();
}
@Override
public void performAdd(Triple t) {
ChangeSet changeSet = rdfService.manufactureChangeSet();
try {
changeSet.addAddition(RDFServiceUtils.toInputStream(serialize(t)),
RDFService.ModelSerializationFormat.N3, graphURI);
rdfService.changeSetUpdate(changeSet);
} catch (RDFServiceException rdfse) {
throw new RuntimeException(rdfse);
}
}
@Override
public void performDelete(Triple t) {
ChangeSet changeSet = rdfService.manufactureChangeSet();
try {
changeSet.addRemoval(RDFServiceUtils.toInputStream(serialize(t)),
RDFService.ModelSerializationFormat.N3, graphURI);
rdfService.changeSetUpdate(changeSet);
} catch (RDFServiceException rdfse) {
throw new RuntimeException(rdfse);
}
}
public void removeAll() {
// only to be used with a single graph
if (graphURI == null) {
return;
}
String constructStr = "CONSTRUCT { ?s ?p ?o } WHERE { GRAPH <" + graphURI + "> { ?s ?p ?o } }";
try {
InputStream model = rdfService.sparqlConstructQuery(
constructStr, RDFService.ModelSerializationFormat.N3);
ChangeSet changeSet = rdfService.manufactureChangeSet();
changeSet.addRemoval(model, RDFService.ModelSerializationFormat.N3, graphURI);
rdfService.changeSetUpdate(changeSet);
} catch (RDFServiceException rdfse) {
throw new RuntimeException(rdfse);
}
}
@Override
public void close() {
// can't close a remote endpoint
}
@Override
public boolean contains(Triple arg0) {
return contains(arg0.getSubject(), arg0.getPredicate(), arg0.getObject());
}
@Override
public boolean contains(Node subject, Node predicate, Node object) {
if (subject.isBlank() || predicate.isBlank() || object.isBlank()) {
return false;
}
StringBuffer containsQuery = new StringBuffer("ASK { \n");
if (graphURI != null) {
containsQuery.append(" GRAPH <" + graphURI + "> { ");
}
containsQuery.append(sparqlNode(subject, "?s"))
.append(" ")
.append(sparqlNode(predicate, "?p"))
.append(" ")
.append(sparqlNode(object, "?o"));
if (graphURI != null) {
containsQuery.append(" } \n");
}
containsQuery.append("\n}");
boolean result = execAsk(containsQuery.toString());
return result;
}
@Override
public void delete(Triple arg0) throws DeleteDeniedException {
performDelete(arg0);
}
@Override
public boolean dependsOn(Graph arg0) {
return false; // who knows?
}
@Override
public ExtendedIterator<Triple> find(TripleMatch arg0) {
//log.info("find(TripleMatch) " + arg0);
Triple t = arg0.asTriple();
return find(t.getSubject(), t.getPredicate(), t.getObject());
}
public static String sparqlNode(Node node, String varName) {
if (node == null || node.isVariable()) {
return varName;
} else if (node.isBlank()) {
return "<fake:blank>"; // or throw exception?
} else if (node.isURI()) {
StringBuffer uriBuff = new StringBuffer();
return uriBuff.append("<").append(node.getURI()).append(">").toString();
} else if (node.isLiteral()) {
StringBuffer literalBuff = new StringBuffer();
literalBuff.append("\"");
pyString(literalBuff, node.getLiteralLexicalForm());
literalBuff.append("\"");
if (node.getLiteralDatatypeURI() != null) {
literalBuff.append("^^<").append(node.getLiteralDatatypeURI()).append(">");
} else if (node.getLiteralLanguage() != null && !"".equals(node.getLiteralLanguage())) {
literalBuff.append("@").append(node.getLiteralLanguage());
}
return literalBuff.toString();
} else {
return varName;
}
}
public static String sparqlNodeUpdate(Node node, String varName) {
if (node.isBlank()) {
return "_:" + node.getBlankNodeLabel().replaceAll("\\W", "");
} else {
return sparqlNode(node, varName);
}
}
public static String sparqlNodeDelete(Node node, String varName) {
if (node.isBlank()) {
return "?" + node.getBlankNodeLabel().replaceAll("\\W", "");
} else {
return sparqlNode(node, varName);
}
}
@Override
public ExtendedIterator<Triple> find(Node subject, Node predicate, Node object) {
if (!isVar(subject) && !isVar(predicate) && !isVar(object)) {
if (contains(subject, predicate, object)) {
return new SingletonIterator(new Triple(subject, predicate, object));
} else {
return WrappedIterator.create(Collections.EMPTY_LIST.iterator());
}
}
StringBuffer findQuery = new StringBuffer("SELECT * WHERE { \n");
if (graphURI != null) {
findQuery.append(" GRAPH <" + graphURI + "> { ");
}
findQuery.append(sparqlNode(subject, "?s"))
.append(" ")
.append(sparqlNode(predicate, "?p"))
.append(" ")
.append(sparqlNode(object, "?o"));
if (graphURI != null) {
findQuery.append(" } ");
}
findQuery.append("\n}");
String queryString = findQuery.toString();
ResultSet rs = execSelect(queryString);
List<Triple> triplist = new ArrayList<Triple>();
while (rs.hasNext()) {
QuerySolution soln = rs.nextSolution();
Triple t = new Triple(isVar(subject) ? soln.get("?s").asNode() : subject,
isVar(predicate) ? soln.get("?p").asNode() : predicate,
isVar(object) ? soln.get("?o").asNode() : object);
//log.info(t);
triplist.add(t);
}
//log.info(triplist.size() + " results");
return WrappedIterator.create(triplist.iterator());
}
private boolean isVar(Node node) {
return (node == null || node.isVariable() || node == Node.ANY);
}
@Override
public BulkUpdateHandler getBulkUpdateHandler() {
if (this.bulkUpdateHandler == null) {
this.bulkUpdateHandler = new RDFServiceGraphBulkUpdater(this);
}
return this.bulkUpdateHandler;
}
@Override
public Capabilities getCapabilities() {
return capabilities;
}
@Override
public GraphEventManager getEventManager() {
if (eventManager == null) {
eventManager = new SimpleEventManager(this);
}
return eventManager;
}
@Override
public PrefixMapping getPrefixMapping() {
return prefixMapping;
}
@Override
public Reifier getReifier() {
return reifier;
}
@Override
public GraphStatisticsHandler getStatisticsHandler() {
return null;
}
@Override
public TransactionHandler getTransactionHandler() {
// TODO Auto-generated method stub
return null;
}
@Override
public boolean isClosed() {
// TODO Auto-generated method stub
return false;
}
@Override
public boolean isEmpty() {
return (size() == 0);
}
@Override
public boolean isIsomorphicWith(Graph arg0) {
throw new UnsupportedOperationException("isIsomorphicWith() not supported " +
"by SPARQL graphs");
}
@Override
public QueryHandler queryHandler() {
if (queryHandler == null) {
queryHandler = new SimpleQueryHandler(this);
}
return queryHandler;
}
@Override
public int size() {
int size = find(null, null, null).toList().size();
return size;
}
private final static Capabilities capabilities = new Capabilities() {
public boolean addAllowed() {
return false;
}
public boolean addAllowed(boolean everyTriple) {
return false;
}
public boolean canBeEmpty() {
return true;
}
public boolean deleteAllowed() {
return false;
}
public boolean deleteAllowed(boolean everyTriple) {
return false;
}
public boolean findContractSafe() {
return true;
}
public boolean handlesLiteralTyping() {
return true;
}
public boolean iteratorRemoveAllowed() {
return false;
}
public boolean sizeAccurate() {
return true;
}
};
private boolean execAsk(String queryStr) {
try {
return rdfService.sparqlAskQuery(queryStr);
} catch (RDFServiceException rdfse) {
throw new RuntimeException(rdfse);
}
}
private ResultSet execSelect(String queryStr) {
try {
return JSONInput.fromJSON(rdfService.sparqlSelectQuery(
queryStr, RDFService.ResultFormat.JSON));
} catch (RDFServiceException rdfse) {
throw new RuntimeException(rdfse);
}
}
/*
*
* see http://www.python.org/doc/2.5.2/ref/strings.html
* or see jena's n3 grammar jena/src/com/hp/hpl/jena/n3/n3.g
*/
protected static void pyString(StringBuffer sbuff, String s)
{
for (int i = 0; i < s.length(); i++) {
char c = s.charAt(i);
// Escape escapes and quotes
if (c == '\\' || c == '"' )
{
sbuff.append('\\') ;
sbuff.append(c) ;
continue ;
}
// Whitespace
if (c == '\n'){ sbuff.append("\\n");continue; }
if (c == '\t'){ sbuff.append("\\t");continue; }
if (c == '\r'){ sbuff.append("\\r");continue; }
if (c == '\f'){ sbuff.append("\\f");continue; }
if (c == '\b'){ sbuff.append("\\b");continue; }
if( c == 7 ) { sbuff.append("\\a");continue; }
// Output as is (subject to UTF-8 encoding on output that is)
sbuff.append(c) ;
// // Unicode escapes
// // c < 32, c >= 127, not whitespace or other specials
// String hexstr = Integer.toHexString(c).toUpperCase();
// int pad = 4 - hexstr.length();
// sbuff.append("\\u");
// for (; pad > 0; pad--)
// sbuff.append("0");
// sbuff.append(hexstr);
}
}
public static Model createRDFServiceModel(final RDFServiceGraph g) {
Model m = ModelFactory.createModelForGraph(g);
m.register(new StatementListener() {
@Override
public void notifyEvent(Model m, Object event) {
ChangeSet changeSet = g.getRDFService().manufactureChangeSet();
changeSet.addPreChangeEvent(event);
try {
g.getRDFService().changeSetUpdate(changeSet);
} catch (RDFServiceException e) {
throw new RuntimeException(e);
}
}
});
return m;
}
}
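
A minimal usage sketch of the factory method above, assuming a two-argument RDFServiceGraph(RDFService, String graphURI) constructor matching the getRDFService()/getGraphURI() accessors; the graph URI is illustrative.

import com.hp.hpl.jena.rdf.model.Model;
import edu.cornell.mannlib.vitro.webapp.dao.jena.RDFServiceGraph;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;

public class RDFServiceGraphSketch {
    public static Model wrapGraph(RDFService rdfService) {
        // Assumed constructor; the graph URI is a placeholder.
        RDFServiceGraph graph = new RDFServiceGraph(
                rdfService, "http://example.org/graph/example");
        // Statement writes pass through the graph to the RDFService;
        // model-level events are forwarded as ChangeSet pre-change events
        // by the listener registered in createRDFServiceModel().
        return RDFServiceGraph.createRDFServiceModel(graph);
    }
}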

View file

@ -0,0 +1,185 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.dao.jena;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import java.util.Iterator;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.hp.hpl.jena.graph.Graph;
import com.hp.hpl.jena.graph.GraphEvents;
import com.hp.hpl.jena.graph.GraphUtil;
import com.hp.hpl.jena.graph.Node;
import com.hp.hpl.jena.graph.Triple;
import com.hp.hpl.jena.graph.impl.SimpleBulkUpdateHandler;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.Statement;
import com.hp.hpl.jena.rdf.model.StmtIterator;
import com.hp.hpl.jena.sparql.util.graph.GraphFactory;
import com.hp.hpl.jena.util.iterator.ExtendedIterator;
import edu.cornell.mannlib.vitro.webapp.rdfservice.ChangeSet;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException;
public class RDFServiceGraphBulkUpdater extends SimpleBulkUpdateHandler {
private static final Log log = LogFactory.getLog(RDFServiceGraphBulkUpdater.class);
private RDFServiceGraph graph;
public RDFServiceGraphBulkUpdater(RDFServiceGraph graph) {
super(graph);
this.graph = graph;
}
@Override
public void add(Triple[] arg0) {
Graph g = GraphFactory.createPlainGraph();
for (int i = 0 ; i < arg0.length ; i++) {
g.add(arg0[i]);
}
add(g);
}
@Override
public void add(List<Triple> arg0) {
Graph g = GraphFactory.createPlainGraph();
for (Triple t : arg0) {
g.add(t);
}
add(g);
}
@Override
public void add(Iterator<Triple> arg0) {
Graph g = GraphFactory.createPlainGraph();
while (arg0.hasNext()) {
Triple t = arg0.next();
g.add(t);
}
add(g);
}
@Override
public void add(Graph arg0) {
add(arg0, false);
}
@Override
public void add(Graph g, boolean arg1) {
Model[] model = separateStatementsWithBlankNodes(g);
addModel(model[1] /* nonBlankNodeModel */);
// replace following call with different method
addModel(model[0] /*blankNodeModel*/);
}
/**
* Returns a pair of models. The first contains any statement containing at
* least one blank node. The second contains all remaining statements.
* @param g
* @return
*/
private Model[] separateStatementsWithBlankNodes(Graph g) {
Model gm = ModelFactory.createModelForGraph(g);
Model blankNodeModel = ModelFactory.createDefaultModel();
Model nonBlankNodeModel = ModelFactory.createDefaultModel();
StmtIterator sit = gm.listStatements();
while (sit.hasNext()) {
Statement stmt = sit.nextStatement();
if (!stmt.getSubject().isAnon() && !stmt.getObject().isAnon()) {
nonBlankNodeModel.add(stmt);
} else {
blankNodeModel.add(stmt);
}
}
Model[] result = new Model[2];
result[0] = blankNodeModel;
result[1] = nonBlankNodeModel;
return result;
}
@Override
public void delete(Graph g, boolean withReifications) {
delete(g);
}
@Override
public void delete(Graph g) {
deleteModel(ModelFactory.createModelForGraph(g));
}
public void addModel(Model model) {
ChangeSet changeSet = graph.getRDFService().manufactureChangeSet();
ByteArrayOutputStream out = new ByteArrayOutputStream();
model.write(out, "N-TRIPLE");
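// N-Triples output is also valid N3, so it satisfies the serialization format declared below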
changeSet.addAddition(new ByteArrayInputStream(
out.toByteArray()), RDFService.ModelSerializationFormat.N3,
graph.getGraphURI());
try {
graph.getRDFService().changeSetUpdate(changeSet);
} catch (RDFServiceException rdfse) {
throw new RuntimeException(rdfse);
}
}
public void deleteModel(Model model) {
ChangeSet changeSet = graph.getRDFService().manufactureChangeSet();
ByteArrayOutputStream out = new ByteArrayOutputStream();
model.write(out, "N-TRIPLE");
changeSet.addRemoval(new ByteArrayInputStream(
out.toByteArray()), RDFService.ModelSerializationFormat.N3,
graph.getGraphURI());
try {
graph.getRDFService().changeSetUpdate(changeSet);
} catch (RDFServiceException rdfse) {
throw new RuntimeException(rdfse);
}
}
@Override
public void removeAll() {
removeAll(graph);
notifyRemoveAll();
}
protected void notifyRemoveAll() {
manager.notifyEvent(graph, GraphEvents.removeAll);
}
@Override
public void remove(Node s, Node p, Node o) {
removeAll(graph, s, p, o);
manager.notifyEvent(graph, GraphEvents.remove(s, p, o));
}
public static void removeAll(Graph g, Node s, Node p, Node o)
{
ExtendedIterator<Triple> it = g.find( s, p, o );
try {
while (it.hasNext()) {
Triple t = it.next();
g.delete(t);
it.remove();
}
}
finally {
it.close();
}
}
public static void removeAll( Graph g )
{
g.getBulkUpdateHandler().delete(g);
}
}
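
A sketch of driving the updater above through the graph's bulk handler; the RDFServiceGraph is assumed to be constructed as in the earlier sketch.

import java.util.Arrays;
import com.hp.hpl.jena.graph.Node;
import com.hp.hpl.jena.graph.Triple;
import edu.cornell.mannlib.vitro.webapp.dao.jena.RDFServiceGraph;

public class BulkAddSketch {
    public static void addTwo(RDFServiceGraph graph) {
        Node s = Node.createURI("http://example.org/s");
        Node p = Node.createURI("http://example.org/p");
        // add(List) collects the triples into a temporary graph, separates
        // blank-node from non-blank-node statements, serializes each model
        // as N-Triples, and submits them as ChangeSet additions.
        graph.getBulkUpdateHandler().add(Arrays.asList(
                new Triple(s, p, Node.createURI("http://example.org/o1")),
                new Triple(s, p, Node.createURI("http://example.org/o2"))));
    }
}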

View file

@ -0,0 +1,244 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.dao.jena;
import java.text.Collator;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.hp.hpl.jena.graph.GraphMaker;
import com.hp.hpl.jena.query.Dataset;
import com.hp.hpl.jena.query.Query;
import com.hp.hpl.jena.query.QueryExecution;
import com.hp.hpl.jena.query.QueryExecutionFactory;
import com.hp.hpl.jena.query.QueryFactory;
import com.hp.hpl.jena.rdf.model.Literal;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelMaker;
import com.hp.hpl.jena.rdf.model.ModelReader;
import com.hp.hpl.jena.rdf.model.RDFNode;
import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.rdf.model.Statement;
import com.hp.hpl.jena.rdf.model.StmtIterator;
import com.hp.hpl.jena.sdb.SDBFactory;
import com.hp.hpl.jena.sdb.Store;
import com.hp.hpl.jena.util.iterator.ExtendedIterator;
import com.hp.hpl.jena.util.iterator.WrappedIterator;
import com.hp.hpl.jena.vocabulary.RDFS;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceFactory;
public class RDFServiceModelMaker implements ModelMaker {
private final static Log log = LogFactory.getLog(RDFServiceModelMaker.class);
private RDFServiceFactory rdfServiceFactory;
public static final String METADATA_MODEL_URI =
"http://vitro.mannlib.cornell.edu/ns/vitro/sdb/metadata";
public static final String HAS_NAMED_MODEL_URI =
"http://vitro.mannlib.cornell.edu/ns/vitro/sdb/hasNamedModel";
private Resource sdbResource; // a resource representing the SDB database; note: never assigned in this class as written
public RDFServiceModelMaker(RDFServiceFactory rdfServiceFactory) {
this.rdfServiceFactory = rdfServiceFactory;
}
protected RDFService getRDFService() {
return rdfServiceFactory.getRDFService();
}
Model getMetadataModel() {
return getModel(METADATA_MODEL_URI);
}
public void close() {
// n.a.
}
public Model createModel(String modelName) {
Model model = getModel(modelName);
Model metadataModel = getMetadataModel();
try {
metadataModel.add(
sdbResource,metadataModel.getProperty(
HAS_NAMED_MODEL_URI), modelName);
} finally {
metadataModel.close();
}
return model;
}
public Model createModel(String arg0, boolean arg1) {
// TODO Figure out if we can offer a "create if not found" option using SDB
return createModel(arg0);
}
public GraphMaker getGraphMaker() {
throw new UnsupportedOperationException(
"GraphMaker not supported by " + this.getClass().getName());
}
public boolean hasModel(String arg0) {
Model metadataModel = getMetadataModel();
try {
StmtIterator stmtIt = metadataModel.listStatements(
sdbResource, metadataModel.getProperty(
HAS_NAMED_MODEL_URI), arg0);
try {
return stmtIt.hasNext();
} finally {
if (stmtIt != null) {
stmtIt.close();
}
}
} finally {
metadataModel.close();
}
}
public ExtendedIterator<String> listModels() {
Model metadataModel = getMetadataModel();
try {
return listModelNames(metadataModel);
} finally {
metadataModel.close();
}
}
private ExtendedIterator<String> listModelNames(Model metadataModel) {
Set<String> modelNameSet = new HashSet<String>();
Iterator<RDFNode> metadataNameIt = metadataModel.listObjectsOfProperty(
metadataModel.getProperty(HAS_NAMED_MODEL_URI));
while (metadataNameIt.hasNext()) {
RDFNode rdfNode = metadataNameIt.next();
if (rdfNode.isLiteral()) {
modelNameSet.add(((Literal) rdfNode).getLexicalForm());
}
}
RDFService service = getRDFService();
try {
modelNameSet.addAll(service.getGraphURIs());
} catch (RDFServiceException e) {
throw new RuntimeException(e);
} finally {
service.close();
}
List<String> modelNameList = new ArrayList<String>();
modelNameList.addAll(modelNameSet);
Collections.sort(modelNameList, Collator.getInstance());
return WrappedIterator.create(modelNameList.iterator());
}
public Model openModel(String arg0, boolean arg1) {
RDFService service = getRDFService();
try {
Dataset dataset = new RDFServiceDataset(service);
return dataset.getNamedModel(arg0);
} finally {
service.close();
}
}
public void removeModel(String arg0) {
Model m = getModel(arg0);
m.removeAll(null,null,null);
Model metadataModel = getMetadataModel();
try {
metadataModel.remove(sdbResource, metadataModel.getProperty(
HAS_NAMED_MODEL_URI),metadataModel.createLiteral(arg0));
} finally {
metadataModel.close();
}
}
public Model addDescription(Model arg0, Resource arg1) {
throw new UnsupportedOperationException(
"addDescription not supported by " + this.getClass().getName());
}
public Model createModelOver(String arg0) {
throw new UnsupportedOperationException(
"createModelOver not supported by " + this.getClass().getName());
}
public Model getDescription() {
    throw new UnsupportedOperationException(
            "getDescription not supported by " + this.getClass().getName());
}
public Model getDescription(Resource arg0) {
throw new UnsupportedOperationException(
"getDescription not supported by "+this.getClass().getName());
}
public Model openModel() {
RDFService service = getRDFService();
try {
Dataset dataset = new RDFServiceDataset(service);
return dataset.getDefaultModel();
} finally {
service.close();
}
}
public Model createDefaultModel() {
return openModel();
}
public Model createFreshModel() {
throw new UnsupportedOperationException(
"createFreshModel not supported by " + this.getClass().getName());
}
/**
* @deprecated
*/
public Model createModel() {
return openModel();
}
/**
* @deprecated
*/
public Model getModel() {
return openModel();
}
public Model openModel(String arg0) {
return openModel();
}
public Model openModelIfPresent(String arg0) {
return (this.hasModel(arg0))
? openModel(arg0, false)
: null;
}
public Model getModel(String modelName) {
return openModel(modelName, true);
}
public Model getModel(String arg0, ModelReader arg1) {
throw new UnsupportedOperationException(
"getModel(String, ModelReader) not supported by " +
this.getClass().getName());
}
}
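
A sketch of listing and opening models through the maker above; obtaining the RDFServiceFactory is left to the caller, as elsewhere in this commit.

import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.util.iterator.ExtendedIterator;
import edu.cornell.mannlib.vitro.webapp.dao.jena.RDFServiceModelMaker;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceFactory;

public class ModelMakerSketch {
    public static void listAndOpen(RDFServiceFactory factory) {
        RDFServiceModelMaker maker = new RDFServiceModelMaker(factory);
        ExtendedIterator<String> names = maker.listModels();
        try {
            while (names.hasNext()) {
                // Names come from the metadata model plus
                // RDFService.getGraphURIs(), sorted by collation order.
                Model m = maker.getModel(names.next());
                m.close();
            }
        } finally {
            names.close();
        }
    }
}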

View file

@ -14,14 +14,14 @@ import com.hp.hpl.jena.shared.Lock;
*/
public class SimpleOntModelSelector implements OntModelSelector {
private OntModel fullModel;
private OntModel aboxModel;
private OntModel applicationMetadataModel;
private OntModel tboxModel;
private OntModel userAccountsModel;
protected OntModel fullModel;
protected OntModel aboxModel;
protected OntModel applicationMetadataModel;
protected OntModel tboxModel;
protected OntModel userAccountsModel;
private OntModelSpec DEFAULT_ONT_MODEL_SPEC = OntModelSpec.OWL_MEM;
private OntModel displayModel;
protected OntModelSpec DEFAULT_ONT_MODEL_SPEC = OntModelSpec.OWL_MEM;
protected OntModel displayModel;
/**
* Construct an OntModelSelector with a bunch of empty models

View file

@ -0,0 +1,15 @@
package edu.cornell.mannlib.vitro.webapp.dao.jena;
import com.hp.hpl.jena.ontology.OntModel;
public class SingleContentOntModelSelector extends SimpleOntModelSelector {
public SingleContentOntModelSelector(OntModel contentModel,
OntModel displayModel,
OntModel userAccountsModel) {
super(contentModel);
super.displayModel = displayModel;
super.userAccountsModel = userAccountsModel;
}
}
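
A sketch of wiring the selector above from three in-memory models; it assumes the single-argument SimpleOntModelSelector constructor uses the content model as the ABox, TBox, and full model.

import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.ontology.OntModelSpec;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import edu.cornell.mannlib.vitro.webapp.dao.jena.SingleContentOntModelSelector;

public class SelectorSketch {
    public static SingleContentOntModelSelector build() {
        OntModel content = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM);
        OntModel display = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM);
        OntModel userAccounts = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM);
        // The display and user-accounts models override the superclass
        // fields; everything else is assumed to be served from the content model.
        return new SingleContentOntModelSelector(content, display, userAccounts);
    }
}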

View file

@ -0,0 +1,64 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.dao.jena;
import java.util.ArrayList;
import java.util.Iterator;
import com.hp.hpl.jena.graph.Node;
import com.hp.hpl.jena.query.Dataset;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.shared.Lock;
import com.hp.hpl.jena.sparql.core.DatasetGraph;
public class SparqlDataset implements Dataset {
private SparqlDatasetGraph g;
public SparqlDataset(SparqlDatasetGraph g) {
this.g = g;
}
@Override
public DatasetGraph asDatasetGraph() {
return g;
}
@Override
public void close() {
g.close();
}
@Override
public boolean containsNamedModel(String arg0) {
return g.containsGraph(Node.createURI(arg0));
}
@Override
public Model getDefaultModel() {
return ModelFactory.createModelForGraph(g.getDefaultGraph());
}
@Override
public Lock getLock() {
return g.getLock();
}
@Override
public Model getNamedModel(String arg0) {
return ModelFactory.createModelForGraph(g.getGraph(Node.createURI(arg0)));
}
@Override
public Iterator<String> listNames() {
ArrayList<String> nameList = new ArrayList<String>();
Iterator<Node> nodeIt = g.listGraphNodes();
while (nodeIt.hasNext()) {
Node n = nodeIt.next();
nameList.add(n.getURI());
}
return nameList.iterator();
}
}
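
A sketch pairing the dataset wrapper above with a SparqlDatasetGraph over a remote endpoint; the endpoint URI and graph URI are illustrative.

import com.hp.hpl.jena.query.Dataset;
import com.hp.hpl.jena.rdf.model.Model;
import edu.cornell.mannlib.vitro.webapp.dao.jena.SparqlDataset;
import edu.cornell.mannlib.vitro.webapp.dao.jena.SparqlDatasetGraph;

public class SparqlDatasetSketch {
    public static Model openNamedModel() {
        Dataset dataset = new SparqlDataset(
                new SparqlDatasetGraph("http://localhost:8080/sparql"));
        // Each named model is backed by a SparqlGraph over the same endpoint.
        return dataset.getNamedModel("http://example.org/graph/example");
    }
}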

View file

@ -0,0 +1,266 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.dao.jena;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import org.openrdf.model.Resource;
import org.openrdf.repository.Repository;
import org.openrdf.repository.RepositoryConnection;
import org.openrdf.repository.RepositoryException;
import org.openrdf.repository.RepositoryResult;
import org.openrdf.repository.http.HTTPRepository;
import com.hp.hpl.jena.graph.Graph;
import com.hp.hpl.jena.graph.Node;
import com.hp.hpl.jena.graph.Triple;
import com.hp.hpl.jena.query.Query;
import com.hp.hpl.jena.query.QueryExecution;
import com.hp.hpl.jena.query.QueryExecutionFactory;
import com.hp.hpl.jena.query.QueryFactory;
import com.hp.hpl.jena.query.QuerySolution;
import com.hp.hpl.jena.query.ResultSet;
import com.hp.hpl.jena.shared.Lock;
import com.hp.hpl.jena.shared.LockMRSW;
import com.hp.hpl.jena.sparql.core.DatasetGraph;
import com.hp.hpl.jena.sparql.core.Quad;
import com.hp.hpl.jena.sparql.resultset.ResultSetMem;
import com.hp.hpl.jena.sparql.util.Context;
import com.hp.hpl.jena.util.iterator.SingletonIterator;
import com.hp.hpl.jena.util.iterator.WrappedIterator;
public class SparqlDatasetGraph implements DatasetGraph {
private String endpointURI;
private Repository repository;
private Lock lock = new LockMRSW();
public SparqlDatasetGraph(String endpointURI) {
this.endpointURI = endpointURI;
this.repository = new HTTPRepository(endpointURI);
}
private Graph getGraphFor(Quad q) {
return getGraphFor(q.getGraph());
}
private Graph getGraphFor(Node g) {
return (g == Node.ANY)
? new SparqlGraph(endpointURI)
: new SparqlGraph(endpointURI, g.getURI());
}
@Override
public void add(Quad arg0) {
getGraphFor(arg0).add(new Triple(arg0.getSubject(), arg0.getPredicate(), arg0.getObject()));
}
@Override
public void addGraph(Node arg0, Graph arg1) {
// TODO Auto-generated method stub
}
@Override
public void close() {
// TODO Auto-generated method stub
}
@Override
public boolean contains(Quad arg0) {
return getGraphFor(arg0).contains(new Triple(arg0.getSubject(), arg0.getPredicate(), arg0.getObject()));
}
@Override
public boolean contains(Node arg0, Node arg1, Node arg2, Node arg3) {
return getGraphFor(arg0).contains(arg1, arg2, arg3);
}
@Override
public boolean containsGraph(Node arg0) {
// TODO Auto-generated method stub
return true;
}
@Override
public void delete(Quad arg0) {
getGraphFor(arg0).delete(new Triple(arg0.getSubject(), arg0.getPredicate(), arg0.getObject()));
}
@Override
public void deleteAny(Node arg0, Node arg1, Node arg2, Node arg3) {
// TODO check this
getGraphFor(arg0).delete(new Triple(arg1, arg2, arg3));
}
@Override
public Iterator<Quad> find() {
return find(Node.ANY, Node.ANY, Node.ANY, Node.ANY);
}
@Override
public Iterator<Quad> find(Quad arg0) {
    return find(arg0.getGraph(), arg0.getSubject(), arg0.getPredicate(), arg0.getObject());
}
@Override
public Iterator<Quad> find(Node graph, Node subject, Node predicate, Node object) {
if (!isVar(subject) && !isVar(predicate) && !isVar(object) && !isVar(graph)) {
    if (contains(graph, subject, predicate, object)) {
        return new SingletonIterator(new Quad(graph, subject, predicate, object));
    } else {
        return WrappedIterator.create(Collections.EMPTY_LIST.iterator());
    }
}
StringBuffer findQuery = new StringBuffer("SELECT * WHERE { \n");
String graphURI = !isVar(graph) ? graph.getURI() : null;
findQuery.append(" GRAPH ");
if (graphURI != null) {
findQuery.append(" <" + graphURI + ">");
} else {
findQuery.append("?g");
}
findQuery.append(" { ");
findQuery.append(SparqlGraph.sparqlNode(subject, "?s"))
.append(" ")
.append(SparqlGraph.sparqlNode(predicate, "?p"))
.append(" ")
.append(SparqlGraph.sparqlNode(object, "?o"));
findQuery.append(" } ");
findQuery.append("\n}");
//log.info(findQuery.toString());
ResultSet rs = execSelect(findQuery.toString());
List<Quad> quadlist = new ArrayList<Quad>();
while (rs.hasNext()) {
QuerySolution soln = rs.nextSolution();
Quad q = new Quad(isVar(graph) ? soln.get("?g").asNode() : graph,
isVar(subject) ? soln.get("?s").asNode() : subject,
isVar(predicate) ? soln.get("?p").asNode() : predicate,
isVar(object) ? soln.get("?o").asNode() : object);
//log.info(t);
quadlist.add(q);
}
//log.info(quadlist.size() + " results");
return WrappedIterator.create(quadlist.iterator());
}
@Override
public Iterator<Quad> findNG(Node arg0, Node arg1, Node arg2, Node arg3) {
// TODO check this
return find(arg0, arg1, arg2, arg3);
}
@Override
public Context getContext() {
// TODO Auto-generated method stub
return null;
}
@Override
public Graph getDefaultGraph() {
return new SparqlGraph(endpointURI);
}
@Override
public Graph getGraph(Node arg0) {
return new SparqlGraph(endpointURI, arg0.getURI());
}
@Override
public Lock getLock() {
return lock;
}
@Override
public boolean isEmpty() {
// TODO Auto-generated method stub
return false;
}
@Override
public Iterator<Node> listGraphNodes() {
List<Node> graphNodeList = new ArrayList<Node>();
try {
RepositoryConnection conn = getConnection();
try {
RepositoryResult<Resource> conResult = conn.getContextIDs();
while (conResult.hasNext()) {
Resource con = conResult.next();
graphNodeList.add(Node.createURI(con.stringValue()));
}
} finally {
conn.close();
}
} catch (RepositoryException re) {
throw new RuntimeException(re);
}
return graphNodeList.iterator();
}
private RepositoryConnection getConnection() {
try {
return this.repository.getConnection();
} catch (RepositoryException e) {
throw new RuntimeException(e);
}
}
@Override
public void removeGraph(Node arg0) {
// TODO Auto-generated method stub
}
@Override
public void setDefaultGraph(Graph arg0) {
// TODO Auto-generated method stub
}
@Override
public long size() {
// TODO Auto-generated method stub
return 0;
}
private boolean isVar(Node node) {
return (node == null || node.isVariable() || node == Node.ANY);
}
private ResultSet execSelect(String queryStr) {
// long startTime1 = System.currentTimeMillis();
// try {
//
// RepositoryConnection conn = getConnection();
// try {
// GraphQuery q = conn.prepareGraphQuery(QueryLanguage.SPARQL, queryStr);
// q.evaluate();
// } catch (MalformedQueryException e) {
// throw new RuntimeException(e);
// } finally {
// conn.close();
// }
// } catch (Exception re) {
// //log.info(re,re);
// }
// log.info((System.currentTimeMillis() - startTime1) + " to execute via sesame");
long startTime = System.currentTimeMillis();
Query selectQuery = QueryFactory.create(queryStr);
QueryExecution qe = QueryExecutionFactory.sparqlService(endpointURI, selectQuery);
try {
return new ResultSetMem(qe.execSelect());
} finally {
//log.info((System.currentTimeMillis() - startTime) + " to execute via Jena");
qe.close();
}
}
}
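
A sketch of the quad-pattern lookup above; Node.ANY wildcards become SPARQL variables in the SELECT that find() generates.

import java.util.Iterator;
import com.hp.hpl.jena.graph.Node;
import com.hp.hpl.jena.sparql.core.Quad;
import edu.cornell.mannlib.vitro.webapp.dao.jena.SparqlDatasetGraph;

public class QuadFindSketch {
    public static void listLabels(SparqlDatasetGraph dsg) {
        // Concrete predicate; the graph, subject, and object wildcards
        // are rendered as ?g, ?s, and ?o in the generated query.
        Iterator<Quad> it = dsg.find(Node.ANY, Node.ANY,
                Node.createURI("http://www.w3.org/2000/01/rdf-schema#label"),
                Node.ANY);
        while (it.hasNext()) {
            System.out.println(it.next());
        }
    }
}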

View file

@ -0,0 +1,508 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.dao.jena;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.openrdf.query.MalformedQueryException;
import org.openrdf.query.QueryLanguage;
import org.openrdf.query.Update;
import org.openrdf.query.UpdateExecutionException;
import org.openrdf.repository.Repository;
import org.openrdf.repository.RepositoryConnection;
import org.openrdf.repository.RepositoryException;
import org.openrdf.repository.http.HTTPRepository;
import com.hp.hpl.jena.graph.BulkUpdateHandler;
import com.hp.hpl.jena.graph.Capabilities;
import com.hp.hpl.jena.graph.Graph;
import com.hp.hpl.jena.graph.GraphEventManager;
import com.hp.hpl.jena.graph.GraphStatisticsHandler;
import com.hp.hpl.jena.graph.Node;
import com.hp.hpl.jena.graph.Reifier;
import com.hp.hpl.jena.graph.TransactionHandler;
import com.hp.hpl.jena.graph.Triple;
import com.hp.hpl.jena.graph.TripleMatch;
import com.hp.hpl.jena.graph.impl.GraphWithPerform;
import com.hp.hpl.jena.graph.impl.SimpleEventManager;
import com.hp.hpl.jena.graph.query.QueryHandler;
import com.hp.hpl.jena.graph.query.SimpleQueryHandler;
import com.hp.hpl.jena.query.Query;
import com.hp.hpl.jena.query.QueryExecution;
import com.hp.hpl.jena.query.QueryExecutionFactory;
import com.hp.hpl.jena.query.QueryFactory;
import com.hp.hpl.jena.query.QuerySolution;
import com.hp.hpl.jena.query.ResultSet;
import com.hp.hpl.jena.shared.AddDeniedException;
import com.hp.hpl.jena.shared.DeleteDeniedException;
import com.hp.hpl.jena.shared.PrefixMapping;
import com.hp.hpl.jena.shared.impl.PrefixMappingImpl;
import com.hp.hpl.jena.sparql.resultset.ResultSetMem;
import com.hp.hpl.jena.util.iterator.ExtendedIterator;
import com.hp.hpl.jena.util.iterator.SingletonIterator;
import com.hp.hpl.jena.util.iterator.WrappedIterator;
public class SparqlGraph implements GraphWithPerform {
private String endpointURI;
private String graphURI;
private static final Log log = LogFactory.getLog(SparqlGraph.class);
private BulkUpdateHandler bulkUpdateHandler;
private PrefixMapping prefixMapping = new PrefixMappingImpl();
private GraphEventManager eventManager;
private Reifier reifier = new EmptyReifier(this);
private GraphStatisticsHandler graphStatisticsHandler;
private TransactionHandler transactionHandler;
private QueryHandler queryHandler;
private Repository repository;
/**
 * Constructs a SparqlGraph for the union of named graphs in a remote repository
 * @param endpointURI
 */
public SparqlGraph(String endpointURI) {
this(endpointURI, null);
}
/**
 * Constructs a SparqlGraph for a particular named graph in a remote repository
 * @param endpointURI
 * @param graphURI
 */
public SparqlGraph(String endpointURI, String graphURI) {
this.endpointURI = endpointURI;
this.graphURI = graphURI;
this.repository = new HTTPRepository(endpointURI);
}
public String getEndpointURI() {
return endpointURI;
}
public String getGraphURI() {
return graphURI;
}
public RepositoryConnection getConnection() {
try {
return this.repository.getConnection();
} catch (RepositoryException e) {
throw new RuntimeException(e);
}
}
@Override
public void add(Triple arg0) throws AddDeniedException {
performAdd(arg0);
}
public void executeUpdate(String updateString) {
try {
RepositoryConnection conn = getConnection();
try {
Update u = conn.prepareUpdate(QueryLanguage.SPARQL, updateString);
u.execute();
} catch (MalformedQueryException e) {
throw new RuntimeException(e);
} catch (UpdateExecutionException e) {
log.error(e,e);
log.error("Update command: \n" + updateString);
throw new RuntimeException(e);
} finally {
conn.close();
}
} catch (RepositoryException re) {
throw new RuntimeException(re);
}
}
@Override
public void performAdd(Triple t) {
//log.info("adding " + t);
String updateString = "INSERT DATA { " + ((graphURI != null) ? "GRAPH <" + graphURI + "> { " : "" )
+ sparqlNodeUpdate(t.getSubject(), "") + " "
+ sparqlNodeUpdate(t.getPredicate(), "") + " "
+ sparqlNodeUpdate(t.getObject(), "") + " } "
+ ((graphURI != null) ? " } " : "");
if (graphURI != null) {
    log.debug("update to graph " + graphURI);
}
log.debug(updateString);
executeUpdate(updateString);
}
@Override
public void performDelete(Triple t) {
String updateString = "DELETE DATA { " + ((graphURI != null) ? "GRAPH <" + graphURI + "> { " : "" )
+ sparqlNodeUpdate(t.getSubject(), "") + " "
+ sparqlNodeUpdate(t.getPredicate(), "") + " "
+ sparqlNodeUpdate(t.getObject(), "") + " } "
+ ((graphURI != null) ? " } " : "");
//log.info(updateString);
executeUpdate(updateString);
}
public void removeAll() {
// deletes everything left in this graph; SparqlGraphBulkUpdater.removeAll(Graph)
// uses this to flush out any remaining blank nodes
String updateString = "DELETE { ?s ?p ?o } WHERE { \n" +
((getGraphURI() != null) ? ("GRAPH <" + getGraphURI() + "> { \n") : ("")) +
" ?s ?p ?o \n" +
((getGraphURI() != null) ? "} \n" : "") +
"}";
executeUpdate(updateString);
}
@Override
public void close() {
// can't close a remote endpoint
}
@Override
public boolean contains(Triple arg0) {
return contains(arg0.getSubject(), arg0.getPredicate(), arg0.getObject());
}
@Override
public boolean contains(Node subject, Node predicate, Node object) {
if (subject.isBlank() || predicate.isBlank() || object.isBlank()) {
return false;
}
StringBuffer containsQuery = new StringBuffer("ASK { \n");
if (graphURI != null) {
containsQuery.append(" GRAPH <" + graphURI + "> { ");
}
containsQuery.append(sparqlNode(subject, "?s"))
.append(" ")
.append(sparqlNode(predicate, "?p"))
.append(" ")
.append(sparqlNode(object, "?o"));
if (graphURI != null) {
containsQuery.append(" } \n");
}
containsQuery.append("\n}");
boolean result = execAsk(containsQuery.toString());
return result;
}
@Override
public void delete(Triple arg0) throws DeleteDeniedException {
performDelete(arg0);
}
@Override
public boolean dependsOn(Graph arg0) {
return false; // who knows?
}
@Override
public ExtendedIterator<Triple> find(TripleMatch arg0) {
//log.info("find(TripleMatch) " + arg0);
Triple t = arg0.asTriple();
return find(t.getSubject(), t.getPredicate(), t.getObject());
}
public static String sparqlNode(Node node, String varName) {
if (node == null || node.isVariable()) {
return varName;
} else if (node.isBlank()) {
return "<fake:blank>"; // or throw exception?
} else if (node.isURI()) {
StringBuffer uriBuff = new StringBuffer();
return uriBuff.append("<").append(node.getURI()).append(">").toString();
} else if (node.isLiteral()) {
StringBuffer literalBuff = new StringBuffer();
literalBuff.append("\"");
pyString(literalBuff, node.getLiteralLexicalForm());
literalBuff.append("\"");
if (node.getLiteralDatatypeURI() != null) {
literalBuff.append("^^<").append(node.getLiteralDatatypeURI()).append(">");
} else if (node.getLiteralLanguage() != null && !node.getLiteralLanguage().isEmpty()) {
literalBuff.append("@").append(node.getLiteralLanguage());
}
return literalBuff.toString();
} else {
return varName;
}
}
public static String sparqlNodeUpdate(Node node, String varName) {
if (node.isBlank()) {
return "_:" + node.getBlankNodeLabel().replaceAll("\\W", "");
} else {
return sparqlNode(node, varName);
}
}
public static String sparqlNodeDelete(Node node, String varName) {
if (node.isBlank()) {
return "?" + node.getBlankNodeLabel().replaceAll("\\W", "");
} else {
return sparqlNode(node, varName);
}
}
@Override
public ExtendedIterator<Triple> find(Node subject, Node predicate, Node object) {
if (!isVar(subject) && !isVar(predicate) && !isVar(object)) {
if (contains(subject, predicate, object)) {
return new SingletonIterator(new Triple(subject, predicate, object));
} else {
return WrappedIterator.create(Collections.EMPTY_LIST.iterator());
}
}
StringBuffer findQuery = new StringBuffer("SELECT * WHERE { \n");
if (graphURI != null) {
findQuery.append(" GRAPH <" + graphURI + "> { ");
}
findQuery.append(sparqlNode(subject, "?s"))
.append(" ")
.append(sparqlNode(predicate, "?p"))
.append(" ")
.append(sparqlNode(object, "?o"));
if (graphURI != null) {
findQuery.append(" } ");
}
findQuery.append("\n}");
String queryString = findQuery.toString();
//log.info(queryString);
ResultSet rs = execSelect(queryString);
List<Triple> triplist = new ArrayList<Triple>();
while (rs.hasNext()) {
QuerySolution soln = rs.nextSolution();
Triple t = new Triple(isVar(subject) ? soln.get("?s").asNode() : subject,
isVar(predicate) ? soln.get("?p").asNode() : predicate,
isVar(object) ? soln.get("?o").asNode() : object);
//log.info(t);
triplist.add(t);
}
//log.info(triplist.size() + " results");
return WrappedIterator.create(triplist.iterator());
}
private boolean isVar(Node node) {
return (node == null || node.isVariable() || node == Node.ANY);
}
@Override
public BulkUpdateHandler getBulkUpdateHandler() {
if (this.bulkUpdateHandler == null) {
this.bulkUpdateHandler = new SparqlGraphBulkUpdater(this);
}
return this.bulkUpdateHandler;
}
@Override
public Capabilities getCapabilities() {
return capabilities;
}
@Override
public GraphEventManager getEventManager() {
if (eventManager == null) {
eventManager = new SimpleEventManager(this);
}
return eventManager;
}
@Override
public PrefixMapping getPrefixMapping() {
return prefixMapping;
}
@Override
public Reifier getReifier() {
//if (reifier == null) {
// reifier = new SimpleReifier(this, ReificationStyle.Standard);
//}
return reifier;
}
@Override
public GraphStatisticsHandler getStatisticsHandler() {
return null;
}
@Override
public TransactionHandler getTransactionHandler() {
// TODO Auto-generated method stub
return null;
}
@Override
public boolean isClosed() {
// TODO Auto-generated method stub
return false;
}
@Override
public boolean isEmpty() {
return (size() == 0);
}
@Override
public boolean isIsomorphicWith(Graph arg0) {
log.info("Hey dummy!");
throw new UnsupportedOperationException("isIsomorphicWith() not supported " +
"by SPARQL graphs");
}
@Override
public QueryHandler queryHandler() {
if (queryHandler == null) {
queryHandler = new SimpleQueryHandler(this);
}
return queryHandler;
}
@Override
public int size() {
int size = find(null, null, null).toList().size();
return size;
}
private final static Capabilities capabilities = new Capabilities() {
public boolean addAllowed() {
return false;
}
public boolean addAllowed(boolean everyTriple) {
return false;
}
public boolean canBeEmpty() {
return true;
}
public boolean deleteAllowed() {
return false;
}
public boolean deleteAllowed(boolean everyTriple) {
return false;
}
public boolean findContractSafe() {
return true;
}
public boolean handlesLiteralTyping() {
return true;
}
public boolean iteratorRemoveAllowed() {
return false;
}
public boolean sizeAccurate() {
return true;
}
};
private boolean execAsk(String queryStr) {
Query askQuery = QueryFactory.create(queryStr);
QueryExecution qe = QueryExecutionFactory.sparqlService(endpointURI, askQuery);
try {
return qe.execAsk();
} finally {
qe.close();
}
}
private ResultSet execSelect(String queryStr) {
// long startTime1 = System.currentTimeMillis();
// try {
//
// RepositoryConnection conn = getConnection();
// try {
// GraphQuery q = conn.prepareGraphQuery(QueryLanguage.SPARQL, queryStr);
// q.evaluate();
// } catch (MalformedQueryException e) {
// throw new RuntimeException(e);
// } finally {
// conn.close();
// }
// } catch (Exception re) {
// //log.info(re,re);
// }
// log.info((System.currentTimeMillis() - startTime1) + " to execute via sesame");
long startTime = System.currentTimeMillis();
Query selectQuery = QueryFactory.create(queryStr);
QueryExecution qe = QueryExecutionFactory.sparqlService(endpointURI, selectQuery);
try {
return new ResultSetMem(qe.execSelect());
} finally {
//log.info((System.currentTimeMillis() - startTime) + " to execute via Jena");
qe.close();
}
}
/*
*
* see http://www.python.org/doc/2.5.2/ref/strings.html
* or see jena's n3 grammar jena/src/com/hp/hpl/jena/n3/n3.g
*/
protected static void pyString(StringBuffer sbuff, String s)
{
for (int i = 0; i < s.length(); i++) {
char c = s.charAt(i);
// Escape escapes and quotes
if (c == '\\' || c == '"' )
{
sbuff.append('\\') ;
sbuff.append(c) ;
continue ;
}
// Whitespace
if (c == '\n'){ sbuff.append("\\n");continue; }
if (c == '\t'){ sbuff.append("\\t");continue; }
if (c == '\r'){ sbuff.append("\\r");continue; }
if (c == '\f'){ sbuff.append("\\f");continue; }
if (c == '\b'){ sbuff.append("\\b");continue; }
if( c == 7 ) { sbuff.append("\\a");continue; }
// Output as is (subject to UTF-8 encoding on output that is)
sbuff.append(c) ;
// // Unicode escapes
// // c < 32, c >= 127, not whitespace or other specials
// String hexstr = Integer.toHexString(c).toUpperCase();
// int pad = 4 - hexstr.length();
// sbuff.append("\\u");
// for (; pad > 0; pad--)
// sbuff.append("0");
// sbuff.append(hexstr);
}
}
}
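
A sketch of the node-serialization helpers above, showing how concrete nodes and wildcards are rendered into query text.

import com.hp.hpl.jena.graph.Node;
import com.hp.hpl.jena.rdf.model.ResourceFactory;
import edu.cornell.mannlib.vitro.webapp.dao.jena.SparqlGraph;

public class SparqlNodeSketch {
    public static void main(String[] args) {
        // A concrete URI prints as <http://example.org/s>
        System.out.println(SparqlGraph.sparqlNode(
                Node.createURI("http://example.org/s"), "?s"));
        // A wildcard prints as the supplied variable name
        System.out.println(SparqlGraph.sparqlNode(null, "?s"));
        // A plain literal is quoted, with pyString() escaping applied
        System.out.println(SparqlGraph.sparqlNode(
                ResourceFactory.createPlainLiteral("a \"quoted\" value").asNode(),
                "?o"));
    }
}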

View file

@ -0,0 +1,242 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.dao.jena;
import java.io.StringWriter;
import java.util.Iterator;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.hp.hpl.jena.graph.Graph;
import com.hp.hpl.jena.graph.GraphEvents;
import com.hp.hpl.jena.graph.GraphUtil;
import com.hp.hpl.jena.graph.Node;
import com.hp.hpl.jena.graph.Triple;
import com.hp.hpl.jena.graph.impl.SimpleBulkUpdateHandler;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.Statement;
import com.hp.hpl.jena.rdf.model.StmtIterator;
import com.hp.hpl.jena.sparql.util.graph.GraphFactory;
import com.hp.hpl.jena.util.iterator.ExtendedIterator;
public class SparqlGraphBulkUpdater extends SimpleBulkUpdateHandler {
private static final Log log = LogFactory.getLog(SparqlGraphBulkUpdater.class);
private SparqlGraph graph;
public SparqlGraphBulkUpdater(SparqlGraph graph) {
super(graph);
this.graph = graph;
}
@Override
public void add(Triple[] arg0) {
Graph g = GraphFactory.createPlainGraph();
for (int i = 0 ; i < arg0.length ; i++) {
g.add(arg0[i]);
}
add(g);
}
@Override
public void add(List<Triple> arg0) {
Graph g = GraphFactory.createPlainGraph();
for (Triple t : arg0) {
g.add(t);
}
add(g);
}
@Override
public void add(Iterator<Triple> arg0) {
Graph g = GraphFactory.createPlainGraph();
while (arg0.hasNext()) {
Triple t = arg0.next();
g.add(t);
}
add(g);
}
@Override
public void add(Graph arg0) {
add(arg0, false);
}
@Override
public void add(Graph g, boolean arg1) {
log.info("adding graph");
Model[] model = separateStatementsWithBlankNodes(g);
addModel(model[1] /* nonBlankNodeModel */);
// replace following call with different method
addModel(model[0] /*blankNodeModel*/);
}
/**
* Returns a pair of models. The first contains any statement containing at
* least one blank node. The second contains all remaining statements.
* @param g
* @return
*/
private Model[] separateStatementsWithBlankNodes(Graph g) {
Model gm = ModelFactory.createModelForGraph(g);
Model blankNodeModel = ModelFactory.createDefaultModel();
Model nonBlankNodeModel = ModelFactory.createDefaultModel();
StmtIterator sit = gm.listStatements();
while (sit.hasNext()) {
Statement stmt = sit.nextStatement();
if (!stmt.getSubject().isAnon() && !stmt.getObject().isAnon()) {
nonBlankNodeModel.add(stmt);
} else {
blankNodeModel.add(stmt);
}
}
Model[] result = new Model[2];
result[0] = blankNodeModel;
result[1] = nonBlankNodeModel;
return result;
}
@Override
public void delete(Graph g, boolean withReifications) {
delete(g);
}
@Override
public void delete(Graph g) {
Model[] model = separateStatementsWithBlankNodes(g);
deleteModel(model[1] /*statements without blank nodes*/);
// replace blank nodes in remaining statements with variables
StringBuffer patternBuff = new StringBuffer();
Iterator<Triple> tripIt = g.find(null, null, null);
while(tripIt.hasNext()) {
Triple t = tripIt.next();
patternBuff.append(SparqlGraph.sparqlNodeDelete(t.getSubject(), null));
patternBuff.append(" ");
patternBuff.append(SparqlGraph.sparqlNodeDelete(t.getPredicate(), null));
patternBuff.append(" ");
patternBuff.append(SparqlGraph.sparqlNodeDelete(t.getObject(), null));
patternBuff.append(" .\n");
}
StringBuffer queryBuff = new StringBuffer();
String graphURI = graph.getGraphURI();
queryBuff.append("DELETE { " + ((graphURI != null) ? "GRAPH <" + graphURI + "> { " : "" ) + " \n");
queryBuff.append(patternBuff);
if (graphURI != null) {
queryBuff.append(" } \n");
}
queryBuff.append("} WHERE { \n");
if (graphURI != null) {
queryBuff.append(" GRAPH <" + graphURI + "> { \n");
}
queryBuff.append(patternBuff);
if (graphURI != null) {
queryBuff.append(" } \n");
}
queryBuff.append("} \n");
log.debug(queryBuff.toString());
graph.executeUpdate(queryBuff.toString());
}
public void addModel(Model model) {
verbModel(model, "INSERT");
}
public void deleteModel(Model model) {
verbModel(model, "DELETE");
}
private void verbModel(Model model, String verb) {
Model m = ModelFactory.createDefaultModel();
int testLimit = 1000;
StmtIterator stmtIt = model.listStatements();
int count = 0;
try {
while (stmtIt.hasNext()) {
count++;
m.add(stmtIt.nextStatement());
if (count % testLimit == 0 || !stmtIt.hasNext()) {
StringWriter sw = new StringWriter();
m.write(sw, "N-TRIPLE");
StringBuffer updateStringBuff = new StringBuffer();
String graphURI = graph.getGraphURI();
updateStringBuff.append(verb + " DATA { " + ((graphURI != null) ? "GRAPH <" + graphURI + "> { " : "" ));
updateStringBuff.append(sw);
updateStringBuff.append(((graphURI != null) ? " } " : "") + " }");
String updateString = updateStringBuff.toString();
//log.info(updateString);
graph.executeUpdate(updateString);
m.removeAll();
}
}
} finally {
stmtIt.close();
}
}
@Override
public void removeAll() {
removeAll(graph);
notifyRemoveAll();
}
protected void notifyRemoveAll() {
manager.notifyEvent(graph, GraphEvents.removeAll);
}
@Override
public void remove(Node s, Node p, Node o) {
removeAll(graph, s, p, o);
manager.notifyEvent(graph, GraphEvents.remove(s, p, o));
}
public static void removeAll(Graph g, Node s, Node p, Node o)
{
ExtendedIterator<Triple> it = g.find( s, p, o );
try {
while (it.hasNext()) {
Triple t = it.next();
g.delete(t);
it.remove();
}
}
finally {
it.close();
}
}
public static void removeAll( Graph g )
{
ExtendedIterator<Triple> it = GraphUtil.findAll(g);
try {
while (it.hasNext()) {
Triple t = it.next();
g.delete(t);
it.remove();
}
} finally {
it.close();
}
// get rid of remaining blank nodes using a SPARQL DELETE
if (g instanceof SparqlGraph) {
((SparqlGraph) g).removeAll();
}
}
}
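
A sketch of the blank-node-aware delete above: ground statements are removed with DELETE DATA, while statements containing blank nodes are matched by the variable pattern built with sparqlNodeDelete().

import com.hp.hpl.jena.rdf.model.Model;
import edu.cornell.mannlib.vitro.webapp.dao.jena.SparqlGraph;

public class BulkDeleteSketch {
    public static void deleteAll(SparqlGraph graph, Model toRemove) {
        // delete(Graph) issues DELETE DATA for statements without blank
        // nodes and a DELETE { ... } WHERE { ... } for the rest.
        graph.getBulkUpdateHandler().delete(toRemove.getGraph());
    }
}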

View file

@ -0,0 +1,149 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.dao.jena;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.LinkedList;
import java.util.List;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.hp.hpl.jena.graph.Node;
import com.hp.hpl.jena.graph.Triple;
import com.hp.hpl.jena.graph.TripleMatch;
import com.hp.hpl.jena.graph.impl.GraphWithPerform;
import com.hp.hpl.jena.shared.AddDeniedException;
import com.hp.hpl.jena.sparql.util.NodeFactory;
import com.hp.hpl.jena.util.iterator.ExtendedIterator;
import com.hp.hpl.jena.util.iterator.WrappedIterator;
public class SparqlGraphMultilingual extends SparqlGraph implements GraphWithPerform {
private static final Log log = LogFactory.getLog(SparqlGraphMultilingual.class);
protected List<String> langs;
public SparqlGraphMultilingual(String endpointURI, List<String> languages) {
super(endpointURI);
this.langs = languages;
}
@Override
public void add(Triple arg0) throws AddDeniedException {
performAdd(arg0);
}
@Override
public void performAdd(Triple t) {
if (true) { // language-tagging logic below is currently disabled; always delegate to the superclass
super.performAdd(t);
return;
}
if (langs == null || langs.size() == 0) {
log.info("No language configured - adding original triple " + t);
super.performAdd(t);
} else if (t.getObject().isLiteral()
&& t.getObject().getLiteral().getDatatypeURI() == null) {
log.info("adding language tag");
super.performAdd(Triple.create(t.getSubject(),
t.getPredicate(), NodeFactory.createLiteralNode(
t.getObject().getLiteralLexicalForm(), langs.get(0), null)));
} else {
log.info("adding original triple " + t);
super.performAdd(t);
}
}
@Override
public ExtendedIterator<Triple> find(TripleMatch arg0) {
//log.info("find(TripleMatch) " + arg0);
Triple t = arg0.asTriple();
return find(t.getSubject(), t.getPredicate(), t.getObject());
}
@Override
public ExtendedIterator<Triple> find(Node subject, Node predicate, Node object) {
long startTime = System.currentTimeMillis();
ExtendedIterator<Triple> rawResults = super.find(subject, predicate, object);
long rawTime = System.currentTimeMillis() - startTime;
List<Triple> tripList = new ArrayList<Triple>();
while (rawResults.hasNext()) {
tripList.add(rawResults.next());
}
if (tripList.size() == 0) {
return WrappedIterator.create(tripList.iterator());
}
if (subject.isConcrete() && predicate.isConcrete() && !object.isConcrete()) {
Collections.sort(tripList, new TripleSortByLang());
LinkedList<Triple> tripl = new LinkedList<Triple>();
if (!tripList.get(0).getObject().isLiteral()) {
tripl.addAll(tripList);
} else if (StringUtils.isEmpty(tripList.get(0).getObject().getLiteralLanguage())) {
tripl.addAll(tripList); // is this right?
} else {
String lang = tripList.get(0).getObject().getLiteralLanguage();
for (Triple t : tripList) {
if (lang.equals(t.getObject().getLiteralLanguage())) {
tripl.add(t);
} else {
break;
}
}
}
long filterTime = System.currentTimeMillis() - rawTime - startTime;
if (filterTime > 1) {
log.info("raw time " + rawTime + " ; filter time " + filterTime);
}
return WrappedIterator.create(tripl.iterator());
} else {
if (rawTime > 9) {
log.info("raw time " + rawTime);
log.info("^ " + subject + " : " + predicate + " : " + object);
}
return WrappedIterator.create(tripList.iterator());
}
}
private class TripleSortByLang implements Comparator<Triple> {
public int compare(Triple t1, Triple t2) {
if (t1 == null || t2 == null) {
return 0;
} else if (!t1.getObject().isLiteral() || !t2.getObject().isLiteral()) {
return 0;
}
String t1lang = t1.getObject().getLiteral().language();
String t2lang = t2.getObject().getLiteral().language();
if ( t1lang == null && t2lang == null) {
return 0;
} else if (t1lang == null) {
return 1;
} else if (t2lang == null) {
return -1;
} else {
int t1langPref = langs.indexOf(t1.getObject().getLiteral().language());
if (t1langPref == -1) {
t1langPref = Integer.MAX_VALUE;
}
int t2langPref = langs.indexOf(t2.getObject().getLiteral().language());
if (t2langPref == -1) {
t2langPref = Integer.MAX_VALUE;
}
return t1langPref - t2langPref;
}
}
}
}
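
A sketch of the language-preference behavior above: with langs = ["en-US", "es"], a find() on a concrete subject and predicate returns only the leading run of literals in the best-matching language; the endpoint URI is illustrative.

import java.util.Arrays;
import com.hp.hpl.jena.graph.Node;
import com.hp.hpl.jena.graph.Triple;
import com.hp.hpl.jena.util.iterator.ExtendedIterator;
import edu.cornell.mannlib.vitro.webapp.dao.jena.SparqlGraphMultilingual;

public class MultilingualFindSketch {
    public static void preferredLabels() {
        SparqlGraphMultilingual graph = new SparqlGraphMultilingual(
                "http://localhost:8080/sparql", Arrays.asList("en-US", "es"));
        // Literal results are sorted by TripleSortByLang and truncated to
        // the first language encountered.
        ExtendedIterator<Triple> labels = graph.find(
                Node.createURI("http://example.org/s"),
                Node.createURI("http://www.w3.org/2000/01/rdf-schema#label"),
                Node.ANY);
        while (labels.hasNext()) {
            System.out.println(labels.next());
        }
    }
}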

View file

@ -38,6 +38,7 @@ import com.hp.hpl.jena.query.ResultSet;
import com.hp.hpl.jena.query.Syntax;
import com.hp.hpl.jena.rdf.model.Literal;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.Property;
import com.hp.hpl.jena.rdf.model.RDFNode;
import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.rdf.model.ResourceFactory;
@ -620,9 +621,13 @@ public class VClassDaoJena extends JenaBaseDao implements VClassDao {
supURIs.add(getClassURIStr(cls));
}
} catch (Exception e) {
//TODO make this attempt respect the direct argument
// we'll try this again using a different method that doesn't try to convert to OntClass
List<Resource> supList = this.listDirectObjectPropertyValues(getOntModel().getResource(classURI), RDFS.subClassOf);
log.debug(e,e);
// we'll try this again using a different method
// that doesn't try to convert to OntClass
supURIs.clear();
List<Resource> supList = (direct)
? listDirectObjectPropertyValues(subClass, RDFS.subClassOf)
: listObjectPropertyValues(subClass, RDFS.subClassOf);
for (Resource res : supList) {
supURIs.add(getClassURIStr(res));
}
@ -630,6 +635,18 @@ public class VClassDaoJena extends JenaBaseDao implements VClassDao {
return supURIs;
}
private List<Resource> listObjectPropertyValues(Resource res, Property prop) {
List<Resource> values = new ArrayList<Resource>();
StmtIterator stmtIt = res.listProperties(prop);
while (stmtIt.hasNext()) {
Statement s = stmtIt.nextStatement();
if (s.getObject().isResource()) {
values.add(s.getObject().asResource());
}
}
return values;
}
public VClass getTopConcept() {
VClass top = new VClass();
if (getOntModel().getProfile().NAMESPACE().equals(RDFS.getURI())) {

View file

@ -48,6 +48,7 @@ import edu.cornell.mannlib.vitro.webapp.dao.VClassGroupDao;
import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactory;
import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactoryConfig;
import edu.cornell.mannlib.vitro.webapp.dao.jena.pellet.PelletListener;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
import edu.cornell.mannlib.vitro.webapp.servlet.setup.JenaDataSourceSetupBase;
public class WebappDaoFactoryJena implements WebappDaoFactory {
@ -73,6 +74,8 @@ public class WebappDaoFactoryJena implements WebappDaoFactory {
protected DatasetWrapperFactory dwf;
protected RDFService rdfService;
/* **************** constructors **************** */
public WebappDaoFactoryJena(WebappDaoFactoryJena base, String userURI) {
@ -343,8 +346,10 @@ public class WebappDaoFactoryJena implements WebappDaoFactory {
ObjectPropertyStatementDao objectPropertyStatementDao = null;
public ObjectPropertyStatementDao getObjectPropertyStatementDao() {
if( objectPropertyStatementDao == null )
// TODO supply a valid RDFService as the first argument if we keep this
// implementation
objectPropertyStatementDao = new ObjectPropertyStatementDaoJena(
dwf, this);
null, dwf, this);
return objectPropertyStatementDao;
}

View file

@ -4,7 +4,6 @@ package edu.cornell.mannlib.vitro.webapp.dao.jena;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.HashSet;
import org.apache.commons.dbcp.BasicDataSource;
@ -20,62 +19,37 @@ import edu.cornell.mannlib.vitro.webapp.dao.ObjectPropertyStatementDao;
import edu.cornell.mannlib.vitro.webapp.dao.VClassDao;
import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactory;
import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactoryConfig;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
import edu.cornell.mannlib.vitro.webapp.servlet.setup.JenaDataSourceSetupBase;
import edu.cornell.mannlib.vitro.webapp.servlet.setup.SimpleReasonerSetup;
public class WebappDaoFactorySDB extends WebappDaoFactoryJena {
public static final String UNION_GRAPH = "urn:x-arq:UnionGraph";
private SDBDatasetMode datasetMode = SDBDatasetMode.ASSERTIONS_AND_INFERENCES;
/**
* For use when any database connection associated with the Dataset
* is managed externally
*/
public WebappDaoFactorySDB(OntModelSelector ontModelSelector,
Dataset dataset) {
super(ontModelSelector);
this.dwf = new StaticDatasetFactory(dataset);
public WebappDaoFactorySDB(RDFService rdfService,
OntModelSelector ontModelSelector) {
this(rdfService, ontModelSelector, new WebappDaoFactoryConfig());
}
/**
* For use when any database connection associated with the Dataset
* is managed externally
*/
public WebappDaoFactorySDB(OntModelSelector ontModelSelector,
Dataset dataset,
WebappDaoFactoryConfig config) {
super(ontModelSelector, config);
this.dwf = new StaticDatasetFactory(dataset);
}
/**
* For use when any Dataset access should get a temporary DB connection
* from a pool
*/
public WebappDaoFactorySDB(OntModelSelector ontModelSelector,
BasicDataSource bds,
StoreDesc storeDesc,
WebappDaoFactoryConfig config) {
super(ontModelSelector, config);
this.dwf = new ReconnectingDatasetFactory(bds, storeDesc);
public WebappDaoFactorySDB(RDFService rdfService,
OntModelSelector ontModelSelector,
WebappDaoFactoryConfig config) {
this(rdfService, ontModelSelector, config, null);
}
/**
* For use when any Dataset access should get a temporary DB connection
* from a pool, and access to the inference graph needs to be specified.
*/
public WebappDaoFactorySDB(OntModelSelector ontModelSelector,
BasicDataSource bds,
StoreDesc storeDesc,
WebappDaoFactoryConfig config,
SDBDatasetMode datasetMode) {
public WebappDaoFactorySDB(RDFService rdfService,
OntModelSelector ontModelSelector,
WebappDaoFactoryConfig config,
SDBDatasetMode datasetMode) {
super(ontModelSelector, config);
this.dwf = new ReconnectingDatasetFactory(bds, storeDesc);
this.datasetMode = datasetMode;
this.dwf = new StaticDatasetFactory(new RDFServiceDataset(rdfService));
this.rdfService = rdfService;
if (datasetMode != null) {
this.datasetMode = datasetMode;
}
}
public WebappDaoFactorySDB(WebappDaoFactorySDB base, String userURI) {
super(base.ontModelSelector);
this.ontModelSelector = base.ontModelSelector;
@ -108,7 +82,7 @@ public class WebappDaoFactorySDB extends WebappDaoFactoryJena {
return objectPropertyStatementDao;
else
return objectPropertyStatementDao =
new ObjectPropertyStatementDaoSDB(dwf, datasetMode, this);
new ObjectPropertyStatementDaoSDB(rdfService, dwf, datasetMode, this);
}
@Override
@ -170,6 +144,12 @@ public class WebappDaoFactorySDB extends WebappDaoFactoryJena {
return filterBlock.toString();
}
@Override
public void close() {
super.close();
this.rdfService.close();
}
private class ReconnectingDatasetFactory implements DatasetWrapperFactory {
private BasicDataSource _bds;

View file

@ -163,7 +163,9 @@ public class PelletListener implements ModelChangedListener {
this.deletedDataProperties = ModelFactory.createDefaultModel();
this.mainModel.enterCriticalSection(Lock.READ);
try {
addedStatements(mainModel);
for (ObjectPropertyStatementPattern pat : this.inferenceDrivingPatternAllowSet) {
addedStatements(mainModel.listStatements((Resource) null, pat.getPredicate(), (RDFNode) null));
}
if (!skipReasoningUponInitialization) {
this.foreground = foreground;
notifyEvent(null,new EditEvent(null,false));

View file

@ -509,7 +509,7 @@ public class EditConfigurationVTwo {
}
public void addUrisOnForm(String ... strs){
this.urisOnform.addAll(new ArrayList<String>(Arrays.asList( strs )));
this.urisOnform.addAll(Arrays.asList( strs ));
}

View file

@ -5,6 +5,9 @@ package edu.cornell.mannlib.vitro.webapp.edit.n3editing.configuration;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.hp.hpl.jena.ontology.OntModel;
import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest;
@ -12,6 +15,8 @@ import edu.cornell.mannlib.vitro.webapp.dao.jena.OntModelSelector;
public class StandardModelSelector implements ModelSelector {
private static final Log log = LogFactory.getLog(StandardModelSelector.class);
public OntModel getModel(HttpServletRequest request, ServletContext context) {
VitroRequest vreq = new VitroRequest( request );
@ -23,11 +28,17 @@ public class StandardModelSelector implements ModelSelector {
sessionOntModel = oms.getABoxModel();
}
}
if(sessionOntModel != null && sessionOntModel instanceof OntModel )
if(sessionOntModel != null && sessionOntModel instanceof OntModel ) {
log.debug("using OntModelSelector from session");
return (OntModel)sessionOntModel;
else
} else if (vreq.getOntModelSelector() != null) {
log.debug("using OntModelSelector from request");
return vreq.getOntModelSelector().getABoxModel();
} else {
log.debug("using OntModelSelector from context");
return ((OntModelSelector) context
.getAttribute("unionOntModelSelector")).getABoxModel();
.getAttribute("unionOntModelSelector")).getABoxModel();
}
}
public static final ModelSelector selector = new StandardModelSelector();

View file

@ -66,7 +66,7 @@ public abstract class BaseEditConfigurationGenerator implements EditConfiguratio
//setup the model selectors for query, write and display models on editConfig
setupModelSelectorsFromVitroRequest(vreq, editConfig);
OntModel queryModel = (OntModel)vreq.getAttribute("jenaOntModel");
OntModel queryModel = vreq.getJenaOntModel(); // (OntModel)vreq.getAttribute("jenaOntModel");
if( editConfig.getSubjectUri() == null)
editConfig.setSubjectUri( EditConfigurationUtils.getSubjectUri(vreq));

View file

@ -51,6 +51,7 @@ import edu.cornell.mannlib.vitro.webapp.dao.jena.ModelContext;
import edu.cornell.mannlib.vitro.webapp.dao.jena.VitroModelSource;
import edu.cornell.mannlib.vitro.webapp.dao.jena.WebappDaoFactoryJena;
import edu.cornell.mannlib.vitro.webapp.dao.jena.WebappDaoFactorySDB;
import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.RDFServiceUtils;
import edu.cornell.mannlib.vitro.webapp.servlet.setup.JenaDataSourceSetupBase;
/**
@ -172,10 +173,11 @@ public class VitroRequestPrep implements Filter {
vreq.setDataset(dataset);
}
ServletContext ctx = vreq.getSession().getServletContext();
vreq.setUnfilteredWebappDaoFactory(new WebappDaoFactorySDB(
RDFServiceUtils.getRDFServiceFactory(ctx).getRDFService(),
ModelContext.getUnionOntModelSelector(
vreq.getSession().getServletContext()),
vreq.getDataset()));
ctx)));
req.setAttribute("VitroRequestPrep.setup", new Integer(1));
chain.doFilter(req, response);

View file

@ -3,8 +3,10 @@
package edu.cornell.mannlib.vitro.webapp.filters;
import java.io.IOException;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.List;
import java.util.Locale;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@ -21,19 +23,29 @@ import org.apache.commons.dbcp.BasicDataSource;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.hp.hpl.jena.graph.Graph;
import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.ontology.OntModelSpec;
import com.hp.hpl.jena.query.Dataset;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.sdb.SDBFactory;
import com.hp.hpl.jena.sdb.Store;
import com.hp.hpl.jena.sdb.StoreDesc;
import com.hp.hpl.jena.sdb.sql.SDBConnection;
import edu.cornell.mannlib.vitro.webapp.config.ConfigurationProperties;
import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest;
import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactory;
import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactoryConfig;
import edu.cornell.mannlib.vitro.webapp.dao.jena.ModelContext;
import edu.cornell.mannlib.vitro.webapp.dao.jena.OntModelSelector;
import edu.cornell.mannlib.vitro.webapp.dao.jena.RDFServiceDataset;
import edu.cornell.mannlib.vitro.webapp.dao.jena.SparqlDataset;
import edu.cornell.mannlib.vitro.webapp.dao.jena.SparqlDatasetGraph;
import edu.cornell.mannlib.vitro.webapp.dao.jena.SparqlGraphMultilingual;
import edu.cornell.mannlib.vitro.webapp.dao.jena.WebappDaoFactorySDB;
import edu.cornell.mannlib.vitro.webapp.dao.jena.WebappDaoFactorySDB.SDBDatasetMode;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceFactory;
import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.RDFServiceUtils;
import edu.cornell.mannlib.vitro.webapp.servlet.setup.JenaDataSourceSetupBase;
public class WebappDaoFactorySDBPrep implements Filter {
@@ -79,67 +91,52 @@
}
}
OntModelSelector oms = ModelContext.getUnionOntModelSelector(_ctx);
OntModelSelector baseOms = ModelContext.getBaseOntModelSelector(_ctx);
String defaultNamespace = (String) _ctx.getAttribute("defaultNamespace");
WebappDaoFactory wadf = null;
VitroRequest vreq = new VitroRequest((HttpServletRequest) request);
List<String> langs = new ArrayList<String>();
log.debug("Accept-Language: " + vreq.getHeader("Accept-Language"));
Enumeration<Locale> locs = vreq.getLocales();
while (locs.hasMoreElements()) {
Locale locale = locs.nextElement();
langs.add(locale.toString().replace("_", "-"));
}
WebappDaoFactoryConfig config = new WebappDaoFactoryConfig();
config.setDefaultNamespace(defaultNamespace);
config.setPreferredLanguages(langs);
RDFServiceFactory factory = RDFServiceUtils.getRDFServiceFactory(_ctx);
RDFService rdfService = factory.getRDFService();
Dataset dataset = new RDFServiceDataset(rdfService);
wadf = new WebappDaoFactorySDB(rdfService, oms, config);
WebappDaoFactory assertions = new WebappDaoFactorySDB(
rdfService, baseOms, config, SDBDatasetMode.ASSERTIONS_ONLY);
vreq.setWebappDaoFactory(wadf);
vreq.setAssertionsWebappDaoFactory(assertions);
vreq.setFullWebappDaoFactory(wadf);
vreq.setDataset(dataset);
vreq.setOntModelSelector(oms);
vreq.setJenaOntModel(ModelFactory.createOntologyModel(
OntModelSpec.OWL_MEM, dataset.getDefaultModel()));
request.setAttribute("WebappDaoFactorySDBPrep.setup", 1);
try {
filterChain.doFilter(request, response);
return;
} finally {
if (wadf != null) {
wadf.close();
}
}
}
@Override
public void init(FilterConfig filterConfig) throws ServletException {
try {

View file

@@ -0,0 +1,211 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.filters;
import java.io.IOException;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.List;
import java.util.Locale;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import org.apache.commons.dbcp.BasicDataSource;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.hp.hpl.jena.graph.Graph;
import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.ontology.OntModelSpec;
import com.hp.hpl.jena.query.DataSource;
import com.hp.hpl.jena.query.Dataset;
import com.hp.hpl.jena.query.DatasetFactory;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.sdb.SDBFactory;
import com.hp.hpl.jena.sdb.Store;
import com.hp.hpl.jena.sdb.StoreDesc;
import com.hp.hpl.jena.sdb.sql.SDBConnection;
import edu.cornell.mannlib.vitro.webapp.config.ConfigurationProperties;
import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest;
import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactory;
import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactoryConfig;
import edu.cornell.mannlib.vitro.webapp.dao.jena.OntModelSelector;
import edu.cornell.mannlib.vitro.webapp.dao.jena.SingleContentOntModelSelector;
import edu.cornell.mannlib.vitro.webapp.dao.jena.SparqlDatasetGraph;
import edu.cornell.mannlib.vitro.webapp.dao.jena.SparqlGraphMultilingual;
import edu.cornell.mannlib.vitro.webapp.dao.jena.WebappDaoFactoryJena;
import edu.cornell.mannlib.vitro.webapp.servlet.setup.JenaDataSourceSetupBase;
public class WebappDaoFactorySparqlPrep implements Filter {
private final static Log log = LogFactory.getLog(WebappDaoFactorySparqlPrep.class);
ServletContext _ctx;
/**
* The filter will be applied to all incoming URLs;
* this is a list of URI patterns to skip. These are
* matched against the requestURI sans query parameters,
* e.g.
* "/vitro/index.jsp"
* "/vitro/themes/enhanced/css/edit.css"
*
* These patterns are from VitroRequestPrep.java
*/
Pattern[] skipPatterns = {
Pattern.compile(".*\\.(gif|GIF|jpg|jpeg)$"),
Pattern.compile(".*\\.css$"),
Pattern.compile(".*\\.js$"),
Pattern.compile("/.*/themes/.*/site_icons/.*"),
Pattern.compile("/.*/images/.*")
};
@Override
public void doFilter(ServletRequest request, ServletResponse response,
FilterChain filterChain) throws IOException, ServletException {
if ( request.getAttribute("WebappDaoFactorySDBPrep.setup") != null ) {
// don't run multiple times
filterChain.doFilter(request, response);
return;
}
for( Pattern skipPattern : skipPatterns){
Matcher match =skipPattern.matcher( ((HttpServletRequest)request).getRequestURI() );
if( match.matches() ){
log.debug("request matched a skipPattern, skipping VitroRequestPrep");
filterChain.doFilter(request, response);
return;
}
}
BasicDataSource bds = JenaDataSourceSetupBase.getApplicationDataSource(_ctx);
StoreDesc storeDesc = (StoreDesc) _ctx.getAttribute("storeDesc");
OntModelSelector oms = (OntModelSelector) _ctx.getAttribute("unionOntModelSelector");
String defaultNamespace = (String) _ctx.getAttribute("defaultNamespace");
Connection sqlConn = null;
SDBConnection conn = null;
Store store = null;
Dataset dataset = null;
WebappDaoFactory wadf = null;
try {
if (bds == null || storeDesc == null || oms == null) {
throw new RuntimeException("SDB store not property set up");
}
try {
sqlConn = bds.getConnection();
conn = new SDBConnection(sqlConn) ;
} catch (SQLException sqe) {
throw new RuntimeException("Unable to connect to database", sqe);
}
if (conn != null) {
store = SDBFactory.connectStore(conn, storeDesc);
dataset = SDBFactory.connectDataset(store);
VitroRequest vreq = new VitroRequest((HttpServletRequest) request);
log.info("---------");
Enumeration<String> headStrs = vreq.getHeaderNames();
while (headStrs.hasMoreElements()) {
String head = headStrs.nextElement();
log.info(head + " : " + vreq.getHeader(head));
}
List<String> langs = new ArrayList<String>();
log.info("Accept-Language: " + vreq.getHeader("Accept-Language"));
Enumeration<Locale> locs = vreq.getLocales();
while (locs.hasMoreElements()) {
Locale locale = locs.nextElement();
langs.add(locale.toString().replace("_", "-"));
log.info(locale.toString() + " / " + locale.getLanguage() + " + " + locale.getCountry() + " : " + locale.getDisplayCountry() + " | " + locale.getLanguage() + " : " + locale.getDisplayLanguage());
}
WebappDaoFactoryConfig config = new WebappDaoFactoryConfig();
config.setDefaultNamespace(defaultNamespace);
config.setPreferredLanguages(langs);
//okay let's make a graph-backed model
String endpointURI = ConfigurationProperties.getBean(
request).getProperty("VitroConnection.DataSource.endpointURI");
Graph g = new SparqlGraphMultilingual(endpointURI, langs);
//Graph g = new SparqlGraph(endpointURI);
Model m = ModelFactory.createModelForGraph(g);
OntModel om = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM, m);
oms = new SingleContentOntModelSelector(om, oms.getDisplayModel(), oms.getUserAccountsModel());
dataset = DatasetFactory.create(new SparqlDatasetGraph(endpointURI));
//DataSource datasource = DatasetFactory.create();
//datasource.addNamedModel("fake:fake", m);
//dataset = datasource;
wadf = new WebappDaoFactoryJena(oms, config);
//wadf = new WebappDaoFactorySDB(oms, dataset, config);
vreq.setWebappDaoFactory(wadf);
vreq.setAssertionsWebappDaoFactory(wadf);
vreq.setFullWebappDaoFactory(wadf);
vreq.setUnfilteredWebappDaoFactory(wadf);
vreq.setDataset(dataset);
vreq.setJenaOntModel(om);
vreq.setOntModelSelector(oms);
}
} catch (Throwable t) {
log.error("Unable to filter request to set up SDB connection", t);
}
request.setAttribute("WebappDaoFactorySDBPrep.setup", 1);
try {
filterChain.doFilter(request, response);
return;
} finally {
if (conn != null) {
conn.close();
}
if (dataset != null) {
dataset.close();
}
if (store != null) {
store.close();
}
if (wadf != null) {
wadf.close();
}
}
}
@Override
public void init(FilterConfig filterConfig) throws ServletException {
try {
_ctx = filterConfig.getServletContext();
} catch (Throwable t) {
log.error("Unable to initialize WebappDaoFactorySDBPrep", t);
}
}
@Override
public void destroy() {
// no destroy actions
}
}

View file

@@ -0,0 +1,37 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.rdfservice;
/*
* A listener that receives change notifications at the single-statement level.
* Implementations override addedStatement() and removedStatement() to react to
* individual added and removed triples, and notifyEvent() for graph-level events.
*/
public interface ChangeListener {
/**
* Override this to listen to all statements added to the RDF store.
*
* @param serializedTriple - the added statement in n3 format
* @param graphURI - the graph to which the statement was added
*/
public void addedStatement(String serializedTriple, String graphURI);
/**
* Override this to listen to all statements removed from the RDF store.
*
* @param serializedTriple - the removed statement in n3 format
* @param graphURI - the graph from which the statement was removed
*/
public void removedStatement(String serializedTriple, String graphURI);
/**
* Override this to listen to events pertaining to the given graphURI.
*
* @param graphURI - the graph to which the event pertains
* @param event - the event that occurred.
*/
public void notifyEvent(String graphURI, Object event);
}
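For orientation, here is a minimal sketch of an implementation (a hypothetical class, not part of this commit, that simply logs each notification):

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

public class LoggingChangeListener implements ChangeListener {

    private static final Log log = LogFactory.getLog(LoggingChangeListener.class);

    @Override
    public void addedStatement(String serializedTriple, String graphURI) {
        log.info("added " + serializedTriple + " to graph " + graphURI);
    }

    @Override
    public void removedStatement(String serializedTriple, String graphURI) {
        log.info("removed " + serializedTriple + " from graph " + graphURI);
    }

    @Override
    public void notifyEvent(String graphURI, Object event) {
        log.info("event " + event + " pertaining to graph " + graphURI);
    }
}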

View file

@@ -0,0 +1,119 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.rdfservice;
import java.io.InputStream;
import java.util.List;
/*
* Input parameter to changeSetUpdate() method in RDFService.
* Represents a precondition query and an ordered list of model changes.
*/
public interface ChangeSet {
/**
* Getter for the precondition query
*
* @return String - a SPARQL query
*/
public String getPreconditionQuery();
/**
* Setter for the precondition query
*
* @param preconditionQuery - a SPARQL query
*/
public void setPreconditionQuery(String preconditionQuery);
/**
* Getter for the precondition query type
*
* @return RDFService.SPARQLQueryType - the precondition query type
*/
public RDFService.SPARQLQueryType getPreconditionQueryType();
/**
* Setter for the precondition query type
*
* @param queryType - the precondition query type
*/
public void setPreconditionQueryType(RDFService.SPARQLQueryType queryType);
/**
* Getter for the list of model changes
*
* @return List<ModelChange> - list of model changes
*/
public List<ModelChange> getModelChanges();
/**
* Adds one model change representing an addition to the list of model changes
*
* @param model - a serialized RDF model (collection of triples)
* @param serializationFormat - format of the serialized RDF model
* @param graphURI - URI of the graph to which the RDF model should be added
*/
public void addAddition(InputStream model,
RDFService.ModelSerializationFormat serializationFormat,
String graphURI);
/**
* Adds one model change representing a deletion to the list of model changes
*
* @param model - a serialized RDF model (collection of triples)
* @param serializationFormat - format of the serialized RDF model
* @param graphURI - URI of the graph from which the RDF model should be removed
*/
public void addRemoval(InputStream model,
RDFService.ModelSerializationFormat serializationFormat,
String graphURI);
/**
* Creates an instance of the ModelChange class
*/
public ModelChange manufactureModelChange();
/**
* Creates an instance of the ModelChange class
*
* @param serializedModel - a serialized RDF model (collection of triples)
* @param serializationFormat - format of the serialized RDF model
* @param operation - the type of operation to be performed with the serialized RDF model
* @param graphURI - URI of the graph on which to apply the model change operation
*/
public ModelChange manufactureModelChange(InputStream serializedModel,
RDFService.ModelSerializationFormat serializationFormat,
ModelChange.Operation operation,
String graphURI);
/**
* Add an event that will be passed to any change listeners in advance of
* the change set additions and retractions being performed. The event
* will only be fired if the precondition (if any) is met.
* @param event - the event to pass to listeners
*/
public void addPreChangeEvent(Object event);
/**
* Add an event that will be passed to any change listeners after all of
* the change set additions and retractions are performed.
* @param event - the event to pass to listeners
*/
public void addPostChangeEvent(Object event);
/**
* Return a list of events to pass to any change listeners in
* advance of the change set additions and retractions being performed.
* @return List&lt;Object&gt; - the pre-change events
*/
public List<Object> getPreChangeEvents();
/**
* Return a list of events to pass to any change listeners after
* the change set additions and retractions are performed.
* @return List&lt;Object&gt; - the post-change events
*/
public List<Object> getPostChangeEvents();
}
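To make the calling pattern concrete, here is a sketch of a simple addition (hypothetical URIs and data; rdfService is assumed to be an RDFService instance, and RDFServiceUtils.toInputStream() is a convenience method defined later in this commit):

String n3 = "<http://example.org/book1> "
        + "<http://purl.org/dc/elements/1.1/title> \"Cooking\" .";
ChangeSet cs = rdfService.manufactureChangeSet();
cs.addAddition(RDFServiceUtils.toInputStream(n3),
        RDFService.ModelSerializationFormat.N3,
        "http://example.org/graph1");
// no precondition query was set, so the change is applied and true is returned
boolean preconditionSatisfied = rdfService.changeSetUpdate(cs);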

View file

@@ -0,0 +1,73 @@
package edu.cornell.mannlib.vitro.webapp.rdfservice;
import java.io.InputStream;
/*
* A ModelChange is one component of a ChangeSet.
* Represents a model (collection of RDF triples), the URI
* of a graph, and an indication of whether to add or
* remove the model from the graph.
*/
public interface ModelChange {
public enum Operation {
ADD, REMOVE
}
/**
* Getter for the serialized model
*
* @return InputStream - a serialized model (collection of RDF triples) representing a change to make
*/
public InputStream getSerializedModel();
/**
* Setter for the serialized model
*
* @param serializedModel - a serialized model (collection of RDF triples) representing a change to make
*/
public void setSerializedModel(InputStream serializedModel);
/**
* Getter for the serialization format of the model
*
* @return RDFService.ModelSerializationFormat - the serialization format of the model
*/
public RDFService.ModelSerializationFormat getSerializationFormat();
/**
* Setter for the serialization format of the model
*
* @param serializationFormat - the serialization format of the model
*/
public void setSerializationFormat(RDFService.ModelSerializationFormat serializationFormat);
/**
* Getter for the operation type
*
* @return ModelChange.Operation - the operation to be performed
*/
public ModelChange.Operation getOperation();
/**
* Setter for the operation type
*
* @param operation - the operation to be performed
*/
public void setOperation(ModelChange.Operation operation);
/**
* Getter for the URI of the graph to which to apply the change
*
* @return String - the URI of the graph to which to apply the change
*/
public String getGraphURI();
/**
* Setter for the URI of the graph to which to apply the change
*
* @param graphURI - the URI of the graph to which to apply the change
*/
public void setGraphURI(String graphURI);
}

View file

@@ -0,0 +1,155 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.rdfservice;
import java.io.InputStream;
import java.util.List;
/*
* Interface for an API to write, read, and update Vitro's RDF store, with
* support for listening, logging and auditing.
*/
public interface RDFService {
public enum SPARQLQueryType {
SELECT, CONSTRUCT, DESCRIBE, ASK
}
public enum ModelSerializationFormat {
RDFXML, N3
}
public enum ResultFormat {
JSON, CSV, XML, TEXT
}
/**
* Perform a series of additions to and/or removals from specified graphs
* in the RDF store. The precondition query, if any, will be executed against
* the union of all the graphs in the knowledge base before any updates are
* made. The updates are applied only if the precondition is satisfied: an ASK
* query must return true; a SELECT, CONSTRUCT or DESCRIBE query must return a
* non-empty result.
*
* @param changeSet - a set of changes to be performed on the RDF store.
*
* @return boolean - indicates whether the precondition was satisfied
*/
public boolean changeSetUpdate(ChangeSet changeSet) throws RDFServiceException;
/**
* If the given individual already exists in the default graph, throws an
* RDFServiceException, otherwise adds one type assertion to the default
* graph.
*
* @param individualURI - URI of the individual to be added
* @param individualTypeURI - URI of the type for the individual
*/
public void newIndividual(String individualURI, String individualTypeURI) throws RDFServiceException;
/**
* If the given individual already exists in the given graph, throws an
* RDFServiceException, otherwise adds one type assertion to the given
* graph.
*
* @param individualURI - URI of the individual to be added
* @param individualTypeURI - URI of the type for the individual
* @param graphURI - URI of the graph to which to add the individual
*/
public void newIndividual(String individualURI, String individualTypeURI, String graphURI) throws RDFServiceException;
/**
* Performs a SPARQL construct query against the knowledge base. The query may have
* an embedded graph identifier.
*
* @param query - the SPARQL query to be executed against the RDF store
* @param resultFormat - type of serialization for RDF result of the SPARQL query
*
* @return InputStream - the result of the query
*
*/
public InputStream sparqlConstructQuery(String query, RDFService.ModelSerializationFormat resultFormat) throws RDFServiceException;
/**
* Performs a SPARQL describe query against the knowledge base. The query may have
* an embedded graph identifier.
*
* @param query - the SPARQL query to be executed against the RDF store
* @param resultFormat - type of serialization for RDF result of the SPARQL query
*
* @return InputStream - the result of the query
*
*/
public InputStream sparqlDescribeQuery(String query, RDFService.ModelSerializationFormat resultFormat) throws RDFServiceException;
/**
* Performs a SPARQL select query against the knowledge base. The query may have
* an embedded graph identifier.
*
* @param query - the SPARQL query to be executed against the RDF store
* @param resultFormat - format for the result of the Select query
*
* @return InputStream - the result of the query
*
*/
public InputStream sparqlSelectQuery(String query, RDFService.ResultFormat resultFormat) throws RDFServiceException;
/**
* Performs a SPARQL ASK query against the knowledge base. The query may have
* an embedded graph identifier.
*
* @param query - the SPARQL query to be executed against the RDF store
*
* @return boolean - the result of the SPARQL query
*/
public boolean sparqlAskQuery(String query) throws RDFServiceException;
/**
* Get a list of all the graph URIs in the RDF store.
*
* @return List<String> - list of all the graph URIs in the RDF store
*/
public List<String> getGraphURIs() throws RDFServiceException;
/**
* TBD - we need to define this method
*/
public void getGraphMetadata() throws RDFServiceException;
/**
* Get the URI of the default write graph
*
* @return String URI of default write graph
*/
public String getDefaultWriteGraphURI() throws RDFServiceException;
/**
* Register a listener to listen to changes in any graph in
* the RDF store.
*
* @param changeListener - the change listener
*/
public void registerListener(ChangeListener changeListener) throws RDFServiceException;
/**
* Unregister a listener from listening to changes in
* the RDF store in any graph.
*
* @param changeListener - the change listener
*/
public void unregisterListener(ChangeListener changeListener) throws RDFServiceException;
/**
* Create a ChangeSet object
*
* @return ChangeSet an empty ChangeSet object
*/
public ChangeSet manufactureChangeSet();
/**
* Free any resources held by this RDFService object
*/
public void close();
}
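As a usage sketch, a SELECT query and its stream consumption might look like this (assumes an RDFService instance rdfService in scope, inside a method that may throw IOException and RDFServiceException; error handling elided):

InputStream in = rdfService.sparqlSelectQuery(
        "SELECT ?g WHERE { GRAPH ?g { ?s ?p ?o } } LIMIT 10",
        RDFService.ResultFormat.JSON);
try {
    byte[] buf = new byte[4096];
    int len;
    while ((len = in.read(buf)) > 0) {
        System.out.write(buf, 0, len);
    }
} finally {
    in.close();
}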

View file

@@ -0,0 +1,21 @@
package edu.cornell.mannlib.vitro.webapp.rdfservice;
public class RDFServiceException extends Exception {
public RDFServiceException() {
super();
}
public RDFServiceException(Throwable cause) {
super(cause);
}
public RDFServiceException(String message) {
super(message);
}
public RDFServiceException(String message, Throwable cause) {
super(message, cause);
}
}

View file

@@ -0,0 +1,29 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.rdfservice;
public interface RDFServiceFactory {
public RDFService getRDFService();
/**
* Register a listener to listen to changes in any graph in
* the RDF store. Any RDFService objects returned by this factory should notify
* this listener of changes.
*
* @param changeListener - the change listener
*/
public void registerListener(ChangeListener changeListener) throws RDFServiceException;
/**
* Unregister a listener from listening to changes in
* the RDF store. Any RDFService objects returned by this factory should notify
* this listener of changes.
*
* @param changeListener - the change listener
*/
public void unregisterListener(ChangeListener changeListener) throws RDFServiceException;
}
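The factory-level registration methods exist so that a listener registered once is notified of changes made through any RDFService the factory hands out. A sketch (ctx is an assumed ServletContext; LoggingChangeListener is the hypothetical listener sketched earlier):

RDFServiceFactory factory = RDFServiceUtils.getRDFServiceFactory(ctx);
factory.registerListener(new LoggingChangeListener());
RDFService rdfService = factory.getRDFService(); // changes made here reach the listener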

View file

@@ -0,0 +1,148 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.rdfservice.impl;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import edu.cornell.mannlib.vitro.webapp.rdfservice.ChangeSet;
import edu.cornell.mannlib.vitro.webapp.rdfservice.ModelChange;
import edu.cornell.mannlib.vitro.webapp.rdfservice.ModelChange.Operation;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
/*
* Input parameter to changeSetUpdate() method in RDFService.
* Represents a precondition query and an ordered list of model changes.
*/
public class ChangeSetImpl implements ChangeSet {
public ChangeSetImpl() {
modelChanges = new ArrayList<ModelChange>();
}
private String preconditionQuery;
private RDFService.SPARQLQueryType queryType;
private ArrayList<ModelChange> modelChanges = new ArrayList<ModelChange>();
private ArrayList<Object> preChangeEvents = new ArrayList<Object>();
private ArrayList<Object> postChangeEvents = new ArrayList<Object>();
/**
* Getter for the precondition query
*
* @return String - a SPARQL query
*/
@Override
public String getPreconditionQuery() {
return preconditionQuery;
}
/**
* Setter for the precondition query
*
* @param preconditionQuery - a SPARQL query
*/
@Override
public void setPreconditionQuery(String preconditionQuery) {
this.preconditionQuery = preconditionQuery;
}
/**
* Getter for the precondition query type
*
* @return RDFService.SPARQLQueryType - the precondition query type
*/
@Override
public RDFService.SPARQLQueryType getPreconditionQueryType() {
return queryType;
}
/**
* Setter for the precondition query type
*
* @param queryType - the precondition query type
*/
@Override
public void setPreconditionQueryType(RDFService.SPARQLQueryType queryType) {
this.queryType = queryType;
}
/**
* Getter for the list of model changes
*
* @return List<ModelChange> - list of model changes
*/
@Override
public List<ModelChange> getModelChanges() {
return modelChanges;
}
/**
* Adds one model change representing an addition to the list of model changes
*
* @param model - a serialized RDF model (collection of triples)
* @param format - format of the serialized RDF model
* @param graphURI - URI of the graph to which the RDF model should be added
*/
@Override
public void addAddition(InputStream model, RDFService.ModelSerializationFormat format, String graphURI) {
modelChanges.add(manufactureModelChange(model,format, ModelChange.Operation.ADD, graphURI));
}
/**
* Adds one model change representing a deletion to the list of model changes
*
* @param model - a serialized RDF model (collection of triples)
* @param format - format of the serialized RDF model
* @param graphURI - URI of the graph from which the RDF model should be removed
*/
@Override
public void addRemoval(InputStream model, RDFService.ModelSerializationFormat format, String graphURI) {
modelChanges.add(manufactureModelChange(model, format, ModelChange.Operation.REMOVE, graphURI));
}
/**
* Creates an instance of the ModelChange class
*/
@Override
public ModelChange manufactureModelChange() {
return new ModelChangeImpl();
}
/**
* Creates an instance of the ModelChange class
*
* @param serializedModel - a serialized RDF model (collection of triples)
* @param serializationFormat - format of the serialized RDF model
* @param operation - the type of operation to be performed with the serialized RDF model
* @param graphURI - URI of the graph on which to apply the model change operation
*/
@Override
public ModelChange manufactureModelChange(InputStream serializedModel,
RDFService.ModelSerializationFormat serializationFormat,
Operation operation,
String graphURI) {
return new ModelChangeImpl(serializedModel, serializationFormat, operation, graphURI);
}
@Override
public void addPreChangeEvent(Object o) {
this.preChangeEvents.add(o);
}
@Override
public void addPostChangeEvent(Object o) {
this.postChangeEvents.add(o);
}
@Override
public List<Object> getPreChangeEvents() {
return this.preChangeEvents;
}
@Override
public List<Object> getPostChangeEvents() {
return this.postChangeEvents;
}
}

View file

@@ -0,0 +1,113 @@
package edu.cornell.mannlib.vitro.webapp.rdfservice.impl;
import java.io.InputStream;
import edu.cornell.mannlib.vitro.webapp.rdfservice.ModelChange;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
/*
* A ModelChange is one component of a ChangeSet.
* Represents a model (collection of RDF triples), the URI
* of a graph, and an indication of whether to add or
* remove the model from the graph.
*/
public class ModelChangeImpl implements ModelChange {
private InputStream serializedModel;
private RDFService.ModelSerializationFormat serializationFormat;
private Operation operation;
private String graphURI;
public ModelChangeImpl() {}
public ModelChangeImpl(InputStream serializedModel,
RDFService.ModelSerializationFormat serializationFormat,
Operation operation,
String graphURI) {
this.serializedModel = serializedModel;
this.serializationFormat = serializationFormat;
this.operation = operation;
this.graphURI = graphURI;
}
/**
* Getter for the serialized model
*
* @return InputStream - a model (collection of RDF triples), serialized
*/
@Override
public InputStream getSerializedModel() {
return serializedModel;
}
/**
* Setter for the serialized model
*
* @param serializedModel - a model (collection of RDF triples), serialized
*/
@Override
public void setSerializedModel(InputStream serializedModel) {
this.serializedModel = serializedModel;
}
/**
* Getter for the serialization format of the model
*
* @return RDFService.ModelSerializationFormat - the serialization format of the model
*/
@Override
public RDFService.ModelSerializationFormat getSerializationFormat() {
return serializationFormat;
}
/**
* Setter for the serialization format of the model
*
* @param serializationFormat - the serialization format of the model
*/
@Override
public void setSerializationFormat(RDFService.ModelSerializationFormat serializationFormat) {
this.serializationFormat = serializationFormat;
}
/**
* Getter for the operation type
*
* @return ModelChange.Operation - the operation type
*/
@Override
public Operation getOperation() {
return operation;
}
/**
* Setter for the operation type
*
* @param operation - the operation type
*/
@Override
public void setOperation(Operation operation) {
this.operation = operation;
}
/**
* Getter for the URI of the graph to which to apply the change
*
* @return String - the graph URI
*/
@Override
public String getGraphURI() {
return graphURI;
}
/**
* Setter for the URI of the graph to which to apply the change
*
* @param graphURI - the graph URI
*/
@Override
public void setGraphURI(String graphURI) {
this.graphURI = graphURI;
}
}

View file

@@ -0,0 +1,38 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.rdfservice.impl;
import edu.cornell.mannlib.vitro.webapp.rdfservice.ChangeListener;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceFactory;
/**
* An RDFServiceFactory that always returns the same RDFService object
* @author bjl23
*
*/
public class RDFServiceFactorySingle implements RDFServiceFactory {
private RDFService rdfService;
public RDFServiceFactorySingle(RDFService rdfService) {
this.rdfService = rdfService;
}
@Override
public RDFService getRDFService() {
return this.rdfService;
}
@Override
public void registerListener(ChangeListener listener) throws RDFServiceException {
this.rdfService.registerListener(listener);
}
@Override
public void unregisterListener(ChangeListener listener) throws RDFServiceException {
this.rdfService.unregisterListener(listener);
}
}
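A sketch of wiring this factory up at context startup (assumes a configured BasicDataSource bds and StoreDesc storeDesc; RDFServiceSDB and RDFServiceUtils.setRDFServiceFactory() appear later in this commit). Since every caller receives the same instance, that instance must be safe to share across concurrent requests:

RDFService rdfService = new RDFServiceSDB(bds, storeDesc);
RDFServiceUtils.setRDFServiceFactory(ctx, new RDFServiceFactorySingle(rdfService));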

View file

@@ -0,0 +1,266 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.rdfservice.impl;
import java.io.ByteArrayInputStream;
import java.util.ArrayList;
import java.util.Iterator;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.hp.hpl.jena.graph.Node;
import com.hp.hpl.jena.graph.Triple;
import com.hp.hpl.jena.query.ResultSet;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.sparql.resultset.XMLInput;
import com.hp.hpl.jena.vocabulary.RDF;
import edu.cornell.mannlib.vitro.webapp.rdfservice.ChangeListener;
import edu.cornell.mannlib.vitro.webapp.rdfservice.ChangeSet;
import edu.cornell.mannlib.vitro.webapp.rdfservice.ModelChange;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException;
/*
* API to write, read, and update Vitro's RDF store, with support for
* listening, logging and auditing.
*/
public abstract class RDFServiceImpl implements RDFService {
private static final Log log = LogFactory.getLog(RDFServiceImpl.class);
protected String defaultWriteGraphURI;
protected ArrayList<ChangeListener> registeredListeners = new ArrayList<ChangeListener>();
/**
* If the given individual already exists in the default graph, throws an
* RDFServiceException, otherwise adds one type assertion to the default
* graph.
*
* @param individualURI - URI of the individual to be added
* @param individualTypeURI - URI of the type for the individual
*/
@Override
public void newIndividual(String individualURI,
String individualTypeURI) throws RDFServiceException {
newIndividual(individualURI, individualTypeURI, defaultWriteGraphURI);
}
/**
* If the given individual already exists in the given graph, throws an
* RDFServiceException, otherwise adds one type assertion to the given
* graph.
*
* @param individualURI - URI of the individual to be added
* @param individualTypeURI - URI of the type for the individual
* @param graphURI - URI of the graph to which to add the individual
*/
@Override
public void newIndividual(String individualURI,
String individualTypeURI,
String graphURI) throws RDFServiceException {
StringBuffer containsQuery = new StringBuffer("ASK { \n");
if (graphURI != null) {
containsQuery.append(" GRAPH <" + graphURI + "> { ");
}
containsQuery.append("<");
containsQuery.append(individualURI);
containsQuery.append("> ");
containsQuery.append("?p ?o");
if (graphURI != null) {
containsQuery.append(" } \n");
}
containsQuery.append("\n}");
if (sparqlAskQuery(containsQuery.toString())) {
throw new RDFServiceException("individual already exists");
} else {
Triple triple = new Triple(Node.createURI(individualURI), RDF.type.asNode(), Node.createURI(individualTypeURI));
//addTriple(triple, graphURI);
ChangeSet cs = this.manufactureChangeSet();
cs.addAddition(new ByteArrayInputStream(
sparqlTriple(triple).getBytes()), ModelSerializationFormat.N3, graphURI);
changeSetUpdate(cs);
}
}
/**
* Get the URI of the default write graph
*
* @return String URI of default write graph
*/
@Override
public String getDefaultWriteGraphURI() throws RDFServiceException {
return defaultWriteGraphURI;
}
/**
* Register a listener to listen to changes in any graph in
* the RDF store.
*
*/
@Override
public synchronized void registerListener(ChangeListener changeListener) throws RDFServiceException {
if (!registeredListeners.contains(changeListener)) {
registeredListeners.add(changeListener);
}
}
/**
* Unregister a listener from listening to changes in any graph
* in the RDF store.
*
*/
@Override
public synchronized void unregisterListener(ChangeListener changeListener) throws RDFServiceException {
registeredListeners.remove(changeListener);
}
/**
* Create a ChangeSet object
*
* @return a ChangeSet object
*/
@Override
public ChangeSet manufactureChangeSet() {
return new ChangeSetImpl();
}
public synchronized void notifyListeners(Triple triple, ModelChange.Operation operation, String graphURI) {
Iterator<ChangeListener> iter = registeredListeners.iterator();
while (iter.hasNext()) {
ChangeListener listener = iter.next();
if (operation == ModelChange.Operation.ADD) {
listener.addedStatement(sparqlTriple(triple), graphURI);
} else {
listener.removedStatement(sparqlTriple(triple), graphURI);
}
}
}
public synchronized void notifyListenersOfEvent(Object event) {
Iterator<ChangeListener> iter = registeredListeners.iterator();
while (iter.hasNext()) {
ChangeListener listener = iter.next();
// TODO what is the graphURI parameter for?
listener.notifyEvent(null, event);
}
}
protected boolean isPreconditionSatisfied(String query,
RDFService.SPARQLQueryType queryType)
throws RDFServiceException {
Model model = ModelFactory.createDefaultModel();
switch (queryType) {
case DESCRIBE:
model.read(sparqlDescribeQuery(query,RDFService.ModelSerializationFormat.N3), null);
return !model.isEmpty();
case CONSTRUCT:
model.read(sparqlConstructQuery(query,RDFService.ModelSerializationFormat.N3), null);
return !model.isEmpty();
case SELECT:
return sparqlSelectQueryHasResults(query);
case ASK:
return sparqlAskQuery(query);
default:
throw new RDFServiceException("unrecognized SPARQL query type");
}
}
protected static String getSerializationFormatString(RDFService.ModelSerializationFormat format) {
switch (format) {
case RDFXML:
return "RDF/XML";
case N3:
return "N3";
default:
log.error("unexpected format in getFormatString");
return null;
}
}
protected boolean sparqlSelectQueryHasResults(String queryStr) throws RDFServiceException {
ResultSet rs = XMLInput.fromXML(sparqlSelectQuery(queryStr, ResultFormat.XML));
return rs.hasNext();
}
protected static String sparqlTriple(Triple triple) {
StringBuffer serializedTriple = new StringBuffer();
serializedTriple.append(sparqlNodeUpdate(triple.getSubject(), ""));
serializedTriple.append(" ");
serializedTriple.append(sparqlNodeUpdate(triple.getPredicate(), ""));
serializedTriple.append(" ");
serializedTriple.append(sparqlNodeUpdate(triple.getObject(), ""));
serializedTriple.append(" .");
return serializedTriple.toString();
}
protected static String sparqlNodeUpdate(Node node, String varName) {
if (node.isBlank()) {
return "_:" + node.getBlankNodeLabel().replaceAll("\\W", "");
} else {
return sparqlNode(node, varName);
}
}
protected static String sparqlNode(Node node, String varName) {
if (node == null || node.isVariable()) {
return varName;
} else if (node.isBlank()) {
return "<fake:blank>"; // or throw exception?
} else if (node.isURI()) {
StringBuffer uriBuff = new StringBuffer();
return uriBuff.append("<").append(node.getURI()).append(">").toString();
} else if (node.isLiteral()) {
StringBuffer literalBuff = new StringBuffer();
literalBuff.append("\"");
pyString(literalBuff, node.getLiteralLexicalForm());
literalBuff.append("\"");
if (node.getLiteralDatatypeURI() != null) {
literalBuff.append("^^<").append(node.getLiteralDatatypeURI()).append(">");
} else if (node.getLiteralLanguage() != null && !node.getLiteralLanguage().isEmpty()) {
literalBuff.append("@").append(node.getLiteralLanguage());
}
return literalBuff.toString();
} else {
return varName;
}
}
// see http://www.python.org/doc/2.5.2/ref/strings.html
// or see jena's n3 grammar jena/src/com/hp/hpl/jena/n3/n3.g
protected static void pyString(StringBuffer sbuff, String s) {
for (int i = 0; i < s.length(); i++) {
char c = s.charAt(i);
// Escape escapes and quotes
if (c == '\\' || c == '"' )
{
sbuff.append('\\') ;
sbuff.append(c) ;
continue ;
}
// Whitespace
if (c == '\n'){ sbuff.append("\\n");continue; }
if (c == '\t'){ sbuff.append("\\t");continue; }
if (c == '\r'){ sbuff.append("\\r");continue; }
if (c == '\f'){ sbuff.append("\\f");continue; }
if (c == '\b'){ sbuff.append("\\b");continue; }
if( c == 7 ) { sbuff.append("\\a");continue; }
// Output as is (subject to UTF-8 encoding on output that is)
sbuff.append(c) ;
}
}
}
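As a worked example of the serialization helpers above (hypothetical values):

Triple t = new Triple(
        Node.createURI("http://example.org/book1"),
        Node.createURI("http://purl.org/dc/elements/1.1/title"),
        Node.createLiteral("A \"quoted\"\ntitle"));
// sparqlTriple(t) escapes the quotes and the newline via pyString() and yields:
// <http://example.org/book1> <http://purl.org/dc/elements/1.1/title> "A \"quoted\"\ntitle" .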

View file

@@ -0,0 +1,43 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.rdfservice.impl;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import javax.servlet.ServletContext;
import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceFactory;
public class RDFServiceUtils {
private static final String RDFSERVICEFACTORY_ATTR =
RDFServiceUtils.class.getName() + ".RDFServiceFactory";
public static RDFServiceFactory getRDFServiceFactory(ServletContext context) {
Object o = context.getAttribute(RDFSERVICEFACTORY_ATTR);
return (o instanceof RDFServiceFactory) ? (RDFServiceFactory) o : null;
}
public static void setRDFServiceFactory(ServletContext context,
RDFServiceFactory factory) {
context.setAttribute(RDFSERVICEFACTORY_ATTR, factory);
}
public static InputStream toInputStream(String serializedRDF) {
try {
return new ByteArrayInputStream(serializedRDF.getBytes("UTF-8"));
} catch (UnsupportedEncodingException e) {
throw new RuntimeException(e);
}
}
public static RDFService getRDFService(VitroRequest vreq) {
return getRDFServiceFactory(
vreq.getSession().getServletContext()).getRDFService();
}
}
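A sketch of typical call sites (assumes a VitroRequest vreq in a controller, and that the factory was placed in the servlet context at startup):

RDFService rdfService = RDFServiceUtils.getRDFService(vreq);
InputStream triples = RDFServiceUtils.toInputStream(
        "<http://example.org/s> <http://example.org/p> <http://example.org/o> .");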

View file

@@ -0,0 +1,217 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.rdfservice.impl.sdb;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.hp.hpl.jena.graph.BulkUpdateHandler;
import com.hp.hpl.jena.graph.Capabilities;
import com.hp.hpl.jena.graph.Graph;
import com.hp.hpl.jena.graph.GraphEventManager;
import com.hp.hpl.jena.graph.GraphStatisticsHandler;
import com.hp.hpl.jena.graph.Node;
import com.hp.hpl.jena.graph.Reifier;
import com.hp.hpl.jena.graph.TransactionHandler;
import com.hp.hpl.jena.graph.Triple;
import com.hp.hpl.jena.graph.TripleMatch;
import com.hp.hpl.jena.graph.impl.GraphWithPerform;
import com.hp.hpl.jena.graph.impl.SimpleBulkUpdateHandler;
import com.hp.hpl.jena.graph.impl.SimpleEventManager;
import com.hp.hpl.jena.graph.query.QueryHandler;
import com.hp.hpl.jena.graph.query.SimpleQueryHandler;
import com.hp.hpl.jena.shared.AddDeniedException;
import com.hp.hpl.jena.shared.DeleteDeniedException;
import com.hp.hpl.jena.shared.PrefixMapping;
import com.hp.hpl.jena.shared.impl.PrefixMappingImpl;
import com.hp.hpl.jena.util.iterator.ExtendedIterator;
import com.hp.hpl.jena.util.iterator.WrappedIterator;
import edu.cornell.mannlib.vitro.webapp.dao.jena.EmptyReifier;
import edu.cornell.mannlib.vitro.webapp.rdfservice.ModelChange;
public class ListeningGraph implements GraphWithPerform {
private static final Log log = LogFactory.getLog(ListeningGraph.class);
private RDFServiceSDB rdfServiceSDB;
private String graphURI;
private BulkUpdateHandler bulkUpdateHandler;
private GraphEventManager eventManager;
private PrefixMapping prefixMapping = new PrefixMappingImpl();
private Reifier reifier = new EmptyReifier(this);
private QueryHandler queryHandler;
public ListeningGraph(String graphURI, RDFServiceSDB rdfServiceSDB) {
this.graphURI = graphURI;
this.rdfServiceSDB = rdfServiceSDB;
}
@Override
public void add(Triple triple) throws AddDeniedException {
performAdd(triple);
}
@Override
public void performAdd(Triple triple) throws AddDeniedException {
this.rdfServiceSDB.notifyListeners(triple, ModelChange.Operation.ADD, graphURI);
}
@Override
public void delete(Triple triple) throws DeleteDeniedException {
performDelete(triple);
}
@Override
public void performDelete(Triple triple) throws DeleteDeniedException {
this.rdfServiceSDB.notifyListeners(triple, ModelChange.Operation.REMOVE, graphURI);
}
@Override
public void close() {
}
@Override
public boolean contains(Triple arg0) {
return contains(arg0.getSubject(), arg0.getPredicate(), arg0.getObject());
}
@Override
public boolean contains(Node subject, Node predicate, Node object) {
return false;
}
@Override
public boolean dependsOn(Graph arg0) {
return false; // who knows?
}
@Override
public ExtendedIterator<Triple> find(TripleMatch arg0) {
Triple t = arg0.asTriple();
return find(t.getSubject(), t.getPredicate(), t.getObject());
}
@Override
public ExtendedIterator<Triple> find(Node subject, Node predicate, Node object) {
List<Triple> triplist = new ArrayList<Triple>();
return WrappedIterator.create(triplist.iterator());
}
@Override
public BulkUpdateHandler getBulkUpdateHandler() {
if (this.bulkUpdateHandler == null) {
this.bulkUpdateHandler = new SimpleBulkUpdateHandler(this);
}
return this.bulkUpdateHandler;
}
@Override
public Capabilities getCapabilities() {
return capabilities;
}
@Override
public GraphEventManager getEventManager() {
if (eventManager == null) {
eventManager = new SimpleEventManager(this);
}
return eventManager;
}
@Override
public PrefixMapping getPrefixMapping() {
return prefixMapping;
}
@Override
public Reifier getReifier() {
return reifier;
}
@Override
public GraphStatisticsHandler getStatisticsHandler() {
return null;
}
@Override
public TransactionHandler getTransactionHandler() {
// TODO Auto-generated method stub
return null;
}
@Override
public boolean isClosed() {
// TODO Auto-generated method stub
return false;
}
@Override
public boolean isEmpty() {
return (size() == 0);
}
@Override
public boolean isIsomorphicWith(Graph arg0) {
throw new UnsupportedOperationException("isIsomorphicWith() not supported " +
"by ListeningGraph");
}
@Override
public QueryHandler queryHandler() {
if (queryHandler == null) {
queryHandler = new SimpleQueryHandler(this);
}
return queryHandler;
}
@Override
public int size() {
int size = find(null, null, null).toList().size();
return size;
}
private final static Capabilities capabilities = new Capabilities() {
public boolean addAllowed() {
return false;
}
public boolean addAllowed(boolean everyTriple) {
return false;
}
public boolean canBeEmpty() {
return true;
}
public boolean deleteAllowed() {
return false;
}
public boolean deleteAllowed(boolean everyTriple) {
return false;
}
public boolean findContractSafe() {
return true;
}
public boolean handlesLiteralTyping() {
return true;
}
public boolean iteratorRemoveAllowed() {
return false;
}
public boolean sizeAccurate() {
return true;
}
};
}
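ListeningGraph holds no triples: find() always returns an empty iterator, and performAdd()/performDelete() only forward the triple to the RDFService's registered listeners. RDFServiceSDB (below) exploits this by re-reading each serialized change into a model backed by a ListeningGraph, turning a model-level change into per-triple notifications. A minimal sketch of that trick (assumes an RDFServiceSDB instance rdfServiceSDB, a graphURI, and an N3 InputStream serializedModel):

Model notifier = ModelFactory.createModelForGraph(
        new ListeningGraph(graphURI, rdfServiceSDB));
notifier.read(serializedModel, null, "N3"); // each parsed triple fires addedStatement()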

View file

@@ -0,0 +1,25 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.rdfservice.impl.sdb;
import org.apache.commons.dbcp.BasicDataSource;
import com.hp.hpl.jena.sdb.StoreDesc;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
public class RDFServiceFactorySDB {
private BasicDataSource bds;
private StoreDesc storeDesc;
public RDFServiceFactorySDB(BasicDataSource dataSource, StoreDesc storeDesc) {
this.bds = dataSource;
this.storeDesc = storeDesc;
}
public RDFService getRDFService() {
return new RDFServiceSDB(bds, storeDesc);
}
}

View file

@@ -0,0 +1,446 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.rdfservice.impl.sdb;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.apache.commons.dbcp.BasicDataSource;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.hp.hpl.jena.graph.Triple;
import com.hp.hpl.jena.query.DataSource;
import com.hp.hpl.jena.query.Dataset;
import com.hp.hpl.jena.query.DatasetFactory;
import com.hp.hpl.jena.query.Query;
import com.hp.hpl.jena.query.QueryExecution;
import com.hp.hpl.jena.query.QueryExecutionFactory;
import com.hp.hpl.jena.query.QueryFactory;
import com.hp.hpl.jena.query.QuerySolution;
import com.hp.hpl.jena.query.ResultSet;
import com.hp.hpl.jena.query.ResultSetFormatter;
import com.hp.hpl.jena.rdf.listeners.StatementListener;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.RDFNode;
import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.rdf.model.Statement;
import com.hp.hpl.jena.rdf.model.StmtIterator;
import com.hp.hpl.jena.sdb.SDBFactory;
import com.hp.hpl.jena.sdb.Store;
import com.hp.hpl.jena.sdb.StoreDesc;
import com.hp.hpl.jena.sdb.sql.SDBConnection;
import com.hp.hpl.jena.shared.Lock;
import com.hp.hpl.jena.vocabulary.OWL;
import edu.cornell.mannlib.vitro.webapp.dao.jena.DatasetWrapper;
import edu.cornell.mannlib.vitro.webapp.dao.jena.SparqlGraph;
import edu.cornell.mannlib.vitro.webapp.rdfservice.ChangeSet;
import edu.cornell.mannlib.vitro.webapp.rdfservice.ModelChange;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException;
import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.RDFServiceImpl;
public class RDFServiceSDB extends RDFServiceImpl implements RDFService {
private final static Log log = LogFactory.getLog(RDFServiceSDB.class);
private BasicDataSource bds;
private StoreDesc storeDesc;
public RDFServiceSDB(BasicDataSource dataSource, StoreDesc storeDesc) {
this.bds = dataSource;
this.storeDesc = storeDesc;
}
protected DatasetWrapper getDatasetWrapper() {
try {
SDBConnection conn = new SDBConnection(bds.getConnection());
return new DatasetWrapper(getDataset(conn), conn);
} catch (SQLException sqle) {
log.error(sqle, sqle);
throw new RuntimeException(sqle);
}
}
protected Dataset getDataset(SDBConnection conn) {
Store store = SDBFactory.connectStore(conn, storeDesc);
store.getLoader().setUseThreading(false);
return SDBFactory.connectDataset(store);
}
@Override
public boolean changeSetUpdate(ChangeSet changeSet)
throws RDFServiceException {
if (changeSet.getPreconditionQuery() != null
&& !isPreconditionSatisfied(
changeSet.getPreconditionQuery(),
changeSet.getPreconditionQueryType())) {
return false;
}
SDBConnection conn = null;
try {
conn = new SDBConnection(bds.getConnection());
} catch (SQLException sqle) {
log.error(sqle, sqle);
throw new RDFServiceException(sqle);
}
Dataset dataset = getDataset(conn);
boolean transaction = conn.getTransactionHandler().transactionsSupported();
try {
if (transaction) {
conn.getTransactionHandler().begin();
}
for (Object o : changeSet.getPreChangeEvents()) {
this.notifyListenersOfEvent(o);
}
Iterator<ModelChange> csIt = changeSet.getModelChanges().iterator();
while (csIt.hasNext()) {
ModelChange modelChange = csIt.next();
modelChange.getSerializedModel().mark(Integer.MAX_VALUE);
dataset.getLock().enterCriticalSection(Lock.WRITE);
try {
Model model = dataset.getNamedModel(modelChange.getGraphURI());
operateOnModel(model, modelChange, dataset);
} finally {
dataset.getLock().leaveCriticalSection();
}
}
if (transaction) {
conn.getTransactionHandler().commit();
}
// notify listeners of triple changes
csIt = changeSet.getModelChanges().iterator();
while (csIt.hasNext()) {
ModelChange modelChange = csIt.next();
modelChange.getSerializedModel().reset();
Model model = ModelFactory.createModelForGraph(
new ListeningGraph(modelChange.getGraphURI(), this));
operateOnModel(model, modelChange, null);
}
for (Object o : changeSet.getPostChangeEvents()) {
this.notifyListenersOfEvent(o);
}
} catch (Exception e) {
log.error(e, e);
if (transaction) {
conn.getTransactionHandler().abort();
}
throw new RDFServiceException(e);
} finally {
conn.close();
}
return true;
}
private void operateOnModel(Model model, ModelChange modelChange, Dataset dataset) {
model.enterCriticalSection(Lock.WRITE);
try {
if (modelChange.getOperation() == ModelChange.Operation.ADD) {
model.read(modelChange.getSerializedModel(), null,
getSerializationFormatString(modelChange.getSerializationFormat()));
} else if (modelChange.getOperation() == ModelChange.Operation.REMOVE) {
model.remove(parseModel(modelChange));
if (dataset != null) {
removeBlankNodesWithSparqlUpdate(dataset, model, modelChange.getGraphURI());
}
} else {
log.error("unrecognized operation type");
}
} finally {
model.leaveCriticalSection();
}
}
private void removeBlankNodesWithSparqlUpdate(Dataset dataset, Model model, String graphURI) {
Model blankNodeModel = ModelFactory.createDefaultModel();
StmtIterator stmtIt = model.listStatements();
while (stmtIt.hasNext()) {
Statement stmt = stmtIt.nextStatement();
if (stmt.getSubject().isAnon() || stmt.getObject().isAnon()) {
blankNodeModel.add(stmt);
}
}
String rootFinder = "SELECT ?s WHERE { ?s ?p ?o OPTIONAL { ?ss ?pp ?s } FILTER (!bound(?ss)) }";
Query rootFinderQuery = QueryFactory.create(rootFinder);
QueryExecution qe = QueryExecutionFactory.create(rootFinderQuery, blankNodeModel);
try {
ResultSet rs = qe.execSelect();
while (rs.hasNext()) {
QuerySolution qs = rs.next();
Resource s = qs.getResource("s");
String treeFinder = makeDescribe(s);
Query treeFinderQuery = QueryFactory.create(treeFinder);
QueryExecution qee = QueryExecutionFactory.create(treeFinderQuery, blankNodeModel);
try {
Model tree = qee.execDescribe();
StmtIterator sit = tree.listStatements(s, null, (RDFNode) null);
while (sit.hasNext()) {
Statement stmt = sit.nextStatement();
RDFNode n = stmt.getObject();
Model m2 = ModelFactory.createDefaultModel();
if (n.isResource()) {
Resource s2 = (Resource) n;
// now run yet another describe query
String smallerTree = makeDescribe(s2);
Query smallerTreeQuery = QueryFactory.create(smallerTree);
QueryExecution qe3 = QueryExecutionFactory.create(
smallerTreeQuery, tree);
try {
qe3.execDescribe(m2);
} finally {
qe3.close();
}
}
m2.add(stmt);
DataSource ds = DatasetFactory.create();
ds.addNamedModel(graphURI, dataset.getNamedModel(graphURI));
removeUsingSparqlUpdate(ds, m2, graphURI);
}
} finally {
qee.close();
}
}
} finally {
qe.close();
}
}
private String makeDescribe(Resource s) {
StringBuffer query = new StringBuffer("DESCRIBE ");
if (s.isAnon()) {
// blank node labels must not be wrapped in angle brackets
query.append("_:" + s.getId().toString());
} else {
query.append("<" + s.getURI() + ">");
}
return query.toString();
}
private void removeUsingSparqlUpdate(Dataset dataset, Model model, String graphURI) {
StringBuffer patternBuff = new StringBuffer();
StmtIterator stmtIt = model.listStatements();
if (!stmtIt.hasNext()) {
stmtIt.close();
return;
}
while(stmtIt.hasNext()) {
Triple t = stmtIt.next().asTriple();
patternBuff.append(SparqlGraph.sparqlNodeDelete(t.getSubject(), null));
patternBuff.append(" ");
patternBuff.append(SparqlGraph.sparqlNodeDelete(t.getPredicate(), null));
patternBuff.append(" ");
patternBuff.append(SparqlGraph.sparqlNodeDelete(t.getObject(), null));
patternBuff.append(" .\n");
}
StringBuffer queryBuff = new StringBuffer();
queryBuff.append("CONSTRUCT { \n");
queryBuff.append(patternBuff);
queryBuff.append("} WHERE { \n");
if (graphURI != null) {
queryBuff.append(" GRAPH <" + graphURI + "> { \n");
}
queryBuff.append(patternBuff);
if (graphURI != null) {
queryBuff.append(" } \n");
}
queryBuff.append("} \n");
//log.debug(queryBuff.toString());
Query construct = QueryFactory.create(queryBuff.toString());
// make a plain dataset to force the query to be run in a way that
// won't overwhelm MySQL with too many joins
DataSource ds = DatasetFactory.create();
ds.addNamedModel(graphURI, (graphURI != null)
? dataset.getNamedModel(graphURI) : dataset.getDefaultModel());
QueryExecution qe = QueryExecutionFactory.create(construct, ds);
try {
Model m = qe.execConstruct();
if (graphURI != null) {
dataset.getNamedModel(graphURI).remove(m);
} else {
dataset.getDefaultModel().remove(m);
}
} finally {
qe.close();
}
}
private Model parseModel(ModelChange modelChange) {
Model model = ModelFactory.createDefaultModel();
model.read(modelChange.getSerializedModel(), null,
getSerializationFormatString(modelChange.getSerializationFormat()));
return model;
}
@Override
public void newIndividual(String individualURI, String individualTypeURI,
String graphURI) throws RDFServiceException {
// TODO Auto-generated method stub
}
private InputStream getRDFResultStream(String query, boolean construct,
ModelSerializationFormat resultFormat) throws RDFServiceException {
DatasetWrapper dw = getDatasetWrapper();
try {
Dataset d = dw.getDataset();
Query q = QueryFactory.create(query);
QueryExecution qe = QueryExecutionFactory.create(q, d);
ByteArrayOutputStream serializedModel = new ByteArrayOutputStream();
try {
// TODO pipe this
Model m = construct ? qe.execConstruct() : qe.execDescribe();
m.write(serializedModel, getSerializationFormatString(resultFormat));
InputStream result = new ByteArrayInputStream(serializedModel.toByteArray());
return result;
} finally {
qe.close();
}
} finally {
dw.close();
}
}
private static final boolean CONSTRUCT = true;
private static final boolean DESCRIBE = false;
@Override
public InputStream sparqlConstructQuery(String query,
ModelSerializationFormat resultFormat) throws RDFServiceException {
return getRDFResultStream(query, CONSTRUCT, resultFormat);
}
@Override
public InputStream sparqlDescribeQuery(String query,
ModelSerializationFormat resultFormat) throws RDFServiceException {
return getRDFResultStream(query, DESCRIBE, resultFormat);
}
@Override
public InputStream sparqlSelectQuery(String query, ResultFormat resultFormat)
throws RDFServiceException {
DatasetWrapper dw = getDatasetWrapper();
try {
Dataset d = dw.getDataset();
Query q = QueryFactory.create(query);
QueryExecution qe = QueryExecutionFactory.create(q, d);
try {
ResultSet resultSet = qe.execSelect();
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
switch (resultFormat) {
case CSV:
ResultSetFormatter.outputAsCSV(outputStream,resultSet);
break;
case TEXT:
ResultSetFormatter.out(outputStream,resultSet);
break;
case JSON:
ResultSetFormatter.outputAsJSON(outputStream, resultSet);
break;
case XML:
ResultSetFormatter.outputAsXML(outputStream, resultSet);
break;
default:
throw new RDFServiceException("unrecognized result format");
}
InputStream result = new ByteArrayInputStream(outputStream.toByteArray());
return result;
} finally {
qe.close();
}
} finally {
dw.close();
}
}
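/*
 * A minimal sketch of consuming sparqlSelectQuery (hypothetical helper;
 * the query string and the choice of JSON output are illustrative
 * assumptions):
 */
private InputStream exampleSelectUsage() throws RDFServiceException {
    // the returned stream holds the serialized result set, ready to be
    // copied to a response or handed to a JSON parser
    return sparqlSelectQuery(
            "SELECT ?s WHERE { ?s ?p ?o } LIMIT 10",
            ResultFormat.JSON);
}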
@Override
public boolean sparqlAskQuery(String query) throws RDFServiceException {
DatasetWrapper dw = getDatasetWrapper();
try {
Dataset d = dw.getDataset();
Query q = QueryFactory.create(query);
QueryExecution qe = QueryExecutionFactory.create(q, d);
try {
return qe.execAsk();
} finally {
qe.close();
}
} finally {
dw.close();
}
}
@Override
public List<String> getGraphURIs() throws RDFServiceException {
DatasetWrapper dw = getDatasetWrapper();
try {
Dataset d = dw.getDataset();
List<String> graphURIs = new ArrayList<String>();
Iterator<String> nameIt = d.listNames();
while (nameIt.hasNext()) {
graphURIs.add(nameIt.next());
}
return graphURIs;
} finally {
dw.close();
}
}
@Override
public void getGraphMetadata() throws RDFServiceException {
// TODO Auto-generated method stub
}
@Override
public void close() {
// nothing
}
private class ModelListener extends StatementListener {
private String graphURI;
private RDFServiceImpl s;
public ModelListener(String graphURI, RDFServiceImpl s) {
this.graphURI = graphURI;
this.s = s;
}
public void addedStatement(Statement stmt) {
s.notifyListeners(stmt.asTriple(), ModelChange.Operation.ADD, graphURI);
}
public void removedStatement(Statement stmt) {
s.notifyListeners(stmt.asTriple(), ModelChange.Operation.REMOVE, graphURI);
}
}
}

View file

@ -0,0 +1,469 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.rdfservice.impl.sparql;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.openrdf.model.Resource;
import org.openrdf.query.MalformedQueryException;
import org.openrdf.query.QueryLanguage;
import org.openrdf.query.Update;
import org.openrdf.query.UpdateExecutionException;
import org.openrdf.repository.Repository;
import org.openrdf.repository.RepositoryConnection;
import org.openrdf.repository.RepositoryException;
import org.openrdf.repository.RepositoryResult;
import org.openrdf.repository.http.HTTPRepository;
import com.hp.hpl.jena.graph.Triple;
import com.hp.hpl.jena.query.Query;
import com.hp.hpl.jena.query.QueryExecution;
import com.hp.hpl.jena.query.QueryExecutionFactory;
import com.hp.hpl.jena.query.QueryFactory;
import com.hp.hpl.jena.query.ResultSet;
import com.hp.hpl.jena.query.ResultSetFormatter;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.Statement;
import com.hp.hpl.jena.rdf.model.StmtIterator;
import edu.cornell.mannlib.vitro.webapp.rdfservice.ChangeListener;
import edu.cornell.mannlib.vitro.webapp.rdfservice.ChangeSet;
import edu.cornell.mannlib.vitro.webapp.rdfservice.ModelChange;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException;
import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.ChangeSetImpl;
import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.RDFServiceImpl;
/*
 * API to write, read, and update Vitro's RDF store, with support
 * for listening, logging, and auditing.
 */
public class RDFServiceSparql extends RDFServiceImpl implements RDFService {
private static final Log log = LogFactory.getLog(RDFServiceSparql.class);
private String endpointURI;
private Repository repository;
/**
 * Returns an RDFService for a remote repository
 * @param endpointURI - URI of the SPARQL endpoint for the knowledge base
 * @param defaultWriteGraphURI - URI of the default write graph within the knowledge base.
 *        This is the graph that will be written to when a graph
 *        is not explicitly specified.
 *
 * The default read graph is the union of all graphs in the
 * knowledge base.
 */
public RDFServiceSparql(String endpointURI, String defaultWriteGraphURI) {
    this.endpointURI = endpointURI;
    // record the default write graph (assumes the field inherited from RDFServiceImpl)
    this.defaultWriteGraphURI = defaultWriteGraphURI;
    this.repository = new HTTPRepository(endpointURI);
}
/**
 * Returns an RDFService for a remote repository
 * @param endpointURI - URI of the SPARQL endpoint for the knowledge base
 *
 * The default read graph is the union of all graphs in the
 * knowledge base.
 */
public RDFServiceSparql(String endpointURI) {
this(endpointURI, null);
}
public void close() {
try {
this.repository.shutDown();
} catch (RepositoryException re) {
log.error(re, re);
}
}
/**
 * Perform a series of additions to and/or removals from specified graphs
 * in the RDF store. preConditionSparql will be executed against the
 * union of all the graphs in the knowledge base before any updates are made.
 * If the precondition query returns a non-empty result, no updates
 * will be made.
 *
 * @param changeSet - a set of changes to be performed on the RDF store
 *
 * @return boolean - indicates whether the precondition was satisfied
 */
@Override
public boolean changeSetUpdate(ChangeSet changeSet) throws RDFServiceException {
if (changeSet.getPreconditionQuery() != null
&& !isPreconditionSatisfied(
changeSet.getPreconditionQuery(),
changeSet.getPreconditionQueryType())) {
return false;
}
Iterator<ModelChange> csIt = changeSet.getModelChanges().iterator();
while (csIt.hasNext()) {
ModelChange modelChange = csIt.next();
if (modelChange.getOperation() == ModelChange.Operation.ADD) {
performAdd(modelChange);
} else if (modelChange.getOperation() == ModelChange.Operation.REMOVE) {
performRemove(modelChange);
} else {
log.error("unrecognized operation type");
}
}
return true;
}
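/*
 * A minimal usage sketch for changeSetUpdate (hypothetical helper; assumes
 * ChangeSet exposes addAddition(InputStream, format, graphURI) as in
 * ChangeSetImpl, and the graph URI below is a placeholder):
 */
private boolean exampleChangeSetUsage(InputStream additionsAsN3)
        throws RDFServiceException {
    ChangeSet cs = manufactureChangeSet();
    // stage one addition, serialized as N3, against an example graph
    cs.addAddition(additionsAsN3,
            RDFService.ModelSerializationFormat.N3,
            "http://example.org/graph/abox");
    // false means the precondition query returned results, so nothing ran
    return changeSetUpdate(cs);
}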
/**
 * Performs a SPARQL construct query against the knowledge base. The query may have
 * an embedded graph identifier.
 *
 * @param queryStr - the SPARQL query to be executed against the RDF store
 * @param resultFormat - type of serialization for the RDF result of the SPARQL query
 *
 * @return InputStream - the result of the query
 */
@Override
public InputStream sparqlConstructQuery(String queryStr,
RDFServiceImpl.ModelSerializationFormat resultFormat) throws RDFServiceException {
Model model = ModelFactory.createDefaultModel();
Query query = QueryFactory.create(queryStr);
QueryExecution qe = QueryExecutionFactory.sparqlService(endpointURI, query);
try {
qe.execConstruct(model);
} finally {
qe.close();
}
ByteArrayOutputStream serializedModel = new ByteArrayOutputStream();
model.write(serializedModel,getSerializationFormatString(resultFormat));
InputStream result = new ByteArrayInputStream(serializedModel.toByteArray());
return result;
}
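/*
 * Sketch of turning a construct result back into a Jena model, mirroring
 * the pattern isPreconditionSatisfied uses below (hypothetical helper; the
 * query string is an illustrative assumption; note the explicit "N3"
 * language argument when reading the stream):
 */
private Model exampleConstructUsage() throws RDFServiceException {
    Model m = ModelFactory.createDefaultModel();
    m.read(sparqlConstructQuery(
            "CONSTRUCT { ?s ?p ?o } WHERE { ?s ?p ?o } LIMIT 10",
            RDFService.ModelSerializationFormat.N3), null, "N3");
    return m;
}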
/**
 * Performs a SPARQL describe query against the knowledge base. The query may have
 * an embedded graph identifier.
 *
 * @param queryStr - the SPARQL query to be executed against the RDF store
 * @param resultFormat - type of serialization for the RDF result of the SPARQL query
 *
 * @return InputStream - the result of the query
 */
@Override
public InputStream sparqlDescribeQuery(String queryStr,
RDFServiceImpl.ModelSerializationFormat resultFormat) throws RDFServiceException {
Model model = ModelFactory.createDefaultModel();
Query query = QueryFactory.create(queryStr);
QueryExecution qe = QueryExecutionFactory.sparqlService(endpointURI, query);
try {
qe.execDescribe(model);
} finally {
qe.close();
}
ByteArrayOutputStream serializedModel = new ByteArrayOutputStream();
model.write(serializedModel,getSerializationFormatString(resultFormat));
InputStream result = new ByteArrayInputStream(serializedModel.toByteArray());
return result;
}
/**
 * Performs a SPARQL select query against the knowledge base. The query may have
 * an embedded graph identifier.
 *
 * @param queryStr - the SPARQL query to be executed against the RDF store
 * @param resultFormat - format for the result of the select query
 *
 * @return InputStream - the result of the query
 */
@Override
public InputStream sparqlSelectQuery(String queryStr, RDFService.ResultFormat resultFormat) throws RDFServiceException {
Query query = QueryFactory.create(queryStr);
QueryExecution qe = QueryExecutionFactory.sparqlService(endpointURI, query);
try {
ResultSet resultSet = qe.execSelect();
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
switch (resultFormat) {
case CSV:
ResultSetFormatter.outputAsCSV(outputStream,resultSet);
break;
case TEXT:
ResultSetFormatter.out(outputStream,resultSet);
break;
case JSON:
ResultSetFormatter.outputAsJSON(outputStream, resultSet);
break;
case XML:
ResultSetFormatter.outputAsXML(outputStream, resultSet);
break;
default:
throw new RDFServiceException("unrecognized result format");
}
InputStream result = new ByteArrayInputStream(outputStream.toByteArray());
return result;
} finally {
qe.close();
}
}
/**
 * Performs a SPARQL ASK query against the knowledge base. The query may have
 * an embedded graph identifier.
 *
 * @param queryStr - the SPARQL query to be executed against the RDF store
 *
 * @return boolean - the result of the SPARQL query
 */
@Override
public boolean sparqlAskQuery(String queryStr) throws RDFServiceException {
Query query = QueryFactory.create(queryStr);
QueryExecution qe = QueryExecutionFactory.sparqlService(endpointURI, query);
try {
return qe.execAsk();
} finally {
qe.close();
}
}
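/*
 * Sketch: ASK is the cheapest existence probe (hypothetical helper; the
 * resource URI is a placeholder):
 */
private boolean exampleAskUsage() throws RDFServiceException {
    return sparqlAskQuery(
            "ASK { <http://example.org/individual/n123> ?p ?o }");
}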
/**
* Get a list of all the graph URIs in the RDF store.
*
* @return List<String> - list of all the graph URIs in the RDF store
*/
//TODO - need to verify that the sesame getContextIDs method is implemented
// in such a way that it works with all triple stores that support the
// graph update API
@Override
public List<String> getGraphURIs() throws RDFServiceException {
List<String> graphNodeList = new ArrayList<String>();
try {
RepositoryConnection conn = getConnection();
try {
RepositoryResult<Resource> conResult = conn.getContextIDs();
while (conResult.hasNext()) {
Resource res = conResult.next();
graphNodeList.add(res.stringValue());
}
} finally {
conn.close();
}
} catch (RepositoryException re) {
throw new RuntimeException(re);
}
return graphNodeList;
}
/**
* TODO - what is the definition of this method?
* @return
*/
@Override
public void getGraphMetadata() throws RDFServiceException {
}
/**
* Get the URI of the default write graph
*
* @return String URI of default write graph
*/
@Override
public String getDefaultWriteGraphURI() throws RDFServiceException {
return defaultWriteGraphURI;
}
/**
* Register a listener to listen to changes in any graph in
* the RDF store.
*
*/
@Override
public synchronized void registerListener(ChangeListener changeListener) throws RDFServiceException {
if (!registeredListeners.contains(changeListener)) {
registeredListeners.add(changeListener);
}
}
/**
* Unregister a listener from listening to changes in any graph
* in the RDF store.
*
*/
@Override
public synchronized void unregisterListener(ChangeListener changeListener) throws RDFServiceException {
registeredListeners.remove(changeListener);
}
/**
* Create a ChangeSet object
*
* @return a ChangeSet object
*/
@Override
public ChangeSet manufactureChangeSet() {
return new ChangeSetImpl();
}
//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// Non-override methods below
//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
protected String getEndpointURI() {
return endpointURI;
}
protected RepositoryConnection getConnection() {
try {
return this.repository.getConnection();
} catch (RepositoryException e) {
throw new RuntimeException(e);
}
}
protected void executeUpdate(String updateString) {
try {
RepositoryConnection conn = getConnection();
try {
Update u = conn.prepareUpdate(QueryLanguage.SPARQL, updateString);
u.execute();
} catch (MalformedQueryException e) {
throw new RuntimeException(e);
} catch (UpdateExecutionException e) {
log.error(e,e);
log.error("Update command: \n" + updateString);
throw new RuntimeException(e);
} finally {
conn.close();
}
} catch (RepositoryException re) {
throw new RuntimeException(re);
}
}
protected void addTriple(Triple t, String graphURI) {
StringBuffer updateString = new StringBuffer();
updateString.append("INSERT DATA { ");
updateString.append((graphURI != null) ? "GRAPH <" + graphURI + "> { " : "" );
updateString.append(sparqlNodeUpdate(t.getSubject(), ""));
updateString.append(" ");
updateString.append(sparqlNodeUpdate(t.getPredicate(), ""));
updateString.append(" ");
updateString.append(sparqlNodeUpdate(t.getObject(), ""));
updateString.append(" }");
updateString.append((graphURI != null) ? " } " : "");
executeUpdate(updateString.toString());
notifyListeners(t, ModelChange.Operation.ADD, graphURI);
}
protected void removeTriple(Triple t, String graphURI) {
StringBuffer updateString = new StringBuffer();
updateString.append("DELETE DATA { ");
updateString.append((graphURI != null) ? "GRAPH <" + graphURI + "> { " : "" );
updateString.append(sparqlNodeUpdate(t.getSubject(), ""));
updateString.append(" ");
updateString.append(sparqlNodeUpdate(t.getPredicate(), ""));
updateString.append(" ");
updateString.append(sparqlNodeUpdate(t.getObject(), ""));
updateString.append(" }");
updateString.append((graphURI != null) ? " } " : "");
executeUpdate(updateString.toString());
notifyListeners(t, ModelChange.Operation.REMOVE, graphURI);
}
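// For illustration, addTriple(t, "http://example.org/g") (graph URI
// hypothetical) issues an update of the shape
//
//   INSERT DATA { GRAPH <http://example.org/g> { <s> <p> <o> } }
//
// and removeTriple the matching DELETE DATA form; with a null graph URI
// the GRAPH wrapper is omitted and the store's default graph is changed.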
@Override
protected boolean isPreconditionSatisfied(String query,
RDFService.SPARQLQueryType queryType)
throws RDFServiceException {
Model model = ModelFactory.createDefaultModel();
switch (queryType) {
case DESCRIBE:
model.read(sparqlDescribeQuery(query,RDFService.ModelSerializationFormat.N3), null, "N3");
return !model.isEmpty();
case CONSTRUCT:
model.read(sparqlConstructQuery(query,RDFService.ModelSerializationFormat.N3), null, "N3");
return !model.isEmpty();
case SELECT:
return sparqlSelectQueryHasResults(query);
case ASK:
return sparqlAskQuery(query);
default:
throw new RDFServiceException("unrecognized SPARQL query type");
}
}
@Override
protected boolean sparqlSelectQueryHasResults(String queryStr) throws RDFServiceException {
Query query = QueryFactory.create(queryStr);
QueryExecution qe = QueryExecutionFactory.sparqlService(endpointURI, query);
try {
ResultSet resultSet = qe.execSelect();
return resultSet.hasNext();
} finally {
qe.close();
}
}
protected void performAdd(ModelChange modelChange) throws RDFServiceException {
Model model = ModelFactory.createDefaultModel();
model.read(modelChange.getSerializedModel(),getSerializationFormatString(modelChange.getSerializationFormat()));
StmtIterator stmtIt = model.listStatements();
while (stmtIt.hasNext()) {
Statement stmt = stmtIt.next();
Triple triple = new Triple(stmt.getSubject().asNode(), stmt.getPredicate().asNode(), stmt.getObject().asNode());
addTriple(triple, modelChange.getGraphURI());
}
}
protected void performRemove(ModelChange modelChange) throws RDFServiceException {
Model model = ModelFactory.createDefaultModel();
model.read(modelChange.getSerializedModel(),getSerializationFormatString(modelChange.getSerializationFormat()));
StmtIterator stmtIt = model.listStatements();
while (stmtIt.hasNext()) {
Statement stmt = stmtIt.next();
Triple triple = new Triple(stmt.getSubject().asNode(), stmt.getPredicate().asNode(), stmt.getObject().asNode());
removeTriple(triple, modelChange.getGraphURI());
}
}
}
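// A minimal wiring sketch (endpoint URI is a placeholder):
//
//   RDFService rdfService = new RDFServiceSparql("http://localhost:8080/sparql");
//   try {
//       List<String> graphs = rdfService.getGraphURIs();
//   } finally {
//       rdfService.close();
//   }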

View file

@ -39,8 +39,13 @@ import com.hp.hpl.jena.vocabulary.OWL;
import com.hp.hpl.jena.vocabulary.RDF;
import com.hp.hpl.jena.vocabulary.RDFS;
import edu.cornell.mannlib.vitro.webapp.dao.jena.ABoxJenaChangeListener;
import edu.cornell.mannlib.vitro.webapp.dao.jena.CumulativeDeltaModeler;
import edu.cornell.mannlib.vitro.webapp.dao.jena.DifferenceGraph;
import edu.cornell.mannlib.vitro.webapp.dao.jena.RDFServiceGraph;
import edu.cornell.mannlib.vitro.webapp.dao.jena.event.BulkUpdateEvent;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException;
/**
* Allows for real-time incremental materialization or retraction of RDFS-
@ -53,6 +58,7 @@ public class SimpleReasoner extends StatementListener {
private static final Log log = LogFactory.getLog(SimpleReasoner.class);
private RDFService rdfService;
private OntModel tboxModel; // asserted and inferred TBox axioms
private OntModel aboxModel; // ABox assertions
private Model inferenceModel; // ABox inferences
@ -79,10 +85,13 @@ public class SimpleReasoner extends StatementListener {
* @param inferenceRebuildModel - output. This is the model temporarily used when the whole ABox inference model is rebuilt
* @param inferenceScratchpadModel - output. This is the model temporarily used when the whole ABox inference model is rebuilt
*/
public SimpleReasoner(OntModel tboxModel, OntModel aboxModel, Model inferenceModel,
public SimpleReasoner(OntModel tboxModel, RDFService rdfService, Model inferenceModel,
Model inferenceRebuildModel, Model scratchpadModel) {
this.rdfService = rdfService;
this.tboxModel = tboxModel;
this.aboxModel = aboxModel;
this.aboxModel = ModelFactory.createOntologyModel(
OntModelSpec.OWL_MEM, ModelFactory.createModelForGraph(
new DifferenceGraph(new RDFServiceGraph(rdfService), inferenceModel.getGraph())));
this.inferenceModel = inferenceModel;
this.inferenceRebuildModel = inferenceRebuildModel;
this.scratchpadModel = scratchpadModel;
@ -91,8 +100,16 @@ public class SimpleReasoner extends StatementListener {
aBoxDeltaModeler1 = new CumulativeDeltaModeler();
aBoxDeltaModeler2 = new CumulativeDeltaModeler();
stopRequested = false;
aboxModel.getBaseModel().register(this);
if (rdfService == null) {
aboxModel.register(this);
} else {
try {
rdfService.registerListener(new ABoxJenaChangeListener(this));
} catch (RDFServiceException e) {
throw new RuntimeException("Unable to register change listener", e);
}
}
}
/**
@ -174,8 +191,7 @@ public class SimpleReasoner extends StatementListener {
* Synchronized part of removedStatement. Interacts
* with DeltaComputer.
*/
protected synchronized void handleRemovedStatement(Statement stmt) {
if (batchMode1) {
aBoxDeltaModeler1.removedStatement(stmt);
} else if (batchMode2) {
@ -443,6 +459,7 @@ public class SimpleReasoner extends StatementListener {
Iterator<OntClass> parentIt = parents.iterator();
while (parentIt.hasNext()) {
OntClass parentClass = parentIt.next();
// VIVO doesn't materialize statements that assert anonymous types
@ -451,7 +468,9 @@ public class SimpleReasoner extends StatementListener {
// of classes not individuals.
if (parentClass.isAnon()) continue;
if (entailedType(stmt.getSubject(),parentClass)) {
continue; // if a type is still entailed without the
}
// removed statement, then don't remove it
// from the inferences
@ -900,7 +919,6 @@ public class SimpleReasoner extends StatementListener {
List<OntClass> subclasses = null;
subclasses = (cls.listSubClasses(false)).toList();
subclasses.addAll((cls.listEquivalentClasses()).toList());
Iterator<OntClass> iter = subclasses.iterator();
while (iter.hasNext()) {
OntClass childClass = iter.next();
@ -908,8 +926,9 @@ public class SimpleReasoner extends StatementListener {
Iterator<Resource> sameIter = sameIndividuals.iterator();
while (sameIter.hasNext()) {
Statement stmt = ResourceFactory.createStatement(sameIter.next(), RDF.type, childClass);
if (aboxModel.contains(stmt)) {
return true;
}
}
}
return false;
@ -1678,7 +1697,7 @@ public class SimpleReasoner extends StatementListener {
@Override
public synchronized void notifyEvent(Model model, Object event) {
if (event instanceof BulkUpdateEvent) {
if (((BulkUpdateEvent) event).getBegin()) {

View file

@ -19,245 +19,246 @@ import javax.servlet.ServletContextListener;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.hp.hpl.jena.graph.Node;
import com.hp.hpl.jena.ontology.OntDocumentManager;
import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.ontology.OntModelSpec;
import com.hp.hpl.jena.query.Dataset;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.sdb.SDBFactory;
import com.hp.hpl.jena.sdb.Store;
import com.hp.hpl.jena.sdb.util.StoreUtils;
import edu.cornell.mannlib.vitro.webapp.dao.jena.ModelContext;
import edu.cornell.mannlib.vitro.webapp.dao.jena.OntModelSelector;
import edu.cornell.mannlib.vitro.webapp.dao.jena.SparqlGraph;
// This ContextListener must run after the JenaDataSourceSetup ContextListener
public class FileGraphSetup implements ServletContextListener {
private static final String ABOX = "abox";
private static final String TBOX = "tbox";
private static final String PATH_ROOT = "/WEB-INF/filegraph/";
public static final String FILEGRAPH_URI_ROOT = "http://vitro.mannlib.cornell.edu/filegraph/";
private static final Log log = LogFactory.getLog(FileGraphSetup.class);
public void contextInitialized(ServletContextEvent sce) {
boolean aboxChanged = false; // indicates whether any ABox file graph model has changed
boolean tboxChanged = false; // indicates whether any TBox file graph model has changed
OntModelSelector baseOms = null;
try {
OntDocumentManager.getInstance().setProcessImports(true);
baseOms = ModelContext.getBaseOntModelSelector(sce.getServletContext());
Store kbStore = (Store) sce.getServletContext().getAttribute("kbStore");
// ABox files
Set<String> pathSet = sce.getServletContext().getResourcePaths(PATH_ROOT + ABOX);
cleanupDB(kbStore, pathToURI(pathSet, ABOX), ABOX);
if (pathSet != null) {
OntModel aboxBaseModel = baseOms.getABoxModel();
aboxChanged = readGraphs(sce, pathSet, kbStore, ABOX, aboxBaseModel);
}
// TBox files
pathSet = sce.getServletContext().getResourcePaths(PATH_ROOT + TBOX);
cleanupDB(kbStore, pathToURI(pathSet, TBOX),TBOX);
if (pathSet != null) {
OntModel tboxBaseModel = baseOms.getTBoxModel();
tboxChanged = readGraphs(sce, pathSet, kbStore, TBOX, tboxBaseModel);
}
} catch (ClassCastException cce) {
String errMsg = "Unable to cast servlet context attribute to the appropriate type " + cce.getLocalizedMessage();
log.error(errMsg);
throw new ClassCastException(errMsg);
} catch (Throwable t) {
System.out.println("Throwable in listener " + this.getClass().getName());
log.error(t);
t.printStackTrace();
} finally {
OntDocumentManager.getInstance().setProcessImports(false);
}
if (isUpdateRequired(sce.getServletContext())) {
log.info("mostSpecificType will be computed because a knowledge base migration was performed." );
private static final String ABOX = "abox";
private static final String TBOX = "tbox";
private static final String PATH_ROOT = "/WEB-INF/filegraph/";
public static final String FILEGRAPH_URI_ROOT = "http://vitro.mannlib.cornell.edu/filegraph/";
private static final Log log = LogFactory.getLog(FileGraphSetup.class);
public void contextInitialized(ServletContextEvent sce) {
boolean aboxChanged = false; // indicates whether any ABox file graph model has changed
boolean tboxChanged = false; // indicates whether any TBox file graph model has changed
OntModelSelector baseOms = null;
try {
OntDocumentManager.getInstance().setProcessImports(true);
baseOms = ModelContext.getBaseOntModelSelector(sce.getServletContext());
Dataset dataset = JenaDataSourceSetupBase.getStartupDataset(sce.getServletContext());
// ABox files
Set<String> pathSet = sce.getServletContext().getResourcePaths(PATH_ROOT + ABOX);
cleanupDB(dataset, pathToURI(pathSet, ABOX), ABOX);
if (pathSet != null) {
OntModel aboxBaseModel = baseOms.getABoxModel();
aboxChanged = readGraphs(sce, pathSet, dataset, ABOX, aboxBaseModel);
}
// TBox files
pathSet = sce.getServletContext().getResourcePaths(PATH_ROOT + TBOX);
cleanupDB(dataset, pathToURI(pathSet, TBOX),TBOX);
if (pathSet != null) {
OntModel tboxBaseModel = baseOms.getTBoxModel();
tboxChanged = readGraphs(sce, pathSet, dataset, TBOX, tboxBaseModel);
}
} catch (ClassCastException cce) {
String errMsg = "Unable to cast servlet context attribute to the appropriate type " + cce.getLocalizedMessage();
log.error(errMsg);
throw new ClassCastException(errMsg);
} catch (Throwable t) {
log.error(t, t);
} finally {
OntDocumentManager.getInstance().setProcessImports(false);
}
if (isUpdateRequired(sce.getServletContext())) {
log.info("mostSpecificType will be computed because a knowledge base migration was performed." );
SimpleReasonerSetup.setMSTComputeRequired(sce.getServletContext());
} else if (aboxChanged || tboxChanged) {
log.info("a full recompute of the Abox will be performed because" +
" the filegraph abox(s) and/or tbox(s) have changed or are being read for the first time." );
SimpleReasonerSetup.setRecomputeRequired(sce.getServletContext());
}
}
/*
* Reads the graphs stored as files in sub-directories of PATH_ROOT and, for each graph:
* 1. updates the SDB store to reflect the current contents of the graph.
* 2. adds the graph as an in-memory submodel of the base in-memory graph
*
* Note: no connection needs to be maintained between the in-memory copy of the
* graph and the DB copy.
*/
public boolean readGraphs(ServletContextEvent sce, Set<String> pathSet, Store kbStore, String type, OntModel baseModel) {
int count = 0;
boolean modelChanged = false;
// For each file graph in the target directory update or add that graph to
// the Jena SDB, and attach the graph as a submodel of the base model
for ( String p : pathSet ) {
count++; // note this will count the empty files too
File file = new File(sce.getServletContext().getRealPath(p));
try {
FileInputStream fis = new FileInputStream( file );
try {
OntModel model = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM);
if ( p.endsWith(".n3") || p.endsWith(".N3") || p.endsWith(".ttl") || p.endsWith(".TTL") ) {
model.read( fis, null, "N3" );
} else if ( p.endsWith(".owl") || p.endsWith(".OWL") || p.endsWith(".rdf") || p.endsWith(".RDF") || p.endsWith(".xml") || p.endsWith(".XML") ) {
model.read( fis, null, "RDF/XML" );
} else {
log.warn("Ignoring " + type + " file graph " + p + " because the file extension is unrecognized.");
}
if ( !model.isEmpty() ) {
baseModel.addSubModel(model);
log.info("Attached file graph as " + type + " submodel " + p);
}
modelChanged = modelChanged | updateGraphInDB(kbStore, model, type, p);
} catch (Exception ioe) {
log.error("Unable to process file graph " + p, ioe);
System.out.println("Unable to process file graph " + p);
ioe.printStackTrace();
} finally {
fis.close();
}
} catch (FileNotFoundException fnfe) {
log.warn(p + " not found. Unable to process file graph" +
((fnfe.getLocalizedMessage() != null) ?
fnfe.getLocalizedMessage() : "") );
} catch (IOException ioe) {
// this is for the fis.close() above.
log.warn("Exception while trying to close file graph file: " + p,ioe);
}
} // end - for
System.out.println("Read " + count + " " + type + " file graph" + ((count == 1) ? "" : "s") + " from " + PATH_ROOT + type);
return modelChanged;
}
/*
* Reads the graphs stored as files in sub-directories of PATH_ROOT and, for each graph:
* 1. updates the SDB store to reflect the current contents of the graph.
* 2. adds the graph as an in-memory submodel of the base in-memory graph
*
* Note: no connection needs to be maintained between the in-memory copy of the
* graph and the DB copy.
*/
public boolean readGraphs(ServletContextEvent sce, Set<String> pathSet, Dataset dataset, String type, OntModel baseModel) {
int count = 0;
boolean modelChanged = false;
// For each file graph in the target directory update or add that graph to
// the Jena SDB, and attach the graph as a submodel of the base model
for ( String p : pathSet ) {
count++; // note this will count the empty files too
File file = new File(sce.getServletContext().getRealPath(p));
try {
FileInputStream fis = new FileInputStream( file );
try {
OntModel model = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM);
if ( p.endsWith(".n3") || p.endsWith(".N3") || p.endsWith(".ttl") || p.endsWith(".TTL") ) {
model.read( fis, null, "N3" );
} else if ( p.endsWith(".owl") || p.endsWith(".OWL") || p.endsWith(".rdf") || p.endsWith(".RDF") || p.endsWith(".xml") || p.endsWith(".XML") ) {
model.read( fis, null, "RDF/XML" );
} else {
log.warn("Ignoring " + type + " file graph " + p + " because the file extension is unrecognized.");
}
if ( !model.isEmpty() ) {
baseModel.addSubModel(model);
log.info("Attached file graph as " + type + " submodel " + p);
}
modelChanged = modelChanged | updateGraphInDB(dataset, model, type, p);
} catch (Exception ioe) {
log.error("Unable to process file graph " + p, ioe);
System.out.println("Unable to process file graph " + p);
ioe.printStackTrace();
} finally {
fis.close();
}
} catch (FileNotFoundException fnfe) {
log.warn(p + " not found. Unable to process file graph" +
((fnfe.getLocalizedMessage() != null) ?
fnfe.getLocalizedMessage() : "") );
} catch (IOException ioe) {
// this is for the fis.close() above.
log.warn("Exception while trying to close file graph file: " + p,ioe);
}
} // end - for
System.out.println("Read " + count + " " + type + " file graph" + ((count == 1) ? "" : "s") + " from " + PATH_ROOT + type);
return modelChanged;
}
/*
* If a graph with the given name doesn't exist in the DB then add it.
*
* Otherwise, if a graph with the given name is in the DB and is not isomorphic with
* the graph that was read from the file system then replace the graph
* in the DB with the one read from the file system.
*
* Otherwise, if a graph with the given name is in the DB and is isomorphic with
* the graph that was read from the file system, then do nothing.
*/
public boolean updateGraphInDB(Store kbStore, Model fileModel, String type, String path) {
String graphURI = pathToURI(path,type);
Model dbModel = SDBFactory.connectNamedModel(kbStore, graphURI);
boolean modelChanged = false;
if (dbModel.isEmpty() ) {
dbModel.add(fileModel);
modelChanged = true;
} else if (!dbModel.isIsomorphicWith(fileModel)) {
dbModel.removeAll();
dbModel.add(fileModel);
modelChanged = true;
}
return modelChanged;
}
/*
* Deletes any file graphs that are no longer present in the file system
* from the DB.
*
* @param uriSet (input) - a set of graph URIs representing the file
* graphs (of the given type) in the file
* system.
* @param type (input) - abox or tbox.
* @param kbStore (output) - the SDB store for the application
*/
public void cleanupDB(Store kbStore, Set<String> uriSet, String type) {
Pattern graphURIPat = Pattern.compile("^" + FILEGRAPH_URI_ROOT + type);
Iterator<Node> iter = StoreUtils.storeGraphNames(kbStore);
while (iter.hasNext()) {
Node node = iter.next();
Matcher matcher = graphURIPat.matcher(node.getURI());
/*
 * If a graph with the given name doesn't exist in the DB then add it.
 *
 * Otherwise, if a graph with the given name is in the DB and is not isomorphic with
 * the graph that was read from the file system then replace the graph
 * in the DB with the one read from the file system.
 *
 * Otherwise, if a graph with the given name is in the DB and is isomorphic with
 * the graph that was read from the file system, then do nothing.
 */
public boolean updateGraphInDB(Dataset dataset, Model fileModel, String type, String path) {
String graphURI = pathToURI(path,type);
Model dbModel = dataset.getNamedModel(graphURI);
boolean modelChanged = false;
if (dbModel.isEmpty()) {
    dbModel.add(fileModel);
    modelChanged = true;
} else if (!dbModel.isIsomorphicWith(fileModel)) {
    // only pay for the isomorphism check when the DB graph is nonempty
    log.info("Updating " + path + " because graphs are not isomorphic");
    log.info("dbModel: " + dbModel.size() + " ; fileModel: " + fileModel.size());
    dbModel.removeAll();
    dbModel.add(fileModel);
    modelChanged = true;
}
return modelChanged;
}
/*
* Deletes any file graphs that are no longer present in the file system
* from the DB.
*
* @param uriSet (input) - a set of graph URIs representing the file
* graphs (of the given type) in the file
* system.
* @param type (input) - abox or tbox.
* @param dataset (output) - the dataset for the application
*/
public void cleanupDB(Dataset dataset, Set<String> uriSet, String type) {
Pattern graphURIPat = Pattern.compile("^" + FILEGRAPH_URI_ROOT + type);
Iterator<String> iter = dataset.listNames();
while (iter.hasNext()) {
String graphURI = iter.next();
Matcher matcher = graphURIPat.matcher(graphURI);
if (matcher.find()) {
if (!uriSet.contains(graphURI)) {
Model model = dataset.getNamedModel(graphURI);
model.removeAll(); // delete the graph from the DB
log.info("Removed " + type + " file graph " + graphURI + " from the DB store because the file no longer exists in the file system");
}
}
}
return;
}
/*
* Takes a set of path names for file graphs and returns a set containing
* a graph uri for each path name in the input set. If pathSet is null
* returns an empty set.
*/
public Set<String> pathToURI (Set<String> pathSet, String type) {
HashSet<String> uriSet = new HashSet<String>();
if (pathSet != null) {
for ( String path : pathSet ) {
uriSet.add(pathToURI(path,type));
}
}
return uriSet;
}
/*
* Takes a path name for a file graph and returns the corresponding SDB URI
* for the graph. The correspondence is by defined convention.
*/
public String pathToURI(String path, String type) {
String uri = null;
if (path != null) {
File file = new File(path);
uri = FILEGRAPH_URI_ROOT + type + "/" + file.getName();
}
return uri;
}
public void contextDestroyed( ServletContextEvent sce ) {
// nothing to do
}
private static boolean isUpdateRequired(ServletContext ctx) {
return (ctx.getAttribute(UpdateKnowledgeBase.KBM_REQURIED_AT_STARTUP) != null);
}
}
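// Worked example of the path-to-URI convention (file name hypothetical):
//
//   pathToURI("/WEB-INF/filegraph/tbox/vivo-core.owl", "tbox")
//       -> "http://vitro.mannlib.cornell.edu/filegraph/tbox/vivo-core.owl"
//
// Only the file name survives the mapping; intermediate directories are dropped.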

View file

@ -2,9 +2,9 @@
package edu.cornell.mannlib.vitro.webapp.servlet.setup;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.sql.SQLException;
import java.util.Set;
@ -18,6 +18,7 @@ import org.apache.commons.logging.LogFactory;
import com.hp.hpl.jena.graph.Graph;
import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.ontology.OntModelSpec;
import com.hp.hpl.jena.query.Dataset;
import com.hp.hpl.jena.query.Query;
import com.hp.hpl.jena.query.QueryExecution;
import com.hp.hpl.jena.query.QueryExecutionFactory;
@ -25,6 +26,7 @@ import com.hp.hpl.jena.query.QueryFactory;
import com.hp.hpl.jena.query.Syntax;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.ModelMaker;
import com.hp.hpl.jena.sdb.Store;
import com.hp.hpl.jena.sdb.StoreDesc;
import com.hp.hpl.jena.sdb.store.DatabaseType;
@ -73,7 +75,6 @@ public class JenaDataSourceSetupBase extends JenaBaseDaoCon {
public static String APPPATH = BASE+"app/";
//these files are loaded every time the system starts up
public static String APPPATH_LOAD = APPPATH + "menuload/";
protected static String SUBMODELS = "/WEB-INF/submodels/";
//All files in this directory will be reloaded every startup
//and attached as sub-models to the displayOntModel.
@ -404,7 +405,7 @@ public class JenaDataSourceSetupBase extends JenaBaseDaoCon {
}
private static VitroJenaModelMaker vjmm = null;
private static VitroJenaSDBModelMaker vsmm = null;
private static ModelMaker vsmm = null;
private static VitroModelSource vms = null;
private static final String sdbModelMaker = "vitroJenaSDBModelMaker";
private static final String rdbModelMaker = "vitroJenaModelMaker";
@ -428,7 +429,7 @@ public class JenaDataSourceSetupBase extends JenaBaseDaoCon {
} else if (TripleStoreType.SDB.equals(type)) {
StoreDesc storeDesc = new StoreDesc(
LayoutType.LayoutTripleNodesHash, DatabaseType.fetch(dbtypeStr));
BasicDataSource bds = WebappDaoSDBSetup.makeBasicDataSource(
BasicDataSource bds = WebappDaoSetup.makeBasicDataSource(
getDbDriverClassName(ctx), jdbcUrl, username, password, ctx);
bds.setMaxActive(4); // for now, the SDB model makers should not use more
// than a small handful of connections
@ -485,7 +486,7 @@ public class JenaDataSourceSetupBase extends JenaBaseDaoCon {
ctx.setAttribute(rdbModelMaker, vjmm);
}
public static void setVitroJenaSDBModelMaker(VitroJenaSDBModelMaker vsmm,
public static void setVitroJenaSDBModelMaker(ModelMaker vsmm,
ServletContext ctx){
ctx.setAttribute(sdbModelMaker, vsmm);
}
@ -510,7 +511,7 @@ public class JenaDataSourceSetupBase extends JenaBaseDaoCon {
return vjmm;
}
protected VitroJenaSDBModelMaker getVitroJenaSDBModelMaker(){
protected ModelMaker getVitroJenaSDBModelMaker(){
return vsmm;
}
@ -533,6 +534,15 @@ public class JenaDataSourceSetupBase extends JenaBaseDaoCon {
return ConfigurationProperties.getBean(ctx).getProperty(
"VitroConnection.DataSource.validationQuery", "SELECT 1");
}
public static void setStartupDataset(Dataset dataset, ServletContext ctx) {
ctx.setAttribute("startupDataset", dataset);
}
public static Dataset getStartupDataset(ServletContext ctx) {
Object o = ctx.getAttribute("startupDataset");
return (o instanceof Dataset) ? ((Dataset) o) : null;
}
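// Sketch of the intended handoff (the listener ordering is an assumption):
// an earlier ServletContextListener publishes the dataset,
//
//   JenaDataSourceSetupBase.setStartupDataset(dataset, ctx);
//
// and later listeners such as FileGraphSetup retrieve it with
//
//   Dataset d = JenaDataSourceSetupBase.getStartupDataset(sce.getServletContext());
//
// getStartupDataset returns null when nothing was published, so callers
// must tolerate a missing dataset.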
protected OntModel ontModelFromContextAttribute(ServletContext ctx,
String attribute) {

View file

@ -1,239 +0,0 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.servlet.setup;
import static edu.cornell.mannlib.vitro.webapp.dao.DisplayVocabulary.DISPLAY_ONT_MODEL;
import javax.servlet.ServletContext;
import javax.servlet.ServletContextEvent;
import org.apache.commons.dbcp.BasicDataSource;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.ontology.OntModelSpec;
import com.hp.hpl.jena.query.Query;
import com.hp.hpl.jena.query.QueryExecution;
import com.hp.hpl.jena.query.QueryExecutionFactory;
import com.hp.hpl.jena.query.QueryFactory;
import com.hp.hpl.jena.query.Syntax;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import edu.cornell.mannlib.vitro.webapp.dao.VitroVocabulary;
import edu.cornell.mannlib.vitro.webapp.dao.jena.ModelContext;
import edu.cornell.mannlib.vitro.webapp.dao.jena.ModelSynchronizer;
import edu.cornell.mannlib.vitro.webapp.dao.jena.OntModelSelectorImpl;
import edu.cornell.mannlib.vitro.webapp.servlet.setup.JenaDataSourceSetupBase.TripleStoreType;
import edu.cornell.mannlib.vitro.webapp.startup.StartupStatus;
import edu.cornell.mannlib.vitro.webapp.utils.jena.InitialJenaModelUtils;
/**
* Setup the ABox, TBox, inference and Union models.
* Also setup the OntModelSelectors.
*/
public class ModelSetup extends JenaDataSourceSetupBase
implements javax.servlet.ServletContextListener {
private static final Log log = LogFactory.getLog(ModelSetup.class);
@Override
public void contextInitialized(ServletContextEvent sce) {
ServletContext ctx = sce.getServletContext();
StartupStatus ss = StartupStatus.getBean(ctx);
BasicDataSource bds = getApplicationDataSource(ctx);
if( bds == null ){
ss.fatal(this, "A DataSource must be setup before ModelSetup "+
"is run. Make sure that JenaPersistentDataSourceSetup runs before "+
"ModelSetup.");
return;
}
setupModels(ctx,ss,bds);
}
private void setupModels(ServletContext ctx, StartupStatus ss, BasicDataSource bds){
log.info("Setting up model makers and union models");
///////////////////////////////////////////////////////////////
//set up the OntModelSelectors
OntModelSelectorImpl baseOms = new OntModelSelectorImpl();
OntModelSelectorImpl inferenceOms = new OntModelSelectorImpl();
OntModelSelectorImpl unionOms = new OntModelSelectorImpl();
//Put OntModelSelectorImpl objs into the context
ModelContext.setOntModelSelector(unionOms, ctx);
ModelContext.setUnionOntModelSelector(unionOms, ctx);
// assertions and inferences
ModelContext.setBaseOntModelSelector(baseOms, ctx);
// assertions
ModelContext.setInferenceOntModelSelector(inferenceOms, ctx);
// inferences
//add userAccountsModel to OntModelSelectors
OntModel userAccountsModel = ontModelFromContextAttribute(
ctx, "userAccountsOntModel");
baseOms.setUserAccountsModel(userAccountsModel);
inferenceOms.setUserAccountsModel(userAccountsModel);
unionOms.setUserAccountsModel(userAccountsModel);
//add display to OntModelSelectors
OntModel displayModel = ontModelFromContextAttribute(
ctx,DISPLAY_ONT_MODEL);
baseOms.setDisplayModel(displayModel);
inferenceOms.setDisplayModel(displayModel);
unionOms.setDisplayModel(displayModel);
// The code below, which sets up the OntModelSelectors, controls whether
// each model is maintained in memory, in the DB, or both while the
// application is running.
// Populate the three OntModelSelectors (BaseOntModel = assertions,
// InferenceOntModel = inferences and JenaOntModel = union of assertions
// and inferences) with the post-SDB-conversion models.
// ABox assertions
Model aboxAssertions = makeDBModel(
bds, JenaDataSourceSetupBase.JENA_DB_MODEL, DB_ONT_MODEL_SPEC,
TripleStoreType.SDB, ctx);
Model listenableAboxAssertions = ModelFactory.createUnion(
aboxAssertions, ModelFactory.createDefaultModel());
baseOms.setABoxModel(
ModelFactory.createOntologyModel(
OntModelSpec.OWL_MEM, listenableAboxAssertions));
// ABox inferences
Model aboxInferences = makeDBModel(
bds, JenaDataSourceSetupBase.JENA_INF_MODEL, DB_ONT_MODEL_SPEC,
TripleStoreType.SDB, ctx);
Model listenableAboxInferences = ModelFactory.createUnion(
aboxInferences, ModelFactory.createDefaultModel());
inferenceOms.setABoxModel(ModelFactory.createOntologyModel(
OntModelSpec.OWL_MEM, listenableAboxInferences));
// Since the TBox models are in memory, they do not have timeout issues
// like the ABox models do (and so don't need the extra step
// to make them listenable.)
// TBox assertions
try {
Model tboxAssertionsDB = makeDBModel(
bds, JENA_TBOX_ASSERTIONS_MODEL, DB_ONT_MODEL_SPEC,
TripleStoreType.SDB, ctx);
OntModel tboxAssertions = ModelFactory.createOntologyModel(
MEM_ONT_MODEL_SPEC);
if (tboxAssertionsDB != null) {
long startTime = System.currentTimeMillis();
System.out.println(
"Copying cached tbox assertions into memory");
tboxAssertions.add(tboxAssertionsDB);
System.out.println((System.currentTimeMillis() - startTime)
/ 1000 + " seconds to load tbox assertions");
}
tboxAssertions.getBaseModel().register(new ModelSynchronizer(
tboxAssertionsDB));
baseOms.setTBoxModel(tboxAssertions);
} catch (Throwable e) {
log.error("Unable to load tbox assertion cache from DB", e);
}
// TBox inferences
try {
Model tboxInferencesDB = makeDBModel(
bds, JENA_TBOX_INF_MODEL, DB_ONT_MODEL_SPEC,
TripleStoreType.SDB, ctx);
OntModel tboxInferences = ModelFactory.createOntologyModel(
MEM_ONT_MODEL_SPEC);
if (tboxInferencesDB != null) {
long startTime = System.currentTimeMillis();
System.out.println(
"Copying cached tbox inferences into memory");
tboxInferences.add(tboxInferencesDB);
System.out.println((System.currentTimeMillis() - startTime)
/ 1000 + " seconds to load tbox inferences");
}
tboxInferences.getBaseModel().register(new ModelSynchronizer(
tboxInferencesDB));
inferenceOms.setTBoxModel(tboxInferences);
} catch (Throwable e) {
log.error("Unable to load tbox inference cache from DB", e);
}
// union ABox
OntModel unionABoxModel = ModelFactory.createOntologyModel(
MEM_ONT_MODEL_SPEC,ModelFactory.createUnion(
baseOms.getABoxModel(), inferenceOms.getABoxModel()));
unionOms.setABoxModel(unionABoxModel);
// union TBox
OntModel unionTBoxModel = ModelFactory.createOntologyModel(
MEM_ONT_MODEL_SPEC,ModelFactory.createUnion(
baseOms.getTBoxModel(), inferenceOms.getTBoxModel()));
unionOms.setTBoxModel(unionTBoxModel);
// Application metadata model is cached in memory.
try {
Model applicationMetadataModelDB = makeDBModel(
bds, JENA_APPLICATION_METADATA_MODEL, DB_ONT_MODEL_SPEC,
TripleStoreType.SDB, ctx);
OntModel applicationMetadataModel =
ModelFactory.createOntologyModel(MEM_ONT_MODEL_SPEC);
long startTime = System.currentTimeMillis();
System.out.println(
"Copying cached application metadata model into memory");
applicationMetadataModel.add(applicationMetadataModelDB);
System.out.println((System.currentTimeMillis() - startTime)
/ 1000 + " seconds to load application metadata model " +
"assertions of size " + applicationMetadataModel.size());
applicationMetadataModel.getBaseModel().register(
new ModelSynchronizer(applicationMetadataModelDB));
if (isFirstStartup()) {
applicationMetadataModel.add(
InitialJenaModelUtils.loadInitialModel(
ctx, getDefaultNamespace(ctx)));
} else if (applicationMetadataModelDB.size() == 0) {
repairAppMetadataModel(
applicationMetadataModel, aboxAssertions,
aboxInferences);
}
baseOms.setApplicationMetadataModel(applicationMetadataModel);
inferenceOms.setApplicationMetadataModel(
baseOms.getApplicationMetadataModel());
unionOms.setApplicationMetadataModel(
baseOms.getApplicationMetadataModel());
} catch (Throwable e) {
log.error("Unable to load application metadata model cache from DB"
, e);
}
// create TBox + ABox union models
OntModel baseUnion = ModelFactory.createOntologyModel(
OntModelSpec.OWL_MEM,
ModelFactory.createUnion(baseOms.getABoxModel(),
baseOms.getTBoxModel()));
baseOms.setFullModel(baseUnion);
ModelContext.setBaseOntModel(baseOms.getFullModel(), ctx);
log.info("Model makers and union set up");
}
@Override
public void contextDestroyed(ServletContextEvent arg0) {
// nothing to do.
}
}

View file

@ -2,10 +2,7 @@
package edu.cornell.mannlib.vitro.webapp.servlet.setup;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.servlet.ServletContext;
import javax.servlet.ServletContextEvent;

View file

@ -0,0 +1,139 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.servlet.setup;
import java.sql.SQLException;
import javax.servlet.ServletContext;
import javax.servlet.ServletContextEvent;
import org.apache.commons.dbcp.BasicDataSource;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.hp.hpl.jena.sdb.SDB;
import com.hp.hpl.jena.sdb.SDBFactory;
import com.hp.hpl.jena.sdb.Store;
import com.hp.hpl.jena.sdb.StoreDesc;
import com.hp.hpl.jena.sdb.sql.SDBConnection;
import com.hp.hpl.jena.sdb.store.DatabaseType;
import com.hp.hpl.jena.sdb.store.LayoutType;
import com.hp.hpl.jena.sdb.util.StoreUtils;
import edu.cornell.mannlib.vitro.webapp.config.ConfigurationProperties;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceFactory;
import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.RDFServiceFactorySingle;
import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.RDFServiceUtils;
import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.sdb.RDFServiceSDB;
import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.sparql.RDFServiceSparql;
import edu.cornell.mannlib.vitro.webapp.startup.StartupStatus;
public class RDFServiceSetup extends JenaDataSourceSetupBase
implements javax.servlet.ServletContextListener {
private static final Log log = LogFactory.getLog(RDFServiceSetup.class);
@Override
public void contextDestroyed(ServletContextEvent arg0) {
// nothing to do
}
@Override
public void contextInitialized(ServletContextEvent sce) {
ServletContext ctx = sce.getServletContext();
StartupStatus ss = StartupStatus.getBean(ctx);
try {
String endpointURI = ConfigurationProperties.getBean(sce).getProperty(
"VitroConnection.DataSource.endpointURI");
if (endpointURI != null) {
useEndpoint(endpointURI, ctx);
} else {
useSDB(ctx, ss);
}
} catch (SQLException e) {
ss.fatal(this, "Exception in RDFServiceSetup", e);
}
}
private void useEndpoint(String endpointURI, ServletContext ctx) {
RDFService rdfService = new RDFServiceSparql(endpointURI);
RDFServiceFactory rdfServiceFactory = new RDFServiceFactorySingle(rdfService);
RDFServiceUtils.setRDFServiceFactory(ctx, rdfServiceFactory);
}
private void useSDB(ServletContext ctx, StartupStatus ss) throws SQLException {
BasicDataSource bds = getApplicationDataSource(ctx);
if( bds == null ){
ss.fatal(this, "A DataSource must be set up before RDFServiceSetup "+
        "is run. Make sure that JenaPersistentDataSourceSetup runs before "+
        "RDFServiceSetup.");
return;
}
// union default graph
SDB.getContext().set(SDB.unionDefaultGraph, true) ;
StoreDesc storeDesc = makeStoreDesc(ctx);
setApplicationStoreDesc(storeDesc, ctx);
Store store = connectStore(bds, storeDesc);
setApplicationStore(store, ctx);
if (!isSetUp(store)) {
JenaPersistentDataSourceSetup.thisIsFirstStartup();
setupSDB(ctx, store);
}
RDFService rdfService = new RDFServiceSDB(bds, storeDesc);
RDFServiceFactory rdfServiceFactory = new RDFServiceFactorySingle(rdfService);
RDFServiceUtils.setRDFServiceFactory(ctx, rdfServiceFactory);
log.info("SDB store ready for use");
}
/**
 * Tests whether an SDB store has been formatted and populated for use.
 * @param store - the SDB store to test
 * @return true if the store is formatted and already contains data
 */
private boolean isSetUp(Store store) throws SQLException {
if (!(StoreUtils.isFormatted(store))) {
return false;
}
// even if the store exists, it may be empty
try {
return (SDBFactory.connectNamedModel(
store,
JenaDataSourceSetupBase.JENA_TBOX_ASSERTIONS_MODEL))
.size() > 0;
} catch (Exception e) {
return false;
}
}
public static StoreDesc makeStoreDesc(ServletContext ctx) {
String layoutStr = ConfigurationProperties.getBean(ctx).getProperty(
"VitroConnection.DataSource.sdb.layout", "layout2/hash");
String dbtypeStr = ConfigurationProperties.getBean(ctx).getProperty(
"VitroConnection.DataSource.dbtype", "MySQL");
return new StoreDesc(
LayoutType.fetch(layoutStr),
DatabaseType.fetch(dbtypeStr) );
}
public static Store connectStore(BasicDataSource bds, StoreDesc storeDesc)
throws SQLException {
SDBConnection conn = new SDBConnection(bds.getConnection());
return SDBFactory.connectStore(conn, storeDesc);
}
protected static void setupSDB(ServletContext ctx, Store store) {
log.info("Initializing SDB store");
store.getTableFormatter().create();
store.getTableFormatter().truncate();
}
}
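// Configuration sketch: pointing Vitro at an external endpoint instead of SDB
// takes a single configuration property (the URI below is a placeholder):
//
//   VitroConnection.DataSource.endpointURI = http://localhost:8080/openrdf-sesame/repositories/vitro
//
// When the property is absent, useSDB() wires an RDFServiceSDB over the
// application's BasicDataSource instead. Either way, consumers obtain the
// service through the factory:
//
//   RDFService rdfService = RDFServiceUtils.getRDFServiceFactory(ctx).getRDFService();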

View file

@ -1,231 +0,0 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.servlet.setup;
import java.sql.SQLException;
import javax.servlet.ServletContext;
import javax.servlet.ServletContextEvent;
import org.apache.commons.dbcp.BasicDataSource;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.Statement;
import com.hp.hpl.jena.rdf.model.StmtIterator;
import com.hp.hpl.jena.sdb.SDB;
import com.hp.hpl.jena.sdb.SDBFactory;
import com.hp.hpl.jena.sdb.Store;
import com.hp.hpl.jena.sdb.StoreDesc;
import com.hp.hpl.jena.sdb.sql.SDBConnection;
import com.hp.hpl.jena.sdb.store.DatabaseType;
import com.hp.hpl.jena.sdb.store.LayoutType;
import com.hp.hpl.jena.sdb.util.StoreUtils;
import edu.cornell.mannlib.vitro.webapp.config.ConfigurationProperties;
import edu.cornell.mannlib.vitro.webapp.dao.jena.JenaModelUtils;
import edu.cornell.mannlib.vitro.webapp.startup.StartupStatus;
public class SDBSetup extends JenaDataSourceSetupBase
implements javax.servlet.ServletContextListener {
private static final Log log = LogFactory.getLog(SDBSetup.class);
@Override
public void contextDestroyed(ServletContextEvent arg0) {
// nothing to do
}
@Override
public void contextInitialized(ServletContextEvent sce) {
ServletContext ctx = sce.getServletContext();
StartupStatus ss = StartupStatus.getBean(ctx);
try {
setupSDB(ctx, ss);
log.info("SDB store ready for use");
} catch (SQLException e) {
ss.fatal(this, "Exception in setupSDB", e);
}
}
private void setupSDB(ServletContext ctx, StartupStatus ss) throws SQLException {
BasicDataSource bds = getApplicationDataSource(ctx);
if( bds == null ){
ss.fatal(this, "A DataSource must be setup before SDBSetup "+
"is run. Make sure that JenaPersistentDataSourceSetup runs before "+
"SDBSetup.");
return;
}
// union default graph
SDB.getContext().set(SDB.unionDefaultGraph, true) ;
StoreDesc storeDesc = makeStoreDesc(ctx);
setApplicationStoreDesc(storeDesc, ctx);
Store store = connectStore(bds, storeDesc);
setApplicationStore(store, ctx);
if (!isSetUp(store)) {
JenaPersistentDataSourceSetup.thisIsFirstStartup();
setupSDB(ctx, store);
}
}
/**
* Tests whether an SDB store has been formatted and populated for use.
* @param store
* @return
*/
private boolean isSetUp(Store store) throws SQLException {
if (!(StoreUtils.isFormatted(store))) {
return false;
}
// even if the store exists, it may be empty
try {
return (SDBFactory.connectNamedModel(
store,
JenaDataSourceSetupBase.JENA_TBOX_ASSERTIONS_MODEL))
.size() > 0;
} catch (Exception e) {
return false;
}
}
public static StoreDesc makeStoreDesc(ServletContext ctx) {
String layoutStr = ConfigurationProperties.getBean(ctx).getProperty(
"VitroConnection.DataSource.sdb.layout", "layout2/hash");
String dbtypeStr = ConfigurationProperties.getBean(ctx).getProperty(
"VitroConnection.DataSource.dbtype", "MySQL");
return new StoreDesc(
LayoutType.fetch(layoutStr),
DatabaseType.fetch(dbtypeStr) );
}
public static Store connectStore(BasicDataSource bds, StoreDesc storeDesc)
throws SQLException {
SDBConnection conn = new SDBConnection(bds.getConnection());
return SDBFactory.connectStore(conn, storeDesc);
}
protected static void setupSDB(ServletContext ctx, Store store) {
setupSDB(ctx, store, ModelFactory.createDefaultModel(),
ModelFactory.createDefaultModel());
}
protected static void setupSDB(ServletContext ctx, Store store,
Model memModel, Model inferenceModel) {
log.info("Initializing SDB store");
store.getTableFormatter().create();
store.getTableFormatter().truncate();
store.getTableFormatter().dropIndexes(); // improve load performance
try {
// This is a one-time copy of stored KB data - from a Jena RDB store
// to a Jena SDB store. In the process, we will also separate out
// the TBox from the Abox; these are in the same graph in pre-1.2
// VIVO versions and will now be stored and maintained in separate
// models. Access to the Jena RDB data is through the
// OntModelSelectors that have been set up earlier in the current
// session by JenaPersistentDataSourceSetup.java. In the code
// below, note that the current getABoxModel() methods on the
// OntModelSelectors return a graph with both ABox and TBox data.
OntModel submodels = ModelFactory
.createOntologyModel(MEM_ONT_MODEL_SPEC);
readOntologyFilesInPathSet(SUBMODELS, ctx, submodels);
Model tboxAssertions = SDBFactory.connectNamedModel(store,
JenaDataSourceSetupBase.JENA_TBOX_ASSERTIONS_MODEL);
// initially putting the results in memory so we have a
// cheaper way of computing the difference when we copy the ABox
Model memTboxAssertions = ModelFactory.createDefaultModel();
getTBoxModel(memModel, submodels, memTboxAssertions);
tboxAssertions.add(memTboxAssertions);
Model tboxInferences = SDBFactory.connectNamedModel(store,
JenaDataSourceSetupBase.JENA_TBOX_INF_MODEL);
// initially putting the results in memory so we have a
// cheaper way of computing the difference when we copy the ABox
Model memTboxInferences = ModelFactory.createDefaultModel();
getTBoxModel(inferenceModel, submodels, memTboxInferences);
tboxInferences.add(memTboxInferences);
Model aboxAssertions = SDBFactory.connectNamedModel(store,
JenaDataSourceSetupBase.JENA_DB_MODEL);
copyDifference(memModel, memTboxAssertions, aboxAssertions);
Model aboxInferences = SDBFactory.connectNamedModel(store,
JenaDataSourceSetupBase.JENA_INF_MODEL);
copyDifference(inferenceModel, memTboxInferences, aboxInferences);
// Set up the application metadata model
Model applicationMetadataModel = SDBFactory.connectNamedModel(
store,
JenaDataSourceSetupBase.JENA_APPLICATION_METADATA_MODEL);
getAppMetadata(memModel, applicationMetadataModel);
log.info("During initial SDB setup, created an application "
+ "metadata model of size "
+ applicationMetadataModel.size());
// remove application metadata from ABox model
aboxAssertions.remove(applicationMetadataModel);
aboxInferences.remove(applicationMetadataModel);
// Make sure the reasoner takes into account the newly-set-up data.
SimpleReasonerSetup.setRecomputeRequired(ctx);
} finally {
log.info("Adding indexes to SDB database tables.");
store.getTableFormatter().addIndexes();
log.info("Indexes created.");
}
}
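// Named-graph layout produced by the setup above (constants from
// JenaDataSourceSetupBase):
//   JENA_TBOX_ASSERTIONS_MODEL       - TBox assertions
//   JENA_TBOX_INF_MODEL              - TBox inferences
//   JENA_DB_MODEL                    - ABox assertions (app metadata removed)
//   JENA_INF_MODEL                   - ABox inferences (app metadata removed)
//   JENA_APPLICATION_METADATA_MODEL  - application metadata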
/*
* Copy all statements from model 1 that are not in model 2 to model 3.
*/
private static void copyDifference(Model model1, Model model2, Model model3) {
StmtIterator iter = model1.listStatements();
while (iter.hasNext()) {
Statement stmt = iter.next();
if (!model2.contains(stmt)) {
model3.add(stmt);
}
}
}
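// A functionally equivalent sketch using Jena's built-in set operation;
// the explicit loop above avoids materializing an intermediate
// difference model (an assumed motivation, not stated in the source):
//
//   model3.add(model1.difference(model2));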
private static void getTBoxModel(Model fullModel, Model submodels,
Model tboxModel) {
JenaModelUtils modelUtils = new JenaModelUtils();
Model tempModel = ModelFactory.createUnion(fullModel, submodels);
Model tempTBoxModel = modelUtils.extractTBox(tempModel);
// copy intersection of tempTBoxModel and fullModel to tboxModel.
StmtIterator iter = tempTBoxModel.listStatements();
while (iter.hasNext()) {
Statement stmt = iter.next();
if (fullModel.contains(stmt)) {
tboxModel.add(stmt);
}
}
}
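// Equivalent sketch using Jena's intersection operation; the loop above
// copies matching statements directly instead of building a third model:
//
//   tboxModel.add(tempTBoxModel.intersection(fullModel));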
}

View file

@@ -16,7 +16,6 @@ import javax.servlet.ServletContextListener;
import org.apache.commons.dbcp.BasicDataSource;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.mindswap.pellet.PelletOptions;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.vocabulary.OWL;
@@ -27,6 +26,8 @@ import edu.cornell.mannlib.vitro.webapp.dao.jena.OntModelSelector;
import edu.cornell.mannlib.vitro.webapp.dao.jena.WebappDaoFactoryJena;
import edu.cornell.mannlib.vitro.webapp.dao.jena.pellet.PelletListener;
import edu.cornell.mannlib.vitro.webapp.dao.jena.pellet.ReasonerConfiguration;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.RDFServiceUtils;
import edu.cornell.mannlib.vitro.webapp.reasoner.ReasonerPlugin;
import edu.cornell.mannlib.vitro.webapp.reasoner.SimpleReasoner;
import edu.cornell.mannlib.vitro.webapp.reasoner.SimpleReasonerTBoxListener;
@@ -45,6 +46,7 @@ public class SimpleReasonerSetup implements ServletContextListener {
@Override
public void contextInitialized(ServletContextEvent sce) {
try {
// set up Pellet reasoning for the TBox
@@ -101,7 +103,9 @@ public class SimpleReasonerSetup implements ServletContextListener {
// the simple reasoner will register itself as a listener to the ABox assertions
SimpleReasoner simpleReasoner = new SimpleReasoner(unionOms.getTBoxModel(), assertionsOms.getABoxModel(), inferencesOms.getABoxModel(), rebuildModel, scratchModel);
RDFService rdfService = RDFServiceUtils.getRDFServiceFactory(ctx).getRDFService();
SimpleReasoner simpleReasoner = new SimpleReasoner(
unionOms.getTBoxModel(), rdfService, inferencesOms.getABoxModel(), rebuildModel, scratchModel);
sce.getServletContext().setAttribute(SimpleReasoner.class.getName(),simpleReasoner);
StartupStatus ss = StartupStatus.getBean(ctx);

View file

@@ -20,16 +20,8 @@ import org.apache.commons.logging.LogFactory;
import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.ontology.OntModelSpec;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.Property;
import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.rdf.model.ResourceFactory;
import com.hp.hpl.jena.rdf.model.Statement;
import com.hp.hpl.jena.rdf.model.StmtIterator;
import com.hp.hpl.jena.vocabulary.RDF;
import edu.cornell.mannlib.vitro.webapp.dao.DisplayVocabulary;
import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactory;
import edu.cornell.mannlib.vitro.webapp.dao.jena.ModelContext;
import edu.cornell.mannlib.vitro.webapp.ontology.update.KnowledgeBaseUpdater;

View file

@@ -2,6 +2,8 @@
package edu.cornell.mannlib.vitro.webapp.servlet.setup;
import static edu.cornell.mannlib.vitro.webapp.dao.DisplayVocabulary.DISPLAY_ONT_MODEL;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
@@ -15,8 +17,10 @@ import org.apache.commons.logging.LogFactory;
import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.ontology.OntModelSpec;
import com.hp.hpl.jena.query.Dataset;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.ModelMaker;
import com.hp.hpl.jena.rdf.model.ResIterator;
import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.rdf.model.ResourceFactory;
@@ -27,25 +31,33 @@ import com.hp.hpl.jena.util.ResourceUtils;
import com.hp.hpl.jena.util.iterator.ClosableIterator;
import com.hp.hpl.jena.vocabulary.RDF;
import edu.cornell.mannlib.vitro.webapp.config.ConfigurationProperties;
import edu.cornell.mannlib.vitro.webapp.dao.VitroVocabulary;
import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactory;
import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactoryConfig;
import edu.cornell.mannlib.vitro.webapp.dao.jena.ModelContext;
import edu.cornell.mannlib.vitro.webapp.dao.jena.ModelSynchronizer;
import edu.cornell.mannlib.vitro.webapp.dao.jena.OntModelSelector;
import edu.cornell.mannlib.vitro.webapp.dao.jena.OntModelSelectorImpl;
import edu.cornell.mannlib.vitro.webapp.dao.jena.RDFServiceDataset;
import edu.cornell.mannlib.vitro.webapp.dao.jena.RDFServiceModelMaker;
import edu.cornell.mannlib.vitro.webapp.dao.jena.VitroJenaModelMaker;
import edu.cornell.mannlib.vitro.webapp.dao.jena.VitroJenaSDBModelMaker;
import edu.cornell.mannlib.vitro.webapp.dao.jena.VitroModelSource;
import edu.cornell.mannlib.vitro.webapp.dao.jena.WebappDaoFactorySDB;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceFactory;
import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.RDFServiceUtils;
import edu.cornell.mannlib.vitro.webapp.servlet.setup.JenaDataSourceSetupBase.TripleStoreType;
import edu.cornell.mannlib.vitro.webapp.startup.StartupStatus;
import edu.cornell.mannlib.vitro.webapp.utils.jena.InitialJenaModelUtils;
/**
* Primarily sets up webapp DAO factories.
*/
public class WebappDaoSDBSetup extends JenaDataSourceSetupBase
public class WebappDaoSetup extends JenaDataSourceSetupBase
implements javax.servlet.ServletContextListener {
private static final Log log = LogFactory.getLog(WebappDaoSDBSetup.class);
private static final Log log = LogFactory.getLog(WebappDaoSetup.class);
@Override
public void contextInitialized(ServletContextEvent sce) {
@@ -56,11 +68,7 @@ public class WebappDaoSDBSetup extends JenaDataSourceSetupBase
long startTime = System.currentTimeMillis();
setUpJenaDataSource(ctx, ss);
log.info((System.currentTimeMillis() - startTime) / 1000 +
" seconds to set up SDB store");
} catch (SQLException sqle) {
// SQL exceptions are fatal and should halt startup
log.error("Error using SQL database; startup aborted.", sqle);
ss.fatal(this, "Error using SQL database; startup aborted.", sqle);
" seconds to set up models and DAO factories");
} catch (Throwable t) {
log.error("Throwable in " + this.getClass().getName(), t);
ss.fatal(this, "Throwable in " + this.getClass().getName(), t);
@@ -68,53 +76,158 @@ public class WebappDaoSDBSetup extends JenaDataSourceSetupBase
}
private void setUpJenaDataSource(ServletContext ctx, StartupStatus ss) throws SQLException {
private void setUpJenaDataSource(ServletContext ctx, StartupStatus ss) {
OntModelSelectorImpl baseOms = new OntModelSelectorImpl();
OntModelSelectorImpl inferenceOms = new OntModelSelectorImpl();
OntModelSelectorImpl unionOms = new OntModelSelectorImpl();
BasicDataSource bds = getApplicationDataSource(ctx);
if( bds == null ){
ss.fatal(this, "A DataSource must be set up before " + WebappDaoSDBSetup.class.getName() +
" is run. Make sure that JenaPersistentDataSourceSetup runs before " +
WebappDaoSDBSetup.class.getName() );
return;
OntModel userAccountsModel = ontModelFromContextAttribute(
ctx, "userAccountsOntModel");
baseOms.setUserAccountsModel(userAccountsModel);
inferenceOms.setUserAccountsModel(userAccountsModel);
unionOms.setUserAccountsModel(userAccountsModel);
OntModel displayModel = ontModelFromContextAttribute(
ctx, DISPLAY_ONT_MODEL);
baseOms.setDisplayModel(displayModel);
inferenceOms.setDisplayModel(displayModel);
unionOms.setDisplayModel(displayModel);
RDFServiceFactory rdfServiceFactory = RDFServiceUtils.getRDFServiceFactory(ctx);
RDFService rdfService = rdfServiceFactory.getRDFService();
Dataset dataset = new RDFServiceDataset(rdfService);
setStartupDataset(dataset, ctx);
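// Layering set up above: RDFServiceUtils.getRDFServiceFactory(ctx)
// yields an RDFServiceFactory, its getRDFService() yields the
// triple-store access API, and RDFServiceDataset adapts that API to a
// Jena Dataset, so the named-model lookups below read through the
// RDFService.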
// ABox assertions
Model aboxAssertions = dataset.getNamedModel(
JenaDataSourceSetupBase.JENA_DB_MODEL);
baseOms.setABoxModel(
ModelFactory.createOntologyModel(
OntModelSpec.OWL_MEM, aboxAssertions));
// ABox inferences
Model aboxInferences = dataset.getNamedModel(
JenaDataSourceSetupBase.JENA_INF_MODEL);
inferenceOms.setABoxModel(ModelFactory.createOntologyModel(
OntModelSpec.OWL_MEM, aboxInferences));
// TBox assertions
try {
Model tboxAssertionsDB = dataset.getNamedModel(
JENA_TBOX_ASSERTIONS_MODEL);
OntModel tboxAssertions = ModelFactory.createOntologyModel(
MEM_ONT_MODEL_SPEC);
if (tboxAssertionsDB != null) {
long startTime = System.currentTimeMillis();
log.info("Copying cached tbox assertions into memory");
tboxAssertions.add(tboxAssertionsDB);
log.info((System.currentTimeMillis() - startTime)
/ 1000 + " seconds to load tbox assertions");
}
tboxAssertions.getBaseModel().register(new ModelSynchronizer(
tboxAssertionsDB));
baseOms.setTBoxModel(tboxAssertions);
} catch (Throwable e) {
log.error("Unable to load tbox assertion cache from DB", e);
}
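// ModelSynchronizer (registered above) appears to act as a change
// listener: edits made to the in-memory TBox copy are replayed against
// the database-backed model, keeping the cache write-through.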
//Get the OntModelSelectors
OntModelSelectorImpl baseOms =
(OntModelSelectorImpl) ModelContext.getBaseOntModelSelector(ctx);
OntModelSelectorImpl inferenceOms =
(OntModelSelectorImpl) ModelContext.getInferenceOntModelSelector(ctx);
OntModelSelectorImpl unionOms =
(OntModelSelectorImpl) ModelContext.getUnionOntModelSelector(ctx);
// TBox inferences
try {
Model tboxInferencesDB = dataset.getNamedModel(JENA_TBOX_INF_MODEL);
OntModel tboxInferences = ModelFactory.createOntologyModel(
MEM_ONT_MODEL_SPEC);
if (tboxInferencesDB != null) {
long startTime = System.currentTimeMillis();
log.info("Copying cached tbox inferences into memory");
tboxInferences.add(tboxInferencesDB);
log.info((System.currentTimeMillis() - startTime)
/ 1000 + " seconds to load tbox inferences");
}
tboxInferences.getBaseModel().register(new ModelSynchronizer(
tboxInferencesDB));
inferenceOms.setTBoxModel(tboxInferences);
} catch (Throwable e) {
log.error("Unable to load tbox inference cache from DB", e);
}
// union ABox
OntModel unionABoxModel = ModelFactory.createOntologyModel(
MEM_ONT_MODEL_SPEC, ModelFactory.createUnion(
baseOms.getABoxModel(), inferenceOms.getABoxModel()));
unionOms.setABoxModel(unionABoxModel);
///////////////////////////////////////////////////////////////
// Check for namespace mismatch
// union TBox
OntModel unionTBoxModel = ModelFactory.createOntologyModel(
MEM_ONT_MODEL_SPEC, ModelFactory.createUnion(
baseOms.getTBoxModel(), inferenceOms.getTBoxModel()));
unionOms.setTBoxModel(unionTBoxModel);
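// Note: ModelFactory.createUnion returns a dynamic union, so statements
// added later to either underlying model become visible through these
// combined ABox and TBox views without copying.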
// Application metadata model is cached in memory.
try {
Model applicationMetadataModelDB = dataset.getNamedModel(
JENA_APPLICATION_METADATA_MODEL);
OntModel applicationMetadataModel =
ModelFactory.createOntologyModel(MEM_ONT_MODEL_SPEC);
long startTime = System.currentTimeMillis();
log.info("Copying cached application metadata model into memory");
applicationMetadataModel.add(applicationMetadataModelDB);
log.info((System.currentTimeMillis() - startTime)
/ 1000 + " seconds to load application metadata model " +
"assertions of size " + applicationMetadataModel.size());
applicationMetadataModel.getBaseModel().register(
new ModelSynchronizer(applicationMetadataModelDB));
if (applicationMetadataModel.size() == 0 /* isFirstStartup() */) {
applicationMetadataModel.add(
InitialJenaModelUtils.loadInitialModel(
ctx, getDefaultNamespace(ctx)));
}
baseOms.setApplicationMetadataModel(applicationMetadataModel);
inferenceOms.setApplicationMetadataModel(
baseOms.getApplicationMetadataModel());
unionOms.setApplicationMetadataModel(
baseOms.getApplicationMetadataModel());
} catch (Throwable e) {
log.error("Unable to load application metadata model cache from DB"
, e);
}
checkForNamespaceMismatch( baseOms.getApplicationMetadataModel(), ctx );
ctx.setAttribute("defaultNamespace", getDefaultNamespace(ctx));
///////////////////////////////////////////////////////////////
// first startup?
if (isFirstStartup()) {
loadDataFromFilesystem(baseOms, ctx);
}
log.info("Setting up DAO factories");
///////////////////////////////////////////////////////////////
//create assertions webapp DAO factory
log.info("Setting up union models and DAO factories");
StoreDesc storeDesc = getApplicationStoreDesc(ctx);
// create TBox + ABox union models and set up webapp DAO factories
OntModel baseUnion = ModelFactory.createOntologyModel(
OntModelSpec.OWL_MEM,
ModelFactory.createUnion(baseOms.getABoxModel(),
baseOms.getTBoxModel()));
baseOms.setFullModel(baseUnion);
ModelContext.setBaseOntModel(baseOms.getFullModel(), ctx);
WebappDaoFactoryConfig config = new WebappDaoFactoryConfig();
config.setDefaultNamespace(getDefaultNamespace(ctx));
WebappDaoFactory baseWadf = new WebappDaoFactorySDB(
baseOms, bds, storeDesc, config,
rdfService, baseOms, config,
WebappDaoFactorySDB.SDBDatasetMode.ASSERTIONS_ONLY);
ctx.setAttribute("assertionsWebappDaoFactory",baseWadf);
///////////////////////////////////////////////////////////////
//create inference webapp DAO factory
OntModel inferenceUnion = ModelFactory.createOntologyModel(
OntModelSpec.OWL_MEM,
ModelFactory.createUnion(
@@ -123,35 +236,40 @@ public class WebappDaoSDBSetup extends JenaDataSourceSetupBase
inferenceOms.setFullModel(inferenceUnion);
ModelContext.setInferenceOntModel(inferenceOms.getFullModel(), ctx);
WebappDaoFactory infWadf = new WebappDaoFactorySDB(
inferenceOms, bds, storeDesc, config,
rdfService, inferenceOms, config,
WebappDaoFactorySDB.SDBDatasetMode.INFERENCES_ONLY);
ctx.setAttribute("deductionsWebappDaoFactory", infWadf);
///////////////////////////////////////////////////////////////
//create default union webapp DAO factory
OntModel masterUnion = ModelFactory.createOntologyModel(
DB_ONT_MODEL_SPEC, makeDBModel(
bds, WebappDaoFactorySDB.UNION_GRAPH,
DB_ONT_MODEL_SPEC, TripleStoreType.SDB, ctx));
DB_ONT_MODEL_SPEC, dataset.getDefaultModel());
unionOms.setFullModel(masterUnion);
ctx.setAttribute("jenaOntModel", masterUnion);
WebappDaoFactory wadf = new WebappDaoFactorySDB(
unionOms, bds, storeDesc, config);
ctx.setAttribute("webappDaoFactory",wadf);
rdfService, unionOms, config);
ctx.setAttribute("webappDaoFactory",wadf);
ModelContext.setOntModelSelector(unionOms, ctx);
ModelContext.setUnionOntModelSelector(unionOms, ctx);         // assertions and inferences
ModelContext.setBaseOntModelSelector(baseOms, ctx);           // assertions
ModelContext.setInferenceOntModelSelector(inferenceOms, ctx); // inferences
ctx.setAttribute("defaultNamespace", getDefaultNamespace(ctx));
makeModelMakerFromConnectionProperties(TripleStoreType.RDB, ctx);
VitroJenaModelMaker vjmm = getVitroJenaModelMaker();
setVitroJenaModelMaker(vjmm, ctx);
makeModelMakerFromConnectionProperties(TripleStoreType.SDB, ctx);
VitroJenaSDBModelMaker vsmm = getVitroJenaSDBModelMaker();
RDFServiceModelMaker vsmm = new RDFServiceModelMaker(rdfServiceFactory);
setVitroJenaSDBModelMaker(vsmm, ctx);
//bdc34: I have no particular reason to choose vsmm over vjmm,
//and I don't know what the implications of this choice are.
setVitroModelSource( new VitroModelSource(vsmm,ctx), ctx);
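// (Apparent intent, as far as this commit shows: RDFServiceModelMaker
// wraps the same RDFServiceFactory used above, so models created or
// listed through it are backed by the same triple store as the DAO
// factories.)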
log.info("DAOs set up");
log.info("Model makers set up");
}

View file

@@ -100,7 +100,7 @@ public class ClassGroupPageData extends DataGetterBase implements DataGetter{
}
}
}else{
log.error("classgroup " + classGroupUri + " does not exist in the system");
throw new RuntimeException("classgroup " + classGroupUri + " does not exist in the system");
}
}

View file

@@ -207,7 +207,7 @@ public class GroupedPropertyList extends BaseTemplateModel {
ObjectPropertyDao opDao = wdf.getObjectPropertyDao();
ObjectProperty op = opDao.getObjectPropertyByURI(propertyUri);
if (op == null) {
log.error("ObjectProperty op returned null from opDao.getObjectPropertyByURI()");
log.error("ObjectProperty op returned null from opDao.getObjectPropertyByURI(" + propertyUri + ")");
} else if (op.getURI() == null) {
log.error("ObjectProperty op returned with null propertyURI from opDao.getObjectPropertyByURI()");
} else {

View file

@@ -111,7 +111,8 @@ public abstract class PropertyTemplateModel extends BaseTemplateModel {
}
public String getAddUrl() {
return addUrl;
//log.info("addUrl=" + addUrl);
return (addUrl != null) ? addUrl : "";
}
public Map<String, Object> getVerboseDisplay() {