Merge branch 'maint-rel-1.6' into develop
commit c1995bec3f

11 changed files with 233 additions and 125 deletions
@@ -64,7 +64,9 @@ public class ListDatatypePropertiesController extends FreemarkerHttpServlet {
         List<DataProperty> props = new ArrayList<DataProperty>();
         if (vreq.getParameter("propsForClass") != null) {
             noResultsMsgStr = "There are no data properties that apply to this class.";
-            Collection <DataProperty> dataProps = dao.getDataPropertiesForVClass(vreq.getParameter("vclassUri"));
+            Collection <DataProperty> dataProps = vreq.getLanguageNeutralWebappDaoFactory()
+                    .getDataPropertyDao().getDataPropertiesForVClass(
+                            vreq.getParameter("vclassUri"));
             Iterator<DataProperty> dataPropIt = dataProps.iterator();
             HashSet<String> propURIs = new HashSet<String>();
             while (dataPropIt.hasNext()) {
@@ -59,7 +59,7 @@ public class ListPropertyWebappsController extends FreemarkerHttpServlet {
         String ontologyUri = vreq.getParameter("ontologyUri");

         ObjectPropertyDao dao = vreq.getUnfilteredWebappDaoFactory().getObjectPropertyDao();
-        PropertyInstanceDao piDao = vreq.getUnfilteredWebappDaoFactory().getPropertyInstanceDao();
+        PropertyInstanceDao piDao = vreq.getLanguageNeutralWebappDaoFactory().getPropertyInstanceDao();
         VClassDao vcDao = vreq.getUnfilteredWebappDaoFactory().getVClassDao();
         PropertyGroupDao pgDao = vreq.getUnfilteredWebappDaoFactory().getPropertyGroupDao();

@@ -51,7 +51,7 @@ public class ListVClassWebappsController extends FreemarkerHttpServlet {
         List<VClass> classes = null;

         if (vreq.getParameter("showPropertyRestrictions") != null) {
-            PropertyDao pdao = vreq.getUnfilteredWebappDaoFactory().getObjectPropertyDao();
+            PropertyDao pdao = vreq.getLanguageNeutralWebappDaoFactory().getObjectPropertyDao();
            classes = pdao.getClassesWithRestrictionOnProperty(vreq.getParameter("propertyURI"));
         } else {
             VClassDao vcdao = vreq.getUnfilteredWebappDaoFactory().getVClassDao();
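The three controller hunks above make the same substitution: lookups that feed the admin listings now go through the request's language-neutral DAO factory rather than the unfiltered one. A minimal sketch of the resulting call pattern follows; VitroRequest, DataProperty, DataPropertyDao and getLanguageNeutralWebappDaoFactory() are the names that appear in this diff, while the class and method names below, and the stated rationale, are illustrative assumptions rather than part of the commit.

// Sketch only: shows the DAO lookup pattern the hunks above switch to.
import java.util.Collection;

import edu.cornell.mannlib.vitro.webapp.beans.DataProperty;
import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest;
import edu.cornell.mannlib.vitro.webapp.dao.DataPropertyDao;

public class LanguageNeutralLookupSketch {
    // Look up the data properties for a class through the language-neutral
    // factory, presumably so the listing is not narrowed by the request's
    // language filtering (an assumption; the commit does not state the intent).
    public Collection<DataProperty> dataPropsForClass(VitroRequest vreq, String vclassUri) {
        DataPropertyDao dao = vreq.getLanguageNeutralWebappDaoFactory().getDataPropertyDao();
        return dao.getDataPropertiesForVClass(vclassUri);
    }
}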
@@ -44,6 +44,7 @@ import edu.cornell.mannlib.vitro.webapp.beans.ObjectProperty;
 import edu.cornell.mannlib.vitro.webapp.beans.Property;
 import edu.cornell.mannlib.vitro.webapp.beans.PropertyInstance;
 import edu.cornell.mannlib.vitro.webapp.beans.VClass;
+import edu.cornell.mannlib.vitro.webapp.dao.ObjectPropertyDao;
 import edu.cornell.mannlib.vitro.webapp.dao.PropertyDao;
 import edu.cornell.mannlib.vitro.webapp.dao.VClassDao;
 import edu.cornell.mannlib.vitro.webapp.dao.VitroVocabulary;
@@ -761,8 +762,7 @@ public class PropertyDaoJena extends JenaBaseDao implements PropertyDao {
                             ? foundRanges[1]
                             : op.getRange();
                     Resource domainRes = op.getDomain();
-                    propInsts.add(getPropInst(
-                            op, domainRes, rangeRes, applicableProperties));
+                    propInsts.add(getPropInst(op, domainRes, rangeRes));
                     List<Pair<String,String>> additionalFauxSubpropertyDomainAndRangeURIs =
                             getAdditionalFauxSubpropertyDomainAndRangeURIsForPropertyURI(
                                     propertyURI);
@@ -787,8 +787,8 @@ public class PropertyDaoJena extends JenaBaseDao implements PropertyDao {
                         propInsts.add(getPropInst(
                                 op,
                                 ResourceFactory.createResource(domainAndRangeURIs.getLeft()),
-                                ResourceFactory.createResource(domainAndRangeURIs.getRight()),
-                                applicableProperties));
+                                ResourceFactory.createResource(domainAndRangeURIs.getRight())
+                                ));
                     }
                 }
             }
@@ -798,12 +798,31 @@ public class PropertyDaoJena extends JenaBaseDao implements PropertyDao {
             ontModel.leaveCriticalSection();
         }

+        // add any faux properties with applicable domain where the predicate URI
+        // is not already on the list
+        List<ObjectProperty> stragglers = getAdditionalFauxSubpropertiesForVClasses(
+                vclasses, propInsts);
+        for (ObjectProperty op : stragglers) {
+            propInsts.add(makePropInst(op));
+        }
+
         return propInsts;

     }

-    private PropertyInstance getPropInst(OntProperty op, Resource domainRes, Resource rangeRes,
-            Map<String, Resource[]> applicableProperties) {
+    private PropertyInstance makePropInst(ObjectProperty op) {
+        PropertyInstance pi = new PropertyInstance();
+        pi.setDomainClassURI(op.getDomainVClassURI());
+        pi.setRangeClassURI(op.getRangeVClassURI());
+        pi.setSubjectSide(true);
+        pi.setPropertyURI(op.getURI());
+        pi.setPropertyName(op.getLabel());
+        pi.setDomainPublic(op.getDomainPublic());
+        return pi;
+    }
+
+    private PropertyInstance getPropInst(OntProperty op, Resource domainRes,
+            Resource rangeRes) {
         if (log.isDebugEnabled() && domainRes != null && rangeRes != null) {
             log.debug("getPropInst() op: " + op.getURI() + " domain: " +
                     domainRes.getURI() + " range: " + rangeRes.getURI());
@@ -851,6 +870,58 @@ public class PropertyDaoJena extends JenaBaseDao implements PropertyDao {
         return pi;
     }

+    private List<ObjectProperty> getAdditionalFauxSubpropertiesForVClasses(
+            List<VClass> vclasses, List<PropertyInstance> propInsts) {
+
+        List<ObjectProperty> opList = new ArrayList<ObjectProperty>();
+        if (vclasses.size() == 0) {
+            return opList;
+        }
+        ObjectPropertyDao opDao = getWebappDaoFactory().getObjectPropertyDao();
+        String propQuery = "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> \n" +
+                "PREFIX config: <http://vitro.mannlib.cornell.edu/ns/vitro/ApplicationConfiguration#> \n" +
+                "PREFIX vitro: <http://vitro.mannlib.cornell.edu/ns/vitro/0.7#> \n" +
+                "SELECT ?property ?domain ?range WHERE { \n" +
+                " ?context config:configContextFor ?property . \n" +
+                " ?context config:qualifiedByDomain ?domain . \n" +
+                " ?context config:qualifiedBy ?range . \n";
+        for(PropertyInstance propInst : propInsts) {
+            propQuery += " FILTER (?property != <" + propInst.getPropertyURI() + "> ) \n";
+        }
+        Iterator<VClass> classIt = vclasses.iterator();
+        if(classIt.hasNext()) {
+            propQuery += " FILTER ( \n";
+            propQuery += " (?domain = <" + OWL.Thing.getURI() + "> )\n";
+            while (classIt.hasNext()) {
+                VClass vclass = classIt.next();
+                if(vclass.isAnonymous()) {
+                    continue;
+                }
+                propQuery += " || (?domain = <" + vclass.getURI() + "> ) \n";
+            }
+            propQuery += ") \n";
+        }
+        propQuery += "} \n";
+        log.debug(propQuery);
+        Query q = QueryFactory.create(propQuery);
+        QueryExecution qe = QueryExecutionFactory.create(
+                q, getOntModelSelector().getDisplayModel());
+        try {
+            ResultSet rs = qe.execSelect();
+            while (rs.hasNext()) {
+                QuerySolution qsoln = rs.nextSolution();
+                String propertyURI = qsoln.getResource("property").getURI();
+                String domainURI = qsoln.getResource("domain").getURI();
+                String rangeURI = qsoln.getResource("range").getURI();
+                opList.add(opDao.getObjectPropertyByURIs(
+                        propertyURI, domainURI, rangeURI, null));
+            }
+        } finally {
+            qe.close();
+        }
+        return opList;
+    }
+
     private List<Pair<String,String>> getAdditionalFauxSubpropertyDomainAndRangeURIsForPropertyURI(String propertyURI) {
         List<Pair<String,String>> domainAndRangeURIs = new ArrayList<Pair<String,String>>();
         String propQuery = "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> \n" +
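For orientation, the query string that the new getAdditionalFauxSubpropertiesForVClasses helper assembles comes out roughly like the constant below when propInsts already holds one property and vclasses holds one named class. The two example.org URIs are placeholders, not values from the commit, and the real string also carries the rdfs: and vitro: prefixes shown in the hunk above.

// Rough shape of the generated display-model query (placeholder URIs).
public class FauxSubpropertyQuerySketch {
    public static final String EXAMPLE_QUERY =
        "PREFIX config: <http://vitro.mannlib.cornell.edu/ns/vitro/ApplicationConfiguration#> \n" +
        "SELECT ?property ?domain ?range WHERE { \n" +
        " ?context config:configContextFor ?property . \n" +
        " ?context config:qualifiedByDomain ?domain . \n" +
        " ?context config:qualifiedBy ?range . \n" +
        " FILTER (?property != <http://example.org/ns/alreadyListedProperty> ) \n" +
        " FILTER ( \n" +
        " (?domain = <http://www.w3.org/2002/07/owl#Thing> )\n" +
        " || (?domain = <http://example.org/ns/SomeClass> ) \n" +
        ") \n" +
        "} \n";
}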
@@ -600,7 +600,6 @@ public class ABoxUpdater {
         Iterator<String> graphIt = dataset.listNames();
         while(graphIt.hasNext()) {
             String graph = graphIt.next();
-            //log.info(System.currentTimeMillis() - start + " to get graph");
             if(!KnowledgeBaseUpdater.isUpdatableABoxGraph(graph)){
                 continue;
             }
@@ -608,7 +607,7 @@ public class ABoxUpdater {

             Model renamePropAddModel = ModelFactory.createDefaultModel();
             Model renamePropRetractModel = ModelFactory.createDefaultModel();
-            log.info("renaming " + oldProperty.getURI() + " in graph " + graph);
+            log.debug("renaming " + oldProperty.getURI() + " in graph " + graph);
             aboxModel.enterCriticalSection(Lock.WRITE);
             try {
                 start = System.currentTimeMillis();
@@ -623,7 +622,7 @@ public class ABoxUpdater {
                 start = System.currentTimeMillis();
                 renamePropRetractModel.add( aboxModel.listStatements(
                         (Resource) null, oldProperty, (RDFNode) null));
-                log.info(System.currentTimeMillis() - start + " to list " + renamePropRetractModel.size() + " old statements");
+                log.debug(System.currentTimeMillis() - start + " to list " + renamePropRetractModel.size() + " old statements");
                 start = System.currentTimeMillis();
                 StmtIterator stmItr = renamePropRetractModel.listStatements();
                 while(stmItr.hasNext()) {
@@ -632,13 +631,13 @@ public class ABoxUpdater {
                             newProperty,
                             tempStatement.getObject() );
                 }
-                log.info(System.currentTimeMillis() - start + " to make new statements");
+                log.debug(System.currentTimeMillis() - start + " to make new statements");
                 start = System.currentTimeMillis();
                 aboxModel.remove(renamePropRetractModel);
-                log.info(System.currentTimeMillis() - start + " to retract old statements");
+                log.debug(System.currentTimeMillis() - start + " to retract old statements");
                 start = System.currentTimeMillis();
                 aboxModel.add(renamePropAddModel);
-                log.info(System.currentTimeMillis() - start + " to add new statements");
+                log.debug(System.currentTimeMillis() - start + " to add new statements");
             } finally {
                 aboxModel.leaveCriticalSection();
             }
@@ -96,10 +96,10 @@ public class KnowledgeBaseUpdater {

         // update ABox data any time
         log.info("performing SPARQL CONSTRUCT additions");
-        //performSparqlConstructs(settings.getSparqlConstructAdditionsDir(), settings.getRDFService(), ADD);
+        performSparqlConstructs(settings.getSparqlConstructAdditionsDir(), settings.getRDFService(), ADD);

         log.info("performing SPARQL CONSTRUCT retractions");
-        //performSparqlConstructs(settings.getSparqlConstructDeletionsDir(), settings.getRDFService(), RETRACT);
+        performSparqlConstructs(settings.getSparqlConstructDeletionsDir(), settings.getRDFService(), RETRACT);

         log.info("\tupdating the abox");
         updateABox(changes);
@@ -207,11 +207,9 @@ public class KnowledgeBaseUpdater {
         StmtIterator sit = anonModel.listStatements();
         while (sit.hasNext()) {
             Statement stmt = sit.nextStatement();
-            long start = System.currentTimeMillis();
             Iterator<String> graphIt = dataset.listNames();
             while(graphIt.hasNext()) {
                 String graph = graphIt.next();
-                log.info(System.currentTimeMillis() - start + " to get graph");
                 if(!isUpdatableABoxGraph(graph)) {
                     continue;
                 }
@@ -12,8 +12,10 @@ import edu.cornell.mannlib.vitro.webapp.beans.Individual;
 import edu.cornell.mannlib.vitro.webapp.search.IndexingException;
 import edu.cornell.mannlib.vitro.webapp.search.beans.IndexerIface;
 import edu.cornell.mannlib.vitro.webapp.search.solr.documentBuilding.IndividualToSolrDocument;
+import edu.cornell.mannlib.vitro.webapp.utils.threads.VitroBackgroundThread;

-class IndexWorkerThread extends Thread{
+class IndexWorkerThread extends VitroBackgroundThread{
+    private static final Log log = LogFactory.getLog(IndexWorkerThread.class);

     protected final int threadNum;
     protected IndividualToSolrDocument individualToSolrDoc;
@@ -21,7 +23,6 @@ class IndexWorkerThread extends Thread{
     protected final Iterator<Individual> individualsToIndex;
     protected boolean stopRequested = false;

-    private Log log = LogFactory.getLog(IndexWorkerThread.class);
     private static AtomicLong countCompleted= new AtomicLong();
     private static AtomicLong countToIndex= new AtomicLong();
     private static long starttime = 0;
@@ -38,6 +39,7 @@ class IndexWorkerThread extends Thread{
     }

     public void run(){
+        setWorkLevel(WorkLevel.WORKING, "indexing " + individualsToIndex + " individuals");

         while( ! stopRequested ){

@@ -48,6 +50,8 @@ class IndexWorkerThread extends Thread{
                 // done so shut this thread down.
                 stopRequested = true;
             }
+        setWorkLevel(WorkLevel.IDLE);
+
         log.debug("Worker number " + threadNum + " exiting.");
     }

@@ -82,8 +86,8 @@ class IndexWorkerThread extends Thread{
             }
         }
         }catch(Throwable th){
-            //on tomcat shutdown odd exceptions get thrown and log can be null
-            if( log != null && ! stopRequested )
+            //on tomcat shutdown odd exceptions get thrown
+            if( ! stopRequested )
                 log.error("Exception during index building",th);
         }
     }
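The IndexWorkerThread hunks above move the worker onto the VitroBackgroundThread base class so it can report its status: WORKING while the indexing loop runs, IDLE when it finishes. Below is a sketch of that reporting pattern; VitroBackgroundThread, WorkLevel and setWorkLevel(...) are taken from the diff, while the single-String constructor and the doIndexing() body are assumptions made only for the example.

// Sketch of the work-level reporting pattern adopted above.
import edu.cornell.mannlib.vitro.webapp.utils.threads.VitroBackgroundThread;

public class ReportingWorkerSketch extends VitroBackgroundThread {

    public ReportingWorkerSketch() {
        super("ReportingWorkerSketch"); // assumed constructor taking a thread name
    }

    @Override
    public void run() {
        setWorkLevel(WorkLevel.WORKING, "indexing"); // visible to status reporting while busy
        try {
            doIndexing();
        } finally {
            setWorkLevel(WorkLevel.IDLE); // drop back to idle even if the loop fails
        }
    }

    private void doIndexing() {
        // stand-in for the individualsToIndex loop in IndexWorkerThread
    }
}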
@@ -479,10 +479,10 @@ public class FakeApplicationOntologyService {

         @Override
         public Map<String, Object> getData(Map<String, Object> pageData) {
-            Map<String, String[]> parms = new HashMap<String, String[]>();
-            parms.put("uri", new String[] { individualUri });
+            Map<String, Object> parms = new HashMap<>();
+            parms.put("uri", individualUri);

-            return doQuery(parms, getModel(ctx, vreq, null));
+            return super.getData(parms);
         }

     }
@@ -146,7 +146,7 @@ public class FileGraphSetup implements ServletContextListener {
      * base model.
      */
     public boolean readGraphs(Set<Path> pathSet, RDFServiceModelMaker dataset, String type, OntModel baseModel) {
-        return readGraphs(pathSet, dataset, type, baseModel, false);
+        return readGraphs(pathSet, dataset, type, baseModel, true);
     }

     /*
@@ -29,8 +29,18 @@ import com.hp.hpl.jena.shared.Lock;

 import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest;
 import edu.cornell.mannlib.vitro.webapp.dao.DisplayVocabulary;
+import edu.cornell.mannlib.vitro.webapp.dao.jena.QueryUtils;

 public class SparqlQueryDataGetter extends DataGetterBase implements DataGetter{
+    private final static Log log = LogFactory.getLog(SparqlQueryDataGetter.class);
+
+    private static final String queryPropertyURI = "<" + DisplayVocabulary.QUERY + ">";
+    private static final String saveToVarPropertyURI= "<" + DisplayVocabulary.SAVE_TO_VAR+ ">";
+    private static final String queryModelPropertyURI= "<" + DisplayVocabulary.QUERY_MODEL+ ">";
+
+    public static final String defaultVarNameForResults = "results";
+    private static final String defaultTemplate = "menupage--defaultSparql.ftl";
+
     String dataGetterURI;
     String queryText;
     String saveToVar;
@@ -38,11 +48,6 @@ public class SparqlQueryDataGetter extends DataGetterBase implements DataGetter{
     VitroRequest vreq;
     ServletContext context;

-
-    final static Log log = LogFactory.getLog(SparqlQueryDataGetter.class);
-    //default template
-    private final static String defaultTemplate = "menupage--defaultSparql.ftl";
-
     /**
      * Constructor with display model and data getter URI that will be called by reflection.
      */
@@ -50,20 +55,10 @@ public class SparqlQueryDataGetter extends DataGetterBase implements DataGetter{
         this.configure(vreq, displayModel,dataGetterURI);
     }

-    @Override
-    public Map<String, Object> getData(Map<String, Object> pageData) {
-        // Merge the pageData with the request parameters. PageData overrides
-        Map<String, String[]> merged = new HashMap<String, String[]>();
-        merged.putAll(vreq.getParameterMap());
-        for (String key: pageData.keySet()) {
-            merged.put(key, new String[] {String.valueOf(pageData.get(key))});
-        }
-        return doQuery( merged, getModel(context, vreq, modelURI));
-    }
-
     /**
      * Configure this instance based on the URI and display model.
      */
+    @SuppressWarnings("hiding")
     protected void configure(VitroRequest vreq, Model displayModel, String dataGetterURI) {
         if( vreq == null )
             throw new IllegalArgumentException("VitroRequest may not be null.");
@@ -79,7 +74,6 @@ public class SparqlQueryDataGetter extends DataGetterBase implements DataGetter{
         QuerySolutionMap initBindings = new QuerySolutionMap();
         initBindings.add("dataGetterURI", ResourceFactory.createResource(this.dataGetterURI));

-        int count = 0;
         Query dataGetterConfigurationQuery = QueryFactory.create(dataGetterQuery) ;
         displayModel.enterCriticalSection(Lock.READ);
         try{
@@ -88,7 +82,6 @@ public class SparqlQueryDataGetter extends DataGetterBase implements DataGetter{
             ResultSet res = qexec.execSelect();
             try{
                 while( res.hasNext() ){
-                    count++;
                     QuerySolution soln = res.next();

                     //query is NOT OPTIONAL
@@ -121,52 +114,116 @@ public class SparqlQueryDataGetter extends DataGetterBase implements DataGetter{
     }

     /**
-     * Do the query and return a result. This is in its own method
-     * to make testing easy.
+     * Query to get the definition of the SparqlDataGetter for a given URI.
      */
-    protected Map<String, Object> doQuery(Map<String, String[]>parameterMap, Model queryModel){
-
-        if( this.queryText == null ){
-            log.error("no SPARQL query defined for page " + this.dataGetterURI);
+    private static final String dataGetterQuery =
+        "PREFIX display: <" + DisplayVocabulary.DISPLAY_NS +"> \n" +
+        "SELECT ?query ?saveToVar ?queryModel WHERE { \n" +
+        " ?dataGetterURI "+queryPropertyURI+" ?query . \n" +
+        " OPTIONAL{ ?dataGetterURI "+saveToVarPropertyURI+" ?saveToVar } \n " +
+        " OPTIONAL{ ?dataGetterURI "+queryModelPropertyURI+" ?queryModel } \n" +
+        "}";
+
+    @Override
+    public Map<String, Object> getData(Map<String, Object> pageData) {
+        Map<String, String> merged = mergeParameters(vreq.getParameterMap(), pageData);
+
+        String boundQueryText = bindParameters(queryText, merged);
+
+        if (modelURI != null) {
+            return doQueryOnModel(boundQueryText, getModel(context, vreq, modelURI));
+        } else {
+            return doQueryOnRDFService(boundQueryText);
+        }
+    }
+
+    /** Merge the pageData with the request parameters. PageData overrides. */
+    private Map<String, String> mergeParameters(
+            Map<String, String[]> parameterMap, Map<String, Object> pageData) {
+        Map<String, String> merged = new HashMap<>();
+        for (String key: parameterMap.keySet()) {
+            merged.put(key, parameterMap.get(key)[0]);
+        }
+        for (String key: pageData.keySet()) {
+            merged.put(key, String.valueOf(pageData.get(key)));
+        }
+        return merged;
+    }
+
+    /**
+     * InitialBindings don't always work, and besides, RDFService doesn't accept
+     * them. So do a text-based substitution.
+     *
+     * This assumes that every parameter is a URI. What if we want to substitute
+     * a string value?
+     */
+    private String bindParameters(String text, Map<String, String> merged) {
+        String bound = text;
+        for (String key : merged.keySet()) {
+            bound.replace('?' + key, '<' + merged.get(key) + '>');
+        }
+        log.debug("query after binding parameters: " + bound);
+        return bound;
+    }
+
+    /**
+     * Do the query and return a result. This is in its own method, with
+     * protected access, to make testing easy.
+     */
+    protected Map<String, Object> doQueryOnRDFService(String q) {
+        log.debug("Going to RDFService with " + q);
+        ResultSet results = QueryUtils.getQueryResults(q, vreq);
+        return assembleMap(parseResults(results));
+    }
+
+    /**
+     * Do the query and return a result. This is in its own method, with
+     * protected access, to make testing easy.
+     */
+    protected Map<String, Object> doQueryOnModel(String q, Model queryModel){
+        log.debug("Going to model " + modelURI + " with " + q);
+        if (q == null) {
             return Collections.emptyMap();
         }

-        //this may throw a SPARQL syntax error
-        Query query = QueryFactory.create( this.queryText );
-
-        //build query bindings
-        QuerySolutionMap initialBindings = createBindings( parameterMap);
-
-        //execute query
-        List<Map<String,String>> results = executeQuery( query, queryModel, initialBindings);
-
-        //put results in page data, what key to use for results?
-        Map<String, Object> rmap = new HashMap<String,Object>();
-        //also store the variable name within which results will be returned
-        rmap.put("variableName", this.saveToVar);
-        rmap.put(this.saveToVar, results);
-        //This will be overridden at page level in display model if template specified there
-        rmap.put("bodyTemplate", defaultTemplate);
-        return rmap;
+        Query query = makeQuery(q);
+        if (query == null) {
+            return Collections.emptyMap();
+        }
+
+        return assembleMap(executeQuery( query, queryModel));
     }

-    private List<Map<String, String>> executeQuery(Query query, Model model,
-            QuerySolutionMap initialBindings) {
-        List<Map<String,String>> rows = new ArrayList<Map<String,String>>();
+    private Query makeQuery(String q) {
+        try {
+            return QueryFactory.create(q);
+        } catch (Exception e) {
+            log.error("Failed to build a query from ''", e);
+            return null;
+        }
+    }
+
+    private List<Map<String, String>> executeQuery(Query query, Model model) {
         model.enterCriticalSection(Lock.READ);
         try{
-            QueryExecution qexec= QueryExecutionFactory.create(query, model,initialBindings );
+            QueryExecution qexec= QueryExecutionFactory.create(query, model );
             ResultSet results = qexec.execSelect();
             try{
+                return parseResults(results);
+            }finally{ qexec.close(); }
+        }finally{ model.leaveCriticalSection(); }
+    }
+
+    /**
+     * Converts a ResultSet into a List of Maps.
+     */
+    private List<Map<String, String>> parseResults(ResultSet results) {
+        List<Map<String,String>> rows = new ArrayList<Map<String,String>>();
         while (results.hasNext()) {
             QuerySolution soln = results.nextSolution();
             rows.add( toRow( soln ) );
         }
-            }finally{ qexec.close(); }
-        }finally{ model.leaveCriticalSection(); }

         return rows;
     }
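Alongside the new bindParameters method above, it is worth recalling that java.lang.String is immutable, so replace() returns a new string rather than modifying the receiver. The following is a standalone sketch of the same text-based URI substitution idea with the replacement captured; the class and method names are illustrative and not part of the commit.

// Illustrative sketch of text-based parameter substitution. Every value is
// treated as a URI, mirroring the assumption documented in bindParameters above.
import java.util.HashMap;
import java.util.Map;

public class BindParametersSketch {
    public static String bind(String queryText, Map<String, String> parameters) {
        String bound = queryText;
        for (Map.Entry<String, String> entry : parameters.entrySet()) {
            // String.replace returns a new String, so the result is reassigned here.
            bound = bound.replace("?" + entry.getKey(), "<" + entry.getValue() + ">");
        }
        return bound;
    }

    public static void main(String[] args) {
        Map<String, String> params = new HashMap<>();
        params.put("individualUri", "http://example.org/individual/n123");
        // Prints the query with ?individualUri replaced by the bracketed URI.
        System.out.println(bind("SELECT ?p ?o WHERE { ?individualUri ?p ?o }", params));
    }
}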
@@ -186,8 +243,8 @@ public class SparqlQueryDataGetter extends DataGetterBase implements DataGetter{
     private String toCell(RDFNode rdfNode) {
         if( rdfNode == null){
             return "";
-        }else if( rdfNode.canAs( Literal.class )){
-            return ((Literal)rdfNode.as(Literal.class)).getLexicalForm();
+        }else if( rdfNode.isLiteral() ){
+            return rdfNode.asLiteral().getLexicalForm();
         }else if( rdfNode.isResource() ){
             Resource resource = (Resource)rdfNode;
             if( ! resource.isAnon() ){
@@ -200,40 +257,17 @@ public class SparqlQueryDataGetter extends DataGetterBase implements DataGetter{
             }
         }

-    private QuerySolutionMap createBindings(Map<String, String[]>parameterMap) {
-        QuerySolutionMap initBindings = new QuerySolutionMap();
-
-        //could have bindings from HTTP parameters
-        for( String var : parameterMap.keySet() ) {
-            String[] values = parameterMap.get(var);
-            if( values != null && values.length == 1 ){
-                //what do do when we don't want a Resource?
-                initBindings.add(var, ResourceFactory.createResource(values[0]) );
-            }else if( values.length > 1){
-                log.error("more than 1 http parameter for " + var);
-            }
-        }
-        return initBindings;
-    }
-
-    private static final String queryPropertyURI = "<" + DisplayVocabulary.QUERY + ">";
-    private static final String saveToVarPropertyURI= "<" + DisplayVocabulary.SAVE_TO_VAR+ ">";
-    private static final String queryModelPropertyURI= "<" + DisplayVocabulary.QUERY_MODEL+ ">";
-
-    public static final String defaultVarNameForResults = "results";
-
-    /**
-     * Query to get the definition of the SparqlDataGetter for a given URI.
-     */
-    private static final String dataGetterQuery =
-        "PREFIX display: <" + DisplayVocabulary.DISPLAY_NS +"> \n" +
-        "SELECT ?query ?saveToVar ?queryModel WHERE { \n" +
-        " ?dataGetterURI "+queryPropertyURI+" ?query . \n" +
-        " OPTIONAL{ ?dataGetterURI "+saveToVarPropertyURI+" ?saveToVar } \n " +
-        " OPTIONAL{ ?dataGetterURI "+queryModelPropertyURI+" ?queryModel } \n" +
-        "}";
+    private Map<String, Object> assembleMap(List<Map<String, String>> results) {
+        Map<String, Object> rmap = new HashMap<String,Object>();
+
+        //put results in page data
+        rmap.put(this.saveToVar, results);
+        //also store the variable name within which results will be returned
+        rmap.put("variableName", this.saveToVar);
+        //This will be overridden at page level in display model if template specified there
+        rmap.put("bodyTemplate", defaultTemplate);
+
+        return rmap;
+    }

 }
@@ -67,9 +67,9 @@ public class SparqlQueryDataGetterTest extends AbstractTestClass{
         String bobURI = "http://example.com/p/bob";
         dataModel.add(ResourceFactory.createResource(bobURI), RDF.type, ResourceFactory.createResource("http://xmlns.com/foaf/0.1/Person"));

-        Map<String, String[]> params = Collections.emptyMap();
+        Map<String, String> params = Collections.emptyMap();

-        Map<String,Object> mapOut = sdg.doQuery(params, dataModel);
+        Map<String,Object> mapOut = sdg.doQueryOnModel(sdg.queryText, dataModel);

         Assert.assertNotNull(mapOut);
         Assert.assertTrue("should contain key people" , mapOut.containsKey("people"));