Merge branch 'maint-rel-1.6' into develop

commit a2ab207ef9

5 changed files with 74 additions and 18 deletions
@@ -4,13 +4,13 @@
 DELETE FROM `orng_apps`;

 INSERT INTO `orng_apps` (`appid`, `name`, `url`, `PersonFilterID`, `enabled`, `channels`) VALUES
-(100, 'Google Search', 'http://dev-profiles.ucsf.edu/orng/GoogleSearch.xml', NULL, 1, NULL),
-(101, 'Featured Presentations', 'http://dev-profiles.ucsf.edu/orng/SlideShare.xml', NULL, 1, NULL),
-(102, 'Faculty Mentor', 'http://dev-profiles.ucsf.edu/orng/Mentor.xml', NULL, 0, NULL),
-(103, 'Websites', 'http://dev-profiles.ucsf.edu/orng/Links.xml', NULL, 1, NULL),
-(104, 'Profile List', 'http://dev-profiles.ucsf.edu/orng/ProfileListTool.xml', NULL, 1, 'JSONPersonIds'),
-(106, 'RDF Test Gadget', 'http://dev-profiles.ucsf.edu/orng/RDFTest.xml', NULL, 1, NULL),
-(112, 'Twitter', 'http://dev-profiles.ucsf.edu/ORNG/Twitter.xml', NULL, 1, NULL);
+(100, 'Google Search', 'http://stage-profiles.ucsf.edu/apps/ucsfsearch.xml', NULL, 1, NULL),
+(101, 'Featured Presentations', 'http://stage-profiles.ucsf.edu/apps/SlideShare.xml', NULL, 1, NULL),
+(102, 'Faculty Mentor', 'http://stage-profiles.ucsf.edu/apps/Mentor.xml', NULL, 0, NULL),
+(103, 'Websites', 'http://stage-profiles.ucsf.edu/apps/Links.xml', NULL, 1, NULL),
+(104, 'Profile List', 'http://stage-profiles.ucsf.edu/apps/ProfileListTool.xml', NULL, 1, 'JSONPersonIds'),
+(106, 'RDF Test Gadget', 'http://stage-profiles.ucsf.edu/apps/RDFTest.xml', NULL, 1, NULL),
+(112, 'Twitter', 'http://stage-profiles.ucsf.edu/apps/Twitter.xml', NULL, 1, NULL);

 DELETE FROM `orng_app_views`;

@@ -241,6 +241,9 @@ public class FormUtils {
             option.setValue(vclass.getURI());
             option.setBody(vclass.getPickListName());
             vclassOptionList.add(option);
+            if(selectedVClassURI != null && selectedVClassURI.equals(vclass.getURI())) {
+                option.setSelected(true);
+            }
         }
         return vclassOptionList;
     }
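(For reference: a minimal, self-contained sketch of the selection behavior this hunk adds. The Option class below is a hypothetical stand-in for the form-option bean that FormUtils populates, included only to keep the example runnable; it is not the Vitro class itself.)

import java.util.ArrayList;
import java.util.List;

public class OptionSelectionSketch {

    // Hypothetical stand-in for the form-option bean used by FormUtils.
    static class Option {
        String value;
        boolean selected;
        Option(String value) { this.value = value; }
    }

    // Mirrors the patched loop: every URI becomes an option, and the one
    // matching the currently selected VClass URI is flagged as selected.
    static List<Option> buildOptions(List<String> vclassURIs, String selectedVClassURI) {
        List<Option> options = new ArrayList<Option>();
        for (String uri : vclassURIs) {
            Option option = new Option(uri);
            if (selectedVClassURI != null && selectedVClassURI.equals(uri)) {
                option.selected = true;
            }
            options.add(option);
        }
        return options;
    }
}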
@@ -2,6 +2,7 @@

 package edu.cornell.mannlib.vitro.webapp.controller.individual;

+import java.text.SimpleDateFormat;
 import java.util.Date;
 import java.util.HashSet;
 import java.util.Set;
@@ -343,7 +344,9 @@ public class IndividualRdfAssembler {
     }

     private Literal createDateLiteral(OntModel o) {
-        return o.createTypedLiteral(new Date(), XSDDatatype.XSDdate);
+        String date = new SimpleDateFormat("YYYY-MM-dd'T'HH:mm:ss")
+                .format(new Date());
+        return o.createTypedLiteral(date, XSDDatatype.XSDdateTime);
     }

 }
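(For reference: a minimal sketch of what the reworked createDateLiteral now produces, using the same Jena 2.x API the file already imports. The class name and printed output are illustrative only; note that SimpleDateFormat's "YYYY" denotes the week-based year, as opposed to "yyyy".)

import java.text.SimpleDateFormat;
import java.util.Date;

import com.hp.hpl.jena.datatypes.xsd.XSDDatatype;
import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.ontology.OntModelSpec;
import com.hp.hpl.jena.rdf.model.Literal;
import com.hp.hpl.jena.rdf.model.ModelFactory;

public class DateLiteralSketch {
    public static void main(String[] args) {
        OntModel o = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM);
        // Same pattern as the patched method: format the current time as a
        // string and type it as xsd:dateTime rather than xsd:date.
        String date = new SimpleDateFormat("YYYY-MM-dd'T'HH:mm:ss").format(new Date());
        Literal lit = o.createTypedLiteral(date, XSDDatatype.XSDdateTime);
        System.out.println(lit.getLexicalForm() + " ^^ " + lit.getDatatypeURI());
        // e.g. 2014-01-15T09:30:00 ^^ http://www.w3.org/2001/XMLSchema#dateTime
    }
}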
@@ -83,7 +83,7 @@ public class KnowledgeBaseUpdater {
         logger.closeLogs();

         long elapsedSecs = (System.currentTimeMillis() - startTime)/1000;
-        log.info("Finished knowledge base migration in " + elapsedSecs + " second" + (elapsedSecs != 1 ? "s" : ""));
+        log.info("Finished checking knowledge base in " + elapsedSecs + " second" + (elapsedSecs != 1 ? "s" : ""));

         return record.hasRecordedChanges();
     }
@@ -95,20 +95,20 @@ public class KnowledgeBaseUpdater {
         AtomicOntologyChangeLists changes = new AtomicOntologyChangeLists(rawChanges,settings.getNewTBoxModel(),settings.getOldTBoxModel());

         // update ABox data any time
-        log.info("performing SPARQL CONSTRUCT additions");
+        log.debug("performing SPARQL CONSTRUCT additions");
         performSparqlConstructs(settings.getSparqlConstructAdditionsDir(), settings.getRDFService(), ADD);

-        log.info("performing SPARQL CONSTRUCT retractions");
+        log.debug("performing SPARQL CONSTRUCT retractions");
         performSparqlConstructs(settings.getSparqlConstructDeletionsDir(), settings.getRDFService(), RETRACT);

-        log.info("\tupdating the abox");
+        log.info("\tchecking the abox");
         updateABox(changes);

-        log.info("performing post-processing SPARQL CONSTRUCT additions");
+        log.debug("performing post-processing SPARQL CONSTRUCT additions");
         performSparqlConstructs(settings.getSparqlConstructAdditionsDir() + "/post/",
                 settings.getRDFService(), ADD);

-        log.info("performing post-processing SPARQL CONSTRUCT retractions");
+        log.debug("performing post-processing SPARQL CONSTRUCT retractions");
         performSparqlConstructs(settings.getSparqlConstructDeletionsDir() + "/post/",
                 settings.getRDFService(), RETRACT);

@@ -145,7 +145,7 @@ public class KnowledgeBaseUpdater {
             boolean add) throws IOException {
         Dataset dataset = new RDFServiceDataset(rdfService);
         File sparqlConstructDirectory = new File(sparqlConstructDir);
-        log.info("Using SPARQL CONSTRUCT directory " + sparqlConstructDirectory);
+        log.debug("Using SPARQL CONSTRUCT directory " + sparqlConstructDirectory);
         if (!sparqlConstructDirectory.isDirectory()) {
             String logMsg = this.getClass().getName() +
                     "performSparqlConstructs() expected to find a directory " +
@@ -178,7 +178,7 @@ public class KnowledgeBaseUpdater {
             }
             Model anonModel = ModelFactory.createDefaultModel();
             try {
-                log.info("\t\tprocessing SPARQL construct query from file " + sparqlFile.getName());
+                log.debug("\t\tprocessing SPARQL construct query from file " + sparqlFile.getName());

                 anonModel = RDFServiceUtils.parseModel(
                         rdfService.sparqlConstructQuery(fileContents.toString(),
@@ -12,6 +12,7 @@ import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Date;
 import java.util.List;

@@ -24,6 +25,10 @@ import org.apache.commons.logging.LogFactory;

 import com.hp.hpl.jena.ontology.OntModel;
 import com.hp.hpl.jena.ontology.OntModelSpec;
+import com.hp.hpl.jena.query.Query;
+import com.hp.hpl.jena.query.QueryExecution;
+import com.hp.hpl.jena.query.QueryExecutionFactory;
+import com.hp.hpl.jena.query.QueryFactory;
 import com.hp.hpl.jena.rdf.model.Model;
 import com.hp.hpl.jena.rdf.model.ModelFactory;
 import com.hp.hpl.jena.rdf.model.RDFNode;
@@ -42,8 +47,6 @@ import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactory;
 import edu.cornell.mannlib.vitro.webapp.ontology.update.KnowledgeBaseUpdater;
 import edu.cornell.mannlib.vitro.webapp.ontology.update.UpdateSettings;
 import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.RDFServiceUtils;
-import edu.cornell.mannlib.vitro.webapp.reasoner.ABoxRecomputer;
-import edu.cornell.mannlib.vitro.webapp.reasoner.SimpleReasoner;
 import edu.cornell.mannlib.vitro.webapp.startup.StartupStatus;

 /**
@@ -158,6 +161,8 @@ public class UpdateKnowledgeBase implements ServletContextListener {
             }
         }

+        removeBadRestrictions(settings.getAssertionOntModelSelector().getTBoxModel());
+
         log.info("Simple reasoner connected for the ABox");
         if(JenaDataSourceSetupBase.isFirstStartup()
                 || (migrationChangesMade && requiredUpdate)) {
@@ -531,6 +536,51 @@ public class UpdateKnowledgeBase implements ServletContextListener {
         }
     }

+    /**
+     * Remove restrictions with missing owl:onProperty or obsolete core class
+     * This should be worked into the main migration later.
+     */
+    private void removeBadRestrictions(Model tboxModel) {
+        List<String> queryStrs = Arrays.asList("PREFIX owl: <http://www.w3.org/2002/07/owl#> \n " +
+                "CONSTRUCT { \n" +
+                "    ?rest ?p ?o . \n" +
+                "    ?oo ?pp ?rest \n" +
+                "} WHERE { \n" +
+                "    ?rest a owl:Restriction . \n" +
+                "    FILTER NOT EXISTS { ?rest owl:onProperty ?x } \n" +
+                "    ?rest ?p ?o . \n" +
+                "    ?oo ?pp ?rest \n" +
+                "} \n" ,
+                "PREFIX owl: <http://www.w3.org/2002/07/owl#> \n " +
+                "CONSTRUCT { \n" +
+                "    ?rest ?p ?o . \n" +
+                "    ?oo ?pp ?rest \n" +
+                "} WHERE { \n" +
+                "    ?rest a owl:Restriction . \n" +
+                "    { ?rest owl:someValuesFrom ?c } UNION { ?rest owl:allValuesFrom ?c } \n" +
+                "    FILTER (regex(str(?c), \"vivoweb.org\")) \n" +
+                "    FILTER NOT EXISTS { ?c ?cp ?co } \n" +
+                "    ?rest ?p ?o . \n" +
+                "    ?oo ?pp ?rest \n" +
+                "} \n" );
+        for (String queryStr : queryStrs) {
+            Query query = QueryFactory.create(queryStr);
+            QueryExecution qe = QueryExecutionFactory.create(query, tboxModel);
+            try {
+                Model bad = qe.execConstruct();
+                tboxModel.remove(bad);
+                if (bad.size() > 0) {
+                    log.info("Deleted " + bad.size() +
+                            " triples of syntactically invalid restrictions");
+                }
+            } finally {
+                if (qe != null) {
+                    qe.close();
+                }
+            }
+        }
+    }
+
     @Override
     public void contextDestroyed(ServletContextEvent arg0) {
         // nothing to do
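(For reference: a standalone sketch of the CONSTRUCT-then-remove pattern that removeBadRestrictions applies to the TBox. The toy data, example.org namespace, and class name below are made up for illustration; the Jena 2.x API matches the imports added above.)

import java.io.StringReader;

import com.hp.hpl.jena.query.Query;
import com.hp.hpl.jena.query.QueryExecution;
import com.hp.hpl.jena.query.QueryExecutionFactory;
import com.hp.hpl.jena.query.QueryFactory;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;

public class RestrictionCleanupSketch {
    public static void main(String[] args) {
        // Toy TBox containing one owl:Restriction with no owl:onProperty.
        Model tbox = ModelFactory.createDefaultModel();
        tbox.read(new StringReader(
                "@prefix owl: <http://www.w3.org/2002/07/owl#> . \n" +
                "@prefix ex: <http://example.org/> . \n" +
                "ex:r a owl:Restriction . \n" +
                "ex:C ex:subClassOf ex:r . \n"), null, "TTL");

        // CONSTRUCT every statement about (or pointing at) a restriction that
        // lacks owl:onProperty, then subtract that graph from the TBox --
        // the same shape as the first query in removeBadRestrictions().
        String queryStr =
                "PREFIX owl: <http://www.w3.org/2002/07/owl#> \n" +
                "CONSTRUCT { ?rest ?p ?o . ?oo ?pp ?rest } WHERE { \n" +
                "    ?rest a owl:Restriction . \n" +
                "    FILTER NOT EXISTS { ?rest owl:onProperty ?x } \n" +
                "    ?rest ?p ?o . \n" +
                "    ?oo ?pp ?rest \n" +
                "}";
        Query query = QueryFactory.create(queryStr);
        QueryExecution qe = QueryExecutionFactory.create(query, tbox);
        try {
            Model bad = qe.execConstruct();
            tbox.remove(bad);
            System.out.println("Removed " + bad.size() + " triples from the toy TBox");
        } finally {
            qe.close();
        }
    }
}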