continued work on ISF conversion and data migration
commit dffc40dd3b
parent cbdbe629d1

7 changed files with 42 additions and 105 deletions
ObjectPropertyDaoJena.java

@@ -955,7 +955,7 @@ public class ObjectPropertyDaoJena extends PropertyDaoJena implements ObjectProp
                 }
             } else {
                 String filename = soln.getLiteral("filename").getLexicalForm();
-                log.info("putting " + prop.getURI() + " " + rangeUri + " " + filename + " into list view map");
+                log.debug("putting " + prop.getURI() + " " + rangeUri + " " + filename + " into list view map");
                 customListViewConfigFileMap.put(new Pair<ObjectProperty, String>(prop, rangeUri), filename);
             }
         }

@@ -964,7 +964,7 @@ public class ObjectPropertyDaoJena extends PropertyDaoJena implements ObjectProp
 
         String customListViewConfigFileName = customListViewConfigFileMap.get(new Pair<ObjectProperty, String>(op, op.getRangeVClassURI()));
         if (customListViewConfigFileName == null) {
-            log.info("no list view found for " + op.getURI() + " qualified by " + op.getRangeVClassURI());
+            log.debug("no list view found for " + op.getURI() + " qualified by " + op.getRangeVClassURI());
            customListViewConfigFileName = customListViewConfigFileMap.get(new Pair<ObjectProperty, String>(op, OWL.Thing.getURI()));
         }
 
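The two hunks above only lower log levels, but they show the lookup pattern at work: custom list views are keyed by a (property, range URI) pair, and a miss on the qualified key falls back to an entry keyed by owl:Thing. Below is a minimal standalone sketch of that pattern using plain strings in place of Vitro's ObjectProperty and Pair types; the class and method names are illustrative, not from the commit.

import java.util.HashMap;
import java.util.Map;

public class ListViewLookupSketch {
    // the range-unqualified fallback key used by the code above
    private static final String OWL_THING = "http://www.w3.org/2002/07/owl#Thing";

    private final Map<String, String> configFileMap = new HashMap<String, String>();

    // combine property URI and range URI into a single map key
    private String key(String propertyUri, String rangeUri) {
        return propertyUri + " " + rangeUri;
    }

    public void put(String propertyUri, String rangeUri, String filename) {
        configFileMap.put(key(propertyUri, rangeUri), filename);
    }

    public String getConfigFile(String propertyUri, String rangeUri) {
        String filename = configFileMap.get(key(propertyUri, rangeUri));
        if (filename == null) {
            // no view registered for the qualified range: retry unqualified
            filename = configFileMap.get(key(propertyUri, OWL_THING));
        }
        return filename;
    }
}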
PropertyDaoJena.java

@@ -721,7 +721,7 @@ public class PropertyDaoJena extends JenaBaseDao implements PropertyDao {
                 propInsts.add(getPropInstForPropertyAndRange(op, rangeRes, applicableProperties));
                 List<String> additionalFauxSubpropertyRangeURIs = getAdditionalFauxSubpropertyRangeURIsForPropertyURI(propertyURI);
                 for (String rangeURI : additionalFauxSubpropertyRangeURIs) {
-                    if (getWebappDaoFactory().getVClassDao().isSubClassOf(rangeURI, rangeRes.getURI())) {
+                    if (rangeRes == null || getWebappDaoFactory().getVClassDao().isSubClassOf(rangeURI, rangeRes.getURI())) {
                         propInsts.add(getPropInstForPropertyAndRange(
                                 op, ResourceFactory.createResource(rangeURI), applicableProperties));
                     }

@@ -740,7 +740,9 @@ public class PropertyDaoJena extends JenaBaseDao implements PropertyDao {
             Map<String, Resource[]> applicableProperties) {
         PropertyInstance pi = new PropertyInstance();
         String domainURIStr = getURIStr(op.getDomain());
-        if (rangeRes != null) {
+        if (rangeRes == null) {
+            pi.setRangeClassURI(OWL.Thing.getURI()); // TODO see above
+        } else {
             String rangeClassURI;
             if (rangeRes.isAnon()) {
                 rangeClassURI = PSEUDO_BNODE_NS + rangeRes.getId()

@@ -757,8 +759,6 @@ public class PropertyDaoJena extends JenaBaseDao implements PropertyDao {
                 range.setName(range.getLocalName());
             }
             pi.setRangeClassName(range.getName());
-        } else {
-            pi.setRangeClassURI(OWL.Thing.getURI()); // TODO see above
         }
         pi.setDomainClassURI(domainURIStr);
         VClass domain = getWebappDaoFactory().getVClassDao()
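These hunks make a null rangeRes legal: the subclass test is bypassed when there is no range resource, and the range class defaults to owl:Thing. A small sketch of that defaulting rule follows, using the Jena types this DAO already imports; the helper class is hypothetical and its handling of anonymous ranges is deliberately simplified, so it is not the actual Vitro method.

import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.vocabulary.OWL;

final class RangeDefaults {

    private RangeDefaults() { }

    // a null range means "unrestricted", which the patched code records as owl:Thing
    static String rangeClassUri(Resource rangeRes) {
        if (rangeRes == null) {
            return OWL.Thing.getURI();
        }
        // the real DAO builds a pseudo-bnode URI for anonymous ranges;
        // this sketch does not guess and simply returns null for them
        return rangeRes.isAnon() ? null : rangeRes.getURI();
    }
}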
VClassDaoJena.java

@@ -462,9 +462,11 @@ public class VClassDaoJena extends JenaBaseDao implements VClassDao {
         while (classIt.hasNext()) {
             try {
                 Individual classInd = classIt.next();
-                OntClass cls = classInd.as(OntClass.class);
-                if (!cls.isAnon() && !(NONUSER_NAMESPACES.contains(cls.getNameSpace()))) {
-                    classes.add(new VClassJena(cls,getWebappDaoFactory()));
+                if(classInd.canAs(OntClass.class)) {
+                    OntClass cls = classInd.as(OntClass.class);
+                    if (!cls.isAnon() && !(NONUSER_NAMESPACES.contains(cls.getNameSpace()))) {
+                        classes.add(new VClassJena(cls,getWebappDaoFactory()));
+                    }
                 }
             } catch (ClassCastException cce) {
                 log.error(cce, cce);
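The change above guards the OntClass conversion with canAs(). In Jena, calling as(OntClass.class) on a node that cannot be viewed as a class fails at runtime; checking canAs() first lets the loop skip such individuals instead of relying on the surrounding catch block. A sketch of the guard as a standalone helper (the helper name is hypothetical):

import com.hp.hpl.jena.ontology.Individual;
import com.hp.hpl.jena.ontology.OntClass;

final class OntClassGuard {

    private OntClassGuard() { }

    // return the OntClass view when the node supports it, otherwise null
    static OntClass asOntClassOrNull(Individual classInd) {
        if (classInd.canAs(OntClass.class)) {
            return classInd.as(OntClass.class);
        }
        return null;
    }
}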
KnowledgeBaseUpdater.java

@@ -73,7 +73,7 @@ public class KnowledgeBaseUpdater {
             performUpdate(servletContext);
         } catch (Exception e) {
             logger.logError(e.getMessage());
-            e.printStackTrace();
+            log.error(e,e);
         }
 
         if (!logger.errorsWritten()) {

@@ -300,7 +300,7 @@ public class KnowledgeBaseUpdater {
      * needs to be updated to conform to a new ontology version
      */
     public boolean updateRequired(ServletContext servletContext) throws IOException {
-        boolean required = false;
+        boolean required = true;
 
         String sparqlQueryStr = loadSparqlQuery(settings.getAskUpdatedQueryFile());
         if (sparqlQueryStr == null) {

@@ -342,7 +342,7 @@ public class KnowledgeBaseUpdater {
 
         File file = new File(filePath);
         if (!file.exists()) {
-            return null;
+            throw new RuntimeException("SPARQL file not found at " + filePath);
         }
         BufferedReader reader = new BufferedReader(new FileReader(file));
         StringBuffer fileContents = new StringBuffer();
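The KnowledgeBaseUpdater changes push toward fail-fast behavior: updateRequired() now starts from true rather than false, and a missing SPARQL file is reported as an exception instead of a silent null. Below is a standalone sketch of the loader's new contract, reusing the error message from the hunk; the class and method names are illustrative, not the actual Vitro code.

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;

final class SparqlFileLoader {

    private SparqlFileLoader() { }

    static String load(String filePath) throws IOException {
        File file = new File(filePath);
        if (!file.exists()) {
            // surface a broken update configuration instead of skipping it
            throw new RuntimeException("SPARQL file not found at " + filePath);
        }
        StringBuilder contents = new StringBuilder();
        BufferedReader reader = new BufferedReader(new FileReader(file));
        try {
            String line;
            while ((line = reader.readLine()) != null) {
                contents.append(line).append('\n');
            }
        } finally {
            reader.close();
        }
        return contents.toString();
    }
}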
AssembleModelsSetup.java (file deleted)

@@ -1,85 +0,0 @@
-/* $This file is distributed under the terms of the license in /doc/license.txt$ */
-
-package edu.cornell.mannlib.vitro.webapp.servlet.setup;
-
-import java.io.InputStream;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Set;
-
-import javax.servlet.ServletContextEvent;
-import javax.servlet.ServletContextListener;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
-import com.hp.hpl.jena.assembler.Assembler;
-import com.hp.hpl.jena.ontology.OntModel;
-import com.hp.hpl.jena.rdf.model.Model;
-import com.hp.hpl.jena.rdf.model.ModelFactory;
-import com.hp.hpl.jena.rdf.model.Resource;
-import com.hp.hpl.jena.rdf.model.ResourceFactory;
-import com.hp.hpl.jena.util.iterator.ExtendedIterator;
-import com.hp.hpl.jena.vocabulary.RDF;
-
-import edu.cornell.mannlib.vitro.webapp.dao.ModelAccess;
-import edu.cornell.mannlib.vitro.webapp.dao.jena.ModelSynchronizer;
-
-/**
- * This is the beginning of a more sane and flexible model management system,
- * especially necessary for DataStaR.
- * Don't use it yet; it's going to change.
- * (That part is still insane, I know.)
- * @author bjl23
- */
-public class AssembleModelsSetup implements ServletContextListener {
-
-    private static final Log log = LogFactory.getLog(AssembleModelsSetup.class);
-
-    private List<Model> assembledModels = new LinkedList<Model>();
-
-    private String ASSEMBLERS_DIR_PATH = "/WEB-INF/assemblers/";
-    private Resource ASSEMBLER_OBJECT = ResourceFactory.createProperty("http://jena.hpl.hp.com/2005/11/Assembler#Object");
-    private String SYNTAX = "N3";
-
-    public void contextInitialized(ServletContextEvent sce) {
-        OntModel jenaOntModel = ModelAccess.on(sce.getServletContext()).getBaseOntModel();
-        // read assemblers out of assemblers directory
-        Set pathSet = sce.getServletContext().getResourcePaths(ASSEMBLERS_DIR_PATH);
-        for (String path : (Set<String>)pathSet) {
-            InputStream assemblerInputStream = sce.getServletContext().getResourceAsStream(path);
-            Model assemblerModel = ModelFactory.createDefaultModel();
-            try {
-                assemblerModel.read(assemblerInputStream, null, SYNTAX);
-                ExtendedIterator assemblerIt = assemblerModel.listResourcesWithProperty(RDF.type,ASSEMBLER_OBJECT);
-                while (assemblerIt.hasNext()) {
-                    Resource assemblerObj = (Resource) assemblerIt.next();
-                    Model assembledModel = Assembler.general.openModel(assemblerObj);
-                    /* special stuff here */
-                    Model memModel = ModelFactory.createDefaultModel();
-                    memModel.add(assembledModel);
-                    memModel.register(new ModelSynchronizer(assembledModel));
-                    /* end special stuff */
-                    if (assembledModel != null) {
-                        jenaOntModel.addSubModel(memModel);
-                    }
-                }
-                if (assemblerIt != null) {
-                    assemblerIt.close();
-                }
-            } catch (Exception e) {
-                log.error("Unable to use assembler at "+path);
-            }
-        }
-        System.out.println("ContextListener AssembleModelsSetup done");
-    }
-
-    public void contextDestroyed(ServletContextEvent sce) {
-        for (Model model : assembledModels) {
-            if (model != null) {
-                model.close();
-            }
-        }
-    }
-
-}
UpdateKnowledgeBase.java

@@ -54,6 +54,9 @@ public class UpdateKnowledgeBase implements ServletContextListener {
     private final static Log log = LogFactory.getLog(UpdateKnowledgeBase.class);
 
     private static final String DATA_DIR = "/WEB-INF/ontologies/update/";
+    private static final String DIFF_FILE = DATA_DIR + "diff.tab.txt";
+    private static final String ASK_QUERY_FILE = DATA_DIR + "askUpdated.sparql";
+    private static final String SUCCESS_ASSERTIONS_FILE = DATA_DIR + "success.n3";
     private static final String OLD_TBOX_MODEL_DIR = DATA_DIR + "oldVersion/";
     private static final String NEW_TBOX_MODEL_DIR = "/WEB-INF/filegraph/tbox/";
     private static final String OLD_TBOX_ANNOTATIONS_DIR = DATA_DIR + "oldAnnotations/";

@@ -75,6 +78,7 @@ public class UpdateKnowledgeBase implements ServletContextListener {
         try {
             UpdateSettings settings = new UpdateSettings();
             putReportingPathsIntoSettings(ctx, settings);
+            putNonReportingPathsIntoSettings(ctx, settings);
 
             WebappDaoFactory wadf = ModelAccess.on(ctx).getWebappDaoFactory();
             settings.setDefaultNamespace(wadf.getDefaultNamespace());

@@ -113,7 +117,7 @@ public class UpdateKnowledgeBase implements ServletContextListener {
                 OntModel oldDisplayModelVivoListView = loadModelFromFile(ctx.getRealPath(OLD_DISPLAYMODEL_VIVOLISTVIEW_PATH));
                 settings.setVivoListViewConfigDisplayModel(oldDisplayModelVivoListView);
             } catch (Exception e) {
-                log.info("unable to read display model migration files, display model not migrated. " + e.getMessage());
+                log.info("Unable to read display model migration files. " + e.getMessage());
                 tryMigrateDisplay = false;
             }
 

@@ -121,8 +125,11 @@ public class UpdateKnowledgeBase implements ServletContextListener {
             KnowledgeBaseUpdater ontologyUpdater = new KnowledgeBaseUpdater(settings);
 
             try {
-                if (ontologyUpdater.updateRequired(ctx)) {
+                if (!ontologyUpdater.updateRequired(ctx)) {
+                    log.info("No data migration required.");
+                } else {
+                    ctx.setAttribute(KBM_REQURIED_AT_STARTUP, Boolean.TRUE);
                     log.info("Data migration required");
                     ontologyUpdater.update(ctx);
                     if (tryMigrateDisplay) {
                         try {

@@ -144,6 +151,17 @@ public class UpdateKnowledgeBase implements ServletContextListener {
             }
         }
 
+
+    /**
+     * Set the paths for the files that specify how to perform the update
+     */
+    private void putNonReportingPathsIntoSettings(ServletContext ctx, UpdateSettings settings) {
+        settings.setAskUpdatedQueryFile(ctx.getRealPath(ASK_QUERY_FILE));
+        settings.setDiffFile(ctx.getRealPath(DIFF_FILE));
+        settings.setSuccessAssertionsFile(ctx.getRealPath(SUCCESS_ASSERTIONS_FILE));
+        settings.setSuccessRDFFormat("N3");
+    }
+
     /**
      * Create the directories where we will report on the update.
      * Put the paths for the directories and files into the settings object.

@@ -156,11 +174,6 @@ public class UpdateKnowledgeBase implements ServletContextListener {
         settings.setDataDir(dataDir.toString());
         StartupStatus.getBean(ctx).info(this, "Updating knowledge base: reports are in '" + dataDir + "'");
 
-        settings.setAskUpdatedQueryFile(dataDir.resolve("askUpdated.sparql").toString());
-        settings.setDiffFile(dataDir.resolve("diff.tab.txt").toString());
-        settings.setSuccessAssertionsFile(dataDir.resolve("success.n3").toString());
-        settings.setSuccessRDFFormat("N3");
-
         settings.setSparqlConstructAdditionsDir(createDirectory(dataDir, "sparqlConstructs", "additions").toString());
         settings.setSparqlConstructDeletionsDir(createDirectory(dataDir, "sparqlConstructs", "deletions").toString());
 
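The new constants and putNonReportingPathsIntoSettings() resolve the update files through the servlet context rather than the per-run reporting directory. A small sketch of how such a /WEB-INF-relative constant becomes a filesystem path follows; the helper class is hypothetical, and ServletContext.getRealPath() is the only real API used.

import javax.servlet.ServletContext;

final class UpdatePathSketch {
    private static final String DATA_DIR = "/WEB-INF/ontologies/update/";
    private static final String DIFF_FILE = DATA_DIR + "diff.tab.txt";

    private UpdatePathSketch() { }

    // getRealPath maps a webapp-relative path onto the exploded webapp directory;
    // it can return null when the container cannot map the resource to disk
    static String diffFilePath(ServletContext ctx) {
        return ctx.getRealPath(DIFF_FILE);
    }
}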
GroupedPropertyList.java

@@ -20,6 +20,7 @@ import edu.cornell.mannlib.vitro.webapp.beans.PropertyGroup;
 import edu.cornell.mannlib.vitro.webapp.beans.PropertyInstance;
 import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest;
 import edu.cornell.mannlib.vitro.webapp.dao.DataPropertyDao;
+import edu.cornell.mannlib.vitro.webapp.dao.ModelAccess;
 import edu.cornell.mannlib.vitro.webapp.dao.ObjectPropertyDao;
 import edu.cornell.mannlib.vitro.webapp.dao.PropertyGroupDao;
 import edu.cornell.mannlib.vitro.webapp.dao.PropertyInstanceDao;

@@ -175,7 +176,13 @@ public class GroupedPropertyList extends BaseTemplateModel {
         // There is no ObjectPropertyDao.getAllPossibleObjectPropertiesForIndividual() parallel to
         // DataPropertyDao.getAllPossibleDatapropsForIndividual(). The comparable method for object properties
         // is defined using PropertyInstance rather than ObjectProperty.
-        PropertyInstanceDao piDao = wdf.getPropertyInstanceDao();
+
+        // Getting WebappDaoFactory from the session because we can't have the filtering
+        // that gets applied to the request. This breaks blank node structures in the
+        // restrictions that determine applicable properties.
+        WebappDaoFactory wadf = ModelAccess.on(vreq.getSession().getServletContext()).getWebappDaoFactory();
+        PropertyInstanceDao piDao = wadf.getPropertyInstanceDao();
+
         Collection<PropertyInstance> allPropInstColl = piDao
                 .getAllPossiblePropInstForIndividual(subject.getURI());
         if (allPropInstColl != null) {
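The GroupedPropertyList change swaps the request-scoped DAO factory for the unfiltered one held in the servlet context, because the filtering applied to the request breaks the blank-node restriction structures used to compute applicable properties. A sketch of that selection follows, using only calls that appear in the hunk above; the wrapper class is hypothetical, and the WebappDaoFactory import path is assumed to follow the other dao imports.

import javax.servlet.ServletContext;

import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest;
import edu.cornell.mannlib.vitro.webapp.dao.ModelAccess;
import edu.cornell.mannlib.vitro.webapp.dao.PropertyInstanceDao;
import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactory;

final class UnfilteredDaoSketch {

    private UnfilteredDaoSketch() { }

    // fetch the unfiltered factory from the servlet context, not the filtered request
    static PropertyInstanceDao propertyInstanceDao(VitroRequest vreq) {
        ServletContext ctx = vreq.getSession().getServletContext();
        WebappDaoFactory unfiltered = ModelAccess.on(ctx).getWebappDaoFactory();
        return unfiltered.getPropertyInstanceDao();
    }
}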