VCard migration
Commit 91aedd5255 (parent 57055a4efc)
3 changed files with 133 additions and 121 deletions
@@ -616,22 +616,36 @@ public class PropertyDaoJena extends JenaBaseDao implements PropertyDao {
         return classes;
     }
 
+    private static final int DEPTH_LIMIT = 20;
+
     private List<Restriction> getRelatedRestrictions(OntClass ontClass) {
-        List<Restriction> relatedRestrictions = new ArrayList<Restriction>();
+        return getRelatedRestrictions(ontClass, new ArrayList<Restriction>(), DEPTH_LIMIT);
+    }
+
+    private List<Restriction> getRelatedRestrictions(OntClass ontClass,
+            List<Restriction> relatedRestrictions, int limit) {
+        limit--;
         if (ontClass.isRestriction()) {
             relatedRestrictions.add(ontClass.as(Restriction.class));
         } else if (ontClass.isIntersectionClass()) {
             IntersectionClass inter = ontClass.as(IntersectionClass.class);
             Iterator<? extends OntClass> operIt = inter.listOperands();
             while (operIt.hasNext()) {
-                relatedRestrictions.addAll(getRelatedRestrictions(operIt.next()));
+                OntClass operand = operIt.next();
+                if (!relatedRestrictions.contains(operand) && limit > 0) {
+                    relatedRestrictions.addAll(
+                            getRelatedRestrictions(
+                                    operand, relatedRestrictions, limit));
+                }
             }
         } else {
             List<OntClass> superClasses = listSuperClasses(ontClass);
             superClasses.addAll(listEquivalentClasses(ontClass));
             for (OntClass sup : superClasses) {
-                if (!sup.equals(ontClass)) {
-                    relatedRestrictions.addAll(getRelatedRestrictions(sup));
+                if (sup.isAnon() && !sup.equals(ontClass)
+                        && !relatedRestrictions.contains(ontClass) && limit > 0) {
+                    relatedRestrictions.addAll(
+                            getRelatedRestrictions(sup, relatedRestrictions, limit));
                 }
             }
         }
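A note on the hunk above: the old getRelatedRestrictions() recursed through intersection operands, superclasses, and equivalent classes with no termination guard, so mutually equivalent or cyclic class axioms in the TBox could recurse forever. The rewrite threads the accumulating result list through the calls as a visited set, decrements a depth budget capped at DEPTH_LIMIT, and additionally restricts the superclass walk to anonymous class expressions (sup.isAnon()). A minimal sketch of the kind of model that motivates the guard; this is plain Jena, not VIVO code, and the URIs are hypothetical. It assumes the Jena 2 packages (com.hp.hpl.jena.*) this codebase uses:

import com.hp.hpl.jena.ontology.OntClass;
import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.ontology.OntModelSpec;
import com.hp.hpl.jena.rdf.model.ModelFactory;

public class CycleSketch {
    public static void main(String[] args) {
        OntModel m = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM);
        // Two classes declared equivalent to each other form a cycle:
        // walking "equivalent classes of A" reaches B, whose equivalent
        // classes include A again. Without the visited-list/DEPTH_LIMIT
        // guard added above, the recursion would never bottom out.
        OntClass a = m.createClass("http://example.org/A"); // hypothetical URI
        OntClass b = m.createClass("http://example.org/B"); // hypothetical URI
        a.addEquivalentClass(b);
        b.addEquivalentClass(a);
    }
}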
@@ -11,6 +11,8 @@ import java.io.FileReader;
 import java.io.IOException;
 import java.io.InputStream;
 import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;
 
@@ -112,126 +114,123 @@ public class KnowledgeBaseUpdater {
             log.error("unable to migrate migration metadata " + e.getMessage());
         }
 
+        log.warn("KnowledgeBaseUpdater needs to be modified to work on all graphs!");
+        OntModel readModel = settings.getUnionOntModelSelector().getABoxModel();
+        OntModel writeModel = settings.getAssertionOntModelSelector().getABoxModel();
+        // TODO make sure the ABox update applies to all graphs
+
         log.info("\tupdating the abox");
         updateABox(changes);
+
+        log.info("performing SPARQL CONSTRUCT additions");
+        performSparqlConstructs(settings.getSparqlConstructAdditionsDir(), readModel, writeModel, ADD);
+
+        log.info("performing SPARQL CONSTRUCT retractions");
+        performSparqlConstructs(settings.getSparqlConstructDeletionsDir(), readModel, writeModel, RETRACT);
     }
 
-    private void performSparqlConstructAdditions(String sparqlConstructDir, OntModel readModel, OntModel writeModel) throws IOException {
-        Model anonModel = performSparqlConstructs(sparqlConstructDir, readModel, true);
-        if (anonModel == null) {
-            return;
-        }
-        writeModel.enterCriticalSection(Lock.WRITE);
-        try {
-            JenaIngestUtils jiu = new JenaIngestUtils();
-            Model additions = jiu.renameBNodes(anonModel, settings.getDefaultNamespace() + "n", writeModel);
-            Model actualAdditions = ModelFactory.createDefaultModel();
-            StmtIterator stmtIt = additions.listStatements();
-            while (stmtIt.hasNext()) {
-                Statement stmt = stmtIt.nextStatement();
-                if (!writeModel.contains(stmt)) {
-                    actualAdditions.add(stmt);
-                }
-            }
-            writeModel.add(actualAdditions);
-            record.recordAdditions(actualAdditions);
-        } finally {
-            writeModel.leaveCriticalSection();
-        }
-    }
-
-    private void performSparqlConstructRetractions(String sparqlConstructDir, OntModel readModel, OntModel writeModel) throws IOException {
-        Model retractions = performSparqlConstructs(sparqlConstructDir, readModel, false);
-        if (retractions == null) {
-            return;
-        }
-        writeModel.enterCriticalSection(Lock.WRITE);
-        try {
-            writeModel.remove(retractions);
-            record.recordRetractions(retractions);
-        } finally {
-            writeModel.leaveCriticalSection();
-        }
-    }
+    private static final boolean ADD = true;
+    private static final boolean RETRACT = !ADD;
 
     /**
      * Performs a set of arbitrary SPARQL CONSTRUCT queries on the
      * data, for changes that cannot be expressed as simple property
      * or class additions, deletions, or renamings.
      * Blank nodes created by the queries are given random URIs.
      * @param sparqlConstructDir
-     * @param aboxModel
+     * @param readModel
+     * @param writeModel
+     * @param add (add = true; retract = false)
      */
-    private Model performSparqlConstructs(String sparqlConstructDir,
-            OntModel readModel,
+    private void performSparqlConstructs(String sparqlConstructDir,
+            OntModel readModel, OntModel writeModel,
             boolean add) throws IOException {
-        Model anonModel = ModelFactory.createDefaultModel();
         File sparqlConstructDirectory = new File(sparqlConstructDir);
+        log.info("Using SPARQL CONSTRUCT director " + sparqlConstructDirectory);
         if (!sparqlConstructDirectory.isDirectory()) {
-            logger.logError(this.getClass().getName() +
+            String logMsg = this.getClass().getName() +
                     "performSparqlConstructs() expected to find a directory " +
                     " at " + sparqlConstructDir + ". Unable to execute " +
-                    " SPARQL CONSTRUCTS.");
-            return null;
+                    " SPARQL CONSTRUCTS.";
+            logger.logError(logMsg);
+            log.error(logMsg);
+            return;
         }
-        File[] sparqlFiles = sparqlConstructDirectory.listFiles();
-        for (int i = 0; i < sparqlFiles.length; i ++) {
-            File sparqlFile = sparqlFiles[i];
+        List<File> sparqlFiles = Arrays.asList(sparqlConstructDirectory.listFiles());
+        Collections.sort(sparqlFiles); // queries may depend on being run in a certain order
+        JenaIngestUtils jiu = new JenaIngestUtils();
+        for (File sparqlFile : sparqlFiles) {
+            Model anonModel = ModelFactory.createDefaultModel();
+            StringBuffer fileContents = new StringBuffer();
             try {
                 BufferedReader reader = new BufferedReader(new FileReader(sparqlFile));
-                StringBuffer fileContents = new StringBuffer();
                 String ln;
-
                 while ( (ln = reader.readLine()) != null) {
                     fileContents.append(ln).append('\n');
                 }
-
-                try {
-                    log.debug("\t\tprocessing SPARQL construct query from file " + sparqlFiles[i].getName());
-                    Query q = QueryFactory.create(fileContents.toString(), Syntax.syntaxARQ);
-                    readModel.enterCriticalSection(Lock.READ);
-                    try {
-                        QueryExecution qe = QueryExecutionFactory.create(q, readModel);
-                        long numBefore = anonModel.size();
-                        qe.execConstruct(anonModel);
-                        long numAfter = anonModel.size();
-                        long num = numAfter - numBefore;
-
-                        if (num > 0) {
-                            logger.log((add ? "Added " : "Removed ") + num +
-                                    " statement" + ((num > 1) ? "s" : "") +
-                                    " using the SPARQL construct query from file " + sparqlFiles[i].getParentFile().getName() + "/" + sparqlFiles[i].getName());
-                        }
-                        qe.close();
-                    } finally {
-                        readModel.leaveCriticalSection();
-                    }
-                } catch (Exception e) {
-                    logger.logError(this.getClass().getName() +
-                            ".performSparqlConstructs() unable to execute " +
-                            "query at " + sparqlFile + ". Error message is: " + e.getMessage());
-                }
             } catch (FileNotFoundException fnfe) {
-                logger.log("WARNING: performSparqlConstructs() could not find " +
-                        " SPARQL CONSTRUCT file " + sparqlFile + ". Skipping.");
+                String logMsg = "WARNING: performSparqlConstructs() could not find " +
+                        " SPARQL CONSTRUCT file " + sparqlFile + ". Skipping.";
+                logger.log(logMsg);
+                log.info(logMsg);
+                continue;
             }
+            try {
+                log.info("\t\tprocessing SPARQL construct query from file " + sparqlFile.getName());
+                Query q = QueryFactory.create(fileContents.toString(), Syntax.syntaxARQ);
+                readModel.enterCriticalSection(Lock.READ);
+                try {
+                    QueryExecution qe = QueryExecutionFactory.create(q, readModel);
+                    long numBefore = anonModel.size();
+                    qe.execConstruct(anonModel);
+                    long numAfter = anonModel.size();
+                    long num = numAfter - numBefore;
+                    if (num > 0) {
+                        String logMsg = (add ? "Added " : "Removed ") + num +
+                                " statement" + ((num > 1) ? "s" : "") +
+                                " using the SPARQL construct query from file " +
+                                sparqlFile.getParentFile().getName() +
+                                "/" + sparqlFile.getName();
+                        logger.log(logMsg);
+                        log.info(logMsg);
+                    }
+                    qe.close();
+                } finally {
+                    readModel.leaveCriticalSection();
+                }
+            } catch (Exception e) {
+                logger.logError(this.getClass().getName() +
+                        ".performSparqlConstructs() unable to execute " +
+                        "query at " + sparqlFile + ". Error message is: " + e.getMessage());
+                log.error(e,e);
+            }
+            writeModel.enterCriticalSection(Lock.WRITE);
+            try {
+                if(!add) {
+                    writeModel.remove(anonModel);
+                    record.recordRetractions(anonModel);
+                    //log.info("removed " + anonModel.size() + " statements from SPARQL CONSTRUCTs");
+                } else {
+                    Model additions = jiu.renameBNodes(
+                            anonModel, settings.getDefaultNamespace() + "n", writeModel);
+                    Model actualAdditions = ModelFactory.createDefaultModel();
+                    StmtIterator stmtIt = additions.listStatements();
+                    while (stmtIt.hasNext()) {
+                        Statement stmt = stmtIt.nextStatement();
+                        if (!writeModel.contains(stmt)) {
+                            actualAdditions.add(stmt);
+                        }
+                    }
+                    writeModel.add(actualAdditions);
+                    //log.info("added " + actualAdditions.size() + " statements from SPARQL CONSTRUCTs");
+                    record.recordAdditions(actualAdditions);
+                }
+            } finally {
+                writeModel.leaveCriticalSection();
+            }
         }
-
-        return anonModel;
     }
 
     private List<AtomicOntologyChange> getAtomicOntologyChanges()
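A note on the hunk above: the two drivers (performSparqlConstructAdditions / performSparqlConstructRetractions) and the shared helper are collapsed into a single performSparqlConstructs() that reads each query file, CONSTRUCTs into a per-file scratch model, and applies that model to the write model immediately, so each query can see the effects of the ones before it; presumably that is also why the files are now run in sorted name order. A minimal sketch of that per-file flow, assuming the Jena 2 packages (com.hp.hpl.jena.*) this codebase uses; the query string and data are hypothetical stand-ins for the VCard migration files:

import com.hp.hpl.jena.query.Query;
import com.hp.hpl.jena.query.QueryExecution;
import com.hp.hpl.jena.query.QueryExecutionFactory;
import com.hp.hpl.jena.query.QueryFactory;
import com.hp.hpl.jena.query.Syntax;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;

public class ConstructFlowSketch {
    public static void main(String[] args) {
        // Stand-ins for the ABox union (read) and assertions (write) models.
        Model readModel = ModelFactory.createDefaultModel();
        Model writeModel = ModelFactory.createDefaultModel();

        // A hypothetical migration query of the kind that would live in
        // sparqlConstructs/additions; the real queries ship as files.
        String queryStr =
                "PREFIX foaf: <http://xmlns.com/foaf/0.1/> " +
                "CONSTRUCT { ?p foaf:name ?n } WHERE { ?p foaf:firstName ?n }";

        // Per-file flow from the refactored method: CONSTRUCT into a scratch
        // model, then apply the scratch model to the write model.
        Query q = QueryFactory.create(queryStr, Syntax.syntaxARQ);
        Model anonModel = ModelFactory.createDefaultModel();
        QueryExecution qe = QueryExecutionFactory.create(q, readModel);
        qe.execConstruct(anonModel);
        qe.close();

        writeModel.add(anonModel);       // the ADD case
        // writeModel.remove(anonModel); // the RETRACT case
    }
}

In the real method the ADD case additionally renames blank nodes via JenaIngestUtils.renameBNodes() before adding, filters out statements the write model already contains, and records the net additions or retractions for the update report.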
@@ -158,6 +158,8 @@ public class UpdateKnowledgeBase implements ServletContextListener {
     private void putNonReportingPathsIntoSettings(ServletContext ctx, UpdateSettings settings) {
         settings.setAskUpdatedQueryFile(ctx.getRealPath(ASK_QUERY_FILE));
         settings.setDiffFile(ctx.getRealPath(DIFF_FILE));
+        settings.setSparqlConstructAdditionsDir(ctx.getRealPath(DATA_DIR + "sparqlConstructs/additions"));
+        settings.setSparqlConstructDeletionsDir(ctx.getRealPath(DATA_DIR + "sparqlConstructs/deletions"));
         settings.setSuccessAssertionsFile(ctx.getRealPath(SUCCESS_ASSERTIONS_FILE));
         settings.setSuccessRDFFormat("N3");
     }
@@ -174,9 +176,6 @@ public class UpdateKnowledgeBase implements ServletContextListener {
         settings.setDataDir(dataDir.toString());
         StartupStatus.getBean(ctx).info(this, "Updating knowledge base: reports are in '" + dataDir + "'");
-
-        settings.setSparqlConstructAdditionsDir(createDirectory(dataDir, "sparqlConstructs", "additions").toString());
-        settings.setSparqlConstructDeletionsDir(createDirectory(dataDir, "sparqlConstructs", "deletions").toString());
 
         Path changedDir = createDirectory(dataDir, "changedData");
         settings.setAddedDataFile(changedDir.resolve("addedData.n3").toString());
         settings.setRemovedDataFile(changedDir.resolve("removedData.n3").toString());
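A note on the last two hunks: together they move the SPARQL CONSTRUCT directories from empty per-run folders created under the report dataDir to fixed paths resolved inside the deployed webapp via ctx.getRealPath(DATA_DIR + ...). That way the migration queries, presumably including the VCard queries this commit is named for, ship with the application instead of having to be supplied at runtime. The value of the DATA_DIR constant is defined elsewhere in UpdateKnowledgeBase and is not shown in this diff.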