NIHVIVO-3481 fixed 'create classgroups automatically' option

brianjlowe 2011-12-12 16:48:50 +00:00
parent 83c4e54a34
commit 549d489bac
2 changed files with 557 additions and 517 deletions

View file

@@ -34,6 +34,7 @@ import edu.cornell.mannlib.vitro.webapp.controller.Controllers;
import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest;
import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactory;
import edu.cornell.mannlib.vitro.webapp.dao.jena.JenaModelUtils;
+import edu.cornell.mannlib.vitro.webapp.dao.jena.ModelContext;
import edu.cornell.mannlib.vitro.webapp.dao.jena.OntModelSelector;
import edu.cornell.mannlib.vitro.webapp.dao.jena.VitroJenaSpecialModelMaker;
import edu.cornell.mannlib.vitro.webapp.dao.jena.event.BulkUpdateEvent;
@@ -48,149 +49,146 @@ public class RDFUploadController extends BaseEditController {
    private static final String LOAD_RDF_DATA_JSP="/jenaIngest/loadRDFData.jsp";
    private static final String LIST_MODELS_JSP = "/jenaIngest/listModels.jsp";

    public void doPost(HttpServletRequest rawRequest,
            HttpServletResponse response) throws ServletException, IOException {
        if (!isAuthorizedToDisplayPage(rawRequest, response, new Actions(
                new UseAdvancedDataToolsPages()))) {
            return;
        }

        FileUploadServletRequest req = FileUploadServletRequest.parseRequest(
                rawRequest, maxFileSizeInBytes);
        if (req.hasFileUploadException()) {
            forwardToFileUploadError(
                    req.getFileUploadException().getLocalizedMessage(),
                    req, response);
            return;
        }

        Map<String, List<FileItem>> fileStreams = req.getFiles();

        VitroRequest request = new VitroRequest(req);
        LoginStatusBean loginBean = LoginStatusBean.getBean(request);

        String modelName = req.getParameter("modelName");
        if(modelName!=null){
            loadRDF(req,request,response);
            return;
        }

        boolean remove = "remove".equals(request.getParameter("mode"));
        String verb = remove?"Removed":"Added";

        String languageStr = request.getParameter("language");

-        boolean makeClassgroups =
-            (request.getParameter("makeClassgroups") != null);
+        boolean makeClassgroups = ("true".equals(request.getParameter(
+                "makeClassgroups")));

        // add directly to the ABox model without reading first into
        // a temporary in-memory model
        boolean directRead = ("directAddABox".equals(request.getParameter(
                "mode")));

        String uploadDesc ="";

        OntModel uploadModel = (directRead)
            ? getABoxModel(request.getSession(), getServletContext())
            : ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM);

        /* ********************* GET RDF by URL ********************** */
        String RDFUrlStr = request.getParameter("rdfUrl");
        if (RDFUrlStr != null && RDFUrlStr.length() > 0) {
            try {
                uploadModel.enterCriticalSection(Lock.WRITE);
                try {
                    uploadModel.read(RDFUrlStr, languageStr);
                    // languageStr may be null and default would be RDF/XML
                } finally {
                    uploadModel.leaveCriticalSection();
                }
                uploadDesc = verb + " RDF from " + RDFUrlStr;
            } catch (JenaException ex){
                forwardToFileUploadError("Could not parse file to " +
                        languageStr + ": " + ex.getMessage(), req, response);
                return;
            }catch (Exception e) {
                forwardToFileUploadError("Could not load from URL: " +
                        e.getMessage(), req, response);
                return;
            }
        } else {
            /* **************** upload RDF from POST ********************* */
            if( fileStreams.get("rdfStream") != null
                    && fileStreams.get("rdfStream").size() > 0 ) {
                FileItem rdfStream = fileStreams.get("rdfStream").get(0);
                try {
                    uploadModel.enterCriticalSection(Lock.WRITE);
                    try {
                        uploadModel.read(
                                rdfStream.getInputStream(), null, languageStr);
                    } finally {
                        uploadModel.leaveCriticalSection();
                    }
                    uploadDesc = verb + " RDF from file " + rdfStream.getName();
                } catch (IOException e) {
                    forwardToFileUploadError("Could not read file: " +
                            e.getLocalizedMessage(), req, response);
                    return;
                }catch (JenaException ex){
                    forwardToFileUploadError("Could not parse file to " +
                            languageStr + ": " + ex.getMessage(),
                            req, response);
                    return;
                }catch (Exception e) {
                    forwardToFileUploadError("Could not load from file: " +
                            e.getMessage(), req, response);
                    return;
                }finally{
                    rdfStream.delete();
                }
            }
        }

        /* ********** Do the model changes *********** */
        if( !directRead && uploadModel != null ){
            long tboxstmtCount = 0L;
            long aboxstmtCount = 0L;

            JenaModelUtils xutil = new JenaModelUtils();

-            OntModel tboxModel = getTBoxModel(
-                request.getSession(), getServletContext());
-            OntModel aboxModel = getABoxModel(
-                request.getSession(), getServletContext());
-            OntModel tboxChangeModel=null;
-            Model aboxChangeModel=null;
-            if (tboxModel != null) {
-                boolean AGGRESSIVE = true;
-                tboxChangeModel = xutil.extractTBox(uploadModel, AGGRESSIVE);
-                // aggressively seek all statements that are part of the TBox
-                tboxstmtCount = operateOnModel(
-                        request.getFullWebappDaoFactory(),
-                        tboxModel,tboxChangeModel,
-                        remove,
-                        makeClassgroups,
-                        loginBean.getUserURI());
-            }
-            if (aboxModel != null) {
-                aboxChangeModel = uploadModel.remove(tboxChangeModel);
-                aboxstmtCount = operateOnModel(
-                        request.getFullWebappDaoFactory(),
-                        aboxModel,
-                        aboxChangeModel,
-                        remove,
-                        makeClassgroups,
-                        loginBean.getUserURI());
-            }
-            request.setAttribute("uploadDesc", uploadDesc + ". " + verb + " " +
+
+            OntModel tboxModel = getTBoxModel(
+                    request.getSession(), getServletContext());
+            OntModel aboxModel = getABoxModel(
+                    request.getSession(), getServletContext());
+            OntModel tboxChangeModel = null;
+            Model aboxChangeModel = null;
+            OntModelSelector ontModelSelector = ModelContext.getOntModelSelector(
+                    getServletContext());
+
+            if (tboxModel != null) {
+                boolean AGGRESSIVE = true;
+                tboxChangeModel = xutil.extractTBox(uploadModel, AGGRESSIVE);
+                // aggressively seek all statements that are part of the TBox
+                tboxstmtCount = operateOnModel(request.getFullWebappDaoFactory(),
+                        tboxModel, tboxChangeModel, ontModelSelector,
+                        remove, makeClassgroups, loginBean.getUserURI());
+            }
+            if (aboxModel != null) {
+                aboxChangeModel = uploadModel.remove(tboxChangeModel);
+                aboxstmtCount = operateOnModel(request.getFullWebappDaoFactory(),
+                        aboxModel, aboxChangeModel, ontModelSelector,
+                        remove, makeClassgroups, loginBean.getUserURI());
+            }
+            request.setAttribute("uploadDesc", uploadDesc + ". " + verb + " " +
                    (tboxstmtCount + aboxstmtCount) + " statements.");
        } else {
            request.setAttribute("uploadDesc", "RDF upload successful.");
        }
        RequestDispatcher rd = request.getRequestDispatcher(
                Controllers.BASIC_JSP);
        request.setAttribute(
                "bodyJsp", "/templates/edit/specific/upload_rdf_result.jsp");
        request.setAttribute("title","Ingest RDF Data");
        try {
@@ -200,31 +198,31 @@ public class RDFUploadController extends BaseEditController {
        }
    }

    public void loadRDF(FileUploadServletRequest req,
                        VitroRequest request,
                        HttpServletResponse response)
            throws ServletException, IOException {
        Map<String, List<FileItem>> fileStreams = req.getFiles();
        String filePath = fileStreams.get("filePath").get(0).getName();
        fileStream = fileStreams.get("filePath").get(0);
        String modelName = req.getParameter("modelName");
        String docLoc = req.getParameter("docLoc");
        String languageStr = request.getParameter("language");
        ModelMaker maker = getVitroJenaModelMaker(request);

        if (docLoc!=null && modelName != null) {
            doLoadRDFData(modelName,docLoc,filePath,languageStr,maker);
            //request.setAttribute("title","Ingest Menu");
            //request.setAttribute("bodyJsp",INGEST_MENU_JSP);
            request.setAttribute("title","Available Models");
            request.setAttribute("bodyJsp",LIST_MODELS_JSP);
        } else {
            request.setAttribute("title","Load RDF Data");
            request.setAttribute("bodyJsp",LOAD_RDF_DATA_JSP);
        }
        RequestDispatcher rd = request.getRequestDispatcher(
                Controllers.BASIC_JSP);
        try {
            rd.forward(request, response);
@@ -234,39 +232,53 @@ public class RDFUploadController extends BaseEditController {
            throw new ServletException(errMsg, e);
        }
    }

    private long operateOnModel(WebappDaoFactory webappDaoFactory,
                                OntModel mainModel,
                                Model changesModel,
-                                boolean remove,
-                                boolean makeClassgroups,
-                                String userURI) {
+                                OntModelSelector ontModelSelector,
+                                boolean remove,
+                                boolean makeClassgroups,
+                                String userURI) {
+
+        EditEvent startEvent = null, endEvent = null;
+        if (remove) {
+            startEvent = new BulkUpdateEvent(userURI, true);
+            endEvent = new BulkUpdateEvent(userURI, false);
+        } else {
+            startEvent = new EditEvent(userURI, true);
+            endEvent = new EditEvent(userURI, false);
+        }
+
+        Model[] classgroupModel = null;
+        if (makeClassgroups) {
+            classgroupModel = JenaModelUtils.makeClassGroupsFromRootClasses(
+                    webappDaoFactory, changesModel);
+            OntModel appMetadataModel = ontModelSelector
+                    .getApplicationMetadataModel();
+            appMetadataModel.enterCriticalSection(Lock.WRITE);
+            try {
+                appMetadataModel.add(classgroupModel[0]);
+            } finally {
+                appMetadataModel.leaveCriticalSection();
+            }
+        }
+
        mainModel.enterCriticalSection(Lock.WRITE);
        try {
-            EditEvent startEvent = null, endEvent = null;
-            if (remove) {
-                startEvent = new BulkUpdateEvent(userURI, true);
-                endEvent = new BulkUpdateEvent(userURI, false);
-            } else {
-                startEvent = new EditEvent(userURI, true);
-                endEvent = new EditEvent(userURI, false);
-            }
            mainModel.getBaseModel().notifyEvent(startEvent);
            try {
-                if (makeClassgroups) {
-                    Model classgroupModel =
-                        JenaModelUtils.makeClassGroupsFromRootClasses(
-                                webappDaoFactory, changesModel, changesModel);
-                    mainModel.add(classgroupModel);
-                }
                if (remove) {
                    mainModel.remove(changesModel);
                } else {
                    mainModel.add(changesModel);
+                    if (classgroupModel != null) {
+                        mainModel.add(classgroupModel[1]);
+                    }
                }
            } finally {
                mainModel.getBaseModel().notifyEvent(endEvent);
@@ -279,45 +291,45 @@ public class RDFUploadController extends BaseEditController {
    private void doLoadRDFData(String modelName,
                               String docLoc,
                               String filePath,
                               String language,
                               ModelMaker modelMaker) {
        Model m = modelMaker.getModel(modelName);
        m.enterCriticalSection(Lock.WRITE);
        try {
            if ( (docLoc != null) && (docLoc.length()>0) ) {
                m.read(docLoc, language);
            } else if ( (filePath != null) && (filePath.length()>0) ) {
                File file = new File(filePath);
                File[] files;
                if (file.isDirectory()) {
                    files = file.listFiles();
                } else {
                    files = new File[1];
                    files[0] = file;
                }
                for (int i=0; i<files.length; i++) {
                    File currentFile = files[i];
                    log.debug("Reading file " + currentFile.getName());
                    try {
                        m.read(fileStream.getInputStream(), null, language);
                        fileStream.delete();
                    } catch (IOException ioe) {
                        String errMsg = "Error loading RDF from " +
                                currentFile.getName();
                        log.error(errMsg, ioe);
                        throw new RuntimeException(errMsg, ioe);
                    }
                }
            }
        } finally {
            m.leaveCriticalSection();
        }
    }

    private void forwardToFileUploadError( String errrorMsg ,
                                           HttpServletRequest req,
                                           HttpServletResponse response)
            throws ServletException{
        VitroRequest vreq = new VitroRequest(req);
        req.setAttribute("title","RDF Upload Error ");
@@ -332,21 +344,21 @@ public class RDFUploadController extends BaseEditController {
        try {
            rd.forward(req, response);
        } catch (IOException e1) {
            log.error(e1);
            throw new ServletException(e1);
        }
        return;
    }

    private ModelMaker getVitroJenaModelMaker(HttpServletRequest request) {
        ModelMaker myVjmm = (ModelMaker) request.getSession().getAttribute(
                "vitroJenaModelMaker");
        myVjmm = (myVjmm == null)
            ? (ModelMaker) getServletContext().getAttribute(
                    "vitroJenaModelMaker")
            : myVjmm;
        return new VitroJenaSpecialModelMaker(myVjmm, request);
    }

    private OntModel getABoxModel(HttpSession session, ServletContext ctx) {
        if (session != null
@@ -374,6 +386,6 @@ public class RDFUploadController extends BaseEditController {
        }
    }

    private static final Log log = LogFactory.getLog(
            RDFUploadController.class.getName());
}

View file

@@ -11,7 +11,6 @@ import java.util.Set;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

-import com.hp.hpl.jena.datatypes.xsd.XSDDatatype;
import com.hp.hpl.jena.ontology.Individual;
import com.hp.hpl.jena.ontology.OntClass;
import com.hp.hpl.jena.ontology.OntModel;
@@ -25,6 +24,7 @@ import com.hp.hpl.jena.query.QueryFactory;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.Property;
+import com.hp.hpl.jena.rdf.model.RDFNode;
import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.rdf.model.ResourceFactory;
import com.hp.hpl.jena.shared.Lock;
@@ -39,258 +39,286 @@ import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactoryConfig;
public class JenaModelUtils {

    private static final Log log = LogFactory.getLog(JenaModelUtils.class.getName());

    private static final Set<String> nonIndividualTypeURIs ;

    static {
        nonIndividualTypeURIs = new HashSet<String>();
        nonIndividualTypeURIs.add(OWL.Class.getURI());
        nonIndividualTypeURIs.add(OWL.Restriction.getURI());
        nonIndividualTypeURIs.add(OWL.ObjectProperty.getURI());
        nonIndividualTypeURIs.add(OWL.DatatypeProperty.getURI());
        nonIndividualTypeURIs.add(OWL.AnnotationProperty.getURI());
        nonIndividualTypeURIs.add(OWL.Ontology.getURI());
        nonIndividualTypeURIs.add(RDFS.Class.getURI());
        nonIndividualTypeURIs.add(RDF.Property.getURI());
    }

-    public synchronized static void makeClassGroupsFromRootClasses(WebappDaoFactory wadf, Model ontModel) {
-        makeClassGroupsFromRootClasses(wadf, ontModel, ontModel);
-    }
-    public synchronized static OntModel makeClassGroupsFromRootClasses(WebappDaoFactory wadf, Model baseModel, Model vitroInternalsSubmodel) {
-        OntModel ontModel = ModelFactory.createOntologyModel(OntModelSpec.OWL_DL_MEM,baseModel);
-        OntModel modelForClassgroups = ModelFactory.createOntologyModel(OntModelSpec.OWL_DL_MEM);
-        SimpleOntModelSelector oms = new SimpleOntModelSelector();
-        oms.setTBoxModel(ontModel);
-        oms.setApplicationMetadataModel(modelForClassgroups);
-        WebappDaoFactoryConfig config = new WebappDaoFactoryConfig();
-        config.setDefaultNamespace(wadf.getDefaultNamespace());
-        WebappDaoFactory myWebappDaoFactory = new WebappDaoFactoryJena(
-                new SimpleOntModelSelector(ontModel), config, null);
-        OntModel tempModel = ModelFactory.createOntologyModel(OntModelSpec.OWL_DL_MEM);
-        Resource classGroupClass = ResourceFactory.createResource(VitroVocabulary.CLASSGROUP);
-        Property inClassGroupProperty = ResourceFactory.createProperty(VitroVocabulary.IN_CLASSGROUP);
-        ontModel.enterCriticalSection(Lock.READ);
-        try {
-            try {
-                for (Iterator rootClassIt = myWebappDaoFactory.getVClassDao().getRootClasses().iterator(); rootClassIt.hasNext(); ) {
-                    VClass rootClass = (VClass) rootClassIt.next();
-                    Individual classGroup = tempModel.createIndividual(wadf.getDefaultNamespace()+"vitroClassGroup"+rootClass.getLocalName(), classGroupClass);
-                    classGroup.addProperty(tempModel.getProperty(VitroVocabulary.DISPLAY_RANK_ANNOT),"50",XSDDatatype.XSDint);
-                    classGroup.setLabel(rootClass.getName(),null);
-                    OntClass rootClassOntClass = ontModel.getOntClass(rootClass.getURI());
-                    tempModel.add(rootClassOntClass, inClassGroupProperty, classGroup);
-                    for (Iterator childIt = myWebappDaoFactory.getVClassDao().getAllSubClassURIs(rootClass.getURI()).iterator(); childIt.hasNext(); ) {
-                        String childURI = (String) childIt.next();
-                        OntClass childClass = ontModel.getOntClass(childURI);
-                        childClass.addProperty(inClassGroupProperty, classGroup);
-                    }
-                }
-            } catch (Exception e) {
-                log.error("Unable to create class groups automatically based on class hierarchy");
-            }
-            vitroInternalsSubmodel.enterCriticalSection(Lock.WRITE);
-            try {
-                vitroInternalsSubmodel.add(tempModel);
-            } finally {
-                vitroInternalsSubmodel.leaveCriticalSection();
-            }
-        } finally {
-            ontModel.leaveCriticalSection();
-        }
-        return modelForClassgroups;
-    }
-    private final OntModelSpec DEFAULT_ONT_MODEL_SPEC = OntModelSpec.OWL_MEM;
-    private final boolean NORMAL = false;
-    private final boolean AGGRESSIVE = true;
+    /**
+     * Creates a set of vitro:ClassGroup resources for each root class in
+     * an ontology. Also creates annotations to place each root class and all
+     * of its children in the appropriate groups. In the case of multiple
+     * inheritance, classgroup assignment will be arbitrary.
+     * @param wadf
+     * @param tboxModel containing ontology classes
+     * @return resultArray of OntModels, where resultArray[0] is the model containing
+     * the triples about the classgroups, and resultArray[1] is the model containing
+     * annotation triples assigning ontology classes to classgroups.
+     */
+    public synchronized static OntModel[] makeClassGroupsFromRootClasses(
+            WebappDaoFactory wadf, Model tboxModel) {
+        OntModel ontModel = ModelFactory.createOntologyModel(
+                OntModelSpec.OWL_DL_MEM, tboxModel);
+        OntModel modelForClassgroups = ModelFactory.createOntologyModel(
+                OntModelSpec.OWL_DL_MEM);
+        OntModel modelForClassgroupAnnotations = ModelFactory.createOntologyModel(
+                OntModelSpec.OWL_DL_MEM);
+        SimpleOntModelSelector oms = new SimpleOntModelSelector();
+        oms.setTBoxModel(ontModel);
+        oms.setApplicationMetadataModel(modelForClassgroups);
+        WebappDaoFactoryConfig config = new WebappDaoFactoryConfig();
+        config.setDefaultNamespace(wadf.getDefaultNamespace());
+        WebappDaoFactory myWebappDaoFactory = new WebappDaoFactoryJena(
+                new SimpleOntModelSelector(ontModel), config, null);
+
+        Resource classGroupClass = ResourceFactory.createResource(
+                VitroVocabulary.CLASSGROUP);
+        Property inClassGroupProperty = ResourceFactory.createProperty(
+                VitroVocabulary.IN_CLASSGROUP);
+
+        ontModel.enterCriticalSection(Lock.READ);
+        try {
+            try {
+                for (Iterator rootClassIt = myWebappDaoFactory.getVClassDao()
+                        .getRootClasses().iterator(); rootClassIt.hasNext(); ) {
+                    VClass rootClass = (VClass) rootClassIt.next();
+                    Individual classGroup = modelForClassgroups.createIndividual(
+                            wadf.getDefaultNamespace() + "vitroClassGroup" +
+                            rootClass.getLocalName(), classGroupClass);
+                    classGroup.setLabel(rootClass.getName(), null);
+                    Resource rootClassRes = modelForClassgroupAnnotations.getResource(
+                            rootClass.getURI());
+                    modelForClassgroupAnnotations.add(
+                            rootClassRes, inClassGroupProperty, classGroup);
+                    for (Iterator<String> childIt = myWebappDaoFactory.getVClassDao()
+                            .getAllSubClassURIs(rootClass.getURI()).iterator();
+                            childIt.hasNext(); ) {
+                        String childURI = (String) childIt.next();
+                        Resource childClass = modelForClassgroupAnnotations
+                                .getResource(childURI);
+                        if (!modelForClassgroupAnnotations.contains(
+                                childClass, inClassGroupProperty, (RDFNode) null)) {
+                            childClass.addProperty(inClassGroupProperty, classGroup);
+                        }
+                    }
+                }
+            } catch (Exception e) {
+                String errMsg = "Unable to create class groups automatically " +
+                        "based on class hierarchy";
+                log.error(errMsg, e);
+                throw new RuntimeException(errMsg, e);
+            }
+        } finally {
+            ontModel.leaveCriticalSection();
+        }
+
+        OntModel[] resultArray = new OntModel[2];
+        resultArray[0] = modelForClassgroups;
+        resultArray[1] = modelForClassgroupAnnotations;
+        return resultArray;
+    }
+
+    private final OntModelSpec DEFAULT_ONT_MODEL_SPEC = OntModelSpec.OWL_MEM;
+    private final boolean NORMAL = false;
+    private final boolean AGGRESSIVE = true;

    public OntModel extractTBox( Model inputModel ) {
        return extractTBox(inputModel, null);
    }

    public OntModel extractTBox( Model inputModel, boolean MODE ) {
        Dataset dataset = DatasetFactory.create(inputModel);
        return extractTBox(dataset, null, null, MODE);
    }

    public OntModel extractTBox( Model inputModel, String namespace ) {
        Dataset dataset = DatasetFactory.create(inputModel);
        return extractTBox( dataset, namespace, null, NORMAL );
    }

    public OntModel extractTBox( Dataset dataset, String namespace, String graphURI) {
        return extractTBox( dataset, namespace, graphURI, NORMAL);
    }

    public OntModel extractTBox( Dataset dataset, String namespace, String graphURI, boolean mode ) {
        OntModel tboxModel = ModelFactory.createOntologyModel(DEFAULT_ONT_MODEL_SPEC);

        List<String> queryStrList = new LinkedList<String>();

        // Use SPARQL DESCRIBE queries to extract the RDF for named ontology entities
        queryStrList.add( makeDescribeQueryStr( OWL.Class.getURI(), namespace, graphURI ) );
        queryStrList.add( makeDescribeQueryStr( OWL.Restriction.getURI(), namespace, graphURI ) );
        queryStrList.add( makeDescribeQueryStr( OWL.ObjectProperty.getURI(), namespace, graphURI ) );
        queryStrList.add( makeDescribeQueryStr( OWL.DatatypeProperty.getURI(), namespace, graphURI ) );
        queryStrList.add( makeDescribeQueryStr( OWL.AnnotationProperty.getURI(), namespace, graphURI ) );

        // if we're using to a hash namespace, the URI of the Ontology resource will be
        // that namespace minus the final hash mark.
        if ( namespace != null && namespace.endsWith("#") ) {
            queryStrList.add( makeDescribeQueryStr( OWL.Ontology.getURI(), namespace.substring(0,namespace.length()-1), graphURI ) );
        } else {
            queryStrList.add( makeDescribeQueryStr( OWL.Ontology.getURI(), namespace, graphURI ) );
        }

        // Perform the SPARQL DESCRIBEs
        for ( String queryStr : queryStrList ) {
            Query tboxSparqlQuery = QueryFactory.create(queryStr);
            QueryExecution qe = QueryExecutionFactory.create(tboxSparqlQuery,dataset);
            try {
                dataset.getLock().enterCriticalSection(Lock.READ);
                qe.execDescribe(tboxModel);
            } finally {
                dataset.getLock().leaveCriticalSection();
            }
        }

        // Perform possibly-redundant extraction to try ensure we don't miss
        // individual axioms floating around. We still might miss things;
        // this approach isn't perfect.
        if (mode = AGGRESSIVE) {
            tboxModel.add(construct(dataset, namespace, graphURI, RDFS.subClassOf));
            tboxModel.add(construct(dataset, namespace, graphURI, RDFS.subPropertyOf));
            tboxModel.add(construct(dataset, namespace, graphURI, OWL.equivalentClass));
            tboxModel.add(construct(dataset, namespace, graphURI, OWL.unionOf));
            tboxModel.add(construct(dataset, namespace, graphURI, OWL.intersectionOf));
            tboxModel.add(construct(dataset, namespace, graphURI, OWL.complementOf));
            tboxModel.add(construct(dataset, namespace, graphURI, OWL.onProperty));
            tboxModel.add(construct(dataset, namespace, graphURI, OWL.allValuesFrom));
            tboxModel.add(construct(dataset, namespace, graphURI, OWL.someValuesFrom));
            tboxModel.add(construct(dataset, namespace, graphURI, OWL.hasValue));
            tboxModel.add(construct(dataset, namespace, graphURI, OWL.minCardinality));
            tboxModel.add(construct(dataset, namespace, graphURI, OWL.maxCardinality));
            tboxModel.add(construct(dataset, namespace, graphURI, OWL.cardinality));
            tboxModel.add(construct(dataset, namespace, graphURI, OWL.disjointWith));
            tboxModel.add(construct(dataset, namespace, graphURI, ResourceFactory.createResource(
                    VitroVocabulary.DISPLAY_LIMIT)));
            tboxModel.add(construct(dataset, namespace, graphURI, ResourceFactory.createResource(
                    VitroVocabulary.DISPLAY_RANK_ANNOT)));
            tboxModel.add(construct(dataset, namespace, graphURI, ResourceFactory.createResource(
                    VitroVocabulary.IN_CLASSGROUP)));
            tboxModel.add(construct(dataset, namespace, graphURI, ResourceFactory.createResource(
                    VitroVocabulary.IN_CLASSGROUP)));
            tboxModel.add(construct(dataset, namespace, graphURI, ResourceFactory.createResource(
                    VitroVocabulary.PROPERTY_INPROPERTYGROUPANNOT)));
            tboxModel.add(construct(dataset, namespace, graphURI, ResourceFactory.createResource(
                    VitroVocabulary.PROHIBITED_FROM_UPDATE_BELOW_ROLE_LEVEL_ANNOT)));
            tboxModel.add(construct(dataset, namespace, graphURI, ResourceFactory.createResource(
                    VitroVocabulary.HIDDEN_FROM_DISPLAY_BELOW_ROLE_LEVEL_ANNOT)));
            tboxModel.add(construct(dataset, namespace, graphURI, ResourceFactory.createResource(
                    VitroVocabulary.DESCRIPTION_ANNOT)));
            tboxModel.add(construct(dataset, namespace, graphURI, ResourceFactory.createResource(
                    VitroVocabulary.SHORTDEF)));
            tboxModel.add(construct(dataset, namespace, graphURI, ResourceFactory.createResource(
                    VitroVocabulary.EXAMPLE_ANNOT)));
            tboxModel.add(construct(dataset, namespace, graphURI, ResourceFactory.createResource(
                    VitroVocabulary.vitroURI + "extendedLinkedData")));
            tboxModel.add(construct(dataset, namespace, graphURI, ResourceFactory.createResource(
                    VitroVocabulary.PROPERTY_OFFERCREATENEWOPTIONANNOT)));
            tboxModel.add(construct(dataset, namespace, graphURI, ResourceFactory.createResource(
                    VitroVocabulary.PROPERTY_COLLATEBYSUBCLASSANNOT)));
            tboxModel.add(construct(dataset, namespace, graphURI, ResourceFactory.createResource(
                    VitroVocabulary.PROPERTY_CUSTOM_LIST_VIEW_ANNOT)));
            tboxModel.add(construct(dataset, namespace, graphURI, ResourceFactory.createResource(
                    VitroVocabulary.PROPERTY_CUSTOMDISPLAYVIEWANNOT)));
            tboxModel.add(construct(dataset, namespace, graphURI, ResourceFactory.createResource(
                    VitroVocabulary.PROPERTY_CUSTOMENTRYFORMANNOT)));
            tboxModel.add(construct(dataset, namespace, graphURI, ResourceFactory.createResource(
                    VitroVocabulary.PROPERTY_CUSTOMSEARCHVIEWANNOT)));
            tboxModel.add(construct(dataset, namespace, graphURI, ResourceFactory.createResource(
                    VitroVocabulary.PROPERTY_CUSTOMSHORTVIEWANNOT)));
            tboxModel.add(construct(dataset, namespace, graphURI, ResourceFactory.createResource(
                    VitroVocabulary.PROPERTY_ENTITYSORTDIRECTION)));
            tboxModel.add(construct(dataset, namespace, graphURI, ResourceFactory.createResource(
                    VitroVocabulary.PROPERTY_ENTITYSORTFIELD)));
            tboxModel.add(construct(dataset, namespace, graphURI, ResourceFactory.createResource(
                    VitroVocabulary.PROPERTY_OBJECTINDIVIDUALSORTPROPERTY)));
            tboxModel.add(construct(dataset, namespace, graphURI, ResourceFactory.createResource(
                    VitroVocabulary.PROPERTY_SELECTFROMEXISTINGANNOT)));
        }
        return tboxModel;
    }

    private Model construct(Dataset dataset,
                            String namespace,
                            String graphURI,
                            Resource property) {
        dataset.getLock().enterCriticalSection(Lock.READ);
        try {
            StringBuffer buff = new StringBuffer();
            buff.append("PREFIX afn: <http://jena.hpl.hp.com/ARQ/function#> \n")
                .append("CONSTRUCT { \n")
                .append(" ?res <" + property.getURI() + "> ?o } WHERE { \n");
            if (graphURI != null) {
                buff.append(" GRAPH " + graphURI + " { \n");
            }
            buff.append(" ?res <" + property.getURI() + "> ?o \n");
            buff.append(getNamespaceFilter(namespace));
            if (graphURI != null) {
                buff.append(" } \n");
            }
            buff.append("}");
            Query constructProp = QueryFactory.create(buff.toString());
            QueryExecution qe = QueryExecutionFactory.create(constructProp, dataset);
            try {
                return qe.execConstruct();
            } finally {
                qe.close();
            }
        } finally {
            dataset.getLock().leaveCriticalSection();
        }
    }

    private String makeDescribeQueryStr( String typeURI, String namespace ) {
        return makeDescribeQueryStr( typeURI, namespace, null );
    }

    private String makeDescribeQueryStr( String typeURI, String namespace, String graphURI ) {
        StringBuffer describeQueryStrBuff = new StringBuffer()
            .append("PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> \n")
            .append("PREFIX afn: <http://jena.hpl.hp.com/ARQ/function#> \n")
            .append("DESCRIBE ?res WHERE { \n");
        if (graphURI != null) {
            describeQueryStrBuff
                .append("GRAPH " + graphURI + "{ \n");
        }
        describeQueryStrBuff
            .append(" ?res rdf:type <").append(typeURI).append("> . \n");
        describeQueryStrBuff
            .append(" FILTER (!isBlank(?res)) \n")
            .append(getNamespaceFilter(namespace));
        if (graphURI != null) {
            describeQueryStrBuff
                .append("} \n");
        }
        describeQueryStrBuff.append("} \n");
        return describeQueryStrBuff.toString();
    }

    private String getNamespaceFilter(String namespace) {
        StringBuffer buff = new StringBuffer();
        if (namespace == null) {
            // exclude resources in the Vitro internal namespace or in the
            // OWL namespace, but allow all others
            buff
@@ -307,73 +335,73 @@ public class JenaModelUtils {
                .append(namespace)
                .append("\")) \n");
        }
        return buff.toString();
    }

    public Model extractABox(Model inputModel){
        Dataset dataset = DatasetFactory.create(inputModel);
        return extractABox(dataset, null, null);
    }

    public Model extractABox( Dataset unionDataset, Dataset baseOrInfDataset, String graphURI ) {
        Model aboxModel = ModelFactory.createDefaultModel();

        // iterate through all classes and DESCRIBE each of their instances
        // Note that this could be simplified if we knew that the model was a
        // reasoning model: we could then simply describe all instances of
        // owl:Thing.
        //OntModel ontModel = ( inputModel instanceof OntModel )
        //? (OntModel)inputModel
        //: ModelFactory.createOntologyModel( DEFAULT_ONT_MODEL_SPEC, inputModel );
        OntModel ontModel = extractTBox(unionDataset, null, graphURI);

        try {
            ontModel.enterCriticalSection(Lock.READ);
            Iterator classIt = ontModel.listNamedClasses();
            QueryExecution qe = null;
            while ( classIt.hasNext() ) {
                OntClass ontClass = (OntClass) classIt.next();
                //if ( !(ontClass.getNameSpace().startsWith(OWL.getURI()) )
                //        && !(ontClass.getNameSpace().startsWith(VitroVocabulary.vitroURI)) ) {
                if(!(ontClass.getNameSpace().startsWith(OWL.getURI()))){
                    String queryStr = makeDescribeQueryStr( ontClass.getURI(), null, graphURI );
                    Query aboxSparqlQuery = QueryFactory.create(queryStr);
                    if(baseOrInfDataset != null){
                        qe = QueryExecutionFactory.create(aboxSparqlQuery,baseOrInfDataset);
                    }
                    else{
                        qe = QueryExecutionFactory.create(aboxSparqlQuery,unionDataset);
                    }
                    if(baseOrInfDataset != null){
                        try {
                            baseOrInfDataset.getLock().enterCriticalSection(Lock.READ);
                            qe.execDescribe(aboxModel); // puts the statements about each resource into aboxModel.
                        } finally {
                            baseOrInfDataset.getLock().leaveCriticalSection();
                        }
                    }
                    else{
                        try {
                            unionDataset.getLock().enterCriticalSection(Lock.READ);
                            qe.execDescribe(aboxModel); // puts the statements about each resource into aboxModel.
                        } finally {
                            unionDataset.getLock().leaveCriticalSection();
                        }
                    }
                }
            }
        } finally {
            ontModel.leaveCriticalSection();
        }
        return aboxModel;
    }
}
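
For reference, a minimal sketch (not part of the commit) of how a caller consumes the new two-element return value of makeClassGroupsFromRootClasses, mirroring the operateOnModel() changes above; the model variable names here are illustrative only.

    // Hypothetical caller; variable names are illustrative, not from the commit.
    OntModel[] classgroupModels = JenaModelUtils.makeClassGroupsFromRootClasses(
            webappDaoFactory, changesModel);
    // resultArray[0]: the classgroup individuals, added to the application
    // metadata model (as operateOnModel now does via the OntModelSelector)
    applicationMetadataModel.add(classgroupModels[0]);
    // resultArray[1]: the vitro:inClassGroup annotations on the ontology
    // classes, added to the model receiving the uploaded TBox statements
    tboxModel.add(classgroupModels[1]);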