diff --git a/webapp/config/log4j.properties b/webapp/config/log4j.properties
index fbb00c755..13c54f2c9 100644
--- a/webapp/config/log4j.properties
+++ b/webapp/config/log4j.properties
@@ -36,7 +36,6 @@ log4j.rootLogger=INFO, AllAppender
 # These classes are too chatty to display INFO messages.
 log4j.logger.edu.cornell.mannlib.vitro.webapp.startup.StartupStatus=WARN
 log4j.logger.edu.cornell.mannlib.vitro.webapp.dao.jena.pellet.PelletListener=WARN
-log4j.logger.edu.cornell.mannlib.vitro.webapp.dao.jena.RDBGraphGenerator=WARN
 log4j.logger.edu.cornell.mannlib.vitro.webapp.servlet.setup.UpdateKnowledgeBase=DEBUG
 
 # Spring as a whole is too chatty to display INFO messages.
@@ -44,7 +43,6 @@ log4j.logger.org.springframework=WARN
 
 # suppress odd warnings from libraries
 log4j.logger.com.hp.hpl.jena.sdb.layout2.LoaderTuplesNodes=FATAL
-log4j.logger.com.hp.hpl.jena.db.impl.PSet_TripleStore_RDB=FATAL
 log4j.logger.com.hp.hpl.jena.sdb.sql.SDBConnection=ERROR
 log4j.logger.org.openjena.riot=FATAL
 log4j.logger.org.directwebremoting=FATAL
diff --git a/webapp/lib/arq-2.8.7.jar b/webapp/lib/arq-2.8.7.jar
deleted file mode 100644
index e6fdf2aba..000000000
Binary files a/webapp/lib/arq-2.8.7.jar and /dev/null differ
diff --git a/webapp/lib/commons-codec-1.3.jar b/webapp/lib/commons-codec-1.3.jar
deleted file mode 100644
index 957b6752a..000000000
Binary files a/webapp/lib/commons-codec-1.3.jar and /dev/null differ
diff --git a/webapp/lib/commons-codec-1.4.jar b/webapp/lib/commons-codec-1.4.jar
deleted file mode 100644
index 458d432da..000000000
Binary files a/webapp/lib/commons-codec-1.4.jar and /dev/null differ
diff --git a/webapp/lib/commons-codec-1.6.jar b/webapp/lib/commons-codec-1.6.jar
new file mode 100644
index 000000000..ee1bc49ac
Binary files /dev/null and b/webapp/lib/commons-codec-1.6.jar differ
diff --git a/webapp/lib/jena-2.6.4.jar b/webapp/lib/jena-2.6.4.jar
deleted file mode 100644
index efc64a94f..000000000
Binary files a/webapp/lib/jena-2.6.4.jar and /dev/null differ
diff --git a/webapp/lib/jena-arq-2.10.1.jar b/webapp/lib/jena-arq-2.10.1.jar
new file mode 100644
index 000000000..b8e9547f3
Binary files /dev/null and b/webapp/lib/jena-arq-2.10.1.jar differ
diff --git a/webapp/lib/jena-core-2.10.1.jar b/webapp/lib/jena-core-2.10.1.jar
new file mode 100644
index 000000000..d2a389c96
Binary files /dev/null and b/webapp/lib/jena-core-2.10.1.jar differ
diff --git a/webapp/lib/jena-iri-0.9.6.jar b/webapp/lib/jena-iri-0.9.6.jar
new file mode 100644
index 000000000..e4ee0dc2a
Binary files /dev/null and b/webapp/lib/jena-iri-0.9.6.jar differ
diff --git a/webapp/lib/jena-sdb-1.3.6.jar b/webapp/lib/jena-sdb-1.3.6.jar
new file mode 100644
index 000000000..43ee9ffcc
Binary files /dev/null and b/webapp/lib/jena-sdb-1.3.6.jar differ
diff --git a/webapp/lib/jena-tdb-0.10.0.jar b/webapp/lib/jena-tdb-0.10.0.jar
new file mode 100644
index 000000000..ad7101a20
Binary files /dev/null and b/webapp/lib/jena-tdb-0.10.0.jar differ
diff --git a/webapp/lib/junit-4.8.1.jar b/webapp/lib/junit-4.8.1.jar
deleted file mode 100644
index 524cd65ce..000000000
Binary files a/webapp/lib/junit-4.8.1.jar and /dev/null differ
diff --git a/webapp/lib/junit-4.9.jar b/webapp/lib/junit-4.9.jar
new file mode 100644
index 000000000..142081561
Binary files /dev/null and b/webapp/lib/junit-4.9.jar differ
diff --git a/webapp/lib/pellet-core.jar b/webapp/lib/pellet-core.jar
index d383d5e61..ebaf120bb 100644
Binary files a/webapp/lib/pellet-core.jar and b/webapp/lib/pellet-core.jar differ
diff --git a/webapp/lib/pellet-datatypes.jar
b/webapp/lib/pellet-datatypes.jar index 320f9ccea..2e1722bb2 100644 Binary files a/webapp/lib/pellet-datatypes.jar and b/webapp/lib/pellet-datatypes.jar differ diff --git a/webapp/lib/pellet-el.jar b/webapp/lib/pellet-el.jar index 49a9553e0..90f0d01c3 100644 Binary files a/webapp/lib/pellet-el.jar and b/webapp/lib/pellet-el.jar differ diff --git a/webapp/lib/pellet-jena.jar b/webapp/lib/pellet-jena.jar index e655efb33..e3b7458ce 100644 Binary files a/webapp/lib/pellet-jena.jar and b/webapp/lib/pellet-jena.jar differ diff --git a/webapp/lib/pellet-rules.jar b/webapp/lib/pellet-rules.jar index 821a4ecd5..b7aa4d850 100644 Binary files a/webapp/lib/pellet-rules.jar and b/webapp/lib/pellet-rules.jar differ diff --git a/webapp/lib/sdb-1.3.4.jar b/webapp/lib/sdb-1.3.4.jar deleted file mode 100644 index e05df4095..000000000 Binary files a/webapp/lib/sdb-1.3.4.jar and /dev/null differ diff --git a/webapp/lib/tdb-0.8.7.jar b/webapp/lib/tdb-0.8.7.jar deleted file mode 100644 index 5b7965d65..000000000 Binary files a/webapp/lib/tdb-0.8.7.jar and /dev/null differ diff --git a/webapp/lib/xercesImpl-2.11.0.jar b/webapp/lib/xercesImpl-2.11.0.jar new file mode 100644 index 000000000..0aaa990f3 Binary files /dev/null and b/webapp/lib/xercesImpl-2.11.0.jar differ diff --git a/webapp/lib/xercesImpl.jar b/webapp/lib/xercesImpl.jar deleted file mode 100644 index eac75ae8e..000000000 Binary files a/webapp/lib/xercesImpl.jar and /dev/null differ diff --git a/webapp/lib/xml-apis-1.4.01.jar b/webapp/lib/xml-apis-1.4.01.jar new file mode 100644 index 000000000..46733464f Binary files /dev/null and b/webapp/lib/xml-apis-1.4.01.jar differ diff --git a/webapp/lib/xml-apis.jar b/webapp/lib/xml-apis.jar deleted file mode 100644 index 243eaeaeb..000000000 Binary files a/webapp/lib/xml-apis.jar and /dev/null differ diff --git a/webapp/lib/xsdlib.jar b/webapp/lib/xsdlib.jar deleted file mode 100644 index 0749a25ca..000000000 Binary files a/webapp/lib/xsdlib.jar and /dev/null differ diff --git a/webapp/src/com/hp/hpl/jena/reasoner/BaseInfGraph.java b/webapp/src/com/hp/hpl/jena/reasoner/BaseInfGraph.java new file mode 100644 index 000000000..69f40938d --- /dev/null +++ b/webapp/src/com/hp/hpl/jena/reasoner/BaseInfGraph.java @@ -0,0 +1,544 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.hp.hpl.jena.reasoner; + +import com.hp.hpl.jena.graph.*; +import com.hp.hpl.jena.graph.compose.MultiUnion; +import com.hp.hpl.jena.graph.impl.*; +import com.hp.hpl.jena.shared.*; +import com.hp.hpl.jena.util.iterator.*; +import java.util.Iterator; + +/** + * A base level implementation of the InfGraph interface. 
+ */ +public abstract class BaseInfGraph extends GraphBase implements InfGraph { + + /** The Reasoner instance which performs all inferences and Tbox lookups */ + protected Reasoner reasoner; + + /** The graph of raw data which is being reasoned over */ + protected FGraph fdata; + + /** Flag, if set to true then derivations are recorded */ + protected boolean recordDerivations; + + /** Flag to record if the preparation call has been made and so the graph is ready for queries */ + protected volatile boolean isPrepared = false; + + /** version count */ + protected volatile int version = 0; + + /** + Inference graphs share the prefix-mapping of their underlying raw graph. + @see com.hp.hpl.jena.graph.Graph#getPrefixMapping() + */ + @Override + public PrefixMapping getPrefixMapping() + { return getRawGraph().getPrefixMapping(); } + + /** + * Constructor + * @param data the raw data file to be augmented with entailments + * @param reasoner the engine, with associated tbox data, whose find interface + * can be used to extract all entailments from the data. + */ + public BaseInfGraph(Graph data, Reasoner reasoner) { + super( ); + this.fdata = new FGraph( data ); + this.reasoner = reasoner; + } + + /** + Answer the InfCapabilities of this InfGraph. + */ + @Override + public Capabilities getCapabilities() { + if (capabilities == null) { + return getReasoner().getGraphCapabilities(); + } else { + return capabilities; + } + } + + /** + An InfCapabilities notes that size may not be accurate, and some + triples may be irremovable. + + TODO accomodate the properties of the base graph, too. + */ + public static class InfCapabilities extends AllCapabilities + { + @Override + public boolean sizeAccurate() { return false; } + @Override + public boolean deleteAllowed( boolean every ) { return !every; } + @Override + public boolean iteratorRemoveAllowed() { return false; } + @Override + public boolean findContractSafe() { return false; } + } + + /** + An InfCapabilities notes that size may not be accurate, and some + triples may be irremovable. + + TODO accomodate the properties of the base graph, too. + */ + public static class InfFindSafeCapabilities extends InfCapabilities + { + @Override + public boolean findContractSafe() { return true; } + } + + /** + @deprecated Bulk update operations are going to be removed. + @see GraphUtil for convenience helpers. + */ + + @Override + @Deprecated + public BulkUpdateHandler getBulkUpdateHandler() + { + if (bulkHandler == null) bulkHandler = new InfBulkUpdateHandler( this ); + return bulkHandler; + } + + /** + InfBulkUpdateHandler - a bulk update handler specialised for inference + graphs by code for removeAll(). 
+ */ + static class InfBulkUpdateHandler extends SimpleBulkUpdateHandler + { + public InfBulkUpdateHandler( BaseInfGraph graph ) + { super(graph); } + + @Override + @Deprecated + public void remove( Node s, Node p, Node o ) + { + BaseInfGraph g = (BaseInfGraph) graph; + g.getRawGraph().remove( s, p, o ); + g.discardState(); + g.rebind(); + manager.notifyEvent( graph, GraphEvents.remove( s, p, o ) ); + } + + @Override + @Deprecated + public void removeAll() + { + BaseInfGraph g = (BaseInfGraph) graph; + g.getRawGraph().clear(); + g.discardState(); + g.rebind(); + g.getEventManager().notifyEvent( g, GraphEvents.removeAll ); + } + } + + @Override + public void remove( Node s, Node p, Node o ) + { + getRawGraph().remove( s, p, o ); + discardState(); + rebind(); + getEventManager().notifyEvent( this, GraphEvents.remove( s, p, o ) ); + } + + @Override + public void clear() + { + getRawGraph().clear() ; + discardState(); + rebind(); + getEventManager().notifyEvent( this, GraphEvents.removeAll ); + } + + + @Override + public TransactionHandler getTransactionHandler() + { return new InfTransactionHandler( this ); } + + public static class InfTransactionHandler + extends TransactionHandlerBase implements TransactionHandler + { + protected final BaseInfGraph base; + + public InfTransactionHandler( BaseInfGraph base ) + { this.base = base; } + + @Override + public boolean transactionsSupported() + { return getBaseHandler().transactionsSupported(); } + + protected TransactionHandler getBaseHandler() + { return base.getRawGraph().getTransactionHandler(); } + + @Override + public void begin() + { getBaseHandler().begin(); } + + @Override + public void abort() + { getBaseHandler().abort(); + base.rebind(); } + + @Override + public void commit() + { getBaseHandler().commit(); } + } + + /** + discard any state that depends on the content of fdata, because + it's just been majorly trashed, solid gone. + */ + protected void discardState() + {} + + /** + * Return the raw RDF data Graph being processed (i.e. the argument + * to the Reasonder.bind call that created this InfGraph). + */ + @Override + public Graph getRawGraph() { + return fdata.getGraph(); + } + + /** + * Return the Reasoner which is being used to answer queries to this graph. + */ + @Override + public Reasoner getReasoner() { + return reasoner; + } + + /** + * Replace the underlying data graph for this inference graph and start any + * inferences over again. This is primarily using in setting up ontology imports + * processing to allow an imports multiunion graph to be inserted between the + * inference graph and the raw data, before processing. + * @param data the new raw data graph + */ + @Override + public synchronized void rebind(Graph data) { + fdata = new FGraph(data); + isPrepared = false; + } + + /** + * Cause the inference graph to reconsult the underlying graph to take + * into account changes. Normally changes are made through the InfGraph's add and + * remove calls are will be handled appropriately. However, in some cases changes + * are made "behind the InfGraph's back" and this forces a full reconsult of + * the changed data. + */ + @Override + public synchronized void rebind() { + version++; + isPrepared = false; + } + + /** + * Reset any internal caches. Some systems, such as the tabled backchainer, + * retain information after each query. A reset will wipe this information preventing + * unbounded memory use at the expense of more expensive future queries. 
A reset + * does not cause the raw data to be reconsulted and so is less expensive than a rebind. + */ + @Override + public void reset() { + version++; + } + + /** + * Perform any initial processing and caching. This call is optional. Most + * engines either have negligable set up work or will perform an implicit + * "prepare" if necessary. The call is provided for those occasions where + * substantial preparation work is possible (e.g. running a forward chaining + * rule system) and where an application might wish greater control over when + * this prepration is done. + */ + @Override + public synchronized void prepare() { + // Default is to do no preparation + isPrepared = true; + } + + /** + * Returns a derivations graph. The rule reasoners typically create a + * graph containing those triples added to the base graph due to rule firings. + * In some applications it can useful to be able to access those deductions + * directly, without seeing the raw data which triggered them. In particular, + * this allows the forward rules to be used as if they were rewrite transformation + * rules. + * @return the deductions graph, if relevant for this class of inference + * engine or null if not. + */ + @Override + public Graph getDeductionsGraph() { + return null; + } + + /** + * Test a global boolean property of the graph. This might included + * properties like consistency, OWLSyntacticValidity etc. + * It remains to be seen what level of generality is needed here. We could + * replace this by a small number of specific tests for common concepts. + * @param property the URI of the property to be tested + * @return a Node giving the value of the global property, this may + * be a boolean literal, some other literal value (e.g. a size). + */ + @Override + public Node getGlobalProperty(Node property) { + throw new ReasonerException("Global property not implemented: " + property); + } + + /** + * A convenience version of getGlobalProperty which can only return + * a boolean result. + */ + @Override + public boolean testGlobalProperty(Node property) { + Node resultNode = getGlobalProperty(property); + if (resultNode.isLiteral()) { + Object result = resultNode.getLiteralValue(); + if (result instanceof Boolean) { + return ((Boolean)result).booleanValue(); + } + } + throw new ReasonerException("Global property test returned non-boolean value" + + "\nTest was: " + property + + "\nResult was: " + resultNode); + } + + /** + * Test the consistency of the bound data. This normally tests + * the validity of the bound instance data against the bound + * schema data. + * @return a ValidityReport structure + */ + @Override + public ValidityReport validate() { + checkOpen(); + return new StandardValidityReport(); + } + + /** + * An extension of the Graph.find interface which allows the caller to + * encode complex expressions in RDF and then refer to those expressions + * within the query triple. For example, one might encode a class expression + * and then ask if there are any instances of this class expression in the + * InfGraph. + * @param subject the subject Node of the query triple, may be a Node in + * the graph or a node in the parameter micro-graph or null + * @param property the property to be retrieved or null + * @param object the object Node of the query triple, may be a Node in + * the graph or a node in the parameter micro-graph. + * @param param a small graph encoding an expression which the subject and/or + * object nodes refer. 
+ */ + @Override + public ExtendedIterator find(Node subject, Node property, Node object, Graph param) { + return cloneWithPremises(param).find(subject, property, object); + } + + /** + * Returns an iterator over Triples. + * + *
This code used to have the .filterKeep component uncommented. We + * think this is because of earlier history, before .matches on a literal node + * was implemented as sameValueAs rather than equals. If it turns out that + * the filter is needed, it can be commented back in, AND a corresponding + * filter added to find(Node x 3) -- and test cases, of course. + */ + @Override + public ExtendedIterator graphBaseFind(TripleMatch m) { + return graphBaseFind(m.getMatchSubject(), m.getMatchPredicate(), m.getMatchObject()) + // .filterKeep(new TripleMatchFilter(m.asTriple())) + ; + } + + /** + * Returns an iterator over Triples. + * This implementation assumes that the underlying findWithContinuation + * will have also consulted the raw data. + */ + @Override + public ExtendedIterator graphBaseFind(Node subject, Node property, Node object) { + return findWithContinuation(new TriplePattern(subject, property, object), fdata); + } + + /** + * Extended find interface used in situations where the implementator + * may or may not be able to answer the complete query. It will + * attempt to answer the pattern but if its answers are not known + * to be complete then it will also pass the request on to the nested + * Finder to append more results. + * @param pattern a TriplePattern to be matched against the data + * @param continuation either a Finder or a normal Graph which + * will be asked for additional match results if the implementor + * may not have completely satisfied the query. + */ + abstract public ExtendedIterator findWithContinuation(TriplePattern pattern, Finder continuation); + + + /** + * Basic pattern lookup interface. + * This implementation assumes that the underlying findWithContinuation + * will have also consulted the raw data. + * @param pattern a TriplePattern to be matched against the data + * @return a ExtendedIterator over all Triples in the data set + * that match the pattern + */ + public ExtendedIterator find(TriplePattern pattern) { + checkOpen(); + return findWithContinuation(pattern, fdata); + } + + /** + * Switch on/off drivation logging + */ + @Override + public void setDerivationLogging(boolean logOn) { + recordDerivations = logOn; + } + + /** + * Return the derivation of the given triple (which is the result of + * some previous find operation). + * Not all reasoneers will support derivations. + * @return an iterator over Derivation records or null if there is no derivation information + * available for this triple. + */ + @Override + public Iterator getDerivation(Triple triple) { + return null; + } + + /** + * Return the number of triples in the just the base graph + */ + @Override + public int graphBaseSize() { + checkOpen(); + return fdata.getGraph().size(); + } + + /** + Answer true iff this graph is empty. [Used to be in QueryHandler, but moved in + here because it's a more primitive operation.] + */ + @Override + public boolean isEmpty() { + return fdata.getGraph().isEmpty(); + } + + /** + * Free all resources, any further use of this Graph is an error. + */ + @Override + public void close() { + if (!closed) { + fdata.getGraph().close(); + fdata = null; + super.close(); + } + } + + /** + * Return a version stamp for this graph which can be + * used to fast-fail concurrent modification exceptions. + */ + public int getVersion() { + return version; + } + + /** + * Add one triple to the data graph, run any rules triggered by + * the new data item, recursively adding any generated triples. 
+ */ + @Override + public synchronized void performAdd(Triple t) { + version++; + this.requirePrepared(); + fdata.getGraph().add(t); + } + + /** + * Removes the triple t (if possible) from the set belonging to this graph. + */ + @Override + public void performDelete(Triple t) { + version++; + this.requirePrepared(); + fdata.getGraph().delete(t); + } + + /** + * Return the schema graph, if any, bound into this inference graph. + */ + public abstract Graph getSchemaGraph(); + + /** + * Return a new inference graph which is a clone of the current graph + * together with an additional set of data premises. The default + * implementation loses ALL partial deductions so far. Some subclasses + * may be able to a more efficient job. + */ + public InfGraph cloneWithPremises(Graph premises) { + MultiUnion union = new MultiUnion(); + Graph raw = getRawGraph(); + union.addGraph( raw ); + union.setBaseGraph( raw ); + union.addGraph( premises ); + Graph schema = getSchemaGraph(); + if (schema != null) { + if (schema instanceof BaseInfGraph) { + BaseInfGraph ischema = (BaseInfGraph)schema; + Graph sschema = ischema.getSchemaGraph(); + if (sschema != null) union.addGraph( sschema ); + Graph rschema = ischema.getRawGraph(); + if (rschema != null) union.addGraph( rschema ); + } + + } + return getReasoner().bind(union); + } + + /** + Answer true iff this graph has been through the prepare() step. + For testing purposes. + * @return Whether the graph is prepared + */ + public synchronized boolean isPrepared() + { return isPrepared; } + + /** + * Reset prepared state to false + */ + protected synchronized void setPreparedState(boolean state) { + this.isPrepared = state; + } + + /** + * Checks whether the graph is prepared and calls {@link #prepare()} if it is not + */ + protected synchronized void requirePrepared() { + if (!this.isPrepared) this.prepare(); + } +} diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/controller/SparqlQueryBuilderServlet.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/controller/SparqlQueryBuilderServlet.java index 2fb49e222..c5b98bf3c 100644 --- a/webapp/src/edu/cornell/mannlib/vitro/webapp/controller/SparqlQueryBuilderServlet.java +++ b/webapp/src/edu/cornell/mannlib/vitro/webapp/controller/SparqlQueryBuilderServlet.java @@ -16,7 +16,7 @@ import org.apache.commons.logging.LogFactory; import com.hp.hpl.jena.query.Syntax; import com.hp.hpl.jena.rdf.model.Model; -import com.hp.hpl.jena.sparql.resultset.ResultSetFormat; +import com.hp.hpl.jena.sparql.resultset.ResultsFormat; import edu.cornell.mannlib.vedit.controller.BaseEditController; import edu.cornell.mannlib.vitro.webapp.auth.permissions.SimplePermission; @@ -31,13 +31,13 @@ public class SparqlQueryBuilderServlet extends BaseEditController { protected static final Syntax SYNTAX = Syntax.syntaxARQ; - protected static HashMapformatSymbols = new HashMap(); + protected static HashMapformatSymbols = new HashMap(); static{ - formatSymbols.put( ResultSetFormat.syntaxXML.getSymbol(), ResultSetFormat.syntaxXML); - formatSymbols.put( ResultSetFormat.syntaxRDF_XML.getSymbol(), ResultSetFormat.syntaxRDF_XML); - formatSymbols.put( ResultSetFormat.syntaxRDF_N3.getSymbol(), ResultSetFormat.syntaxRDF_N3); - formatSymbols.put( ResultSetFormat.syntaxText.getSymbol() , ResultSetFormat.syntaxText); - formatSymbols.put( ResultSetFormat.syntaxJSON.getSymbol() , ResultSetFormat.syntaxJSON); + formatSymbols.put( ResultsFormat.FMT_RS_XML.getSymbol(), ResultsFormat.FMT_RS_XML); + formatSymbols.put( ResultsFormat.FMT_RDF_XML.getSymbol(), 
ResultsFormat.FMT_RDF_XML); + formatSymbols.put( ResultsFormat.FMT_RDF_N3.getSymbol(), ResultsFormat.FMT_RDF_N3); + formatSymbols.put( ResultsFormat.FMT_TEXT.getSymbol() , ResultsFormat.FMT_TEXT); + formatSymbols.put( ResultsFormat.FMT_RS_JSON.getSymbol() , ResultsFormat.FMT_RS_JSON); formatSymbols.put( "vitro:csv", null); } @@ -52,11 +52,11 @@ public class SparqlQueryBuilderServlet extends BaseEditController { protected static HashMapmimeTypes = new HashMap(); static{ - mimeTypes.put( ResultSetFormat.syntaxXML.getSymbol() , "text/xml" ); - mimeTypes.put( ResultSetFormat.syntaxRDF_XML.getSymbol(), "application/rdf+xml" ); - mimeTypes.put( ResultSetFormat.syntaxRDF_N3.getSymbol(), "text/plain" ); - mimeTypes.put( ResultSetFormat.syntaxText.getSymbol() , "text/plain"); - mimeTypes.put( ResultSetFormat.syntaxJSON.getSymbol(), "application/javascript" ); + mimeTypes.put( ResultsFormat.FMT_RS_XML.getSymbol() , "text/xml" ); + mimeTypes.put( ResultsFormat.FMT_RDF_XML.getSymbol(), "application/rdf+xml" ); + mimeTypes.put( ResultsFormat.FMT_RDF_N3.getSymbol(), "text/plain" ); + mimeTypes.put( ResultsFormat.FMT_TEXT.getSymbol() , "text/plain"); + mimeTypes.put( ResultsFormat.FMT_RS_JSON.getSymbol(), "application/javascript" ); mimeTypes.put( "vitro:csv", "text/csv"); } @@ -100,15 +100,6 @@ public class SparqlQueryBuilderServlet extends BaseEditController { } private void doHelp(HttpServletRequest req, HttpServletResponse res) throws ServletException, IOException { - - //res.setStatus(HttpServletResponse.SC_BAD_REQUEST); - - VitroRequest vreq = new VitroRequest(req); - - /* Code change completed */ - - // nac26: 2009-09-25 - this was causing problems in safari on localhost installations because the href did not include the context. The edit.css is not being used here anyway (or anywhere else for that matter) - // req.setAttribute("css", ""); req.setAttribute("title","SPARQL Query Builder"); req.setAttribute("bodyJsp", "/admin/sparql.jsp"); diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/controller/api/sparqlquery/SparqlQueryApiResultSetProducer.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/controller/api/sparqlquery/SparqlQueryApiResultSetProducer.java index adfa0594d..d9b238a2d 100644 --- a/webapp/src/edu/cornell/mannlib/vitro/webapp/controller/api/sparqlquery/SparqlQueryApiResultSetProducer.java +++ b/webapp/src/edu/cornell/mannlib/vitro/webapp/controller/api/sparqlquery/SparqlQueryApiResultSetProducer.java @@ -14,7 +14,7 @@ import org.apache.commons.io.IOUtils; import com.hp.hpl.jena.query.ResultSet; import com.hp.hpl.jena.query.ResultSetFactory; import com.hp.hpl.jena.query.ResultSetFormatter; -import com.hp.hpl.jena.sparql.resultset.ResultSetFormat; +import com.hp.hpl.jena.sparql.resultset.ResultsFormat; import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService; import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException; @@ -56,7 +56,7 @@ abstract class SparqlQueryApiResultSetProducer extends SparqlQueryApiExecutor { pipeWithReplacement(rawResult, out); } else { ResultSet rs = ResultSetFactory.fromJSON(rawResult); - ResultSetFormat format = ResultSetFormat.lookup(mediaType + ResultsFormat format = ResultsFormat.lookup(mediaType .getJenaResponseFormat()); ResultSetFormatter.output(out, rs, format); } diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/controller/individual/IndividualTemplateLocator.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/controller/individual/IndividualTemplateLocator.java index 84e811051..7c4eba08e 100644 --- 
a/webapp/src/edu/cornell/mannlib/vitro/webapp/controller/individual/IndividualTemplateLocator.java +++ b/webapp/src/edu/cornell/mannlib/vitro/webapp/controller/individual/IndividualTemplateLocator.java @@ -82,7 +82,7 @@ class IndividualTemplateLocator { } } } - // If still no custom template defined, and inferencing is asynchronous (under RDB), check + // If still no custom template defined and inferencing is asynchronous, check // the superclasses of the vclass for a custom template specification. SimpleReasoner simpleReasoner = (SimpleReasoner) ctx.getAttribute(SimpleReasoner.class.getName()); if (customTemplate == null && simpleReasoner != null && simpleReasoner.isABoxReasoningAsynchronous()) { diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/controller/jena/JenaCsv2RdfController.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/controller/jena/JenaCsv2RdfController.java index 57e414eb2..c5c5da47d 100644 --- a/webapp/src/edu/cornell/mannlib/vitro/webapp/controller/jena/JenaCsv2RdfController.java +++ b/webapp/src/edu/cornell/mannlib/vitro/webapp/controller/jena/JenaCsv2RdfController.java @@ -81,7 +81,7 @@ public class JenaCsv2RdfController extends JenaIngestController { forwardToFileUploadError(ex.getMessage(),request,response); return; } - ModelMaker maker = getVitroJenaModelMaker(request); + ModelMaker maker = getModelMaker(request); Boolean csv2rdf = true; JenaIngestUtils utils = new JenaIngestUtils(); List resultList = new ArrayList(); diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/controller/jena/JenaIngestController.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/controller/jena/JenaIngestController.java index b2535cde2..85eac24f4 100644 --- a/webapp/src/edu/cornell/mannlib/vitro/webapp/controller/jena/JenaIngestController.java +++ b/webapp/src/edu/cornell/mannlib/vitro/webapp/controller/jena/JenaIngestController.java @@ -67,6 +67,7 @@ import edu.cornell.mannlib.vitro.webapp.controller.Controllers; import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest; import edu.cornell.mannlib.vitro.webapp.dao.ModelAccess; import edu.cornell.mannlib.vitro.webapp.dao.ModelAccess.ModelID; +import edu.cornell.mannlib.vitro.webapp.dao.ModelAccess.ModelMakerID; import edu.cornell.mannlib.vitro.webapp.dao.OntologyDao; import edu.cornell.mannlib.vitro.webapp.dao.jena.RDFServiceGraph; import edu.cornell.mannlib.vitro.webapp.dao.jena.event.EditEvent; @@ -122,28 +123,28 @@ public class JenaIngestController extends BaseEditController { } VitroRequest vreq = new VitroRequest(request); - ModelMaker maker = getVitroJenaModelMaker(vreq); - String modelType = getModelType(vreq); + ModelMaker maker = getModelMaker(vreq); + ModelMakerID modelType = getModelType(vreq); String actionStr = vreq.getParameter("action"); actionStr = (actionStr != null) ? 
actionStr : ""; if("listModels".equals(actionStr)) { processListModelsRequest(vreq, maker, modelType); - } else if("rdbModels".equals(actionStr)){ - processRDBModelsRequest(vreq, maker, modelType); - } else if("sdbModels".equals(actionStr)){ - processSDBModelsRequest(vreq, maker, modelType); + } else if("configModels".equals(actionStr)){ + processConfigModelsRequest(vreq); + } else if("contentModels".equals(actionStr)){ + processContentModelsRequest(vreq); } else if("createModel".equals(actionStr)) { processCreateModelRequest(vreq, maker, modelType); } else if("removeModel".equals(actionStr)) { processRemoveModelRequest(vreq, maker, modelType); } else if("loadRDFData".equals(actionStr)) { - processLoadRDFDataRequest(vreq, maker, modelType); + processLoadRDFDataRequest(vreq, maker); } else if("cleanLiterals".equals(actionStr)) { - processCleanLiteralsRequest(vreq, maker, modelType); + processCleanLiteralsRequest(vreq); } else if("outputModel".equals(actionStr)) { - processOutputModelRequest(vreq, response, maker, modelType); + processOutputModelRequest(vreq, response); return; // don't attempt to display a JSP } else if("clearModel".equals(actionStr)) { processClearModelRequest(vreq, maker, modelType); @@ -152,35 +153,35 @@ public class JenaIngestController extends BaseEditController { } else if("detachModel".equals(actionStr)) { processDetachModelRequest(vreq, maker, modelType); } else if("renameBNodes".equals(actionStr)) { - processRenameBNodesRequest(vreq, maker, modelType); + processRenameBNodesRequest(vreq, maker); } else if("renameBNodesURISelect".equals(actionStr)){ - processRenameBNodesURISelectRequest(vreq, maker, modelType); + processRenameBNodesURISelectRequest(vreq, maker); } else if("smushSingleModel".equals(actionStr)) { - processSmushSingleModelRequest(vreq, maker, modelType); + processSmushSingleModelRequest(vreq); } else if("connectDB".equals(actionStr)) { - processConnectDBRequest(vreq, maker, modelType); + processConnectDBRequest(vreq); } else if("csv2rdf".equals(actionStr)) { - processCsv2rdfRequest(vreq, maker, modelType); + processCsv2rdfRequest(vreq); } else if("processStrings".equals(actionStr)) { - processProcessStringsRequest(vreq, maker, modelType); + processProcessStringsRequest(vreq); } else if("splitPropertyValues".equals(actionStr)) { - processSplitPropertyValuesRequest(vreq, maker, modelType); + processSplitPropertyValuesRequest(vreq); } else if("subtractModels".equals(actionStr)) { - processSubtractModelRequest(vreq, maker, modelType); + processSubtractModelRequest(vreq); } else if("executeWorkflow".equals(actionStr)) { - processExecuteWorkflowRequest(vreq, maker, modelType); + processExecuteWorkflowRequest(vreq); } else if("executeSparql".equals(actionStr)) { - processExecuteSparqlRequest(vreq, maker, modelType); + processExecuteSparqlRequest(vreq); } else if ("generateTBox".equals(actionStr)) { - processGenerateTBoxRequest(vreq, maker, modelType); + processGenerateTBoxRequest(vreq); } else if("permanentURI".equals(actionStr)){ - processPermanentURIRequest(vreq, maker, modelType); + processPermanentURIRequest(vreq, maker); } else if("mergeResources".equals(actionStr)){ - processMergeResourceRequest(vreq, maker, modelType); + processMergeResourceRequest(vreq); } else if("renameResource".equals(actionStr)){ - processRenameResourceRequest(vreq, response, maker, modelType); + processRenameResourceRequest(vreq); } else if("mergeResult".equals(actionStr)){ - processMergeResultRequest(vreq, response, maker, modelType); + processMergeResultRequest(vreq, response); 
return; } @@ -189,7 +190,7 @@ public class JenaIngestController extends BaseEditController { request.setAttribute("bodyJsp",INGEST_MENU_JSP); } - maker = getVitroJenaModelMaker(vreq); + maker = getModelMaker(vreq); request.setAttribute("modelNames", maker.listModels().toList()); RequestDispatcher rd = request.getRequestDispatcher( @@ -203,27 +204,27 @@ public class JenaIngestController extends BaseEditController { } - private void processListModelsRequest(VitroRequest vreq, ModelMaker maker, String modelType) { + private void processListModelsRequest(VitroRequest vreq, ModelMaker maker, ModelMakerID modelType) { showModelList(vreq, maker, modelType); } - public static boolean isUsingMainStoreForIngest(HttpServletRequest req) { + protected static boolean isUsingMainStoreForIngest(HttpServletRequest req) { return CONFIGURATION != req.getSession().getAttribute(WHICH_MODEL_MAKER); } - private void processRDBModelsRequest(VitroRequest vreq, ModelMaker maker, String modelType) { + private void processConfigModelsRequest(VitroRequest vreq) { ModelMaker vjmm = ModelAccess.on(getServletContext()).getModelMaker(CONFIGURATION); vreq.getSession().setAttribute(WHICH_MODEL_MAKER, CONFIGURATION); - showModelList(vreq, vjmm, "rdb"); + showModelList(vreq, vjmm, CONFIGURATION); } - private void processSDBModelsRequest(VitroRequest vreq, ModelMaker maker, String modelType) { + private void processContentModelsRequest(VitroRequest vreq) { ModelMaker vsmm = ModelAccess.on(getServletContext()).getModelMaker(CONTENT); vreq.getSession().setAttribute(WHICH_MODEL_MAKER, CONTENT); - showModelList(vreq, vsmm, "sdb"); + showModelList(vreq, vsmm, CONTENT); } - private void processCreateModelRequest(VitroRequest vreq, ModelMaker maker, String modelType) { + private void processCreateModelRequest(VitroRequest vreq, ModelMaker maker, ModelMakerID modelType) { String modelName = vreq.getParameter("modelName"); if (modelName != null) { @@ -242,13 +243,13 @@ public class JenaIngestController extends BaseEditController { throw new RuntimeException("the model name must be a valid URI"); } } else { - vreq.setAttribute("modelType", modelType); + vreq.setAttribute("modelType", modelType.toString()); vreq.setAttribute("title","Create New Model"); vreq.setAttribute("bodyJsp",CREATE_MODEL_JSP); } } - private void processRemoveModelRequest(VitroRequest vreq, ModelMaker maker, String modelType) { + private void processRemoveModelRequest(VitroRequest vreq, ModelMaker maker, ModelMakerID modelType) { String modelName = vreq.getParameter("modelName"); if (modelName!=null) { doRemoveModel(modelName, maker); @@ -256,7 +257,7 @@ public class JenaIngestController extends BaseEditController { showModelList(vreq, maker, modelType); } - private void processClearModelRequest(VitroRequest vreq, ModelMaker maker, String modelType) { + private void processClearModelRequest(VitroRequest vreq, ModelMaker maker, ModelMakerID modelType) { String modelName = vreq.getParameter("modelName"); if (modelName != null) { doClearModel(modelName,maker); @@ -264,7 +265,7 @@ public class JenaIngestController extends BaseEditController { showModelList(vreq, maker, modelType); } - private void processLoadRDFDataRequest(VitroRequest vreq, ModelMaker maker, String modelType) { + private void processLoadRDFDataRequest(VitroRequest vreq, ModelMaker maker) { String docLoc = vreq.getParameter("docLoc"); String filePath = vreq.getParameter("filePath"); String modelName = vreq.getParameter("modelName"); @@ -281,9 +282,7 @@ public class JenaIngestController extends 
BaseEditController { } private void processOutputModelRequest(VitroRequest vreq, - HttpServletResponse response, - ModelMaker maker, - String modelType) { + HttpServletResponse response) { String modelNameStr = vreq.getParameter("modelName"); Model model = getModel(modelNameStr,vreq); JenaOutputUtils.setNameSpacePrefixes(model,vreq.getWebappDaoFactory()); @@ -310,7 +309,7 @@ public class JenaIngestController extends BaseEditController { } } - private void processCleanLiteralsRequest(VitroRequest vreq, ModelMaker maker, String modelType) { + private void processCleanLiteralsRequest(VitroRequest vreq) { String modelNameStr = vreq.getParameter("modelName"); Model model = getModel(modelNameStr,vreq); doCleanLiterals(model); @@ -318,7 +317,7 @@ public class JenaIngestController extends BaseEditController { vreq.setAttribute("bodyJsp",INGEST_MENU_JSP); } - private void processAttachModelRequest(VitroRequest vreq, ModelMaker maker, String modelType) { + private void processAttachModelRequest(VitroRequest vreq, ModelMaker maker, ModelMakerID modelType) { String modelName = vreq.getParameter("modelName"); if (modelName != null) { doAttachModel(modelName,maker); @@ -326,15 +325,15 @@ public class JenaIngestController extends BaseEditController { showModelList(vreq, maker, modelType); } - private void processDetachModelRequest(VitroRequest vreq, ModelMaker maker, String modelType) { + private void processDetachModelRequest(VitroRequest vreq, ModelMaker maker, ModelMakerID modelType) { String modelName = vreq.getParameter("modelName"); if (modelName != null) { - doDetachModel(modelName,maker); + doDetachModel(modelName); } showModelList(vreq, maker, modelType); } - private void processRenameBNodesRequest(VitroRequest vreq, ModelMaker maker, String modelType) { + private void processRenameBNodesRequest(VitroRequest vreq, ModelMaker maker) { String[] sourceModel = vreq.getParameterValues("sourceModelName"); JenaIngestUtils utils = new JenaIngestUtils(); if(sourceModel != null && sourceModel.length != 0) { @@ -359,19 +358,19 @@ public class JenaIngestController extends BaseEditController { } } - private void processRenameBNodesURISelectRequest(VitroRequest vreq, ModelMaker maker, String modelType) { + private void processRenameBNodesURISelectRequest(VitroRequest vreq, ModelMaker maker) { String namespaceEtcStr = vreq.getParameter("namespaceEtcStr"); String pattern = vreq.getParameter("pattern"); String concatenate = vreq.getParameter("concatenate"); - String[] sourceModel = (String[]) vreq.getParameterValues("sourceModelName"); + String[] sourceModel = vreq.getParameterValues("sourceModelName"); if(namespaceEtcStr != null) { if (namespaceEtcStr.isEmpty()) { if ("true".equals(vreq.getParameter("csv2rdf"))) { - processCsv2rdfRequest(vreq, maker, modelType); + processCsv2rdfRequest(vreq); return; } else { vreq.setAttribute("errorMsg", "Please enter a value."); - processRenameBNodesRequest(vreq, maker, modelType); + processRenameBNodesRequest(vreq, maker); return; } } @@ -390,7 +389,7 @@ public class JenaIngestController extends BaseEditController { } } - private void processSmushSingleModelRequest(VitroRequest vreq, ModelMaker maker, String modelType) { + private void processSmushSingleModelRequest(VitroRequest vreq) { String propertyURIStr = vreq.getParameter("propertyURI"); if (propertyURIStr != null) { doSmushSingleModel(vreq); @@ -402,19 +401,15 @@ public class JenaIngestController extends BaseEditController { } } - private void processConnectDBRequest(VitroRequest vreq, ModelMaker maker, String 
modelType) { + private void processConnectDBRequest(VitroRequest vreq) { String jdbcUrl = vreq.getParameter("jdbcUrl"); String tripleStore = vreq.getParameter("tripleStore"); if (jdbcUrl != null) { doConnectDB(vreq); if ("SDB".equals(tripleStore)) { - getServletContext().setAttribute("modelT", "sdb"); - getServletContext().setAttribute("info", "SDB models"); vreq.setAttribute("modelType", "sdb"); vreq.setAttribute("infoLine", "SDB models"); } else { - getServletContext().setAttribute("modelT", "rdb"); - getServletContext().setAttribute("info", "RDB models"); vreq.setAttribute("modelType", "rdb"); vreq.setAttribute("infoLine", "RDB models"); } @@ -429,7 +424,7 @@ public class JenaIngestController extends BaseEditController { } } - private void processCsv2rdfRequest(VitroRequest vreq, ModelMaker maker, String modelType) { + private void processCsv2rdfRequest(VitroRequest vreq) { String csvUrl = vreq.getParameter("csvUrl"); if (csvUrl != null) { /*doExecuteCsv2Rdf(vreq);*/ @@ -441,7 +436,7 @@ public class JenaIngestController extends BaseEditController { } } - private void processProcessStringsRequest(VitroRequest vreq, ModelMaker maker, String modelType) { + private void processProcessStringsRequest(VitroRequest vreq) { String className = vreq.getParameter("className"); if (className != null) { doProcessStrings(vreq); @@ -453,7 +448,7 @@ public class JenaIngestController extends BaseEditController { } } - private void processSplitPropertyValuesRequest(VitroRequest vreq, ModelMaker maker, String modelType) { + private void processSplitPropertyValuesRequest(VitroRequest vreq) { String splitRegex = vreq.getParameter("splitRegex"); if (splitRegex != null) { doSplitPropertyValues(vreq); @@ -465,7 +460,7 @@ public class JenaIngestController extends BaseEditController { } } - private void processSubtractModelRequest(VitroRequest vreq, ModelMaker maker, String modelType) { + private void processSubtractModelRequest(VitroRequest vreq) { String modela = vreq.getParameter("modela"); if (modela != null) { doSubtractModels(vreq); @@ -477,7 +472,7 @@ public class JenaIngestController extends BaseEditController { } } - private void processExecuteWorkflowRequest(VitroRequest vreq, ModelMaker maker, String modelType) { + private void processExecuteWorkflowRequest(VitroRequest vreq) { String workflowURIStr = vreq.getParameter("workflowURI"); String workflowStepURIStr = vreq.getParameter("workflowStepURI"); if (workflowURIStr != null && workflowStepURIStr != null) { @@ -489,7 +484,7 @@ public class JenaIngestController extends BaseEditController { OntModel jenaOntModel = (OntModel) getModel("vitro:jenaOntModel",vreq); vreq.setAttribute("workflowSteps", new JenaIngestWorkflowProcessor( jenaOntModel.getIndividual(workflowURIStr), - getVitroJenaModelMaker(vreq)).getWorkflowSteps(null)); + getModelMaker(vreq)).getWorkflowSteps(null)); vreq.setAttribute("title", "Choose Workflow Step"); vreq.setAttribute("bodyJsp", WORKFLOW_STEP_JSP); } else { @@ -508,7 +503,7 @@ public class JenaIngestController extends BaseEditController { } } - private void processExecuteSparqlRequest(VitroRequest vreq, ModelMaker maker, String modelType) { + private void processExecuteSparqlRequest(VitroRequest vreq) { String sparqlQueryStr = vreq.getParameter("sparqlQueryStr"); OntModel jenaOntModel = ModelAccess.on(getServletContext()).getJenaOntModel(); jenaOntModel.enterCriticalSection(Lock.READ); @@ -521,12 +516,12 @@ public class JenaIngestController extends BaseEditController { } /*ass92*/ OntologyDao daoObj = 
vreq.getUnfilteredWebappDaoFactory().getOntologyDao(); - List ontologiesObj = daoObj.getAllOntologies(); - ArrayList prefixList = new ArrayList(); + List ontologiesObj = daoObj.getAllOntologies(); + List prefixList = new ArrayList<>(); if(ontologiesObj !=null && ontologiesObj.size()>0){ - Iterator ontItr = ontologiesObj.iterator(); + Iterator ontItr = ontologiesObj.iterator(); while(ontItr.hasNext()){ - Ontology ont = (Ontology) ontItr.next(); + Ontology ont = ontItr.next(); prefixList.add(ont.getPrefix() == null ? "(not yet specified)" : ont.getPrefix()); prefixList.add(ont.getURI() == null ? "" : ont.getURI()); } @@ -576,7 +571,7 @@ public class JenaIngestController extends BaseEditController { } } - private void processGenerateTBoxRequest(VitroRequest vreq, ModelMaker maker, String modelType) { + private void processGenerateTBoxRequest(VitroRequest vreq) { String testParam = vreq.getParameter("sourceModelName"); if (testParam != null) { doGenerateTBox(vreq); @@ -588,7 +583,7 @@ public class JenaIngestController extends BaseEditController { } } - private void processPermanentURIRequest(VitroRequest vreq, ModelMaker maker, String modelType) { + private void processPermanentURIRequest(VitroRequest vreq, ModelMaker maker) { String modelName = vreq.getParameter("modelName"); String oldModel = vreq.getParameter("oldModel"); String newModel = vreq.getParameter("newModel"); @@ -599,7 +594,7 @@ public class JenaIngestController extends BaseEditController { newNamespace = (dNamespace != null) ? dNamespace : newNamespace; if(modelName!=null){ Model m = maker.getModel(modelName); - ArrayList namespaceList = new ArrayList(); + List namespaceList = new ArrayList<>(); ResIterator resItr = m.listResourcesWithProperty((Property)null); if(resItr!=null){ while(resItr.hasNext()){ @@ -625,7 +620,7 @@ public class JenaIngestController extends BaseEditController { } } - private void processMergeResourceRequest(VitroRequest vreq, ModelMaker maker, String modelType) { + private void processMergeResourceRequest(VitroRequest vreq) { String uri1 = vreq.getParameter("uri1"); // get primary uri String uri2 = vreq.getParameter("uri2"); // get secondary uri String usePrimaryLabelOnly = vreq.getParameter("usePrimaryLabelOnly"); @@ -655,10 +650,7 @@ public class JenaIngestController extends BaseEditController { } } - private void processRenameResourceRequest(VitroRequest vreq, - HttpServletResponse response, - ModelMaker maker, - String modelType) { + private void processRenameResourceRequest(VitroRequest vreq) { String oldNamespace = vreq.getParameter("oldNamespace"); String newNamespace = vreq.getParameter("newNamespace"); String errorMsg = ""; @@ -679,7 +671,7 @@ public class JenaIngestController extends BaseEditController { vreq.setAttribute("title","Rename Resource"); vreq.setAttribute("bodyJsp",RENAME_RESOURCE); } else { - String result = doRename(oldNamespace, newNamespace, response); + String result = doRename(oldNamespace, newNamespace); vreq.setAttribute("result",result); vreq.setAttribute("title","Rename Resources"); vreq.setAttribute("bodyJsp",RENAME_RESULT); @@ -691,9 +683,7 @@ public class JenaIngestController extends BaseEditController { } private void processMergeResultRequest(VitroRequest vreq, - HttpServletResponse response, - ModelMaker maker, - String modelType) { + HttpServletResponse response) { Model lmodel = (Model) vreq.getSession().getAttribute("leftoverModel"); response.setContentType("RDF/XML-ABBREV"); @@ -708,17 +698,24 @@ public class JenaIngestController extends BaseEditController { throw 
new RuntimeException(ioe); } } - - protected String getModelType(VitroRequest vreq) { + + /** + * Get the model type from the request, or from the session. + */ + protected ModelMakerID getModelType(VitroRequest vreq) { String modelType = vreq.getParameter("modelType"); - if (modelType == null) { - if (isUsingMainStoreForIngest(vreq)) { - modelType = "sdb"; - } else { - modelType = "rdb"; - } - } - return modelType; + if (modelType != null) { + if (modelType.equals(CONFIGURATION.toString())) { + return CONFIGURATION; + } else { + return CONTENT; + } + } + if (vreq.getSession().getAttribute(WHICH_MODEL_MAKER) == CONFIGURATION) { + return CONFIGURATION; + } else { + return CONTENT; + } } private void doCreateModel(String modelName, ModelMaker modelMaker) { @@ -727,7 +724,7 @@ public class JenaIngestController extends BaseEditController { private void doRemoveModel(String modelName, ModelMaker modelMaker) { //Try to detach first since it cause problems to remove an attached model. - doDetachModel(modelName, modelMaker); + doDetachModel(modelName); log.debug("Removing " + modelName + " from webapp"); modelMaker.removeModel(modelName); } @@ -779,7 +776,7 @@ public class JenaIngestController extends BaseEditController { private void doAttachModel(String modelName, ModelMaker modelMaker) { if (attachedModels.containsKey(modelName)) { - doDetachModel(modelName, modelMaker); + doDetachModel(modelName); } Model m = ModelFactory.createDefaultModel(); m.add(modelMaker.getModel(modelName)); @@ -788,7 +785,7 @@ public class JenaIngestController extends BaseEditController { log.info("Attached " + modelName + " (" + m.hashCode() + ") to webapp"); } - private void doDetachModel(String modelName, ModelMaker modelMaker) { + private void doDetachModel(String modelName) { Model m = attachedModels.get(modelName); if (m == null) { return; @@ -804,7 +801,7 @@ public class JenaIngestController extends BaseEditController { Boolean csv2rdf = false; try { - csv2rdf = (Boolean) Boolean.parseBoolean(vreq.getParameter("csv2rdf")); + csv2rdf = Boolean.parseBoolean(vreq.getParameter("csv2rdf")); } catch (Exception e) { log.error(e, e); } @@ -987,17 +984,17 @@ public class JenaIngestController extends BaseEditController { } Model additionsModel = ModelFactory.createDefaultModel(); Model retractionsModel = ModelFactory.createDefaultModel(); - Class stringProcessorClass = Class.forName(className); + Class stringProcessorClass = Class.forName(className); Object processor = stringProcessorClass.newInstance(); - Class[] methArgs = {String.class}; + Class[] methArgs = {String.class}; Method meth = stringProcessorClass.getMethod(methodName,methArgs); Property prop = ResourceFactory.createProperty(propertyName); Property newProp = ResourceFactory.createProperty(newPropertyName); destination.enterCriticalSection(Lock.READ); try { - ClosableIterator closeIt = destination.listStatements((Resource)null,prop,(RDFNode)null); - for (Iterator stmtIt = closeIt; stmtIt.hasNext(); ) { - Statement stmt = (Statement) stmtIt.next(); + ClosableIterator closeIt = destination.listStatements((Resource)null,prop,(RDFNode)null); + for (Iterator stmtIt = closeIt; stmtIt.hasNext(); ) { + Statement stmt = stmtIt.next(); if (stmt.getObject().isLiteral()) { Literal lit = (Literal) stmt.getObject(); String lex = lit.getLexicalForm(); @@ -1047,10 +1044,10 @@ public class JenaIngestController extends BaseEditController { Model additionsModel = ModelFactory.createDefaultModel(); model.enterCriticalSection(Lock.WRITE); try { - ClosableIterator closeIt = 
model.listStatements(); + ClosableIterator closeIt = model.listStatements(); try { - for (Iterator stmtIt = closeIt; stmtIt.hasNext();) { - Statement stmt = (Statement) stmtIt.next(); + for (Iterator stmtIt = closeIt; stmtIt.hasNext();) { + Statement stmt = stmtIt.next(); if (stmt.getObject().isLiteral()) { Literal lit = (Literal) stmt.getObject(); String lex = lit.getLexicalForm(); @@ -1099,11 +1096,11 @@ public class JenaIngestController extends BaseEditController { String workflowStepURI = vreq.getParameter("workflowStepURI"); OntModel jenaOntModel = (OntModel) getModel("vitro:jenaOntModel",vreq); new JenaIngestWorkflowProcessor( - jenaOntModel.getIndividual(workflowURI),getVitroJenaModelMaker( + jenaOntModel.getIndividual(workflowURI),getModelMaker( vreq)).run(jenaOntModel.getIndividual(workflowStepURI)); } - private String doRename(String oldNamespace,String newNamespace,HttpServletResponse response){ + private String doRename(String oldNamespace,String newNamespace){ String uri = null; String result = null; Integer counter = 0; @@ -1200,13 +1197,12 @@ public class JenaIngestController extends BaseEditController { } } - protected void showModelList(VitroRequest vreq, ModelMaker maker, String modelType) { - if(modelType.equals("rdb")){ - vreq.setAttribute("modelType", "rdb"); - vreq.setAttribute("infoLine", "RDB models"); + protected void showModelList(VitroRequest vreq, ModelMaker maker, ModelMakerID modelType) { + vreq.setAttribute("modelType", modelType.toString()); + if(modelType == CONTENT){ + vreq.setAttribute("infoLine", "Main Store models"); } else { - vreq.setAttribute("modelType", "sdb"); - vreq.setAttribute("infoLine", "main store models"); + vreq.setAttribute("infoLine", "Configuration models"); } vreq.setAttribute("modelNames", maker.listModels().toList()); vreq.setAttribute("bodyAttr", "onLoad=\"init()\""); @@ -1222,17 +1218,18 @@ public class JenaIngestController extends BaseEditController { this.collator = vreq.getCollator(); } - public int compare(String s1, String s2) { + @Override + public int compare(String s1, String s2) { return collator.compare(s1, s2); } } public static Model getModel(String name, HttpServletRequest request) { - return getVitroJenaModelMaker(request).getModel(name); + return getModelMaker(request).getModel(name); } - protected static ModelMaker getVitroJenaModelMaker(HttpServletRequest req){ + protected static ModelMaker getModelMaker(HttpServletRequest req){ ServletContext ctx = req.getSession().getServletContext(); if (isUsingMainStoreForIngest(req)) { return ModelAccess.on(ctx).getModelMaker(CONTENT); diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/controller/jena/JenaXMLFileUpload.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/controller/jena/JenaXMLFileUpload.java index f5127dd64..a0a7673f9 100644 --- a/webapp/src/edu/cornell/mannlib/vitro/webapp/controller/jena/JenaXMLFileUpload.java +++ b/webapp/src/edu/cornell/mannlib/vitro/webapp/controller/jena/JenaXMLFileUpload.java @@ -114,7 +114,7 @@ public class JenaXMLFileUpload extends JenaIngestController { return; } - ModelMaker modelMaker = getVitroJenaModelMaker(vreq); + ModelMaker modelMaker = getModelMaker(vreq); String targetModel = request.getParameter("targetModel"); if (targetModel == null) { throw new ServletException("targetModel not specified."); @@ -172,7 +172,7 @@ public class JenaXMLFileUpload extends JenaIngestController { request.setAttribute("title","Upload file and convert to RDF"); request.setAttribute("bodyJsp","/jenaIngest/xmlFileUpload.jsp"); - 
request.setAttribute("modelNames", getVitroJenaModelMaker(vreq).listModels().toList()); + request.setAttribute("modelNames", getModelMaker(vreq).listModels().toList()); request.setAttribute("models", null); RequestDispatcher rd = request.getRequestDispatcher(Controllers.BASIC_JSP); diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/controller/jena/RDFUploadController.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/controller/jena/RDFUploadController.java index 51c892dbc..26ec4a830 100644 --- a/webapp/src/edu/cornell/mannlib/vitro/webapp/controller/jena/RDFUploadController.java +++ b/webapp/src/edu/cornell/mannlib/vitro/webapp/controller/jena/RDFUploadController.java @@ -2,6 +2,8 @@ package edu.cornell.mannlib.vitro.webapp.controller.jena; +import static edu.cornell.mannlib.vitro.webapp.dao.ModelAccess.ModelMakerID.CONFIGURATION; + import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.File; @@ -36,6 +38,7 @@ import edu.cornell.mannlib.vitro.webapp.controller.Controllers; import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest; import edu.cornell.mannlib.vitro.webapp.dao.ModelAccess; import edu.cornell.mannlib.vitro.webapp.dao.ModelAccess.ModelID; +import edu.cornell.mannlib.vitro.webapp.dao.ModelAccess.ModelMakerID; import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactory; import edu.cornell.mannlib.vitro.webapp.dao.jena.JenaModelUtils; import edu.cornell.mannlib.vitro.webapp.dao.jena.OntModelSelector; @@ -65,7 +68,8 @@ public class RDFUploadController extends JenaIngestController { return true; } - public void doPost(HttpServletRequest req, + @Override + public void doPost(HttpServletRequest req, HttpServletResponse response) throws ServletException, IOException { if (!isAuthorizedToDisplayPage(req, response, SimplePermission.USE_ADVANCED_DATA_TOOLS_PAGES.ACTION)) { @@ -249,7 +253,7 @@ public class RDFUploadController extends JenaIngestController { String modelName = request.getParameter("modelName"); String docLoc = request.getParameter("docLoc"); String languageStr = request.getParameter("language"); - ModelMaker maker = getVitroJenaModelMaker(request); + ModelMaker maker = getModelMaker(request); if (modelName == null) { request.setAttribute("title","Load RDF Data"); @@ -261,7 +265,7 @@ public class RDFUploadController extends JenaIngestController { } finally { rdfService.close(); } - String modelType = getModelType(request); + ModelMakerID modelType = getModelType(request); showModelList(request, maker, modelType); } @@ -279,7 +283,7 @@ public class RDFUploadController extends JenaIngestController { } private RDFService getRDFService(VitroRequest vreq, ModelMaker maker, String modelName) { - if (JenaIngestController.isUsingMainStoreForIngest(vreq)) { + if (isUsingMainStoreForIngest(vreq)) { log.debug("Using main RDFService"); return RDFServiceUtils.getRDFServiceFactory( getServletContext()).getRDFService(); diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/ModelAccess.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/ModelAccess.java index e3c128068..a964e188a 100644 --- a/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/ModelAccess.java +++ b/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/ModelAccess.java @@ -39,7 +39,6 @@ import edu.cornell.mannlib.vitro.webapp.dao.jena.OntModelSelector; * VitroModelSource.openModel(name) * VitroModelSource.openModelIfPresent(string) * ServletContext.getAttribute("pelletOntModel") - * VitroJenaModelMaker * JenaDataSourceSetupBase.getApplicationDataSource(ctx) * 
JenaDataSourceSetupBase.getStartupDataset()
 * HttpSession.getAttribute("jenaAuditModel")
diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/WebappDaoFactoryConfig.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/WebappDaoFactoryConfig.java
index ee5eaf2de..b629a2583 100644
--- a/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/WebappDaoFactoryConfig.java
+++ b/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/WebappDaoFactoryConfig.java
@@ -8,7 +8,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
-import org.openjena.atlas.lib.Pair;
+import org.apache.jena.atlas.lib.Pair;
 import edu.cornell.mannlib.vitro.webapp.beans.ObjectProperty;
diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/BlankNodeFilteringGraph.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/BlankNodeFilteringGraph.java
index c9c62df8c..e46fe3c01 100644
--- a/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/BlankNodeFilteringGraph.java
+++ b/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/BlankNodeFilteringGraph.java
@@ -11,11 +11,9 @@ import com.hp.hpl.jena.graph.Graph;
 import com.hp.hpl.jena.graph.GraphEventManager;
 import com.hp.hpl.jena.graph.GraphStatisticsHandler;
 import com.hp.hpl.jena.graph.Node;
-import com.hp.hpl.jena.graph.Reifier;
 import com.hp.hpl.jena.graph.TransactionHandler;
 import com.hp.hpl.jena.graph.Triple;
 import com.hp.hpl.jena.graph.TripleMatch;
-import com.hp.hpl.jena.graph.query.QueryHandler;
 import com.hp.hpl.jena.shared.AddDeniedException;
 import com.hp.hpl.jena.shared.DeleteDeniedException;
 import com.hp.hpl.jena.shared.PrefixMapping;
@@ -84,6 +82,7 @@ public class BlankNodeFilteringGraph implements Graph {
     }
 
     @Override
+    @Deprecated
     public BulkUpdateHandler getBulkUpdateHandler() {
         return graph.getBulkUpdateHandler();
     }
@@ -103,11 +102,6 @@ public class BlankNodeFilteringGraph implements Graph {
         return graph.getPrefixMapping();
     }
 
-    @Override
-    public Reifier getReifier() {
-        return graph.getReifier();
-    }
-
     @Override
     public GraphStatisticsHandler getStatisticsHandler() {
         return graph.getStatisticsHandler();
@@ -133,13 +127,18 @@ public class BlankNodeFilteringGraph implements Graph {
         return graph.isIsomorphicWith(arg0);
     }
 
-    @Override
-    public QueryHandler queryHandler() {
-        return graph.queryHandler();
-    }
-
     @Override
     public int size() {
         return graph.size();
     }
+
+    @Override
+    public void clear() {
+        graph.clear();
+    }
+
+    @Override
+    public void remove(Node arg0, Node arg1, Node arg2) {
+        graph.remove(arg0, arg1, arg2);
+    }
 }
diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/DifferenceGraph.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/DifferenceGraph.java
index cd45a9795..e247ebeac 100644
--- a/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/DifferenceGraph.java
+++ b/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/DifferenceGraph.java
@@ -10,11 +10,9 @@ import com.hp.hpl.jena.graph.Graph;
 import com.hp.hpl.jena.graph.GraphEventManager;
 import com.hp.hpl.jena.graph.GraphStatisticsHandler;
 import com.hp.hpl.jena.graph.Node;
-import com.hp.hpl.jena.graph.Reifier;
 import com.hp.hpl.jena.graph.TransactionHandler;
 import com.hp.hpl.jena.graph.Triple;
 import com.hp.hpl.jena.graph.TripleMatch;
-import com.hp.hpl.jena.graph.query.QueryHandler;
 import com.hp.hpl.jena.shared.AddDeniedException;
 import com.hp.hpl.jena.shared.DeleteDeniedException;
 import com.hp.hpl.jena.shared.PrefixMapping;
@@ -51,6 +49,11 @@ public class DifferenceGraph implements Graph {
         g.delete(arg0);
     }
 
+    @Override
+    public void
remove(Node arg0, Node arg1, Node arg2) { + g.remove(arg0, arg1, arg2); + } + @Override public boolean dependsOn(Graph arg0) { return g.dependsOn(arg0); @@ -71,6 +74,7 @@ public class DifferenceGraph implements Graph { } @Override + @Deprecated public BulkUpdateHandler getBulkUpdateHandler() { return g.getBulkUpdateHandler(); } @@ -90,11 +94,6 @@ public class DifferenceGraph implements Graph { return g.getPrefixMapping(); } - @Override - public Reifier getReifier() { - return g.getReifier(); - } - @Override public GraphStatisticsHandler getStatisticsHandler() { return g.getStatisticsHandler(); @@ -120,11 +119,6 @@ public class DifferenceGraph implements Graph { return g.isIsomorphicWith(arg0); } - @Override - public QueryHandler queryHandler() { - return g.queryHandler(); - } - @Override public int size() { return g.size() - subtract.size(); @@ -135,4 +129,9 @@ public class DifferenceGraph implements Graph { g.add(arg0); } + @Override + public void clear() { + g.clear(); + } + } diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/EmptyReifier.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/EmptyReifier.java deleted file mode 100644 index ac29c9077..000000000 --- a/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/EmptyReifier.java +++ /dev/null @@ -1,118 +0,0 @@ -/* $This file is distributed under the terms of the license in /doc/license.txt$ */ - -package edu.cornell.mannlib.vitro.webapp.dao.jena; - -import java.util.Collections; - -import org.apache.commons.collections.iterators.EmptyIterator; - -import com.hp.hpl.jena.graph.Graph; -import com.hp.hpl.jena.graph.Node; -import com.hp.hpl.jena.graph.Reifier; -import com.hp.hpl.jena.graph.Triple; -import com.hp.hpl.jena.graph.TripleMatch; -import com.hp.hpl.jena.shared.ReificationStyle; -import com.hp.hpl.jena.util.iterator.ExtendedIterator; -import com.hp.hpl.jena.util.iterator.WrappedIterator; - -public class EmptyReifier implements Reifier { - - private Graph g; - - public EmptyReifier(Graph g) { - this.g = g; - } - - @Override - public Triple getTriple(Node arg0) { - // TODO Auto-generated method stub - return null; - } - - @Override - public ExtendedIterator allNodes() { - return WrappedIterator.create(Collections.EMPTY_LIST.iterator()); - } - - @Override - public ExtendedIterator allNodes(Triple arg0) { - return WrappedIterator.create(Collections.EMPTY_LIST.iterator()); - } - - @Override - public void close() { - // TODO Auto-generated method stub - - } - - @Override - public ExtendedIterator find(TripleMatch arg0) { - return g.find(arg0); - } - - @Override - public ExtendedIterator findEither(TripleMatch arg0, boolean arg1) { - return WrappedIterator.create(EmptyIterator.INSTANCE); - } - - @Override - public ExtendedIterator findExposed(TripleMatch arg0) { - return WrappedIterator.create(EmptyIterator.INSTANCE); - } - - @Override - public Graph getParentGraph() { - return g; - } - - @Override - public ReificationStyle getStyle() { - return ReificationStyle.Minimal; - } - - @Override - public boolean handledAdd(Triple arg0) { - g.add(arg0); - return true; - } - - @Override - public boolean handledRemove(Triple arg0) { - g.delete(arg0); - return true; - } - - @Override - public boolean hasTriple(Node arg0) { - // TODO Auto-generated method stub - return false; - } - - @Override - public boolean hasTriple(Triple arg0) { - // TODO Auto-generated method stub - return false; - } - - @Override - public Node reifyAs(Node arg0, Triple arg1) { - // TODO Auto-generated method stub - return null; - } - - @Override - 
public void remove(Triple arg0) { - g.delete(arg0); - } - - @Override - public void remove(Node arg0, Triple arg1) { - g.delete(arg1); - } - - @Override - public int size() { - return g.size(); - } - -} diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/ObjectPropertyDaoJena.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/ObjectPropertyDaoJena.java index dba241ecc..580aa34f5 100644 --- a/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/ObjectPropertyDaoJena.java +++ b/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/ObjectPropertyDaoJena.java @@ -13,7 +13,7 @@ import java.util.Map; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.openjena.atlas.lib.Pair; +import org.apache.jena.atlas.lib.Pair; import com.hp.hpl.jena.datatypes.xsd.XSDDatatype; import com.hp.hpl.jena.ontology.ConversionException; diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/RDBGraphGenerator.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/RDBGraphGenerator.java deleted file mode 100644 index 4f24a3009..000000000 --- a/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/RDBGraphGenerator.java +++ /dev/null @@ -1,74 +0,0 @@ -/* $This file is distributed under the terms of the license in /doc/license.txt$ */ - -package edu.cornell.mannlib.vitro.webapp.dao.jena; - -import java.sql.Connection; -import java.sql.SQLException; - -import javax.sql.DataSource; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - -import com.hp.hpl.jena.db.DBConnection; -import com.hp.hpl.jena.db.GraphRDB; -import com.hp.hpl.jena.db.IDBConnection; -import com.hp.hpl.jena.db.ModelRDB; -import com.hp.hpl.jena.graph.Graph; - - - -public class RDBGraphGenerator implements SQLGraphGenerator { - - private static final Log log = LogFactory.getLog(RDBGraphGenerator.class.getName()); - - private DataSource ds = null; - private Connection connection = null; - private String dbTypeStr = null; - private String graphID = null; - - public RDBGraphGenerator(DataSource ds, String dbTypeStr, String graphID) { - this.ds = ds; - this.dbTypeStr = dbTypeStr; - this.graphID = graphID; - } - - public boolean isGraphClosed() { - try { - return (connection == null || connection.isClosed()); - } catch (SQLException e) { - throw new RuntimeException(e); - } - } - - public Graph generateGraph() { - try { -// if (log.isDebugEnabled()) { -// log.debug(ds.getNumActive() + " active SQL connections"); -// log.debug(ds.getNumIdle() + " idle SQL connections"); -// } - if ( ( this.connection != null ) && ( !this.connection.isClosed() ) ) { - this.connection.close(); - } - this.connection = ds.getConnection(); - IDBConnection idbConn = new DBConnection(this.connection, dbTypeStr); - Graph requestedProperties = null; - boolean modelExists = idbConn.containsModel(graphID); - if (!modelExists) { - requestedProperties = ModelRDB.getDefaultModelProperties(idbConn).getGraph(); - } - Graph graphRDB = new GraphRDB(idbConn, graphID, requestedProperties, GraphRDB.OPTIMIZE_ALL_REIFICATIONS_AND_HIDE_NOTHING, !modelExists); - return graphRDB; - } catch (SQLException e) { - log.error(e, e); - throw new RuntimeException("SQLException: unable to regenerate graph", e); - } - } - - public Connection getConnection() { - return connection; - } - -} - - diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/RDFServiceDataset.java 
b/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/RDFServiceDataset.java
index 6517b495f..dee92e313 100644
--- a/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/RDFServiceDataset.java
+++ b/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/RDFServiceDataset.java
@@ -7,9 +7,12 @@ import java.util.Iterator;
 import com.hp.hpl.jena.graph.Node;
 import com.hp.hpl.jena.query.Dataset;
+import com.hp.hpl.jena.query.LabelExistsException;
+import com.hp.hpl.jena.query.ReadWrite;
 import com.hp.hpl.jena.rdf.model.Model;
 import com.hp.hpl.jena.shared.Lock;
 import com.hp.hpl.jena.sparql.core.DatasetGraph;
+import com.hp.hpl.jena.sparql.util.Context;
 
 import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
@@ -66,4 +69,73 @@ public class RDFServiceDataset implements Dataset {
         return nameList.iterator();
     }
 
+    @Override
+    public void addNamedModel(String uri, Model model)
+            throws LabelExistsException {
+        Iterator graphNodes = g.listGraphNodes();
+        while (graphNodes.hasNext()) {
+            Node graphNode = graphNodes.next();
+            if (graphNode.hasURI(uri)) {
+                throw new LabelExistsException("Can't add named model '"+ uri
+                        + "': model already exists");
+            }
+        }
+        g.addGraph(Node.createURI(uri), model.getGraph());
+    }
+
+    @Override
+    public Context getContext() {
+        return g.getContext();
+    }
+
+    @Override
+    public void removeNamedModel(String uri) {
+        g.removeGraph(Node.createURI(uri));
+    }
+
+    @Override
+    public void replaceNamedModel(String uri, Model model) {
+        removeNamedModel(uri);
+        addNamedModel(uri, model);
+    }
+
+    @Override
+    public void setDefaultModel(Model model) {
+        g.setDefaultGraph(model.getGraph());
+    }
+
+    @Override
+    public boolean supportsTransactions() {
+        return false;
+    }
+
+    @Override
+    public boolean isInTransaction() {
+        return false;
+    }
+
+    @Override
+    public void begin(ReadWrite arg0) {
+        throw new UnsupportedOperationException(this.getClass().getSimpleName()
+                + " does not support transactions.");
+    }
+
+    @Override
+    public void commit() {
+        throw new UnsupportedOperationException(this.getClass().getSimpleName()
+                + " does not support transactions.");
+    }
+
+    @Override
+    public void abort() {
+        throw new UnsupportedOperationException(this.getClass().getSimpleName()
+                + " does not support transactions.");
+    }
+
+    @Override
+    public void end() {
+        throw new UnsupportedOperationException(this.getClass().getSimpleName()
+                + " does not support transactions.");
+    }
+
 }
diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/RDFServiceDatasetGraph.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/RDFServiceDatasetGraph.java
index 23f9aaff2..6ed046a0d 100644
--- a/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/RDFServiceDatasetGraph.java
+++ b/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/RDFServiceDatasetGraph.java
@@ -48,8 +48,13 @@ public class RDFServiceDatasetGraph implements DatasetGraph {
         getGraphFor(arg0).add(new Triple(arg0.getSubject(), arg0.getPredicate(), arg0.getObject()));
     }
 
+    @Override
+    public void add(Node g, Node s, Node p, Node o) {
+        add(new Quad(g, s, p, o));
+    }
+
     @Override
-    public void addGraph(Node arg0, Graph arg1) {
+    public void addGraph(Node uri, Graph arg1) {
         // TODO Auto-generated method stub
     }
@@ -79,6 +84,11 @@ public class RDFServiceDatasetGraph implements DatasetGraph {
         getGraphFor(arg0).delete(new Triple(arg0.getSubject(), arg0.getPredicate(), arg0.getObject()));
     }
 
+    @Override
+    public void delete(Node g, Node s, Node p, Node o) {
+        delete(new Quad(g, s, p, o));
+    }
+
     @Override
     public void deleteAny(Node arg0, Node
arg1, Node arg2, Node arg3) { // TODO check this @@ -99,9 +109,9 @@ public class RDFServiceDatasetGraph implements DatasetGraph { public Iterator find(Node graph, Node subject, Node predicate, Node object) { if (!isVar(subject) && !isVar(predicate) && !isVar(object) &&!isVar(graph)) { if (contains(subject, predicate, object, graph)) { - return new SingletonIterator(new Triple(subject, predicate, object)); + return new SingletonIterator(new Quad(subject, predicate, object, graph)); } else { - return WrappedIterator.create(Collections.EMPTY_LIST.iterator()); + return WrappedIterator.create(Collections.emptyIterator()); } } StringBuffer findQuery = new StringBuffer("SELECT * WHERE { \n"); @@ -153,7 +163,6 @@ public class RDFServiceDatasetGraph implements DatasetGraph { @Override public Context getContext() { - // TODO Auto-generated method stub return null; } @@ -212,7 +221,5 @@ public class RDFServiceDatasetGraph implements DatasetGraph { private boolean isVar(Node node) { return (node == null || node.isVariable() || node == Node.ANY); } - - } diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/RDFServiceGraph.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/RDFServiceGraph.java index c7fb46680..58489a1b4 100644 --- a/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/RDFServiceGraph.java +++ b/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/RDFServiceGraph.java @@ -16,19 +16,15 @@ import com.hp.hpl.jena.graph.Graph; import com.hp.hpl.jena.graph.GraphEventManager; import com.hp.hpl.jena.graph.GraphStatisticsHandler; import com.hp.hpl.jena.graph.Node; -import com.hp.hpl.jena.graph.Reifier; import com.hp.hpl.jena.graph.TransactionHandler; import com.hp.hpl.jena.graph.Triple; import com.hp.hpl.jena.graph.TripleMatch; import com.hp.hpl.jena.graph.impl.GraphWithPerform; import com.hp.hpl.jena.graph.impl.SimpleEventManager; -import com.hp.hpl.jena.graph.query.QueryHandler; -import com.hp.hpl.jena.graph.query.SimpleQueryHandler; import com.hp.hpl.jena.query.QuerySolution; import com.hp.hpl.jena.query.ResultSet; import com.hp.hpl.jena.rdf.listeners.StatementListener; import com.hp.hpl.jena.rdf.model.Model; -import com.hp.hpl.jena.rdf.model.ModelFactory; import com.hp.hpl.jena.shared.AddDeniedException; import com.hp.hpl.jena.shared.DeleteDeniedException; import com.hp.hpl.jena.shared.PrefixMapping; @@ -41,6 +37,7 @@ import com.hp.hpl.jena.util.iterator.WrappedIterator; import edu.cornell.mannlib.vitro.webapp.rdfservice.ChangeSet; import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService; import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException; +import edu.cornell.mannlib.vitro.webapp.rdfservice.adapters.VitroModelFactory; import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.RDFServiceUtils; public class RDFServiceGraph implements GraphWithPerform { @@ -52,8 +49,6 @@ public class RDFServiceGraph implements GraphWithPerform { private BulkUpdateHandler bulkUpdateHandler; private PrefixMapping prefixMapping = new PrefixMappingImpl(); private GraphEventManager eventManager; - private Reifier reifier = new EmptyReifier(this); - private QueryHandler queryHandler; /** * Returns a SparqlGraph for the union of named graphs in a remote repository @@ -174,6 +169,13 @@ public class RDFServiceGraph implements GraphWithPerform { performDelete(arg0); } + @Override + public void remove(Node subject, Node predicate, Node object) { + for (Triple t : find(subject, predicate, object).toList()) { + delete(t); + } + } + @Override public boolean dependsOn(Graph arg0) { 
return false; // who knows? @@ -230,9 +232,9 @@ public class RDFServiceGraph implements GraphWithPerform { public ExtendedIterator find(Node subject, Node predicate, Node object) { if (!isVar(subject) && !isVar(predicate) && !isVar(object)) { if (contains(subject, predicate, object)) { - return new SingletonIterator(new Triple(subject, predicate, object)); + return new SingletonIterator(new Triple(subject, predicate, object)); } else { - return WrappedIterator.create(Collections.EMPTY_LIST.iterator()); + return WrappedIterator.create(Collections.emptyIterator()); } } StringBuffer findQuery = new StringBuffer("SELECT * WHERE { \n"); @@ -296,11 +298,6 @@ public class RDFServiceGraph implements GraphWithPerform { return prefixMapping; } - @Override - public Reifier getReifier() { - return reifier; - } - @Override public GraphStatisticsHandler getStatisticsHandler() { return null; @@ -329,54 +326,60 @@ public class RDFServiceGraph implements GraphWithPerform { "by SPARQL graphs"); } - @Override - public QueryHandler queryHandler() { - if (queryHandler == null) { - queryHandler = new SimpleQueryHandler(this); - } - return queryHandler; - } - @Override public int size() { int size = find(null, null, null).toList().size(); return size; } - private final static Capabilities capabilities = new Capabilities() { + @Override + public void clear() { + removeAll(); + } + + private final static Capabilities capabilities = new Capabilities() { - public boolean addAllowed() { + @Override + public boolean addAllowed() { return false; } + @Override public boolean addAllowed(boolean everyTriple) { return false; } + @Override public boolean canBeEmpty() { return true; } + @Override public boolean deleteAllowed() { return false; } + @Override public boolean deleteAllowed(boolean everyTriple) { return false; } + @Override public boolean findContractSafe() { return true; } + @Override public boolean handlesLiteralTyping() { return true; } + @Override public boolean iteratorRemoveAllowed() { return false; } + @Override public boolean sizeAccurate() { return true; } @@ -440,7 +443,7 @@ public class RDFServiceGraph implements GraphWithPerform { } public static Model createRDFServiceModel(final RDFServiceGraph g) { - Model m = ModelFactory.createModelForGraph(g); + Model m = VitroModelFactory.createModelForGraph(g); m.register(new StatementListener() { @Override public void notifyEvent(Model m, Object event) { diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/RDFServiceGraphBulkUpdater.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/RDFServiceGraphBulkUpdater.java index 8a1170204..320120b45 100644 --- a/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/RDFServiceGraphBulkUpdater.java +++ b/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/RDFServiceGraphBulkUpdater.java @@ -10,16 +10,17 @@ import java.util.List; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import com.hp.hpl.jena.graph.BulkUpdateHandler; import com.hp.hpl.jena.graph.Graph; +import com.hp.hpl.jena.graph.GraphEventManager; import com.hp.hpl.jena.graph.GraphEvents; import com.hp.hpl.jena.graph.Node; import com.hp.hpl.jena.graph.Triple; -import com.hp.hpl.jena.graph.impl.SimpleBulkUpdateHandler; import com.hp.hpl.jena.rdf.model.Model; import com.hp.hpl.jena.rdf.model.ModelFactory; import com.hp.hpl.jena.rdf.model.Statement; import com.hp.hpl.jena.rdf.model.StmtIterator; -import com.hp.hpl.jena.sparql.util.graph.GraphFactory; +import com.hp.hpl.jena.sparql.graph.GraphFactory; import 
com.hp.hpl.jena.util.iterator.ExtendedIterator; import edu.cornell.mannlib.vitro.webapp.rdfservice.ChangeSet; @@ -27,14 +28,15 @@ import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService; import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException; import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.RDFServiceUtils; -public class RDFServiceGraphBulkUpdater extends SimpleBulkUpdateHandler { +public class RDFServiceGraphBulkUpdater implements BulkUpdateHandler { private static final Log log = LogFactory.getLog(RDFServiceGraphBulkUpdater.class); - private RDFServiceGraph graph; + private final RDFServiceGraph graph; + private final GraphEventManager manager; public RDFServiceGraphBulkUpdater(RDFServiceGraph graph) { - super(graph); this.graph = graph; + this.manager = graph.getEventManager(); } @Override @@ -105,6 +107,34 @@ public class RDFServiceGraphBulkUpdater extends SimpleBulkUpdateHandler { } + @Override + public void delete(Triple[] arg0) { + Graph g = GraphFactory.createPlainGraph(); + for (int i = 0 ; i < arg0.length ; i++) { + g.add(arg0[i]); + } + delete(g); + } + + @Override + public void delete(List arg0) { + Graph g = GraphFactory.createPlainGraph(); + for (Triple t : arg0) { + g.add(t); + } + delete(g); + } + + @Override + public void delete(Iterator arg0) { + Graph g = GraphFactory.createPlainGraph(); + while (arg0.hasNext()) { + Triple t = arg0.next(); + g.add(t); + } + delete(g); + } + @Override public void delete(Graph g, boolean withReifications) { delete(g); @@ -149,7 +179,7 @@ public class RDFServiceGraphBulkUpdater extends SimpleBulkUpdateHandler { notifyRemoveAll(); } - protected void notifyRemoveAll() { + protected void notifyRemoveAll() { manager.notifyEvent(graph, GraphEvents.removeAll); } @@ -159,7 +189,7 @@ public class RDFServiceGraphBulkUpdater extends SimpleBulkUpdateHandler { manager.notifyEvent(graph, GraphEvents.remove(s, p, o)); } - public static void removeAll(Graph g, Node s, Node p, Node o) + private static void removeAll(Graph g, Node s, Node p, Node o) { if (!(g instanceof RDFServiceGraph)) { removeAllTripleByTriple(g, s, p, o); @@ -245,7 +275,7 @@ public class RDFServiceGraphBulkUpdater extends SimpleBulkUpdateHandler { * see http://www.python.org/doc/2.5.2/ref/strings.html * or see jena's n3 grammar jena/src/com/hp/hpl/jena/n3/n3.g */ - protected static void pyString(StringBuffer sbuff, String s) + private static void pyString(StringBuffer sbuff, String s) { for (int i = 0; i < s.length(); i++) { char c = s.charAt(i); @@ -280,7 +310,7 @@ public class RDFServiceGraphBulkUpdater extends SimpleBulkUpdateHandler { } } - public static void removeAllTripleByTriple(Graph g, Node s, Node p, Node o) + private static void removeAllTripleByTriple(Graph g, Node s, Node p, Node o) { ExtendedIterator it = g.find( s, p, o ); try { @@ -295,9 +325,4 @@ public class RDFServiceGraphBulkUpdater extends SimpleBulkUpdateHandler { } } - public static void removeAll( Graph g ) - { - g.getBulkUpdateHandler().delete(g); - } - } diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/RDFServiceModelMaker.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/RDFServiceModelMaker.java index 004eadbb9..ec490d477 100644 --- a/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/RDFServiceModelMaker.java +++ b/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/RDFServiceModelMaker.java @@ -34,6 +34,7 @@ import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService; import 
edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService.ModelSerializationFormat; import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException; import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceFactory; +import edu.cornell.mannlib.vitro.webapp.rdfservice.adapters.VitroModelFactory; public class RDFServiceModelMaker implements ModelMaker { @@ -198,17 +199,14 @@ public class RDFServiceModelMaker implements ModelMaker { Graph bnodeFilteringGraph = new BlankNodeFilteringGraph(model.getGraph()); Model bnodeFilteringModel = ModelFactory.createModelForGraph(bnodeFilteringGraph); + + Model specialUnionModel = VitroModelFactory.createUnion(bnodeFilteringModel, bnodeModel); + bnodeFilteringModel.register(new BlankNodeStatementListener(bnodeModel)); - BulkUpdateHandler bulkUpdateHandler = model.getGraph().getBulkUpdateHandler(); - Model unionModel = ModelFactory.createUnion(bnodeFilteringModel, bnodeModel); - Graph specialGraph = new SpecialBulkUpdateHandlerGraph(unionModel.getGraph(), bulkUpdateHandler); - Model specialUnionModel = ModelFactory.createModelForGraph(specialGraph); - bnodeFilteringModel.register(new BlankNodeStatementListener(bnodeModel)); - return specialUnionModel; } - - public void removeModel(String arg0) { + + public void removeModel(String arg0) { Model m = getModel(arg0); m.removeAll(null,null,null); Model metadataModel = getMetadataModel(); diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/RegeneratingGraph.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/RegeneratingGraph.java index 3195685dd..fe7b0184c 100644 --- a/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/RegeneratingGraph.java +++ b/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/RegeneratingGraph.java @@ -11,16 +11,14 @@ import com.hp.hpl.jena.graph.Graph; import com.hp.hpl.jena.graph.GraphEventManager; import com.hp.hpl.jena.graph.GraphStatisticsHandler; import com.hp.hpl.jena.graph.Node; -import com.hp.hpl.jena.graph.Reifier; import com.hp.hpl.jena.graph.TransactionHandler; import com.hp.hpl.jena.graph.Triple; import com.hp.hpl.jena.graph.TripleMatch; -import com.hp.hpl.jena.graph.query.QueryHandler; import com.hp.hpl.jena.shared.AddDeniedException; import com.hp.hpl.jena.shared.DeleteDeniedException; import com.hp.hpl.jena.shared.PrefixMapping; import com.hp.hpl.jena.util.iterator.ExtendedIterator; -import com.hp.hpl.jena.vocabulary.DAML_OIL; +import com.hp.hpl.jena.vocabulary.DCTerms; import com.hp.hpl.jena.vocabulary.RDF; public class RegeneratingGraph implements Graph, Regenerable { @@ -40,6 +38,7 @@ public class RegeneratingGraph implements Graph, Regenerable { this.generator = graphGenerator; } + @Override public void regenerate() { this.g = generator.generateGraph(); } @@ -49,25 +48,25 @@ public class RegeneratingGraph implements Graph, Regenerable { */ private void sendTestQuery() { this.g.contains( - DAML_OIL.Thing.asNode(),RDF.type.asNode(),DAML_OIL.Thing.asNode()); + DCTerms.Agent.asNode(),RDF.type.asNode(),DCTerms.Agent.asNode()); } + @Override protected void finalize() { close(); } + @Override public void close() { try { g.close(); - if (generator instanceof RDBGraphGenerator) { - ((RDBGraphGenerator) generator).getConnection().close(); - } } catch (Exception e) { regenerate(); g.close(); } } + @Override public boolean contains(Triple arg0) { try { regenerateIfClosed(); @@ -78,6 +77,7 @@ public class RegeneratingGraph implements Graph, Regenerable { } } + @Override public boolean contains(Node arg0, Node arg1, Node arg2) { try { 
regenerateIfClosed(); @@ -88,6 +88,7 @@ public class RegeneratingGraph implements Graph, Regenerable { } } + @Override public void delete(Triple arg0) throws DeleteDeniedException { try { regenerateIfClosed(); @@ -98,6 +99,7 @@ public class RegeneratingGraph implements Graph, Regenerable { } } + @Override public boolean dependsOn(Graph arg0) { try { regenerateIfClosed(); @@ -108,7 +110,8 @@ public class RegeneratingGraph implements Graph, Regenerable { } } - public ExtendedIterator find(TripleMatch arg0) { + @Override + public ExtendedIterator find(TripleMatch arg0) { try { regenerateIfClosed(); return g.find(arg0); @@ -118,7 +121,8 @@ public class RegeneratingGraph implements Graph, Regenerable { } } - public ExtendedIterator find(Node arg0, Node arg1, Node arg2) { + @Override + public ExtendedIterator find(Node arg0, Node arg1, Node arg2) { try { regenerateIfClosed(); return g.find(arg0,arg1,arg2); @@ -128,6 +132,8 @@ public class RegeneratingGraph implements Graph, Regenerable { } } + @Override + @Deprecated public BulkUpdateHandler getBulkUpdateHandler() { try { regenerateIfClosed(); @@ -139,6 +145,7 @@ public class RegeneratingGraph implements Graph, Regenerable { } } + @Override public Capabilities getCapabilities() { try { regenerateIfClosed(); @@ -151,6 +158,7 @@ public class RegeneratingGraph implements Graph, Regenerable { } + @Override public GraphEventManager getEventManager() { try { regenerateIfClosed(); @@ -163,6 +171,7 @@ public class RegeneratingGraph implements Graph, Regenerable { } + @Override public PrefixMapping getPrefixMapping() { try { regenerateIfClosed(); @@ -175,18 +184,7 @@ public class RegeneratingGraph implements Graph, Regenerable { } - public Reifier getReifier() { - try { - regenerateIfClosed(); - sendTestQuery(); - return g.getReifier(); - } catch (Exception e) { - regenerate(); - return g.getReifier(); - } - } - - + @Override public GraphStatisticsHandler getStatisticsHandler() { try { regenerateIfClosed(); @@ -199,6 +197,7 @@ public class RegeneratingGraph implements Graph, Regenerable { } + @Override public TransactionHandler getTransactionHandler() { try { regenerateIfClosed(); @@ -211,6 +210,7 @@ public class RegeneratingGraph implements Graph, Regenerable { } + @Override public boolean isClosed() { try { regenerateIfClosed(); @@ -222,6 +222,7 @@ public class RegeneratingGraph implements Graph, Regenerable { } + @Override public boolean isEmpty() { try { regenerateIfClosed(); @@ -233,6 +234,7 @@ public class RegeneratingGraph implements Graph, Regenerable { } + @Override public boolean isIsomorphicWith(Graph arg0) { try { regenerateIfClosed(); @@ -242,20 +244,8 @@ public class RegeneratingGraph implements Graph, Regenerable { return g.isIsomorphicWith(arg0); } } - - - public QueryHandler queryHandler() { - try { - regenerateIfClosed(); - sendTestQuery(); - return g.queryHandler(); - } catch (Exception e) { - regenerate(); - return g.queryHandler(); - } - } - + @Override public int size() { try { regenerateIfClosed(); @@ -267,6 +257,7 @@ public class RegeneratingGraph implements Graph, Regenerable { } + @Override public void add(Triple arg0) throws AddDeniedException { try { regenerateIfClosed(); @@ -277,6 +268,28 @@ public class RegeneratingGraph implements Graph, Regenerable { } } + @Override + public void clear() { + try { + regenerateIfClosed(); + g.clear(); + } catch (Exception e) { + regenerate(); + g.clear(); + } + } + + @Override + public void remove(Node arg0, Node arg1, Node arg2) { + try { + regenerateIfClosed(); + g.remove(arg0, arg1, 
arg2); + } catch (Exception e) { + regenerate(); + g.remove(arg0, arg1, arg2); + } + } + private void regenerateIfClosed() { if (generator.isGraphClosed()) { regenerate(); diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/SparqlGraph.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/SparqlGraph.java index 00ca0c550..a5014262e 100644 --- a/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/SparqlGraph.java +++ b/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/SparqlGraph.java @@ -9,6 +9,13 @@ import java.util.List; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.apache.http.client.entity.UrlEncodedFormEntity; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.impl.client.HttpClients; +import org.apache.http.impl.conn.PoolingHttpClientConnectionManager; +import org.apache.http.message.BasicNameValuePair; import com.hp.hpl.jena.graph.BulkUpdateHandler; import com.hp.hpl.jena.graph.Capabilities; @@ -16,14 +23,11 @@ import com.hp.hpl.jena.graph.Graph; import com.hp.hpl.jena.graph.GraphEventManager; import com.hp.hpl.jena.graph.GraphStatisticsHandler; import com.hp.hpl.jena.graph.Node; -import com.hp.hpl.jena.graph.Reifier; import com.hp.hpl.jena.graph.TransactionHandler; import com.hp.hpl.jena.graph.Triple; import com.hp.hpl.jena.graph.TripleMatch; import com.hp.hpl.jena.graph.impl.GraphWithPerform; import com.hp.hpl.jena.graph.impl.SimpleEventManager; -import com.hp.hpl.jena.graph.query.QueryHandler; -import com.hp.hpl.jena.graph.query.SimpleQueryHandler; import com.hp.hpl.jena.query.Query; import com.hp.hpl.jena.query.QueryExecution; import com.hp.hpl.jena.query.QueryExecutionFactory; @@ -39,18 +43,6 @@ import com.hp.hpl.jena.util.iterator.ExtendedIterator; import com.hp.hpl.jena.util.iterator.SingletonIterator; import com.hp.hpl.jena.util.iterator.WrappedIterator; -import org.apache.http.client.entity.UrlEncodedFormEntity; -import org.apache.http.client.methods.CloseableHttpResponse; -import org.apache.http.client.methods.HttpGet; -import org.apache.http.client.methods.HttpPost; -import org.apache.http.client.utils.URIBuilder; -import org.apache.http.impl.client.CloseableHttpClient; -import org.apache.http.impl.client.HttpClients; -import org.apache.http.impl.conn.PoolingHttpClientConnectionManager; -import org.apache.http.message.BasicNameValuePair; - -import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException; - public class SparqlGraph implements GraphWithPerform { private String endpointURI; @@ -61,10 +53,6 @@ public class SparqlGraph implements GraphWithPerform { private BulkUpdateHandler bulkUpdateHandler; private PrefixMapping prefixMapping = new PrefixMappingImpl(); private GraphEventManager eventManager; - private Reifier reifier = new EmptyReifier(this); - private GraphStatisticsHandler graphStatisticsHandler; - private TransactionHandler transactionHandler; - private QueryHandler queryHandler; /** * Returns a SparqlGraph for the union of named graphs in a remote repository @@ -205,6 +193,18 @@ public class SparqlGraph implements GraphWithPerform { performDelete(arg0); } + @Override + public void clear() { + removeAll(); + } + + @Override + public void remove(Node subject, Node predicate, Node object) { + for (Triple t: find(subject, predicate, object).toList()) { + delete(t); + } + } + @Override public boolean dependsOn(Graph arg0) { return false; // 
who knows? @@ -261,9 +261,9 @@ public class SparqlGraph implements GraphWithPerform { public ExtendedIterator find(Node subject, Node predicate, Node object) { if (!isVar(subject) && !isVar(predicate) && !isVar(object)) { if (contains(subject, predicate, object)) { - return new SingletonIterator(new Triple(subject, predicate, object)); + return new SingletonIterator(new Triple(subject, predicate, object)); } else { - return WrappedIterator.create(Collections.EMPTY_LIST.iterator()); + return WrappedIterator.create(Collections.emptyIterator()); } } StringBuffer findQuery = new StringBuffer("SELECT * WHERE { \n"); @@ -335,14 +335,6 @@ public class SparqlGraph implements GraphWithPerform { return prefixMapping; } - @Override - public Reifier getReifier() { - //if (reifier == null) { - // reifier = new SimpleReifier(this, ReificationStyle.Standard); - //} - return reifier; - } - @Override public GraphStatisticsHandler getStatisticsHandler() { return null; @@ -372,14 +364,6 @@ public class SparqlGraph implements GraphWithPerform { "by SPARQL graphs"); } - @Override - public QueryHandler queryHandler() { - if (queryHandler == null) { - queryHandler = new SimpleQueryHandler(this); - } - return queryHandler; - } - @Override public int size() { int size = find(null, null, null).toList().size(); @@ -388,38 +372,47 @@ public class SparqlGraph implements GraphWithPerform { private final static Capabilities capabilities = new Capabilities() { - public boolean addAllowed() { + @Override + public boolean addAllowed() { return false; } + @Override public boolean addAllowed(boolean everyTriple) { return false; } + @Override public boolean canBeEmpty() { return true; } + @Override public boolean deleteAllowed() { return false; } + @Override public boolean deleteAllowed(boolean everyTriple) { return false; } + @Override public boolean findContractSafe() { return true; } + @Override public boolean handlesLiteralTyping() { return true; } + @Override public boolean iteratorRemoveAllowed() { return false; } + @Override public boolean sizeAccurate() { return true; } @@ -455,13 +448,11 @@ public class SparqlGraph implements GraphWithPerform { // log.info((System.currentTimeMillis() - startTime1) + " to execute via sesame"); - long startTime = System.currentTimeMillis(); Query askQuery = QueryFactory.create(queryStr); QueryExecution qe = QueryExecutionFactory.sparqlService(endpointURI, askQuery); try { return new ResultSetMem(qe.execSelect()); } finally { - //log.info((System.currentTimeMillis() - startTime) + " to execute via Jena"); qe.close(); } } @@ -505,5 +496,4 @@ public class SparqlGraph implements GraphWithPerform { // sbuff.append(hexstr); } } - } diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/SparqlGraphBulkUpdater.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/SparqlGraphBulkUpdater.java index 4c2e4c39a..a89cf8fe5 100644 --- a/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/SparqlGraphBulkUpdater.java +++ b/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/SparqlGraphBulkUpdater.java @@ -19,7 +19,7 @@ import com.hp.hpl.jena.rdf.model.Model; import com.hp.hpl.jena.rdf.model.ModelFactory; import com.hp.hpl.jena.rdf.model.Statement; import com.hp.hpl.jena.rdf.model.StmtIterator; -import com.hp.hpl.jena.sparql.util.graph.GraphFactory; +import com.hp.hpl.jena.sparql.graph.GraphFactory; import com.hp.hpl.jena.util.iterator.ExtendedIterator; public class SparqlGraphBulkUpdater extends SimpleBulkUpdateHandler { diff --git 
a/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/SparqlGraphMultilingual.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/SparqlGraphMultilingual.java index 1e6aeb9de..96ee35fcc 100644 --- a/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/SparqlGraphMultilingual.java +++ b/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/SparqlGraphMultilingual.java @@ -17,7 +17,6 @@ import com.hp.hpl.jena.graph.Triple; import com.hp.hpl.jena.graph.TripleMatch; import com.hp.hpl.jena.graph.impl.GraphWithPerform; import com.hp.hpl.jena.shared.AddDeniedException; -import com.hp.hpl.jena.sparql.util.NodeFactory; import com.hp.hpl.jena.util.iterator.ExtendedIterator; import com.hp.hpl.jena.util.iterator.WrappedIterator; @@ -51,7 +50,7 @@ public class SparqlGraphMultilingual extends SparqlGraph implements GraphWithPer && t.getObject().getLiteral().getDatatypeURI() == null) { log.info("adding language tag"); super.performAdd(Triple.create(t.getSubject(), - t.getPredicate(), NodeFactory.createLiteralNode( + t.getPredicate(), Node.createLiteral( t.getObject().getLiteralLexicalForm(), langs.get(0), null))); } else { log.info("adding original triple " + t); diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/SpecialBulkUpdateHandlerGraph.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/SpecialBulkUpdateHandlerGraph.java deleted file mode 100644 index b71cc6165..000000000 --- a/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/SpecialBulkUpdateHandlerGraph.java +++ /dev/null @@ -1,111 +0,0 @@ -/* $This file is distributed under the terms of the license in /doc/license.txt$ */ - -package edu.cornell.mannlib.vitro.webapp.dao.jena; - -import com.hp.hpl.jena.graph.BulkUpdateHandler; -import com.hp.hpl.jena.graph.Capabilities; -import com.hp.hpl.jena.graph.Graph; -import com.hp.hpl.jena.graph.GraphEventManager; -import com.hp.hpl.jena.graph.GraphStatisticsHandler; -import com.hp.hpl.jena.graph.Node; -import com.hp.hpl.jena.graph.Reifier; -import com.hp.hpl.jena.graph.TransactionHandler; -import com.hp.hpl.jena.graph.Triple; -import com.hp.hpl.jena.graph.TripleMatch; -import com.hp.hpl.jena.graph.query.QueryHandler; -import com.hp.hpl.jena.shared.AddDeniedException; -import com.hp.hpl.jena.shared.DeleteDeniedException; -import com.hp.hpl.jena.shared.PrefixMapping; -import com.hp.hpl.jena.util.iterator.ExtendedIterator; - -public class SpecialBulkUpdateHandlerGraph implements Graph { - - private Graph g; - private BulkUpdateHandler b; - - public SpecialBulkUpdateHandlerGraph(Graph g, BulkUpdateHandler b) { - this.g = g; - this.b = b; - } - - public void add(Triple arg0) throws AddDeniedException { - g.add(arg0); - } - - public void close() { - g.close(); - } - - public boolean contains(Node arg0, Node arg1, Node arg2) { - return g.contains(arg0, arg1, arg2); - } - - public boolean contains(Triple arg0) { - return g.contains(arg0); - } - - public void delete(Triple arg0) throws DeleteDeniedException { - g.delete(arg0); - } - - public boolean dependsOn(Graph arg0) { - return g.dependsOn(arg0); - } - - public ExtendedIterator find(Node arg0, Node arg1, Node arg2) { - return g.find(arg0, arg1, arg2); - } - - public ExtendedIterator find(TripleMatch arg0) { - return g.find(arg0); - } - - public BulkUpdateHandler getBulkUpdateHandler() { - return b; - } - - public Capabilities getCapabilities() { - return g.getCapabilities(); - } - - public GraphEventManager getEventManager() { - return g.getEventManager(); - } - - public PrefixMapping getPrefixMapping() { - return 
g.getPrefixMapping(); - } - - public Reifier getReifier() { - return g.getReifier(); - } - - public GraphStatisticsHandler getStatisticsHandler() { - return g.getStatisticsHandler(); - } - - public TransactionHandler getTransactionHandler() { - return g.getTransactionHandler(); - } - - public boolean isClosed() { - return g.isClosed(); - } - - public boolean isEmpty() { - return g.isEmpty(); - } - - public boolean isIsomorphicWith(Graph arg0) { - return g.isIsomorphicWith(arg0); - } - - public QueryHandler queryHandler() { - return g.queryHandler(); - } - - public int size() { - return g.size(); - } - -} diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/VitroInterceptingModelMaker.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/VitroInterceptingModelMaker.java index 3255d2cd4..06b57a56c 100644 --- a/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/VitroInterceptingModelMaker.java +++ b/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/VitroInterceptingModelMaker.java @@ -2,21 +2,13 @@ package edu.cornell.mannlib.vitro.webapp.dao.jena; -import static edu.cornell.mannlib.vitro.webapp.dao.ModelAccess.ModelID.*; -import static edu.cornell.mannlib.vitro.webapp.dao.ModelAccess.ModelID.DISPLAY; -import static edu.cornell.mannlib.vitro.webapp.dao.ModelAccess.ModelID.INFERRED_FULL; -import static edu.cornell.mannlib.vitro.webapp.dao.ModelAccess.ModelID.UNION_FULL; -import static edu.cornell.mannlib.vitro.webapp.servlet.setup.JenaDataSourceSetupBase.*; - import java.util.Collection; import java.util.Collections; import java.util.HashMap; -import java.util.HashSet; import java.util.Iterator; import java.util.Map; import java.util.Set; - -import javax.servlet.ServletContext; +import java.util.TreeSet; import com.hp.hpl.jena.graph.GraphMaker; import com.hp.hpl.jena.rdf.model.Model; @@ -26,10 +18,6 @@ import com.hp.hpl.jena.shared.AlreadyExistsException; import com.hp.hpl.jena.util.iterator.ExtendedIterator; import com.hp.hpl.jena.util.iterator.NiceIterator; -import edu.cornell.mannlib.vitro.webapp.dao.ModelAccess; -import edu.cornell.mannlib.vitro.webapp.dao.ModelAccess.ModelID; -import edu.cornell.mannlib.vitro.webapp.servlet.setup.JenaDataSourceSetupBase; - /** * A decorator on top of a model maker. It looks for requests on particular * URIs, and shunts them to the pre-made models on ModelAccess. 
@@ -44,13 +32,11 @@ import edu.cornell.mannlib.vitro.webapp.servlet.setup.JenaDataSourceSetupBase; */ public class VitroInterceptingModelMaker implements ModelMaker { private final ModelMaker innerMM; - private final ServletContext ctx; private final Map specialMap; - public VitroInterceptingModelMaker(ModelMaker innerMM, ServletContext ctx) { + public VitroInterceptingModelMaker(ModelMaker innerMM, Map specialMap) { this.innerMM = innerMM; - this.ctx = ctx; - this.specialMap = populateSpecialMap(); + this.specialMap = Collections.unmodifiableMap(new HashMap<>(specialMap)); } @Override @@ -147,33 +133,6 @@ public class VitroInterceptingModelMaker implements ModelMaker { // Intercepting mechanism // ---------------------------------------------------------------------- - private Map populateSpecialMap() { - Map map = new HashMap<>(); - - map.put("vitro:jenaOntModel", - ModelAccess.on(ctx).getOntModel(UNION_FULL)); - map.put("vitro:baseOntModel", ModelAccess.on(ctx) - .getOntModel(BASE_FULL)); - map.put("vitro:inferenceOntModel", - ModelAccess.on(ctx).getOntModel(INFERRED_FULL)); - map.put(JENA_DISPLAY_METADATA_MODEL, - ModelAccess.on(ctx).getOntModel(DISPLAY)); - map.put(JENA_DISPLAY_TBOX_MODEL, - ModelAccess.on(ctx).getOntModel(DISPLAY_TBOX)); - map.put(JENA_DISPLAY_DISPLAY_MODEL, - ModelAccess.on(ctx).getOntModel(DISPLAY_DISPLAY)); - map.put(JENA_USER_ACCOUNTS_MODEL, - ModelAccess.on(ctx).getOntModel(USER_ACCOUNTS)); - map.put(JENA_TBOX_ASSERTIONS_MODEL, - ModelAccess.on(ctx).getOntModel(BASE_TBOX)); - map.put(JENA_TBOX_INF_MODEL, - ModelAccess.on(ctx).getOntModel(INFERRED_TBOX)); - map.put(JENA_APPLICATION_METADATA_MODEL, - ModelAccess.on(ctx).getOntModel(APPLICATION_METADATA)); - - return Collections.unmodifiableMap(map); - } - private Collection getSpecialNames() { return specialMap.keySet(); } @@ -191,7 +150,7 @@ public class VitroInterceptingModelMaker implements ModelMaker { @SafeVarargs public SetsExtendedIterator(Collection... 
collections) { - Set set = new HashSet<>(); + Set set = new TreeSet<>(); for (Collection c : collections) { set.addAll(c); } diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/VitroJenaModelMaker.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/VitroJenaModelMaker.java deleted file mode 100644 index 155f3f543..000000000 --- a/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/VitroJenaModelMaker.java +++ /dev/null @@ -1,328 +0,0 @@ -/* $This file is distributed under the terms of the license in /doc/license.txt$ */ - -package edu.cornell.mannlib.vitro.webapp.dao.jena; - -import java.sql.SQLException; -import java.util.HashMap; -import java.util.List; - -import javax.servlet.ServletContext; -import javax.servlet.http.HttpServletRequest; -import javax.sql.DataSource; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - -import com.hp.hpl.jena.db.DBConnection; -import com.hp.hpl.jena.graph.Graph; -import com.hp.hpl.jena.graph.GraphMaker; -import com.hp.hpl.jena.ontology.OntModel; -import com.hp.hpl.jena.ontology.OntModelSpec; -import com.hp.hpl.jena.rdf.model.Model; -import com.hp.hpl.jena.rdf.model.ModelFactory; -import com.hp.hpl.jena.rdf.model.ModelMaker; -import com.hp.hpl.jena.rdf.model.ModelReader; -import com.hp.hpl.jena.rdf.model.Resource; -import com.hp.hpl.jena.util.iterator.ExtendedIterator; -import com.hp.hpl.jena.util.iterator.WrappedIterator; - -import edu.cornell.mannlib.vitro.webapp.config.ConfigurationProperties; -import edu.cornell.mannlib.vitro.webapp.dao.ModelAccess; -import edu.cornell.mannlib.vitro.webapp.servlet.setup.JenaDataSourceSetupBase; - -/** - * This is a bit of a nutty idea but we'll see if it works. This can wrap an RDBModelMaker and return a memory model - * synced with the underlying RDB model. Note, however, that a Jena RDBModelMaker won't auto-reconnect. Maybe I can - * revisit the reconnecting IDBConnection issue or make a special RDBModelMaker that uses the reconnection system. - * - * @author bjl23 - * - */ - -public class VitroJenaModelMaker implements ModelMaker { - - private static final Log log = LogFactory.getLog(VitroJenaModelMaker.class); - private static final String DEFAULT_DRIVER = "com.mysql.jdbc.Driver"; - - private String jdbcUrl; - private String username; - private String password; - private String dbTypeStr; - private DataSource dataSource; - private HashMap modelCache; - private HttpServletRequest request = null; - - public VitroJenaModelMaker(String jdbcUrl, String username, String password, String dbTypeStr, ServletContext ctx) { - this.jdbcUrl = jdbcUrl; - this.username = username; - this.password = password; - this.dbTypeStr = dbTypeStr; - String driverName = ConfigurationProperties.getBean(ctx).getProperty( - "VitroConnection.DataSource.driver"); - // This property is no longer used? - // We'll change it all around in 1.2 anyway. - if(driverName == null) { - driverName = DEFAULT_DRIVER; - } - this.dataSource = JenaDataSourceSetupBase.makeBasicDataSource( - driverName, - jdbcUrl, username, password, ctx); - modelCache = new HashMap(); - } - - protected HashMap getCache() { - return this.modelCache; - } - - @Override - public void close() { - // TODO Auto-generated method stub - // So, in theory, this should close database connections and drop references - // to in-memory models and all that kind of stuff. 
- } - - @Override - public Model createModel(String arg0) { - Model specialModel = null; - if ( (specialModel = getSpecialModel(arg0)) != null ) { return specialModel; } - Model cachedModel = modelCache.get(arg0); - if (cachedModel != null) { - log.debug("Returning "+arg0+" ("+cachedModel.hashCode()+") from cache"); - return cachedModel; - } else { - Model newModel = makeDBModel(arg0); - modelCache.put(arg0,newModel); - log.debug("Returning "+arg0+" ("+newModel.hashCode()+") from cache"); - return newModel; - } - } - - @Override - public Model createModel(String arg0, boolean arg1) { - Model specialModel = null; - if ( (specialModel = getSpecialModel(arg0)) != null ) { return specialModel; } - Model cachedModel = modelCache.get(arg0); - if (cachedModel != null) { - return cachedModel; - } else { - Model newModel = makeDBModel(arg0); - modelCache.put(arg0,newModel); - return newModel; - } - } - - @Override - public GraphMaker getGraphMaker() { - throw new UnsupportedOperationException(this.getClass().getName() + - " does not support getGraphMaker()"); - } - - @Override - public boolean hasModel(String arg0) { - DBConnection conn = new DBConnection(jdbcUrl, username, password, dbTypeStr); - try { - return ModelFactory.createModelRDBMaker(conn).hasModel(arg0); - } finally { - try { - conn.close(); - } catch (SQLException sqle) { - throw new RuntimeException(sqle); - } - } - } - - @Override - public ExtendedIterator listModels() { - DBConnection conn = new DBConnection(jdbcUrl, username, password, dbTypeStr); - try { - List modelList = ModelFactory.createModelRDBMaker(conn).listModels().toList(); - return WrappedIterator.create(modelList.iterator()); - } finally { - try { - conn.close(); - } catch (SQLException sqle) { - throw new RuntimeException(sqle); - } - } - } - - @Override - public Model openModel(String arg0, boolean arg1) { - Model specialModel = null; - if ( (specialModel = getSpecialModel(arg0)) != null ) { return specialModel; } - Model cachedModel = modelCache.get(arg0); - if (cachedModel != null) { - return cachedModel; - } else { - Model newModel = makeDBModel(arg0); - modelCache.put(arg0,newModel); - return newModel; - } - } - - @Override - public void removeModel(String arg0) { - Model m = modelCache.get(arg0); - if (m != null) { - m.close(); - modelCache.remove(arg0); - } - DBConnection conn = new DBConnection(jdbcUrl, username, password, dbTypeStr); - try { - ModelFactory.createModelRDBMaker(conn).removeModel(arg0); - } finally { - try { - conn.close(); - } catch (SQLException sqle) { - throw new RuntimeException(sqle); - } - } - } - - - public Model addDescription(Model arg0, Resource arg1) { - // TODO Auto-generated method stub - return null; - } - - - public Model createModelOver(String arg0) { - // TODO Auto-generated method stub - return null; - } - - - public Model getDescription() { - // TODO Auto-generated method stub - return null; - } - - - public Model getDescription(Resource arg0) { - // TODO Auto-generated method stub - return null; - } - - @Override - public Model createDefaultModel() { - throw new UnsupportedOperationException(this.getClass().getName() + - " does not support createDefaultModel()"); - } - - - @Override - public Model createFreshModel() { - throw new UnsupportedOperationException(this.getClass().getName() + - " does not support createFreshModel()"); } - - @Deprecated - public Model createModel() { - // TODO Auto-generated method stub - return null; - } - - @Deprecated - public Model getModel() { - // TODO Auto-generated method stub - return 
null; - } - - - @Override - public Model openModel(String arg0) { - Model specialModel = null; - if ( (specialModel = getSpecialModel(arg0)) != null ) { return specialModel; } - Model cachedModel = modelCache.get(arg0); - if (cachedModel != null) { - return cachedModel; - } else { - Model newModel = makeDBModel(arg0); - modelCache.put(arg0,newModel); - return newModel; - } - } - - - @Override - public Model openModelIfPresent(String arg0) { - Model specialModel = null; - if ( (specialModel = getSpecialModel(arg0)) != null ) { return specialModel; } - Model cachedModel = modelCache.get(arg0); - if (cachedModel != null) { - return cachedModel; - } else { - Model newModel = makeDBModel(arg0); - modelCache.put(arg0,newModel); - return newModel; - } - } - - - @Override - public Model getModel(String arg0) { - Model specialModel = null; - if ( (specialModel = getSpecialModel(arg0)) != null ) { return specialModel; } - Model cachedModel = modelCache.get(arg0); - if (cachedModel != null) { - return cachedModel; - } else { - Model newModel = makeDBModel(arg0); - modelCache.put(arg0,newModel); - return newModel; - } - } - - - @Override - public Model getModel(String arg0, ModelReader arg1) { - Model specialModel = null; - if ( (specialModel = getSpecialModel(arg0)) != null ) { return specialModel; } - Model cachedModel = modelCache.get(arg0); - if (cachedModel != null) { - return cachedModel; - } else { - Model newModel = makeDBModel(arg0); - modelCache.put(arg0,newModel); - return newModel; - } - } - - /** - * This will trap for strings like "vitro:jenaOntModel" and return the - * appropriate in-memory model used by the current webapp context. - * To use this functionality, the VitroJenaModelMaker must be constructed - * with a VitroRequest parameter - */ - private Model getSpecialModel(String modelName) { - if (request != null) { - if ("vitro:jenaOntModel".equals(modelName)) { - return ModelAccess.on(request.getSession()).getJenaOntModel(); - } else if ("vitro:baseOntModel".equals(modelName)) { - return ModelAccess.on(request.getSession()).getBaseOntModel(); - } else if ("vitro:inferenceOntModel".equals(modelName)) { - return ModelAccess.on(request.getSession()).getInferenceOntModel(); - } else { - return null; - } - } - return null; - } - - private OntModel makeDBModel(String jenaDbModelName) { - OntModel memCache = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM); - RDBGraphGenerator gen = new RDBGraphGenerator(dataSource, dbTypeStr, jenaDbModelName); - Graph g = gen.generateGraph(); - Model m = ModelFactory.createModelForGraph(g); - memCache.add(m); - memCache.register(new MemToDBModelSynchronizer(gen)); - m.close(); - try { - gen.getConnection().close(); - } catch (SQLException e) { - log.warn("Unable to close connection for graph", e); - } - // This next piece is so that we return a fresh model object each time so we don't get cross-contamination of extra listeners, etc. 
- return ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM, ModelFactory.createUnion(memCache, ModelFactory.createDefaultModel())); - } - -} diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/WebappDaoFactoryJena.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/WebappDaoFactoryJena.java index d75f4a204..8939758a4 100644 --- a/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/WebappDaoFactoryJena.java +++ b/webapp/src/edu/cornell/mannlib/vitro/webapp/dao/jena/WebappDaoFactoryJena.java @@ -17,7 +17,6 @@ import com.hp.hpl.jena.iri.IRIFactory; import com.hp.hpl.jena.ontology.OntModel; import com.hp.hpl.jena.ontology.OntModelSpec; import com.hp.hpl.jena.ontology.OntResource; -import com.hp.hpl.jena.query.DataSource; import com.hp.hpl.jena.query.Dataset; import com.hp.hpl.jena.query.DatasetFactory; import com.hp.hpl.jena.rdf.model.Literal; @@ -155,7 +154,7 @@ public class WebappDaoFactoryJena implements WebappDaoFactory { public static Dataset makeInMemoryDataset(Model assertions, Model inferences) { - DataSource dataset = DatasetFactory.create(); + Dataset dataset = DatasetFactory.createMem(); OntModel union = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM); if (assertions != null) { dataset.addNamedModel( @@ -177,20 +176,20 @@ public class WebappDaoFactoryJena implements WebappDaoFactory { return this.properties; } - public String checkURI(String uriStr) { + @Override + public String checkURI(String uriStr) { return checkURI(uriStr, true); } - public String checkURI(String uriStr, boolean checkUniqueness) { + @Override + public String checkURI(String uriStr, boolean checkUniqueness) { uriStr = (uriStr == null) ? " " : uriStr; - boolean validURI = true; String errorMsg = ""; String duplicateMsg = "URI is already in use. " + "Please enter another URI. "; IRIFactory factory = IRIFactory.jenaImplementation(); IRI iri = factory.create( uriStr ); if (iri.hasViolation(false) ) { - validURI = false; errorMsg += (iri.violations(false).next()) .getShortMessage() + " "; } else if (checkUniqueness) { @@ -198,11 +197,6 @@ public class WebappDaoFactoryJena implements WebappDaoFactory { if(existingURI) { errorMsg+="Not a valid URI. Please enter another URI. "; errorMsg+=duplicateMsg; - //the original code included an extra line "Not a valid URI. Please enter another URI. " - //in the error message in addition to the duplicate error message in the case where the uri - //is in the subject position of any of the statements in the system - but not so where the - //uri was only in the object position or was a propery. In this code, the same error message - //is returned for all duplicate uris } } return (errorMsg.length()>0) ? 
errorMsg : null; @@ -211,33 +205,36 @@ public class WebappDaoFactoryJena implements WebappDaoFactory { //Check if URI already in use or not either as resource OR as property - public boolean hasExistingURI(String uriStr) { + @Override + public boolean hasExistingURI(String uriStr) { OntModel ontModel = ontModelSelector.getFullModel(); return URIUtils.hasExistingURI(uriStr, ontModel); } - - - - public WebappDaoFactory getUserAwareDaoFactory(String userURI) { + @Override + public WebappDaoFactory getUserAwareDaoFactory(String userURI) { return new WebappDaoFactoryJena(this, userURI); } - public String getUserURI() { + @Override + public String getUserURI() { return userURI; } /* **************** accessors ***************** */ - public String getDefaultNamespace() { + @Override + public String getDefaultNamespace() { return config.getDefaultNamespace(); } - public List getPreferredLanguages() { + @Override + public List getPreferredLanguages() { return config.getPreferredLanguages(); } - public Set getNonuserNamespaces() { + @Override + public Set getNonuserNamespaces() { return config.getNonUserNamespaces(); } @@ -253,7 +250,8 @@ public class WebappDaoFactoryJena implements WebappDaoFactory { return this.pelletListener; } - public List getCommentsForResource(String resourceURI) { + @Override + public List getCommentsForResource(String resourceURI) { List commentList = new LinkedList(); OntModel ontModel = ontModelSelector.getFullModel(); ontModel.enterCriticalSection(Lock.READ); @@ -276,14 +274,16 @@ public class WebappDaoFactoryJena implements WebappDaoFactory { return commentList; } - public IndividualDao getIndividualDao() { + @Override + public IndividualDao getIndividualDao() { if (entityWebappDao != null) return entityWebappDao; else return entityWebappDao = new IndividualDaoJena(this); } - public ApplicationDao getApplicationDao() { + @Override + public ApplicationDao getApplicationDao() { if (applicationDao != null) { return applicationDao; } else { @@ -291,21 +291,24 @@ public class WebappDaoFactoryJena implements WebappDaoFactory { } } - public VClassGroupDao getVClassGroupDao() { + @Override + public VClassGroupDao getVClassGroupDao() { if (vClassGroupDao != null) return vClassGroupDao; else return vClassGroupDao = new VClassGroupDaoJena(this); } - public PropertyGroupDao getPropertyGroupDao() { + @Override + public PropertyGroupDao getPropertyGroupDao() { if (propertyGroupDao != null) return propertyGroupDao; else return propertyGroupDao = new PropertyGroupDaoJena(this); } - public UserAccountsDao getUserAccountsDao() { + @Override + public UserAccountsDao getUserAccountsDao() { if (userAccountsDao != null) return userAccountsDao; else @@ -313,7 +316,8 @@ public class WebappDaoFactoryJena implements WebappDaoFactory { } DataPropertyStatementDao dataPropertyStatementDao = null; - public DataPropertyStatementDao getDataPropertyStatementDao() { + @Override + public DataPropertyStatementDao getDataPropertyStatementDao() { if( dataPropertyStatementDao == null ) dataPropertyStatementDao = new DataPropertyStatementDaoJena( dwf, this); @@ -321,14 +325,16 @@ public class WebappDaoFactoryJena implements WebappDaoFactory { } DatatypeDao datatypeDao = null; - public DatatypeDao getDatatypeDao() { + @Override + public DatatypeDao getDatatypeDao() { if( datatypeDao == null ) datatypeDao = new DatatypeDaoJena(this); return datatypeDao; } DataPropertyDao dataPropertyDao = null; - public DataPropertyDao getDataPropertyDao() { + @Override + public DataPropertyDao getDataPropertyDao() { if( 
dataPropertyDao == null ) dataPropertyDao = new DataPropertyDaoJena(rdfService, dwf, this); return dataPropertyDao; @@ -342,7 +348,8 @@ public class WebappDaoFactoryJena implements WebappDaoFactory { } ObjectPropertyStatementDao objectPropertyStatementDao = null; - public ObjectPropertyStatementDao getObjectPropertyStatementDao() { + @Override + public ObjectPropertyStatementDao getObjectPropertyStatementDao() { if( objectPropertyStatementDao == null ) // TODO supply a valid RDFService as the first argument if we keep this // implementation @@ -352,14 +359,16 @@ public class WebappDaoFactoryJena implements WebappDaoFactory { } private OntologyDao ontologyDao = null; - public OntologyDao getOntologyDao() { + @Override + public OntologyDao getOntologyDao() { if( ontologyDao == null ) ontologyDao = new OntologyDaoJena(this); return ontologyDao; } private ObjectPropertyDao objectPropertyDao = null; - public ObjectPropertyDao getObjectPropertyDao() { + @Override + public ObjectPropertyDao getObjectPropertyDao() { if( objectPropertyDao == null ) objectPropertyDao = new ObjectPropertyDaoJena( rdfService, dwf, config.customListViewConfigFileMap, this); @@ -367,14 +376,16 @@ public class WebappDaoFactoryJena implements WebappDaoFactory { } private PropertyInstanceDao propertyInstanceDao = null; - public PropertyInstanceDao getPropertyInstanceDao() { + @Override + public PropertyInstanceDao getPropertyInstanceDao() { if( propertyInstanceDao == null ) propertyInstanceDao = new PropertyInstanceDaoJena(rdfService, dwf, this); return propertyInstanceDao; } protected VClassDao vClassDao = null; - public VClassDao getVClassDao() { + @Override + public VClassDao getVClassDao() { if( vClassDao == null ) vClassDao = new VClassDaoJena(this, config.isUnderlyingStoreReasoned()); return vClassDao; diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/filters/ModelSwitcher.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/filters/ModelSwitcher.java index 94eb2dffa..b1de19092 100644 --- a/webapp/src/edu/cornell/mannlib/vitro/webapp/filters/ModelSwitcher.java +++ b/webapp/src/edu/cornell/mannlib/vitro/webapp/filters/ModelSwitcher.java @@ -7,10 +7,11 @@ import static edu.cornell.mannlib.vitro.webapp.dao.DisplayVocabulary.SWITCH_TO_D import static edu.cornell.mannlib.vitro.webapp.dao.DisplayVocabulary.USE_DISPLAY_MODEL_PARAM; import static edu.cornell.mannlib.vitro.webapp.dao.DisplayVocabulary.USE_MODEL_PARAM; import static edu.cornell.mannlib.vitro.webapp.dao.DisplayVocabulary.USE_TBOX_MODEL_PARAM; +import static edu.cornell.mannlib.vitro.webapp.rdfservice.impl.RDFServiceUtils.WhichService.CONFIGURATION; +import static edu.cornell.mannlib.vitro.webapp.rdfservice.impl.RDFServiceUtils.WhichService.CONTENT; import javax.servlet.ServletContext; import javax.servlet.http.HttpServletRequest; -import javax.sql.DataSource; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; @@ -23,14 +24,16 @@ import com.hp.hpl.jena.rdf.model.ModelFactory; import edu.cornell.mannlib.vitro.webapp.auth.permissions.SimplePermission; import edu.cornell.mannlib.vitro.webapp.auth.policy.PolicyHelper; -import edu.cornell.mannlib.vitro.webapp.config.ConfigurationProperties; import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest; import edu.cornell.mannlib.vitro.webapp.dao.ModelAccess; import edu.cornell.mannlib.vitro.webapp.dao.ModelAccess.ModelID; import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactory; +import edu.cornell.mannlib.vitro.webapp.dao.jena.RDFServiceDataset; import 
edu.cornell.mannlib.vitro.webapp.dao.jena.VitroModelSource; import edu.cornell.mannlib.vitro.webapp.dao.jena.WebappDaoFactoryJena; -import edu.cornell.mannlib.vitro.webapp.servlet.setup.JenaDataSourceSetupBase; +import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService; +import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.RDFServiceUtils; +import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.RDFServiceUtils.WhichService; /** * Handle model switching, if requested for the editing framework. @@ -97,13 +100,9 @@ public class ModelSwitcher { // If they asked for other models by URI, set them. if (anyOtherSpecialProperties(vreq)) { - DataSource bds = JenaDataSourceSetupBase.getApplicationDataSource(_context); - String dbType = ConfigurationProperties.getBean(_context) - .getProperty("VitroConnection.DataSource.dbtype", "MySQL"); - - OntModel mainOntModel = createSpecialModel(vreq, USE_MODEL_PARAM, bds, dbType); - OntModel tboxOntModel = createSpecialModel(vreq, USE_TBOX_MODEL_PARAM, bds, dbType); - OntModel displayOntModel = createSpecialModel(vreq, USE_DISPLAY_MODEL_PARAM, bds, dbType); + OntModel mainOntModel = createSpecialModel(vreq, USE_MODEL_PARAM); + OntModel tboxOntModel = createSpecialModel(vreq, USE_TBOX_MODEL_PARAM); + OntModel displayOntModel = createSpecialModel(vreq, USE_DISPLAY_MODEL_PARAM); vreq.setAttribute(VitroRequest.ID_FOR_ABOX_MODEL, vreq.getParameter(USE_MODEL_PARAM)); vreq.setAttribute(VitroRequest.ID_FOR_WRITE_MODEL, vreq.getParameter(USE_MODEL_PARAM)); @@ -160,23 +159,39 @@ public class ModelSwitcher { * @throws IllegalStateException * if it's not found. */ - private OntModel createSpecialModel(VitroRequest vreq, String key, - DataSource bds, String dbType) { + private OntModel createSpecialModel(VitroRequest vreq, String key) { if (!isParameterPresent(vreq, key)) { return null; } String modelUri = vreq.getParameter(key); - Model model = JenaDataSourceSetupBase.makeDBModel(bds, modelUri, - OntModelSpec.OWL_MEM, - JenaDataSourceSetupBase.TripleStoreType.RDB, dbType, vreq.getSession().getServletContext()); - if (model != null) { - return ModelFactory - .createOntologyModel(OntModelSpec.OWL_MEM, model); - } else { + + OntModel ont = findModelInRdfService(vreq, modelUri, CONFIGURATION); + if (ont == null) { + ont = findModelInRdfService(vreq, modelUri, CONTENT); + } + if (ont == null) { throw new IllegalStateException("Main Model Uri " + modelUri + " did not retrieve model"); } + return ont; + } + + private OntModel findModelInRdfService(VitroRequest vreq, String modelUri, + WhichService which) { + try { + RDFService rdfService = RDFServiceUtils.getRDFService(vreq, which); + if (!rdfService.getGraphURIs().contains(modelUri)) { + return null; + } + + Model m = new RDFServiceDataset(rdfService).getNamedModel(modelUri); + return ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM, m); + } catch (Exception e) { + log.error("failed to find model: '" + modelUri + "' in RDFService " + + which, e); + return null; + } } } diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/filters/RequestModelsPrep.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/filters/RequestModelsPrep.java index e1384d907..ef847272e 100644 --- a/webapp/src/edu/cornell/mannlib/vitro/webapp/filters/RequestModelsPrep.java +++ b/webapp/src/edu/cornell/mannlib/vitro/webapp/filters/RequestModelsPrep.java @@ -26,15 +26,12 @@ import javax.servlet.http.HttpServletResponse; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.openjena.atlas.lib.Pair; +import 
org.apache.jena.atlas.lib.Pair; -import com.hp.hpl.jena.graph.BulkUpdateHandler; -import com.hp.hpl.jena.graph.Graph; import com.hp.hpl.jena.ontology.OntModel; import com.hp.hpl.jena.ontology.OntModelSpec; import com.hp.hpl.jena.query.Dataset; -import com.hp.hpl.jena.rdf.model.Model; -import com.hp.hpl.jena.rdf.model.ModelFactory; +//import com.hp.hpl.jena.rdf.model.ModelFactory; import edu.cornell.mannlib.vitro.webapp.auth.identifier.RequestIdentifiers; import edu.cornell.mannlib.vitro.webapp.auth.policy.ServletPolicyList; @@ -52,10 +49,10 @@ import edu.cornell.mannlib.vitro.webapp.dao.filtering.filters.HideFromDisplayByP import edu.cornell.mannlib.vitro.webapp.dao.jena.OntModelSelector; import edu.cornell.mannlib.vitro.webapp.dao.jena.OntModelSelectorImpl; import edu.cornell.mannlib.vitro.webapp.dao.jena.RDFServiceDataset; -import edu.cornell.mannlib.vitro.webapp.dao.jena.SpecialBulkUpdateHandlerGraph; import edu.cornell.mannlib.vitro.webapp.dao.jena.WebappDaoFactorySDB; import edu.cornell.mannlib.vitro.webapp.dao.jena.WebappDaoFactorySDB.SDBDatasetMode; import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService; +import edu.cornell.mannlib.vitro.webapp.rdfservice.adapters.VitroModelFactory; import edu.cornell.mannlib.vitro.webapp.rdfservice.filter.LanguageFilteringRDFService; import edu.cornell.mannlib.vitro.webapp.rdfservice.filter.LanguageFilteringUtils; import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.RDFServiceUtils; @@ -197,19 +194,20 @@ public class RequestModelsPrep implements Filter { // Anything derived from the ABOX is not memory-mapped, so create // versions from the short-term RDF service. - OntModel baseABoxModel = createNamedModelFromDataset(dataset, - JENA_DB_MODEL); - OntModel inferenceABoxModel = createNamedModelFromDataset(dataset, - JENA_INF_MODEL); - OntModel unionABoxModel = createCombinedBulkUpdatingModel( + OntModel baseABoxModel = VitroModelFactory.createOntologyModel(dataset + .getNamedModel(JENA_DB_MODEL)); + OntModel inferenceABoxModel = VitroModelFactory + .createOntologyModel(dataset.getNamedModel(JENA_INF_MODEL)); + OntModel unionABoxModel = VitroModelFactory.createUnion( baseABoxModel, inferenceABoxModel); - OntModel baseFullModel = createCombinedBulkUpdatingModel(baseABoxModel, + OntModel baseFullModel = VitroModelFactory.createUnion(baseABoxModel, ModelAccess.on(vreq).getOntModel(ModelID.BASE_TBOX)); - OntModel inferenceFullModel = createCombinedModel(inferenceABoxModel, + OntModel inferenceFullModel = VitroModelFactory.createUnion( + inferenceABoxModel, ModelAccess.on(vreq).getOntModel(ModelID.INFERRED_TBOX)); - OntModel unionFullModel = ModelFactory.createOntologyModel( - OntModelSpec.OWL_MEM, dataset.getDefaultModel()); + OntModel unionFullModel = VitroModelFactory.createOntologyModel( + dataset.getDefaultModel()); ModelAccess.on(vreq).setOntModel(ModelID.BASE_ABOX, baseABoxModel); ModelAccess.on(vreq).setOntModel(ModelID.INFERRED_ABOX, inferenceABoxModel); @@ -224,24 +222,6 @@ public class RequestModelsPrep implements Filter { ModelAccess.on(vreq).setOntModel(modelId, contextModel); } - private OntModel createNamedModelFromDataset(Dataset dataset, String name) { - return ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM, dataset.getNamedModel(name)); - } - - private OntModel createCombinedModel(OntModel oneModel, OntModel otherModel) { - return ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM, - ModelFactory.createUnion(oneModel, otherModel)); - } - - private OntModel createCombinedBulkUpdatingModel(OntModel baseModel, - OntModel 
otherModel) { - BulkUpdateHandler bulkUpdateHandler = baseModel.getGraph().getBulkUpdateHandler(); - Graph unionGraph = ModelFactory.createUnion(baseModel, otherModel).getGraph(); - Model unionModel = ModelFactory.createModelForGraph( - new SpecialBulkUpdateHandlerGraph(unionGraph, bulkUpdateHandler)); - return ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM, unionModel); - } - /** Create an OntModelSelector that will hold the un-language-filtered models. */ private OntModelSelector createLanguageNeutralOntModelSelector( VitroRequest vreq) { diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/adapters/AbstractModelDecorator.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/adapters/AbstractModelDecorator.java new file mode 100644 index 000000000..5212ff1fe --- /dev/null +++ b/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/adapters/AbstractModelDecorator.java @@ -0,0 +1,1059 @@ +package edu.cornell.mannlib.vitro.webapp.rdfservice.adapters; + +import java.io.InputStream; +import java.io.OutputStream; +import java.io.Reader; +import java.io.Writer; +import java.util.Calendar; +import java.util.Iterator; +import java.util.List; +import java.util.Map; + +import com.hp.hpl.jena.datatypes.RDFDatatype; +import com.hp.hpl.jena.graph.Graph; +import com.hp.hpl.jena.graph.Node; +import com.hp.hpl.jena.graph.Triple; +import com.hp.hpl.jena.rdf.model.Alt; +import com.hp.hpl.jena.rdf.model.AnonId; +import com.hp.hpl.jena.rdf.model.Bag; +import com.hp.hpl.jena.rdf.model.Literal; +import com.hp.hpl.jena.rdf.model.Model; +import com.hp.hpl.jena.rdf.model.ModelChangedListener; +import com.hp.hpl.jena.rdf.model.NodeIterator; +import com.hp.hpl.jena.rdf.model.NsIterator; +import com.hp.hpl.jena.rdf.model.Property; +import com.hp.hpl.jena.rdf.model.RDFList; +import com.hp.hpl.jena.rdf.model.RDFNode; +import com.hp.hpl.jena.rdf.model.RDFReader; +import com.hp.hpl.jena.rdf.model.RDFWriter; +import com.hp.hpl.jena.rdf.model.RSIterator; +import com.hp.hpl.jena.rdf.model.ReifiedStatement; +import com.hp.hpl.jena.rdf.model.ResIterator; +import com.hp.hpl.jena.rdf.model.Resource; +import com.hp.hpl.jena.rdf.model.ResourceF; +import com.hp.hpl.jena.rdf.model.Selector; +import com.hp.hpl.jena.rdf.model.Seq; +import com.hp.hpl.jena.rdf.model.Statement; +import com.hp.hpl.jena.rdf.model.StmtIterator; +import com.hp.hpl.jena.shared.Command; +import com.hp.hpl.jena.shared.Lock; +import com.hp.hpl.jena.shared.PrefixMapping; +import com.hp.hpl.jena.shared.ReificationStyle; + +/** + * The base class for a delegating model decorator. + * + * As implemented, all methods simply delegate to the inner model. Subclasses + * should override selected methods to provide functionality. 
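A minimal illustrative sketch of how this delegating decorator is meant to be used; the subclass name LoggingModel and its println behavior are hypothetical and not part of this changeset. The idea: extend AbstractModelDecorator, override only the calls you care about, and inherit the pass-through behavior for everything else.

    import com.hp.hpl.jena.rdf.model.Model;
    import com.hp.hpl.jena.rdf.model.Statement;

    import edu.cornell.mannlib.vitro.webapp.rdfservice.adapters.AbstractModelDecorator;

    // Hypothetical subclass: log single-statement additions, delegate everything else.
    public class LoggingModel extends AbstractModelDecorator {
        public LoggingModel(Model inner) {
            super(inner);
        }

        @Override
        public Model add(Statement s) {
            System.out.println("adding " + s);
            return super.add(s); // AbstractModelDecorator forwards this to the inner model
        }
    }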
+ */ +@SuppressWarnings("deprecation") +public abstract class AbstractModelDecorator implements Model { + private final Model inner; + + protected AbstractModelDecorator(Model m) { + if (m == null) { + throw new NullPointerException("m may not be null."); + } + this.inner = m; + } + + @Override + public Resource getResource(String uri, ResourceF f) { + return inner.getResource(uri, f); + } + + @Override + public Property getProperty(String uri) { + return inner.getProperty(uri); + } + + @Override + public Bag getBag(String uri) { + return inner.getBag(uri); + } + + @Override + public Bag getBag(Resource r) { + return inner.getBag(r); + } + + @Override + public Alt getAlt(String uri) { + return inner.getAlt(uri); + } + + @Override + public Alt getAlt(Resource r) { + return inner.getAlt(r); + } + + @Override + public Seq getSeq(String uri) { + return inner.getSeq(uri); + } + + @Override + public Seq getSeq(Resource r) { + return inner.getSeq(r); + } + + @Override + public Resource createResource(Resource type) { + return inner.createResource(type); + } + + @Override + public RDFNode getRDFNode(Node n) { + return inner.getRDFNode(n); + } + + @Override + public Resource createResource(String uri, Resource type) { + return inner.createResource(uri, type); + } + + @Override + public Resource createResource(ResourceF f) { + return inner.createResource(f); + } + + @Override + public Resource createResource(String uri, ResourceF f) { + return inner.createResource(uri, f); + } + + @Override + public Property createProperty(String uri) { + return inner.createProperty(uri); + } + + @Override + public Literal createLiteral(String v) { + return inner.createLiteral(v); + } + + @Override + public Literal createTypedLiteral(boolean v) { + return inner.createTypedLiteral(v); + } + + @Override + public Literal createTypedLiteral(int v) { + return inner.createTypedLiteral(v); + } + + @Override + public Literal createTypedLiteral(long v) { + return inner.createTypedLiteral(v); + } + + @Override + public Literal createTypedLiteral(Calendar d) { + return inner.createTypedLiteral(d); + } + + @Override + public Literal createTypedLiteral(char v) { + return inner.createTypedLiteral(v); + } + + @Override + public Literal createTypedLiteral(float v) { + return inner.createTypedLiteral(v); + } + + @Override + public Literal createTypedLiteral(double v) { + return inner.createTypedLiteral(v); + } + + @Override + public Literal createTypedLiteral(String v) { + return inner.createTypedLiteral(v); + } + + @Override + public Literal createTypedLiteral(String lex, String typeURI) { + return inner.createTypedLiteral(lex, typeURI); + } + + @Override + public Literal createTypedLiteral(Object value, String typeURI) { + return inner.createTypedLiteral(value, typeURI); + } + + @Override + public Statement createLiteralStatement(Resource s, Property p, boolean o) { + return inner.createLiteralStatement(s, p, o); + } + + @Override + public Statement createLiteralStatement(Resource s, Property p, float o) { + return inner.createLiteralStatement(s, p, o); + } + + @Override + public Statement createLiteralStatement(Resource s, Property p, double o) { + return inner.createLiteralStatement(s, p, o); + } + + @Override + public Statement createLiteralStatement(Resource s, Property p, long o) { + return inner.createLiteralStatement(s, p, o); + } + + @Override + public Statement createLiteralStatement(Resource s, Property p, int o) { + return inner.createLiteralStatement(s, p, o); + } + + @Override + public Statement 
createLiteralStatement(Resource s, Property p, char o) { + return inner.createLiteralStatement(s, p, o); + } + + @Override + public Statement createLiteralStatement(Resource s, Property p, Object o) { + return inner.createLiteralStatement(s, p, o); + } + + @Override + public Statement createStatement(Resource s, Property p, String o) { + return inner.createStatement(s, p, o); + } + + @Override + public Statement createStatement(Resource s, Property p, String o, String l) { + return inner.createStatement(s, p, o, l); + } + + @Override + public Statement createStatement(Resource s, Property p, String o, + boolean wellFormed) { + return inner.createStatement(s, p, o, wellFormed); + } + + @Override + public Statement createStatement(Resource s, Property p, String o, + String l, boolean wellFormed) { + return inner.createStatement(s, p, o, l, wellFormed); + } + + @Override + public Bag createBag() { + return inner.createBag(); + } + + @Override + public Bag createBag(String uri) { + return inner.createBag(uri); + } + + @Override + public Alt createAlt() { + return inner.createAlt(); + } + + @Override + public PrefixMapping setNsPrefix(String prefix, String uri) { + return inner.setNsPrefix(prefix, uri); + } + + @Override + public PrefixMapping removeNsPrefix(String prefix) { + return inner.removeNsPrefix(prefix); + } + + @Override + public PrefixMapping setNsPrefixes(PrefixMapping other) { + return inner.setNsPrefixes(other); + } + + @Override + public PrefixMapping setNsPrefixes(Map map) { + return inner.setNsPrefixes(map); + } + + @Override + public PrefixMapping withDefaultMappings(PrefixMapping map) { + return inner.withDefaultMappings(map); + } + + @Override + public String getNsPrefixURI(String prefix) { + return inner.getNsPrefixURI(prefix); + } + + @Override + public String getNsURIPrefix(String uri) { + return inner.getNsURIPrefix(uri); + } + + @Override + public Map getNsPrefixMap() { + return inner.getNsPrefixMap(); + } + + @Override + public String expandPrefix(String prefixed) { + return inner.expandPrefix(prefixed); + } + + @Override + public String shortForm(String uri) { + return inner.shortForm(uri); + } + + @Override + public String qnameFor(String uri) { + return inner.qnameFor(uri); + } + + @Override + public PrefixMapping lock() { + return inner.lock(); + } + + @Override + public boolean samePrefixMappingAs(PrefixMapping other) { + return inner.samePrefixMappingAs(other); + } + + @Override + public Statement asStatement(Triple t) { + return inner.asStatement(t); + } + + @Override + public Graph getGraph() { + return inner.getGraph(); + } + + @Override + public RDFNode asRDFNode(Node n) { + return inner.asRDFNode(n); + } + + @Override + public Resource wrapAsResource(Node n) { + return inner.wrapAsResource(n); + } + + @Override + public RDFReader getReader() { + return inner.getReader(); + } + + @Override + public RDFReader getReader(String lang) { + return inner.getReader(lang); + } + + @Override + public String setReaderClassName(String lang, String className) { + return inner.setReaderClassName(lang, className); + } + + @Override + public RDFWriter getWriter() { + return inner.getWriter(); + } + + @Override + public RDFWriter getWriter(String lang) { + return inner.getWriter(lang); + } + + @Override + public String setWriterClassName(String lang, String className) { + return inner.setWriterClassName(lang, className); + } + + @Override + public Alt createAlt(String uri) { + return inner.createAlt(uri); + } + + @Override + public Seq createSeq() { + return 
inner.createSeq(); + } + + @Override + public Seq createSeq(String uri) { + return inner.createSeq(uri); + } + + @Override + public Model add(Resource s, Property p, RDFNode o) { + return inner.add(s, p, o); + } + + @Override + public Model addLiteral(Resource s, Property p, boolean o) { + return inner.addLiteral(s, p, o); + } + + @Override + public Model addLiteral(Resource s, Property p, long o) { + return inner.addLiteral(s, p, o); + } + + @Override + public Model addLiteral(Resource s, Property p, int o) { + return inner.addLiteral(s, p, o); + } + + @Override + public Model addLiteral(Resource s, Property p, char o) { + return inner.addLiteral(s, p, o); + } + + @Override + public Model addLiteral(Resource s, Property p, float o) { + return inner.addLiteral(s, p, o); + } + + @Override + public Model addLiteral(Resource s, Property p, double o) { + return inner.addLiteral(s, p, o); + } + + @Override + public Model addLiteral(Resource s, Property p, Object o) { + return inner.addLiteral(s, p, o); + } + + @Override + public Model addLiteral(Resource s, Property p, Literal o) { + return inner.addLiteral(s, p, o); + } + + @Override + public Model add(Resource s, Property p, String o) { + return inner.add(s, p, o); + } + + @Override + public Model add(Resource s, Property p, String lex, RDFDatatype datatype) { + return inner.add(s, p, lex, datatype); + } + + @Override + public Model add(Resource s, Property p, String o, boolean wellFormed) { + return inner.add(s, p, o, wellFormed); + } + + @Override + public Model add(Resource s, Property p, String o, String l) { + return inner.add(s, p, o, l); + } + + @Override + public Model remove(Resource s, Property p, RDFNode o) { + return inner.remove(s, p, o); + } + + @Override + public Model remove(StmtIterator iter) { + return inner.remove(iter); + } + + @Override + public Model remove(Model m) { + return m.remove(m); + } + + @Override + public Model remove(Model m, boolean suppressReifications) { + return m.remove(m, suppressReifications); + } + + @Override + public StmtIterator listLiteralStatements(Resource subject, + Property predicate, boolean object) { + return inner.listLiteralStatements(subject, predicate, object); + } + + @Override + public StmtIterator listLiteralStatements(Resource subject, + Property predicate, char object) { + return inner.listLiteralStatements(subject, predicate, object); + } + + @Override + public StmtIterator listLiteralStatements(Resource subject, + Property predicate, long object) { + return inner.listLiteralStatements(subject, predicate, object); + } + + @Override + public StmtIterator listLiteralStatements(Resource subject, + Property predicate, float object) { + return inner.listLiteralStatements(subject, predicate, object); + } + + @Override + public StmtIterator listLiteralStatements(Resource subject, + Property predicate, double object) { + return inner.listLiteralStatements(subject, predicate, object); + } + + @Override + public StmtIterator listStatements(Resource subject, Property predicate, + String object) { + return inner.listStatements(subject, predicate, object); + } + + @Override + public StmtIterator listStatements(Resource subject, Property predicate, + String object, String lang) { + return inner.listStatements(subject, predicate, object, lang); + } + + @Override + public ResIterator listResourcesWithProperty(Property p, boolean o) { + return inner.listResourcesWithProperty(p, o); + } + + @Override + public ResIterator listResourcesWithProperty(Property p, long o) { + return 
inner.listResourcesWithProperty(p, o); + } + + @Override + public ResIterator listResourcesWithProperty(Property p, char o) { + return inner.listResourcesWithProperty(p, o); + } + + @Override + public ResIterator listResourcesWithProperty(Property p, float o) { + return inner.listResourcesWithProperty(p, o); + } + + @Override + public ResIterator listResourcesWithProperty(Property p, double o) { + return inner.listResourcesWithProperty(p, o); + } + + @Override + public ResIterator listResourcesWithProperty(Property p, Object o) { + return inner.listResourcesWithProperty(p, o); + } + + @Override + public ResIterator listSubjectsWithProperty(Property p, String o) { + return inner.listSubjectsWithProperty(p, o); + } + + @Override + public ResIterator listSubjectsWithProperty(Property p, String o, String l) { + return inner.listSubjectsWithProperty(p, o, l); + } + + @Override + public boolean containsLiteral(Resource s, Property p, boolean o) { + return inner.containsLiteral(s, p, o); + } + + @Override + public boolean containsLiteral(Resource s, Property p, long o) { + return inner.containsLiteral(s, p, o); + } + + @Override + public boolean containsLiteral(Resource s, Property p, int o) { + return inner.containsLiteral(s, p, o); + } + + @Override + public boolean containsLiteral(Resource s, Property p, char o) { + return inner.containsLiteral(s, p, o); + } + + @Override + public boolean containsLiteral(Resource s, Property p, float o) { + return inner.containsLiteral(s, p, o); + } + + @Override + public boolean containsLiteral(Resource s, Property p, double o) { + return inner.containsLiteral(s, p, o); + } + + @Override + public boolean containsLiteral(Resource s, Property p, Object o) { + return inner.containsLiteral(s, p, o); + } + + @Override + public boolean contains(Resource s, Property p, String o) { + return inner.contains(s, p, o); + } + + @Override + public boolean contains(Resource s, Property p, String o, String l) { + return inner.contains(s, p, o, l); + } + + @Override + public void enterCriticalSection(boolean readLockRequested) { + inner.enterCriticalSection(readLockRequested); + } + + @Override + public void leaveCriticalSection() { + inner.leaveCriticalSection(); + } + + @Override + public long size() { + return inner.size(); + } + + @Override + public boolean isEmpty() { + return inner.isEmpty(); + } + + @Override + public ResIterator listSubjects() { + return inner.listSubjects(); + } + + @Override + public NsIterator listNameSpaces() { + return inner.listNameSpaces(); + } + + @Override + public Resource getResource(String uri) { + return inner.getResource(uri); + } + + @Override + public Property getProperty(String nameSpace, String localName) { + return inner.getProperty(nameSpace, localName); + } + + @Override + public Resource createResource() { + return inner.createResource(); + } + + @Override + public Resource createResource(AnonId id) { + return inner.createResource(id); + } + + @Override + public Resource createResource(String uri) { + return inner.createResource(uri); + } + + @Override + public Property createProperty(String nameSpace, String localName) { + return inner.createProperty(nameSpace, localName); + } + + @Override + public Literal createLiteral(String v, String language) { + return inner.createLiteral(v, language); + } + + @Override + public Literal createLiteral(String v, boolean wellFormed) { + return inner.createLiteral(v, wellFormed); + } + + @Override + public Literal createTypedLiteral(String lex, RDFDatatype dtype) { + return 
inner.createTypedLiteral(lex, dtype); + } + + @Override + public Literal createTypedLiteral(Object value, RDFDatatype dtype) { + return inner.createTypedLiteral(value, dtype); + } + + @Override + public Literal createTypedLiteral(Object value) { + return inner.createTypedLiteral(value); + } + + @Override + public Statement createStatement(Resource s, Property p, RDFNode o) { + return inner.createStatement(s, p, o); + } + + @Override + public RDFList createList() { + return inner.createList(); + } + + @Override + public RDFList createList(Iterator members) { + return inner.createList(members); + } + + @Override + public RDFList createList(RDFNode[] members) { + return inner.createList(members); + } + + @Override + public Model add(Statement s) { + return inner.add(s); + } + + @Override + public Model add(Statement[] statements) { + return inner.add(statements); + } + + @Override + public Model remove(Statement[] statements) { + return inner.remove(statements); + } + + @Override + public Model add(List statements) { + return inner.add(statements); + } + + @Override + public Model remove(List statements) { + return inner.remove(statements); + } + + @Override + public Model add(StmtIterator iter) { + return inner.add(iter); + } + + @Override + public Model add(Model m) { + return m.add(m); + } + + @Override + public Model add(Model m, boolean suppressReifications) { + return m.add(m, suppressReifications); + } + + @Override + public Model read(String url) { + return inner.read(url); + } + + @Override + public Model read(InputStream in, String base) { + return inner.read(in, base); + } + + @Override + public Model read(InputStream in, String base, String lang) { + return inner.read(in, base, lang); + } + + @Override + public Model read(Reader reader, String base) { + return inner.read(reader, base); + } + + @Override + public Model read(String url, String lang) { + return inner.read(url, lang); + } + + @Override + public Model read(Reader reader, String base, String lang) { + return inner.read(reader, base, lang); + } + + @Override + public Model read(String url, String base, String lang) { + return inner.read(url, base, lang); + } + + @Override + public Model write(Writer writer) { + return inner.write(writer); + } + + @Override + public Model write(Writer writer, String lang) { + return inner.write(writer, lang); + } + + @Override + public Model write(Writer writer, String lang, String base) { + return inner.write(writer, lang, base); + } + + @Override + public Model write(OutputStream out) { + return inner.write(out); + } + + @Override + public Model write(OutputStream out, String lang) { + return inner.write(out, lang); + } + + @Override + public Model write(OutputStream out, String lang, String base) { + return inner.write(out, lang, base); + } + + @Override + public Model remove(Statement s) { + return inner.remove(s); + } + + @Override + public Statement getRequiredProperty(Resource s, Property p) { + return inner.getRequiredProperty(s, p); + } + + @Override + public Statement getProperty(Resource s, Property p) { + return inner.getProperty(s, p); + } + + @Override + public ResIterator listSubjectsWithProperty(Property p) { + return inner.listSubjectsWithProperty(p); + } + + @Override + public ResIterator listResourcesWithProperty(Property p) { + return inner.listResourcesWithProperty(p); + } + + @Override + public ResIterator listSubjectsWithProperty(Property p, RDFNode o) { + return inner.listSubjectsWithProperty(p, o); + } + + @Override + public ResIterator 
listResourcesWithProperty(Property p, RDFNode o) { + return inner.listResourcesWithProperty(p, o); + } + + @Override + public NodeIterator listObjects() { + return inner.listObjects(); + } + + @Override + public NodeIterator listObjectsOfProperty(Property p) { + return inner.listObjectsOfProperty(p); + } + + @Override + public NodeIterator listObjectsOfProperty(Resource s, Property p) { + return inner.listObjectsOfProperty(s, p); + } + + @Override + public boolean contains(Resource s, Property p) { + return inner.contains(s, p); + } + + @Override + public boolean containsResource(RDFNode r) { + return inner.containsResource(r); + } + + @Override + public boolean contains(Resource s, Property p, RDFNode o) { + return inner.contains(s, p, o); + } + + @Override + public boolean contains(Statement s) { + return inner.contains(s); + } + + @Override + public boolean containsAny(StmtIterator iter) { + return inner.containsAny(iter); + } + + @Override + public boolean containsAll(StmtIterator iter) { + return inner.containsAll(iter); + } + + @Override + public boolean containsAny(Model model) { + return inner.containsAny(model); + } + + @Override + public boolean containsAll(Model model) { + return inner.containsAll(model); + } + + @Override + public boolean isReified(Statement s) { + return inner.isReified(s); + } + + @Override + public Resource getAnyReifiedStatement(Statement s) { + return inner.getAnyReifiedStatement(s); + } + + @Override + public void removeAllReifications(Statement s) { + inner.removeAllReifications(s); + } + + @Override + public void removeReification(ReifiedStatement rs) { + inner.removeReification(rs); + } + + @Override + public StmtIterator listStatements() { + return inner.listStatements(); + } + + @Override + public StmtIterator listStatements(Selector s) { + return inner.listStatements(s); + } + + @Override + public StmtIterator listStatements(Resource s, Property p, RDFNode o) { + return inner.listStatements(s, p, o); + } + + @Override + public ReifiedStatement createReifiedStatement(Statement s) { + return inner.createReifiedStatement(s); + } + + @Override + public ReifiedStatement createReifiedStatement(String uri, Statement s) { + return inner.createReifiedStatement(uri, s); + } + + @Override + public RSIterator listReifiedStatements() { + return inner.listReifiedStatements(); + } + + @Override + public RSIterator listReifiedStatements(Statement st) { + return inner.listReifiedStatements(st); + } + + @Override + public ReificationStyle getReificationStyle() { + return inner.getReificationStyle(); + } + + @Override + public Model query(Selector s) { + return inner.query(s); + } + + @Override + public Model union(Model model) { + return inner.union(model); + } + + @Override + public Model intersection(Model model) { + return inner.intersection(model); + } + + @Override + public Model difference(Model model) { + return inner.difference(model); + } + + @Override + public Model begin() { + return inner.begin(); + } + + @Override + public Model abort() { + return inner.abort(); + } + + @Override + public Model commit() { + return inner.commit(); + } + + @Override + public Object executeInTransaction(Command cmd) { + return inner.executeInTransaction(cmd); + } + + @Override + public boolean independent() { + return inner.independent(); + } + + @Override + public boolean supportsTransactions() { + return inner.supportsTransactions(); + } + + @Override + public boolean supportsSetOperations() { + return inner.supportsSetOperations(); + } + + @Override + public boolean 
isIsomorphicWith(Model g) { + return inner.isIsomorphicWith(g); + } + + @Override + public void close() { + inner.close(); + } + + @Override + public Lock getLock() { + return inner.getLock(); + } + + @Override + public Model register(ModelChangedListener listener) { + return inner.register(listener); + } + + @Override + public Model unregister(ModelChangedListener listener) { + return inner.unregister(listener); + } + + @Override + public Model notifyEvent(Object e) { + return inner.notifyEvent(e); + } + + @Override + public Model removeAll() { + return inner.removeAll(); + } + + @Override + public Model removeAll(Resource s, Property p, RDFNode r) { + return inner.removeAll(s, p, r); + } + + @Override + public boolean isClosed() { + return inner.isClosed(); + } + +} diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/adapters/BulkUpdatingModel.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/adapters/BulkUpdatingModel.java new file mode 100644 index 000000000..03f81a9ed --- /dev/null +++ b/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/adapters/BulkUpdatingModel.java @@ -0,0 +1,209 @@ +/* $This file is distributed under the terms of the license in /doc/license.txt$ */ + +package edu.cornell.mannlib.vitro.webapp.rdfservice.adapters; + +import java.io.IOException; +import java.io.InputStream; +import java.io.Reader; +import java.net.URL; +import java.util.Iterator; +import java.util.List; + +import com.hp.hpl.jena.graph.BulkUpdateHandler; +import com.hp.hpl.jena.graph.Graph; +import com.hp.hpl.jena.graph.Triple; +import com.hp.hpl.jena.rdf.model.Model; +import com.hp.hpl.jena.rdf.model.ModelFactory; +import com.hp.hpl.jena.rdf.model.RDFReaderF; +import com.hp.hpl.jena.rdf.model.Statement; +import com.hp.hpl.jena.rdf.model.StmtIterator; +import com.hp.hpl.jena.rdf.model.impl.ModelCom; +import com.hp.hpl.jena.rdf.model.impl.RDFReaderFImpl; +import com.hp.hpl.jena.rdf.model.impl.StatementImpl; +import com.hp.hpl.jena.shared.WrappedIOException; +import com.hp.hpl.jena.util.iterator.Map1; + +/** + * A model that still handles bulk updates in the old-fashioned way: with a + * BulkUpdateHandler. + */ +public class BulkUpdatingModel extends ModelCom { + private static final RDFReaderF readerFactory = new RDFReaderFImpl(); + + private final BulkUpdateHandler buh; + + @SuppressWarnings("deprecation") + public BulkUpdatingModel(Graph base) { + super(base); + this.buh = base.getBulkUpdateHandler(); + } + + public BulkUpdatingModel(Graph base, BulkUpdateHandler buh) { + super(base); + this.buh = buh; + } + + @SuppressWarnings("deprecation") + @Override + public Model add(StmtIterator iter) { + try { + buh.add(asTriples(iter)); + } finally { + iter.close(); + } + return this; + } + + @Override + public Model add(Model m) { + return add(m, false); + } + + @Deprecated + @Override + public Model add(Model m, boolean suppressReifications) { + // suppressReifications is a no-op. 
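For orientation, a small hypothetical usage sketch (not part of this changeset; the class name BulkAddSketch and the example URI are invented): wrapping an in-memory graph in BulkUpdatingModel so that a whole-model addition like the add(Model, boolean) defined here becomes one BulkUpdateHandler call rather than a statement-by-statement copy.

    import com.hp.hpl.jena.graph.Factory;
    import com.hp.hpl.jena.rdf.model.Model;
    import com.hp.hpl.jena.rdf.model.ModelFactory;
    import com.hp.hpl.jena.vocabulary.RDFS;

    import edu.cornell.mannlib.vitro.webapp.rdfservice.adapters.BulkUpdatingModel;

    // Hypothetical example; both models are plain in-memory structures.
    public class BulkAddSketch {
        public static void main(String[] args) {
            Model target = new BulkUpdatingModel(Factory.createGraphMem());
            Model source = ModelFactory.createDefaultModel();
            source.createResource("http://example.org/a") // invented URI
                  .addProperty(RDFS.label, "a");
            // BulkUpdatingModel.add(Model) routes this through the graph's
            // BulkUpdateHandler as a single bulk operation.
            target.add(source);
        }
    }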
+ buh.add(m.getGraph()); + return this; + } + + @SuppressWarnings("deprecation") + @Override + public Model read(String url) { + Model m = ModelFactory.createDefaultModel(); + readerFactory.getReader().read(m, url); + buh.add(m.getGraph()); + return this; + } + + @SuppressWarnings("deprecation") + @Override + public Model read(Reader reader, String base) { + Model m = ModelFactory.createDefaultModel(); + readerFactory.getReader().read(m, reader, base); + buh.add(m.getGraph()); + return this; + } + + @SuppressWarnings("deprecation") + @Override + public Model read(InputStream reader, String base) { + Model m = ModelFactory.createDefaultModel(); + readerFactory.getReader().read(m, reader, base); + buh.add(m.getGraph()); + return this; + } + + @SuppressWarnings("deprecation") + @Override + public Model read(String url, String lang) { + Model m = ModelFactory.createDefaultModel(); + readerFactory.getReader(lang).read(m, url); + buh.add(m.getGraph()); + return this; + } + + @Override + public Model read(String url, String base, String lang) { + try { + InputStream is = new URL(url).openStream(); + try { + read(is, base, lang); + } finally { + if (null != is) { + is.close(); + } + } + } catch (IOException e) { + throw new WrappedIOException(e); + } + return this; + } + + @SuppressWarnings("deprecation") + @Override + public Model read(Reader reader, String base, String lang) { + Model m = ModelFactory.createDefaultModel(); + readerFactory.getReader(lang).read(m, reader, base); + buh.add(m.getGraph()); + return this; + } + + @SuppressWarnings("deprecation") + @Override + public Model read(InputStream reader, String base, String lang) { + Model m = ModelFactory.createDefaultModel(); + readerFactory.getReader(lang).read(m, reader, base); + buh.add(m.getGraph()); + return this; + } + + @SuppressWarnings("deprecation") + @Override + public Model remove(StmtIterator iter) { + buh.delete(asTriples(iter)); + return this; + } + + @Override + public Model remove(Model m) { + return remove(m, false); + } + + @Override + @Deprecated + public Model remove(Model m, boolean suppressReifications) { + buh.delete(m.getGraph()); + return this; + } + + @SuppressWarnings("deprecation") + @Override + public Model removeAll() { + buh.removeAll(); + return this; + } + + @SuppressWarnings("deprecation") + @Override + public Model add(Statement[] statements) { + buh.add(StatementImpl.asTriples(statements)); + return this; + } + + @Override + public Model add(List statements) { + add(statements.toArray(new Statement[statements.size()])); + return this; + } + + @SuppressWarnings("deprecation") + @Override + public Model remove(Statement[] statements) { + buh.delete(StatementImpl.asTriples(statements)); + return this; + } + + @Override + public Model remove(List statements) { + remove(statements.toArray(new Statement[statements.size()])); + return this; + } + + @Override + public String toString() { + return "<" + this.getClass().getSimpleName() + " " + getGraph() + ">"; + } + + private Iterator asTriples(StmtIterator it) { + return it.mapWith(mapAsTriple); + } + + private Map1 mapAsTriple = new Map1() { + @Override + public Triple map1(Statement s) { + return s.asTriple(); + } + }; + +} diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/adapters/BulkUpdatingOntModel.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/adapters/BulkUpdatingOntModel.java new file mode 100644 index 000000000..7ef64f742 --- /dev/null +++ 
b/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/adapters/BulkUpdatingOntModel.java @@ -0,0 +1,3193 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package edu.cornell.mannlib.vitro.webapp.rdfservice.adapters; + +import java.io.InputStream; +import java.io.OutputStream; +import java.io.Reader; +import java.io.Writer; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Set; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.hp.hpl.jena.enhanced.EnhNode; +import com.hp.hpl.jena.graph.Graph; +import com.hp.hpl.jena.graph.Node; +import com.hp.hpl.jena.graph.NodeFactory; +import com.hp.hpl.jena.graph.Triple; +import com.hp.hpl.jena.graph.compose.MultiUnion; +import com.hp.hpl.jena.ontology.AllDifferent; +import com.hp.hpl.jena.ontology.AllValuesFromRestriction; +import com.hp.hpl.jena.ontology.AnnotationProperty; +import com.hp.hpl.jena.ontology.CardinalityQRestriction; +import com.hp.hpl.jena.ontology.CardinalityRestriction; +import com.hp.hpl.jena.ontology.ComplementClass; +import com.hp.hpl.jena.ontology.ConversionException; +import com.hp.hpl.jena.ontology.DataRange; +import com.hp.hpl.jena.ontology.DatatypeProperty; +import com.hp.hpl.jena.ontology.EnumeratedClass; +import com.hp.hpl.jena.ontology.FunctionalProperty; +import com.hp.hpl.jena.ontology.HasValueRestriction; +import com.hp.hpl.jena.ontology.Individual; +import com.hp.hpl.jena.ontology.IntersectionClass; +import com.hp.hpl.jena.ontology.InverseFunctionalProperty; +import com.hp.hpl.jena.ontology.LanguageConsistencyException; +import com.hp.hpl.jena.ontology.MaxCardinalityQRestriction; +import com.hp.hpl.jena.ontology.MaxCardinalityRestriction; +import com.hp.hpl.jena.ontology.MinCardinalityQRestriction; +import com.hp.hpl.jena.ontology.MinCardinalityRestriction; +import com.hp.hpl.jena.ontology.ObjectProperty; +import com.hp.hpl.jena.ontology.OntClass; +import com.hp.hpl.jena.ontology.OntDocumentManager; +import com.hp.hpl.jena.ontology.OntModel; +import com.hp.hpl.jena.ontology.OntModelSpec; +import com.hp.hpl.jena.ontology.OntProperty; +import com.hp.hpl.jena.ontology.OntResource; +import com.hp.hpl.jena.ontology.Ontology; +import com.hp.hpl.jena.ontology.OntologyException; +import com.hp.hpl.jena.ontology.Profile; +import com.hp.hpl.jena.ontology.ProfileException; +import com.hp.hpl.jena.ontology.ProfileRegistry; +import com.hp.hpl.jena.ontology.QualifiedRestriction; +import com.hp.hpl.jena.ontology.Restriction; +import com.hp.hpl.jena.ontology.SomeValuesFromRestriction; +import com.hp.hpl.jena.ontology.SymmetricProperty; +import com.hp.hpl.jena.ontology.TransitiveProperty; +import com.hp.hpl.jena.ontology.UnionClass; +import 
com.hp.hpl.jena.ontology.impl.OntModelImpl; +import com.hp.hpl.jena.rdf.listeners.StatementListener; +import com.hp.hpl.jena.rdf.model.InfModel; +import com.hp.hpl.jena.rdf.model.Model; +import com.hp.hpl.jena.rdf.model.ModelFactory; +import com.hp.hpl.jena.rdf.model.ModelMaker; +import com.hp.hpl.jena.rdf.model.Property; +import com.hp.hpl.jena.rdf.model.RDFList; +import com.hp.hpl.jena.rdf.model.RDFNode; +import com.hp.hpl.jena.rdf.model.Resource; +import com.hp.hpl.jena.rdf.model.Statement; +import com.hp.hpl.jena.rdf.model.StmtIterator; +import com.hp.hpl.jena.rdf.model.impl.IteratorFactory; +import com.hp.hpl.jena.rdf.model.impl.ModelCom; +import com.hp.hpl.jena.reasoner.Derivation; +import com.hp.hpl.jena.reasoner.InfGraph; +import com.hp.hpl.jena.reasoner.Reasoner; +import com.hp.hpl.jena.reasoner.ValidityReport; +import com.hp.hpl.jena.util.iterator.ExtendedIterator; +import com.hp.hpl.jena.util.iterator.Filter; +import com.hp.hpl.jena.util.iterator.Map1; +import com.hp.hpl.jena.util.iterator.NullIterator; +import com.hp.hpl.jena.util.iterator.UniqueFilter; +import com.hp.hpl.jena.util.iterator.WrappedIterator; +import com.hp.hpl.jena.vocabulary.RDF; +import com.hp.hpl.jena.vocabulary.RDFS; +import com.hp.hpl.jena.vocabulary.ReasonerVocabulary; + +/** + * An OntModel based on a BulkUpdatingModel, instead of the default ModelCom. + */ +public class BulkUpdatingOntModel extends BulkUpdatingModel implements OntModel { + // Constants + ////////////////////////////////// + + /** + * This variable is how the OntModel knows how to construct + * a syntax checker. This part of the design may change. + */ + static public String owlSyntaxCheckerClassName = "com.hp.hpl.jena.ontology.tidy.JenaChecker"; + + + // Static variables + ////////////////////////////////// + + static private Logger s_log = LoggerFactory.getLogger( OntModelImpl.class ); + + // Instance variables + ////////////////////////////////// + + /** The model specification this model is using to define its structure */ + protected OntModelSpec m_spec; + + /** List of URI strings of documents that have been imported into this one */ + protected Set m_imported = new HashSet(); + + /** Mode switch for strict checking mode */ + protected boolean m_strictMode = true; + + /** The union graph that contains the imports closure - there is always one of these, which may also be _the_ graph for the model */ + protected MultiUnion m_union = new MultiUnion(); + + /** The listener that detects dynamically added or removed imports statements */ + protected ImportsListener m_importsListener = null; + + /** Cached deductions model */ + private Model m_deductionsModel = null; + + + public BulkUpdatingOntModel() { + this(null); + } + + public BulkUpdatingOntModel( Model model ) { + // we haven't built the full graph yet, so we pass a vestigial form up to the super constructor + super( generateGraph( spec(), nonNullModel(model).getGraph() )); + m_spec = spec(); + + // extract the union graph from whatever generateGraph() created + m_union = (getGraph() instanceof MultiUnion) ? + ((MultiUnion) getGraph()) : + (MultiUnion) ((InfGraph) getGraph()).getRawGraph(); + + loadImports(); + + // set the default prefixes + if (m_spec != null && m_spec.getKnownPrefixes() != null) { + try { + // Protect in case the graph is read-only. + // Prefixes are hints + String[][] p = m_spec.getKnownPrefixes(); + for (int i = 0; i < p.length; i++) { + String[] pair = p[i]; + setNsPrefix( pair[0], pair[1] ); + } + } catch (Exception ex) { + // Forget about it. 
+ } + } + + // force the inference engine, if we have one, to see the new graph data + rebind(); + } + + private static OntModelSpec spec() { + return OntModelSpec.OWL_DL_MEM; + } + + private static Model nonNullModel(Model model) { + return (model == null ? new ModelCom( + com.hp.hpl.jena.graph.Factory.createGraphMem()) : model); + } + + // External signature methods + ////////////////////////////////// + + /** + *
+ * Answer a reference to the document manager that this model is using to manage + * ontology <-> mappings, and to load the imports closure. Note + * the default ontology model {@linkplain OntModelSpec specifications} each have + * a contained default document manager. Changing the document managers specified by + * these default specification may (in fact, probably will) + * affect other models built with the same specification + * policy. This may or may not be as desired by the programmer! + *
+ * @return A reference to this model's document manager, obtained from the specification object + */ + @Override + public OntDocumentManager getDocumentManager() { + return m_spec.getDocumentManager(); + } + + + /** + *
+ * Answer an iterator that ranges over the ontology resources in this model, i.e. + * the resources with rdf:type Ontology or equivalent. These resources + * typically contain metadata about the ontology document that contains them. + *
+ *
+ * Specifically, the resources in this iterator will those whose type corresponds + * to the value given in the ontology vocabulary associated with this model, see + * {@link Profile#ONTOLOGY}. + *
+ *
+ * Note: the number of nodes returned by this iterator will vary according to + * the completeness of the deductive extension of the underlying graph. See class + * overview for more details. + *
+ * + * @return An iterator over ontology resources. + */ + @Override + public ExtendedIterator listOntologies() { + checkProfileEntry( getProfile().ONTOLOGY(), "ONTOLOGY" ); + return findByTypeAs( getProfile().ONTOLOGY(), Ontology.class ) + .filterKeep( new UniqueFilter()); + } + + + /** + *
+ * Answer an iterator that ranges over the property resources in this model, i.e. + * the resources with rdf:type Property or equivalent. An OntProperty + * is equivalent to an rdfs:Property in a normal RDF graph; this type is + * provided as a common super-type for the more specific {@link ObjectProperty} and + * {@link DatatypeProperty} property types. + *
+ *
+ * Note: This method searches for nodes in the underlying model whose + * rdf:type is rdf:Property. This type is entailed by + * specific property sub-types, such as owl:ObjectProperty. An important + * consequence of this is that in models without an attached reasoner (e.g. in the + * OWL_MEM {@link OntModelSpec}), the entailed type will not be present + * and this method will omit such properties from the returned iterator. + * Solution: There are two + * ways to address to this issue: either use a reasoning engine to ensure that type entailments + * are taking place correctly, or call {@link #listAllOntProperties()}. Note + * that listAllOntProperties is potentially less efficient than this method.
+ *
+ * The resources returned by this iterator will those whose type corresponds + * to the value given in the ontology vocabulary associated with this model. + *
+ * + * @return An iterator over property resources. + */ + @Override + public ExtendedIterator listOntProperties() { + ExtendedIterator i = findByTypeAs( RDF.Property, OntProperty.class ) + .filterKeep( new UniqueFilter()); + + // if we are in OWL_FULL, the properties should also include the annotation properties + if (getReasoner() != null && getProfile().equals( ProfileRegistry.getInstance().getProfile( ProfileRegistry.OWL_LANG ) )) { + // we are using a reasoner, and in OWL Full + // so add the annotation properties too + i = i.andThen( listAnnotationProperties() ); + } + + return i; + } + + /** + *
+ * Answer an iterator over all of the ontology properties in this model, including + * object properties, datatype properties, annotation properties, etc. This method + * takes a different approach to calculating the set of property resources to return, + * and is robust against the absence of a reasoner attached to the model (see note + * in {@link #listOntProperties()} for explanation). However, the calculation used by + * this method is potentially less efficient than the alternative listOntProperties(). + * Users whose models have an attached reasoner are recommended to use + * {@link #listOntProperties()}.
+ * @return An iterator over all available properties in a model, irrespective of + * whether a reasoner is available to perform rdf:type entailments. + * Each property will appear exactly once in the iterator. + */ + @Override + public ExtendedIterator listAllOntProperties() { + ExtendedIterator i = findByTypeAs( RDF.Property, OntProperty.class ) + .andThen( listObjectProperties() ) + .andThen( listDatatypeProperties() ) + .andThen( listAnnotationProperties() ) + .andThen( listFunctionalProperties() ) + .andThen( listTransitiveProperties() ) + .andThen( listSymmetricProperties() ); + + // we must filter for uniqueness + return i.filterKeep( new UniqueFilter()); + } + + /** + *
+ * Answer an iterator that ranges over the object property resources in this model, i.e. + * the resources with rdf:type ObjectProperty or equivalent. An object + * property is a property that is defined in the ontology language semantics as a + * one whose range comprises individuals (rather than datatyped literals). + *

+ *

+ * Specifically, the resources in this iterator will be those whose type corresponds + * to the value given in the ontology vocabulary associated with this model: see + * {@link Profile#OBJECT_PROPERTY}. + *

+ *

+ * Note: the number of nodes returned by this iterator will vary according to + * the completeness of the deductive extension of the underlying graph. See class + * overview for more details. + *

+ * + * @return An iterator over object property resources. + */ + @Override + public ExtendedIterator listObjectProperties() { + checkProfileEntry( getProfile().OBJECT_PROPERTY(), "OBJECT_PROPERTY" ); + return findByTypeAs( getProfile().OBJECT_PROPERTY(), ObjectProperty.class ) + .filterKeep( new UniqueFilter()); + } + + + /** + *

+ * Answer an iterator that ranges over the datatype property resources in this model, i.e. + * the resources with rdf:type DatatypeProperty or equivalent. A datatype + * property is a property that is defined in the ontology language semantics as + * one whose range comprises datatyped literals (rather than individuals). + *

+ *

+ * Specifically, the resources in this iterator will be those whose type corresponds + * to the value given in the ontology vocabulary associated with this model: see + * {@link Profile#DATATYPE_PROPERTY}. + *

+ *

+ * Note: the number of nodes returned by this iterator will vary according to + * the completeness of the deductive extension of the underlying graph. See class + * overview for more details. + *

+ * + * @return An iterator over datatype property resources. + */ + @Override + public ExtendedIterator listDatatypeProperties() { + checkProfileEntry( getProfile().DATATYPE_PROPERTY(), "DATATYPE_PROPERTY" ); + return findByTypeAs( getProfile().DATATYPE_PROPERTY(), DatatypeProperty.class ) + .filterKeep( new UniqueFilter()); + } + + + /** + *

+ * Answer an iterator that ranges over the functional property resources in this model, i.e. + * the resources with rdf:type FunctionalProperty or equivalent. A functional + * property is a property that is defined in the ontology language semantics as having + * a unique domain element for each instance of the relationship. + *

+ *

+ * Specifically, the resources in this iterator will be those whose type corresponds + * to the value given in the ontology vocabulary associated with this model: see + * {@link Profile#FUNCTIONAL_PROPERTY}. + *

+ * + * @return An iterator over functional property resources. + */ + @Override + public ExtendedIterator listFunctionalProperties() { + checkProfileEntry( getProfile().FUNCTIONAL_PROPERTY(), "FUNCTIONAL_PROPERTY" ); + return findByTypeAs( getProfile().FUNCTIONAL_PROPERTY(), FunctionalProperty.class ) + .filterKeep( new UniqueFilter()); + } + + + /** + *

+ * Answer an iterator that ranges over the transitive property resources in this model, i.e. + * the resources with rdf:type TransitiveProperty or equivalent. + *

+ *

+ * Specifically, the resources in this iterator will be those whose type corresponds + * to the value given in the ontology vocabulary associated with this model: see + * {@link Profile#TRANSITIVE_PROPERTY}. + *

+ * + * @return An iterator over transitive property resources. + */ + @Override + public ExtendedIterator listTransitiveProperties() { + checkProfileEntry( getProfile().TRANSITIVE_PROPERTY(), "TRANSITIVE_PROPERTY" ); + return findByTypeAs( getProfile().TRANSITIVE_PROPERTY(), TransitiveProperty.class ) + .filterKeep( new UniqueFilter()); + } + + + /** + *

+ * Answer an iterator that ranges over the symmetric property resources in this model, i.e. + * the resources with rdf:type SymmetricProperty or equivalent. + *

+ *

+ * Specifically, the resources in this iterator will be those whose type corresponds + * to the value given in the ontology vocabulary associated with this model: see + * {@link Profile#SYMMETRIC_PROPERTY}. + *

+ * + * @return An iterator over symmetric property resources. + */ + @Override + public ExtendedIterator listSymmetricProperties() { + checkProfileEntry( getProfile().SYMMETRIC_PROPERTY(), "SYMMETRIC_PROPERTY" ); + return findByTypeAs( getProfile().SYMMETRIC_PROPERTY(), SymmetricProperty.class ) + .filterKeep( new UniqueFilter()); + } + + + /** + *

+ * Answer an iterator that ranges over the inverse functional property resources in this model, i.e. + * the resources with rdf:type InverseFunctionalProperty or equivalent. + *

+ *

+ * Specifically, the resources in this iterator will be those whose type corresponds + * to the value given in the ontology vocabulary associated with this model: see + * {@link Profile#INVERSE_FUNCTIONAL_PROPERTY}. + *

+ * + * @return An iterator over inverse functional property resources. + */ + @Override + public ExtendedIterator listInverseFunctionalProperties() { + checkProfileEntry( getProfile().INVERSE_FUNCTIONAL_PROPERTY(), "INVERSE_FUNCTIONAL_PROPERTY" ); + return findByTypeAs( getProfile().INVERSE_FUNCTIONAL_PROPERTY(), InverseFunctionalProperty.class ) + .filterKeep( new UniqueFilter()); + } + + + /** + *

+ * Answer an iterator over the individuals in this model. Where possible, an individual + * is defined as an instance of the top class in an ontology, i.e. owl:Thing + * or daml:Thing. However, since this test relies on the presence of an inference + * capability, and is not defined in cases where there is no top class (such as RDFS), + * a secondary heuristic is used when needed: an individual is an instance of a class defined + * in the ontology (i.e. it is a resource with an rdf:type, where the + * rdf:type of that resource is a class or restriction in the ontology). + *
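+     * An illustrative sketch (assuming {@code m} denotes this ontology model):
+     * <pre>
+     *     for (ExtendedIterator it = m.listIndividuals(); it.hasNext(); ) {
+     *         Individual ind = (Individual) it.next();
+     *         System.out.println( ind );
+     *     }
+     * </pre>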

+ * + * @return An iterator over Individuals. + */ + @Override + public ExtendedIterator listIndividuals() { + // since the reasoner implements some OWL full functionality for RDF compatibility, we + // have to decide which strategy to use for identifying individuals depending on whether + // or not a powerful reasoner (i.e. owl:Thing/daml:Thing aware) is being used with this model + boolean supportsIndAsThing = false; + if (getGraph() instanceof InfGraph) { + supportsIndAsThing = ((InfGraph) getGraph()).getReasoner() + .getReasonerCapabilities() + .contains( null, ReasonerVocabulary.supportsP, ReasonerVocabulary.individualAsThingP ); + } + + if (!supportsIndAsThing || (getProfile().THING() == null) || getProfile().CLASS().equals( RDFS.Class )) { + // no inference, or we are in RDFS land, so we pick things that have rdf:type whose rdf:type is Class + + // it's tricky to make this efficient and cover all possible cases. I've changed the code to + // make use of the isIndividual() test on OntResource, at the expense of some redundant queries + // to the model, which could become expensive in the case of a DB model - ijd Apr-23-09 + Set results = new HashSet(); + for (Iterator i = listStatements( null, RDF.type, (RDFNode) null); i.hasNext(); ) { + OntResource r = i.next().getSubject().as( OntResource.class ); + if (r.isIndividual()) { + results.add( r.as( Individual.class ) ); + } + } + + return WrappedIterator.create( results.iterator() ); + } + else { + // we have inference, so we pick the nodes that are of type Thing + return findByTypeAs( getProfile().THING(), Individual.class ).filterKeep( new UniqueFilter()); + } + } + + + /** + *

+ * Answer an iterator that ranges over the resources in this model that are + * instances of the given class. + *

+ * + * @return An iterator over individual resources whose rdf:type + * is cls. + */ + @Override + public ExtendedIterator listIndividuals( Resource cls ) { + return findByTypeAs( cls, Individual.class ) + .filterKeep( new UniqueFilter()); + } + + + /** + *

+ * Answer an iterator that ranges over all of the various forms of class description resource + * in this model. Class descriptions include {@link #listEnumeratedClasses enumerated} + * classes, {@link #listUnionClasses union} classes, {@link #listComplementClasses complement} + * classes, {@link #listIntersectionClasses intersection} classes, {@link #listClasses named} + * classes and {@link #listRestrictions property restrictions}. + *

+ *

+ * Note: the number of nodes returned by this iterator will vary according to + * the completeness of the deductive extension of the underlying graph. See class + * overview for more details. + *

+ * + * @return An iterator over class description resources. + */ + @Override + public ExtendedIterator listClasses() { + return findByTypeAs( getProfile().getClassDescriptionTypes(), OntClass.class ) + .filterKeep( new UniqueFilter()); + } + + + /** + *

Answer an iterator over the classes in this ontology model that represent + * the uppermost nodes of the class hierarchy. Depending on the underlying + * reasoner configuration, if any, these will be calculated as the classes + * that have Top (i.e. owl:Thing or daml:Thing) + * as a direct super-class, or the classes which have no declared super-class.
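+     * For example (a sketch; {@code m} is assumed to denote this ontology model):
+     * <pre>
+     *     for (ExtendedIterator it = m.listHierarchyRootClasses(); it.hasNext(); ) {
+     *         OntClass root = (OntClass) it.next();
+     *         System.out.println( root );
+     *     }
+     * </pre>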

+ * @return An iterator of the root classes in the local class hierarchy + */ + @Override + public ExtendedIterator listHierarchyRootClasses() { + // no easy shortcut, so we use brute force + return listClasses() + .filterDrop( new Filter() { + @Override + public boolean accept( OntClass o ) { + return ((OntResource) o).isOntLanguageTerm(); + }} ) + .filterKeep( new Filter() { + @Override + public boolean accept( OntClass o ) { + return o.isHierarchyRoot(); + }} ) + ; + } + + + /** + *

+ * Answer an iterator that ranges over the enumerated class class-descriptions + * in this model, i.e. the class resources specified to have a property + * oneOf (or equivalent) and a list of values. + *

+ *

+ * Note: the number of nodes returned by this iterator will vary according to + * the completeness of the deductive extension of the underlying graph. See class + * overview for more details. + *

+ * + * @return An iterator over enumerated class resources. + * @see Profile#ONE_OF + */ + @Override + public ExtendedIterator listEnumeratedClasses() { + checkProfileEntry( getProfile().ONE_OF(), "ONE_OF" ); + return findByDefiningPropertyAs( getProfile().ONE_OF(), EnumeratedClass.class ) + .filterKeep( new UniqueFilter()); + } + + + /** + *

+ * Answer an iterator that ranges over the union class-descriptions + * in this model, i.e. the class resources specified to have a property + * unionOf (or equivalent) and a list of values. + *

+ *

+ * Note: the number of nodes returned by this iterator will vary according to + * the completeness of the deductive extension of the underlying graph. See class + * overview for more details. + *

+ * + * @return An iterator over union class resources. + * @see Profile#UNION_OF + */ + @Override + public ExtendedIterator listUnionClasses() { + checkProfileEntry( getProfile().UNION_OF(), "UNION_OF" ); + return findByDefiningPropertyAs( getProfile().UNION_OF(), UnionClass.class ) + .filterKeep( new UniqueFilter()); + } + + + /** + *

+ * Answer an iterator that ranges over the complement class-descriptions + * in this model, i.e. the class resources specified to have a property + * complementOf (or equivalent) and a list of values. + *

+ *

+ * Note: the number of nodes returned by this iterator will vary according to + * the completeness of the deductive extension of the underlying graph. See class + * overview for more details. + *

+ * + * @return An iterator over complement class resources. + * @see Profile#COMPLEMENT_OF + */ + @Override + public ExtendedIterator listComplementClasses() { + checkProfileEntry( getProfile().COMPLEMENT_OF(), "COMPLEMENT_OF" ); + return findByDefiningPropertyAs( getProfile().COMPLEMENT_OF(), ComplementClass.class ) + .filterKeep( new UniqueFilter()); + } + + + /** + *

+ * Answer an iterator that ranges over the intersection class-descriptions + * in this model, i.e. the class resources specified to have a property + * intersectionOf (or equivalent) and a list of values. + *

+ *

+ * Note: the number of nodes returned by this iterator will vary according to + * the completeness of the deductive extension of the underlying graph. See class + * overview for more details. + *

+ * + * @return An iterator over intersection class resources. + * @see Profile#INTERSECTION_OF + */ + @Override + public ExtendedIterator listIntersectionClasses() { + checkProfileEntry( getProfile().INTERSECTION_OF(), "INTERSECTION_OF" ); + return findByDefiningPropertyAs( getProfile().INTERSECTION_OF(), IntersectionClass.class ) + .filterKeep( new UniqueFilter()); + } + + + /** + *

+ * Answer an iterator that ranges over the named class-descriptions + * in this model, i.e. resources with rdf:type + * Class (or equivalent) and a node URI. + *

+ *

+ * Note: the number of nodes returned by this iterator will vary according to + * the completeness of the deductive extension of the underlying graph. See class + * overview for more details. + *

+ * + * @return An iterator over named class resources. + */ + @Override + public ExtendedIterator listNamedClasses() { + return listClasses().filterDrop( + new Filter() { + @Override + public boolean accept( OntClass x ) { + return x.isAnon(); + } + } + ); + } + + + /** + *

+ * Answer an iterator that ranges over the property restriction class-descriptions + * in this model, i.e. resources with rdf:type + * Restriction (or equivalent). + *

+ *

+ * Note: the number of nodes returned by this iterator will vary according to + * the completeness of the deductive extension of the underlying graph. See class + * overview for more details. + *

+ * + * @return An iterator over restriction class resources. + * @see Profile#RESTRICTION + */ + @Override + public ExtendedIterator listRestrictions() { + checkProfileEntry( getProfile().RESTRICTION(), "RESTRICTION" ); + return findByTypeAs( getProfile().RESTRICTION(), Restriction.class ) + .filterKeep( new UniqueFilter()); + } + + + /** + *

+ * Answer an iterator that ranges over the nodes that denote pair-wise disjointness between + * sets of classes. + *

+ *

+ * Note: the number of nodes returned by this iterator will vary according to + * the completeness of the deductive extension of the underlying graph. See class + * overview for more details. + *

+ * + * @return An iterator over AllDifferent nodes. + */ + @Override + public ExtendedIterator listAllDifferent() { + checkProfileEntry( getProfile().ALL_DIFFERENT(), "ALL_DIFFERENT" ); + return findByTypeAs( getProfile().ALL_DIFFERENT(), AllDifferent.class ) + .filterKeep( new UniqueFilter()); + } + + /** + *

+ * Answer an iterator over the DataRange objects in this ontology, if there + * are any.

+ * @return An iterator, whose values are {@link DataRange} objects. + */ + @Override + public ExtendedIterator listDataRanges() { + checkProfileEntry( getProfile().DATARANGE(), "DATARANGE" ); + return findByTypeAs( getProfile().DATARANGE(), DataRange.class ) + .filterKeep( new UniqueFilter()); + } + + + /** + *

+ * Answer an iterator that ranges over the properties in this model that are declared + * to be annotation properties. Not all supported languages define annotation properties + * (the category of annotation properties is chiefly an OWL innovation). + *

+ *

+ * Note: the number of nodes returned by this iterator will vary according to + * the completeness of the deductive extension of the underlying graph. See class + * overview for more details. + *

+ * + * @return An iterator over annotation properties. + * @see Profile#getAnnotationProperties() + */ + @Override + public ExtendedIterator listAnnotationProperties() { + checkProfileEntry( getProfile().ANNOTATION_PROPERTY(), "ANNOTATION_PROPERTY" ); + Resource r = getProfile().ANNOTATION_PROPERTY(); + + if (r == null) { + return new NullIterator(); + } + else { + return findByType( r ) + .mapWith( new SubjectNodeAs( AnnotationProperty.class ) ) + .filterKeep( new UniqueFilter()); + } + } + + + /** + *

+ * Answer a resource that represents an ontology description node in this model. If a resource + * with the given uri exists in the model, and can be viewed as an Ontology, return the + * Ontology facet, otherwise return null. + *

+ * + * @param uri The uri for the ontology node. Conventionally, this corresponds to the base URI + * of the document itself. + * @return An Ontology resource or null. + */ + @Override + public Ontology getOntology( String uri ) { + return (Ontology) findByURIAs( uri, Ontology.class ); + } + + + /** + *

+ * Answer a resource that represents an Individual node in this model. If a resource + * with the given uri exists in the model, and can be viewed as an Individual, return the + * Individual facet, otherwise return null. + *

+ * + * @param uri The URI for the required individual + * @return An Individual resource or null. + */ + @Override + public Individual getIndividual( String uri ) { + return (Individual) findByURIAs( uri, Individual.class ); + } + + + /** + *

+ * Answer a resource representing a generic property in this model. If a property + * with the given uri exists in the model, return the + * OntProperty facet, otherwise return null. + *

+ * + * @param uri The uri for the property. + * @return An OntProperty resource or null. + */ + @Override + public OntProperty getOntProperty( String uri ) { + return (OntProperty) findByURIAs( uri, OntProperty.class ); + } + + + /** + *

+ * Answer a resource representing an object property in this model. If a resource + * with the given uri exists in the model, and can be viewed as an ObjectProperty, return the + * ObjectProperty facet, otherwise return null. + *

+ * + * @param uri The uri for the object property. May not be null. + * @return An ObjectProperty resource or null. + */ + @Override + public ObjectProperty getObjectProperty( String uri ) { + return (ObjectProperty) findByURIAs( uri, ObjectProperty.class ); + } + + + /** + *

Answer a resource representing a transitive property. If a resource + * with the given uri exists in the model, and can be viewed as a TransitiveProperty, return the + * TransitiveProperty facet, otherwise return null.

+ * @param uri The uri for the property. May not be null. + * @return A TransitiveProperty resource or null + */ + @Override + public TransitiveProperty getTransitiveProperty( String uri ) { + return (TransitiveProperty) findByURIAs( uri, TransitiveProperty.class ); + } + + + /** + *

Answer a resource representing a symmetric property. If a resource + * with the given uri exists in the model, and can be viewed as a SymmetricProperty, return the + * SymmetricProperty facet, otherwise return null.

+ * @param uri The uri for the property. May not be null. + * @return A SymmetricProperty resource or null + */ + @Override + public SymmetricProperty getSymmetricProperty( String uri ) { + return (SymmetricProperty) findByURIAs( uri, SymmetricProperty.class ); + } + + + /** + *

Answer a resource representing an inverse functional property. If a resource + * with the given uri exists in the model, and can be viewed as a InverseFunctionalProperty, return the + * InverseFunctionalProperty facet, otherwise return null.

+ * @param uri The uri for the property. May not be null. + * @return An InverseFunctionalProperty resource or null + */ + @Override + public InverseFunctionalProperty getInverseFunctionalProperty( String uri ) { + return (InverseFunctionalProperty) findByURIAs( uri, InverseFunctionalProperty.class ); + } + + + /** + *

+ * Answer a resource that represents a datatype property in this model. If a resource + * with the given uri exists in the model, and can be viewed as a DatatypeProperty, return the + * DatatypeProperty facet, otherwise return null. + *

+ * + * @param uri The uri for the datatype property. May not be null. + * @return A DatatypeProperty resource or null + */ + @Override + public DatatypeProperty getDatatypeProperty( String uri ) { + return (DatatypeProperty) findByURIAs( uri, DatatypeProperty.class ); + } + + + /** + *

+ * Answer a resource that represents an annotation property in this model. If a resource + * with the given uri exists in the model, and can be viewed as an AnnotationProperty, return the + * AnnotationProperty facet, otherwise return null. + *

+ * + * @param uri The uri for the annotation property. May not be null. + * @return An AnnotationProperty resource or null + */ + @Override + public AnnotationProperty getAnnotationProperty( String uri ) { + return (AnnotationProperty) findByURIAs( uri, AnnotationProperty.class ); + } + + + /** + *

+ * Answer a resource that represents a class description node in this model. If a resource + * with the given uri exists in the model, and can be viewed as an OntClass, return the + * OntClass facet, otherwise return null. + *
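+     * For example (illustrative only; the example URI is hypothetical and {@code m} denotes this model):
+     * <pre>
+     *     OntClass thing = m.getOntClass( OWL.Thing.getURI() );                  // special-cased below
+     *     OntClass person = m.getOntClass( "http://example.org/ex#Person" );     // null if not present
+     * </pre>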

+ * + * @param uri The uri for the class node, or null for an anonymous class. + * @return An OntClass resource or null. + */ + @Override + public OntClass getOntClass( String uri ) { + OntClass c = (OntClass) findByURIAs( uri, OntClass.class ); + + // special case for nothing and thing + if (c == null) { + Resource thing = getProfile().THING(); + if (thing != null && thing.getURI().equals( uri )) { + c = thing.inModel( this ).as( OntClass.class ); + } + + Resource nothing = getProfile().NOTHING(); + if (nothing != null && nothing.getURI().equals( uri )) { + c = nothing.inModel( this ).as( OntClass.class ); + } + } + + return c; + } + + + /** + *

Answer a resource representing the class that is the complement of another class. If a resource + * with the given uri exists in the model, and can be viewed as a ComplementClass, return the + * ComplementClass facet, otherwise return null.

+ * @param uri The URI of the new complement class. + * @return A complement class or null + */ + @Override + public ComplementClass getComplementClass( String uri ) { + return (ComplementClass) findByURIAs( uri, ComplementClass.class ); + } + + + /** + *

Answer a resource representing the class that is the enumeration of a list of individuals. If a resource + * with the given uri exists in the model, and can be viewed as an EnumeratedClass, return the + * EnumeratedClass facet, otherwise return null.

+ * @param uri The URI of the new enumeration class. + * @return An enumeration class or null + */ + @Override + public EnumeratedClass getEnumeratedClass( String uri ) { + return (EnumeratedClass) findByURIAs( uri, EnumeratedClass.class ); + } + + + /** + *

+ * Answer a resource representing the class that is the union of a list of class descriptions. If a resource + * with the given uri exists in the model, and can be viewed as a UnionClass, return the + * UnionClass facet, otherwise return null.

+ * @param uri The URI of the new union class. + * @return A union class description or null + */ + @Override + public UnionClass getUnionClass( String uri ) { + return (UnionClass) findByURIAs( uri, UnionClass.class ); + } + + + /** + *

Answer a resource representing the class that is the intersection of a list of class descriptions. If a resource + * with the given uri exists in the model, and can be viewed as a IntersectionClass, return the + * IntersectionClass facet, otherwise return null.

+ * @param uri The URI of the new intersection class. + * @return An intersection class description or null + */ + @Override + public IntersectionClass getIntersectionClass( String uri ) { + return (IntersectionClass) findByURIAs( uri, IntersectionClass.class ); + } + + + /** + *

+ * Answer a resource that represents a property restriction in this model. If a resource + * with the given uri exists in the model, and can be viewed as a Restriction, return the + * Restriction facet, otherwise return null. + *

+ * + * @param uri The uri for the restriction node. + * @return A Restriction resource or null + */ + @Override + public Restriction getRestriction( String uri ) { + return (Restriction) findByURIAs( uri, Restriction.class ); + } + + + /** + *

Answer a class description defined as the class of those individuals that have the given + * resource as the value of the given property. If a resource + * with the given uri exists in the model, and can be viewed as a HasValueRestriction, return the + * HasValueRestriction facet, otherwise return null.

+ * + * @param uri The URI for the restriction + * @return A resource representing a has-value restriction or null + */ + @Override + public HasValueRestriction getHasValueRestriction( String uri ) { + return (HasValueRestriction) findByURIAs( uri, HasValueRestriction.class ); + } + + + /** + *

Answer a class description defined as the class of those individuals that have at least + * one property with a value belonging to the given class. If a resource + * with the given uri exists in the model, and can be viewed as a SomeValuesFromRestriction, return the + * SomeValuesFromRestriction facet, otherwise return null.

+ * + * @param uri The URI for the restriction + * @return A resource representing a some-values-from restriction, or null + */ + @Override + public SomeValuesFromRestriction getSomeValuesFromRestriction( String uri ) { + return (SomeValuesFromRestriction) findByURIAs( uri, SomeValuesFromRestriction.class ); + } + + + /** + *

+ * Answer a class description defined as the class of those individuals for which all values + * of the given property belong to the given class. If a resource + * with the given uri exists in the model, and can be viewed as an AllValuesFromRestriction, return the + * AllValuesFromRestriction facet, otherwise return null.

+ * + * @param uri The URI for the restriction + * @return A resource representing an all-values-from restriction or null + */ + @Override + public AllValuesFromRestriction getAllValuesFromRestriction( String uri ) { + return (AllValuesFromRestriction) findByURIAs( uri, AllValuesFromRestriction.class ); + } + + + /** + *

Answer a class description defined as the class of those individuals that have exactly + * the given number of values for the given property. If a resource + * with the given uri exists in the model, and can be viewed as a CardinalityRestriction, return the + * CardinalityRestriction facet, otherwise return null.

+ * + * @param uri The URI for the restriction + * @return A resource representing a cardinality restriction, or null + */ + @Override + public CardinalityRestriction getCardinalityRestriction( String uri ) { + return (CardinalityRestriction) findByURIAs( uri, CardinalityRestriction.class ); + } + + + /** + *

Answer a class description defined as the class of those individuals that have at least + * the given number of values for the given property. If a resource + * with the given uri exists in the model, and can be viewed as a MinCardinalityRestriction, return the + * MinCardinalityRestriction facet, otherwise return null.

+ * + * @param uri The URI for the restriction + * @return A resource representing a min-cardinality restriction, or null + */ + @Override + public MinCardinalityRestriction getMinCardinalityRestriction( String uri ) { + return (MinCardinalityRestriction) findByURIAs( uri, MinCardinalityRestriction.class ); + } + + + /** + *

Answer a class description defined as the class of those individuals that have at most + * the given number of values for the given property. If a resource + * with the given uri exists in the model, and can be viewed as a MaxCardinalityRestriction, return the + * MaxCardinalityRestriction facet, otherwise return null.

+ * + * @param uri The URI for the restriction + * @return A resource representing a max-cardinality restriction, or null + */ + @Override + public MaxCardinalityRestriction getMaxCardinalityRestriction( String uri ) { + return (MaxCardinalityRestriction) findByURIAs( uri, MaxCardinalityRestriction.class ); + } + + + /** + *

Answer a class description defined as the class of those individuals that have a property + * p, all values of which are members of a given class. Typically used with a cardinality constraint. + * If a resource + * with the given uri exists in the model, and can be viewed as a QualifiedRestriction, return the + * QualifiedRestriction facet, otherwise return null.

+ * + * @param uri The URI for the restriction + * @return A resource representing a qualified restriction, or null + */ + @Override + public QualifiedRestriction getQualifiedRestriction( String uri ) { + return (QualifiedRestriction) findByURIAs( uri, QualifiedRestriction.class ); + } + + + /** + *

Answer a class description defined as the class of those individuals that have a property + * p, with cardinality N, all values of which are members of a given class. + * If a resource + * with the given uri exists in the model, and can be viewed as a CardinalityQRestriction, return the + * CardinalityQRestriction facet, otherwise return null.

+ * + * @param uri The URI for the restriction + * @return A resource representing a qualified cardinality restriction, or null + */ + @Override + public CardinalityQRestriction getCardinalityQRestriction( String uri ) { + return (CardinalityQRestriction) findByURIAs( uri, CardinalityQRestriction.class ); + } + + + /** + *

Answer a class description defined as the class of those individuals that have a property + * p, with min cardinality N, all values of which are members of a given class. + * If a resource + * with the given uri exists in the model, and can be viewed as a MinCardinalityQRestriction, return the + * MinCardinalityQRestriction facet, otherwise return null.

+ * + * @param uri The URI for the restriction + * @return A resource representing a qualified min cardinality restriction, or null + */ + @Override + public MinCardinalityQRestriction getMinCardinalityQRestriction( String uri ) { + return (MinCardinalityQRestriction) findByURIAs( uri, MinCardinalityQRestriction.class ); + } + + + /** + *

Answer a class description defined as the class of those individuals that have a property + * p, with max cardinality N, all values of which are members of a given class. + * If a resource + * with the given uri exists in the model, and can be viewed as a MaxCardinalityQRestriction, return the + * MaxCardinalityQRestriction facet, otherwise return null.

+ * + * @param uri The URI for the restriction + * @return A resource representing a qualified max cardinality restriction, or null + */ + @Override + public MaxCardinalityQRestriction getMaxCardinalityQRestriction( String uri ) { + return (MaxCardinalityQRestriction) findByURIAs( uri, MaxCardinalityQRestriction.class ); + } + + + /** + *

+ * Answer a resource that represents an ontology description node in this model. If a resource + * with the given uri exists in the model, it will be re-used. If not, a new one is created in + * the updateable sub-graph of the ontology model. + *

+ * + * @param uri The uri for the ontology node. Conventionally, this corresponds to the base URI + * of the document itself. + * @return An Ontology resource. + */ + @Override + public Ontology createOntology( String uri ) { + checkProfileEntry( getProfile().ONTOLOGY(), "ONTOLOGY" ); + return createOntResource( Ontology.class, getProfile().ONTOLOGY(), uri ); + } + + + /** + *

+ * Answer a resource that represents an Individual node in this model. A new anonymous resource + * will be created in the updateable sub-graph of the ontology model. + *

+ * + * @param cls Resource representing the ontology class to which the individual belongs + * @return A new anonymous Individual of the given class. + */ + @Override + public Individual createIndividual( Resource cls ) { + return createOntResource( Individual.class, cls, null ); + } + + + /** + *

+ * Answer a resource that represents an Individual node in this model. If a resource + * with the given uri exists in the model, it will be re-used. If not, a new one is created in + * the updateable sub-graph of the ontology model. + *
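+     * For example (illustrative; the URIs are hypothetical and {@code m} denotes this model):
+     * <pre>
+     *     OntClass person = m.createClass( "http://example.org/ex#Person" );
+     *     Individual jane = m.createIndividual( "http://example.org/ex#jane", person );
+     * </pre>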

+ * + * @param cls Resource representing the ontology class to which the individual belongs + * @param uri The uri for the individual, or null for an anonymous individual. + * @return An Individual resource. + */ + @Override + public Individual createIndividual( String uri, Resource cls ) { + return createOntResource( Individual.class, cls, uri ); + } + + + /** + *

+ * Answer a resource representing a generic property in this model. Effectively + * this method is an alias for {@link #createProperty( String )}, except that + * the return type is {@link OntProperty}, which allows more convenient access to + * a property's position in the property hierarchy, domain, range, etc. + *

+ * + * @param uri The uri for the property. May not be null. + * @return An OntProperty resource. + */ + @Override + public OntProperty createOntProperty( String uri ) { + Property p = createProperty( uri ); + p.addProperty( RDF.type, getProfile().PROPERTY() ); + return p.as( OntProperty.class ); + } + + + /** + *

+ * Answer a resource representing an object property in this model, + * and that is not a functional property. + *

+ * + * @param uri The uri for the object property. May not be null. + * @return An ObjectProperty resource. + * @see #createObjectProperty( String, boolean ) + */ + @Override + public ObjectProperty createObjectProperty( String uri ) { + return createObjectProperty( uri, false ); + } + + + /** + *

+ * Answer a resource that represents an object property in this model. An object property + * is defined to have a range of individuals, rather than datatypes. + * If a resource + * with the given uri exists in the model, it will be re-used. If not, a new one is created in + * the updateable sub-graph of the ontology model. + *
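+     * For example (a sketch; the URI is hypothetical and {@code m} denotes this model):
+     * <pre>
+     *     // an object property that is also declared functional
+     *     ObjectProperty hasBirthPlace = m.createObjectProperty( "http://example.org/ex#hasBirthPlace", true );
+     * </pre>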

+ * + * @param uri The uri for the object property. May not be null. + * @param functional If true, the resource will also be typed as a {@link FunctionalProperty}, + * that is, a property that has a unique range value for any given domain value. + * @return An ObjectProperty resource, optionally also functional. + */ + @Override + public ObjectProperty createObjectProperty( String uri, boolean functional ) { + checkProfileEntry( getProfile().OBJECT_PROPERTY(), "OBJECT_PROPERTY" ); + ObjectProperty p = createOntResource( ObjectProperty.class, getProfile().OBJECT_PROPERTY(), uri ); + + if (functional) { + checkProfileEntry( getProfile().FUNCTIONAL_PROPERTY(), "FUNCTIONAL_PROPERTY" ); + p.addProperty( RDF.type, getProfile().FUNCTIONAL_PROPERTY() ); + } + + return p; + } + + + /** + *

+ * Answer a resource representing a transitive property

+ * @param uri The uri for the property. May not be null. + * @return An TransitiveProperty resource + * @see #createTransitiveProperty( String, boolean ) + */ + @Override + public TransitiveProperty createTransitiveProperty( String uri ) { + return createTransitiveProperty( uri, false ); + } + + + /** + *

+ * Answer a resource representing a transitive property, which is optionally + * also functional. Note: although it is permitted in OWL full + * to have functional transitive properties, it makes the language undecidable. + * Functional transitive properties are not permitted in OWL Lite or OWL DL.

+ * @param uri The uri for the property. May not be null. + * @param functional If true, the property is also functional + * @return An TransitiveProperty resource, optionally also functional. + */ + @Override + public TransitiveProperty createTransitiveProperty( String uri, boolean functional ) { + checkProfileEntry( getProfile().TRANSITIVE_PROPERTY(), "TRANSITIVE_PROPERTY" ); + TransitiveProperty p = createOntResource( TransitiveProperty.class, getProfile().TRANSITIVE_PROPERTY(), uri ); + + if (functional) { + checkProfileEntry( getProfile().FUNCTIONAL_PROPERTY(), "FUNCTIONAL_PROPERTY" ); + p.addProperty( RDF.type, getProfile().FUNCTIONAL_PROPERTY() ); + } + + return p; + } + + + /** + *

+ * Answer a resource representing a symmetric property

+ * @param uri The uri for the property. May not be null. + * @return An SymmetricProperty resource + * @see #createSymmetricProperty( String, boolean ) + */ + @Override + public SymmetricProperty createSymmetricProperty( String uri ) { + return createSymmetricProperty( uri, false ); + } + + + /** + *

+ * Answer a resource representing a symmetric property, which is optionally + * also functional.

+ * @param uri The uri for the property. May not be null. + * @param functional If true, the property is also functional + * @return An SymmetricProperty resource, optionally also functional. + */ + @Override + public SymmetricProperty createSymmetricProperty( String uri, boolean functional ) { + checkProfileEntry( getProfile().SYMMETRIC_PROPERTY(), "SYMMETRIC_PROPERTY" ); + SymmetricProperty p = createOntResource( SymmetricProperty.class, getProfile().SYMMETRIC_PROPERTY(), uri ); + + if (functional) { + checkProfileEntry( getProfile().FUNCTIONAL_PROPERTY(), "FUNCTIONAL_PROPERTY" ); + p.addProperty( RDF.type, getProfile().FUNCTIONAL_PROPERTY() ); + } + + return p; + } + + + /** + *

+ * Answer a resource representing an inverse functional property

+ * @param uri The uri for the property. May not be null. + * @return An InverseFunctionalProperty resource + * @see #createInverseFunctionalProperty( String, boolean ) + */ + @Override + public InverseFunctionalProperty createInverseFunctionalProperty( String uri ) { + return createInverseFunctionalProperty( uri, false ); + } + + + /** + *

+ * Answer a resource representing an inverse functional property, which is optionally + * also functional.

+ * @param uri The uri for the property. May not be null. + * @param functional If true, the property is also functional + * @return An InverseFunctionalProperty resource, optionally also functional. + */ + @Override + public InverseFunctionalProperty createInverseFunctionalProperty( String uri, boolean functional ) { + checkProfileEntry( getProfile().INVERSE_FUNCTIONAL_PROPERTY(), "INVERSE_FUNCTIONAL_PROPERTY" ); + InverseFunctionalProperty p = createOntResource( InverseFunctionalProperty.class, getProfile().INVERSE_FUNCTIONAL_PROPERTY(), uri ); + + if (functional) { + checkProfileEntry( getProfile().FUNCTIONAL_PROPERTY(), "FUNCTIONAL_PROPERTY" ); + p.addProperty( RDF.type, getProfile().FUNCTIONAL_PROPERTY() ); + } + + return p; + } + + + /** + *

+ * Answer a resource that represents a datatype property in this model, and that is + * not a functional property. + *

+ * + * @param uri The uri for the datatype property. May not be null. + * @return A DatatypeProperty resource. + * @see #createDatatypeProperty( String, boolean ) + */ + @Override + public DatatypeProperty createDatatypeProperty( String uri ) { + return createDatatypeProperty( uri, false ); + } + + + /** + *

+ * Answer a resource that represents a datatype property in this model. A datatype property + * is defined to have a range that is a concrete datatype, rather than an individual. + * If a resource + * with the given uri exists in the model, it will be re-used. If not, a new one is created in + * the updateable sub-graph of the ontology model. + *

+ * + * @param uri The uri for the datatype property. May not be null. + * @param functional If true, the resource will also be typed as a {@link FunctionalProperty}, + * that is, a property that has a unique range value for any given domain value. + * @return A DatatypeProperty resource. + */ + @Override + public DatatypeProperty createDatatypeProperty( String uri, boolean functional ) { + checkProfileEntry( getProfile().DATATYPE_PROPERTY(), "DATATYPE_PROPERTY" ); + DatatypeProperty p = createOntResource( DatatypeProperty.class, getProfile().DATATYPE_PROPERTY(), uri ); + + if (functional) { + checkProfileEntry( getProfile().FUNCTIONAL_PROPERTY(), "FUNCTIONAL_PROPERTY" ); + p.addProperty( RDF.type, getProfile().FUNCTIONAL_PROPERTY() ); + } + + return p; + } + + + /** + *

+ * Answer a resource that represents an annotation property in this model. If a resource + * with the given uri exists in the model, it will be re-used. If not, a new one is created in + * the updateable sub-graph of the ontology model. + *

+ * + * @param uri The uri for the annotation property. + * @return An AnnotationProperty resource. + */ + @Override + public AnnotationProperty createAnnotationProperty( String uri ) { + checkProfileEntry( getProfile().ANNOTATION_PROPERTY(), "ANNOTATION_PROPERTY" ); + return createOntResource( AnnotationProperty.class, getProfile().ANNOTATION_PROPERTY(), uri ); + } + + + /** + *

+ * Answer a resource that represents an anonymous class description in this model. A new + * anonymous resource of rdf:type C, where C is the class type from the + * language profile. + *

+ * + * @return An anonymous Class resource. + */ + @Override + public OntClass createClass() { + checkProfileEntry( getProfile().CLASS(), "CLASS" ); + return createOntResource( OntClass.class, getProfile().CLASS(), null ); + } + + + /** + *

+ * Answer a resource that represents a class description node in this model. If a resource + * with the given uri exists in the model, it will be re-used. If not, a new one is created in + * the updateable sub-graph of the ontology model. + *

+ * + * @param uri The uri for the class node, or null for an anonymous class. + * @return A Class resource. + */ + @Override + public OntClass createClass( String uri ) { + checkProfileEntry( getProfile().CLASS(), "CLASS" ); + return createOntResource( OntClass.class, getProfile().CLASS(), uri ); + } + + + /** + *

+ * Answer a resource representing the class that is the complement of the given argument class
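+     * For example (a sketch; the URIs are hypothetical and {@code m} denotes this model):
+     * <pre>
+     *     OntClass meat = m.createClass( "http://example.org/ex#Meat" );
+     *     ComplementClass vegetarianFood = m.createComplementClass( "http://example.org/ex#VegetarianFood", meat );
+     * </pre>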

+ * @param uri The URI of the new complement class, or null for an anonymous class description. + * @param cls Resource denoting the class that the new class is a complement of + * @return A complement class + */ + @Override + public ComplementClass createComplementClass( String uri, Resource cls ) { + checkProfileEntry( getProfile().CLASS(), "CLASS" ); + OntClass c = createOntResource( OntClass.class, getProfile().CLASS(), uri ); + + checkProfileEntry( getProfile().COMPLEMENT_OF(), "COMPLEMENT_OF" ); + // if the class that this class is a complement of is not specified, use owl:nothing or daml:nothing + c.addProperty( getProfile().COMPLEMENT_OF(), (cls == null) ? getProfile().NOTHING() : cls ); + + return c.as( ComplementClass.class ); + } + + + /** + *

+ * Answer a resource representing the class that is the enumeration of the given list of individuals

+ * @param uri The URI of the new enumeration class, or null for an anonymous class description. + * @param members An optional list of resources denoting the individuals in the enumeration + * @return An enumeration class + */ + @Override + public EnumeratedClass createEnumeratedClass( String uri, RDFList members ) { + checkProfileEntry( getProfile().CLASS(), "CLASS" ); + OntClass c = createOntResource( OntClass.class, getProfile().CLASS(), uri ); + + checkProfileEntry( getProfile().ONE_OF(), "ONE_OF" ); + c.addProperty( getProfile().ONE_OF(), (members == null) ? createList() : members ); + + return c.as( EnumeratedClass.class ); + } + + + /** + *

+ * Answer a resource representing the class that is the union of the given list of class descriptions
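+     * For example (a sketch; the URIs are hypothetical and {@code m} denotes this model):
+     * <pre>
+     *     OntClass cat = m.createClass( "http://example.org/ex#Cat" );
+     *     OntClass dog = m.createClass( "http://example.org/ex#Dog" );
+     *     UnionClass pet = m.createUnionClass( null, m.createList( new RDFNode[] {cat, dog} ) );
+     * </pre>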

+ * @param uri The URI of the new union class, or null for an anonymous class description. + * @param members A list of resources denoting the classes that comprise the union + * @return A union class description + */ + @Override + public UnionClass createUnionClass( String uri, RDFList members ) { + checkProfileEntry( getProfile().CLASS(), "CLASS" ); + OntClass c = createOntResource( OntClass.class, getProfile().CLASS(), uri ); + + checkProfileEntry( getProfile().UNION_OF(), "UNION_OF" ); + c.addProperty( getProfile().UNION_OF(), (members == null) ? createList() : members ); + + return c.as( UnionClass.class ); + } + + + /** + *

+ * Answer a resource representing the class that is the intersection of the given list of class descriptions.

+ * @param uri The URI of the new intersection class, or null for an anonymous class description. + * @param members A list of resources denoting the classes that comprise the intersection + * @return An intersection class description + */ + @Override + public IntersectionClass createIntersectionClass( String uri, RDFList members ) { + checkProfileEntry( getProfile().CLASS(), "CLASS" ); + OntClass c = createOntResource( OntClass.class, getProfile().CLASS(), uri ); + + checkProfileEntry( getProfile().INTERSECTION_OF(), "INTERSECTION_OF" ); + c.addProperty( getProfile().INTERSECTION_OF(), (members == null) ? createList() : members ); + + return c.as( IntersectionClass.class ); + } + + + /** + *

+ * Answer a resource that represents an anonymous property restriction in this model. A new + * anonymous resource of rdf:type R, where R is the restriction type from the + * language profile. + *

+ * + * @param p The property that is restricted by this restriction + * @return An anonymous Restriction resource. + */ + @Override + public Restriction createRestriction( Property p ) { + checkProfileEntry( getProfile().RESTRICTION(), "RESTRICTION" ); + Restriction r = createOntResource( Restriction.class, getProfile().RESTRICTION(), null ); + if (p != null) { + r.setOnProperty( p ); + } + + return r; + } + + + /** + *

+ * Answer a resource that represents a property restriction in this model. If a resource + * with the given uri exists in the model, it will be re-used. If not, a new one is created in + * the updateable sub-graph of the ontology model. + *

+ * + * @param uri The uri for the restriction node, or null for an anonymous restriction. + * @param p The property that is restricted by this restriction + * @return A Restriction resource. + */ + @Override + public Restriction createRestriction( String uri, Property p ) { + checkProfileEntry( getProfile().RESTRICTION(), "RESTRICTION" ); + Restriction r = createOntResource( Restriction.class, getProfile().RESTRICTION(), uri ); + if (p != null) { + r.setOnProperty( p ); + } + + return r; + } + + + /** + *

+ * Answer a class description defined as the class of those individuals that have the given + * resource as the value of the given property

+ * + * @param uri The optional URI for the restriction, or null for an anonymous restriction (which + * should be the normal case) + * @param prop The property the restriction applies to + * @param value The value of the property, as a resource or RDF literal + * @return A new resource representing a has-value restriction + */ + @Override + public HasValueRestriction createHasValueRestriction( String uri, Property prop, RDFNode value ) { + checkProfileEntry( getProfile().RESTRICTION(), "RESTRICTION" ); + Restriction r = createOntResource( Restriction.class, getProfile().RESTRICTION(), uri ); + + if (prop == null || value == null) { + throw new IllegalArgumentException( "Cannot create hasValueRestriction with a null property or value" ); + } + + checkProfileEntry( getProfile().HAS_VALUE(), "HAS_VALUE" ); + r.addProperty( getProfile().ON_PROPERTY(), prop ); + r.addProperty( getProfile().HAS_VALUE(), value ); + + return r.as( HasValueRestriction.class ); + } + + + /** + *

+ * Answer a class description defined as the class of those individuals that have at least + * one property with a value belonging to the given class
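+     * For example (illustrative; the names are hypothetical and {@code m} denotes this model):
+     * <pre>
+     *     ObjectProperty hasPet = m.createObjectProperty( "http://example.org/ex#hasPet" );
+     *     OntClass dog = m.createClass( "http://example.org/ex#Dog" );
+     *     SomeValuesFromRestriction dogOwner = m.createSomeValuesFromRestriction( null, hasPet, dog );
+     * </pre>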

+ * + * @param uri The optional URI for the restriction, or null for an anonymous restriction (which + * should be the normal case) + * @param prop The property the restriction applies to + * @param cls The class to which at least one value of the property belongs + * @return A new resource representing a some-values-from restriction + */ + @Override + public SomeValuesFromRestriction createSomeValuesFromRestriction( String uri, Property prop, Resource cls ) { + checkProfileEntry( getProfile().RESTRICTION(), "RESTRICTION" ); + Restriction r = createOntResource( Restriction.class, getProfile().RESTRICTION(), uri ); + + if (prop == null || cls == null) { + throw new IllegalArgumentException( "Cannot create someValuesFromRestriction with a null property or class" ); + } + + checkProfileEntry( getProfile().SOME_VALUES_FROM(), "SOME_VALUES_FROM" ); + r.addProperty( getProfile().ON_PROPERTY(), prop ); + r.addProperty( getProfile().SOME_VALUES_FROM(), cls ); + + return r.as( SomeValuesFromRestriction.class ); + } + + + /** + *

+ * Answer a class description defined as the class of those individuals for which all values + * of the given property belong to the given class

+ * + * @param uri The optional URI for the restriction, or null for an anonymous restriction (which + * should be the normal case) + * @param prop The property the restriction applies to + * @param cls The class to which any value of the property belongs + * @return A new resource representing an all-values-from restriction + */ + @Override + public AllValuesFromRestriction createAllValuesFromRestriction( String uri, Property prop, Resource cls ) { + checkProfileEntry( getProfile().RESTRICTION(), "RESTRICTION" ); + Restriction r = createOntResource( Restriction.class, getProfile().RESTRICTION(), uri ); + + if (prop == null || cls == null) { + throw new IllegalArgumentException( "Cannot create allValuesFromRestriction with a null property or class" ); + } + + checkProfileEntry( getProfile().ALL_VALUES_FROM(), "ALL_VALUES_FROM" ); + r.addProperty( getProfile().ON_PROPERTY(), prop ); + r.addProperty( getProfile().ALL_VALUES_FROM(), cls ); + + return r.as( AllValuesFromRestriction.class ); + } + + + /** + *

+ * Answer a class description defined as the class of those individuals that have exactly + * the given number of values for the given property.
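+     * For example (illustrative; {@code hasPet} is a hypothetical property already defined in {@code m}):
+     * <pre>
+     *     CardinalityRestriction exactlyOnePet = m.createCardinalityRestriction( null, hasPet, 1 );
+     * </pre>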

+ * + * @param uri The optional URI for the restriction, or null for an anonymous restriction (which + * should be the normal case) + * @param prop The property the restriction applies to + * @param cardinality The exact cardinality of the property + * @return A new resource representing a cardinality restriction + */ + @Override + public CardinalityRestriction createCardinalityRestriction( String uri, Property prop, int cardinality ) { + checkProfileEntry( getProfile().RESTRICTION(), "RESTRICTION" ); + Restriction r = createOntResource( Restriction.class, getProfile().RESTRICTION(), uri ); + + if (prop == null) { + throw new IllegalArgumentException( "Cannot create cardinalityRestriction with a null property" ); + } + + checkProfileEntry( getProfile().CARDINALITY(), "CARDINALITY" ); + r.addProperty( getProfile().ON_PROPERTY(), prop ); + r.addProperty( getProfile().CARDINALITY(), createTypedLiteral( cardinality ) ); + + return r.as( CardinalityRestriction.class ); + } + + + /** + *

+ * Answer a class description defined as the class of those individuals that have at least + * the given number of values for the given property.

+ * + * @param uri The optional URI for the restriction, or null for an anonymous restriction (which + * should be the normal case) + * @param prop The property the restriction applies to + * @param cardinality The minimum cardinality of the property + * @return A new resource representing a min-cardinality restriction + */ + @Override + public MinCardinalityRestriction createMinCardinalityRestriction( String uri, Property prop, int cardinality ) { + checkProfileEntry( getProfile().RESTRICTION(), "RESTRICTION" ); + Restriction r = createOntResource( Restriction.class, getProfile().RESTRICTION(), uri ); + + if (prop == null) { + throw new IllegalArgumentException( "Cannot create minCardinalityRestriction with a null property" ); + } + + checkProfileEntry( getProfile().MIN_CARDINALITY(), "MIN_CARDINALITY" ); + r.addProperty( getProfile().ON_PROPERTY(), prop ); + r.addProperty( getProfile().MIN_CARDINALITY(), createTypedLiteral( cardinality ) ); + + return r.as( MinCardinalityRestriction.class ); + } + + + /** + *

+ * Answer a class description defined as the class of those individuals that have at most + * the given number of values for the given property.

+ * + * @param uri The optional URI for the restriction, or null for an anonymous restriction (which + * should be the normal case) + * @param prop The property the restriction applies to + * @param cardinality The maximum cardinality of the property + * @return A new resource representing a max-cardinality restriction + */ + @Override + public MaxCardinalityRestriction createMaxCardinalityRestriction( String uri, Property prop, int cardinality ) { + checkProfileEntry( getProfile().RESTRICTION(), "RESTRICTION" ); + Restriction r = createOntResource( Restriction.class, getProfile().RESTRICTION(), uri ); + + if (prop == null) { + throw new IllegalArgumentException( "Cannot create maxCardinalityRestriction with a null property" ); + } + + checkProfileEntry( getProfile().MAX_CARDINALITY(), "MAX_CARDINALITY" ); + r.addProperty( getProfile().ON_PROPERTY(), prop ); + r.addProperty( getProfile().MAX_CARDINALITY(), createTypedLiteral( cardinality ) ); + + return r.as( MaxCardinalityRestriction.class ); + } + + + /** + *

Answer a class description defined as the class of those individuals that have at most + * the given number of values for the given property, all values of which belong to the given + * class.

+ * + * @param uri The optional URI for the restriction, or null for an anonymous restriction (which + * should be the normal case) + * @param prop The property the restriction applies to + * @param cardinality The maximum cardinality of the property + * @param cls The class to which all values of the restricted property should belong + * @return A new resource representing a qualified max-cardinality restriction + */ + @Override + public MaxCardinalityQRestriction createMaxCardinalityQRestriction( String uri, Property prop, int cardinality, OntClass cls ) { + checkProfileEntry( getProfile().RESTRICTION(), "RESTRICTION" ); + checkProfileEntry( getProfile().ON_PROPERTY(), "ON_PROPERTY" ); + checkProfileEntry( getProfile().MAX_CARDINALITY_Q(), "MAX_CARDINALITY_Q" ); + checkProfileEntry( getProfile().HAS_CLASS_Q(), "HAS_CLASS_Q" ); + + if (prop == null) { + throw new IllegalArgumentException( "Cannot create MaxCardinalityQRestriction with a null property" ); + } + if (cls == null) { + throw new IllegalArgumentException( "Cannot create MaxCardinalityQRestriction with a null class" ); + } + + Restriction r = createOntResource( Restriction.class, getProfile().RESTRICTION(), uri ); + + r.addProperty( getProfile().ON_PROPERTY(), prop ); + r.addProperty( getProfile().MAX_CARDINALITY_Q(), createTypedLiteral( cardinality ) ); + r.addProperty( getProfile().HAS_CLASS_Q(), cls ); + + return r.as( MaxCardinalityQRestriction.class ); + } + + + /** + *

Answer a class description defined as the class of those individuals that have at least + * the given number of values for the given property, all values of which belong to the given + * class.

+ * + * @param uri The optional URI for the restriction, or null for an anonymous restriction (which + * should be the normal case) + * @param prop The property the restriction applies to + * @param cardinality The minimum cardinality of the property + * @param cls The class to which all values of the restricted property should belong + * @return A new resource representing a qualified min-cardinality restriction + */ + @Override + public MinCardinalityQRestriction createMinCardinalityQRestriction( String uri, Property prop, int cardinality, OntClass cls ) { + checkProfileEntry( getProfile().RESTRICTION(), "RESTRICTION" ); + checkProfileEntry( getProfile().ON_PROPERTY(), "ON_PROPERTY" ); + checkProfileEntry( getProfile().MIN_CARDINALITY_Q(), "MIN_CARDINALITY_Q" ); + checkProfileEntry( getProfile().HAS_CLASS_Q(), "HAS_CLASS_Q" ); + + if (prop == null) { + throw new IllegalArgumentException( "Cannot create MinCardinalityQRestriction with a null property" ); + } + if (cls == null) { + throw new IllegalArgumentException( "Cannot create MinCardinalityQRestriction with a null class" ); + } + + Restriction r = createOntResource( Restriction.class, getProfile().RESTRICTION(), uri ); + + r.addProperty( getProfile().ON_PROPERTY(), prop ); + r.addProperty( getProfile().MIN_CARDINALITY_Q(), createTypedLiteral( cardinality ) ); + r.addProperty( getProfile().HAS_CLASS_Q(), cls ); + + return r.as( MinCardinalityQRestriction.class ); + } + + + /** + *

Answer a class description defined as the class of those individuals that have exactly + * the given number of values for the given property, all values of which belong to the given + * class.

+ * + * @param uri The optional URI for the restriction, or null for an anonymous restriction (which + * should be the normal case) + * @param prop The property the restriction applies to + * @param cardinality The cardinality of the property + * @param cls The class to which all values of the restricted property should belong + * @return A new resource representing a qualified cardinality restriction + */ + @Override + public CardinalityQRestriction createCardinalityQRestriction( String uri, Property prop, int cardinality, OntClass cls ) { + checkProfileEntry( getProfile().RESTRICTION(), "RESTRICTION" ); + checkProfileEntry( getProfile().ON_PROPERTY(), "ON_PROPERTY" ); + checkProfileEntry( getProfile().CARDINALITY_Q(), "CARDINALITY_Q" ); + checkProfileEntry( getProfile().HAS_CLASS_Q(), "HAS_CLASS_Q" ); + + if (prop == null) { + throw new IllegalArgumentException( "Cannot create CardinalityQRestriction with a null property" ); + } + if (cls == null) { + throw new IllegalArgumentException( "Cannot create CardinalityQRestriction with a null class" ); + } + + Restriction r = createOntResource( Restriction.class, getProfile().RESTRICTION(), uri ); + + r.addProperty( getProfile().ON_PROPERTY(), prop ); + r.addProperty( getProfile().CARDINALITY_Q(), createTypedLiteral( cardinality ) ); + r.addProperty( getProfile().HAS_CLASS_Q(), cls ); + + return r.as( CardinalityQRestriction.class ); + } + + + /** + *

+     * Answer a data range defined as the given set of concrete data values. DataRange resources
+     * are necessarily bNodes.

+ * + * @param literals An iterator over a set of literals that will be the members of the data range, + * or null to define an empty data range + * @return A new data range containing the given literals as permissible values + */ + @Override + public DataRange createDataRange( RDFList literals ) { + checkProfileEntry( getProfile().DATARANGE(), "DATARANGE" ); + DataRange d = createOntResource( DataRange.class, getProfile().DATARANGE(), null ); + + checkProfileEntry( getProfile().ONE_OF(), "ONE_OF" ); + d.addProperty( getProfile().ONE_OF(), (literals == null) ? createList() : literals ); + + return d; + } + + + /** + *

+     *
+     * Answer a new, anonymous node representing the fact that a given set of classes are all
+     * pair-wise distinct. AllDifferent is a feature of OWL only, and is something
+     * of an anomaly in that it exists only to give a place to anchor the distinctMembers
+     * property, which is the actual expression of the fact.
+     *

+ * + * @return A new AllDifferent resource + */ + @Override + public AllDifferent createAllDifferent() { + return createAllDifferent( null ); + } + + + /** + *

+     *
+     * Answer a new, anonymous node representing the fact that a given set of classes are all
+     * pair-wise distinct. AllDifferent is a feature of OWL only, and is something
+     * of an anomaly in that it exists only to give a place to anchor the distinctMembers
+     * property, which is the actual expression of the fact.
+     *

+ * @param differentMembers A list of the class expressions that denote a set of mutually disjoint classes + * @return A new AllDifferent resource + */ + @Override + public AllDifferent createAllDifferent( RDFList differentMembers ) { + checkProfileEntry( getProfile().ALL_DIFFERENT(), "ALL_DIFFERENT" ); + AllDifferent ad = createOntResource( AllDifferent.class, getProfile().ALL_DIFFERENT(), null ); + + ad.setDistinctMembers( (differentMembers == null) ? createList() : differentMembers ); + + return ad; + } + + + /** + *

+ * Answer a resource that represents a generic ontology node in this model. If a resource + * with the given uri exists in the model, it will be re-used. If not, a new one is created in + * the updateable sub-graph of the ontology model. + *

+ *

+     *
+     * This is a generic method for creating any known ontology value. The selector that determines
+     * which resource to create is the same as the argument to the {@link RDFNode#as as()}
+     * method: the Java class object of the desired abstraction. For example, to create an
+     * ontology class via this mechanism, use:
+     *

+     *     OntClass c = (OntClass) myModel.createOntResource( OntClass.class, null,
+     *                                                        "http://example.org/ex#Parrot" );
+     * 
+ *

+ * + * @param javaClass The Java class object that represents the ontology abstraction to create + * @param rdfType Optional resource denoting the ontology class to which an individual or + * axiom belongs, if that is the type of resource being created. + * @param uri The uri for the ontology resource, or null for an anonymous resource. + * @return An ontology resource, of the type specified by the javaClass + */ + @Override + public T createOntResource( Class javaClass, Resource rdfType, String uri ) { + return getResourceWithType( uri, rdfType ).as( javaClass ); + } + + /** + *

Answer a resource presenting the {@link OntResource} facet, which has the + * given URI.

+ * @param uri The URI of the resource, or null for an anonymous resource (aka bNode) + * @return An OntResource with the given URI + */ + @Override + public OntResource createOntResource( String uri ) { + return getResource( uri ).as( OntResource.class ); + } + + + /** + *

Answer a new empty list. This method overrides the list create method in ModelCom, + * to allow both DAML and RDFS lists to be created.

+ * @return An RDF-encoded list of no elements, using the current language profile + */ + @Override + public RDFList createList() { + Resource list = getResource( getProfile().NIL().getURI() ); + + return list.as( RDFList.class ); + } + + + /** + *

+ * Answer the language profile (for example, OWL or DAML+OIL) that this model is + * working to. + *

+ * + * @return A language profile + */ + @Override + public Profile getProfile() { + return m_spec.getProfile(); + } + + + /** + *

+     * Determine which models this model imports (by looking for, for example,
+     * owl:imports statements), and load each of those models as an
+     * import. A check is made to determine if a model has already been imported;
+     * if so, the import is ignored. Thus this method is safe against circular
+     * sets of import statements. Note that the actual implementation is delegated to
+     * the associated {@link OntDocumentManager}.
+     */
+    @Override
+    public void loadImports() {
+        // load the imports closure, according to the policies in my document manager
+        getDocumentManager().loadImports( this );
+    }
+
+
+    /**
+     *

+ * Answer true if this model has had the given URI document imported into it. This is + * important to know since an import only occurs once, and we also want to be able to + * detect cycles of imports. + *

+ * + * @param uri An ontology URI + * @return True if the document corresponding to the URI has been successfully loaded + * into this model + */ + @Override + public boolean hasLoadedImport( String uri ) { + return m_imported.contains( uri ); + } + + + /** + *

+ * Record that this model has now imported the document with the given + * URI, so that it will not be re-imported in the future. + *

+ * + * @param uri A document URI that has now been imported into the model. + */ + @Override + public void addLoadedImport( String uri ) { + m_imported.add( uri ); + } + + + /** + *

+ * Record that this model no longer imports the document with the given + * URI. + *

+ * + * @param uri A document URI that is no longer imported into the model. + */ + @Override + public void removeLoadedImport( String uri ) { + m_imported.remove( uri ); + } + + + /** + *

+     *
+     * Answer a list of the imported URIs in this ontology model. Detection of imports
+     * statements will be according to the local language profile.
+     *

+ * + * @return The imported ontology URI's as a set. Note that since the underlying graph is + * not ordered, the order of values in the list in successive calls to this method is + * not guaranteed to be preserved. + */ + @Override + public Set listImportedOntologyURIs() { + return listImportedOntologyURIs( false ); + } + + + /** + *

+     *
+     * Answer a list of the imported URIs in this ontology model, and optionally in the closure
+     * of this model's imports. Detection of imports
+     * statements will be according to the local language profile. Note that, in order to allow this
+     * method to be called during the imports closure process, we only query the base model,
+     * thus side-stepping any attached reasoner.
+     *

+ * @param closure If true, the set of uri's returned will include not only those directly + * imported by this model, but those imported by the model's imports transitively. + * @return The imported ontology URI's as a list. Note that since the underlying graph is + * not ordered, the order of values in the list in successive calls to this method is + * not guaranteed to be preserved. + */ + @Override + public Set listImportedOntologyURIs( boolean closure ) { + Set results = new HashSet(); + List queue = new ArrayList(); + queue.add( getBaseModel() ); + + while (!queue.isEmpty()) { + Model m = queue.remove( 0 ); + + // list the ontology nodes + if (getProfile().ONTOLOGY() != null && getProfile().IMPORTS() != null) { + StmtIterator i = m.listStatements(null, getProfile().IMPORTS(), (RDFNode)null); + while (i.hasNext()) { + Statement s = i.nextStatement(); + String uri = s.getResource().getURI(); + + if (!results.contains( uri )) { + // this is a new uri, so we add it + results.add( uri ); + + // and push the model on the stack if we know it + Model mi = getDocumentManager().getModel( uri ); + if (closure && mi != null && !queue.contains( mi )) { + queue.add( mi ); + } + } + } + } + } + + return results; + } + + + /** + *
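// A minimal usage sketch of the two listImportedOntologyURIs() variants above; the
// model "m" is assumed to be an OntModel backed by this implementation.
import com.hp.hpl.jena.ontology.OntModel;
import java.util.Set;

class ImportsUriExample {
    static void dumpImports(OntModel m) {
        Set<String> direct = m.listImportedOntologyURIs();      // directly imported documents only
        Set<String> closure = m.listImportedOntologyURIs(true); // transitive imports closure
        System.out.println("direct=" + direct + " closure=" + closure);
    }
}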

+ * Answer the model maker associated with this model (used for constructing the + * constituent models of the imports closure). + *

+ * + * @return The local graph factory + */ + @Override + public ModelMaker getImportModelMaker() { + return m_spec.getImportModelMaker(); + } + + /** + @deprecated use getImportModelMaker instead. + */ + @Override + @Deprecated + public ModelMaker getModelMaker() { + return getImportModelMaker(); + } + + /** + *

Read statements into the model from the given source, and then load + * imported ontologies (according to the document manager policy).

+ * @param uri URI to read from, may be mapped to a local source by the document manager + */ + @Override + public Model read( String uri ) { + return read( uri, null, null ); + } + + /** + *

Read statements into the model from the given source, and then load + * imported ontologies (according to the document manager policy).

+ * @param reader An input reader + * @param base The base URI + */ + @Override + public Model read( Reader reader, String base ) { + super.read( reader, base ); + + loadImports(); + rebind(); + return this; + } + + /** + *

Read statements into the model from the given source, and then load + * imported ontologies (according to the document manager policy).

+ * @param reader An input stream + * @param base The base URI + */ + @Override + public Model read(InputStream reader, String base) { + super.read( reader, base ); + + loadImports(); + rebind(); + return this; + } + + /** + *

Read statements into the model from the given source, and then load + * imported ontologies (according to the document manager policy).

+ * @param uri URI to read from, may be mapped to a local source by the document manager + * @param syntax The source syntax + * @return This model, to allow chaining calls + */ + @Override + public Model read( String uri, String syntax ) { + return read( uri, null, syntax ); + } + + /** + *

Read statements into the model from the given source, and then load + * imported ontologies (according to the document manager policy).

+ * @param uri URI to read from, may be mapped to a local source by the document manager + * @param base The base URI for this model + * @param syntax The source syntax + * @return This model, to allow chaining calls + */ + @Override + public Model read( String uri, String base, String syntax ) { + // we don't want to load this document again if imported by one of the imports + addLoadedImport( uri ); + + OntDocumentManager odm = getDocumentManager(); + + String sourceURL = odm.doAltURLMapping( uri ); + + // invoke the read hook from the ODM + String source = odm.getReadHook().beforeRead( this, sourceURL, odm ); + if (source == null) { + s_log.warn( "ReadHook returned null, so skipping assuming previous value: " + sourceURL ); + source = sourceURL; + } + else { + // now we can actually do the read, check first if we should use negotiation + if (base == null && // require non-null base + !ignoreFileURI( source ) && // and that negotiation makes sense (don't conneg to file:) + source.equals( uri ) // and that we haven't remapped the URI + ) + { + if (syntax == null ) { + readDelegate( source ); + } + else { + readDelegate( source, syntax ); + } + } + else { + // if we were given the base, use it ... otherwise default to the base being the source + readDelegate( source, (base == null ? uri : base), syntax ); + } + } + + // the post read hook + odm.getReadHook().afterRead( this, source, odm ); + + // cache this model against the public uri (if caching enabled) + getDocumentManager().addModel( uri, this ); + + loadImports(); + rebind(); + return this; + } + + /** + *
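// A minimal usage sketch of the read-then-loadImports behaviour implemented above.
// The model spec and URI are illustrative assumptions, not taken from this changeset.
import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.ontology.OntModelSpec;
import com.hp.hpl.jena.rdf.model.ModelFactory;

class ReadWithImportsExample {
    static OntModel readWithImports(String uri) {
        OntModel m = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM);
        // read() fetches the document (possibly via an alt-URL mapping in the
        // OntDocumentManager), loads its owl:imports closure, and rebinds any reasoner
        m.read(uri);
        return m;
    }
}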

Read statements into the model from the given source, and then load + * imported ontologies (according to the document manager policy).

+ * @param reader An input reader + * @param base The base URI + * @param syntax The source syntax + * @return This model, to allow chaining calls + */ + @Override + public Model read(Reader reader, String base, String syntax) { + super.read( reader, base, syntax ); + + loadImports(); + rebind(); + return this; + } + + /** + *

Read statements into the model from the given source, and then load + * imported ontologies (according to the document manager policy).

+ * @param reader An input stream + * @param base The base URI + * @param syntax The source syntax + * @return This model, to allow chaining calls + */ + @Override + public Model read(InputStream reader, String base, String syntax) { + super.read( reader, base, syntax ); + + loadImports(); + rebind(); + return this; + } + + + /** + *

+ * Answer the sub-graphs of this model. A sub-graph is defined as a graph that + * is used to contain the triples from an imported document. + *

+ * + * @return A list of sub graphs for this ontology model + */ + @Override + public List getSubGraphs() { + return getUnionGraph().getSubGraphs(); + } + + + /** + *

Answer an iterator over the ontologies that this ontology imports, + * each of which will have been wrapped as an ontology model using the same + * {@link OntModelSpec} as this model. If this model has no imports, + * the iterator will be non-null but will not have any values.

+ * @return An iterator, each value of which will be an OntModel + * representing an imported ontology. + * @deprecated This method has been re-named to listSubModels, + * but note that to obtain the same behaviour as listImportedModels + * from Jena 2.4 and earlier, callers should invoke {@link #listSubModels(boolean)} + * with parameter true. + * @see #listSubModels() + * @see #listSubModels(boolean) + */ + @Override + @Deprecated + public ExtendedIterator listImportedModels() { + return listSubModels( true ); + } + + + /** + *

Answer an iterator over the ontology models that are sub-models of + * this model. Sub-models are used, for example, to represent composite + * documents such as the imports of a model. So if ontology A imports + * ontologies B and C, each of B and C will be available as one of + * the sub-models of the model containing A. This method replaces the + * older {@link #listImportedModels}. Note that to fully replicate + * the behaviour of listImportedModels, the + * withImports flag must be set to true. Each model + * returned by this method will have been wrapped as an ontology model using the same + * {@link OntModelSpec} as this model. If this model has no sub-models, + * the returned iterator will be non-null but will not have any values.

+     *
+     * @param withImports If true, each sub-model returned by this method
+     * will also include its import models. So if model A imports D, and D
+     * imports E, when called with withImports set to true, the
+     * return value for modelA.listSubModels(true) will be an
+     * iterator, whose only value is a model for D, and that model will contain
+     * a sub-model representing the import of E. If withImports
+     * is false, E will not be included as a sub-model of D.
+     * @return An iterator, each value of which will be an OntModel
+     * representing a sub-model of this ontology.
+     */
+    @Override
+    public ExtendedIterator listSubModels( final boolean withImports ) {
+        ExtendedIterator i = WrappedIterator.create( getSubGraphs().iterator() );
+
+        return i.mapWith( new Map1() {
+            @Override
+            public OntModel map1( Graph o ) {
+                Model base = ModelFactory.createModelForGraph( o );
+                OntModel om = new OntModelImpl( m_spec, base );
+                return om;
+            }} );
+    }
+
+
+    /**
+     *

+     * Answer an iterator over the ontology models that are sub-models of
+     * this model. Sub-models are used, for example, to represent composite
+     * documents such as the imports of a model. So if ontology A imports
+     * ontologies B and C, each of B and C will be available as one of
+     * the sub-models of the model containing A.
+     * Important note on behaviour change: please see
+     * the comment on {@link #listSubModels(boolean)} for explanation
+     * of the withImports flag. This zero-argument form
+     * of listSubModels sets withImports to
+     * false, so the returned models will not themselves contain imports.
+     * This behaviour differs from the zero-argument method
+     * {@link #listImportedModels()} in Jena 2.4 and earlier.

+ * @return An iterator, each value of which will be an OntModel + * representing a sub-model of this ontology. + * @see #listSubModels(boolean) + */ + @Override + public ExtendedIterator listSubModels() { + return listSubModels( false ); + } + + + /** + *
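// A minimal sketch of iterating the sub-models exposed by listSubModels(); the
// printed summary is purely illustrative.
import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.util.iterator.ExtendedIterator;

class SubModelWalkExample {
    static void walk(OntModel m) {
        ExtendedIterator<OntModel> it = m.listSubModels(true); // include each sub-model's own imports
        try {
            while (it.hasNext()) {
                OntModel sub = it.next();
                System.out.println("sub-model with " + sub.size() + " statements");
            }
        } finally {
            it.close(); // release the iterator if it was not exhausted
        }
    }
}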

Answer the number of sub-models of this model, not including the + * base model.

+ * @return The number of sub-models, ≥ zero. + */ + @Override + public int countSubModels() { + int count = 0; + for (Iterator i = getSubGraphs().iterator(); i.hasNext(); ) { + count++; + i.next(); + } + return count; + } + + /** + *

Answer an OntModel representing the imported ontology + * with the given URI. If an ontology with that URI has not been imported, + * answer null.

+ * @param uri The URI of an ontology that may have been imported into the + * ontology represented by this model + * @return A model representing the imported ontology with the given URI, or + * null. + */ + @Override + public OntModel getImportedModel( String uri ) { + if (listImportedOntologyURIs( true ).contains( uri )) { + Model mi = getDocumentManager().getModel( uri ); + + if (mi != null) { + if (mi instanceof OntModel) { + // already a suitable ont model + return (OntModel) mi; + } + else { + // not in ont-model clothing yet, so re-wrap + return ModelFactory.createOntologyModel( m_spec, mi ); + } + } + } + + return null; + } + + + /** + *

+ * Answer the base-graph of this model. The base-graph is the graph that + * contains the triples read from the source document for this ontology. + *

+ * + * @return The base-graph for this ontology model + */ + public Graph getBaseGraph() { + return getUnionGraph().getBaseGraph(); + } + + + /** + *

+ * Answer the base model of this model. The base model is the model wrapping + * the graph that contains the triples read from the source document for this + * ontology. It is therefore the model that will be updated if statements are + * added to a model that is built from a union of documents (via the + * imports statements in the source document). + *

+ * + * @return The base model for this ontology model + */ + @Override + public Model getBaseModel() { + return ModelFactory.createModelForGraph( getBaseGraph() ); + } + + + /** + *

+ * Add the given model as one of the sub-models of the enclosed ontology union model. + * Note that if model is a composite model (i.e. an + * {@link OntModel} or {@link InfModel}), the model and all of its submodels will + * be added to the union of sub-models of this model. If this is not required, + * callers should explicitly add only the base model: + *

+ *
+     * parent.addSubModel( child.getBaseModel() );
+     * 
+ * + * @param model A sub-model to add + */ + @Override + public void addSubModel( Model model) { + addSubModel( model, true ); + } + + + /** + *

+ * Add the given model as one of the sub-models of the enclosed ontology union model. + * Note that if model is a composite model (i.e. an + * {@link OntModel} or {@link InfModel}), the model and all of its submodels will + * be added to the union of sub-models of this model. If this is not required, + * callers should explicitly add only the base model: + *

+ *
+     * parent.addSubModel( child.getBaseModel(), true );
+     * 
+ * + * @param model A sub-model to add + * @param rebind If true, rebind any associated inferencing engine to the new data (which + * may be an expensive operation) + */ + @Override + public void addSubModel( Model model, boolean rebind ) { + getUnionGraph().addGraph( model.getGraph() ); + if (rebind) { + rebind(); + } + } + + + /** + *
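// A minimal sketch of the pattern recommended above: add only each child's base model,
// defer rebinding while several sub-models are attached, then rebind once at the end.
// The parent and child models are illustrative assumptions.
import com.hp.hpl.jena.ontology.OntModel;

class AddSubModelExample {
    static void attach(OntModel parent, OntModel childA, OntModel childB) {
        parent.addSubModel(childA.getBaseModel(), false); // defer the (possibly expensive) rebind
        parent.addSubModel(childB.getBaseModel(), false);
        parent.rebind();                                  // rebind the inference engine once
    }
}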

+     *
+     * Remove the given model as one of the sub-models of the enclosed ontology union model. Will
+     * cause the associated inference engine (if any) to update, so this may be
+     * an expensive operation in some cases.
+     *

+ * + * @param model A sub-model to remove + * @see #addSubModel( Model, boolean ) + */ + @Override + public void removeSubModel( Model model ) { + removeSubModel( model, true ); + } + + + /** + *

+ * Remove the given model as one of the sub-models of the enclosed ontology union model. + *

+ * + * @param model A sub-model to remove + * @param rebind If true, rebind any associated inferencing engine to the new data (which + * may be an expensive operation) + */ + @Override + public void removeSubModel( Model model, boolean rebind ) { + Graph subG = model.getGraph(); + getUnionGraph().removeGraph( subG ); + + // note that it may be the base graph of the given model that was added + // originally + if (subG instanceof MultiUnion) { + // we need to get the base graph when removing a ontmodel + getUnionGraph().removeGraph( ((MultiUnion) subG).getBaseGraph() ); + } + + if (rebind) { + rebind(); + } + } + + + /** + *

+     * Answer true if the given node is a member of the base model of this ontology model.
+     * This is an important distinction, because only the base model receives updates when the
+     * ontology model is updated. Thus, removing properties of a resource that is not in the base
+     * model will not actually affect the overall model.

+ * @param node An RDF node (Resource, Property or Literal) to test + * @return True if the given node is from the base model + */ + @Override + public boolean isInBaseModel( RDFNode node ) { + Node n = node.asNode(); + Graph b = getBaseGraph(); + return b.contains( n, Node.ANY, Node.ANY ) || + b.contains( Node.ANY, n, Node.ANY ) || + b.contains( Node.ANY, Node.ANY, n ); + } + + + /** + *

+     * Answer true if the given statement is defined in the base model of this ontology model.
+     * This is an important distinction, because only the base model receives updates when the
+     * ontology model is updated. Thus, removing a statement that is not in the base
+     * model will not actually affect the overall model.

+ * @param stmt A statement to test + * @return True if the given statement is from the base model + */ + @Override + public boolean isInBaseModel( Statement stmt ) { + Node s = stmt.getSubject().asNode(); + Node p = stmt.getPredicate().asNode(); + Node o = stmt.getObject().asNode(); + Graph b = getBaseGraph(); + return b.contains( s, p, o ); + } + + + /** + *

+ * Answer true if this model is currently in strict checking mode. Strict + * mode means + * that converting a common resource to a particular language element, such as + * an ontology class, will be subject to some simple syntactic-level checks for + * appropriateness. + *

+ * + * @return True if in strict checking mode + */ + @Override + public boolean strictMode() { + return m_strictMode; + } + + + /** + *

+ * Set the checking mode to strict or non-strict. + *

+ * + * @param strict + * @see #strictMode() + */ + @Override + public void setStrictMode( boolean strict ) { + m_strictMode = strict; + } + + + /** + *

+     * Set the flag that controls whether adding or removing imports
+     * statements to or from the
+     * model will result in the imports closure changing dynamically.

+ * @param dynamic If true, adding or removing an imports statement to the + * model will result in a change in the imports closure. If false, changes + * to the imports are not monitored dynamically. Default false. + */ + @Override + public void setDynamicImports( boolean dynamic ) { + if (dynamic) { + if (m_importsListener == null) { + // turn on dynamic processing + m_importsListener = new ImportsListener(); + register( m_importsListener ); + } + } + else { + if (m_importsListener != null) { + // turn off dynamic processing + unregister( m_importsListener ); + m_importsListener = null; + } + } + } + + + /** + *
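// A minimal sketch of dynamic import handling as implemented above: once enabled,
// asserting an owl:imports statement triggers the document manager to load the
// imported ontology. Both URIs are placeholders.
import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.ontology.Ontology;

class DynamicImportsExample {
    static void importOnTheFly(OntModel m) {
        m.setDynamicImports(true);
        Ontology ont = m.createOntology("http://example.org/base");
        // this statement fires the ImportsListener, which asks the document manager
        // to load http://example.org/other into the imports closure
        ont.addImport(m.createResource("http://example.org/other"));
    }
}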

Answer true if the imports closure of the model will be dynamically + * updated as imports statements are added and removed.

+ * @return True if the imports closure is updated dynamically. + */ + @Override + public boolean getDynamicImports() { + return m_importsListener != null; + } + + + /** + *

Answer the ontology model specification that was used to construct this model

+ * @return An ont model spec instance. + */ + @Override + public OntModelSpec getSpecification() { + return m_spec; + } + + // output operations - delegate to base model + + @Override + public Model write( Writer writer ) { return getBaseModel().write( writer ); } + @Override + public Model write( Writer writer, String lang ) { return getBaseModel().write( writer, lang ); } + @Override + public Model write( Writer writer, String lang, String base ) { return getBaseModel().write( writer, lang, base ); } + @Override + public Model write( OutputStream out ) { return getBaseModel().write( out ); } + @Override + public Model write( OutputStream out, String lang ) { return getBaseModel().write( out, lang ); } + @Override + public Model write( OutputStream out, String lang, String base) { return getBaseModel().write( out, lang, base ); } + + @Override + public Model writeAll( Writer writer, String lang, String base ) { + return super.write( writer, lang, base ); + } + + @Override + public Model writeAll( OutputStream out, String lang, String base ) { + return super.write( out, lang, base ); + } + + + // Implementation of inf model interface methods + + /** + * Return the raw RDF model being processed (i.e. the argument + * to the Reasonder.bind call that created this InfModel). + */ + @Override + public Model getRawModel() { + return getBaseModel(); + } + + /** + * Return the Reasoner which is being used to answer queries to this graph. + */ + @Override + public Reasoner getReasoner() { + return (getGraph() instanceof InfGraph) ? ((InfGraph) getGraph()).getReasoner() : null; + } + + /** + * Cause the inference model to reconsult the underlying data to take + * into account changes. Normally changes are made through the InfModel's add and + * remove calls are will be handled appropriately. However, in some cases changes + * are made "behind the InfModels's back" and this forces a full reconsult of + * the changed data. + */ + @Override + public void rebind() { + if (getGraph() instanceof InfGraph) { + ((InfGraph) getGraph()).rebind(); + } + } + + /** + * Perform any initial processing and caching. This call is optional. Most + * engines either have negligable set up work or will perform an implicit + * "prepare" if necessary. The call is provided for those occasions where + * substantial preparation work is possible (e.g. running a forward chaining + * rule system) and where an application might wish greater control over when + * this prepration is done rather than just leaving to be done at first query time. + */ + @Override + public void prepare() { + if (getGraph() instanceof InfGraph) { + ((InfGraph) getGraph()).prepare(); + } + } + + /** + * Reset any internal caches. Some systems, such as the tabled backchainer, + * retain information after each query. A reset will wipe this information preventing + * unbounded memory use at the expense of more expensive future queries. A reset + * does not cause the raw data to be reconsulted and so is less expensive than a rebind. + */ + @Override + public void reset() { + if (getGraph() instanceof InfGraph) { + ((InfGraph) getGraph()).reset(); + } + } + + /** + *
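// A minimal sketch contrasting the write() and writeAll() delegation above: write()
// serialises only the base (locally asserted) statements, while writeAll() serialises
// everything visible through the union, imports and inferences included. The file
// names and syntax are illustrative.
import com.hp.hpl.jena.ontology.OntModel;
import java.io.FileOutputStream;
import java.io.OutputStream;

class WriteExample {
    static void save(OntModel m) throws Exception {
        try (OutputStream out = new FileOutputStream("base-only.rdf")) {
            m.write(out, "RDF/XML");          // base model only
        }
        try (OutputStream out = new FileOutputStream("with-imports.rdf")) {
            m.writeAll(out, "RDF/XML", null); // full union view
        }
    }
}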

+     * Returns a derivations model. The rule reasoners typically create a
+     * graph containing those triples added to the base graph due to rule firings.
+     * In some applications it can be useful to be able to access those deductions
+     * directly, without seeing the raw data which triggered them. In particular,
+     * this allows the forward rules to be used as if they were rewrite transformation
+     * rules.

+ * + * @return The derivations model, if one is defined, or else null + */ + @Override + public Model getDeductionsModel() { + if (m_deductionsModel == null) { + InfGraph infGraph = getInfGraph(); + if (infGraph != null) { + Graph deductionsGraph = infGraph.getDeductionsGraph(); + if (deductionsGraph != null) { + m_deductionsModel = ModelFactory.createModelForGraph( deductionsGraph ); + } + } + } + else { + // ensure that the cached model sees the updated changes from the + // underlying reasoner graph + getInfGraph().prepare(); + } + + return m_deductionsModel; + } + + + /** + * Test the consistency of the underlying data. This normally tests + * the validity of the bound instance data against the bound + * schema data. + * @return a ValidityReport structure + */ + @Override + public ValidityReport validate() { + return (getGraph() instanceof InfGraph) ? ((InfGraph) getGraph()).validate() : null; + } + + /** Find all the statements matching a pattern. + *

Return an iterator over all the statements in a model + * that match a pattern. The statements selected are those + * whose subject matches the subject argument, + * whose predicate matches the predicate argument + * and whose object matches the object argument. + * If an argument is null it matches anything.

+ *

+ * The s/p/o terms may refer to resources which are temporarily defined in the "posit" model. + * This allows one, for example, to query what resources are of type CE where CE is a + * class expression rather than a named class - put CE in the posit arg.

+ * + * @return an iterator over the subjects + * @param subject The subject sought + * @param predicate The predicate sought + * @param object The value sought + * @param posit Model containing additional assertions to be considered when matching statements + */ + @Override + public StmtIterator listStatements( Resource subject, Property predicate, RDFNode object, Model posit ) { + if (getGraph() instanceof InfGraph) { + Graph gp = posit == null ? ModelFactory.createDefaultModel().getGraph() : posit.getGraph(); + Iterator iter = getInfGraph().find( asNode(subject), asNode(predicate), asNode(object), gp ); + return IteratorFactory.asStmtIterator(iter,this); + } + else { + return null; + } + } + + /** + * Switch on/off drivation logging. If this is switched on then every time an inference + * is a made that fact is recorded and the resulting record can be access through a later + * getDerivation call. This may consume a lot of space! + */ + @Override + public void setDerivationLogging(boolean logOn) { + if (getGraph() instanceof InfGraph) { + ((InfGraph) getGraph()).setDerivationLogging( logOn ); + } + } + + /** + * Return the derivation of the given statement (which should be the result of + * some previous list operation). + * Not all reasoneers will support derivations. + * @return an iterator over Derivation records or null if there is no derivation information + * available for this triple. + */ + @Override + public Iterator getDerivation(Statement statement) { + return (getGraph() instanceof InfGraph) ? ((InfGraph) getGraph()).getDerivation( statement.asTriple() ) : null; + } + + + + // Internal implementation methods + ////////////////////////////////// + + + /** + *

Helper method to the constructor, which interprets the spec and generates an appropriate + * graph for this model

+ * @param spec The model spec to interpret + * @param base The base model, or null + */ + private static Graph generateGraph( OntModelSpec spec, Graph base ) { + // create a empty union graph + MultiUnion u = new MultiUnion(); + u.addGraph( base ); + u.setBaseGraph( base ); + + Reasoner r = spec.getReasoner(); + // if we have a reasoner in the spec, bind to the union graph and return + return r == null ? (Graph) u : r.bind( u ); + } + + + /** + *

Answer the union graph that contains the imports closure for this ontology

+ * @return The union graph + */ + protected MultiUnion getUnionGraph() { + return m_union; + } + + + /** Answer the resource with the given URI, if present, as the given facet */ + protected Resource findByURIAs( String uri, Class asKey ) { + if (uri == null) { + throw new IllegalArgumentException( "Cannot get() ontology value with a null URI" ); + } + + Node n = NodeFactory.createURI( uri ); + + if (getGraph().contains( n, Node.ANY, Node.ANY )) { + // this resource is a subject in the graph + try { + return getNodeAs( n, asKey ); + } + catch (ConversionException ignore) {/**/} + } + + // not present, or cannot be as'ed to the desired facet + return null; + } + + /** + *

+ * Answer an iterator over all of the resources that have + * rdf:type type. + *

+ * + * @param type The resource that is the value of rdf:type we + * want to match + * @return An iterator over all triples _x rdf:type type + */ + protected ExtendedIterator findByType( Resource type ) { + return getGraph().find( null, RDF.type.asNode(), type.asNode() ); + } + + + /** + *

+ * Answer an iterator over all of the resources that have + * rdf:type type, or optionally, one of the alternative types. + *

+ * + * @param type The resource that is the value of rdf:type we + * want to match + * @param alternates An iterator over alternative types to search for, or null + * @return An iterator over all triples _x rdf:type t where t + * is type or one of the values from types. + */ + protected ExtendedIterator findByType( Resource type, Iterator alternates ) { + ExtendedIterator i = findByType( type ); + // compose onto i the find iterators for the alternate types + if (alternates != null) { + while (alternates.hasNext()) { + i = i.andThen( findByType( alternates.next() ) ); + } + } + return i.filterKeep( new UniqueFilter()); + } + + + /** + *

+ * Answer an iterator over all of the resources that have + * rdf:type type, or optionally, one of the alternative types, + * and present the results as() the given class. + *

+ * + * @param type The resource that is the value of rdf:type we + * want to match + * @param types An iterator over alternative types to search for, or null + * @param asKey The value to use to present the polymorphic results + * @return An iterator over all triples _x rdf:type type + */ + protected ExtendedIterator findByTypeAs( Resource type, Iterator types, Class asKey ) { + return findByType( type, types ).mapWith( new SubjectNodeAs( asKey ) ); + } + + /** + *

+     *
+     * Answer an iterator over all of the resources that have an
+     * rdf:type from the types iterator,
+     * and present the results as() the given class.
+     *

+ * + * @param types An iterator over types to search for. An exception will + * be raised if this iterator does not have at least one next() element. + * @param asKey The value to use to present the polymorphic results + * @return An iterator over all triples _x rdf:type type + */ + protected ExtendedIterator findByTypeAs( Iterator types, Class asKey ) { + return findByTypeAs( types.next(), types, asKey ); + } + + + /** + *

+     *
+     * Answer an iterator over resources with the given rdf:type; for each value
+     * in the iterator, ensure that it is presented as() the
+     * polymorphic object denoted by the given class key.
+     *

+ * + * @param type The rdf:type to search for + * @param asKey The key to pass to as() on the subject nodes + * @return An iterator over subjects with the given type, presenting as + * the given polymorphic class. + */ + protected ExtendedIterator findByTypeAs( Resource type, Class asKey ) { + return findByType( type ).mapWith( new SubjectNodeAs( asKey ) ); + } + + /** + *

+ * Answer an iterator over nodes that have p as a subject + *

+ * + * @param p A property + * @return ExtendedIterator over subjects of p. + */ + protected ExtendedIterator findByDefiningProperty( Property p ) { + return getGraph().find( null, p.asNode(), null ); + } + + + /** + *

+ * Answer an iterator over nodes that have p as a subject, presented as + * polymorphic enh resources of the given facet. + *

+ * + * @param p A property + * @param asKey A facet type + * @return ExtendedIterator over subjects of p, presented as the facet. + */ + protected ExtendedIterator findByDefiningPropertyAs( Property p, Class asKey ) { + return findByDefiningProperty( p ).mapWith( new SubjectNodeAs( asKey ) ); + } + + + /** + *

+ * Answer the resource with the given uri and that optionally has the given rdf:type, + * creating the resource if necessary. + *

+ * + * @param uri The uri to use, or null for an anonymous resource + * @param rdfType The resource to assert as the rdf:type, or null to leave untyped + * @return A new or existing Resource + */ + protected Resource getResourceWithType( String uri, Resource rdfType ) { + Resource r = getResource( uri ); + if (rdfType != null) { + r.addProperty( RDF.type, rdfType ); + } + return r; + } + + + /** + *

Answer a resource presenting the {@link OntResource} facet, which has the given + * URI. If no such resource is currently present in the model, return null.

+ * @param uri The URI of a resource + * @return An OntResource with the given URI, or null + */ + @Override + public OntResource getOntResource( String uri ) { + Resource r = getResource( uri ); + if (containsResource( r )) { + return r.as( OntResource.class ); + } + return null; + } + + /** + *

Answer a resource presenting the {@link OntResource} facet, which + * corresponds to the given resource but attached to this model.

+ * @param res An existing resource + * @return An {@link OntResource} attached to this model that has the same URI + * or anonID as the given resource + */ + @Override + public OntResource getOntResource( Resource res ) { + return res.inModel( this ).as( OntResource.class ); + } + + /** + *

+     * Throw an OntologyException if the term is not in the language profile

+ * + * @param profileTerm The entry from the profile + * @param desc A label for the profile term + * @exception OntologyException if profileTerm is null. + */ + protected void checkProfileEntry( Object profileTerm, String desc ) { + if (profileTerm == null) { + // not in the profile + throw new ProfileException( desc, getProfile() ); + } + } + + + /** + *

Check that every member of the given list has the given rdf:type, and throw an exception if not.

+ * @param list The list to be checked + * @param rdfType The rdf:type value to check for + * @exception LanguageConsistencyException if any member of the list does not have rdf:type rdfType + */ + protected void checkListMembersRdfType( RDFList list, Resource rdfType ) { + if (strictMode() && ! ((Boolean) list.reduce( new RdfTypeTestFn( rdfType), Boolean.TRUE )).booleanValue()) { + // not all of the members of the list are of the given type + throw new LanguageConsistencyException( "The members of the given list are expected to be of rdf:type " + rdfType.toString() ); + } + } + + /** + *

Answer the InfGraph that this model is wrapping, or null if this ontology + * model is not wrapping an inf graph.

+ * @return The model's graph as an InfGraph, or null + */ + private InfGraph getInfGraph() { + return (getGraph() instanceof InfGraph) ? ((InfGraph) getGraph()) : null; + } + + + /** + * Test for whether we ignore file: URI's when testing for content + * negotiation. + * @param source + * @return + */ + protected boolean ignoreFileURI( String source ) { + return source.startsWith( "file:" ); + } + + /* delegation points to allow unit testing of read operations */ + + protected Model readDelegate( String url ) { return super.read( url ); } + protected Model readDelegate( String url, String lang ) { return super.read( url, lang ); } + protected Model readDelegate( String url, String base, String lang ) { return super.read( url, base, lang ); } + + + + //============================================================================== + // Inner class definitions + //============================================================================== + + /** Map triple subjects or single nodes to subject enh nodes, presented as() the given class */ + protected class SubjectNodeAs implements Map1 + { + protected Class m_asKey; + + protected SubjectNodeAs( Class asKey ) { m_asKey = asKey; } + + @Override + public To map1( Triple x ) { + return getNodeAs( x.getSubject(), m_asKey ); + } + + } + + /** Map triple subjects or single nodes to subject enh nodes, presented as() the given class */ + protected class NodeAs implements Map1 + { + protected Class m_asKey; + protected NodeAs( Class asKey ) { m_asKey = asKey; } + + @Override + public To map1( Node x ) { + return getNodeAs( x, m_asKey ); + } + } + + protected class NodeCanAs extends Filter + { + protected Class m_asKey; + protected NodeCanAs( Class asKey ) { m_asKey = asKey; } + + @Override + public boolean accept( Node x ) { + try { getNodeAs( x, m_asKey ); } + catch (Exception ignore) { return false; } + return true; + } + + + } + + /** Filter that accepts nodes that can be mapped to the given facet */ + protected class SubjectNodeCanAs extends Filter + { + protected Class m_asKey; + protected SubjectNodeCanAs( Class asKey ) { m_asKey = asKey; } + + @Override + public boolean accept( T x ) { + Node n = (x instanceof Triple) + ? ((Triple) x).getSubject() + : ((x instanceof EnhNode) ? 
((EnhNode) x).asNode() : (Node) x); + try { + getNodeAs( n, m_asKey ); + } + catch (Exception ignore) { + return false; + } + return true; + } + } + + /** Function to test the rdf type of a list */ + protected class RdfTypeTestFn implements RDFList.ReduceFn + { + protected Resource m_type; + protected RdfTypeTestFn( Resource type ) { m_type = type; } + @Override + public Object reduce( RDFNode node, Object accumulator ) { + Boolean acc = (Boolean) accumulator; + if (acc.booleanValue()) { + // true so far + Resource r = (Resource) node; + return new Boolean( r.hasProperty( RDF.type, m_type ) ); + } + else { + return acc; + } + } + } + + /** Listener for model changes that indicate a change in the imports to the model */ + protected class ImportsListener + extends StatementListener + { + @Override + public void addedStatement( Statement added ) { + if (added.getPredicate().equals( getProfile().IMPORTS() )) { + getDocumentManager().loadImport( BulkUpdatingOntModel.this, added.getResource().getURI() ); + } + } + + @Override + public void removedStatement( Statement removed ) { + if (removed.getPredicate().equals( getProfile().IMPORTS() )) { + getDocumentManager().unloadImport( BulkUpdatingOntModel.this, removed.getResource().getURI() ); + } + } + } +} diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/adapters/SpecialBulkUpdatingGraph.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/adapters/SpecialBulkUpdatingGraph.java new file mode 100644 index 000000000..4b269252e --- /dev/null +++ b/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/adapters/SpecialBulkUpdatingGraph.java @@ -0,0 +1,132 @@ +/* $This file is distributed under the terms of the license in /doc/license.txt$ */ + +package edu.cornell.mannlib.vitro.webapp.rdfservice.adapters; + +import com.hp.hpl.jena.graph.BulkUpdateHandler; +import com.hp.hpl.jena.graph.Capabilities; +import com.hp.hpl.jena.graph.Graph; +import com.hp.hpl.jena.graph.GraphEventManager; +import com.hp.hpl.jena.graph.GraphStatisticsHandler; +import com.hp.hpl.jena.graph.Node; +import com.hp.hpl.jena.graph.TransactionHandler; +import com.hp.hpl.jena.graph.Triple; +import com.hp.hpl.jena.graph.TripleMatch; +import com.hp.hpl.jena.shared.AddDeniedException; +import com.hp.hpl.jena.shared.DeleteDeniedException; +import com.hp.hpl.jena.shared.PrefixMapping; +import com.hp.hpl.jena.util.iterator.ExtendedIterator; + +/** + * This Graph wrapper uses a BulkUpdatingHandler different from the one owned by + * the wrapped Graph. 
+ */ +public class SpecialBulkUpdatingGraph implements Graph { + + private Graph g; + private BulkUpdateHandler b; + + public SpecialBulkUpdatingGraph(Graph g, BulkUpdateHandler b) { + this.g = g; + this.b = b; + } + + @Override + public void add(Triple arg0) throws AddDeniedException { + g.add(arg0); + } + + @Override + public void close() { + g.close(); + } + + @Override + public boolean contains(Node arg0, Node arg1, Node arg2) { + return g.contains(arg0, arg1, arg2); + } + + @Override + public boolean contains(Triple arg0) { + return g.contains(arg0); + } + + @Override + public void delete(Triple arg0) throws DeleteDeniedException { + g.delete(arg0); + } + + @Override + public boolean dependsOn(Graph arg0) { + return g.dependsOn(arg0); + } + + @Override + public ExtendedIterator find(Node arg0, Node arg1, Node arg2) { + return g.find(arg0, arg1, arg2); + } + + @Override + public ExtendedIterator find(TripleMatch arg0) { + return g.find(arg0); + } + + @Override + public BulkUpdateHandler getBulkUpdateHandler() { + return b; + } + + @Override + public Capabilities getCapabilities() { + return g.getCapabilities(); + } + + @Override + public GraphEventManager getEventManager() { + return g.getEventManager(); + } + + @Override + public PrefixMapping getPrefixMapping() { + return g.getPrefixMapping(); + } + + @Override + public GraphStatisticsHandler getStatisticsHandler() { + return g.getStatisticsHandler(); + } + + @Override + public TransactionHandler getTransactionHandler() { + return g.getTransactionHandler(); + } + + @Override + public boolean isClosed() { + return g.isClosed(); + } + + @Override + public boolean isEmpty() { + return g.isEmpty(); + } + + @Override + public boolean isIsomorphicWith(Graph arg0) { + return g.isIsomorphicWith(arg0); + } + + @Override + public int size() { + return g.size(); + } + + @Override + public void clear() { + g.clear(); + } + + @Override + public void remove(Node arg0, Node arg1, Node arg2) { + g.remove(arg0, arg1, arg2); + } +} \ No newline at end of file diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/adapters/VitroModelFactory.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/adapters/VitroModelFactory.java new file mode 100644 index 000000000..ab93dd295 --- /dev/null +++ b/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/adapters/VitroModelFactory.java @@ -0,0 +1,42 @@ +/* $This file is distributed under the terms of the license in /doc/license.txt$ */ + +package edu.cornell.mannlib.vitro.webapp.rdfservice.adapters; + +import com.hp.hpl.jena.graph.BulkUpdateHandler; +import com.hp.hpl.jena.graph.Graph; +import com.hp.hpl.jena.ontology.OntModel; +import com.hp.hpl.jena.rdf.model.Model; +import com.hp.hpl.jena.rdf.model.ModelFactory; + +/** + * Make models that will do proper bulk updates. 
+ */ +public class VitroModelFactory { + + public static OntModel createOntologyModel() { + return new BulkUpdatingOntModel(); + } + + public static OntModel createOntologyModel(Model model) { + return new BulkUpdatingOntModel(model); + } + + public static Model createUnion(Model baseModel, Model otherModel) { + @SuppressWarnings("deprecation") + BulkUpdateHandler buh = baseModel.getGraph().getBulkUpdateHandler(); + + Graph unionGraph = ModelFactory.createUnion(baseModel, otherModel) + .getGraph(); + return new BulkUpdatingModel(unionGraph, buh); + } + + public static OntModel createUnion(OntModel baseModel, OntModel otherModel) { + return new BulkUpdatingOntModel(createUnion((Model) baseModel, + (Model) otherModel)); + } + + public static Model createModelForGraph(Graph g) { + return new BulkUpdatingModel(g); + } + +} diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/impl/ChangeSetImpl.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/impl/ChangeSetImpl.java index 7a2f5cfb8..85a621fe8 100644 --- a/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/impl/ChangeSetImpl.java +++ b/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/impl/ChangeSetImpl.java @@ -89,5 +89,14 @@ public class ChangeSetImpl implements ChangeSet { @Override public List getPostChangeEvents() { return this.postChangeEvents; - } + } + + @Override + public String toString() { + return "ChangeSetImpl [preconditionQuery=" + preconditionQuery + + ", queryType=" + queryType + ", modelChanges=" + modelChanges + + ", preChangeEvents=" + preChangeEvents + + ", postChangeEvents=" + postChangeEvents + "]"; + } + } diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/impl/ModelChangeImpl.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/impl/ModelChangeImpl.java index f728cd0df..812f53ce4 100644 --- a/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/impl/ModelChangeImpl.java +++ b/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/impl/ModelChangeImpl.java @@ -2,7 +2,11 @@ package edu.cornell.mannlib.vitro.webapp.rdfservice.impl; +import java.io.IOException; import java.io.InputStream; +import java.util.List; + +import org.apache.commons.io.IOUtils; import edu.cornell.mannlib.vitro.webapp.rdfservice.ModelChange; import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService; @@ -66,4 +70,26 @@ public class ModelChangeImpl implements ModelChange { public void setGraphURI(String graphURI) { this.graphURI = graphURI; } + + @Override + public String toString() { + return "ModelChangeImpl [serializedModel=" + + streamToString(serializedModel) + ", serializationFormat=" + + serializationFormat + ", operation=" + operation + + ", graphURI=" + graphURI + "]"; + } + + private String streamToString(InputStream stream) { + if (!stream.markSupported()) { + return String.valueOf(stream); + } + try { + stream.mark(Integer.MAX_VALUE); + List lines = IOUtils.readLines(stream); + stream.reset(); + return String.valueOf(lines); + } catch (IOException e) { + return "Failed to read input stream: " + e; + } + } } diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/impl/RDFServiceUtils.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/impl/RDFServiceUtils.java index 43cfbb6f0..77e864436 100644 --- a/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/impl/RDFServiceUtils.java +++ b/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/impl/RDFServiceUtils.java @@ -15,7 +15,7 @@ import com.hp.hpl.jena.query.ResultSet; import 
com.hp.hpl.jena.query.ResultSetFactory; import com.hp.hpl.jena.rdf.model.Model; import com.hp.hpl.jena.rdf.model.ModelFactory; -import com.hp.hpl.jena.sparql.resultset.ResultSetFormat; +import com.hp.hpl.jena.sparql.resultset.ResultsFormat; import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest; import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService; @@ -26,32 +26,44 @@ import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceFactory; import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.logging.LoggingRDFServiceFactory; public class RDFServiceUtils { - - static Log log = LogFactory.getLog(RDFServiceUtils.class); + private static final Log log = LogFactory.getLog(RDFServiceUtils.class); private static final String RDFSERVICEFACTORY_ATTR = RDFServiceUtils.class.getName() + ".RDFServiceFactory"; + public enum WhichService {CONTENT, CONFIGURATION} public static RDFServiceFactory getRDFServiceFactory(ServletContext context) { - Object o = context.getAttribute(RDFSERVICEFACTORY_ATTR); - if (o instanceof RDFServiceFactory) { - RDFServiceFactory factory = (RDFServiceFactory) o; - - /* - * Every factory is wrapped in a logger, so we can dynamically - * enable or disable logging. - */ - return new LoggingRDFServiceFactory(factory); - } else { - log.error("Expecting an RDFServiceFactory on the context, but found " + o); - return null; - } + return getRDFServiceFactory(context, WhichService.CONTENT); + } + + /* + * Every factory is wrapped in a logger, so we can dynamically enable or + * disable logging. + */ + public static RDFServiceFactory getRDFServiceFactory( + ServletContext context, WhichService which) { + String attribute = RDFSERVICEFACTORY_ATTR + "." + which.name(); + Object o = context.getAttribute(attribute); + if (o instanceof RDFServiceFactory) { + RDFServiceFactory factory = (RDFServiceFactory) o; + return new LoggingRDFServiceFactory(factory); + } else { + throw new IllegalStateException( + "Expecting an RDFServiceFactory on the context at '" + + attribute + "', but found " + o); + } + } + + public static void setRDFServiceFactory(ServletContext context, + RDFServiceFactory factory) { + setRDFServiceFactory(context, factory, WhichService.CONTENT); } public static void setRDFServiceFactory(ServletContext context, - RDFServiceFactory factory) { - context.setAttribute(RDFSERVICEFACTORY_ATTR, factory); + RDFServiceFactory factory, WhichService which) { + String attribute = RDFSERVICEFACTORY_ATTR + "." 
+ which.name(); + context.setAttribute(attribute, factory); } public static InputStream toInputStream(String serializedRDF) { @@ -69,16 +81,16 @@ public class RDFServiceUtils { return model; } - public static ResultSetFormat getJenaResultSetFormat(ResultFormat resultFormat) { + public static ResultsFormat getJenaResultSetFormat(ResultFormat resultFormat) { switch(resultFormat) { case JSON: - return ResultSetFormat.syntaxJSON; + return ResultsFormat.FMT_RS_JSON; case CSV: - return ResultSetFormat.syntaxCSV; + return ResultsFormat.FMT_RS_CSV; case XML: - return ResultSetFormat.syntaxXML; + return ResultsFormat.FMT_RS_XML; case TEXT: - return ResultSetFormat.syntaxText; + return ResultsFormat.FMT_TEXT; default: throw new RuntimeException("unsupported ResultFormat"); } @@ -112,10 +124,14 @@ public class RDFServiceUtils { } public static RDFService getRDFService(VitroRequest vreq) { - return getRDFServiceFactory( - vreq.getSession().getServletContext()).getRDFService(); + return getRDFService(vreq, WhichService.CONTENT); } + public static RDFService getRDFService(VitroRequest vreq, WhichService which) { + return getRDFServiceFactory( + vreq.getSession().getServletContext(), which).getRDFService(); + } + public static ResultSet sparqlSelectQuery(String query, RDFService rdfService) { ResultSet resultSet = null; diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/impl/jena/ListeningGraph.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/impl/jena/ListeningGraph.java index 122d0c42f..b9b4e590d 100644 --- a/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/impl/jena/ListeningGraph.java +++ b/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/impl/jena/ListeningGraph.java @@ -14,15 +14,12 @@ import com.hp.hpl.jena.graph.Graph; import com.hp.hpl.jena.graph.GraphEventManager; import com.hp.hpl.jena.graph.GraphStatisticsHandler; import com.hp.hpl.jena.graph.Node; -import com.hp.hpl.jena.graph.Reifier; import com.hp.hpl.jena.graph.TransactionHandler; import com.hp.hpl.jena.graph.Triple; import com.hp.hpl.jena.graph.TripleMatch; import com.hp.hpl.jena.graph.impl.GraphWithPerform; import com.hp.hpl.jena.graph.impl.SimpleBulkUpdateHandler; import com.hp.hpl.jena.graph.impl.SimpleEventManager; -import com.hp.hpl.jena.graph.query.QueryHandler; -import com.hp.hpl.jena.graph.query.SimpleQueryHandler; import com.hp.hpl.jena.shared.AddDeniedException; import com.hp.hpl.jena.shared.DeleteDeniedException; import com.hp.hpl.jena.shared.PrefixMapping; @@ -30,7 +27,6 @@ import com.hp.hpl.jena.shared.impl.PrefixMappingImpl; import com.hp.hpl.jena.util.iterator.ExtendedIterator; import com.hp.hpl.jena.util.iterator.WrappedIterator; -import edu.cornell.mannlib.vitro.webapp.dao.jena.EmptyReifier; import edu.cornell.mannlib.vitro.webapp.rdfservice.ModelChange; import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.RDFServiceImpl; @@ -44,8 +40,6 @@ public class ListeningGraph implements GraphWithPerform { private BulkUpdateHandler bulkUpdateHandler; private GraphEventManager eventManager; private PrefixMapping prefixMapping = new PrefixMappingImpl(); - private Reifier reifier = new EmptyReifier(this); - private QueryHandler queryHandler; public ListeningGraph(String graphURI, RDFServiceImpl rdfServiceImpl) { this.graphURI = graphURI; @@ -80,6 +74,7 @@ public class ListeningGraph implements GraphWithPerform { @Override public void close() { + // Nothing to close. 
} @Override @@ -109,6 +104,20 @@ public class ListeningGraph implements GraphWithPerform { return WrappedIterator.create(triplist.iterator()); } + @Override + public void clear() { + for (Triple t: find(null, null, null).toList()) { + delete(t); + } + } + + @Override + public void remove(Node subject, Node predicate, Node object) { + for (Triple t: find(subject, predicate, object).toList()) { + delete(t); + } + } + @Override public BulkUpdateHandler getBulkUpdateHandler() { if (this.bulkUpdateHandler == null) { @@ -135,11 +144,6 @@ public class ListeningGraph implements GraphWithPerform { return prefixMapping; } - @Override - public Reifier getReifier() { - return reifier; - } - @Override public GraphStatisticsHandler getStatisticsHandler() { return null; @@ -147,13 +151,11 @@ public class ListeningGraph implements GraphWithPerform { @Override public TransactionHandler getTransactionHandler() { - // TODO Auto-generated method stub return null; } @Override public boolean isClosed() { - // TODO Auto-generated method stub return false; } @@ -168,14 +170,6 @@ public class ListeningGraph implements GraphWithPerform { "by SPARQL graphs"); } - @Override - public QueryHandler queryHandler() { - if (queryHandler == null) { - queryHandler = new SimpleQueryHandler(this); - } - return queryHandler; - } - @Override public int size() { int size = find(null, null, null).toList().size(); @@ -184,41 +178,50 @@ public class ListeningGraph implements GraphWithPerform { private final static Capabilities capabilities = new Capabilities() { - public boolean addAllowed() { + @Override + public boolean addAllowed() { return false; } + @Override public boolean addAllowed(boolean everyTriple) { return false; } + @Override public boolean canBeEmpty() { return true; } + @Override public boolean deleteAllowed() { return false; } + @Override public boolean deleteAllowed(boolean everyTriple) { return false; } + @Override public boolean findContractSafe() { return true; } + @Override public boolean handlesLiteralTyping() { return true; } + @Override public boolean iteratorRemoveAllowed() { return false; } + @Override public boolean sizeAccurate() { return true; } }; - + } diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/impl/jena/RDFServiceJena.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/impl/jena/RDFServiceJena.java index a8e2ece70..0267f256c 100644 --- a/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/impl/jena/RDFServiceJena.java +++ b/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/impl/jena/RDFServiceJena.java @@ -4,20 +4,20 @@ package edu.cornell.mannlib.vitro.webapp.rdfservice.impl.jena; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; +import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; -import java.util.Collections; -import java.util.Comparator; import java.util.Iterator; import java.util.List; import java.util.concurrent.ConcurrentLinkedQueue; +import org.apache.commons.io.IOUtils; +import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.apache.log4j.lf5.util.StreamUtils; -import com.hp.hpl.jena.graph.Node; import com.hp.hpl.jena.graph.Triple; -import com.hp.hpl.jena.query.DataSource; import com.hp.hpl.jena.query.Dataset; import com.hp.hpl.jena.query.DatasetFactory; import com.hp.hpl.jena.query.Query; @@ -49,11 +49,61 @@ public abstract class RDFServiceJena extends RDFServiceImpl implements RDFServic protected abstract 
DatasetWrapper getDatasetWrapper(); - public abstract boolean changeSetUpdate(ChangeSet changeSet) throws RDFServiceException; + @Override + public abstract boolean changeSetUpdate(ChangeSet changeSet) throws RDFServiceException; + protected void notifyListenersOfPreChangeEvents(ChangeSet changeSet) { + for (Object o : changeSet.getPreChangeEvents()) { + this.notifyListenersOfEvent(o); + } + } + + protected void insureThatInputStreamsAreResettable(ChangeSet changeSet) throws IOException { + for (ModelChange modelChange: changeSet.getModelChanges()) { + if (!modelChange.getSerializedModel().markSupported()) { + byte[] bytes = IOUtils.toByteArray(modelChange.getSerializedModel()); + modelChange.setSerializedModel(new ByteArrayInputStream(bytes)); + } + modelChange.getSerializedModel().mark(Integer.MAX_VALUE); + } + } + + protected void applyChangeSetToModel(ChangeSet changeSet, Dataset dataset) { + for (ModelChange modelChange: changeSet.getModelChanges()) { + dataset.getLock().enterCriticalSection(Lock.WRITE); + try { + Model model = (modelChange.getGraphURI() == null) ? + dataset.getDefaultModel() : + dataset.getNamedModel(modelChange.getGraphURI()); + operateOnModel(model, modelChange, dataset); + } finally { + dataset.getLock().leaveCriticalSection(); + } + } + } + + protected void notifyListenersOfChanges(ChangeSet changeSet) + throws IOException { + for (ModelChange modelChange: changeSet.getModelChanges()) { + modelChange.getSerializedModel().reset(); + Model model = ModelFactory.createModelForGraph( + new ListeningGraph(modelChange.getGraphURI(), this)); + operateOnModel(model, modelChange, null); + } + } + + protected void notifyListenersOfPostChangeEvents(ChangeSet changeSet) { + for (Object o : changeSet.getPostChangeEvents()) { + this.notifyListenersOfEvent(o); + } + } + protected void operateOnModel(Model model, ModelChange modelChange, Dataset dataset) { model.enterCriticalSection(Lock.WRITE); try { + if (log.isDebugEnabled()) { + dumpOperation(model, modelChange); + } if (modelChange.getOperation() == ModelChange.Operation.ADD) { model.read(modelChange.getSerializedModel(), null, getSerializationFormatString(modelChange.getSerializationFormat())); @@ -71,7 +121,63 @@ public abstract class RDFServiceJena extends RDFServiceImpl implements RDFServic } } - private void removeBlankNodesWithSparqlUpdate(Dataset dataset, Model model, String graphURI) { + /** + * As a debug statement, log info about the model change operation: add or + * delete, model URI, model class, punctuation count, beginning of the + * string. + */ + private void dumpOperation(Model model, ModelChange modelChange) { + String op = String.valueOf(modelChange.getOperation()); + + byte[] changeBytes = new byte[0]; + try { + modelChange.getSerializedModel().mark(Integer.MAX_VALUE); + changeBytes = StreamUtils + .getBytes(modelChange.getSerializedModel()); + modelChange.getSerializedModel().reset(); + } catch (IOException e) { + // leave it empty. 
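// Illustrative note (values hypothetical): with the format string used at the end of this
// method, ">>>>OPERATION: %3.3s %03dpunc, name='%s', class=%s, start=%.200s", a typical debug
// line might read ">>>>OPERATION: ADD 042punc, name='vitro-kb-2', class=ModelCom, start=<http://example.org/s> ...".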
+ } + + int puncCount = 0; + boolean inUri = false; + boolean inQuotes = false; + for (byte b : changeBytes) { + if (inQuotes) { + if (b == '"') { + inQuotes = false; + } + } else if (inUri) { + if (b == '>') { + inUri = false; + } + } else { + if (b == '"') { + inQuotes = true; + } else if (b == '<') { + inUri = true; + } else if ((b == ',') || (b == ';') || (b == '.')) { + puncCount++; + } + } + } + + String changeString = new String(changeBytes).replace('\n', ' '); + + String graphUri = modelChange.getGraphURI(); + int delimHere = Math.max(graphUri.lastIndexOf('#'), + graphUri.lastIndexOf('/')); + String graphLocalName = graphUri.substring(delimHere + 1); + + String modelClassName = model.getClass().getSimpleName(); + + log.debug(String + .format(">>>>OPERATION: %3.3s %03dpunc, name='%s', class=%s, start=%.200s", + op, puncCount, graphLocalName, modelClassName, + changeString)); + } + + private void removeBlankNodesWithSparqlUpdate(Dataset dataset, Model model, String graphURI) { List blankNodeStatements = new ArrayList(); StmtIterator stmtIt = model.listStatements(); @@ -112,7 +218,7 @@ public abstract class RDFServiceJena extends RDFServiceImpl implements RDFServic QueryExecution qee = QueryExecutionFactory.create(treeFinderQuery, blankNodeModel); try { Model tree = qee.execDescribe(); - DataSource ds = DatasetFactory.create(); + Dataset ds = DatasetFactory.createMem(); if (graphURI == null) { ds.setDefaultModel(dataset.getDefaultModel()); } else { @@ -195,7 +301,7 @@ public abstract class RDFServiceJena extends RDFServiceImpl implements RDFServic Query construct = QueryFactory.create(queryBuff.toString()); // make a plain dataset to force the query to be run in a way that // won't overwhelm MySQL with too many joins - DataSource ds = DatasetFactory.create(); + Dataset ds = DatasetFactory.createMem(); if (graphURI == null) { ds.setDefaultModel(dataset.getDefaultModel()); } else { @@ -259,15 +365,6 @@ public abstract class RDFServiceJena extends RDFServiceImpl implements RDFServic return output; } - private String getHexString(Node node) { - String label = node.getBlankNodeLabel().replaceAll("\\W", "").toUpperCase(); - if (label.length() > 7) { - return label.substring(label.length() - 7); - } else { - return label; - } - } - private static final boolean WHERE_CLAUSE = true; private void addStatementPatterns(List stmts, StringBuffer patternBuff, boolean whereClause) { @@ -298,7 +395,7 @@ public abstract class RDFServiceJena extends RDFServiceImpl implements RDFServic } private InputStream getRDFResultStream(String query, boolean construct, - ModelSerializationFormat resultFormat) throws RDFServiceException { + ModelSerializationFormat resultFormat) { DatasetWrapper dw = getDatasetWrapper(); try { Dataset d = dw.getDataset(); @@ -407,6 +504,7 @@ public abstract class RDFServiceJena extends RDFServiceImpl implements RDFServic @Override public void getGraphMetadata() throws RDFServiceException { + // nothing to do } @Override @@ -414,11 +512,6 @@ public abstract class RDFServiceJena extends RDFServiceImpl implements RDFServic // nothing } - @Override - public void notifyListeners(Triple triple, ModelChange.Operation operation, String graphURI) { - super.notifyListeners(triple, operation, graphURI); - } - protected QueryExecution createQueryExecution(String queryString, Query q, Dataset d) { return QueryExecutionFactory.create(q, d); } diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/impl/jena/model/RDFServiceModel.java 
b/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/impl/jena/model/RDFServiceModel.java index 6e89e5a95..8526c5972 100644 --- a/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/impl/jena/model/RDFServiceModel.java +++ b/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/impl/jena/model/RDFServiceModel.java @@ -9,12 +9,10 @@ import org.apache.commons.io.IOUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import com.hp.hpl.jena.query.DataSource; import com.hp.hpl.jena.query.Dataset; import com.hp.hpl.jena.query.DatasetFactory; import com.hp.hpl.jena.rdf.model.Model; import com.hp.hpl.jena.rdf.model.ModelFactory; -import com.hp.hpl.jena.shared.Lock; import edu.cornell.mannlib.vitro.webapp.dao.jena.DatasetWrapper; import edu.cornell.mannlib.vitro.webapp.rdfservice.ChangeSet; @@ -41,7 +39,7 @@ public class RDFServiceModel extends RDFServiceJena implements RDFService { @Override protected DatasetWrapper getDatasetWrapper() { - DataSource d = DatasetFactory.create(); + Dataset d = DatasetFactory.createMem(); if (modelName == null) { d.setDefaultModel(this.model); } else { diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/impl/jena/sdb/RDFServiceSDB.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/impl/jena/sdb/RDFServiceSDB.java index 2c5df4229..4cc1f1542 100644 --- a/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/impl/jena/sdb/RDFServiceSDB.java +++ b/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/impl/jena/sdb/RDFServiceSDB.java @@ -2,16 +2,13 @@ package edu.cornell.mannlib.vitro.webapp.rdfservice.impl.jena.sdb; -import java.io.ByteArrayInputStream; import java.sql.Connection; import java.sql.SQLException; -import java.util.Iterator; import java.util.regex.Matcher; import java.util.regex.Pattern; import javax.sql.DataSource; -import org.apache.commons.io.IOUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -19,21 +16,16 @@ import com.hp.hpl.jena.query.Dataset; import com.hp.hpl.jena.query.Query; import com.hp.hpl.jena.query.QueryExecution; import com.hp.hpl.jena.query.QueryExecutionFactory; -import com.hp.hpl.jena.rdf.model.Model; -import com.hp.hpl.jena.rdf.model.ModelFactory; import com.hp.hpl.jena.sdb.SDBFactory; import com.hp.hpl.jena.sdb.Store; import com.hp.hpl.jena.sdb.StoreDesc; import com.hp.hpl.jena.sdb.sql.SDBConnection; -import com.hp.hpl.jena.shared.Lock; import edu.cornell.mannlib.vitro.webapp.dao.jena.DatasetWrapper; import edu.cornell.mannlib.vitro.webapp.dao.jena.StaticDatasetFactory; import edu.cornell.mannlib.vitro.webapp.rdfservice.ChangeSet; -import edu.cornell.mannlib.vitro.webapp.rdfservice.ModelChange; import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService; import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException; -import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.jena.ListeningGraph; import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.jena.RDFServiceJena; public class RDFServiceSDB extends RDFServiceJena implements RDFService { @@ -63,8 +55,8 @@ public class RDFServiceSDB extends RDFServiceJena implements RDFService { if (staticDatasetFactory != null) { return staticDatasetFactory.getDatasetWrapper(); } - SDBConnection conn = new SDBConnection(ds.getConnection()); - return new DatasetWrapper(getDataset(conn), conn); + SDBConnection sdbConn = new SDBConnection(ds.getConnection()); + return new DatasetWrapper(getDataset(sdbConn), sdbConn); } catch (SQLException sqle) { log.error(sqle, sqle); 
throw new RuntimeException(sqle); @@ -82,93 +74,72 @@ public class RDFServiceSDB extends RDFServiceJena implements RDFService { return false; } - SDBConnection conn = null; - try { - conn = new SDBConnection(getConnection()); - } catch (SQLException sqle) { - log.error(sqle, sqle); - throw new RDFServiceException(sqle); - } - - Dataset dataset = getDataset(conn); - boolean transaction = conn.getTransactionHandler().transactionsSupported(); + SDBConnection sdbConn = getSDBConnection(); + Dataset dataset = getDataset(sdbConn); try { + insureThatInputStreamsAreResettable(changeSet); - if (transaction) { - conn.getTransactionHandler().begin(); - } - - for (Object o : changeSet.getPreChangeEvents()) { - this.notifyListenersOfEvent(o); - } + beginTransaction(sdbConn); - Iterator csIt = changeSet.getModelChanges().iterator(); - while (csIt.hasNext()) { - ModelChange modelChange = csIt.next(); - if (!modelChange.getSerializedModel().markSupported()) { - byte[] bytes = IOUtils.toByteArray(modelChange.getSerializedModel()); - modelChange.setSerializedModel(new ByteArrayInputStream(bytes)); - } - modelChange.getSerializedModel().mark(Integer.MAX_VALUE); - dataset.getLock().enterCriticalSection(Lock.WRITE); - try { - Model model = (modelChange.getGraphURI() == null) - ? dataset.getDefaultModel() - : dataset.getNamedModel(modelChange.getGraphURI()); - operateOnModel(model, modelChange, dataset); - } finally { - dataset.getLock().leaveCriticalSection(); - } - } + notifyListenersOfPreChangeEvents(changeSet); + applyChangeSetToModel(changeSet, dataset); - if (transaction) { - conn.getTransactionHandler().commit(); - } + commitTransaction(sdbConn); - // notify listeners of triple changes - csIt = changeSet.getModelChanges().iterator(); - while (csIt.hasNext()) { - ModelChange modelChange = csIt.next(); - modelChange.getSerializedModel().reset(); - Model model = ModelFactory.createModelForGraph( - new ListeningGraph(modelChange.getGraphURI(), this)); - operateOnModel(model, modelChange, null); - } - - for (Object o : changeSet.getPostChangeEvents()) { - this.notifyListenersOfEvent(o); - } + notifyListenersOfChanges(changeSet); + notifyListenersOfPostChangeEvents(changeSet); + return true; } catch (Exception e) { log.error(e, e); - if (transaction) { - conn.getTransactionHandler().abort(); - } + abortTransaction(sdbConn); throw new RDFServiceException(e); } finally { - close(conn); + close(sdbConn); } - - return true; - } + } - protected Connection getConnection() throws SQLException { - return (conn != null) ? conn : ds.getConnection(); + private SDBConnection getSDBConnection() throws RDFServiceException { + try { + Connection c = (conn != null) ? 
conn : ds.getConnection(); + return new SDBConnection(c); + } catch (SQLException sqle) { + log.error(sqle, sqle); + throw new RDFServiceException(sqle); + } } - protected void close(SDBConnection sdbConn) { + private void close(SDBConnection sdbConn) { if (!sdbConn.getSqlConnection().equals(conn)) { sdbConn.close(); } } - protected Dataset getDataset(SDBConnection conn) { - Store store = SDBFactory.connectStore(conn, storeDesc); + private Dataset getDataset(SDBConnection sdbConn) { + Store store = SDBFactory.connectStore(sdbConn, storeDesc); store.getLoader().setUseThreading(false); return SDBFactory.connectDataset(store); } + private void beginTransaction(SDBConnection sdbConn) { + if (sdbConn.getTransactionHandler().transactionsSupported()) { + sdbConn.getTransactionHandler().begin(); + } + } + + private void commitTransaction(SDBConnection sdbConn) { + if (sdbConn.getTransactionHandler().transactionsSupported()) { + sdbConn.getTransactionHandler().commit(); + } + } + + private void abortTransaction(SDBConnection sdbConn) { + if (sdbConn.getTransactionHandler().transactionsSupported()) { + sdbConn.getTransactionHandler().abort(); + } + } + private static final Pattern OPTIONAL_PATTERN = Pattern.compile("optional", Pattern.CASE_INSENSITIVE); private static final Pattern GRAPH_PATTERN = Pattern.compile("graph", Pattern.CASE_INSENSITIVE); diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/impl/jena/tdb/RDFServiceFactoryTDB.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/impl/jena/tdb/RDFServiceFactoryTDB.java new file mode 100644 index 000000000..1cfcc4ed4 --- /dev/null +++ b/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/impl/jena/tdb/RDFServiceFactoryTDB.java @@ -0,0 +1,51 @@ +/* $This file is distributed under the terms of the license in /doc/license.txt$ */ + +package edu.cornell.mannlib.vitro.webapp.rdfservice.impl.jena.tdb; + +import java.io.IOException; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import edu.cornell.mannlib.vitro.webapp.rdfservice.ChangeListener; +import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService; +import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException; +import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceFactory; + +/** + * TODO + */ +public class RDFServiceFactoryTDB implements RDFServiceFactory { + private static final Log log = LogFactory + .getLog(RDFServiceFactoryTDB.class); + + + private final RDFServiceTDB service; + + public RDFServiceFactoryTDB(String directoryPath) throws IOException { + this.service = new RDFServiceTDB(directoryPath); + } + + @Override + public RDFService getRDFService() { + return service; + } + + @Override + public RDFService getShortTermRDFService() { + return service; + } + + @Override + public void registerListener(ChangeListener changeListener) + throws RDFServiceException { + service.registerListener(changeListener); + } + + @Override + public void unregisterListener(ChangeListener changeListener) + throws RDFServiceException { + service.unregisterListener(changeListener); + } + +} diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/impl/jena/tdb/RDFServiceTDB.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/impl/jena/tdb/RDFServiceTDB.java new file mode 100644 index 000000000..44028a43f --- /dev/null +++ b/webapp/src/edu/cornell/mannlib/vitro/webapp/rdfservice/impl/jena/tdb/RDFServiceTDB.java @@ -0,0 +1,82 @@ +/* $This file is distributed under the terms of the license in 
/doc/license.txt$ */ + +package edu.cornell.mannlib.vitro.webapp.rdfservice.impl.jena.tdb; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +import com.hp.hpl.jena.query.Dataset; +import com.hp.hpl.jena.tdb.TDB; +import com.hp.hpl.jena.tdb.TDBFactory; + +import edu.cornell.mannlib.vitro.webapp.dao.jena.DatasetWrapper; +import edu.cornell.mannlib.vitro.webapp.rdfservice.ChangeSet; +import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException; +import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.jena.RDFServiceJena; + +/** + * TODO + */ +public class RDFServiceTDB extends RDFServiceJena { + private static final Log log = LogFactory.getLog(RDFServiceTDB.class); + + private final Dataset dataset; + + public RDFServiceTDB(String directoryPath) throws IOException { + Path tdbDir = Paths.get(directoryPath); + + if (!Files.exists(tdbDir)) { + Path parentDir = tdbDir.getParent(); + if (!Files.exists(parentDir)) { + throw new IllegalArgumentException( + "Cannot create TDB directory '" + tdbDir + + "': parent directory does not exist."); + } + Files.createDirectory(tdbDir); + } + + this.dataset = TDBFactory.createDataset(directoryPath); + } + + @Override + protected DatasetWrapper getDatasetWrapper() { + return new DatasetWrapper(dataset); + } + + @Override + public boolean changeSetUpdate(ChangeSet changeSet) + throws RDFServiceException { + + if (changeSet.getPreconditionQuery() != null + && !isPreconditionSatisfied(changeSet.getPreconditionQuery(), + changeSet.getPreconditionQueryType())) { + return false; + } + + try { + insureThatInputStreamsAreResettable(changeSet); + + if (log.isDebugEnabled()) { + log.debug("Change Set: " + changeSet); + } + notifyListenersOfPreChangeEvents(changeSet); + + applyChangeSetToModel(changeSet, dataset); + TDB.sync(dataset); + + notifyListenersOfChanges(changeSet); + notifyListenersOfPostChangeEvents(changeSet); + + return true; + } catch (Exception e) { + log.error(e, e); + throw new RDFServiceException(e); + } + } + +} diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/servlet/setup/ApplicationModelSetup.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/servlet/setup/ApplicationModelSetup.java deleted file mode 100644 index 7bb26c752..000000000 --- a/webapp/src/edu/cornell/mannlib/vitro/webapp/servlet/setup/ApplicationModelSetup.java +++ /dev/null @@ -1,117 +0,0 @@ -/* $This file is distributed under the terms of the license in /doc/license.txt$ */ -package edu.cornell.mannlib.vitro.webapp.servlet.setup; - -import javax.servlet.ServletContext; -import javax.servlet.ServletContextEvent; -import javax.servlet.ServletContextListener; -import javax.sql.DataSource; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - -import com.hp.hpl.jena.ontology.OntModel; -import com.hp.hpl.jena.rdf.model.Model; -import com.hp.hpl.jena.rdf.model.ModelFactory; - -import edu.cornell.mannlib.vitro.webapp.dao.ModelAccess; -import edu.cornell.mannlib.vitro.webapp.dao.ModelAccess.ModelID; -import edu.cornell.mannlib.vitro.webapp.dao.jena.ModelSynchronizer; -import edu.cornell.mannlib.vitro.webapp.startup.StartupStatus; - -/** - * Setups the Application Configuration TBox and ABox. This is sometimes - * called the display model. 
- * - * @author bdc34 - */ - -public class ApplicationModelSetup extends JenaDataSourceSetupBase -implements ServletContextListener { - - private static final Log log = LogFactory.getLog( - ApplicationModelSetup.class.getName()); - - /** - * Setup the application configuration model. It is frequently called the - * display model. If this is a new DB, populate the display model with the - * initial data. - * - * Also load any files that get loaded to the display model at each tomcat - * startup. - * - * Also, at each start of tomcat, load The display TBox and the - * display/display model. - */ - private void setupDisplayModel(DataSource bds, ServletContext ctx, - StartupStatus ss) { - - // display, editing and navigation Model - try { - Model displayDbModel = makeDBModel(bds, - JENA_DISPLAY_METADATA_MODEL, DB_ONT_MODEL_SPEC, ctx); - RDFFilesLoader.loadFirstTimeFiles(ctx, "display", displayDbModel, displayDbModel.isEmpty()); - - OntModel displayModel = ModelFactory.createOntologyModel(MEM_ONT_MODEL_SPEC); - displayModel.add(displayDbModel); - displayModel.getBaseModel().register(new ModelSynchronizer(displayDbModel)); - ModelAccess.on(ctx).setDisplayModel(displayModel); - - //at each startup load all RDF files from directory to sub-models of display model - RDFFilesLoader.loadEveryTimeFiles(ctx, "display", displayModel); - } catch (Throwable t) { - log.error("Unable to load user application configuration model", t); - ss.fatal(this, "Unable to load user application configuration model", t); - } - - //display tbox - currently reading in every time - try { - Model displayTboxModel = makeDBModel(bds, - JENA_DISPLAY_TBOX_MODEL, DB_ONT_MODEL_SPEC, ctx); - - OntModel appTBOXModel = ModelFactory.createOntologyModel(MEM_ONT_MODEL_SPEC); - appTBOXModel.add(displayTboxModel); - appTBOXModel.getBaseModel().register(new ModelSynchronizer(displayTboxModel)); - ModelAccess.on(ctx).setOntModel(ModelID.DISPLAY_TBOX, appTBOXModel); - - //Reading in every time, needs to be cleared/removed every time - RDFFilesLoader.loadEveryTimeFiles(ctx, "displayTbox", appTBOXModel); - } catch (Throwable t) { - log.error("Unable to load user application configuration model TBOX", t); - ss.fatal(this, "Unable to load user application configuration model TBOX", t); - } - - //Display Display model, currently reading in every time - try { - Model displayDisplayModel = makeDBModel(bds, - JENA_DISPLAY_DISPLAY_MODEL, DB_ONT_MODEL_SPEC, ctx); - - OntModel appDisplayDisplayModel = ModelFactory.createOntologyModel(MEM_ONT_MODEL_SPEC); - appDisplayDisplayModel.add(displayDisplayModel); - appDisplayDisplayModel.getBaseModel().register(new ModelSynchronizer(displayDisplayModel)); - ModelAccess.on(ctx).setOntModel(ModelID.DISPLAY_DISPLAY, appDisplayDisplayModel); - - //Reading in every time, needs to be cleared/removed every time - RDFFilesLoader.loadEveryTimeFiles(ctx, "displayDisplay", appDisplayDisplayModel); - } catch (Throwable t) { - log.error("Unable to load user application configuration model Display Model", t); - ss.fatal(this, "Unable to load user application configuration model Display Model", t); - } - } - - @Override - public void contextDestroyed(ServletContextEvent arg0) { - // does nothing. 
- } - - @Override - public void contextInitialized(ServletContextEvent sce) { - ServletContext ctx = sce.getServletContext(); - StartupStatus ss = StartupStatus.getBean(ctx); - DataSource bds = getApplicationDataSource(ctx); - - setupDisplayModel(bds, ctx, ss); - } - - - -} diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/servlet/setup/ConfigurationModelsSetup.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/servlet/setup/ConfigurationModelsSetup.java new file mode 100644 index 000000000..09d13448d --- /dev/null +++ b/webapp/src/edu/cornell/mannlib/vitro/webapp/servlet/setup/ConfigurationModelsSetup.java @@ -0,0 +1,111 @@ +/* $This file is distributed under the terms of the license in /doc/license.txt$ */ + +package edu.cornell.mannlib.vitro.webapp.servlet.setup; + +import static edu.cornell.mannlib.vitro.webapp.rdfservice.impl.RDFServiceUtils.WhichService.CONFIGURATION; + +import javax.servlet.ServletContext; +import javax.servlet.ServletContextEvent; +import javax.servlet.ServletContextListener; + +import com.hp.hpl.jena.ontology.OntModel; +import com.hp.hpl.jena.query.Dataset; +import com.hp.hpl.jena.rdf.model.Model; + +import edu.cornell.mannlib.vitro.webapp.dao.ModelAccess; +import edu.cornell.mannlib.vitro.webapp.dao.ModelAccess.ModelID; +import edu.cornell.mannlib.vitro.webapp.dao.jena.ModelSynchronizer; +import edu.cornell.mannlib.vitro.webapp.dao.jena.RDFServiceDataset; +import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceFactory; +import edu.cornell.mannlib.vitro.webapp.rdfservice.adapters.VitroModelFactory; +import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.RDFServiceUtils; +import edu.cornell.mannlib.vitro.webapp.startup.StartupStatus; + +/** + * Set up the models that use the CONFIGURATION RDFService. They are all mapped + * to memory-based models. + * + * TODO This should be divorced from JenaDataSourceSetupBase, which it only uses + * for constants. 
+ */ +public class ConfigurationModelsSetup extends JenaDataSourceSetupBase implements + ServletContextListener { + + @Override + public void contextInitialized(ServletContextEvent sce) { + ServletContext ctx = sce.getServletContext(); + StartupStatus ss = StartupStatus.getBean(ctx); + + try { + setupModel(ctx, JENA_DISPLAY_METADATA_MODEL, "display", + ModelID.DISPLAY); + + setupModel(ctx, JENA_DISPLAY_TBOX_MODEL, "displayTbox", + ModelID.DISPLAY_TBOX); + + setupModel(ctx, JENA_DISPLAY_DISPLAY_MODEL, "displayDisplay", + ModelID.DISPLAY_DISPLAY); + + ss.info(this, "Set up the display models."); + + setupModel(ctx, JENA_USER_ACCOUNTS_MODEL, "auth", + ModelID.USER_ACCOUNTS); + + ss.info(this, "Set up the user accounts model."); + } catch (Exception e) { + ss.fatal(this, e.getMessage(), e.getCause()); + } + } + + private void setupModel(ServletContext ctx, String modelUri, + String modelPath, ModelID modelId) { + try { + Dataset dataset = getConfigurationModelsDataset(ctx); + OntModel baseModel = getNamedOntModel(modelUri, dataset); + + loadFirstTimeFiles(ctx, modelPath, baseModel); + loadEveryTimeFiles(ctx, modelPath, baseModel); + + OntModel memoryModel = wrapWithMemoryModel(baseModel); + ModelAccess.on(ctx).setOntModel(modelId, memoryModel); + } catch (Exception e) { + throw new RuntimeException("Failed to create the '" + modelPath + + "' model (" + modelUri + ").", e); + } + } + + private Dataset getConfigurationModelsDataset(ServletContext ctx) { + RDFServiceFactory factory = RDFServiceUtils.getRDFServiceFactory(ctx, + CONFIGURATION); + return new RDFServiceDataset(factory.getRDFService()); + } + + private OntModel getNamedOntModel(String modelUri, Dataset dataset) { + Model model = dataset.getNamedModel(modelUri); + return VitroModelFactory.createOntologyModel(model); + } + + private void loadFirstTimeFiles(ServletContext ctx, String modelPath, + OntModel baseModel) { + RDFFilesLoader.loadFirstTimeFiles(ctx, modelPath, baseModel, + baseModel.isEmpty()); + } + + private OntModel wrapWithMemoryModel(OntModel baseModel) { + OntModel memoryModel = VitroModelFactory.createOntologyModel(); + memoryModel.add(baseModel); + memoryModel.getBaseModel().register(new ModelSynchronizer(baseModel)); + return memoryModel; + } + + private void loadEveryTimeFiles(ServletContext ctx, String modelPath, + OntModel memoryModel) { + RDFFilesLoader.loadEveryTimeFiles(ctx, modelPath, memoryModel); + } + + @Override + public void contextDestroyed(ServletContextEvent arg0) { + // Nothing to tear down. 
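// Minimal sketch (assumes a ServletContext "ctx"; ModelAccess and ModelID are used as shown
// elsewhere in this patch): once this listener has run, other components fetch the
// memory-mapped configuration models by ID instead of opening database-backed models themselves.
OntModel displayModel = ModelAccess.on(ctx).getOntModel(ModelID.DISPLAY);
OntModel userAccountsModel = ModelAccess.on(ctx).getOntModel(ModelID.USER_ACCOUNTS);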
+ } + +} diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/servlet/setup/ContentModelSetup.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/servlet/setup/ContentModelSetup.java index 3e90ccdaa..d3415724e 100644 --- a/webapp/src/edu/cornell/mannlib/vitro/webapp/servlet/setup/ContentModelSetup.java +++ b/webapp/src/edu/cornell/mannlib/vitro/webapp/servlet/setup/ContentModelSetup.java @@ -13,13 +13,9 @@ import javax.servlet.ServletContextEvent; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import com.hp.hpl.jena.graph.BulkUpdateHandler; -import com.hp.hpl.jena.graph.Graph; import com.hp.hpl.jena.ontology.OntModel; -import com.hp.hpl.jena.ontology.OntModelSpec; import com.hp.hpl.jena.query.Dataset; import com.hp.hpl.jena.rdf.model.Model; -import com.hp.hpl.jena.rdf.model.ModelFactory; import com.hp.hpl.jena.rdf.model.ResIterator; import com.hp.hpl.jena.rdf.model.Resource; import com.hp.hpl.jena.shared.Lock; @@ -36,10 +32,10 @@ import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactoryConfig; import edu.cornell.mannlib.vitro.webapp.dao.jena.ModelSynchronizer; import edu.cornell.mannlib.vitro.webapp.dao.jena.OntModelSelector; import edu.cornell.mannlib.vitro.webapp.dao.jena.RDFServiceDataset; -import edu.cornell.mannlib.vitro.webapp.dao.jena.SpecialBulkUpdateHandlerGraph; import edu.cornell.mannlib.vitro.webapp.dao.jena.WebappDaoFactorySDB; import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService; import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceFactory; +import edu.cornell.mannlib.vitro.webapp.rdfservice.adapters.VitroModelFactory; import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.RDFServiceUtils; import edu.cornell.mannlib.vitro.webapp.startup.StartupStatus; @@ -77,8 +73,8 @@ public class ContentModelSetup extends JenaDataSourceSetupBase OntModel inferenceABoxModel = createNamedModelFromDataset(dataset, JENA_INF_MODEL); OntModel baseTBoxModel = createdMemoryMappedModel(dataset, JENA_TBOX_ASSERTIONS_MODEL, "tbox assertions"); OntModel inferenceTBoxModel = createdMemoryMappedModel(dataset, JENA_TBOX_INF_MODEL, "tbox inferences"); - OntModel unionABoxModel = createCombinedBulkUpdatingModel(baseABoxModel, inferenceABoxModel); - OntModel unionTBoxModel = createCombinedBulkUpdatingModel(baseTBoxModel, inferenceTBoxModel); + OntModel unionABoxModel = VitroModelFactory.createUnion(baseABoxModel, inferenceABoxModel); + OntModel unionTBoxModel = VitroModelFactory.createUnion(baseTBoxModel, inferenceTBoxModel); if (isFirstStartup()) { @@ -92,9 +88,9 @@ public class ContentModelSetup extends JenaDataSourceSetupBase RDFFilesLoader.loadEveryTimeFiles(ctx, "tbox", baseTBoxModel); log.info("Setting up full models"); - OntModel baseFullModel = createCombinedBulkUpdatingModel(baseABoxModel, baseTBoxModel); - OntModel inferenceFullModel = createCombinedModel(inferenceABoxModel, inferenceTBoxModel); - OntModel unionFullModel = ModelFactory.createOntologyModel(DB_ONT_MODEL_SPEC, dataset.getDefaultModel()); + OntModel baseFullModel = VitroModelFactory.createUnion(baseABoxModel, baseTBoxModel); + OntModel inferenceFullModel = VitroModelFactory.createUnion(inferenceABoxModel, inferenceTBoxModel); + OntModel unionFullModel = VitroModelFactory.createOntologyModel(dataset.getDefaultModel()); models.setOntModel(ModelID.APPLICATION_METADATA, applicationMetadataModel); @@ -130,13 +126,13 @@ public class ContentModelSetup extends JenaDataSourceSetupBase } private OntModel createNamedModelFromDataset(Dataset dataset, String name) { - return 
ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM, dataset.getNamedModel(name)); + return VitroModelFactory.createOntologyModel(dataset.getNamedModel(name)); } private OntModel createdMemoryMappedModel(Dataset dataset, String name, String label) { try { Model dbModel = dataset.getNamedModel(name); - OntModel memoryModel = ModelFactory.createOntologyModel(MEM_ONT_MODEL_SPEC); + OntModel memoryModel = VitroModelFactory.createOntologyModel(); if (dbModel != null) { long begin = System.currentTimeMillis(); @@ -151,20 +147,6 @@ public class ContentModelSetup extends JenaDataSourceSetupBase } } - private OntModel createCombinedModel(OntModel oneModel, OntModel otherModel) { - return ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM, - ModelFactory.createUnion(oneModel, otherModel)); - } - - private OntModel createCombinedBulkUpdatingModel(OntModel baseModel, - OntModel otherModel) { - BulkUpdateHandler bulkUpdateHandler = baseModel.getGraph().getBulkUpdateHandler(); - Graph unionGraph = ModelFactory.createUnion(baseModel, otherModel).getGraph(); - Model unionModel = ModelFactory.createModelForGraph( - new SpecialBulkUpdateHandlerGraph(unionGraph, bulkUpdateHandler)); - return ModelFactory.createOntologyModel(MEM_ONT_MODEL_SPEC, unionModel); - } - private long secondsSince(long startTime) { return (System.currentTimeMillis() - startTime) / 1000; } @@ -180,7 +162,7 @@ public class ContentModelSetup extends JenaDataSourceSetupBase */ private void initializeApplicationMetadata(ServletContext ctx, OntModel applicationMetadataModel) { - OntModel temporaryAMModel = ModelFactory.createOntologyModel(MEM_ONT_MODEL_SPEC); + OntModel temporaryAMModel = VitroModelFactory.createOntologyModel(); RDFFilesLoader.loadFirstTimeFiles(ctx, "applicationMetadata", temporaryAMModel, true); setPortalUriOnFirstTime(temporaryAMModel, ctx); applicationMetadataModel.add(temporaryAMModel); diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/servlet/setup/JenaDataSourceSetupBase.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/servlet/setup/JenaDataSourceSetupBase.java index 58ff6a92d..59b9f67a5 100644 --- a/webapp/src/edu/cornell/mannlib/vitro/webapp/servlet/setup/JenaDataSourceSetupBase.java +++ b/webapp/src/edu/cornell/mannlib/vitro/webapp/servlet/setup/JenaDataSourceSetupBase.java @@ -2,9 +2,6 @@ package edu.cornell.mannlib.vitro.webapp.servlet.setup; -import static edu.cornell.mannlib.vitro.webapp.dao.ModelAccess.ModelMakerID.CONFIGURATION; -import static edu.cornell.mannlib.vitro.webapp.dao.ModelAccess.ModelMakerID.CONTENT; - import java.beans.PropertyVetoException; import javax.servlet.ServletContext; @@ -15,32 +12,15 @@ import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import com.hp.hpl.jena.graph.Graph; -import com.hp.hpl.jena.ontology.OntModel; import com.hp.hpl.jena.ontology.OntModelSpec; import com.hp.hpl.jena.query.Dataset; -import com.hp.hpl.jena.query.Query; -import com.hp.hpl.jena.query.QueryExecution; -import com.hp.hpl.jena.query.QueryExecutionFactory; -import com.hp.hpl.jena.query.QueryFactory; -import com.hp.hpl.jena.query.Syntax; -import com.hp.hpl.jena.rdf.model.Model; -import com.hp.hpl.jena.rdf.model.ModelFactory; -import com.hp.hpl.jena.rdf.model.ModelMaker; import com.hp.hpl.jena.sdb.Store; import com.hp.hpl.jena.sdb.StoreDesc; -import com.hp.hpl.jena.sdb.store.DatabaseType; -import com.hp.hpl.jena.sdb.store.LayoutType; import com.mchange.v2.c3p0.ComboPooledDataSource; import 
edu.cornell.mannlib.vitro.webapp.config.ConfigurationProperties; import edu.cornell.mannlib.vitro.webapp.dao.DisplayVocabulary; -import edu.cornell.mannlib.vitro.webapp.dao.ModelAccess; -import edu.cornell.mannlib.vitro.webapp.dao.VitroVocabulary; import edu.cornell.mannlib.vitro.webapp.dao.jena.JenaBaseDaoCon; -import edu.cornell.mannlib.vitro.webapp.dao.jena.RDBGraphGenerator; -import edu.cornell.mannlib.vitro.webapp.dao.jena.RegeneratingGraph; -import edu.cornell.mannlib.vitro.webapp.dao.jena.SDBGraphGenerator; public class JenaDataSourceSetupBase extends JenaBaseDaoCon { private static final String VITRO_DEFAULT_NAMESPACE = "Vitro.defaultNamespace"; @@ -143,38 +123,12 @@ public class JenaDataSourceSetupBase extends JenaBaseDaoCon { return jdbcUrl; } - - /** - * Sets up a Model and DB connection using values from - * a properties file. - */ - public final Model makeDBModelFromConfigurationProperties( - String jenaDbModelName, - OntModelSpec jenaDbOntModelSpec, ServletContext ctx) { - - String jdbcUrl = getJdbcUrl(ctx); - - String username = ConfigurationProperties.getBean(ctx).getProperty( - "VitroConnection.DataSource.username"); - String password = ConfigurationProperties.getBean(ctx).getProperty( - "VitroConnection.DataSource.password"); - DataSource ds = makeC3poDataSource( - getDbDriverClassName(ctx), jdbcUrl, username, password, ctx); -// DataSource ds = makeBasicDataSource( -// getDbDriverClassName(ctx), jdbcUrl, username, password, ctx); - jenaDbOntModelSpec = (jenaDbOntModelSpec != null) - ? jenaDbOntModelSpec - : DB_ONT_MODEL_SPEC; - - return makeDBModel(ds, jenaDbModelName, jenaDbOntModelSpec, ctx); - - } /** * Sets up a DataSource using values from * a properties file. */ - public final DataSource makeDataSourceFromConfigurationProperties( + protected final DataSource makeDataSourceFromConfigurationProperties( ServletContext ctx) { String dbDriverClassname = ConfigurationProperties.getBean(ctx) .getProperty("VitroConnection.DataSource.driver", @@ -303,10 +257,6 @@ public class JenaDataSourceSetupBase extends JenaBaseDaoCon { return ds; } - public enum TripleStoreType { - RDB, SDB - } - public static boolean isFirstStartup() { return firstStartup; } @@ -315,67 +265,6 @@ public class JenaDataSourceSetupBase extends JenaBaseDaoCon { firstStartup = true; } - protected Model makeDBModel(DataSource ds, - String jenaDbModelname, - OntModelSpec jenaDbOntModelSpec, - ServletContext ctx) { - return makeDBModel( - ds, jenaDbModelname, jenaDbOntModelSpec, TripleStoreType.RDB, ctx); - } - - protected Model makeDBModel(DataSource ds, - String jenaDbModelName, - OntModelSpec jenaDbOntModelSpec, - TripleStoreType storeType, ServletContext ctx) { - return makeDBModel (ds, jenaDbModelName, jenaDbOntModelSpec, storeType, - getDbType(ctx), ctx); - } - - public static Model makeDBModel(DataSource ds, - String jenaDbModelName, - OntModelSpec jenaDbOntModelSpec, - TripleStoreType storeType, String dbType, - ServletContext ctx) { - Model dbModel = null; - try { - // open the db model - try { - Graph g = null; - switch (storeType) { - case RDB: - g = new RegeneratingGraph( - new RDBGraphGenerator( - ds, dbType, jenaDbModelName)); - break; - case SDB: - String layoutStr = ConfigurationProperties.getBean(ctx) - .getProperty( - "VitroConnection.DataSource.sdb.layout", - "layout2/hash"); - String dbtypeStr = ConfigurationProperties.getBean(ctx) - .getProperty("VitroConnection.DataSource.dbtype", - "MySQL"); - StoreDesc desc = new StoreDesc( - LayoutType.fetch(layoutStr), - DatabaseType.fetch(dbtypeStr) 
); - g = new RegeneratingGraph( - new SDBGraphGenerator( - ds, desc, jenaDbModelName)); - break; - default: throw new RuntimeException ( - "Unsupported store type " + storeType); - } - dbModel = ModelFactory.createModelForGraph(g); - //log.debug("Using database at " + ds.getUrl()); - } catch (Throwable t) { - t.printStackTrace(); - } - } catch (Throwable t) { - t.printStackTrace(); - } - return dbModel; - } - protected String getDefaultNamespace(ServletContext ctx) { String dns = ConfigurationProperties.getBean(ctx).getProperty( VITRO_DEFAULT_NAMESPACE); @@ -388,7 +277,7 @@ public class JenaDataSourceSetupBase extends JenaBaseDaoCon { } private static String getDbType(ServletContext ctx) { - return ConfigurationProperties.getBean(ctx).getProperty( // database type + return ConfigurationProperties.getBean(ctx).getProperty( "VitroConnection.DataSource.dbtype", "MySQL"); } @@ -412,50 +301,6 @@ public class JenaDataSourceSetupBase extends JenaBaseDaoCon { return (o instanceof Dataset) ? ((Dataset) o) : null; } - protected OntModel ontModelFromContextAttribute(ServletContext ctx, - String attribute) { - OntModel ontModel; - Object attributeValue = ctx.getAttribute(attribute); - if (attributeValue != null && attributeValue instanceof OntModel) { - ontModel = (OntModel) attributeValue; - } else { - ontModel = ModelFactory.createOntologyModel(MEM_ONT_MODEL_SPEC); - ctx.setAttribute(attribute, ontModel); - } - return ontModel; - } - - protected static void repairAppMetadataModel(Model applicationMetadataModel, - Model aboxAssertions, - Model aboxInferences) { - - log.info("Moving application metadata from ABox to dedicated model"); - getAppMetadata(aboxAssertions, applicationMetadataModel); - getAppMetadata(aboxInferences, applicationMetadataModel); - aboxAssertions.remove(applicationMetadataModel); - aboxInferences.remove(applicationMetadataModel); - - return; - } - - protected static void getAppMetadata(Model source, Model target) { - - String amdQuery = "DESCRIBE ?x WHERE { " + - "{?x a <" + VitroVocabulary.PORTAL +"> } UNION " + - "{?x a <" + VitroVocabulary.PROPERTYGROUP +"> } UNION " + - "{?x a <" + VitroVocabulary.CLASSGROUP +"> } } "; - - try { - Query q = QueryFactory.create(amdQuery, Syntax.syntaxARQ); - QueryExecution qe = QueryExecutionFactory.create(q, source); - qe.execDescribe(target); - } catch (Exception e) { - log.error("unable to create the application metadata model",e); - } - - return; - } - private static final String STOREDESC_ATTR = "storeDesc"; private static final String STORE_ATTR = "kbStore"; diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/servlet/setup/ModelMakerSetup.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/servlet/setup/ModelMakerSetup.java index 2960cdab8..ead94b6a8 100644 --- a/webapp/src/edu/cornell/mannlib/vitro/webapp/servlet/setup/ModelMakerSetup.java +++ b/webapp/src/edu/cornell/mannlib/vitro/webapp/servlet/setup/ModelMakerSetup.java @@ -2,8 +2,29 @@ package edu.cornell.mannlib.vitro.webapp.servlet.setup; +import static edu.cornell.mannlib.vitro.webapp.dao.ModelAccess.ModelID.APPLICATION_METADATA; +import static edu.cornell.mannlib.vitro.webapp.dao.ModelAccess.ModelID.BASE_FULL; +import static edu.cornell.mannlib.vitro.webapp.dao.ModelAccess.ModelID.BASE_TBOX; +import static edu.cornell.mannlib.vitro.webapp.dao.ModelAccess.ModelID.DISPLAY; +import static edu.cornell.mannlib.vitro.webapp.dao.ModelAccess.ModelID.DISPLAY_DISPLAY; +import static edu.cornell.mannlib.vitro.webapp.dao.ModelAccess.ModelID.DISPLAY_TBOX; +import static 
edu.cornell.mannlib.vitro.webapp.dao.ModelAccess.ModelID.INFERRED_FULL; +import static edu.cornell.mannlib.vitro.webapp.dao.ModelAccess.ModelID.INFERRED_TBOX; +import static edu.cornell.mannlib.vitro.webapp.dao.ModelAccess.ModelID.UNION_FULL; +import static edu.cornell.mannlib.vitro.webapp.dao.ModelAccess.ModelID.USER_ACCOUNTS; import static edu.cornell.mannlib.vitro.webapp.dao.ModelAccess.ModelMakerID.CONFIGURATION; import static edu.cornell.mannlib.vitro.webapp.dao.ModelAccess.ModelMakerID.CONTENT; +import static edu.cornell.mannlib.vitro.webapp.servlet.setup.JenaDataSourceSetupBase.JENA_APPLICATION_METADATA_MODEL; +import static edu.cornell.mannlib.vitro.webapp.servlet.setup.JenaDataSourceSetupBase.JENA_DISPLAY_DISPLAY_MODEL; +import static edu.cornell.mannlib.vitro.webapp.servlet.setup.JenaDataSourceSetupBase.JENA_DISPLAY_METADATA_MODEL; +import static edu.cornell.mannlib.vitro.webapp.servlet.setup.JenaDataSourceSetupBase.JENA_DISPLAY_TBOX_MODEL; +import static edu.cornell.mannlib.vitro.webapp.servlet.setup.JenaDataSourceSetupBase.JENA_TBOX_ASSERTIONS_MODEL; +import static edu.cornell.mannlib.vitro.webapp.servlet.setup.JenaDataSourceSetupBase.JENA_TBOX_INF_MODEL; +import static edu.cornell.mannlib.vitro.webapp.servlet.setup.JenaDataSourceSetupBase.JENA_USER_ACCOUNTS_MODEL; + +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; import javax.servlet.ServletContext; import javax.servlet.ServletContextEvent; @@ -11,13 +32,14 @@ import javax.servlet.ServletContextEvent; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import edu.cornell.mannlib.vitro.webapp.config.ConfigurationProperties; +import com.hp.hpl.jena.rdf.model.Model; + import edu.cornell.mannlib.vitro.webapp.dao.ModelAccess; import edu.cornell.mannlib.vitro.webapp.dao.jena.RDFServiceModelMaker; import edu.cornell.mannlib.vitro.webapp.dao.jena.VitroInterceptingModelMaker; -import edu.cornell.mannlib.vitro.webapp.dao.jena.VitroJenaModelMaker; import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceFactory; import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.RDFServiceUtils; +import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.RDFServiceUtils.WhichService; import edu.cornell.mannlib.vitro.webapp.startup.StartupStatus; /** @@ -35,31 +57,62 @@ public class ModelMakerSetup extends JenaDataSourceSetupBase implements createConfigurationModelMaker(ctx); createContentModelMaker(ctx); - ss.info(this, "Created model makers and model source"); + ss.info(this, "Created model makers."); } private void createConfigurationModelMaker(ServletContext ctx) { - String jdbcUrl = getJdbcUrl(ctx); - String dbtypeStr = ConfigurationProperties.getBean(ctx).getProperty( - "VitroConnection.DataSource.dbtype", "MySQL"); - String username = ConfigurationProperties.getBean(ctx).getProperty( - "VitroConnection.DataSource.username"); - String password = ConfigurationProperties.getBean(ctx).getProperty( - "VitroConnection.DataSource.password"); - VitroJenaModelMaker vjmm = new VitroJenaModelMaker(jdbcUrl, username, - password, dbtypeStr, ctx); - VitroInterceptingModelMaker vimm = new VitroInterceptingModelMaker( - vjmm, ctx); - ModelAccess.on(ctx).setModelMaker(CONFIGURATION, vimm); + RDFServiceFactory rdfServiceFactory = RDFServiceUtils + .getRDFServiceFactory(ctx, WhichService.CONFIGURATION); + RDFServiceModelMaker configMM = new RDFServiceModelMaker( + rdfServiceFactory); + Map specials = populateConfigurationSpecialMap(ctx); + VitroInterceptingModelMaker viMM = new 
VitroInterceptingModelMaker( + configMM, specials); + ModelAccess.on(ctx).setModelMaker(CONFIGURATION, viMM); } private void createContentModelMaker(ServletContext ctx) { RDFServiceFactory rdfServiceFactory = RDFServiceUtils .getRDFServiceFactory(ctx); - RDFServiceModelMaker vsmm = new RDFServiceModelMaker(rdfServiceFactory); - VitroInterceptingModelMaker vimm = new VitroInterceptingModelMaker( - vsmm, ctx); - ModelAccess.on(ctx).setModelMaker(CONTENT, vimm); + RDFServiceModelMaker contentMM = new RDFServiceModelMaker( + rdfServiceFactory); + Map specials = populateContentSpecialMap(ctx); + VitroInterceptingModelMaker viMM = new VitroInterceptingModelMaker( + contentMM, specials); + ModelAccess.on(ctx).setModelMaker(CONTENT, viMM); + } + + private Map populateConfigurationSpecialMap( + ServletContext ctx) { + Map map = new HashMap<>(); + map.put(JENA_DISPLAY_METADATA_MODEL, + ModelAccess.on(ctx).getOntModel(DISPLAY)); + map.put(JENA_DISPLAY_TBOX_MODEL, + ModelAccess.on(ctx).getOntModel(DISPLAY_TBOX)); + map.put(JENA_DISPLAY_DISPLAY_MODEL, + ModelAccess.on(ctx).getOntModel(DISPLAY_DISPLAY)); + map.put(JENA_USER_ACCOUNTS_MODEL, + ModelAccess.on(ctx).getOntModel(USER_ACCOUNTS)); + return map; + } + + private Map populateContentSpecialMap(ServletContext ctx) { + Map map = new HashMap<>(); + + map.put("vitro:jenaOntModel", + ModelAccess.on(ctx).getOntModel(UNION_FULL)); + map.put("vitro:baseOntModel", ModelAccess.on(ctx) + .getOntModel(BASE_FULL)); + map.put("vitro:inferenceOntModel", + ModelAccess.on(ctx).getOntModel(INFERRED_FULL)); + map.put(JENA_TBOX_ASSERTIONS_MODEL, + ModelAccess.on(ctx).getOntModel(BASE_TBOX)); + map.put(JENA_TBOX_INF_MODEL, + ModelAccess.on(ctx).getOntModel(INFERRED_TBOX)); + map.put(JENA_APPLICATION_METADATA_MODEL, ModelAccess.on(ctx) + .getOntModel(APPLICATION_METADATA)); + + return map; } @Override diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/servlet/setup/RDFServiceSetup.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/servlet/setup/RDFServiceSetup.java index 469b56f13..380b94df6 100644 --- a/webapp/src/edu/cornell/mannlib/vitro/webapp/servlet/setup/RDFServiceSetup.java +++ b/webapp/src/edu/cornell/mannlib/vitro/webapp/servlet/setup/RDFServiceSetup.java @@ -1,6 +1,10 @@ /* $This file is distributed under the terms of the license in /doc/license.txt$ */ package edu.cornell.mannlib.vitro.webapp.servlet.setup; +import static edu.cornell.mannlib.vitro.webapp.rdfservice.impl.RDFServiceUtils.WhichService.CONFIGURATION; + +import java.io.File; +import java.io.IOException; import java.sql.SQLException; import javax.servlet.ServletContext; @@ -25,6 +29,7 @@ import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceFactory; import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.RDFServiceFactorySingle; import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.RDFServiceUtils; import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.jena.sdb.RDFServiceFactorySDB; +import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.jena.tdb.RDFServiceFactoryTDB; import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.sparql.RDFServiceSparql; import edu.cornell.mannlib.vitro.webapp.startup.StartupStatus; @@ -56,12 +61,22 @@ implements javax.servlet.ServletContextListener { //RDFServiceFactory factory = RDFServiceUtils.getRDFServiceFactory(ctx); //RDFServiceUtils.setRDFServiceFactory(ctx, new SameAsFilteringRDFServiceFactory(factory)); - } catch (SQLException e) { + useTDBForConfigurationModels(ctx); + + } catch (Exception e) { ss.fatal(this, "Exception in RDFServiceSetup", 
e); } } - private void useEndpoint(String endpointURI, String updateEndpointURI, ServletContext ctx) { + private void useTDBForConfigurationModels(ServletContext ctx) throws IOException { + String vitroHome = ConfigurationProperties.getBean(ctx).getProperty( + "vitro.home") ; + String directoryPath = vitroHome + File.separatorChar + "tdbModels"; + RDFServiceFactory factory = new RDFServiceFactoryTDB(directoryPath); + RDFServiceUtils.setRDFServiceFactory(ctx, factory, CONFIGURATION); + } + + private void useEndpoint(String endpointURI, String updateEndpointURI, ServletContext ctx) { RDFService rdfService = null; if (updateEndpointURI == null) { diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/servlet/setup/UpdateKnowledgeBase.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/servlet/setup/UpdateKnowledgeBase.java index 8c8f53533..5d1e01160 100644 --- a/webapp/src/edu/cornell/mannlib/vitro/webapp/servlet/setup/UpdateKnowledgeBase.java +++ b/webapp/src/edu/cornell/mannlib/vitro/webapp/servlet/setup/UpdateKnowledgeBase.java @@ -155,7 +155,7 @@ public class UpdateKnowledgeBase implements ServletContextListener { } // reload the display model since the TBoxUpdater may have // modified it - new ApplicationModelSetup().contextInitialized(sce); + new ConfigurationModelsSetup().contextInitialized(sce); } catch (Exception ioe) { ss.fatal(this, "Exception updating knowledge base for ontology changes: ", ioe); } diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/servlet/setup/UserModelSetup.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/servlet/setup/UserModelSetup.java deleted file mode 100644 index efbae4a80..000000000 --- a/webapp/src/edu/cornell/mannlib/vitro/webapp/servlet/setup/UserModelSetup.java +++ /dev/null @@ -1,76 +0,0 @@ -/* $This file is distributed under the terms of the license in /doc/license.txt$ */ -package edu.cornell.mannlib.vitro.webapp.servlet.setup; - -import javax.servlet.ServletContext; -import javax.servlet.ServletContextEvent; -import javax.servlet.ServletContextListener; -import javax.sql.DataSource; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - -import com.hp.hpl.jena.ontology.OntModel; -import com.hp.hpl.jena.rdf.model.Model; -import com.hp.hpl.jena.rdf.model.ModelFactory; - -import edu.cornell.mannlib.vitro.webapp.dao.ModelAccess; -import edu.cornell.mannlib.vitro.webapp.dao.jena.ModelSynchronizer; -import edu.cornell.mannlib.vitro.webapp.startup.StartupStatus; - -/** - * Setup the user account model. If it does not exist in the database, create - * and populate it. - */ -public class UserModelSetup extends JenaDataSourceSetupBase implements - ServletContextListener { - private static final Log log = LogFactory.getLog(UserModelSetup.class - .getName()); - - @Override - public void contextInitialized(ServletContextEvent sce) { - ServletContext ctx = sce.getServletContext(); - StartupStatus ss = StartupStatus.getBean(ctx); - - DataSource bds = getApplicationDataSource(ctx); - if (bds == null) { - ss.fatal( - this, - "A DataSource must be setup before ModelSetup " - + "is run. Make sure that JenaPersistentDataSourceSetup runs before " - + "ModelSetup."); - return; - } - - setupUserAccountModel(bds, ctx, ss); - } - - @Override - public void contextDestroyed(ServletContextEvent arg0) { - // Does nothing. 
- } - - private void setupUserAccountModel(DataSource bds, ServletContext ctx, - StartupStatus ss) { - try { - Model userAccountsDbModel = makeDBModel(bds, - JENA_USER_ACCOUNTS_MODEL, DB_ONT_MODEL_SPEC, ctx); - OntModel userAccountsModel = ModelFactory - .createOntologyModel(MEM_ONT_MODEL_SPEC); - - userAccountsModel.add(userAccountsDbModel); - userAccountsModel.getBaseModel().register( - new ModelSynchronizer(userAccountsDbModel)); - - // This is used in Selenium testing, to load accounts from a file. - RDFFilesLoader.loadFirstTimeFiles(ctx, "auth", userAccountsModel, - userAccountsDbModel.isEmpty()); - // This gets the permissions configuration. - RDFFilesLoader.loadEveryTimeFiles(ctx, "auth", userAccountsModel); - - ModelAccess.on(ctx).setUserAccountsModel(userAccountsModel); - } catch (Throwable t) { - log.error("Unable to load user accounts model from DB", t); - ss.fatal(this, "Unable to load user accounts model from DB", t); - } - } -} diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/web/templatemodels/individual/BaseIndividualTemplateModel.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/web/templatemodels/individual/BaseIndividualTemplateModel.java index 0d934e559..a233014fc 100644 --- a/webapp/src/edu/cornell/mannlib/vitro/webapp/web/templatemodels/individual/BaseIndividualTemplateModel.java +++ b/webapp/src/edu/cornell/mannlib/vitro/webapp/web/templatemodels/individual/BaseIndividualTemplateModel.java @@ -58,7 +58,7 @@ public abstract class BaseIndividualTemplateModel extends BaseTemplateModel { protected boolean isVClass(String vClassUri) { boolean isVClass = individual.isVClass(vClassUri); - // If reasoning is asynchronous (under RDB), this inference may not have been made yet. + // If reasoning is asynchronous, this inference may not have been made yet. // Check the superclasses of the individual's vclass. 
SimpleReasoner simpleReasoner = (SimpleReasoner) ctx.getAttribute(SimpleReasoner.class.getName()); if (!isVClass && simpleReasoner != null && simpleReasoner.isABoxReasoningAsynchronous()) { diff --git a/webapp/test/edu/cornell/mannlib/vitro/webapp/controller/api/sparqlquery/SparqlQueryApiExecutorTest.java b/webapp/test/edu/cornell/mannlib/vitro/webapp/controller/api/sparqlquery/SparqlQueryApiExecutorTest.java index c144db43e..f288b7d46 100644 --- a/webapp/test/edu/cornell/mannlib/vitro/webapp/controller/api/sparqlquery/SparqlQueryApiExecutorTest.java +++ b/webapp/test/edu/cornell/mannlib/vitro/webapp/controller/api/sparqlquery/SparqlQueryApiExecutorTest.java @@ -121,8 +121,8 @@ public class SparqlQueryApiExecutorTest extends AbstractTestClass { + " .\n"; private static final String CONSTRUCT_RESULT_TURTLE = "" // + "@prefix rdfs: .\n" // - + "@prefix owl: .\n" // + "@prefix xsd: .\n" // + + "@prefix owl: .\n" // + "@prefix rdf: .\n" // + "\n" // + "\n" // @@ -173,8 +173,8 @@ public class SparqlQueryApiExecutorTest extends AbstractTestClass { + " .\n"; private static final String DESCRIBE_RESULT_TURTLE = "" // + "@prefix rdfs: .\n" // - + "@prefix owl: .\n" // + "@prefix xsd: .\n" // + + "@prefix owl: .\n" // + "@prefix rdf: .\n" // + "\n" // + "\n" // @@ -424,7 +424,7 @@ public class SparqlQueryApiExecutorTest extends AbstractTestClass { rdfService, queryString, acceptHeader); executor.executeAndFormat(out); - assertEquals(message, expected, out.toString().replace("\r", "")); + assertEquals(message, expected.replaceAll("\\s+", " "), out.toString().replaceAll("\\s+", " ")); } private void executeWithInvalidAcceptHeader(String message, diff --git a/webapp/web/WEB-INF/resources/startup_listeners.txt b/webapp/web/WEB-INF/resources/startup_listeners.txt index 78e2e230d..8c18c5407 100644 --- a/webapp/web/WEB-INF/resources/startup_listeners.txt +++ b/webapp/web/WEB-INF/resources/startup_listeners.txt @@ -24,8 +24,7 @@ edu.cornell.mannlib.vitro.webapp.servlet.setup.JenaPersistentDataSourceSetup edu.cornell.mannlib.vitro.webapp.servlet.setup.RDFServiceSetup -edu.cornell.mannlib.vitro.webapp.servlet.setup.ApplicationModelSetup -edu.cornell.mannlib.vitro.webapp.servlet.setup.UserModelSetup +edu.cornell.mannlib.vitro.webapp.servlet.setup.ConfigurationModelsSetup edu.cornell.mannlib.vitro.webapp.servlet.setup.ContentModelSetup edu.cornell.mannlib.vitro.webapp.servlet.setup.ModelMakerSetup diff --git a/webapp/web/jenaIngest/listModels.jsp b/webapp/web/jenaIngest/listModels.jsp index 3297b4184..a4b9d7de7 100644 --- a/webapp/web/jenaIngest/listModels.jsp +++ b/webapp/web/jenaIngest/listModels.jsp @@ -12,12 +12,12 @@ @@ -27,26 +27,23 @@ function init(){
Currently showing ${infoLine}
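// End-to-end sketch of the new TDB-backed service (directory path, graph URI, and triple are
// illustrative only; manufactureChangeSet() and addAddition(...) are assumed from the ChangeSet
// API used elsewhere in Vitro; run inside a method that declares IOException and RDFServiceException):
RDFServiceFactory factory = new RDFServiceFactoryTDB("/usr/local/vitro-home/tdbModels");
RDFService rdfService = factory.getRDFService();
ChangeSet cs = rdfService.manufactureChangeSet();
cs.addAddition(
        RDFServiceUtils.toInputStream(
                "<http://example.org/s> <http://example.org/p> <http://example.org/o> ."),
        RDFService.ModelSerializationFormat.N3,
        "http://example.org/graph");
rdfService.changeSetUpdate(cs); // runs the shared pipeline defined in RDFServiceJena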