eliminating Sesame dependencies

brianjlowe 2014-05-09 15:47:04 -04:00
parent a29b61d277
commit 710844a2fb
54 changed files with 84 additions and 608 deletions

11 binary files not shown.


@ -112,6 +112,14 @@ public class BaseResourceBean implements ResourceBean {
}
}
public BaseResourceBean() {
// default constructor
}
public BaseResourceBean(String uri) {
buildLocalAndNS(uri);
}
@Override
public boolean isAnonymous() {
return (this.URI==null || VitroVocabulary.PSEUDO_BNODE_NS.equals(this.getNamespace()));
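The new BaseResourceBean(String uri) constructor above takes over the namespace/local-name splitting that Sesame's URIImpl used to provide; the ObjectProperty, VClass, and VClassGroup hunks below switch to it and then read getNamespace() and getLocalName(). A minimal before/after sketch of that substitution — the example URI and the expected split point (after the final '#' or '/') are illustrative assumptions, since buildLocalAndNS() itself is not shown in this hunk:

// Before (Sesame): split a URI with org.openrdf.model.impl.URIImpl
//   URIImpl uri = new URIImpl("http://example.org/ontology#Person");
//   String ns = uri.getNamespace();   // "http://example.org/ontology#"
//   String ln = uri.getLocalName();   // "Person"
// After (no Sesame): the bean does the split itself via buildLocalAndNS(uri)
BaseResourceBean bean = new BaseResourceBean("http://example.org/ontology#Person");
String namespace = bean.getNamespace();   // expected: "http://example.org/ontology#"
String localName = bean.getLocalName();   // expected: "Person"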


@ -14,7 +14,6 @@ import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.joda.time.DateTime;
import org.openrdf.model.impl.URIImpl;
import com.hp.hpl.jena.datatypes.xsd.XSDDatatype;
@ -184,7 +183,7 @@ public class ObjectProperty extends Property implements Comparable<ObjectProperty>
this.localNameInverse = null;
} else {
this.URIInverse = URIInverse;
URIImpl uriInverse = new URIImpl(URIInverse);
BaseResourceBean uriInverse = new BaseResourceBean(URIInverse);
this.namespaceInverse = uriInverse.getNamespace();
this.localNameInverse = uriInverse.getLocalName();
}


@ -6,8 +6,6 @@ import java.text.Collator;
import java.util.ArrayList;
import java.util.List;
import org.openrdf.model.impl.URIImpl;
/**
* A Java class representing an ontology ("Vitro") class
*
@ -124,14 +122,8 @@ public class VClass extends BaseResourceBean implements Comparable<VClass>
*/
public VClass( String uriString )
{
// The URIImpl class can be used to parse a URI string into its component parts
URIImpl uri = new URIImpl(uriString);
// Use the URIImpl to obtain parts of this URI for local storage
myName = uri.getLocalName();
URI = uriString;
namespace = uri.getNamespace();
localName = uri.getLocalName();
super(uriString);
myName = getLocalName();
}
/**


@ -12,7 +12,6 @@ import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.openrdf.model.impl.URIImpl;
public class VClassGroup extends LinkedList <VClass> implements Comparable<VClassGroup> {
@ -56,7 +55,7 @@ public class VClassGroup extends LinkedList <VClass> implements Comparable<VClassGroup> {
public VClassGroup(String uri, String name, int rank) {
super();
this.URI = uri;
URIImpl theURI = new URIImpl(uri);
BaseResourceBean theURI = new BaseResourceBean(uri);
this.namespace = theURI.getNamespace();
this.localName = theURI.getLocalName();
this.displayRank = rank;


@ -1,64 +0,0 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.dao.jena;
import java.util.ArrayList;
import java.util.Iterator;
import com.hp.hpl.jena.graph.Node;
import com.hp.hpl.jena.query.Dataset;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.shared.Lock;
import com.hp.hpl.jena.sparql.core.DatasetGraph;
public class SparqlDataset implements Dataset {
private SparqlDatasetGraph g;
public SparqlDataset(SparqlDatasetGraph g) {
this.g = g;
}
@Override
public DatasetGraph asDatasetGraph() {
return g;
}
@Override
public void close() {
g.close();
}
@Override
public boolean containsNamedModel(String arg0) {
return g.containsGraph(Node.createURI(arg0));
}
@Override
public Model getDefaultModel() {
return ModelFactory.createModelForGraph(g.getDefaultGraph());
}
@Override
public Lock getLock() {
return g.getLock();
}
@Override
public Model getNamedModel(String arg0) {
return ModelFactory.createModelForGraph(g.getGraph(Node.createURI(arg0)));
}
@Override
public Iterator<String> listNames() {
ArrayList<String> nameList = new ArrayList<String>();
Iterator<Node> nodeIt = g.listGraphNodes();
while (nodeIt.hasNext()) {
Node n = nodeIt.next();
nameList.add(n.getURI());
}
return nameList.iterator();
}
}


@ -1,266 +0,0 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.dao.jena;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import org.openrdf.model.Resource;
import org.openrdf.repository.Repository;
import org.openrdf.repository.RepositoryConnection;
import org.openrdf.repository.RepositoryException;
import org.openrdf.repository.RepositoryResult;
import org.openrdf.repository.http.HTTPRepository;
import com.hp.hpl.jena.graph.Graph;
import com.hp.hpl.jena.graph.Node;
import com.hp.hpl.jena.graph.Triple;
import com.hp.hpl.jena.query.Query;
import com.hp.hpl.jena.query.QueryExecution;
import com.hp.hpl.jena.query.QueryExecutionFactory;
import com.hp.hpl.jena.query.QueryFactory;
import com.hp.hpl.jena.query.QuerySolution;
import com.hp.hpl.jena.query.ResultSet;
import com.hp.hpl.jena.shared.Lock;
import com.hp.hpl.jena.shared.LockMRSW;
import com.hp.hpl.jena.sparql.core.DatasetGraph;
import com.hp.hpl.jena.sparql.core.Quad;
import com.hp.hpl.jena.sparql.resultset.ResultSetMem;
import com.hp.hpl.jena.sparql.util.Context;
import com.hp.hpl.jena.util.iterator.SingletonIterator;
import com.hp.hpl.jena.util.iterator.WrappedIterator;
public class SparqlDatasetGraph implements DatasetGraph {
private String endpointURI;
private Repository repository;
private Lock lock = new LockMRSW();
public SparqlDatasetGraph(String endpointURI) {
this.endpointURI = endpointURI;
this.repository = new HTTPRepository(endpointURI);
}
private Graph getGraphFor(Quad q) {
return getGraphFor(q.getGraph());
}
private Graph getGraphFor(Node g) {
return (g == Node.ANY)
? new SparqlGraph(endpointURI)
: new SparqlGraph(endpointURI, g.getURI());
}
@Override
public void add(Quad arg0) {
getGraphFor(arg0).add(new Triple(arg0.getSubject(), arg0.getPredicate(), arg0.getObject()));
}
@Override
public void addGraph(Node arg0, Graph arg1) {
// TODO Auto-generated method stub
}
@Override
public void close() {
// TODO Auto-generated method stub
}
@Override
public boolean contains(Quad arg0) {
return getGraphFor(arg0).contains(new Triple(arg0.getSubject(), arg0.getPredicate(), arg0.getObject()));
}
@Override
public boolean contains(Node arg0, Node arg1, Node arg2, Node arg3) {
return getGraphFor(arg0).contains(arg1, arg2, arg3);
}
@Override
public boolean containsGraph(Node arg0) {
// TODO Auto-generated method stub
return true;
}
@Override
public void delete(Quad arg0) {
getGraphFor(arg0).delete(new Triple(arg0.getSubject(), arg0.getPredicate(), arg0.getObject()));
}
@Override
public void deleteAny(Node arg0, Node arg1, Node arg2, Node arg3) {
// TODO check this
getGraphFor(arg0).delete(new Triple(arg1, arg2, arg3));
}
@Override
public Iterator<Quad> find() {
return find(Node.ANY, Node.ANY, Node.ANY, Node.ANY);
}
@Override
public Iterator<Quad> find(Quad arg0) {
return find(arg0.getSubject(), arg0.getPredicate(), arg0.getObject(), arg0.getGraph());
}
@Override
public Iterator<Quad> find(Node graph, Node subject, Node predicate, Node object) {
if (!isVar(subject) && !isVar(predicate) && !isVar(object) &&!isVar(graph)) {
if (contains(subject, predicate, object, graph)) {
return new SingletonIterator(new Triple(subject, predicate, object));
} else {
return WrappedIterator.create(Collections.EMPTY_LIST.iterator());
}
}
StringBuffer findQuery = new StringBuffer("SELECT * WHERE { \n");
String graphURI = !isVar(graph) ? graph.getURI() : null;
findQuery.append(" GRAPH ");
if (graphURI != null) {
findQuery.append(" <" + graphURI + ">");
} else {
findQuery.append("?g");
}
findQuery.append(" { ");
findQuery.append(SparqlGraph.sparqlNode(subject, "?s"))
.append(" ")
.append(SparqlGraph.sparqlNode(predicate, "?p"))
.append(" ")
.append(SparqlGraph.sparqlNode(object, "?o"));
findQuery.append(" } ");
findQuery.append("\n}");
//log.info(findQuery.toString());
ResultSet rs = execSelect(findQuery.toString());
//rs = execSelect(findQuery.toString());
//rs = execSelect(findQuery.toString());
List<Quad> quadlist = new ArrayList<Quad>();
while (rs.hasNext()) {
QuerySolution soln = rs.nextSolution();
Quad q = new Quad(isVar(graph) ? soln.get("?g").asNode() : graph,
isVar(subject) ? soln.get("?s").asNode() : subject,
isVar(predicate) ? soln.get("?p").asNode() : predicate,
isVar(object) ? soln.get("?o").asNode() : object);
//log.info(t);
quadlist.add(q);
}
//log.info(triplist.size() + " results");
return WrappedIterator.create(quadlist.iterator()); }
@Override
public Iterator<Quad> findNG(Node arg0, Node arg1, Node arg2, Node arg3) {
// TODO check this
return find(arg0, arg1, arg2, arg3);
}
@Override
public Context getContext() {
// TODO Auto-generated method stub
return null;
}
@Override
public Graph getDefaultGraph() {
return new SparqlGraph(endpointURI);
}
@Override
public Graph getGraph(Node arg0) {
return new SparqlGraph(endpointURI, arg0.getURI());
}
@Override
public Lock getLock() {
return lock;
}
@Override
public boolean isEmpty() {
// TODO Auto-generated method stub
return false;
}
@Override
public Iterator<Node> listGraphNodes() {
List<Node> graphNodeList = new ArrayList<Node>();
try {
RepositoryConnection conn = getConnection();
try {
RepositoryResult<Resource> conResult = conn.getContextIDs();
while (conResult.hasNext()) {
Resource con = conResult.next();
graphNodeList.add(Node.createURI(con.stringValue()));
}
} finally {
conn.close();
}
} catch (RepositoryException re) {
throw new RuntimeException(re);
}
return graphNodeList.iterator();
}
private RepositoryConnection getConnection() {
try {
return this.repository.getConnection();
} catch (RepositoryException e) {
throw new RuntimeException(e);
}
}
@Override
public void removeGraph(Node arg0) {
// TODO Auto-generated method stub
}
@Override
public void setDefaultGraph(Graph arg0) {
// TODO Auto-generated method stub
}
@Override
public long size() {
// TODO Auto-generated method stub
return 0;
}
private boolean isVar(Node node) {
return (node == null || node.isVariable() || node == Node.ANY);
}
private ResultSet execSelect(String queryStr) {
// long startTime1 = System.currentTimeMillis();
// try {
//
// RepositoryConnection conn = getConnection();
// try {
// GraphQuery q = conn.prepareGraphQuery(QueryLanguage.SPARQL, queryStr);
// q.evaluate();
// } catch (MalformedQueryException e) {
// throw new RuntimeException(e);
// } finally {
// conn.close();
// }
// } catch (Exception re) {
// //log.info(re,re);
// }
// log.info((System.currentTimeMillis() - startTime1) + " to execute via sesame");
long startTime = System.currentTimeMillis();
Query askQuery = QueryFactory.create(queryStr);
QueryExecution qe = QueryExecutionFactory.sparqlService(endpointURI, askQuery);
try {
return new ResultSetMem(qe.execSelect());
} finally {
//log.info((System.currentTimeMillis() - startTime) + " to execute via Jena");
qe.close();
}
}
}
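The deleted SparqlDatasetGraph above was one of the places that enumerated named graphs through Sesame's RepositoryConnection.getContextIDs(). The rest of this commit does that enumeration with plain SPARQL against the endpoint (see the RDFServiceSparql hunk further down). A minimal sketch of the query pattern, using Jena classes already imported in the file above; the endpoint URI is a placeholder:

// Enumerate named graphs with a SPARQL SELECT instead of Sesame's getContextIDs();
// the endpoint URI is illustrative.
Query query = QueryFactory.create(
        "SELECT DISTINCT ?g WHERE { GRAPH ?g { ?s ?p ?o } }");
QueryExecution qe = QueryExecutionFactory.sparqlService(
        "http://localhost:8080/vivo/sparql", query);
try {
    ResultSet rs = qe.execSelect();
    while (rs.hasNext()) {
        QuerySolution soln = rs.nextSolution();
        System.out.println(soln.getResource("g").getURI());
    }
} finally {
    qe.close();
}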


@ -3,19 +3,12 @@
package edu.cornell.mannlib.vitro.webapp.dao.jena;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.openrdf.query.MalformedQueryException;
import org.openrdf.query.QueryLanguage;
import org.openrdf.query.Update;
import org.openrdf.query.UpdateExecutionException;
import org.openrdf.repository.Repository;
import org.openrdf.repository.RepositoryConnection;
import org.openrdf.repository.RepositoryException;
import org.openrdf.repository.http.HTTPRepository;
import com.hp.hpl.jena.graph.BulkUpdateHandler;
import com.hp.hpl.jena.graph.Capabilities;
@ -46,10 +39,23 @@ import com.hp.hpl.jena.util.iterator.ExtendedIterator;
import com.hp.hpl.jena.util.iterator.SingletonIterator;
import com.hp.hpl.jena.util.iterator.WrappedIterator;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.utils.URIBuilder;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.apache.http.message.BasicNameValuePair;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException;
public class SparqlGraph implements GraphWithPerform {
private String endpointURI;
private String graphURI;
private CloseableHttpClient httpClient;
private static final Log log = LogFactory.getLog(SparqlGraph.class);
private BulkUpdateHandler bulkUpdateHandler;
@ -60,8 +66,6 @@ public class SparqlGraph implements GraphWithPerform {
private TransactionHandler transactionHandler;
private QueryHandler queryHandler;
private Repository repository;
/**
* Returns a SparqlGraph for the union of named graphs in a remote repository
* @param endpointURI
@ -78,7 +82,10 @@ public class SparqlGraph implements GraphWithPerform {
public SparqlGraph(String endpointURI, String graphURI) {
this.endpointURI = endpointURI;
this.graphURI = graphURI;
this.repository = new HTTPRepository(endpointURI);
PoolingHttpClientConnectionManager cm = new PoolingHttpClientConnectionManager();
cm.setDefaultMaxPerRoute(50);
this.httpClient = HttpClients.custom().setConnectionManager(cm).build();
}
public String getEndpointURI() {
@ -88,14 +95,6 @@ public class SparqlGraph implements GraphWithPerform {
public String getGraphURI() {
return graphURI;
}
public RepositoryConnection getConnection() {
try {
return this.repository.getConnection();
} catch (RepositoryException e) {
throw new RuntimeException(e);
}
}
@Override
public void add(Triple arg0) throws AddDeniedException {
@ -104,22 +103,24 @@ public class SparqlGraph implements GraphWithPerform {
public void executeUpdate(String updateString) {
try {
RepositoryConnection conn = getConnection();
HttpPost meth = new HttpPost(endpointURI);
meth.addHeader("Content-Type", "application/x-www-form-urlencoded");
meth.setEntity(new UrlEncodedFormEntity(Arrays.asList(
new BasicNameValuePair("update", updateString))));
CloseableHttpResponse response = httpClient.execute(meth);
try {
Update u = conn.prepareUpdate(QueryLanguage.SPARQL, updateString);
u.execute();
} catch (MalformedQueryException e) {
throw new RuntimeException(e);
} catch (UpdateExecutionException e) {
log.error(e,e);
log.error("Update command: \n" + updateString);
throw new RuntimeException(e);
int statusCode = response.getStatusLine().getStatusCode();
if (statusCode > 399) {
log.error("response " + statusCode + " to update. \n");
throw new RuntimeException("Unable to perform SPARQL UPDATE: \n"
+ updateString);
}
} finally {
conn.close();
}
} catch (RepositoryException re) {
throw new RuntimeException(re);
}
response.close();
}
} catch (Exception e) {
throw new RuntimeException("Unable to perform SPARQL UPDATE", e);
}
}
@Override

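The reworked executeUpdate above sends the update string straight to the endpoint as a SPARQL 1.1 Protocol request (a form-encoded update parameter POSTed by the pooled HttpClient) instead of going through a Sesame RepositoryConnection. A hypothetical caller, with placeholder endpoint, graph, and triple values:

// Hypothetical usage of the HttpClient-backed executeUpdate shown above;
// endpoint URI, graph URI, and the inserted triple are placeholder values.
SparqlGraph graph = new SparqlGraph("http://localhost:8080/vivo/sparqlUpdate",
                                    "http://example.org/graph/demo");
graph.executeUpdate(
        "INSERT DATA { GRAPH <http://example.org/graph/demo> { " +
        "<http://example.org/s> <http://example.org/p> \"o\" } }");
// Any response with a status code above 399 now surfaces as a RuntimeException
// rather than a Sesame UpdateExecutionException.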

@ -14,9 +14,8 @@ import javax.servlet.http.HttpServletResponseWrapper;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.openrdf.model.URI;
import org.openrdf.model.impl.URIImpl;
import edu.cornell.mannlib.vitro.webapp.beans.IndividualImpl;
import edu.cornell.mannlib.vitro.webapp.dao.ModelAccess;
import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactory;
import edu.cornell.mannlib.vitro.webapp.utils.NamespaceMapper;
@ -136,7 +135,7 @@ public class URLRewritingHttpServletResponse extends HttpServletResponseWrapper {
qpIndex++;
if ( ("uri".equals(keyAndValue[0])) && (keyAndValue.length>1) && (keyAndValue[1] != null) ) {
try {
URI uri = new URIImpl(keyAndValue[1]);
IndividualImpl uri = new IndividualImpl(keyAndValue[1]);
String namespace = uri.getNamespace();
String localName = uri.getLocalName();
if ( (namespace != null) && (localName != null) ) {

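Here, and again in NameStatementTemplateModel at the end of this commit, Sesame's URI/URIImpl is replaced by the webapp's own IndividualImpl, which exposes the same getNamespace()/getLocalName() pair. A small illustrative sketch; the URI value and the expected split are assumptions:

// Illustrative substitution of IndividualImpl for Sesame's URIImpl; the URI value
// and the expected namespace/local-name split are assumptions.
IndividualImpl ind = new IndividualImpl("http://vivo.example.edu/individual/n1234");
String namespace = ind.getNamespace();   // expected: "http://vivo.example.edu/individual/"
String localName = ind.getLocalName();   // expected: "n1234"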

@ -19,7 +19,6 @@ import javax.servlet.ServletContext;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.openrdf.model.impl.URIImpl;
import com.hp.hpl.jena.iri.IRI;
import com.hp.hpl.jena.iri.IRIFactory;


@ -26,11 +26,6 @@ import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.apache.http.message.BasicNameValuePair;
import org.openrdf.model.Resource;
import org.openrdf.repository.RepositoryConnection;
import org.openrdf.repository.RepositoryException;
import org.openrdf.repository.RepositoryResult;
import org.openrdf.repository.http.HTTPRepository;
import com.hp.hpl.jena.graph.Triple;
import com.hp.hpl.jena.query.Query;
@ -44,6 +39,7 @@ import com.hp.hpl.jena.query.ResultSetFormatter;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.RDFNode;
import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.rdf.model.Statement;
import com.hp.hpl.jena.rdf.model.StmtIterator;
@ -67,10 +63,7 @@ public class RDFServiceSparql extends RDFServiceImpl implements RDFService {
private static final Log log = LogFactory.getLog(RDFServiceImpl.class);
protected String readEndpointURI;
protected String updateEndpointURI;
private HTTPRepository readRepository;
private HTTPRepository updateRepository;
private CloseableHttpClient httpClient;
private boolean useSesameContextQuery = true;
// the number of triples to be
private static final int CHUNK_SIZE = 1000; // added/removed in a single
// SPARQL UPDATE
@ -89,8 +82,6 @@ public class RDFServiceSparql extends RDFServiceImpl implements RDFService {
public RDFServiceSparql(String readEndpointURI, String updateEndpointURI, String defaultWriteGraphURI) {
this.readEndpointURI = readEndpointURI;
this.updateEndpointURI = updateEndpointURI;
this.readRepository = new HTTPRepository(readEndpointURI);
this.updateRepository = new HTTPRepository(updateEndpointURI);
PoolingHttpClientConnectionManager cm = new PoolingHttpClientConnectionManager();
cm.setDefaultMaxPerRoute(50);
@ -135,12 +126,7 @@ public class RDFServiceSparql extends RDFServiceImpl implements RDFService {
}
public void close() {
try {
this.readRepository.shutDown();
this.updateRepository.shutDown();
} catch (RepositoryException re) {
log.error(re, re);
}
// nothing for now
}
/**
@ -234,6 +220,8 @@ public class RDFServiceSparql extends RDFServiceImpl implements RDFService {
try {
qe.execConstruct(model);
} catch (Exception e) {
log.error("Error executing CONSTRUCT against remote endpoint: " + queryStr);
} finally {
qe.close();
}
@ -359,47 +347,40 @@ public class RDFServiceSparql extends RDFServiceImpl implements RDFService {
*
* @return List<String> - list of all the graph URIs in the RDF store
*/
//TODO - need to verify that the sesame getContextIDs method is implemented
// in such a way that it works with all triple stores that support the
// graph update API
@Override
public List<String> getGraphURIs() throws RDFServiceException {
if (!this.useSesameContextQuery) {
return getGraphURIsFromSparqlQuery();
} else {
try {
return getGraphURIsFromSesameContexts();
} catch (RepositoryException re) {
this.useSesameContextQuery = false;
return getGraphURIsFromSparqlQuery();
}
}
return getGraphURIsFromSparqlQuery();
}
private List<String> getGraphURIsFromSesameContexts() throws RepositoryException {
List<String> graphURIs = new ArrayList<String>();
RepositoryConnection conn = getReadConnection();
try {
RepositoryResult<Resource> conResult = conn.getContextIDs();
while (conResult.hasNext()) {
Resource res = conResult.next();
graphURIs.add(res.stringValue());
}
} finally {
conn.close();
}
return graphURIs;
}
private List<String> getGraphURIsFromSparqlQuery() throws RDFServiceException {
private List<String> getGraphURIsFromSparqlQuery() throws RDFServiceException {
String fastJenaQuery = "SELECT DISTINCT ?g WHERE { GRAPH ?g {} } ORDER BY ?g";
String standardQuery = "SELECT DISTINCT ?g WHERE { GRAPH ?g { ?s ?p ?o } } ORDER BY ?g";
List<String> graphURIs = new ArrayList<String>();
try {
String graphURIString = "SELECT DISTINCT ?g WHERE { GRAPH ?g { ?s ?p ?o } } ORDER BY ?g";
graphURIs = getGraphURIsFromSparqlQuery(fastJenaQuery);
} catch (Exception e) {
log.debug("Unable to use non-standard ARQ query for graph list", e);
}
if (graphURIs.isEmpty()) {
graphURIs = getGraphURIsFromSparqlQuery(standardQuery);
}
return graphURIs;
}
private List<String> getGraphURIsFromSparqlQuery(String queryString) throws RDFServiceException {
List<String> graphURIs = new ArrayList<String>();
try {
ResultSet rs = ResultSetFactory.fromJSON(
sparqlSelectQuery(graphURIString, RDFService.ResultFormat.JSON));
sparqlSelectQuery(queryString, RDFService.ResultFormat.JSON));
while (rs.hasNext()) {
QuerySolution qs = rs.nextSolution();
graphURIs.add(qs.getResource("g").getURI());
if (qs != null) { // no idea how this happens, but it seems to
RDFNode n = qs.getResource("g");
if (n != null && n.isResource()) {
graphURIs.add(((Resource) n).getURI());
}
}
}
} catch (Exception e) {
throw new RDFServiceException("Unable to list graph URIs", e);
@ -470,22 +451,6 @@ public class RDFServiceSparql extends RDFServiceImpl implements RDFService {
return updateEndpointURI;
}
protected RepositoryConnection getReadConnection() {
try {
return this.readRepository.getConnection();
} catch (RepositoryException e) {
throw new RuntimeException(e);
}
}
protected RepositoryConnection getWriteConnection() {
try {
return this.updateRepository.getConnection();
} catch (RepositoryException e) {
throw new RuntimeException(e);
}
}
protected void executeUpdate(String updateString) throws RDFServiceException {
try {
HttpPost meth = new HttpPost(updateEndpointURI);

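With the Sesame getContextIDs() code path removed, getGraphURIs() above is SPARQL-only: it first tries the faster GRAPH ?g {} form and falls back to the portable GRAPH ?g { ?s ?p ?o } query if that fails or returns nothing. A hypothetical caller; the endpoint URIs are placeholders and passing null for the default write graph is assumed to be acceptable:

// Hypothetical use of the SPARQL-only graph listing; endpoint URIs are placeholders.
RDFServiceSparql rdfService = new RDFServiceSparql(
        "http://localhost:8080/vivo/sparql",   // read endpoint
        "http://localhost:8080/vivo/update",   // update endpoint
        null);                                 // default write graph (assumed optional)
try {
    for (String graphURI : rdfService.getGraphURIs()) {
        System.out.println(graphURI);
    }
} catch (RDFServiceException e) {
    throw new RuntimeException("Unable to list graphs", e);
} finally {
    rdfService.close();
}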

@ -15,9 +15,9 @@ import javax.servlet.http.HttpServletResponse;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.openrdf.model.vocabulary.OWL;
import org.openrdf.model.vocabulary.RDF;
import com.hp.hpl.jena.vocabulary.OWL;
import com.hp.hpl.jena.vocabulary.RDF;
import com.hp.hpl.jena.vocabulary.RDFS;
import com.hp.hpl.jena.vocabulary.XSD;
@ -99,8 +99,8 @@ public class GetAllPrefix extends BaseEditController {
}
// add standard namespaces
addPrefixIfNecessary("owl", OWL.NAMESPACE, prefixMap);
addPrefixIfNecessary("rdf", RDF.NAMESPACE, prefixMap);
addPrefixIfNecessary("owl", OWL.getURI(), prefixMap);
addPrefixIfNecessary("rdf", RDF.getURI(), prefixMap);
addPrefixIfNecessary("rdfs", RDFS.getURI(), prefixMap);
addPrefixIfNecessary("swrl", "http://www.w3.org/2003/11/swrl#", prefixMap);
addPrefixIfNecessary("swrlb", "http://www.w3.org/2003/11/swrlb#", prefixMap);
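The standard prefixes now come from Jena's vocabulary classes rather than Sesame's OWL.NAMESPACE/RDF.NAMESPACE constants; both expand to the same namespace URIs. A quick stand-alone check (the class name is arbitrary):

// Illustrative check that the Jena vocabulary constants used above expand to the
// usual namespace URIs.
import com.hp.hpl.jena.vocabulary.OWL;
import com.hp.hpl.jena.vocabulary.RDF;
import com.hp.hpl.jena.vocabulary.RDFS;

public class PrefixCheck {
    public static void main(String[] args) {
        System.out.println(OWL.getURI());   // http://www.w3.org/2002/07/owl#
        System.out.println(RDF.getURI());   // http://www.w3.org/1999/02/22-rdf-syntax-ns#
        System.out.println(RDFS.getURI());  // http://www.w3.org/2000/01/rdf-schema#
    }
}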


@ -1,156 +0,0 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.utils.jena;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.openrdf.model.Resource;
import org.openrdf.model.impl.URIImpl;
import org.openrdf.repository.Repository;
import org.openrdf.repository.RepositoryConnection;
import org.openrdf.repository.RepositoryException;
import org.openrdf.repository.http.HTTPRepository;
import org.openrdf.rio.RDFFormat;
import org.openrdf.rio.RDFParseException;
import com.hp.hpl.jena.query.Query;
import com.hp.hpl.jena.query.QueryExecution;
import com.hp.hpl.jena.query.QueryExecutionFactory;
import com.hp.hpl.jena.query.QueryFactory;
import com.hp.hpl.jena.query.QuerySolution;
import com.hp.hpl.jena.query.ResultSet;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.RDFNode;
import com.hp.hpl.jena.vocabulary.OWL;
public class SesameSyncUtils {
private static final Log log = LogFactory.getLog(SesameSyncUtils.class);
public void writeModelToSesameContext
(Model jenaModel, String serverURI, String repositoryId, String contextId)
throws RepositoryException, IOException, RDFParseException {
Repository myRepository = new HTTPRepository(serverURI, repositoryId);
myRepository.initialize();
RepositoryConnection myConn = myRepository.getConnection();
myConn.setAutoCommit(false);
try {
Resource contextRes = (contextId != null)
? new URIImpl(contextId) : null ;
if (contextRes != null) {
myConn.clear(contextRes);
} else {
myConn.clear();
}
PipedInputStream in = new PipedInputStream();
PipedOutputStream out = new PipedOutputStream(in);
try {
new Thread(new JenaOutputter(jenaModel, out, myConn), "SesameSyncUtilities.JenaOutputter").start();
if (contextRes != null) {
myConn.add(in,"http://example.org/base/", RDFFormat.NTRIPLES, contextRes);
} else {
myConn.add(in,"http://example.org/base/", RDFFormat.NTRIPLES);
}
} finally {
in.close();
}
myConn.commit();
} catch (Throwable e) {
myConn.rollback();
e.printStackTrace();
log.error("Error writing to Sesame repository", e);
throw new RuntimeException("Error writing to Sesame repository", e);
} finally {
myConn.close();
}
}
private List<String> getIndividualURIs(Model model) {
List<String> individualURIs = new ArrayList<String>();
String queryStr = "SELECT DISTINCT ?s WHERE { \n" +
" ?s a <" + OWL.Thing.getURI() + "> \n" +
"}";
Query query = QueryFactory.create(queryStr);
QueryExecution qe = QueryExecutionFactory.create(query, model);
try {
ResultSet rs = qe.execSelect();
while (rs.hasNext()) {
QuerySolution qsoln = rs.nextSolution();
String individualURI = qsoln.getResource("s").getURI();
if (individualURI != null) {
individualURIs.add(individualURI);
}
}
} finally {
qe.close();
}
return individualURIs;
}
private class JenaOutputter implements Runnable {
private Model model;
private OutputStream out;
private RepositoryConnection rconn;
public JenaOutputter(Model model, OutputStream out, RepositoryConnection rconn) {
this.model = model;
this.out = out;
this.rconn = rconn;
}
public void run() {
Model t = ModelFactory.createDefaultModel();
try {
List<String> individualURIs = getIndividualURIs(model);
log.info(individualURIs.size() + " individuals to send to Sesame");
int i = 0;
for (String individualURI : individualURIs) {
t.removeAll();
t.add(model.listStatements(
model.getResource(
individualURI), null, (RDFNode) null));
t.write(out, "N-TRIPLE");
i++;
if (i % 100 == 0) {
try {
rconn.commit();
} catch (Throwable e) {
log.error(e, e);
}
log.info(i + " individuals sent to Sesame");
}
}
} finally {
try {
out.flush();
out.close();
} catch (IOException ioe) {
ioe.printStackTrace();
}
}
}
}
}


@ -4,8 +4,6 @@ package edu.cornell.mannlib.vitro.webapp.web.templatemodels.individual;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.openrdf.model.URI;
import org.openrdf.model.impl.URIImpl;
import com.hp.hpl.jena.rdf.model.Literal;
@ -14,6 +12,8 @@ import edu.cornell.mannlib.vitro.webapp.auth.requestedAction.RequestedAction;
import edu.cornell.mannlib.vitro.webapp.auth.requestedAction.propstmt.EditDataPropertyStatement;
import edu.cornell.mannlib.vitro.webapp.beans.DataPropertyStatement;
import edu.cornell.mannlib.vitro.webapp.beans.DataPropertyStatementImpl;
import edu.cornell.mannlib.vitro.webapp.beans.Individual;
import edu.cornell.mannlib.vitro.webapp.beans.IndividualImpl;
import edu.cornell.mannlib.vitro.webapp.beans.Property;
import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest;
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.UrlBuilder;
@ -51,7 +51,7 @@ public class NameStatementTemplateModel extends PropertyStatementTemplateModel {
// If the individual has no rdfs:label, use the local name. It will not be editable. (This replicates previous behavior;
// perhaps we would want to allow a label to be added. But such individuals do not usually have their profiles viewed or
// edited directly.)
URI uri = new URIImpl(subjectUri);
Individual uri = new IndividualImpl(subjectUri);
this.stringValue = uri.getLocalName();
this.editUrl = "";
} else {