eliminating Sesame dependencies
parent a29b61d277 · commit 710844a2fb
54 changed files with 84 additions and 608 deletions
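The recurring source change below replaces Sesame's org.openrdf URI parsing (URIImpl) with Vitro's own beans (BaseResourceBean, IndividualImpl) wherever a URI only needs to be split into namespace and local name. A minimal usage sketch of the new pattern, for illustration only: the package of BaseResourceBean and the exact split behaviour of buildLocalAndNS() are assumptions inferred from the hunks below, and the example URI is arbitrary.

    // Sketch (not part of the commit): splitting a URI without Sesame.
    // Assumption: BaseResourceBean sits in the same beans package as IndividualImpl
    // (imported elsewhere in this diff), and buildLocalAndNS() splits at the last
    // '#' or '/' the way Sesame's URIImpl did.
    import edu.cornell.mannlib.vitro.webapp.beans.BaseResourceBean;

    public class UriSplitSketch {
        public static void main(String[] args) {
            // before: URIImpl uri = new URIImpl("http://vivoweb.org/ontology/core#FacultyMember");
            BaseResourceBean uri = new BaseResourceBean("http://vivoweb.org/ontology/core#FacultyMember");
            System.out.println(uri.getNamespace()); // expected (under the assumption above): http://vivoweb.org/ontology/core#
            System.out.println(uri.getLocalName()); // expected: FacultyMember
        }
    }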
41 binary files not shown.
@@ -112,6 +112,14 @@ public class BaseResourceBean implements ResourceBean {
         }
     }
 
+    public BaseResourceBean() {
+        // default constructor
+    }
+
+    public BaseResourceBean(String uri) {
+        buildLocalAndNS(uri);
+    }
+
     @Override
     public boolean isAnonymous() {
         return (this.URI==null || VitroVocabulary.PSEUDO_BNODE_NS.equals(this.getNamespace()));

@@ -14,7 +14,6 @@ import java.util.List;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.joda.time.DateTime;
-import org.openrdf.model.impl.URIImpl;
 
 import com.hp.hpl.jena.datatypes.xsd.XSDDatatype;
 

@@ -184,7 +183,7 @@ public class ObjectProperty extends Property implements Comparable<ObjectProperty>
             this.localNameInverse = null;
         } else {
             this.URIInverse = URIInverse;
-            URIImpl uriInverse = new URIImpl(URIInverse);
+            BaseResourceBean uriInverse = new BaseResourceBean(URIInverse);
             this.namespaceInverse = uriInverse.getNamespace();
             this.localNameInverse = uriInverse.getLocalName();
         }

@@ -6,8 +6,6 @@ import java.text.Collator;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.openrdf.model.impl.URIImpl;
-
 /**
  * A Java class representing an ontology ("Vitro") class
  *

@@ -124,14 +122,8 @@ public class VClass extends BaseResourceBean implements Comparable<VClass>
      */
     public VClass( String uriString )
     {
-        // The URIImpl class can be used to parse a URI string into its component parts
-        URIImpl uri = new URIImpl(uriString);
-
-        // Use the URIImpl to obtain parts of this URI for local storage
-        myName = uri.getLocalName();
-        URI = uriString;
-        namespace = uri.getNamespace();
-        localName = uri.getLocalName();
+        super(uriString);
+        myName = getLocalName();
     }
 
     /**

@@ -12,7 +12,6 @@ import java.util.Map;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.openrdf.model.impl.URIImpl;
 
 public class VClassGroup extends LinkedList <VClass> implements Comparable<VClassGroup> {
 

@@ -56,7 +55,7 @@ public class VClassGroup extends LinkedList <VClass> implements Comparable<VClassGroup>
     public VClassGroup(String uri, String name, int rank) {
         super();
         this.URI = uri;
-        URIImpl theURI = new URIImpl(uri);
+        BaseResourceBean theURI = new BaseResourceBean(uri);
         this.namespace = theURI.getNamespace();
         this.localName = theURI.getLocalName();
         this.displayRank = rank;

@@ -1,64 +0,0 @@
-/* $This file is distributed under the terms of the license in /doc/license.txt$ */
-
-package edu.cornell.mannlib.vitro.webapp.dao.jena;
-
-import java.util.ArrayList;
-import java.util.Iterator;
-
-import com.hp.hpl.jena.graph.Node;
-import com.hp.hpl.jena.query.Dataset;
-import com.hp.hpl.jena.rdf.model.Model;
-import com.hp.hpl.jena.rdf.model.ModelFactory;
-import com.hp.hpl.jena.shared.Lock;
-import com.hp.hpl.jena.sparql.core.DatasetGraph;
-
-public class SparqlDataset implements Dataset {
-
-    private SparqlDatasetGraph g;
-
-    public SparqlDataset(SparqlDatasetGraph g) {
-        this.g = g;
-    }
-
-    @Override
-    public DatasetGraph asDatasetGraph() {
-        return g;
-    }
-
-    @Override
-    public void close() {
-        g.close();
-    }
-
-    @Override
-    public boolean containsNamedModel(String arg0) {
-        return g.containsGraph(Node.createURI(arg0));
-    }
-
-    @Override
-    public Model getDefaultModel() {
-        return ModelFactory.createModelForGraph(g.getDefaultGraph());
-    }
-
-    @Override
-    public Lock getLock() {
-        return g.getLock();
-    }
-
-    @Override
-    public Model getNamedModel(String arg0) {
-        return ModelFactory.createModelForGraph(g.getGraph(Node.createURI(arg0)));
-    }
-
-    @Override
-    public Iterator<String> listNames() {
-        ArrayList<String> nameList = new ArrayList<String>();
-        Iterator<Node> nodeIt = g.listGraphNodes();
-        while (nodeIt.hasNext()) {
-            Node n = nodeIt.next();
-            nameList.add(n.getURI());
-        }
-        return nameList.iterator();
-    }
-
-}

@@ -1,266 +0,0 @@
-/* $This file is distributed under the terms of the license in /doc/license.txt$ */
-
-package edu.cornell.mannlib.vitro.webapp.dao.jena;
-
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.List;
-
-import org.openrdf.model.Resource;
-import org.openrdf.repository.Repository;
-import org.openrdf.repository.RepositoryConnection;
-import org.openrdf.repository.RepositoryException;
-import org.openrdf.repository.RepositoryResult;
-import org.openrdf.repository.http.HTTPRepository;
-
-import com.hp.hpl.jena.graph.Graph;
-import com.hp.hpl.jena.graph.Node;
-import com.hp.hpl.jena.graph.Triple;
-import com.hp.hpl.jena.query.Query;
-import com.hp.hpl.jena.query.QueryExecution;
-import com.hp.hpl.jena.query.QueryExecutionFactory;
-import com.hp.hpl.jena.query.QueryFactory;
-import com.hp.hpl.jena.query.QuerySolution;
-import com.hp.hpl.jena.query.ResultSet;
-import com.hp.hpl.jena.shared.Lock;
-import com.hp.hpl.jena.shared.LockMRSW;
-import com.hp.hpl.jena.sparql.core.DatasetGraph;
-import com.hp.hpl.jena.sparql.core.Quad;
-import com.hp.hpl.jena.sparql.resultset.ResultSetMem;
-import com.hp.hpl.jena.sparql.util.Context;
-import com.hp.hpl.jena.util.iterator.SingletonIterator;
-import com.hp.hpl.jena.util.iterator.WrappedIterator;
-
-public class SparqlDatasetGraph implements DatasetGraph {
-
-    private String endpointURI;
-    private Repository repository;
-    private Lock lock = new LockMRSW();
-
-    public SparqlDatasetGraph(String endpointURI) {
-        this.endpointURI = endpointURI;
-        this.repository = new HTTPRepository(endpointURI);
-    }
-
-    private Graph getGraphFor(Quad q) {
-        return getGraphFor(q.getGraph());
-    }
-
-    private Graph getGraphFor(Node g) {
-        return (g == Node.ANY)
-                ? new SparqlGraph(endpointURI)
-                : new SparqlGraph(endpointURI, g.getURI());
-    }
-
-    @Override
-    public void add(Quad arg0) {
-        getGraphFor(arg0).add(new Triple(arg0.getSubject(), arg0.getPredicate(), arg0.getObject()));
-    }
-
-    @Override
-    public void addGraph(Node arg0, Graph arg1) {
-        // TODO Auto-generated method stub
-    }
-
-    @Override
-    public void close() {
-        // TODO Auto-generated method stub
-    }
-
-    @Override
-    public boolean contains(Quad arg0) {
-        return getGraphFor(arg0).contains(new Triple(arg0.getSubject(), arg0.getPredicate(), arg0.getObject()));
-    }
-
-    @Override
-    public boolean contains(Node arg0, Node arg1, Node arg2, Node arg3) {
-        return getGraphFor(arg0).contains(arg1, arg2, arg3);
-    }
-
-    @Override
-    public boolean containsGraph(Node arg0) {
-        // TODO Auto-generated method stub
-        return true;
-    }
-
-    @Override
-    public void delete(Quad arg0) {
-        getGraphFor(arg0).delete(new Triple(arg0.getSubject(), arg0.getPredicate(), arg0.getObject()));
-    }
-
-    @Override
-    public void deleteAny(Node arg0, Node arg1, Node arg2, Node arg3) {
-        // TODO check this
-        getGraphFor(arg0).delete(new Triple(arg1, arg2, arg3));
-    }
-
-    @Override
-    public Iterator<Quad> find() {
-        return find(Node.ANY, Node.ANY, Node.ANY, Node.ANY);
-    }
-
-    @Override
-    public Iterator<Quad> find(Quad arg0) {
-        return find(arg0.getSubject(), arg0.getPredicate(), arg0.getObject(), arg0.getGraph());
-    }
-
-    @Override
-    public Iterator<Quad> find(Node graph, Node subject, Node predicate, Node object) {
-        if (!isVar(subject) && !isVar(predicate) && !isVar(object) &&!isVar(graph)) {
-            if (contains(subject, predicate, object, graph)) {
-                return new SingletonIterator(new Triple(subject, predicate, object));
-            } else {
-                return WrappedIterator.create(Collections.EMPTY_LIST.iterator());
-            }
-        }
-        StringBuffer findQuery = new StringBuffer("SELECT * WHERE { \n");
-        String graphURI = !isVar(graph) ? graph.getURI() : null;
-        findQuery.append(" GRAPH ");
-        if (graphURI != null) {
-            findQuery.append(" <" + graphURI + ">");
-        } else {
-            findQuery.append("?g");
-        }
-        findQuery.append(" { ");
-        findQuery.append(SparqlGraph.sparqlNode(subject, "?s"))
-                .append(" ")
-                .append(SparqlGraph.sparqlNode(predicate, "?p"))
-                .append(" ")
-                .append(SparqlGraph.sparqlNode(object, "?o"));
-        findQuery.append(" } ");
-        findQuery.append("\n}");
-
-        //log.info(findQuery.toString());
-        ResultSet rs = execSelect(findQuery.toString());
-        //rs = execSelect(findQuery.toString());
-        //rs = execSelect(findQuery.toString());
-
-        List<Quad> quadlist = new ArrayList<Quad>();
-        while (rs.hasNext()) {
-            QuerySolution soln = rs.nextSolution();
-            Quad q = new Quad(isVar(graph) ? soln.get("?g").asNode() : graph,
-                    isVar(subject) ? soln.get("?s").asNode() : subject,
-                    isVar(predicate) ? soln.get("?p").asNode() : predicate,
-                    isVar(object) ? soln.get("?o").asNode() : object);
-            //log.info(t);
-            quadlist.add(q);
-        }
-        //log.info(triplist.size() + " results");
-        return WrappedIterator.create(quadlist.iterator()); }
-
-    @Override
-    public Iterator<Quad> findNG(Node arg0, Node arg1, Node arg2, Node arg3) {
-        // TODO check this
-        return find(arg0, arg1, arg2, arg3);
-    }
-
-    @Override
-    public Context getContext() {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Graph getDefaultGraph() {
-        return new SparqlGraph(endpointURI);
-    }
-
-    @Override
-    public Graph getGraph(Node arg0) {
-        return new SparqlGraph(endpointURI, arg0.getURI());
-    }
-
-    @Override
-    public Lock getLock() {
-        return lock;
-    }
-
-    @Override
-    public boolean isEmpty() {
-        // TODO Auto-generated method stub
-        return false;
-    }
-
-    @Override
-    public Iterator<Node> listGraphNodes() {
-        List<Node> graphNodeList = new ArrayList<Node>();
-        try {
-            RepositoryConnection conn = getConnection();
-            try {
-                RepositoryResult<Resource> conResult = conn.getContextIDs();
-                while (conResult.hasNext()) {
-                    Resource con = conResult.next();
-                    graphNodeList.add(Node.createURI(con.stringValue()));
-                }
-            } finally {
-                conn.close();
-            }
-        } catch (RepositoryException re) {
-            throw new RuntimeException(re);
-        }
-        return graphNodeList.iterator();
-    }
-
-    private RepositoryConnection getConnection() {
-        try {
-            return this.repository.getConnection();
-        } catch (RepositoryException e) {
-            throw new RuntimeException(e);
-        }
-    }
-
-    @Override
-    public void removeGraph(Node arg0) {
-        // TODO Auto-generated method stub
-
-    }
-
-    @Override
-    public void setDefaultGraph(Graph arg0) {
-        // TODO Auto-generated method stub
-
-    }
-
-    @Override
-    public long size() {
-        // TODO Auto-generated method stub
-        return 0;
-    }
-
-    private boolean isVar(Node node) {
-        return (node == null || node.isVariable() || node == Node.ANY);
-    }
-
-    private ResultSet execSelect(String queryStr) {
-
-//        long startTime1 = System.currentTimeMillis();
-//        try {
-//
-//            RepositoryConnection conn = getConnection();
-//            try {
-//                GraphQuery q = conn.prepareGraphQuery(QueryLanguage.SPARQL, queryStr);
-//                q.evaluate();
-//            } catch (MalformedQueryException e) {
-//                throw new RuntimeException(e);
-//            } finally {
-//                conn.close();
-//            }
-//        } catch (Exception re) {
-//            //log.info(re,re);
-//        }
-
-//        log.info((System.currentTimeMillis() - startTime1) + " to execute via sesame");
-
-        long startTime = System.currentTimeMillis();
-        Query askQuery = QueryFactory.create(queryStr);
-        QueryExecution qe = QueryExecutionFactory.sparqlService(endpointURI, askQuery);
-        try {
-            return new ResultSetMem(qe.execSelect());
-        } finally {
-            //log.info((System.currentTimeMillis() - startTime) + " to execute via Jena");
-            qe.close();
-        }
-    }
-
-}

@@ -3,19 +3,12 @@
 package edu.cornell.mannlib.vitro.webapp.dao.jena;
 
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.openrdf.query.MalformedQueryException;
-import org.openrdf.query.QueryLanguage;
-import org.openrdf.query.Update;
-import org.openrdf.query.UpdateExecutionException;
-import org.openrdf.repository.Repository;
-import org.openrdf.repository.RepositoryConnection;
-import org.openrdf.repository.RepositoryException;
-import org.openrdf.repository.http.HTTPRepository;
 
 import com.hp.hpl.jena.graph.BulkUpdateHandler;
 import com.hp.hpl.jena.graph.Capabilities;

@@ -46,10 +39,23 @@ import com.hp.hpl.jena.util.iterator.ExtendedIterator;
 import com.hp.hpl.jena.util.iterator.SingletonIterator;
 import com.hp.hpl.jena.util.iterator.WrappedIterator;
 
+import org.apache.http.client.entity.UrlEncodedFormEntity;
+import org.apache.http.client.methods.CloseableHttpResponse;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.client.methods.HttpPost;
+import org.apache.http.client.utils.URIBuilder;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.impl.client.HttpClients;
+import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
+import org.apache.http.message.BasicNameValuePair;
+
+import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException;
+
 public class SparqlGraph implements GraphWithPerform {
 
     private String endpointURI;
     private String graphURI;
+    private CloseableHttpClient httpClient;
     private static final Log log = LogFactory.getLog(SparqlGraph.class);
 
     private BulkUpdateHandler bulkUpdateHandler;

@@ -60,8 +66,6 @@ public class SparqlGraph implements GraphWithPerform {
     private TransactionHandler transactionHandler;
     private QueryHandler queryHandler;
 
-    private Repository repository;
-
     /**
      * Returns a SparqlGraph for the union of named graphs in a remote repository
      * @param endpointURI

@@ -78,7 +82,10 @@ public class SparqlGraph implements GraphWithPerform {
     public SparqlGraph(String endpointURI, String graphURI) {
         this.endpointURI = endpointURI;
         this.graphURI = graphURI;
-        this.repository = new HTTPRepository(endpointURI);
+
+        PoolingHttpClientConnectionManager cm = new PoolingHttpClientConnectionManager();
+        cm.setDefaultMaxPerRoute(50);
+        this.httpClient = HttpClients.custom().setConnectionManager(cm).build();
     }
 
     public String getEndpointURI() {

@@ -88,14 +95,6 @@ public class SparqlGraph implements GraphWithPerform {
     public String getGraphURI() {
         return graphURI;
     }
 
-    public RepositoryConnection getConnection() {
-        try {
-            return this.repository.getConnection();
-        } catch (RepositoryException e) {
-            throw new RuntimeException(e);
-        }
-    }
-
     @Override
     public void add(Triple arg0) throws AddDeniedException {

@@ -104,22 +103,24 @@ public class SparqlGraph implements GraphWithPerform {
 
     public void executeUpdate(String updateString) {
         try {
-            RepositoryConnection conn = getConnection();
+            HttpPost meth = new HttpPost(endpointURI);
+            meth.addHeader("Content-Type", "application/x-www-form-urlencoded");
+            meth.setEntity(new UrlEncodedFormEntity(Arrays.asList(
+                    new BasicNameValuePair("update", updateString))));
+            CloseableHttpResponse response = httpClient.execute(meth);
             try {
-                Update u = conn.prepareUpdate(QueryLanguage.SPARQL, updateString);
-                u.execute();
-            } catch (MalformedQueryException e) {
-                throw new RuntimeException(e);
-            } catch (UpdateExecutionException e) {
-                log.error(e,e);
-                log.error("Update command: \n" + updateString);
-                throw new RuntimeException(e);
+                int statusCode = response.getStatusLine().getStatusCode();
+                if (statusCode > 399) {
+                    log.error("response " + statusCode + " to update. \n");
+                    throw new RuntimeException("Unable to perform SPARQL UPDATE: \n"
+                            + updateString);
+                }
             } finally {
-                conn.close();
+                response.close();
             }
-        } catch (RepositoryException re) {
-            throw new RuntimeException(re);
+        } catch (Exception e) {
+            throw new RuntimeException("Unable to perform SPARQL UPDATE", e);
         }
     }
 
     @Override

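For context on the SparqlGraph.executeUpdate() rewrite above: the Sesame RepositoryConnection update is replaced by a plain SPARQL 1.1 Update request, POSTed as the "update" form parameter with Apache HttpClient. The following self-contained sketch shows the same pattern in isolation; the endpoint URL and update string are placeholders, and the class is an illustration rather than Vitro code (which keeps a pooled CloseableHttpClient as a field, as the hunk above shows).

    import java.util.Arrays;

    import org.apache.http.client.entity.UrlEncodedFormEntity;
    import org.apache.http.client.methods.CloseableHttpResponse;
    import org.apache.http.client.methods.HttpPost;
    import org.apache.http.impl.client.CloseableHttpClient;
    import org.apache.http.impl.client.HttpClients;
    import org.apache.http.message.BasicNameValuePair;

    public class SparqlUpdatePostSketch {
        public static void main(String[] args) throws Exception {
            String endpointURI = "http://localhost:8080/sparql/update"; // placeholder endpoint
            String updateString =
                    "INSERT DATA { <http://example.org/s> <http://example.org/p> \"o\" }"; // placeholder update

            try (CloseableHttpClient httpClient = HttpClients.createDefault()) {
                HttpPost meth = new HttpPost(endpointURI);
                // SPARQL 1.1 Update over HTTP: send the update text as the "update" form parameter
                meth.setEntity(new UrlEncodedFormEntity(Arrays.asList(
                        new BasicNameValuePair("update", updateString))));
                try (CloseableHttpResponse response = httpClient.execute(meth)) {
                    int statusCode = response.getStatusLine().getStatusCode();
                    if (statusCode > 399) {
                        throw new RuntimeException("SPARQL UPDATE failed with HTTP " + statusCode);
                    }
                }
            }
        }
    }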
@@ -14,9 +14,8 @@ import javax.servlet.http.HttpServletResponseWrapper;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.openrdf.model.URI;
-import org.openrdf.model.impl.URIImpl;
 
+import edu.cornell.mannlib.vitro.webapp.beans.IndividualImpl;
 import edu.cornell.mannlib.vitro.webapp.dao.ModelAccess;
 import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactory;
 import edu.cornell.mannlib.vitro.webapp.utils.NamespaceMapper;

@@ -136,7 +135,7 @@ public class URLRewritingHttpServletResponse extends HttpServletResponseWrapper {
                         qpIndex++;
                         if ( ("uri".equals(keyAndValue[0])) && (keyAndValue.length>1) && (keyAndValue[1] != null) ) {
                             try {
-                                URI uri = new URIImpl(keyAndValue[1]);
+                                IndividualImpl uri = new IndividualImpl(keyAndValue[1]);
                                 String namespace = uri.getNamespace();
                                 String localName = uri.getLocalName();
                                 if ( (namespace != null) && (localName != null) ) {

@@ -19,7 +19,6 @@ import javax.servlet.ServletContext;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.openrdf.model.impl.URIImpl;
 
 import com.hp.hpl.jena.iri.IRI;
 import com.hp.hpl.jena.iri.IRIFactory;

@@ -26,11 +26,6 @@ import org.apache.http.impl.client.CloseableHttpClient;
 import org.apache.http.impl.client.HttpClients;
 import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
 import org.apache.http.message.BasicNameValuePair;
-import org.openrdf.model.Resource;
-import org.openrdf.repository.RepositoryConnection;
-import org.openrdf.repository.RepositoryException;
-import org.openrdf.repository.RepositoryResult;
-import org.openrdf.repository.http.HTTPRepository;
 
 import com.hp.hpl.jena.graph.Triple;
 import com.hp.hpl.jena.query.Query;

@@ -44,6 +39,7 @@ import com.hp.hpl.jena.query.ResultSetFormatter;
 import com.hp.hpl.jena.rdf.model.Model;
 import com.hp.hpl.jena.rdf.model.ModelFactory;
 import com.hp.hpl.jena.rdf.model.RDFNode;
+import com.hp.hpl.jena.rdf.model.Resource;
 import com.hp.hpl.jena.rdf.model.Statement;
 import com.hp.hpl.jena.rdf.model.StmtIterator;
 

@@ -67,10 +63,7 @@ public class RDFServiceSparql extends RDFServiceImpl implements RDFService {
     private static final Log log = LogFactory.getLog(RDFServiceImpl.class);
     protected String readEndpointURI;
     protected String updateEndpointURI;
-    private HTTPRepository readRepository;
-    private HTTPRepository updateRepository;
     private CloseableHttpClient httpClient;
-    private boolean useSesameContextQuery = true;
                                                 // the number of triples to be
     private static final int CHUNK_SIZE = 1000; // added/removed in a single
                                                 // SPARQL UPDATE

@@ -89,8 +82,6 @@ public class RDFServiceSparql extends RDFServiceImpl implements RDFService {
     public RDFServiceSparql(String readEndpointURI, String updateEndpointURI, String defaultWriteGraphURI) {
         this.readEndpointURI = readEndpointURI;
         this.updateEndpointURI = updateEndpointURI;
-        this.readRepository = new HTTPRepository(readEndpointURI);
-        this.updateRepository = new HTTPRepository(updateEndpointURI);
 
         PoolingHttpClientConnectionManager cm = new PoolingHttpClientConnectionManager();
         cm.setDefaultMaxPerRoute(50);

@@ -135,12 +126,7 @@ public class RDFServiceSparql extends RDFServiceImpl implements RDFService {
     }
 
     public void close() {
-        try {
-            this.readRepository.shutDown();
-            this.updateRepository.shutDown();
-        } catch (RepositoryException re) {
-            log.error(re, re);
-        }
+        // nothing for now
     }
 
     /**

@@ -234,6 +220,8 @@ public class RDFServiceSparql extends RDFServiceImpl implements RDFService {
 
         try {
             qe.execConstruct(model);
+        } catch (Exception e) {
+            log.error("Error executing CONSTRUCT against remote endpoint: " + queryStr);
         } finally {
             qe.close();
         }

@@ -359,47 +347,40 @@ public class RDFServiceSparql extends RDFServiceImpl implements RDFService {
      *
      * @return List<String> - list of all the graph URIs in the RDF store
      */
-    //TODO - need to verify that the sesame getContextIDs method is implemented
-    // in such a way that it works with all triple stores that support the
-    // graph update API
     @Override
     public List<String> getGraphURIs() throws RDFServiceException {
-        if (!this.useSesameContextQuery) {
-            return getGraphURIsFromSparqlQuery();
-        } else {
-            try {
-                return getGraphURIsFromSesameContexts();
-            } catch (RepositoryException re) {
-                this.useSesameContextQuery = false;
-                return getGraphURIsFromSparqlQuery();
-            }
-        }
+        return getGraphURIsFromSparqlQuery();
     }
 
-    private List<String> getGraphURIsFromSesameContexts() throws RepositoryException {
-        List<String> graphURIs = new ArrayList<String>();
-        RepositoryConnection conn = getReadConnection();
-        try {
-            RepositoryResult<Resource> conResult = conn.getContextIDs();
-            while (conResult.hasNext()) {
-                Resource res = conResult.next();
-                graphURIs.add(res.stringValue());
-            }
-        } finally {
-            conn.close();
-        }
-        return graphURIs;
-    }
-
-    private List<String> getGraphURIsFromSparqlQuery() throws RDFServiceException {
+    private List<String> getGraphURIsFromSparqlQuery() throws RDFServiceException {
+        String fastJenaQuery = "SELECT DISTINCT ?g WHERE { GRAPH ?g {} } ORDER BY ?g";
+        String standardQuery = "SELECT DISTINCT ?g WHERE { GRAPH ?g { ?s ?p ?o } } ORDER BY ?g";
         List<String> graphURIs = new ArrayList<String>();
         try {
-            String graphURIString = "SELECT DISTINCT ?g WHERE { GRAPH ?g { ?s ?p ?o } } ORDER BY ?g";
+            graphURIs = getGraphURIsFromSparqlQuery(fastJenaQuery);
+        } catch (Exception e) {
+            log.debug("Unable to use non-standard ARQ query for graph list", e);
+        }
+        if (graphURIs.isEmpty()) {
+            graphURIs = getGraphURIsFromSparqlQuery(standardQuery);
+        }
+        return graphURIs;
+    }
+
+    private List<String> getGraphURIsFromSparqlQuery(String queryString) throws RDFServiceException {
+        List<String> graphURIs = new ArrayList<String>();
+        try {
+
             ResultSet rs = ResultSetFactory.fromJSON(
-                    sparqlSelectQuery(graphURIString, RDFService.ResultFormat.JSON));
+                    sparqlSelectQuery(queryString, RDFService.ResultFormat.JSON));
             while (rs.hasNext()) {
                 QuerySolution qs = rs.nextSolution();
-                graphURIs.add(qs.getResource("g").getURI());
+                if (qs != null) { // no idea how this happens, but it seems to
+                    RDFNode n = qs.getResource("g");
+                    if (n != null && n.isResource()) {
+                        graphURIs.add(((Resource) n).getURI());
+                    }
+                }
             }
         } catch (Exception e) {
             throw new RDFServiceException("Unable to list graph URIs", e);

@@ -470,22 +451,6 @@ public class RDFServiceSparql extends RDFServiceImpl implements RDFService {
         return updateEndpointURI;
     }
 
-    protected RepositoryConnection getReadConnection() {
-        try {
-            return this.readRepository.getConnection();
-        } catch (RepositoryException e) {
-            throw new RuntimeException(e);
-        }
-    }
-
-    protected RepositoryConnection getWriteConnection() {
-        try {
-            return this.updateRepository.getConnection();
-        } catch (RepositoryException e) {
-            throw new RuntimeException(e);
-        }
-    }
-
     protected void executeUpdate(String updateString) throws RDFServiceException {
         try {
             HttpPost meth = new HttpPost(updateEndpointURI);

@@ -15,9 +15,9 @@ import javax.servlet.http.HttpServletResponse;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.openrdf.model.vocabulary.OWL;
-import org.openrdf.model.vocabulary.RDF;
 
+import com.hp.hpl.jena.vocabulary.OWL;
+import com.hp.hpl.jena.vocabulary.RDF;
 import com.hp.hpl.jena.vocabulary.RDFS;
 import com.hp.hpl.jena.vocabulary.XSD;
 

@@ -99,8 +99,8 @@ public class GetAllPrefix extends BaseEditController {
         }
 
         // add standard namespaces
-        addPrefixIfNecessary("owl", OWL.NAMESPACE, prefixMap);
-        addPrefixIfNecessary("rdf", RDF.NAMESPACE, prefixMap);
+        addPrefixIfNecessary("owl", OWL.getURI(), prefixMap);
+        addPrefixIfNecessary("rdf", RDF.getURI(), prefixMap);
         addPrefixIfNecessary("rdfs", RDFS.getURI(), prefixMap);
         addPrefixIfNecessary("swrl", "http://www.w3.org/2003/11/swrl#", prefixMap);
         addPrefixIfNecessary("swrlb", "http://www.w3.org/2003/11/swrlb#", prefixMap);

@@ -1,156 +0,0 @@
-/* $This file is distributed under the terms of the license in /doc/license.txt$ */
-
-package edu.cornell.mannlib.vitro.webapp.utils.jena;
-
-import java.io.IOException;
-import java.io.OutputStream;
-import java.io.PipedInputStream;
-import java.io.PipedOutputStream;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.openrdf.model.Resource;
-import org.openrdf.model.impl.URIImpl;
-import org.openrdf.repository.Repository;
-import org.openrdf.repository.RepositoryConnection;
-import org.openrdf.repository.RepositoryException;
-import org.openrdf.repository.http.HTTPRepository;
-import org.openrdf.rio.RDFFormat;
-import org.openrdf.rio.RDFParseException;
-
-import com.hp.hpl.jena.query.Query;
-import com.hp.hpl.jena.query.QueryExecution;
-import com.hp.hpl.jena.query.QueryExecutionFactory;
-import com.hp.hpl.jena.query.QueryFactory;
-import com.hp.hpl.jena.query.QuerySolution;
-import com.hp.hpl.jena.query.ResultSet;
-import com.hp.hpl.jena.rdf.model.Model;
-import com.hp.hpl.jena.rdf.model.ModelFactory;
-import com.hp.hpl.jena.rdf.model.RDFNode;
-import com.hp.hpl.jena.vocabulary.OWL;
-
-public class SesameSyncUtils {
-
-    private static final Log log = LogFactory.getLog(SesameSyncUtils.class);
-
-    public void writeModelToSesameContext
-            (Model jenaModel, String serverURI, String repositoryId, String contextId)
-            throws RepositoryException, IOException, RDFParseException {
-        Repository myRepository = new HTTPRepository(serverURI, repositoryId);
-        myRepository.initialize();
-        RepositoryConnection myConn = myRepository.getConnection();
-
-        myConn.setAutoCommit(false);
-        try {
-
-            Resource contextRes = (contextId != null)
-                    ? new URIImpl(contextId) : null ;
-
-            if (contextRes != null) {
-                myConn.clear(contextRes);
-            } else {
-                myConn.clear();
-            }
-
-            PipedInputStream in = new PipedInputStream();
-            PipedOutputStream out = new PipedOutputStream(in);
-
-
-
-            try {
-
-                new Thread(new JenaOutputter(jenaModel, out, myConn), "SesameSyncUtilities.JenaOutputter").start();
-
-                if (contextRes != null) {
-                    myConn.add(in,"http://example.org/base/", RDFFormat.NTRIPLES, contextRes);
-                } else {
-                    myConn.add(in,"http://example.org/base/", RDFFormat.NTRIPLES);
-                }
-            } finally {
-                in.close();
-            }
-
-            myConn.commit();
-
-        } catch (Throwable e) {
-            myConn.rollback();
-            e.printStackTrace();
-            log.error("Error writing to Sesame repository", e);
-            throw new RuntimeException("Error writing to Sesame repository", e);
-        } finally {
-            myConn.close();
-        }
-
-    }
-
-    private List<String> getIndividualURIs(Model model) {
-        List<String> individualURIs = new ArrayList<String>();
-        String queryStr = "SELECT DISTINCT ?s WHERE { \n" +
-                " ?s a <" + OWL.Thing.getURI() + "> \n" +
-                "}";
-        Query query = QueryFactory.create(queryStr);
-        QueryExecution qe = QueryExecutionFactory.create(query, model);
-        try {
-            ResultSet rs = qe.execSelect();
-            while (rs.hasNext()) {
-                QuerySolution qsoln = rs.nextSolution();
-                String individualURI = qsoln.getResource("s").getURI();
-                if (individualURI != null) {
-                    individualURIs.add(individualURI);
-                }
-            }
-        } finally {
-            qe.close();
-        }
-        return individualURIs;
-    }
-
-    private class JenaOutputter implements Runnable {
-
-        private Model model;
-        private OutputStream out;
-        private RepositoryConnection rconn;
-
-        public JenaOutputter(Model model, OutputStream out, RepositoryConnection rconn) {
-            this.model = model;
-            this.out = out;
-            this.rconn = rconn;
-        }
-
-        public void run() {
-            Model t = ModelFactory.createDefaultModel();
-            try {
-                List<String> individualURIs = getIndividualURIs(model);
-                log.info(individualURIs.size() + " individuals to send to Sesame");
-                int i = 0;
-                for (String individualURI : individualURIs) {
-                    t.removeAll();
-                    t.add(model.listStatements(
-                            model.getResource(
-                                    individualURI), null, (RDFNode) null));
-                    t.write(out, "N-TRIPLE");
-                    i++;
-                    if (i % 100 == 0) {
-                        try {
-                            rconn.commit();
-                        } catch (Throwable e) {
-                            log.error(e, e);
-                        }
-                        log.info(i + " individuals sent to Sesame");
-                    }
-                }
-            } finally {
-                try {
-                    out.flush();
-                    out.close();
-                } catch (IOException ioe) {
-                    ioe.printStackTrace();
-                }
-            }
-        }
-
-    }
-
-}

@@ -4,8 +4,6 @@ package edu.cornell.mannlib.vitro.webapp.web.templatemodels.individual;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.openrdf.model.URI;
-import org.openrdf.model.impl.URIImpl;
 
 import com.hp.hpl.jena.rdf.model.Literal;
 

|
@ -14,6 +12,8 @@ import edu.cornell.mannlib.vitro.webapp.auth.requestedAction.RequestedAction;
|
||||||
import edu.cornell.mannlib.vitro.webapp.auth.requestedAction.propstmt.EditDataPropertyStatement;
|
import edu.cornell.mannlib.vitro.webapp.auth.requestedAction.propstmt.EditDataPropertyStatement;
|
||||||
import edu.cornell.mannlib.vitro.webapp.beans.DataPropertyStatement;
|
import edu.cornell.mannlib.vitro.webapp.beans.DataPropertyStatement;
|
||||||
import edu.cornell.mannlib.vitro.webapp.beans.DataPropertyStatementImpl;
|
import edu.cornell.mannlib.vitro.webapp.beans.DataPropertyStatementImpl;
|
||||||
|
import edu.cornell.mannlib.vitro.webapp.beans.Individual;
|
||||||
|
import edu.cornell.mannlib.vitro.webapp.beans.IndividualImpl;
|
||||||
import edu.cornell.mannlib.vitro.webapp.beans.Property;
|
import edu.cornell.mannlib.vitro.webapp.beans.Property;
|
||||||
import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest;
|
import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest;
|
||||||
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.UrlBuilder;
|
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.UrlBuilder;
|
||||||
|
@@ -51,7 +51,7 @@ public class NameStatementTemplateModel extends PropertyStatementTemplateModel {
             // If the individual has no rdfs:label, use the local name. It will not be editable. (This replicates previous behavior;
             // perhaps we would want to allow a label to be added. But such individuals do not usually have their profiles viewed or
             // edited directly.)
-            URI uri = new URIImpl(subjectUri);
+            Individual uri = new IndividualImpl(subjectUri);
             this.stringValue = uri.getLocalName();
            this.editUrl = "";
         } else {