Merge remote-tracking branch 'origin/develop' into dev-isf

Conflicts:
	webapp/rdf/displayDisplay/everytime/displayDisplay.n3
	webapp/web/i18n/all.properties
	webapp/web/templates/freemarker/lib/lib-properties.ftl
This commit is contained in:
brianjlowe 2013-09-23 16:19:05 -04:00
commit 90512bdc1c
16 changed files with 893 additions and 409 deletions

View file

@ -9,9 +9,11 @@ import java.io.PrintWriter;
import java.io.Writer;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import javax.servlet.RequestDispatcher;
import javax.servlet.ServletException;
@ -40,6 +42,7 @@ import com.hp.hpl.jena.vocabulary.XSD;
import edu.cornell.mannlib.vedit.controller.BaseEditController;
import edu.cornell.mannlib.vitro.webapp.auth.permissions.SimplePermission;
import edu.cornell.mannlib.vitro.webapp.beans.Ontology;
import edu.cornell.mannlib.vitro.webapp.controller.individual.IndividualController;
import edu.cornell.mannlib.vitro.webapp.dao.OntologyDao;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService.ModelSerializationFormat;
@ -47,45 +50,37 @@ import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService.ResultFormat;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException;
import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.RDFServiceUtils;
import edu.cornell.mannlib.vitro.webapp.utils.SparqlQueryUtils;
import edu.cornell.mannlib.vitro.webapp.web.ContentType;
/**
* Services a sparql query. This will return a simple error message and a 501 if
* Services a SPARQL query. This will return a simple error message and a 501 if
* there is no Model.
*
*
* @author bdc34
*
*/
public class SparqlQueryServlet extends BaseEditController {
private static final Log log = LogFactory.getLog(SparqlQueryServlet.class.getName());
private final static boolean CONVERT = true;
/**
* format configurations for SELECT queries.
*/
protected static HashMap<String,RSFormatConfig> rsFormats = new HashMap<String,RSFormatConfig>();
private static RSFormatConfig[] rsfs = {
new RSFormatConfig( "RS_XML", !CONVERT, ResultFormat.XML, null, "text/xml"),
new RSFormatConfig( "RS_TEXT", !CONVERT, ResultFormat.TEXT, null, "text/plain"),
new RSFormatConfig( "vitro:csv", !CONVERT, ResultFormat.CSV, null, "text/csv"),
new RSFormatConfig( "RS_JSON", !CONVERT, ResultFormat.JSON, null, "application/javascript") };
/**
* format configurations for CONSTRUCT/DESCRIBE queries.
*/
protected static HashMap<String,ModelFormatConfig> modelFormats =
new HashMap<String,ModelFormatConfig>();
private static ModelFormatConfig[] fmts = {
new ModelFormatConfig("RDF/XML", !CONVERT, ModelSerializationFormat.RDFXML, null, "application/rdf+xml" ),
new ModelFormatConfig("RDF/XML-ABBREV", CONVERT, ModelSerializationFormat.N3, "RDF/XML-ABBREV", "application/rdf+xml" ),
new ModelFormatConfig("N3", !CONVERT, ModelSerializationFormat.N3, null, "text/n3" ),
new ModelFormatConfig("N-TRIPLE", !CONVERT, ModelSerializationFormat.NTRIPLE, null, "text/plain" ),
new ModelFormatConfig("TTL", CONVERT, ModelSerializationFormat.N3, "TTL", "application/x-turtle" ),
new ModelFormatConfig("JSON-LD", CONVERT, ModelSerializationFormat.N3, "JSON-LD", "application/javascript" ) };
/**
* Use this map to decide which MIME type is suited for the "accept" header.
*/
public static final Map<String, Float> ACCEPTED_CONTENT_TYPES;
@Override
protected void doPost(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException
@ -113,100 +108,85 @@ public class SparqlQueryServlet extends BaseEditController {
String queryParam = vreq.getParameter("query");
log.debug("queryParam was : " + queryParam);
String resultFormatParam = vreq.getParameter("resultFormat");
log.debug("resultFormat was: " + resultFormatParam);
String rdfResultFormatParam = vreq.getParameter("rdfResultFormat");
if (rdfResultFormatParam == null) {
rdfResultFormatParam = "RDF/XML-ABBREV";
}
log.debug("rdfResultFormat was: " + rdfResultFormatParam);
if( queryParam == null || "".equals(queryParam) ||
resultFormatParam == null || "".equals(resultFormatParam) ||
!rsFormats.containsKey( resultFormatParam ) ||
rdfResultFormatParam == null || "".equals(rdfResultFormatParam) ||
!modelFormats.containsKey( rdfResultFormatParam ) ) {
if( queryParam == null || "".equals(queryParam) ){
doHelp(request,response);
return;
}
executeQuery(response, resultFormatParam, rdfResultFormatParam,
queryParam, vreq.getUnfilteredRDFService());
String contentType = checkForContentType(vreq.getHeader("Accept"));
Query query = SparqlQueryUtils.create(queryParam);
if( query.isSelectType() ){
String format = contentType!=null ? contentType:vreq.getParameter("resultFormat");
RSFormatConfig formatConf = rsFormats.get(format);
doSelect(response, queryParam, formatConf, vreq.getRDFService());
}else if( query.isAskType()){
doAsk( queryParam, vreq.getRDFService(), response );
}else if( query.isConstructType() ){
String format = contentType != null ? contentType : vreq.getParameter("rdfResultFormat");
if (format== null) {
format= "RDF/XML-ABBREV";
}
ModelFormatConfig formatConf = modelFormats.get(format);
doConstruct(response, query, formatConf, vreq.getRDFService());
}else{
doHelp(request,response);
}
return;
}
private void executeQuery(HttpServletResponse response,
String resultFormatParam,
String rdfResultFormatParam,
String queryParam,
RDFService rdfService ) throws IOException {
/* BJL23 2008-11-06
* modified to support CSV output.
* Unfortunately, ARQ doesn't make it easy to
* do this by implementing a new ResultSetFormat, because
* ResultSetFormatter is hardwired with expected values.
* This slightly ugly approach will have to do for now.
*/
// if ( !("vitro:csv").equals(resultFormatParam) ) {
// rsf = selectFormatSymbols.get(resultFormatParam);
// }
// String mimeType = rdfFormatToMimeType.get(resultFormatParam);
try{
Query query = SparqlQueryUtils.create(queryParam);
if( query.isSelectType() ){
doSelectQuery( queryParam, rdfService, resultFormatParam, response);
} else if(query.isAskType()){
// Irrespective of the ResultFormatParam,
// this always prints a boolean to the default OutputStream.
String result = (rdfService.sparqlAskQuery(queryParam) == true)
? "true"
: "false";
PrintWriter p = response.getWriter();
p.write(result);
return;
} else {
doModelResultQuery( query, rdfService, rdfResultFormatParam, response);
}
} catch (RDFServiceException e) {
throw new RuntimeException(e);
}
}
/**
 * Run a SPARQL ASK query and write the boolean answer to the response.
 *
 * The result-format parameter is deliberately ignored: an ASK query
 * always produces a bare "true" or "false" on the default writer.
 *
 * @throws ServletException if the RDFService cannot execute the query.
 */
private void doAsk(String queryParam, RDFService rdfService,
        HttpServletResponse response) throws ServletException, IOException {
    boolean answer;
    try {
        answer = rdfService.sparqlAskQuery(queryParam);
    } catch (RDFServiceException e) {
        throw new ServletException( "Could not execute ask query ", e );
    }
    PrintWriter writer = response.getWriter();
    writer.write(answer ? "true" : "false");
}
/**
* Execute the query and send the result to out. Attempt to
* send the RDFService the same format as the rdfResultFormatParam
* so that the results from the RDFService can be directly piped to the client.
* @param rdfService
* @throws IOException
* @throws RDFServiceException
*/
private void doSelectQuery( String queryParam,
RDFService rdfService, String resultFormatParam,
HttpServletResponse response) throws IOException, RDFServiceException{
RSFormatConfig config = rsFormats.get( resultFormatParam );
private void doSelect(HttpServletResponse response,
String queryParam,
RSFormatConfig formatConf,
RDFService rdfService
) throws ServletException {
try {
if( ! formatConf.converstionFromWireFormat ){
response.setContentType( formatConf.responseMimeType );
InputStream results;
results = rdfService.sparqlSelectQuery(queryParam, formatConf.wireFormat );
pipe( results, response.getOutputStream() );
}else{
//always use JSON when conversion is needed.
InputStream results = rdfService.sparqlSelectQuery(queryParam, ResultFormat.JSON );
if( ! config.converstionFromWireFormat ){
response.setContentType( config.responseMimeType );
InputStream results = rdfService.sparqlSelectQuery(queryParam, config.wireFormat );
pipe( results, response.getOutputStream() );
}else{
//always use JSON when conversion is needed.
InputStream results = rdfService.sparqlSelectQuery(queryParam, ResultFormat.JSON );
response.setContentType( config.responseMimeType );
ResultSet rs = ResultSetFactory.fromJSON( results );
OutputStream out = response.getOutputStream();
ResultSetFormatter.output(out, rs, config.jenaResponseFormat);
// } else {
// Writer out = response.getWriter();
// toCsv(out, results);
//}
response.setContentType( formatConf.responseMimeType );
ResultSet rs = ResultSetFactory.fromJSON( results );
OutputStream out = response.getOutputStream();
ResultSetFormatter.output(out, rs, formatConf.jenaResponseFormat);
}
} catch (RDFServiceException e) {
throw new ServletException("Cannot get result from the RDFService",e);
} catch (IOException e) {
throw new ServletException("Cannot perform SPARQL SELECT",e);
}
}
/**
* Execute the query and send the result to out. Attempt to
@ -217,40 +197,44 @@ public class SparqlQueryServlet extends BaseEditController {
* @throws RDFServiceException
* @throws
*/
private void doModelResultQuery( Query query,
RDFService rdfService, String rdfResultFormatParam,
HttpServletResponse response) throws IOException, RDFServiceException{
//config drives what formats and conversions to use
ModelFormatConfig config = modelFormats.get( rdfResultFormatParam );
InputStream rawResult = null;
if( query.isConstructType() ){
rawResult= rdfService.sparqlConstructQuery( query.toString(), config.wireFormat );
}else if ( query.isDescribeType() ){
rawResult = rdfService.sparqlDescribeQuery( query.toString(), config.wireFormat );
}
response.setContentType( config.responseMimeType );
if( config.converstionFromWireFormat ){
Model resultModel = RDFServiceUtils.parseModel( rawResult, config.wireFormat );
if( "JSON-LD".equals( config.jenaResponseFormat )){
//since jena 2.6.4 doesn't support JSON-LD we do it
try {
JenaRDFParser parser = new JenaRDFParser();
Object json = JSONLD.fromRDF(resultModel, parser);
JSONUtils.write(response.getWriter(), json);
} catch (JSONLDProcessingError e) {
throw new RDFServiceException("Could not convert from Jena model to JSON-LD", e);
private void doConstruct( HttpServletResponse response,
Query query,
ModelFormatConfig formatConfig,
RDFService rdfService
) throws ServletException{
try{
InputStream rawResult = null;
if( query.isConstructType() ){
rawResult= rdfService.sparqlConstructQuery( query.toString(), formatConfig.wireFormat );
}else if ( query.isDescribeType() ){
rawResult = rdfService.sparqlDescribeQuery( query.toString(), formatConfig.wireFormat );
}
response.setContentType( formatConfig.responseMimeType );
if( formatConfig.converstionFromWireFormat ){
Model resultModel = RDFServiceUtils.parseModel( rawResult, formatConfig.wireFormat );
if( "JSON-LD".equals( formatConfig.jenaResponseFormat )){
//since jena 2.6.4 doesn't support JSON-LD we do it
try {
JenaRDFParser parser = new JenaRDFParser();
Object json = JSONLD.fromRDF(resultModel, parser);
JSONUtils.write(response.getWriter(), json);
} catch (JSONLDProcessingError e) {
throw new RDFServiceException("Could not convert from Jena model to JSON-LD", e);
}
}else{
OutputStream out = response.getOutputStream();
resultModel.write(out, formatConfig.jenaResponseFormat );
}
}else{
OutputStream out = response.getOutputStream();
resultModel.write(out, config.jenaResponseFormat );
pipe( rawResult, out );
}
}else{
OutputStream out = response.getOutputStream();
pipe( rawResult, out );
}catch( IOException ex){
throw new ServletException("could not run SPARQL CONSTRUCT",ex);
} catch (RDFServiceException ex) {
throw new ServletException("could not run SPARQL CONSTRUCT",ex);
}
}
@ -362,13 +346,35 @@ public class SparqlQueryServlet extends BaseEditController {
rd.forward(req,res);
}
/** Simple boolean value to improve the legibility of configurations. */
private final static boolean CONVERT = true;
public static class ModelFormatConfig{
/** Simple value to improve the legibility of configurations. */
private final static String NO_CONVERSION = null;
public static class FormatConfig{
public String valueFromForm;
public boolean converstionFromWireFormat;
public RDFService.ModelSerializationFormat wireFormat;
public String jenaResponseFormat;
public String responseMimeType;
}
private static ModelFormatConfig[] fmts = {
new ModelFormatConfig("RDF/XML",
!CONVERT, ModelSerializationFormat.RDFXML, NO_CONVERSION, "application/rdf+xml" ),
new ModelFormatConfig("RDF/XML-ABBREV",
CONVERT, ModelSerializationFormat.N3, "RDF/XML-ABBREV", "application/rdf+xml" ),
new ModelFormatConfig("N3",
!CONVERT, ModelSerializationFormat.N3, NO_CONVERSION, "text/n3" ),
new ModelFormatConfig("N-TRIPLE",
!CONVERT, ModelSerializationFormat.NTRIPLE, NO_CONVERSION, "text/plain" ),
new ModelFormatConfig("TTL",
CONVERT, ModelSerializationFormat.N3, "TTL", "application/x-turtle" ),
new ModelFormatConfig("JSON-LD",
CONVERT, ModelSerializationFormat.N3, "JSON-LD", "application/javascript" ) };
public static class ModelFormatConfig extends FormatConfig{
public RDFService.ModelSerializationFormat wireFormat;
public String jenaResponseFormat;
public ModelFormatConfig( String valueFromForm,
boolean converstionFromWireFormat,
@ -383,12 +389,20 @@ public class SparqlQueryServlet extends BaseEditController {
}
}
public static class RSFormatConfig{
public String valueFromForm;
public boolean converstionFromWireFormat;
private static RSFormatConfig[] rsfs = {
new RSFormatConfig( "RS_XML",
!CONVERT, ResultFormat.XML, null, "text/xml"),
new RSFormatConfig( "RS_TEXT",
!CONVERT, ResultFormat.TEXT, null, "text/plain"),
new RSFormatConfig( "vitro:csv",
!CONVERT, ResultFormat.CSV, null, "text/csv"),
new RSFormatConfig( "RS_JSON",
!CONVERT, ResultFormat.JSON, null, "application/javascript") };
public static class RSFormatConfig extends FormatConfig{
public ResultFormat wireFormat;
public ResultSetFormat jenaResponseFormat;
public String responseMimeType;
public RSFormatConfig( String valueFromForm,
boolean converstionFromWireFormat,
@ -403,14 +417,48 @@ public class SparqlQueryServlet extends BaseEditController {
}
}
static{
/* move the lists of configs into maps for easy lookup */
static{
HashMap<String, Float> map = new HashMap<String, Float>();
/* move the lists of configurations into maps for easy lookup
* by both MIME content type and the parameters from the form */
for( RSFormatConfig rsfc : rsfs ){
rsFormats.put( rsfc.valueFromForm, rsfc );
rsFormats.put( rsfc.responseMimeType, rsfc);
map.put(rsfc.responseMimeType, 1.0f);
}
for( ModelFormatConfig mfc : fmts ){
modelFormats.put( mfc.valueFromForm, mfc);
}
modelFormats.put(mfc.responseMimeType, mfc);
map.put(mfc.responseMimeType, 1.0f);
}
ACCEPTED_CONTENT_TYPES = Collections.unmodifiableMap(map);
}
/**
 * Perform content negotiation against the HTTP "Accept" header.
 *
 * @param acceptHeader raw value of the Accept header, may be null.
 * @return the negotiated MIME type, or null if there is no accept header
 *         or if no acceptable content type can be agreed on.
 */
protected String checkForContentType( String acceptHeader ) {
    if (acceptHeader == null) {
        return null;
    }
    try {
        Map<String, Float> clientTypes = ContentType.getTypesAndQ(acceptHeader);
        String best = ContentType.getBestContentType(clientTypes, ACCEPTED_CONTENT_TYPES);
        if (ACCEPTED_CONTENT_TYPES.containsKey(best)) {
            return best;
        }
    } catch (Throwable th) {
        // Negotiation must never break query handling; log and fall back to null.
        log.error("Problem while checking accept header ", th);
    }
    return null;
}
}

View file

@ -205,7 +205,15 @@ public class VitroRequest extends HttpServletRequestWrapper {
@Override
public String[] getParameterValues(String name) {
return _req.getParameterValues(name);
}
}
/** Stash the language-neutral union full model as a request attribute. */
public void setLanguageNeutralUnionFullModel(OntModel model) {
setAttribute("languageNeutralUnionFullModel", model);
}
/**
 * Retrieve the model stored by setLanguageNeutralUnionFullModel;
 * null if it was never set on this request.
 */
public OntModel getLanguageNeutralUnionFullModel() {
return (OntModel) getAttribute("languageNeutralUnionFullModel");
}
}

View file

@ -0,0 +1,235 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.controller.individual;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.servlet.ServletContext;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.hp.hpl.jena.datatypes.TypeMapper;
import com.hp.hpl.jena.datatypes.xsd.XSDDatatype;
import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.rdf.model.Literal;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.Property;
import com.hp.hpl.jena.rdf.model.RDFNode;
import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.rdf.model.ResourceFactory;
import com.hp.hpl.jena.rdf.model.Statement;
import com.hp.hpl.jena.rdf.model.StmtIterator;
import com.hp.hpl.jena.shared.Lock;
import com.hp.hpl.jena.vocabulary.RDF;
import com.hp.hpl.jena.vocabulary.RDFS;
import edu.cornell.mannlib.vitro.webapp.beans.DataPropertyStatement;
import edu.cornell.mannlib.vitro.webapp.beans.Individual;
import edu.cornell.mannlib.vitro.webapp.beans.ObjectPropertyStatement;
import edu.cornell.mannlib.vitro.webapp.beans.VClass;
import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest;
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.responsevalues.RdfResponseValues;
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.responsevalues.ResponseValues;
import edu.cornell.mannlib.vitro.webapp.dao.VitroVocabulary;
import edu.cornell.mannlib.vitro.webapp.utils.jena.ExtendedLinkedDataUtils;
import edu.cornell.mannlib.vitro.webapp.utils.jena.JenaOutputUtils;
import edu.cornell.mannlib.vitro.webapp.web.ContentType;
/**
 * Assembles the "extended" style of Linked Data RDF for an Individual:
 * recursively follows object properties (to a fixed depth) of classes
 * flagged with vitro:extendedLinkedData, and optionally appends
 * rich-export data built from SPARQL query files under /WEB-INF/rich-export/.
 *
 * TODO Keep this around until release 1.7, in case anyone is relying on it.
 */
@Deprecated
public class ExtendedRdfAssembler {
    private static final Log log = LogFactory
            .getLog(ExtendedRdfAssembler.class);

    /** Webapp directory holding the rich-export SPARQL query files. */
    private static final String RICH_EXPORT_ROOT = "/WEB-INF/rich-export/";
    /** Rich export is only attempted for individuals of this class. */
    private static final String PERSON_CLASS_URI = "http://xmlns.com/foaf/0.1/Person";
    /** "include" parameter value meaning "use every rich-export subdirectory". */
    private static final String INCLUDE_ALL = "all";

    // Prefix-to-namespace lookup used to build vocabulary URIs below.
    @SuppressWarnings("serial")
    private static final Map<String, String> namespaces = new HashMap<String, String>() {{
        put("display", VitroVocabulary.DISPLAY);
        put("vitro", VitroVocabulary.vitroURI);
        put("vitroPublic", VitroVocabulary.VITRO_PUBLIC);
    }};

    // A class asserted to have vitro:extendedLinkedData "true"^^xsd:boolean
    // marks its instances for recursive inclusion in the output.
    private static final Property extendedLinkedDataProperty = ResourceFactory.createProperty(namespaces.get("vitro") + "extendedLinkedData");
    private static final Literal xsdTrue = ResourceFactory.createTypedLiteral("true", XSDDatatype.XSDboolean);

    private final VitroRequest vreq;
    private final ServletContext ctx;
    private final Individual individual;
    private final ContentType rdfFormat;

    public ExtendedRdfAssembler(VitroRequest vreq, Individual individual,
            ContentType rdfFormat) {
        this.vreq = vreq;
        this.ctx = vreq.getSession().getServletContext();
        this.individual = individual;
        this.rdfFormat = rdfFormat;
    }

    /**
     * Build the RDF description of the individual and wrap it, together with
     * the requested serialization format, in a ResponseValues object.
     */
    public ResponseValues assembleRdf() {
        OntModel ontModel = vreq.getJenaOntModel();
        String[] includes = vreq.getParameterValues("include");
        Model newModel = getRDF(individual, ontModel, ModelFactory.createDefaultModel(), 0, includes);
        JenaOutputUtils.setNameSpacePrefixes(newModel, vreq.getWebappDaoFactory());
        return new RdfResponseValues(rdfFormat, newModel);
    }

    /**
     * Copy the entity's data- and object-property statements into newModel,
     * recursing (at most 5 levels deep) into objects whose classes opt in
     * via vitro:extendedLinkedData. At depth 0, also appends any requested
     * rich-export data for foaf:Person individuals.
     */
    private Model getRDF(Individual entity, OntModel contextModel, Model newModel, int recurseDepth, String[] includes) {
        Resource subj = newModel.getResource(entity.getURI());
        List<DataPropertyStatement> dstates = entity.getDataPropertyStatements();
        TypeMapper typeMapper = TypeMapper.getInstance();
        for (DataPropertyStatement ds: dstates) {
            Property dp = newModel.getProperty(ds.getDatapropURI());
            Literal lit = null;
            // Preserve language tag or datatype on the literal when present.
            if ((ds.getLanguage()) != null && (ds.getLanguage().length()>0)) {
                lit = newModel.createLiteral(ds.getData(),ds.getLanguage());
            } else if ((ds.getDatatypeURI() != null) && (ds.getDatatypeURI().length()>0)) {
                lit = newModel.createTypedLiteral(ds.getData(),typeMapper.getSafeTypeByName(ds.getDatatypeURI()));
            } else {
                lit = newModel.createLiteral(ds.getData());
            }
            newModel.add(newModel.createStatement(subj, dp, lit));
        }
        if (recurseDepth < 5) {
            List<ObjectPropertyStatement> ostates = entity.getObjectPropertyStatements();
            for (ObjectPropertyStatement os: ostates) {
                Property prop = newModel.getProperty(os.getPropertyURI());
                Resource obj = newModel.getResource(os.getObjectURI());
                newModel.add(newModel.createStatement(subj, prop, obj));
                if ( includeInLinkedData(obj, contextModel)) {
                    // Object's class opted in: describe the object recursively.
                    newModel.add(getRDF(os.getObject(), contextModel, newModel, recurseDepth + 1, includes));
                } else {
                    // Otherwise just include the object's rdfs:label.
                    contextModel.enterCriticalSection(Lock.READ);
                    try {
                        newModel.add(contextModel.listStatements(obj, RDFS.label, (RDFNode)null));
                    } finally {
                        contextModel.leaveCriticalSection();
                    }
                }
            }
        }
        newModel = getLabelAndTypes(entity, contextModel, newModel );
        newModel = getStatementsWithUntypedProperties(subj, contextModel, vreq.getAssertionsOntModel(), newModel);

        //bdc34: The following code adds all triples where entity is the Subject.
//      contextModel.enterCriticalSection(Lock.READ);
//      try {
//          StmtIterator iter = contextModel.listStatements(subj, (Property) null, (RDFNode) null);
//          while (iter.hasNext()) {
//              Statement stmt = iter.next();
//              if (!newModel.contains(stmt)) {
//                  newModel.add(stmt);
//              }
//          }
//      } finally {
//          contextModel.leaveCriticalSection();
//      }

        // Rich export: only at the top level, only when "include" params exist,
        // and only for foaf:Person individuals.
        if (recurseDepth == 0 && includes != null && entity.isVClass(PERSON_CLASS_URI)) {
            for (String include : includes) {
                String rootDir = null;
                if (INCLUDE_ALL.equals(include)) {
                    rootDir = RICH_EXPORT_ROOT;
                } else {
                    rootDir = RICH_EXPORT_ROOT + include + "/";
                }
                long start = System.currentTimeMillis();
                Model extendedModel = ExtendedLinkedDataUtils.createModelFromQueries(ctx, rootDir, contextModel, entity.getURI());
                long elapsedTimeMillis = System.currentTimeMillis()-start;
                log.info("Time to create rich export model: msecs = " + elapsedTimeMillis);
                newModel.add(extendedModel);
            }
        }
        return newModel;
    }

    /**
     * True when any rdf:type of the object is a class asserted (in
     * contextModel) to have vitro:extendedLinkedData "true"^^xsd:boolean.
     */
    public static boolean includeInLinkedData(Resource object, Model contextModel) {
        boolean retval = false;
        contextModel.enterCriticalSection(Lock.READ);
        try {
            StmtIterator iter = contextModel.listStatements(object, RDF.type, (RDFNode)null);
            while (iter.hasNext()) {
                Statement stmt = iter.next();
                if (stmt.getObject().isResource() && contextModel.contains(stmt.getObject().asResource(), extendedLinkedDataProperty, xsdTrue)) {
                    retval = true;
                    break;
                }
            }
        } finally {
            contextModel.leaveCriticalSection();
        }
        return retval;
    }

    /* Get the properties that are difficult to get via a filtered WebappDaoFactory. */
    private Model getLabelAndTypes(Individual entity, Model ontModel, Model newModel){
        // rdf:type triples come from the entity bean ...
        for( VClass vclass : entity.getVClasses()){
            newModel.add(newModel.getResource(entity.getURI()), RDF.type, newModel.getResource(vclass.getURI()));
        }
        // ... while rdfs:label triples come straight from the context model.
        ontModel.enterCriticalSection(Lock.READ);
        try {
            newModel.add(ontModel.listStatements(ontModel.getResource(entity.getURI()), RDFS.label, (RDFNode)null));
        } finally {
            ontModel.leaveCriticalSection();
        }
        return newModel;
    }

    /* This method adds in statements in which the property does not
     * have an rdf type in the asserted model.
     * This was added for release 1.5 to handle cases such as the
     * reasoning-plugin inferred dcterms:creator assertion
     */
    private Model getStatementsWithUntypedProperties(Resource subject, OntModel contextModel, OntModel assertionsModel, Model newModel) {
        contextModel.enterCriticalSection(Lock.READ);
        try {
            StmtIterator iter = contextModel.listStatements(subject, (Property) null, (RDFNode) null);
            while (iter.hasNext()) {
                Statement stmt = iter.next();
                Property property = stmt.getPredicate();
                // Check the assertions model inside its own critical section.
                assertionsModel.enterCriticalSection(Lock.READ);
                try {
                    if (!assertionsModel.contains(property, RDF.type) && !newModel.contains(stmt)) {
                        newModel.add(stmt);
                    }
                } finally {
                    assertionsModel.leaveCriticalSection();
                }
            }
        } finally {
            contextModel.leaveCriticalSection();
        }
        return newModel;
    }
}

View file

@ -14,6 +14,7 @@ import javax.servlet.http.HttpServletResponse;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import edu.cornell.mannlib.vitro.webapp.config.ConfigurationProperties;
import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest;
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.FreemarkerHttpServlet;
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.responsevalues.ExceptionResponseValues;
@ -29,11 +30,15 @@ public class IndividualController extends FreemarkerHttpServlet {
.getLog(IndividualController.class);
private static final String TEMPLATE_HELP = "individual-help.ftl";
@Deprecated
private static final String PROPERTY_EXTENDED_LOD = "serveExtendedLinkedData";
/**
* Use this map to decide which MIME type is suited for the "accept" header.
*/
public static final Map<String, Float> ACCEPTED_CONTENT_TYPES = initializeContentTypes();
private static Map<String, Float> initializeContentTypes() {
HashMap<String, Float> map = new HashMap<String, Float>();
map.put(HTML_MIMETYPE, 0.5f);
@ -82,9 +87,15 @@ public class IndividualController extends FreemarkerHttpServlet {
* If they are asking for RDF using the preferred URL, give it
* to them.
*/
return new IndividualRdfAssembler(vreq,
requestInfo.getIndividual(), requestInfo.getRdfFormat())
.assembleRdf();
if (useExtendedLOD(vreq)) {
return new ExtendedRdfAssembler(vreq,
requestInfo.getIndividual(),
requestInfo.getRdfFormat()).assembleRdf();
} else {
return new IndividualRdfAssembler(vreq,
requestInfo.getIndividual().getURI(),
requestInfo.getRdfFormat()).assembleRdf();
}
default:
/*
* Otherwise, prepare an HTML response for the requested
@ -113,6 +124,11 @@ public class IndividualController extends FreemarkerHttpServlet {
HttpServletResponse.SC_NOT_FOUND);
}
/**
 * True when the deprecated "serveExtendedLinkedData" configuration
 * property is set to "true"; any other value (or absence) yields false.
 */
private boolean useExtendedLOD(HttpServletRequest req) {
    String setting = ConfigurationProperties.getBean(req)
            .getProperty(PROPERTY_EXTENDED_LOD);
    return Boolean.valueOf(setting);
}
@Override
public void doPost(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {

View file

@ -2,233 +2,270 @@
package edu.cornell.mannlib.vitro.webapp.controller.individual;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.HashSet;
import java.util.Set;
import javax.servlet.ServletContext;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.hp.hpl.jena.datatypes.TypeMapper;
import com.hp.hpl.jena.datatypes.xsd.XSDDatatype;
import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.rdf.model.Literal;
import com.hp.hpl.jena.ontology.OntModelSpec;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.Property;
import com.hp.hpl.jena.rdf.model.RDFNode;
import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.rdf.model.ResourceFactory;
import com.hp.hpl.jena.rdf.model.Statement;
import com.hp.hpl.jena.rdf.model.StmtIterator;
import com.hp.hpl.jena.shared.Lock;
import com.hp.hpl.jena.vocabulary.RDF;
import com.hp.hpl.jena.vocabulary.RDFS;
import edu.cornell.mannlib.vitro.webapp.auth.policy.PolicyHelper;
import edu.cornell.mannlib.vitro.webapp.auth.requestedAction.display.DisplayDataPropertyStatement;
import edu.cornell.mannlib.vitro.webapp.auth.requestedAction.display.DisplayObjectPropertyStatement;
import edu.cornell.mannlib.vitro.webapp.auth.requestedAction.ifaces.RequestedAction;
import edu.cornell.mannlib.vitro.webapp.beans.DataPropertyStatement;
import edu.cornell.mannlib.vitro.webapp.beans.Individual;
import edu.cornell.mannlib.vitro.webapp.beans.DataPropertyStatementImpl;
import edu.cornell.mannlib.vitro.webapp.beans.ObjectPropertyStatement;
import edu.cornell.mannlib.vitro.webapp.beans.VClass;
import edu.cornell.mannlib.vitro.webapp.beans.ObjectPropertyStatementImpl;
import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest;
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.responsevalues.RdfResponseValues;
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.responsevalues.ResponseValues;
import edu.cornell.mannlib.vitro.webapp.dao.VitroVocabulary;
import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactory;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException;
import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.RDFServiceUtils;
import edu.cornell.mannlib.vitro.webapp.utils.jena.ExtendedLinkedDataUtils;
import edu.cornell.mannlib.vitro.webapp.utils.jena.JenaOutputUtils;
import edu.cornell.mannlib.vitro.webapp.web.ContentType;
/**
* TODO See where this can be improved.
* Write a smaller set of Linked Data. It consists of:
*
* 1) The data properties of the entity
*
* 2) The object properties in which the entity is either subject or object
*
* 3) The labels and types of the objects that are linked by those properties.
*
* If the request comes with an Accept-language header, use an appropriately
* language-aware data source to filter the data properties and labels.
* Otherwise, show all triples, regardless of language.
*
* Filter the result based on the policy, removing any triples that should not
* be displayed to the public (or to the user, if logged in). Also remove any
* objects which can only be reached by excluded triples.
*
* ----------------
*
* This still permits the use of rich export, by "include" options on the
* request. The only difference from earlier implementations is that the result
* may be made language-aware.
*/
public class IndividualRdfAssembler {
private static final Log log = LogFactory
.getLog(IndividualRdfAssembler.class);
private static final String RICH_EXPORT_ROOT = "/WEB-INF/rich-export/";
private static final String PERSON_CLASS_URI = "http://xmlns.com/foaf/0.1/Person";
private static final String INCLUDE_ALL = "all";
@SuppressWarnings("serial")
private static final Map<String, String> namespaces = new HashMap<String, String>() {{
put("display", VitroVocabulary.DISPLAY);
put("vitro", VitroVocabulary.vitroURI);
put("vitroPublic", VitroVocabulary.VITRO_PUBLIC);
}};
private static final Property extendedLinkedDataProperty = ResourceFactory.createProperty(namespaces.get("vitro") + "extendedLinkedData");
private static final Literal xsdTrue = ResourceFactory.createTypedLiteral("true", XSDDatatype.XSDboolean);
private static final String RICH_EXPORT_ROOT = "/WEB-INF/rich-export/";
private static final String INCLUDE_ALL = "all";
private final VitroRequest vreq;
private final ServletContext ctx;
private final Individual individual;
private final String individualUri;
private final ContentType rdfFormat;
private final String[] richExportIncludes;
private final RDFService rdfService;
private final OntModel contentModel;
private final WebappDaoFactory wadf;
public IndividualRdfAssembler(VitroRequest vreq, Individual individual,
public IndividualRdfAssembler(VitroRequest vreq, String individualUri,
ContentType rdfFormat) {
this.vreq = vreq;
this.ctx = vreq.getSession().getServletContext();
this.individual = individual;
this.rdfFormat = rdfFormat;
}
/**
* @return
*/
public ResponseValues assembleRdf() {
OntModel ontModel = vreq.getJenaOntModel();
this.individualUri = individualUri;
this.rdfFormat = rdfFormat;
String[] includes = vreq.getParameterValues("include");
Model newModel = getRDF(individual, ontModel, ModelFactory.createDefaultModel(), 0, includes);
JenaOutputUtils.setNameSpacePrefixes(newModel, vreq.getWebappDaoFactory());
this.richExportIncludes = (includes == null) ? new String[0] : includes;
if (isLanguageAware()) {
this.rdfService = vreq.getRDFService();
this.contentModel = vreq.getJenaOntModel();
} else {
this.rdfService = vreq.getUnfilteredRDFService();
this.contentModel = vreq.getLanguageNeutralUnionFullModel();
}
wadf = vreq.getWebappDaoFactory();
}
/**
 * Build the full RDF response for the individual: the policy-filtered
 * statements plus any requested rich-export RDF, with namespace
 * prefixes applied.
 */
public ResponseValues assembleRdf() {
    OntModel combined = getRdf();
    Model richExport = getRichExportRdf();
    combined.add(richExport);
    JenaOutputUtils.setNameSpacePrefixes(combined, wadf);
    return new RdfResponseValues(rdfFormat, combined);
}
private Model getRDF(Individual entity, OntModel contextModel, Model newModel, int recurseDepth, String[] includes) {
Resource subj = newModel.getResource(entity.getURI());
List<DataPropertyStatement> dstates = entity.getDataPropertyStatements();
TypeMapper typeMapper = TypeMapper.getInstance();
for (DataPropertyStatement ds: dstates) {
Property dp = newModel.getProperty(ds.getDatapropURI());
Literal lit = null;
if ((ds.getLanguage()) != null && (ds.getLanguage().length()>0)) {
lit = newModel.createLiteral(ds.getData(),ds.getLanguage());
} else if ((ds.getDatatypeURI() != null) && (ds.getDatatypeURI().length()>0)) {
lit = newModel.createTypedLiteral(ds.getData(),typeMapper.getSafeTypeByName(ds.getDatatypeURI()));
} else {
lit = newModel.createLiteral(ds.getData());
}
newModel.add(newModel.createStatement(subj, dp, lit));
}
if (recurseDepth < 5) {
List<ObjectPropertyStatement> ostates = entity.getObjectPropertyStatements();
for (ObjectPropertyStatement os: ostates) {
Property prop = newModel.getProperty(os.getPropertyURI());
Resource obj = newModel.getResource(os.getObjectURI());
newModel.add(newModel.createStatement(subj, prop, obj));
if ( includeInLinkedData(obj, contextModel)) {
newModel.add(getRDF(os.getObject(), contextModel, newModel, recurseDepth + 1, includes));
} else {
contextModel.enterCriticalSection(Lock.READ);
try {
newModel.add(contextModel.listStatements(obj, RDFS.label, (RDFNode)null));
} finally {
contextModel.leaveCriticalSection();
}
}
}
}
newModel = getLabelAndTypes(entity, contextModel, newModel );
newModel = getStatementsWithUntypedProperties(subj, contextModel, vreq.getAssertionsOntModel(), newModel);
//bdc34: The following code adds all triples where entity is the Subject.
// contextModel.enterCriticalSection(Lock.READ);
// try {
// StmtIterator iter = contextModel.listStatements(subj, (Property) null, (RDFNode) null);
// while (iter.hasNext()) {
// Statement stmt = iter.next();
// if (!newModel.contains(stmt)) {
// newModel.add(stmt);
// }
// }
// } finally {
// contextModel.leaveCriticalSection();
// }
if (recurseDepth == 0 && includes != null && entity.isVClass(PERSON_CLASS_URI)) {
for (String include : includes) {
String rootDir = null;
if (INCLUDE_ALL.equals(include)) {
rootDir = RICH_EXPORT_ROOT;
} else {
rootDir = RICH_EXPORT_ROOT + include + "/";
}
long start = System.currentTimeMillis();
Model extendedModel = ExtendedLinkedDataUtils.createModelFromQueries(ctx, rootDir, contextModel, entity.getURI());
long elapsedTimeMillis = System.currentTimeMillis()-start;
log.info("Time to create rich export model: msecs = " + elapsedTimeMillis);
newModel.add(extendedModel);
}
/**
 * Language-aware filtering applies only when the client sent a non-empty
 * Accept-Language header.
 */
private boolean isLanguageAware() {
    String acceptLanguage = vreq.getHeader("Accept-Language");
    return !StringUtils.isEmpty(acceptLanguage);
}
/**
 * Gather the RDF about this individual — its own statements plus the
 * labels and types of related objects — and filter it by policy.
 */
private OntModel getRdf() {
    OntModel result = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM);
    result.add(getStatementsAboutEntity());
    result.add(getLabelsAndTypesOfRelatedObjects());
    filterByPolicy(result);
    return result;
}
/**
 * Fetch every statement in which the entity appears, whether as the
 * subject or as the object.
 */
private Model getStatementsAboutEntity() {
    String asSubject = String.format(
            "CONSTRUCT { <%1$s> ?predicate ?object . } "
                    + "WHERE { <%1$s> ?predicate ?object } ", individualUri);
    String asObject = String.format(
            "CONSTRUCT { ?s ?predicate <%1$s> . } "
                    + "WHERE { ?s ?predicate <%1$s> } ", individualUri);
    Model m = runConstructQuery(asSubject);
    m.add(runConstructQuery(asObject));
    return m;
}
/**
 * For every object related to the entity, fetch its rdf:type and
 * rdfs:label statements.
 */
private Model getLabelsAndTypesOfRelatedObjects() {
    String typesQuery = String.format(
            "CONSTRUCT { ?object <%2$s> ?type . } "
                    + "WHERE { <%1$s> ?predicate ?object ."
                    + " ?object <%2$s> ?type . } ", individualUri, RDF.type);
    String labelsQuery = String.format(
            "CONSTRUCT { ?object <%2$s> ?label . } "
                    + "WHERE { <%1$s> ?predicate ?object ."
                    + " ?object <%2$s> ?label . } ", individualUri, RDFS.label);
    Model related = runConstructQuery(typesQuery);
    related.add(runConstructQuery(labelsQuery));
    return related;
}
/**
 * Apply the display policy: first drop forbidden triples, then drop any
 * statements about objects that are no longer reachable through the
 * permitted ones.
 */
private void filterByPolicy(OntModel o) {
    removeProhibitedTriples(o);
    removeOrphanedObjects(o, determineAccessibleUris(o));
}
/**
* Remove the triples that we aren't allowed to see.
*/
private void removeProhibitedTriples(OntModel o) {
StmtIterator stmts = o.listStatements();
while (stmts.hasNext()) {
Statement stmt = stmts.next();
String subjectUri = stmt.getSubject().getURI();
String predicateUri = stmt.getPredicate().getURI();
if (stmt.getObject().isLiteral()) {
String value = stmt.getObject().asLiteral().getString();
DataPropertyStatement dps = new DataPropertyStatementImpl(
subjectUri, predicateUri, value);
RequestedAction ddps = new DisplayDataPropertyStatement(dps);
if (!PolicyHelper.isAuthorizedForActions(vreq, ddps)) {
log.debug("not authorized: " + ddps);
stmts.remove();
}
} else if (stmt.getObject().isURIResource()) {
String objectUri = stmt.getObject().asResource().getURI();
ObjectPropertyStatement ops = new ObjectPropertyStatementImpl(
subjectUri, predicateUri, objectUri);
RequestedAction dops = new DisplayObjectPropertyStatement(ops);
if (!PolicyHelper.isAuthorizedForActions(vreq, dops)) {
log.debug("not authorized: " + dops);
stmts.remove();
}
} else {
log.warn("blank node: + stmt");
stmts.remove();
}
}
return newModel;
}
}
/**
 * Does any rdf:type of this object carry the vitro extendedLinkedData
 * flag set to (boolean) true in the context model?
 */
public static boolean includeInLinkedData(Resource object, Model contextModel) {
    contextModel.enterCriticalSection(Lock.READ);
    try {
        StmtIterator iter = contextModel.listStatements(object, RDF.type, (RDFNode) null);
        while (iter.hasNext()) {
            RDFNode typeNode = iter.next().getObject();
            if (typeNode.isResource()
                    && contextModel.contains(typeNode.asResource(),
                            extendedLinkedDataProperty, xsdTrue)) {
                return true;
            }
        }
        return false;
    } finally {
        contextModel.leaveCriticalSection();
    }
}
/**
* Collect the URIs of all objects that are accessible through permitted
* triples.
*/
private Set<String> determineAccessibleUris(OntModel o) {
Resource i = o.getResource(individualUri);
Set<String> uris = new HashSet<>();
uris.add(individualUri);
/* Get the properties that are difficult to get via a filtered WebappDaoFactory. */
private Model getLabelAndTypes(Individual entity, Model ontModel, Model newModel){
for( VClass vclass : entity.getVClasses()){
newModel.add(newModel.getResource(entity.getURI()), RDF.type, newModel.getResource(vclass.getURI()));
}
ontModel.enterCriticalSection(Lock.READ);
StmtIterator stmts;
stmts = o.listStatements(i, null, (RDFNode) null);
while (stmts.hasNext()) {
Statement stmt = stmts.next();
if (stmt.getObject().isURIResource()) {
uris.add(stmt.getObject().asResource().getURI());
}
}
stmts = o.listStatements(null, null, i);
while (stmts.hasNext()) {
Statement stmt = stmts.next();
uris.add(stmt.getSubject().getURI());
}
return uris;
}
/**
 * Drop any statement whose subject is not among the URIs reachable
 * through permitted triples.
 */
private void removeOrphanedObjects(OntModel o, Set<String> okObjects) {
    for (StmtIterator stmts = o.listStatements(); stmts.hasNext();) {
        Statement stmt = stmts.next();
        if (okObjects.contains(stmt.getSubject().getURI())) {
            continue;
        }
        log.debug("removing orphan triple: " + stmt);
        stmts.remove();
    }
}
private Model runConstructQuery(String query) {
try {
newModel.add(ontModel.listStatements(ontModel.getResource(entity.getURI()), RDFS.label, (RDFNode)null));
} finally {
ontModel.leaveCriticalSection();
return RDFServiceUtils.parseModel(rdfService.sparqlConstructQuery(
query, RDFService.ModelSerializationFormat.N3),
RDFService.ModelSerializationFormat.N3);
} catch (RDFServiceException e) {
throw new RuntimeException(e);
}
return newModel;
}
/* This method adds in statements in which the property does not
* have an rdf type in the asserted model.
* This was added for release 1.5 to handle cases such as the
* reasoning-plugin inferred dcterms:creator assertion
*/
private Model getStatementsWithUntypedProperties(Resource subject, OntModel contextModel, OntModel assertionsModel, Model newModel) {
contextModel.enterCriticalSection(Lock.READ);
try {
StmtIterator iter = contextModel.listStatements(subject, (Property) null, (RDFNode) null);
while (iter.hasNext()) {
Statement stmt = iter.next();
Property property = stmt.getPredicate();
assertionsModel.enterCriticalSection(Lock.READ);
try {
if (!assertionsModel.contains(property, RDF.type) && !newModel.contains(stmt)) {
newModel.add(stmt);
}
} finally {
assertionsModel.leaveCriticalSection();
}
}
} finally {
contextModel.leaveCriticalSection();
}
return newModel;
}
}
/**
 * Run the rich-export queries for each requested "include" directory and
 * gather the results into a single model. "all" maps to the rich-export
 * root; anything else maps to a subdirectory of it.
 */
private Model getRichExportRdf() {
    Model richExportModel = ModelFactory.createDefaultModel();
    for (String include : richExportIncludes) {
        String rootDir = INCLUDE_ALL.equals(include)
                ? RICH_EXPORT_ROOT
                : RICH_EXPORT_ROOT + include + "/";
        long start = System.currentTimeMillis();
        richExportModel.add(ExtendedLinkedDataUtils.createModelFromQueries(
                ctx, rootDir, contentModel, individualUri));
        long elapsedTimeMillis = System.currentTimeMillis() - start;
        log.debug("Time to create rich export model: msecs = "
                + elapsedTimeMillis);
    }
    return richExportModel;
}
}

View file

@ -132,10 +132,11 @@ public class IndividualRequestAnalyzer {
* only provide a set of bytes.
*/
protected ContentType checkAcceptHeaderForLinkedDataRequest() {
String acceptHeader = vreq.getHeader("accept");
if (acceptHeader == null) {
return null;
}
String acceptHeader = vreq.getHeader("Accept");
if (acceptHeader == null)
acceptHeader = vreq.getHeader("accept");
if (acceptHeader == null)
return null;
try {
Map<String, Float> typesAndQ = ContentType

View file

@ -28,6 +28,14 @@ public interface WebappDaoFactory {
*/
public String checkURI(String uriStr, boolean checkUniqueness);
/**
* Check if a given URI string exists in the system:
* checks for the following conditions: URI found as subject in a statement or an object or as a property
* @param uriStr
* @return
*/
public boolean hasExistingURI(String uriStr);
public String getDefaultNamespace();
public Set<String> getNonuserNamespaces();

View file

@ -83,6 +83,10 @@ public class WebappDaoFactoryFiltering implements WebappDaoFactory {
return innerWebappDaoFactory.checkURI(uriStr, checkUniqueness);
}
// Delegates URI-existence checking straight to the wrapped factory; the
// result is a plain boolean, so no display filtering is applied here.
public boolean hasExistingURI(String uriStr) {
return innerWebappDaoFactory.hasExistingURI(uriStr);
}
public WebappDaoFactory getUserAwareDaoFactory(String userURI) {
//TODO: need to clone the filtering factory
return innerWebappDaoFactory.getUserAwareDaoFactory(userURI);

View file

@ -186,40 +186,58 @@ public class WebappDaoFactoryJena implements WebappDaoFactory {
errorMsg += (iri.violations(false).next())
.getShortMessage() + " ";
} else if (checkUniqueness) {
OntModel ontModel = ontModelSelector.getFullModel();
ontModel.enterCriticalSection(Lock.READ);
try {
Resource newURIAsRes = ResourceFactory.createResource(uriStr);
Property newURIAsProp = ResourceFactory.createProperty(uriStr);
StmtIterator closeIt = ontModel.listStatements(
newURIAsRes, null, (RDFNode)null);
if (closeIt.hasNext()) {
validURI = false;
errorMsg+="Not a valid URI. Please enter another URI. ";
errorMsg+=duplicateMsg;
}
if (validURI) {
closeIt = ontModel.listStatements(null, null, newURIAsRes);
if (closeIt.hasNext()) {
validURI = false;
errorMsg+=duplicateMsg;
}
}
if (validURI) {
closeIt = ontModel.listStatements(
null, newURIAsProp, (RDFNode)null);
if (closeIt.hasNext()) {
validURI = false;
errorMsg+=duplicateMsg;
}
}
} finally {
ontModel.leaveCriticalSection();
}
boolean existingURI = this.hasExistingURI(uriStr);
if(existingURI) {
errorMsg+="Not a valid URI. Please enter another URI. ";
errorMsg+=duplicateMsg;
//the original code included an extra line "Not a valid URI. Please enter another URI. "
//in the error message in addition to the duplicate error message in the case where the uri
//is in the subject position of any of the statements in the system - but not so where the
//uri was only in the object position or was a propery. In this code, the same error message
//is returned for all duplicate uris
}
}
return (errorMsg.length()>0) ? errorMsg : null;
}
/**
 * Check whether the URI is already in use anywhere in the full model:
 * as the subject of a statement, as the object of a statement, or as a
 * predicate.
 */
public boolean hasExistingURI(String uriStr) {
    OntModel ontModel = ontModelSelector.getFullModel();
    ontModel.enterCriticalSection(Lock.READ);
    try {
        Resource asResource = ResourceFactory.createResource(uriStr);
        Property asProperty = ResourceFactory.createProperty(uriStr);
        // Subject position.
        if (ontModel.listStatements(asResource, null, (RDFNode) null).hasNext()) {
            return true;
        }
        // Object position.
        if (ontModel.listStatements(null, null, asResource).hasNext()) {
            return true;
        }
        // Predicate position.
        if (ontModel.listStatements(null, asProperty, (RDFNode) null).hasNext()) {
            return true;
        }
        return false;
    } finally {
        ontModel.leaveCriticalSection();
    }
}
// Returns a factory copy that carries the acting user's URI; presumably
// used to attribute edits to that user — confirm against callers.
public WebappDaoFactory getUserAwareDaoFactory(String userURI) {
return new WebappDaoFactoryJena(this, userURI);
}

View file

@ -2,6 +2,9 @@
package edu.cornell.mannlib.vitro.webapp.filters;
import static edu.cornell.mannlib.vitro.webapp.servlet.setup.JenaDataSourceSetupBase.JENA_DB_MODEL;
import static edu.cornell.mannlib.vitro.webapp.servlet.setup.JenaDataSourceSetupBase.JENA_INF_MODEL;
import java.io.IOException;
import java.util.List;
import java.util.regex.Pattern;
@ -19,9 +22,12 @@ import javax.servlet.http.HttpServletResponse;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.hp.hpl.jena.graph.BulkUpdateHandler;
import com.hp.hpl.jena.graph.Graph;
import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.ontology.OntModelSpec;
import com.hp.hpl.jena.query.Dataset;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import edu.cornell.mannlib.vitro.webapp.auth.identifier.RequestIdentifiers;
@ -36,8 +42,8 @@ import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactory;
import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactoryConfig;
import edu.cornell.mannlib.vitro.webapp.dao.filtering.WebappDaoFactoryFiltering;
import edu.cornell.mannlib.vitro.webapp.dao.filtering.filters.HideFromDisplayByPolicyFilter;
import edu.cornell.mannlib.vitro.webapp.dao.jena.OntModelSelector;
import edu.cornell.mannlib.vitro.webapp.dao.jena.RDFServiceDataset;
import edu.cornell.mannlib.vitro.webapp.dao.jena.SpecialBulkUpdateHandlerGraph;
import edu.cornell.mannlib.vitro.webapp.dao.jena.WebappDaoFactorySDB;
import edu.cornell.mannlib.vitro.webapp.dao.jena.WebappDaoFactorySDB.SDBDatasetMode;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
@ -61,7 +67,7 @@ public class RequestModelsPrep implements Filter {
* parameters, e.g. "/vitro/index.jsp" "/vitro/themes/enhanced/css/edit.css"
*/
private final static Pattern[] skipPatterns = {
Pattern.compile(".*\\.(gif|GIF|jpg|jpeg)$"),
Pattern.compile(".*\\.(gif|GIF|jpg|jpeg|png|PNG)$"),
Pattern.compile(".*\\.css$"), Pattern.compile(".*\\.js$"),
Pattern.compile("/.*/themes/.*/site_icons/.*"),
Pattern.compile("/.*/images/.*") };
@ -130,28 +136,120 @@ public class RequestModelsPrep implements Filter {
HttpServletRequest req) {
VitroRequest vreq = new VitroRequest(req);
setRdfServicesAndDatasets(rawRdfService, vreq);
RDFService rdfService = vreq.getRDFService();
Dataset dataset = vreq.getDataset();
setRawModels(vreq, dataset);
// We need access to the language-ignorant version of this model.
// Grab it before it gets wrapped in language awareness.
vreq.setLanguageNeutralUnionFullModel(ModelAccess.on(vreq).getOntModel(ModelID.UNION_FULL));
wrapModelsWithLanguageAwareness(vreq);
setWebappDaoFactories(vreq, rdfService);
}
/**
* Set language-neutral and language-aware versions of the RdfService and
* Dataset.
*/
private void setRdfServicesAndDatasets(RDFService rawRdfService,
VitroRequest vreq) {
vreq.setUnfilteredRDFService(rawRdfService);
vreq.setUnfilteredDataset(new RDFServiceDataset(rawRdfService));
List<String> langs = getPreferredLanguages(req);
RDFService rdfService = addLanguageAwareness(langs, rawRdfService);
RDFService rdfService = addLanguageAwareness(vreq, rawRdfService);
vreq.setRDFService(rdfService);
Dataset dataset = new RDFServiceDataset(rdfService);
vreq.setDataset(dataset);
}
private void setRawModels(VitroRequest vreq, Dataset dataset) {
// These are memory-mapped (fast), and read-mostly (low contention), so
// just use the ones from the context.
useModelFromContext(vreq, ModelID.APPLICATION_METADATA);
useModelFromContext(vreq, ModelID.USER_ACCOUNTS);
useModelFromContext(vreq, ModelID.DISPLAY);
useModelFromContext(vreq, ModelID.DISPLAY_DISPLAY);
useModelFromContext(vreq, ModelID.DISPLAY_TBOX);
useModelFromContext(vreq, ModelID.BASE_TBOX);
useModelFromContext(vreq, ModelID.INFERRED_TBOX);
useModelFromContext(vreq, ModelID.UNION_TBOX);
WebappDaoFactoryConfig config = createWadfConfig(langs, req);
ModelAccess.on(vreq).setJenaOntModel(
ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM,
dataset.getDefaultModel()));
// Anything derived from the ABOX is not memory-mapped, so create
// versions from the short-term RDF service.
OntModel baseABoxModel = createNamedModelFromDataset(dataset,
JENA_DB_MODEL);
OntModel inferenceABoxModel = createNamedModelFromDataset(dataset,
JENA_INF_MODEL);
OntModel unionABoxModel = createCombinedBulkUpdatingModel(
baseABoxModel, inferenceABoxModel);
addLanguageAwarenessToRequestModel(req, ModelID.DISPLAY);
addLanguageAwarenessToRequestModel(req, ModelID.APPLICATION_METADATA);
addLanguageAwarenessToRequestModel(req, ModelID.UNION_TBOX);
addLanguageAwarenessToRequestModel(req, ModelID.UNION_FULL);
addLanguageAwarenessToRequestModel(req, ModelID.BASE_TBOX);
addLanguageAwarenessToRequestModel(req, ModelID.BASE_FULL);
OntModel baseFullModel = createCombinedBulkUpdatingModel(baseABoxModel,
ModelAccess.on(vreq).getOntModel(ModelID.BASE_TBOX));
OntModel inferenceFullModel = createCombinedModel(inferenceABoxModel,
ModelAccess.on(vreq).getOntModel(ModelID.INFERRED_TBOX));
OntModel unionFullModel = ModelFactory.createOntologyModel(
OntModelSpec.OWL_MEM, dataset.getDefaultModel());
ModelAccess.on(vreq).setOntModel(ModelID.BASE_ABOX, baseABoxModel);
ModelAccess.on(vreq).setOntModel(ModelID.INFERRED_ABOX, unionABoxModel);
ModelAccess.on(vreq)
.setOntModel(ModelID.UNION_ABOX, inferenceABoxModel);
ModelAccess.on(vreq).setOntModel(ModelID.BASE_FULL, baseFullModel);
ModelAccess.on(vreq).setOntModel(ModelID.INFERRED_FULL,
inferenceFullModel);
ModelAccess.on(vreq).setOntModel(ModelID.UNION_FULL, unionFullModel);
}
private void useModelFromContext(VitroRequest vreq, ModelID modelId) {
OntModel contextModel = ModelAccess.on(ctx).getOntModel(modelId);
ModelAccess.on(vreq).setOntModel(modelId, contextModel);
}
private OntModel createNamedModelFromDataset(Dataset dataset, String name) {
return ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM, dataset.getNamedModel(name));
}
private OntModel createCombinedModel(OntModel oneModel, OntModel otherModel) {
return ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM,
ModelFactory.createUnion(oneModel, otherModel));
}
// Build a union OntModel over the two models, but wrap the union graph so
// that it exposes the BASE model's BulkUpdateHandler (via
// SpecialBulkUpdateHandlerGraph) — presumably so bulk updates applied to
// the union land in the base graph; confirm against
// SpecialBulkUpdateHandlerGraph's implementation.
private OntModel createCombinedBulkUpdatingModel(OntModel baseModel,
OntModel otherModel) {
BulkUpdateHandler bulkUpdateHandler = baseModel.getGraph().getBulkUpdateHandler();
Graph unionGraph = ModelFactory.createUnion(baseModel, otherModel).getGraph();
Model unionModel = ModelFactory.createModelForGraph(
new SpecialBulkUpdateHandlerGraph(unionGraph, bulkUpdateHandler));
return ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM, unionModel);
}
private void wrapModelsWithLanguageAwareness(VitroRequest vreq) {
wrapModelWithLanguageAwareness(vreq, ModelID.DISPLAY);
wrapModelWithLanguageAwareness(vreq, ModelID.APPLICATION_METADATA);
wrapModelWithLanguageAwareness(vreq, ModelID.BASE_TBOX);
wrapModelWithLanguageAwareness(vreq, ModelID.UNION_TBOX);
wrapModelWithLanguageAwareness(vreq, ModelID.UNION_FULL);
wrapModelWithLanguageAwareness(vreq, ModelID.BASE_FULL);
}
// If language awareness is enabled, replace the identified request model
// with a language-filtering wrapper around it; otherwise leave the model
// untouched.
private void wrapModelWithLanguageAwareness(HttpServletRequest req,
ModelID id) {
if (isLanguageAwarenessEnabled()) {
OntModel unaware = ModelAccess.on(req).getOntModel(id);
OntModel aware = LanguageFilteringUtils
.wrapOntModelInALanguageFilter(unaware, req);
ModelAccess.on(req).setOntModel(id, aware);
}
}
private void setWebappDaoFactories(VitroRequest vreq, RDFService rdfService) {
WebappDaoFactoryConfig config = createWadfConfig(vreq);
WebappDaoFactory unfilteredWadf = new WebappDaoFactorySDB(rdfService,
ModelAccess.on(vreq).getUnionOntModelSelector(), config);
@ -175,14 +273,15 @@ public class RequestModelsPrep implements Filter {
.checkForModelSwitching(vreq, wadf);
HideFromDisplayByPolicyFilter filter = new HideFromDisplayByPolicyFilter(
RequestIdentifiers.getIdBundleForRequest(req),
RequestIdentifiers.getIdBundleForRequest(vreq),
ServletPolicyList.getPolicies(ctx));
WebappDaoFactoryFiltering filteredWadf = new WebappDaoFactoryFiltering(
switchedWadf, filter);
ModelAccess.on(vreq).setWebappDaoFactory(FactoryID.UNION, filteredWadf);
}
private WebappDaoFactoryConfig createWadfConfig(List<String> langs, HttpServletRequest req) {
private WebappDaoFactoryConfig createWadfConfig(HttpServletRequest req) {
List<String> langs = getPreferredLanguages(req);
WebappDaoFactoryConfig config = new WebappDaoFactoryConfig();
config.setDefaultNamespace(defaultNamespace);
config.setPreferredLanguages(langs);
@ -203,8 +302,9 @@ public class RequestModelsPrep implements Filter {
"true"));
}
private RDFService addLanguageAwareness(List<String> langs,
private RDFService addLanguageAwareness(HttpServletRequest req,
RDFService rawRDFService) {
List<String> langs = getPreferredLanguages(req);
if (isLanguageAwarenessEnabled()) {
return new LanguageFilteringRDFService(rawRDFService, langs);
} else {
@ -212,15 +312,6 @@ public class RequestModelsPrep implements Filter {
}
}
private void addLanguageAwarenessToRequestModel(HttpServletRequest req, ModelID id) {
if (isLanguageAwarenessEnabled()) {
OntModel unaware = ModelAccess.on(req.getSession()).getOntModel(id);
OntModel aware = LanguageFilteringUtils
.wrapOntModelInALanguageFilter(unaware, req);
ModelAccess.on(req).setOntModel(id, aware);
}
}
private boolean isStoreReasoned(ServletRequest req) {
String isStoreReasoned = ConfigurationProperties.getBean(req).getProperty(
"VitroConnection.DataSource.isStoreReasoned", "true");

View file

@ -44,6 +44,7 @@ import edu.cornell.mannlib.vitro.webapp.dao.IndividualDao;
import edu.cornell.mannlib.vitro.webapp.dao.VClassDao;
import edu.cornell.mannlib.vitro.webapp.dao.VClassGroupDao;
import edu.cornell.mannlib.vitro.webapp.dao.VitroVocabulary;
import edu.cornell.mannlib.vitro.webapp.i18n.I18n;
import edu.cornell.mannlib.vitro.webapp.search.IndexConstants;
import edu.cornell.mannlib.vitro.webapp.search.SearchException;
import edu.cornell.mannlib.vitro.webapp.search.VitroSearchTermNames;
@ -164,9 +165,9 @@ public class PagedSearchController extends FreemarkerHttpServlet {
log.debug("Query text is \""+ queryText + "\"");
String badQueryMsg = badQueryText( queryText );
String badQueryMsg = badQueryText( queryText, vreq );
if( badQueryMsg != null ){
return doFailedSearch(badQueryMsg, queryText, format);
return doFailedSearch(badQueryMsg, queryText, format, vreq);
}
SolrQuery query = getQuery(queryText, hitsPerPage, startIndex, vreq);
@ -176,26 +177,26 @@ public class PagedSearchController extends FreemarkerHttpServlet {
try {
response = solr.query(query);
} catch (Exception ex) {
String msg = makeBadSearchMessage(queryText, ex.getMessage());
String msg = makeBadSearchMessage(queryText, ex.getMessage(), vreq);
log.error("could not run Solr query",ex);
return doFailedSearch(msg, queryText, format);
return doFailedSearch(msg, queryText, format, vreq);
}
if (response == null) {
log.error("Search response was null");
return doFailedSearch("The search request contained errors.", queryText, format);
return doFailedSearch(I18n.text(vreq, "error_in_search_request"), queryText, format, vreq);
}
SolrDocumentList docs = response.getResults();
if (docs == null) {
log.error("Document list for a search was null");
return doFailedSearch("The search request contained errors.", queryText,format);
return doFailedSearch(I18n.text(vreq, "error_in_search_request"), queryText,format, vreq);
}
long hitCount = docs.getNumFound();
log.debug("Number of hits = " + hitCount);
if ( hitCount < 1 ) {
return doNoHits(queryText,format);
return doNoHits(queryText,format, vreq);
}
List<Individual> individuals = new ArrayList<Individual>(docs.size());
@ -275,7 +276,7 @@ public class PagedSearchController extends FreemarkerHttpServlet {
body.put("pagingLinks",
getPagingLinks(startIndex, hitsPerPage, hitCount,
vreq.getServletPath(),
pagingLinkParams));
pagingLinkParams, vreq));
if (startIndex != 0) {
body.put("prevPage", getPreviousPageLink(startIndex,
@ -339,12 +340,12 @@ public class PagedSearchController extends FreemarkerHttpServlet {
return startIndex;
}
private String badQueryText(String qtxt) {
private String badQueryText(String qtxt, VitroRequest vreq) {
if( qtxt == null || "".equals( qtxt.trim() ) )
return "Please enter a search term.";
return I18n.text(vreq, "enter_search_term");
if( qtxt.equals("*:*") )
return "Search term was invalid" ;
return I18n.text(vreq, "invalid_search_term") ;
return null;
}
@ -526,7 +527,7 @@ public class PagedSearchController extends FreemarkerHttpServlet {
public String getCount() { return Long.toString(count); }
}
protected static List<PagingLink> getPagingLinks(int startIndex, int hitsPerPage, long hitCount, String baseUrl, ParamMap params) {
protected static List<PagingLink> getPagingLinks(int startIndex, int hitsPerPage, long hitCount, String baseUrl, ParamMap params, VitroRequest vreq) {
List<PagingLink> pagingLinks = new ArrayList<PagingLink>();
@ -550,7 +551,7 @@ public class PagedSearchController extends FreemarkerHttpServlet {
pagingLinks.add(new PagingLink(pageNumber, baseUrl, params));
}
} else {
pagingLinks.add(new PagingLink("more...", baseUrl, params));
pagingLinks.add(new PagingLink(I18n.text(vreq, "paging_link_more"), baseUrl, params));
break;
}
}
@ -591,20 +592,20 @@ public class PagedSearchController extends FreemarkerHttpServlet {
return new ExceptionResponseValues(getTemplate(f,Result.ERROR), body, e);
}
private TemplateResponseValues doFailedSearch(String message, String querytext, Format f) {
private TemplateResponseValues doFailedSearch(String message, String querytext, Format f, VitroRequest vreq) {
Map<String, Object> body = new HashMap<String, Object>();
body.put("title", "Search for '" + querytext + "'");
body.put("title", I18n.text(vreq, "search_for", querytext));
if ( StringUtils.isEmpty(message) ) {
message = "Search failed.";
message = I18n.text(vreq, "search_failed");
}
body.put("message", message);
return new TemplateResponseValues(getTemplate(f,Result.ERROR), body);
}
private TemplateResponseValues doNoHits(String querytext, Format f) {
private TemplateResponseValues doNoHits(String querytext, Format f, VitroRequest vreq) {
Map<String, Object> body = new HashMap<String, Object>();
body.put("title", "Search for '" + querytext + "'");
body.put("message", "No matching results.");
body.put("title", I18n.text(vreq, "search_for", querytext));
body.put("message", I18n.text(vreq, "no_matching_results"));
return new TemplateResponseValues(getTemplate(f,Result.ERROR), body);
}
@ -613,7 +614,7 @@ public class PagedSearchController extends FreemarkerHttpServlet {
* @param queryText
* @param exceptionMsg
*/
private String makeBadSearchMessage(String querytext, String exceptionMsg){
private String makeBadSearchMessage(String querytext, String exceptionMsg, VitroRequest vreq){
String rv = "";
try{
//try to get the column in the search term that is causing the problems
@ -641,7 +642,8 @@ public class PagedSearchController extends FreemarkerHttpServlet {
if (post > i)
after = querytext.substring(i + 1, post);
rv = "The search term had an error near <span class='searchQuote'>"
rv = I18n.text(vreq, "search_term_error_near") +
" <span class='searchQuote'>"
+ before + "<span class='searchError'>" + querytext.charAt(i)
+ "</span>" + after + "</span>";
} catch (Throwable ex) {

View file

@ -88,6 +88,8 @@ public class ContentModelSetup extends JenaDataSourceSetupBase
} else {
checkForNamespaceMismatch( applicationMetadataModel, ctx );
}
RDFFilesLoader.loadEveryTimeFiles(ctx, "abox", baseABoxModel);
RDFFilesLoader.loadEveryTimeFiles(ctx, "tbox", baseTBoxModel);
log.info("Setting up full models");
OntModel baseFullModel = createCombinedBulkUpdatingModel(baseABoxModel, baseTBoxModel);